Lines matching emit
66 static inline void emit(const u32 insn, struct jit_ctx *ctx) in emit() function
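Every line below funnels through this one helper; the listing appears to come from the arm64 BPF JIT (arch/arm64/net/bpf_jit_comp.c). A minimal sketch of what the helper does, assuming the usual jit_ctx layout with an image buffer and an instruction index (paraphrased from memory, so treat it as a sketch rather than a verbatim copy):

    /* During the sizing pass ctx->image is NULL and only ctx->idx advances;
     * during the emission pass the 32-bit encoding is written out
     * little-endian at the current index. */
    static inline void emit(const u32 insn, struct jit_ctx *ctx)
    {
            if (ctx->image != NULL)
                    ctx->image[ctx->idx] = cpu_to_le32(insn);
            ctx->idx++;
    }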
82 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx); in emit_a64_mov_i()
84 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx); in emit_a64_mov_i()
86 emit(A64_MOVK(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
89 emit(A64_MOVZ(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
91 emit(A64_MOVK(is64, reg, hi, 16), ctx); in emit_a64_mov_i()
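These emit_a64_mov_i() calls are the standard AArch64 recipe for a 32-bit constant: one MOVN (move wide, inverted) or MOVZ (move wide, zeroing) for the dominant halfword pattern, plus at most one MOVK to patch the other halfword. A standalone rewrite of that decision as a plan printer (hypothetical demo code, not the kernel function):

    #include <stdio.h>
    #include <stdint.h>

    /* Print the sequence emit_a64_mov_i() would choose for a 32-bit
     * immediate: MOVN-based when the high halfword looks negative,
     * MOVZ-based otherwise, with MOVK patching the remaining halfword. */
    static void mov_i32_plan(int32_t val)
    {
            uint16_t hi = (uint32_t)val >> 16;
            uint16_t lo = (uint32_t)val & 0xffff;

            if (hi & 0x8000) {
                    if (hi == 0xffff) {
                            printf("movn w, #0x%04x\n", (uint16_t)~lo);
                    } else {
                            printf("movn w, #0x%04x, lsl #16\n", (uint16_t)~hi);
                            printf("movk w, #0x%04x\n", lo);
                    }
            } else {
                    printf("movz w, #0x%04x\n", lo);
                    if (hi)
                            printf("movk w, #0x%04x, lsl #16\n", hi);
            }
    }

    int main(void)
    {
            mov_i32_plan(42);          /* movz only   */
            mov_i32_plan(-5);          /* movn only   */
            mov_i32_plan(0x12345678);  /* movz + movk */
            return 0;
    }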
117 emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
119 emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
123 emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
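emit_a64_mov_i64() generalizes the same idea halfword by halfword: pick MOVZ or MOVN depending on whether the value is mostly zero or mostly one bits, start from the most significant halfword that differs from that background, and MOVK only the halfwords that still differ. A hypothetical plan printer (the background choice here is a simple count; the kernel's own heuristic differs in detail):

    #include <stdio.h>
    #include <stdint.h>

    static void mov_i64_plan(uint64_t val)
    {
            int shift, ones = 0, zeroes = 0, first = 1;
            uint16_t bg;

            for (shift = 0; shift < 64; shift += 16) {
                    uint16_t hw = (val >> shift) & 0xffff;

                    ones   += (hw == 0xffff);
                    zeroes += (hw == 0x0000);
            }
            bg = (ones > zeroes) ? 0xffff : 0x0000;   /* "background" halfword */

            printf("0x%016llx:\n", (unsigned long long)val);
            for (shift = 48; shift >= 0; shift -= 16) {
                    uint16_t hw = (val >> shift) & 0xffff;

                    if (hw == bg)
                            continue;          /* already set by movz/movn */
                    if (first) {
                            if (bg)
                                    printf("  movn x, #0x%04x, lsl #%d\n",
                                           (uint16_t)~hw, shift);
                            else
                                    printf("  movz x, #0x%04x, lsl #%d\n",
                                           hw, shift);
                            first = 0;
                    } else {
                            printf("  movk x, #0x%04x, lsl #%d\n", hw, shift);
                    }
            }
            if (first)   /* value was 0 or ~0: a single instruction suffices */
                    printf("  %s x, #0x0000\n", bg ? "movn" : "movz");
    }

    int main(void)
    {
            mov_i64_plan(0x0000123400560078ULL);   /* movz + two movk */
            mov_i64_plan(0xffffffff00000001ULL);   /* movn + one movk */
            return 0;
    }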
139 emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
143 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
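emit_addr_mov_i64() instead emits a fixed-length MOVN/MOVK sequence, presumably so the image size cannot change between passes even if the address value does. Because kernel pointers have all of their top bits set, a MOVN on the low halfword (which also sets every other bit to one) followed by MOVKs for the middle halfwords is enough. A hypothetical standalone model of that trick:

    #include <stdio.h>
    #include <stdint.h>

    /* movn writes ~(imm << shift); movk replaces one halfword. */
    static uint64_t movn16(uint16_t imm, int shift)
    {
            return ~((uint64_t)imm << shift);
    }

    static uint64_t movk16(uint64_t reg, uint16_t imm, int shift)
    {
            return (reg & ~((uint64_t)0xffff << shift)) |
                   ((uint64_t)imm << shift);
    }

    int main(void)
    {
            uint64_t addr = 0xffff800010123456ULL;       /* made-up kernel VA */
            uint64_t reg;

            reg = movn16((~addr) & 0xffff, 0);           /* low halfword, rest = 1s */
            reg = movk16(reg, (addr >> 16) & 0xffff, 16);
            reg = movk16(reg, (addr >> 32) & 0xffff, 32);
            printf("%d\n", reg == addr);                 /* prints 1 */
            return 0;
    }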
229 emit(A64_BTI_C, ctx); in build_prologue()
232 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); in build_prologue()
233 emit(A64_MOV(1, A64_FP, A64_SP), ctx); in build_prologue()
236 emit(A64_PUSH(r6, r7, A64_SP), ctx); in build_prologue()
237 emit(A64_PUSH(r8, r9, A64_SP), ctx); in build_prologue()
238 emit(A64_PUSH(fp, tcc, A64_SP), ctx); in build_prologue()
241 emit(A64_MOV(1, fp, A64_SP), ctx); in build_prologue()
245 emit(A64_MOVZ(1, tcc, 0, 0), ctx); in build_prologue()
256 emit(A64_BTI_J, ctx); in build_prologue()
262 emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_prologue()
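Read in order, these pushes set up a frame along the following lines (my reading of the sequence, not the kernel's own diagram); the BTI_C/BTI_J instructions are branch-target-identification landing pads, the MOVZ zeroes the tail-call counter (register x26, judging by the matching A64_R(26) pop in build_epilogue), and the final SUB carves out the program's own stack:

    original A64_SP ->  +------------------------+   higher addresses
                        |        FP / LR         |
    A64_FP          ->  +------------------------+
                        |        r6 / r7         |
                        |        r8 / r9         |   callee-saved BPF regs
                        | fp / tail-call counter |
    BPF frame ptr   ->  +------------------------+
                        |   BPF program stack    |   ctx->stack_size bytes
    current A64_SP  ->  +------------------------+   lower addresses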
286 emit(A64_LDR32(tmp, r2, tmp), ctx); in emit_bpf_tail_call()
287 emit(A64_MOV(0, r3, r3), ctx); in emit_bpf_tail_call()
288 emit(A64_CMP(0, r3, tmp), ctx); in emit_bpf_tail_call()
289 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); in emit_bpf_tail_call()
296 emit(A64_CMP(1, tcc, tmp), ctx); in emit_bpf_tail_call()
297 emit(A64_B_(A64_COND_HI, jmp_offset), ctx); in emit_bpf_tail_call()
298 emit(A64_ADD_I(1, tcc, tcc, 1), ctx); in emit_bpf_tail_call()
306 emit(A64_ADD(1, tmp, r2, tmp), ctx); in emit_bpf_tail_call()
307 emit(A64_LSL(1, prg, r3, 3), ctx); in emit_bpf_tail_call()
308 emit(A64_LDR64(prg, tmp, prg), ctx); in emit_bpf_tail_call()
309 emit(A64_CBZ(1, prg, jmp_offset), ctx); in emit_bpf_tail_call()
314 emit(A64_LDR64(tmp, prg, tmp), ctx); in emit_bpf_tail_call()
315 emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx); in emit_bpf_tail_call()
316 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in emit_bpf_tail_call()
317 emit(A64_BR(tmp), ctx); in emit_bpf_tail_call()
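This is the arm64 rendering of the usual tail-call contract: bounds-check the index, bound the chain length, fetch the program pointer, and branch into the callee just past its prologue (sizeof(u32) * PROLOGUE_OFFSET), so the callee neither re-pushes registers nor resets the shared counter. A toy, self-contained model of the checks (hypothetical types and limit, not the kernel's struct bpf_array or MAX_TAIL_CALL_CNT):

    #include <stdio.h>
    #include <stdint.h>

    #define TAIL_CALL_LIMIT 33   /* illustrative stand-in for MAX_TAIL_CALL_CNT */

    struct toy_prog  { void *body; };
    struct toy_array { uint32_t max_entries; struct toy_prog *ptrs[4]; };

    /* Returns where the tail call would branch to, or NULL to fall through. */
    static void *tail_call_target(struct toy_array *array, uint32_t index,
                                  uint32_t *tail_call_cnt)
    {
            struct toy_prog *prog;

            if (index >= array->max_entries)        /* A64_CMP(0, ...) + B.CS      */
                    return NULL;
            if (*tail_call_cnt > TAIL_CALL_LIMIT)   /* A64_CMP(1, tcc, ...) + B.HI */
                    return NULL;
            (*tail_call_cnt)++;                     /* A64_ADD_I(1, tcc, tcc, 1)   */
            prog = array->ptrs[index];              /* A64_LSL #3 + A64_LDR64      */
            if (!prog)                              /* A64_CBZ                     */
                    return NULL;
            /* the real sequence also rewinds A64_SP before the BR */
            return prog->body;                      /* bpf_func + prologue skip    */
    }

    int main(void)
    {
            struct toy_prog p = { .body = (void *)&p };
            struct toy_array a = { .max_entries = 4, .ptrs = { [2] = &p } };
            uint32_t tcc = 0;

            printf("%p\n", tail_call_target(&a, 2, &tcc));  /* &p           */
            printf("%p\n", tail_call_target(&a, 9, &tcc));  /* NULL (range) */
            return 0;
    }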
342 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_epilogue()
345 emit(A64_POP(fp, A64_R(26), A64_SP), ctx); in build_epilogue()
348 emit(A64_POP(r8, r9, A64_SP), ctx); in build_epilogue()
349 emit(A64_POP(r6, r7, A64_SP), ctx); in build_epilogue()
352 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); in build_epilogue()
355 emit(A64_MOV(1, A64_R(0), r0), ctx); in build_epilogue()
357 emit(A64_RET(A64_LR), ctx); in build_epilogue()
462 emit(A64_MOV(is64, dst, src), ctx); in build_insn()
467 emit(A64_ADD(is64, dst, dst, src), ctx); in build_insn()
471 emit(A64_SUB(is64, dst, dst, src), ctx); in build_insn()
475 emit(A64_AND(is64, dst, dst, src), ctx); in build_insn()
479 emit(A64_ORR(is64, dst, dst, src), ctx); in build_insn()
483 emit(A64_EOR(is64, dst, dst, src), ctx); in build_insn()
487 emit(A64_MUL(is64, dst, dst, src), ctx); in build_insn()
495 emit(A64_UDIV(is64, dst, dst, src), ctx); in build_insn()
498 emit(A64_UDIV(is64, tmp, dst, src), ctx); in build_insn()
499 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx); in build_insn()
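AArch64 has no remainder instruction, so BPF_MOD becomes a divide followed by a multiply-subtract, which is what the UDIV/MSUB pair above spells out. The identity in plain C (hypothetical helper; src assumed non-zero):

    #include <stdio.h>
    #include <stdint.h>

    /* A64_UDIV(is64, tmp, dst, src)       tmp = dst / src
     * A64_MSUB(is64, dst, dst, tmp, src)  dst = dst - tmp * src  (= dst % src) */
    static uint64_t mod_via_msub(uint64_t dst, uint64_t src)
    {
            uint64_t tmp = dst / src;

            return dst - tmp * src;
    }

    int main(void)
    {
            printf("%llu\n", (unsigned long long)mod_via_msub(29, 5));   /* 4 */
            return 0;
    }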
505 emit(A64_LSLV(is64, dst, dst, src), ctx); in build_insn()
509 emit(A64_LSRV(is64, dst, dst, src), ctx); in build_insn()
513 emit(A64_ASRV(is64, dst, dst, src), ctx); in build_insn()
518 emit(A64_NEG(is64, dst, dst), ctx); in build_insn()
532 emit(A64_REV16(is64, dst, dst), ctx); in build_insn()
534 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
537 emit(A64_REV32(is64, dst, dst), ctx); in build_insn()
541 emit(A64_REV64(dst, dst), ctx); in build_insn()
549 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
553 emit(A64_UXTW(is64, dst, dst), ctx); in build_insn()
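In the byte-swap (BPF_END) cases, a REV instruction does the actual swap, and the UXTH/UXTW lines zero-extend 16- and 32-bit results so the upper destination bits end up clear; on this little-endian target the to-LE direction needs only the extension. The 16-bit swap in plain C (hypothetical helper):

    #include <stdio.h>
    #include <stdint.h>

    /* REV16 + UXTH: swap the bytes of the low halfword, then zero-extend so
     * bits 16..63 of the destination are cleared. */
    static uint64_t swap16_zext(uint64_t dst)
    {
            uint64_t rev = ((dst & 0xff) << 8) | ((dst >> 8) & 0xff);

            return rev & 0xffff;   /* UXTH */
    }

    int main(void)
    {
            printf("0x%llx\n",
                   (unsigned long long)swap16_zext(0xdeadbeef1234ULL));   /* 0x3412 */
            return 0;
    }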
569 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
571 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
574 emit(A64_ADD(is64, dst, dst, tmp), ctx); in build_insn()
580 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
582 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
585 emit(A64_SUB(is64, dst, dst, tmp), ctx); in build_insn()
592 emit(a64_insn, ctx); in build_insn()
595 emit(A64_AND(is64, dst, dst, tmp), ctx); in build_insn()
602 emit(a64_insn, ctx); in build_insn()
605 emit(A64_ORR(is64, dst, dst, tmp), ctx); in build_insn()
612 emit(a64_insn, ctx); in build_insn()
615 emit(A64_EOR(is64, dst, dst, tmp), ctx); in build_insn()
621 emit(A64_MUL(is64, dst, dst, tmp), ctx); in build_insn()
626 emit(A64_UDIV(is64, dst, dst, tmp), ctx); in build_insn()
631 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx); in build_insn()
632 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx); in build_insn()
636 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
640 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
644 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
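For the immediate ALU forms, the JIT first tries to fold the constant into the instruction itself: ADD/SUB take a 12-bit immediate optionally shifted left by 12 (with the sign flip turning an ADD of a negative value into a SUB and vice versa), and AND/OR/XOR try the A64 bitmask-immediate encoding (the bare a64_insn lines). Only when that fails is the constant materialized into tmp and the register-register form used. A sketch of the add/sub range test (hypothetical standalone helper; the kernel's own predicate is called is_addsub_imm(), if memory serves):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* True if imm fits an A64 ADD/SUB immediate: 12 bits, optionally
     * shifted left by 12. */
    static bool fits_addsub_imm(uint32_t imm)
    {
            return !(imm & ~0xfffU) || !(imm & ~0xfff000U);
    }

    int main(void)
    {
            printf("%d %d %d\n",
                   fits_addsub_imm(42),        /* 1: plain 12-bit immediate   */
                   fits_addsub_imm(0x5000),    /* 1: 12 bits shifted by 12    */
                   fits_addsub_imm(0x1001));   /* 0: needs a scratch register */
            return 0;
    }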
651 emit(A64_B(jmp_offset), ctx); in build_insn()
674 emit(A64_CMP(is64, dst, src), ctx); in build_insn()
713 emit(A64_B_(jmp_cond, jmp_offset), ctx); in build_insn()
717 emit(A64_TST(is64, dst, src), ctx); in build_insn()
741 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
743 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
746 emit(A64_CMP(is64, dst, tmp), ctx); in build_insn()
753 emit(a64_insn, ctx); in build_insn()
756 emit(A64_TST(is64, dst, tmp), ctx); in build_insn()
771 emit(A64_BLR(tmp), ctx); in build_insn()
772 emit(A64_MOV(1, r0, A64_R(0)), ctx); in build_insn()
788 emit(A64_B(jmp_offset), ctx); in build_insn()
815 emit(A64_LDR32(dst, src, tmp), ctx); in build_insn()
818 emit(A64_LDRH(dst, src, tmp), ctx); in build_insn()
821 emit(A64_LDRB(dst, src, tmp), ctx); in build_insn()
824 emit(A64_LDR64(dst, src, tmp), ctx); in build_insn()
856 emit(A64_STR32(tmp, dst, tmp2), ctx); in build_insn()
859 emit(A64_STRH(tmp, dst, tmp2), ctx); in build_insn()
862 emit(A64_STRB(tmp, dst, tmp2), ctx); in build_insn()
865 emit(A64_STR64(tmp, dst, tmp2), ctx); in build_insn()
878 emit(A64_STR32(src, dst, tmp), ctx); in build_insn()
881 emit(A64_STRH(src, dst, tmp), ctx); in build_insn()
884 emit(A64_STRB(src, dst, tmp), ctx); in build_insn()
887 emit(A64_STR64(src, dst, tmp), ctx); in build_insn()
900 emit(A64_ADD(1, tmp, tmp, dst), ctx); in build_insn()
904 emit(A64_STADD(isdw, reg, src), ctx); in build_insn()
906 emit(A64_LDXR(isdw, tmp2, reg), ctx); in build_insn()
907 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx); in build_insn()
908 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx); in build_insn()
911 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in build_insn()
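The last group is the atomic add: a single STADD when the CPU has LSE atomics, otherwise the classic exclusive-monitor retry loop (LDXR, ADD, STXR, then CBNZ on the store-exclusive status register). These are the same two instruction shapes a compiler typically generates for a relaxed atomic add in C, for example:

    #include <stdio.h>
    #include <stdint.h>

    /* With LSE (-march=armv8.1-a) compilers typically emit LDADD/STADD for
     * this; without it they fall back to an LDXR/ADD/STXR/CBNZ loop, which
     * matches the two paths the JIT chooses between above. */
    static void atomic_add_relaxed(uint64_t *p, uint64_t inc)
    {
            __atomic_fetch_add(p, inc, __ATOMIC_RELAXED);
    }

    int main(void)
    {
            uint64_t counter = 40;

            atomic_add_relaxed(&counter, 2);
            printf("%llu\n", (unsigned long long)counter);   /* 42 */
            return 0;
    }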