Lines Matching refs:imm
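(This listing is an identifier cross-reference: every line that references imm in what is evidently the arm64 BPF JIT, arch/arm64/net/bpf_jit_comp.c, given the is_addsub_imm(), build_insn(), emit_a64_mov_i() and A64_* names. The left column is the line number in that file; the trailing annotation names the enclosing function and flags argument/local matches. The short C sketches below the clusters are illustrative reconstructions under that assumption, not lines from the file.)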

176 static bool is_addsub_imm(u32 imm)  in is_addsub_imm()  argument
179 return !(imm & ~0xfff) || !(imm & ~0xfff000); in is_addsub_imm()
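
is_addsub_imm() (lines 176/179) decides whether a constant fits the imm12 field of AArch64 ADD/SUB (immediate): a 12-bit unsigned value, either plain or shifted left by 12 bits. The two masks test exactly those two shapes. A standalone restatement (function name mine):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* True if imm fits ADD/SUB (immediate): 12 bits, plain or
     * pre-shifted left by 12. */
    static bool fits_addsub_imm(uint32_t imm)
    {
        return !(imm & ~0xfffu) ||      /* bits 11..0 only  */
               !(imm & ~0xfff000u);     /* bits 23..12 only */
    }

    int main(void)
    {
        printf("%d %d %d\n", fits_addsub_imm(0xfff),    /* 1          */
                             fits_addsub_imm(0x1000),   /* 1: shifted */
                             fits_addsub_imm(0x1001));  /* 0          */
        return 0;
    }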
437 const s32 imm = insn->imm; in build_insn() local
447 #define check_imm(bits, imm) do { \ in build_insn() argument
448 if ((((imm) > 0) && ((imm) >> (bits))) || \ in build_insn()
449 (((imm) < 0) && (~(imm) >> (bits)))) { \ in build_insn()
451 i, imm, imm); \ in build_insn()
455 #define check_imm19(imm) check_imm(19, imm) in build_insn() argument
456 #define check_imm26(imm) check_imm(26, imm) in build_insn() argument
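
check_imm() (lines 447-451) is build_insn()'s range guard on the s32 imm captured at line 437: a positive value must vanish when shifted right by the field width, and a negative one must sign-extend cleanly (its complement must vanish), otherwise the JIT logs the value and rejects the program. check_imm19/check_imm26 cover the 19- and 26-bit branch-offset fields of AArch64 conditional and unconditional branches. The same predicate as a plain function (name mine):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Mirrors the check_imm() failure test: reject v if its magnitude
     * needs more than 'bits' bits on top of the sign. */
    static bool imm_in_range(int32_t v, unsigned bits)
    {
        if (v > 0)
            return !(v >> bits);
        if (v < 0)
            return !(~v >> bits);
        return true;            /* zero always fits */
    }

    int main(void)
    {
        printf("%d %d\n", imm_in_range((1 << 19) - 1, 19),  /* 1: fits    */
                          imm_in_range(1 << 19, 19));       /* 0: too big */
        return 0;
    }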
530 switch (imm) { in build_insn()
546 switch (imm) { in build_insn()
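
The two switch (imm) statements at 530 and 546 are consistent with the BPF_END (byte-swap) cases, where imm is not an arithmetic operand but the operand width in bits (16, 32 or 64); one arm emits the actual reversal (REV16/REV32/REV) and the other, for conversions that already match the native byte order, only the zero-extension. A C-level model of the swapping arm (function name mine):

    #include <stdint.h>
    #include <stdio.h>

    /* The value the emitted reversal leaves in dst, modeled with
     * compiler builtins (illustrative only). */
    static uint64_t end_swap(uint64_t dst, int32_t imm)
    {
        switch (imm) {
        case 16: return __builtin_bswap16((uint16_t)dst);
        case 32: return __builtin_bswap32((uint32_t)dst);
        case 64: return __builtin_bswap64(dst);
        }
        return dst;
    }

    int main(void)
    {
        /* prints 0x8877: only the low 16 bits survive, swapped */
        printf("0x%llx\n",
               (unsigned long long)end_swap(0x1122334455667788ull, 16));
        return 0;
    }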
563 emit_a64_mov_i(is64, dst, imm, ctx); in build_insn()
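
Line 563 is the mov-constant case: emit_a64_mov_i() materializes an arbitrary constant into a register, which on AArch64 means MOVZ/MOVK sequences (or MOVN for mostly-ones values), since no single instruction carries a full-width immediate. A sketch of what a two-instruction MOVZ+MOVK pair computes for a positive 32-bit constant; the helper also takes shorter paths when the value allows:

    #include <stdint.h>
    #include <stdio.h>

    /* MOVZ dst, #lo16            ; zero dst, set bits 15..0
     * MOVK dst, #hi16, lsl #16   ; keep dst, overwrite bits 31..16 */
    static uint32_t movz_movk(uint16_t lo16, uint16_t hi16)
    {
        uint32_t dst = lo16;                                /* MOVZ */
        dst = (dst & 0x0000ffffu) | ((uint32_t)hi16 << 16); /* MOVK */
        return dst;
    }

    int main(void)
    {
        printf("0x%x\n", (unsigned)movz_movk(0x5678, 0x1234)); /* 0x12345678 */
        return 0;
    }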
568 if (is_addsub_imm(imm)) { in build_insn()
569 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
570 } else if (is_addsub_imm(-imm)) { in build_insn()
571 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
573 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
579 if (is_addsub_imm(imm)) { in build_insn()
580 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
581 } else if (is_addsub_imm(-imm)) { in build_insn()
582 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
584 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
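
Lines 568-584 are the add/subtract-constant cases, and both share one trick: if imm encodes as an ADD/SUB immediate, emit the operation directly (568-569, 579-580); if -imm encodes, emit the opposite operation on the negated constant (570-571, 581-582); only otherwise stage imm in the scratch register tmp and use the register form (573, 584). A compilable model of the selection (names and result strings mine):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool fits_addsub_imm(uint32_t imm)
    {
        return !(imm & ~0xfffu) || !(imm & ~0xfff000u);
    }

    /* How "dst += imm" gets encoded; negation is done in unsigned
     * arithmetic so INT32_MIN cannot overflow. */
    static const char *pick_add_encoding(int32_t imm)
    {
        if (fits_addsub_imm((uint32_t)imm))
            return "ADD (immediate)";
        if (fits_addsub_imm(-(uint32_t)imm))
            return "SUB (immediate) of -imm";
        return "mov imm to tmp, ADD (register)";
    }

    int main(void)
    {
        printf("%s\n", pick_add_encoding(4095));    /* plain imm12 */
        printf("%s\n", pick_add_encoding(-7));      /* SUB #7      */
        printf("%s\n", pick_add_encoding(0x12345)); /* needs tmp   */
        return 0;
    }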
590 a64_insn = A64_AND_I(is64, dst, dst, imm); in build_insn()
594 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
600 a64_insn = A64_ORR_I(is64, dst, dst, imm); in build_insn()
604 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
610 a64_insn = A64_EOR_I(is64, dst, dst, imm); in build_insn()
614 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
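
The bitwise cases at 590-614 cannot reuse that trick: AND/ORR/EOR immediates use AArch64's bitmask-immediate encoding, which represents only repeated, rotated runs of ones. The A64_AND_I/A64_ORR_I/A64_EOR_I builders report an unencodable constant (in the kernel, by returning AARCH64_BREAK_FAULT), and the JIT then falls back to mov-into-tmp plus the register form at 594/604/614. A deliberately simplified probe that accepts only a single unrotated run of ones; every value it accepts is a valid bitmask immediate, but the real encoder accepts more:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Subset check: is v one contiguous run of ones (possibly shifted)?
     * All-zero and all-one words are never encodable. */
    static bool single_run_of_ones(uint64_t v)
    {
        if (v == 0 || v == UINT64_MAX)
            return false;
        uint64_t x = v | (v - 1);   /* fill trailing zeros with ones */
        return ((x + 1) & x) == 0;  /* now of the form 0...01...1?   */
    }

    int main(void)
    {
        printf("%d\n", single_run_of_ones(0x0ff0)); /* 1: encodable    */
        printf("%d\n", single_run_of_ones(0x1234)); /* 0: use tmp path */
        return 0;
    }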
620 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
625 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
630 emit_a64_mov_i(is64, tmp2, imm, ctx); in build_insn()
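
Lines 620-630 are consistent with the multiply/divide/modulo-by-constant cases: AArch64 has no immediate forms for any of them, so imm is always staged through a scratch register first. The modulo case takes the second scratch (tmp2 at 630) because AArch64 has no remainder instruction; the remainder is built from a divide and a multiply-subtract, which needs somewhere to keep the quotient. What the emitted UDIV+MSUB pair computes (unsigned case shown):

    #include <stdint.h>
    #include <stdio.h>

    /* rem = dst - (dst / src) * src */
    static uint64_t mod_via_div(uint64_t dst, uint64_t src)
    {
        uint64_t q = dst / src;   /* UDIV: quotient into a scratch */
        return dst - q * src;     /* MSUB: fold it back into dst   */
    }

    int main(void)
    {
        printf("%llu\n", (unsigned long long)mod_via_div(23, 5)); /* 3 */
        return 0;
    }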
636 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
640 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
644 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
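
The shift-by-constant cases at 636-644 need no fallback path at all: the shift amount always fits the instruction's own immediate field, so imm is emitted directly. For reference, the semantics the three opcodes preserve (LSL fills with zeros from the right, LSR from the left, ASR replicates the sign bit):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int64_t v = -16;
        /* LSL and LSR shown in unsigned to sidestep C's signed-shift
         * rules; ASR is the arithmetic right shift. */
        printf("0x%llx\n", (unsigned long long)((uint64_t)v << 2)); /* LSL */
        printf("0x%llx\n", (unsigned long long)((uint64_t)v >> 2)); /* LSR */
        printf("%lld\n",   (long long)(v >> 2));                    /* ASR: -4 */
        return 0;
    }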
740 if (is_addsub_imm(imm)) { in build_insn()
741 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
742 } else if (is_addsub_imm(-imm)) { in build_insn()
743 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
745 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
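
The compare-with-constant jump cases at 740-745 replay the ADD/SUB immediate trick on flag-setting compares: CMP dst, #imm when imm encodes, else CMN dst, #-imm (CMN adds its operand, so with -imm it performs the same subtraction and sets the same flags; imm == 0 always takes the CMP path, where the carry flag would otherwise differ), else stage imm in tmp and compare registers. Selection sketch, same shape as the ADD example above (names mine):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool fits_addsub_imm(uint32_t imm)
    {
        return !(imm & ~0xfffu) || !(imm & ~0xfff000u);
    }

    static const char *pick_cmp_encoding(int32_t imm)
    {
        if (fits_addsub_imm((uint32_t)imm))
            return "CMP (immediate)";
        if (fits_addsub_imm(-(uint32_t)imm))
            return "CMN (immediate) with -imm";
        return "mov imm to tmp, CMP (register)";
    }

    int main(void)
    {
        printf("%s\n", pick_cmp_encoding(-42)); /* CMN #42 */
        return 0;
    }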
751 a64_insn = A64_TST_I(is64, dst, imm); in build_insn()
755 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
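
Lines 751-755 are the BPF_JSET case, which branches when dst and imm share a set bit. AArch64's TST is an ANDS that discards its result, and its immediate form has the same bitmask-immediate restriction as AND at line 590, hence the same encode-or-fall-back pattern. The predicate being evaluated:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* What "TST dst, #imm" plus a conditional branch decides:
     * is any bit set in both operands? */
    static bool jset_taken(uint64_t dst, uint64_t imm)
    {
        return (dst & imm) != 0;
    }

    int main(void)
    {
        printf("%d %d\n", jset_taken(0xf0, 0x10),  /* 1 */
                          jset_taken(0xf0, 0x0f)); /* 0 */
        return 0;
    }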
797 imm64 = (u64)insn1.imm << 32 | (u32)imm; in build_insn()
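
Line 797 is BPF_LD | BPF_IMM | BPF_DW, the only BPF instruction that occupies two slots: the 64-bit constant is split across this instruction's imm (low half) and the following instruction's imm (high half, insn1 here). The (u32) cast on the low half is load-bearing; without it a negative low word would sign-extend and overwrite the top 32 bits:

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t ld_imm64(int32_t imm_lo, int32_t imm_hi)
    {
        return (uint64_t)imm_hi << 32 | (uint32_t)imm_lo;
    }

    int main(void)
    {
        /* low half -1 must become 0x00000000ffffffff, not all ones */
        printf("0x%016llx\n", (unsigned long long)ld_imm64(-1, 0));
        return 0;
    }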
853 emit_a64_mov_i(1, tmp, imm, ctx); in build_insn()
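
The final hit at 853 looks like the store-immediate path (BPF_ST | BPF_MEM): AArch64 stores take no immediate operand, so the constant is first materialized into tmp, with the leading 1 selecting the 64-bit form of emit_a64_mov_i(), and then written out from the register.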