Lines matching references to "imm" in the arm64 BPF JIT
(arch/arm64/net/bpf_jit_comp.c). Lines 177-180 are in is_addsub_imm();
every match from line 438 onward is inside build_insn().

177  static bool is_addsub_imm(u32 imm)
180  return !(imm & ~0xfff) || !(imm & ~0xfff000);
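
An AArch64 ADD/SUB (immediate) instruction carries a 12-bit unsigned
immediate, optionally left-shifted by 12, so the predicate accepts
0x0-0xfff plus any multiple of 0x1000 up to 0xfff000. A minimal
standalone sketch of the same check, runnable in userspace:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Encodable iff the value fits entirely in bits [11:0], or
     * entirely in bits [23:12] with the low 12 bits clear. */
    static bool is_addsub_imm(uint32_t imm)
    {
        return !(imm & ~0xfffu) || !(imm & ~0xfff000u);
    }

    int main(void)
    {
        printf("%d\n", is_addsub_imm(0xfff));    /* 1: plain imm12 */
        printf("%d\n", is_addsub_imm(0x1000));   /* 1: imm12 << 12 */
        printf("%d\n", is_addsub_imm(0xfff000)); /* 1: imm12 << 12 */
        printf("%d\n", is_addsub_imm(0x1001));   /* 0: spans both fields */
        return 0;
    }
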
438  const s32 imm = insn->imm;
448  #define check_imm(bits, imm) do { \
449  if ((((imm) > 0) && ((imm) >> (bits))) || \
450  (((imm) < 0) && (~(imm) >> (bits)))) { \
452  i, imm, imm); \
456  #define check_imm19(imm) check_imm(19, imm)
457  #define check_imm26(imm) check_imm(26, imm)
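
The macro accepts exactly the values representable in (bits + 1)
two's-complement bits, i.e. -2^bits through 2^bits - 1: a positive imm
must have no bit set at position bits or above, and a negative imm is
tested through its one's complement, which is non-negative. The
check_imm19()/check_imm26() wrappers presumably guard the 19- and
26-bit PC-relative offset fields of AArch64 conditional and
unconditional branches. A standalone sketch of the range test
(imm_fits is an illustrative name, not from the source):

    #include <stdio.h>

    /* Mirrors the macro: reject imm unless -2^bits <= imm <= 2^bits - 1. */
    static int imm_fits(int imm, int bits)
    {
        if ((imm > 0 && (imm >> bits)) ||
            (imm < 0 && (~imm >> bits)))
            return 0;
        return 1;
    }

    int main(void)
    {
        printf("%d\n", imm_fits((1 << 19) - 1, 19));  /* 1: max accepted */
        printf("%d\n", imm_fits(1 << 19, 19));        /* 0: one too big */
        printf("%d\n", imm_fits(-(1 << 19), 19));     /* 1: min accepted */
        printf("%d\n", imm_fits(-(1 << 19) - 1, 19)); /* 0: one too small */
        return 0;
    }
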
526  switch (imm) {
542  switch (imm) {
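
In the BPF ISA the imm field of a BPF_END (byte-swap) instruction
selects the operand width, so these two switches almost certainly
dispatch on imm being 16, 32, or 64: one arm emits REV-class swap
instructions, the other zero-extends when the requested byte order
already matches the host. A host-side sketch of the width dispatch
(end_swap is an illustrative name; the JIT emits instructions rather
than calling builtins):

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t end_swap(uint64_t v, int imm)
    {
        switch (imm) {
        case 16: return __builtin_bswap16((uint16_t)v);
        case 32: return __builtin_bswap32((uint32_t)v);
        case 64: return __builtin_bswap64(v);
        }
        return v; /* other widths never reach a JIT: the verifier rejects them */
    }

    int main(void)
    {
        uint64_t v = 0x1122334455667788ull;
        printf("%llx\n", (unsigned long long)end_swap(v, 16)); /* 8877 */
        printf("%llx\n", (unsigned long long)end_swap(v, 32)); /* 88776655 */
        printf("%llx\n", (unsigned long long)end_swap(v, 64)); /* 8877665544332211 */
        return 0;
    }
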
559  emit_a64_mov_i(is64, dst, imm, ctx);
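
emit_a64_mov_i() materializes an arbitrary immediate into a register;
on AArch64 that is conventionally a MOVZ or MOVN for one 16-bit chunk
followed by a MOVK for each further chunk. A hedged sketch that merely
counts the sequence length for the zeros-based strategy (movzk_count
is an illustrative name; the kernel helper also has a MOVN path for
mostly-ones values, ignored here):

    #include <stdint.h>
    #include <stdio.h>

    /* MOVZ the first nonzero 16-bit chunk, MOVK each remaining one. */
    static int movzk_count(uint64_t v)
    {
        int n = 0;
        for (int shift = 0; shift < 64; shift += 16)
            if ((v >> shift) & 0xffff)
                n++;
        return n ? n : 1; /* zero still takes one MOVZ */
    }

    int main(void)
    {
        printf("%d\n", movzk_count(0));                 /* 1 */
        printf("%d\n", movzk_count(0xfff));             /* 1 */
        printf("%d\n", movzk_count(0xdead0000beefull)); /* 2 */
        printf("%d\n", movzk_count(~0ull));             /* 4; MOVN needs 1 */
        return 0;
    }
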
564  if (is_addsub_imm(imm)) {
565  emit(A64_ADD_I(is64, dst, dst, imm), ctx);
566  } else if (is_addsub_imm(-imm)) {
567  emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
569  emit_a64_mov_i(is64, tmp, imm, ctx);
575  if (is_addsub_imm(imm)) {
576  emit(A64_SUB_I(is64, dst, dst, imm), ctx);
577  } else if (is_addsub_imm(-imm)) {
578  emit(A64_ADD_I(is64, dst, dst, -imm), ctx);
580  emit_a64_mov_i(is64, tmp, imm, ctx);
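
Both the ADD and SUB immediate cases try three encodings in order: the
immediate itself, its negation with the opposite instruction (dst + imm
equals dst - (-imm)), and finally materializing imm into the tmp
scratch register (the elided lines presumably emit the register-form
ADD/SUB on tmp). A standalone sketch of the triage; choose() is an
illustrative name:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    static bool is_addsub_imm(uint32_t imm)
    {
        return !(imm & ~0xfffu) || !(imm & ~0xfff000u);
    }

    /* Which encoding would the ADD-immediate path pick? */
    static const char *choose(int32_t imm)
    {
        if (is_addsub_imm(imm))
            return "ADD_I dst, dst, imm";
        if (is_addsub_imm(-imm))
            return "SUB_I dst, dst, -imm";
        return "mov tmp, imm; ADD dst, dst, tmp";
    }

    int main(void)
    {
        printf("%s\n", choose(42));       /* direct ADD_I */
        printf("%s\n", choose(-42));      /* flips to SUB_I of 42 */
        printf("%s\n", choose(0x123456)); /* unencodable: via tmp */
        return 0;
    }
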
586  a64_insn = A64_AND_I(is64, dst, dst, imm);
590  emit_a64_mov_i(is64, tmp, imm, ctx);
596  a64_insn = A64_ORR_I(is64, dst, dst, imm);
600  emit_a64_mov_i(is64, tmp, imm, ctx);
606  a64_insn = A64_EOR_I(is64, dst, dst, imm);
610  emit_a64_mov_i(is64, tmp, imm, ctx);
616  emit_a64_mov_i(is64, tmp, imm, ctx);
621  emit_a64_mov_i(is64, tmp, imm, ctx);
626  emit_a64_mov_i(is64, tmp2, imm, ctx);
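
The AND/ORR/EOR sites first ask the instruction encoder for a "logical
immediate" form; the gap before each emit_a64_mov_i() fallback
presumably checks whether the encoder returned a valid instruction
rather than its failure sentinel. The last three sites (616-626)
materialize unconditionally, consistent with MUL/DIV/MOD having no
immediate forms on AArch64. Logical immediates are bit patterns made
by replicating a rotated run of ones across a power-of-two-sized
element; all-zero and all-ones are never encodable. A hedged
standalone encodability check for 32-bit values (my own formulation,
not the kernel encoder's):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* v is encodable iff it is a 2/4/8/16/32-bit element, itself a
     * single circular run of ones, replicated across the word. A
     * single circular run has exactly two 0/1 boundaries, so XOR
     * with a one-bit rotation of itself has popcount 2. */
    static bool is_logical_imm32(uint32_t v)
    {
        if (v == 0 || v == 0xffffffffu)
            return false;
        for (uint32_t size = 2; size <= 32; size *= 2) {
            uint32_t mask = (size == 32) ? 0xffffffffu : (1u << size) - 1;
            uint32_t elt = v & mask, rep = elt;
            for (uint32_t i = size; i < 32; i += size)
                rep |= elt << i;
            if (rep != v)
                continue; /* not periodic at this element size */
            uint32_t rot = ((elt << 1) | (elt >> (size - 1))) & mask;
            return __builtin_popcount(elt ^ rot) == 2;
        }
        return false;
    }

    int main(void)
    {
        printf("%d\n", is_logical_imm32(0x000000f0)); /* 1: one run of 4 */
        printf("%d\n", is_logical_imm32(0x55555555)); /* 1: "01" replicated */
        printf("%d\n", is_logical_imm32(0x0000ff0f)); /* 0: two runs */
        return 0;
    }
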
632  emit(A64_LSL(is64, dst, dst, imm), ctx);
636  emit(A64_LSR(is64, dst, dst, imm), ctx);
640  emit(A64_ASR(is64, dst, dst, imm), ctx);
736  if (is_addsub_imm(imm)) {
737  emit(A64_CMP_I(is64, dst, imm), ctx);
738  } else if (is_addsub_imm(-imm)) {
739  emit(A64_CMN_I(is64, dst, -imm), ctx);
741  emit_a64_mov_i(is64, tmp, imm, ctx);
747  a64_insn = A64_TST_I(is64, dst, imm);
751  emit_a64_mov_i(is64, tmp, imm, ctx);
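
The conditional-jump immediates reuse both tricks: a compare prefers
CMP (immediate), flips to CMN with the negated value when only that
encodes, and otherwise goes through tmp; BPF_JSET tries TST's logical
immediate form the same way. The flip is sound because CMP computes
dst - imm and CMN computes dst + (-imm), the same value and flags. A
small demonstration of the two identities involved:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int64_t dst = 100, imm = -7;

        /* CMP dst, imm vs. CMN dst, -imm: same subtraction result. */
        printf("%d\n", (dst - imm) == (dst + -imm)); /* 1 */

        /* BPF_JSET is taken iff (dst & imm) != 0, which is exactly
         * what TST (an ANDS that discards the result) computes. */
        printf("%d\n", (dst & 0x40) != 0); /* 1: bit 6 of 100 is set */
        return 0;
    }
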
793  imm64 = (u64)insn1.imm << 32 | (u32)imm;
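
This is the BPF_LD | BPF_IMM | BPF_DW case, the only 16-byte BPF
instruction: the 64-bit constant is split across two instruction
slots, low word in this insn's imm and high word in the next slot's
(insn1). The (u32) cast on the low half is load-bearing, as a
standalone demonstration shows:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int32_t lo = (int32_t)0x80000000; /* low word, sign bit set */
        int32_t hi = 1;                   /* high word */

        /* Without the cast, lo sign-extends to 64 bits and the OR
         * smears ones over the freshly shifted-in high word. */
        uint64_t wrong = (uint64_t)hi << 32 | lo;
        uint64_t right = (uint64_t)hi << 32 | (uint32_t)lo;

        printf("0x%016llx\n", (unsigned long long)wrong); /* 0xffffffff80000000 */
        printf("0x%016llx\n", (unsigned long long)right); /* 0x0000000180000000 */
        return 0;
    }
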
849  emit_a64_mov_i(1, tmp, imm, ctx);
890  if (insn->imm != BPF_ADD) {
891  pr_err_once("unknown atomic op code %02x\n", insn->imm);
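
The atomic path accepts only BPF_ADD here (the classic BPF_XADD
atomic add) and reports anything else once before bailing out. A
sketch of the guard's effect, assuming the usual -EINVAL rejection;
check_atomic_op is an illustrative name:

    #include <errno.h>
    #include <stdio.h>

    #define BPF_ADD 0x00 /* opcode values from the BPF ISA */
    #define BPF_OR  0x40

    static int check_atomic_op(int imm)
    {
        if (imm != BPF_ADD) {
            fprintf(stderr, "unknown atomic op code %02x\n", imm);
            return -EINVAL;
        }
        return 0;
    }

    int main(void)
    {
        printf("%d\n", check_atomic_op(BPF_ADD)); /* 0 */
        printf("%d\n", check_atomic_op(BPF_OR));  /* -22 (-EINVAL) */
        return 0;
    }
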