Lines matching refs: imm in the SPARC64 BPF JIT (arch/sparc/net/bpf_jit_comp_64.c)
298 static void emit_alu_K(unsigned int opcode, unsigned int dst, unsigned int imm, in emit_alu_K() argument
301 bool small_immed = is_simm13(imm); in emit_alu_K()
306 emit(insn | IMMED | S13(imm), ctx); in emit_alu_K()
312 emit_set_const_sext(imm, tmp, ctx); in emit_alu_K()
317 static void emit_alu3_K(unsigned int opcode, unsigned int src, unsigned int imm, in emit_alu3_K() argument
320 bool small_immed = is_simm13(imm); in emit_alu3_K()
325 emit(insn | IMMED | S13(imm), ctx); in emit_alu3_K()
331 emit_set_const_sext(imm, tmp, ctx); in emit_alu3_K()
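The emit_alu_K()/emit_alu3_K() matches above (lines 298-331) all turn on one SPARC constraint: an ALU instruction can carry at most a signed 13-bit immediate, so the JIT emits the single IMMED-form instruction when is_simm13(imm) holds and otherwise materializes the constant into a temporary register (emit_set_const_sext) and uses the register-register encoding. A minimal out-of-kernel sketch of that decision, with printf stand-ins for the real emitters and illustrative register numbers:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* SPARC ALU immediates are signed 13-bit, i.e. [-4096, 4095]. */
    static bool fits_simm13(int32_t v)
    {
        return v >= -4096 && v <= 4095;
    }

    /* Mirrors the shape of emit_alu_K(): one instruction when the constant
     * fits, otherwise load it into a scratch register first. */
    static void alu_with_constant(const char *op, int dst, int32_t imm, int tmp)
    {
        if (fits_simm13(imm)) {
            printf("%s\tr%d, %d, r%d\n", op, dst, imm, dst);
        } else {
            printf("set\t%d, r%d\n", imm, tmp);            /* emit_set_const_sext() */
            printf("%s\tr%d, r%d, r%d\n", op, dst, tmp, dst);
        }
    }

    int main(void)
    {
        alu_with_constant("add", 3, 100, 8);    /* fits: single instruction */
        alu_with_constant("add", 3, 70000, 8);  /* too wide: load then add  */
        return 0;
    }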
636 const u8 dst, s32 imm, struct jit_ctx *ctx) in emit_cbcondi() argument
640 emit(cb_opc | IMMED | WDISP10(off << 2) | RS1(dst) | S5(imm), ctx); in emit_cbcondi()
659 const s32 imm, bool is_imm, int branch_dst, in emit_compare_and_branch() argument
675 if (!is_simm5(imm)) in emit_compare_and_branch()
677 } else if (!is_simm13(imm)) { in emit_compare_and_branch()
682 emit_loadimm_sext(imm, tmp, ctx); in emit_compare_and_branch()
693 emit_btsti(dst, imm, ctx); in emit_compare_and_branch()
698 emit_cmpi(dst, imm, ctx); in emit_compare_and_branch()
785 dst, imm, ctx); in emit_compare_and_branch()
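emit_compare_and_branch() (lines 659-785 above) chooses among three encodings for a conditional jump against a constant: a single cbcond instruction when the value fits in 5 signed bits, a cmp with a 13-bit immediate followed by a branch when is_simm13() holds, and otherwise a sign-extended load of the constant into a temporary register followed by a register-register compare; BPF_JSET uses a bit test (emit_btsti) instead of a compare. A sketch of that classification is below; the enum and helper names are illustrative, not the kernel's, and the extra conditions the kernel applies before using cbcond (hardware support, branch reach) are folded into a single flag:

    #include <stdbool.h>
    #include <stdint.h>

    enum cmp_strategy {
        CMP_CBCOND_IMM,  /* compare-and-branch with a 5-bit immediate */
        CMP_IMM13,       /* cmp with 13-bit immediate, then branch    */
        CMP_VIA_TMP,     /* load constant to a scratch reg, cmp regs  */
    };

    static bool fits_simm5(int32_t v)  { return v >= -16 && v <= 15; }
    static bool fits_simm13(int32_t v) { return v >= -4096 && v <= 4095; }

    static enum cmp_strategy pick_strategy(int32_t imm, bool cbcond_ok)
    {
        if (cbcond_ok && fits_simm5(imm))
            return CMP_CBCOND_IMM;
        if (fits_simm13(imm))
            return CMP_IMM13;
        return CMP_VIA_TMP;
    }

    int main(void)
    {
        return (pick_strategy(12, true) == CMP_CBCOND_IMM &&
                pick_strategy(1000, true) == CMP_IMM13 &&
                pick_strategy(1 << 20, true) == CMP_VIA_TMP) ? 0 : 1;
    }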
902 const s32 imm = insn->imm; in build_insn() local
1002 switch (imm) { in build_insn()
1026 switch (imm) { in build_insn()
1065 emit_loadimm32(imm, dst, ctx); in build_insn()
1070 emit_loadimm_sext(imm, dst, ctx); in build_insn()
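Lines 1065 and 1070 are the two MOV-with-immediate cases: eBPF defines the 32-bit BPF_ALU move as zero-extending into the full 64-bit register, while the BPF_ALU64 move sign-extends the s32 immediate, which is why the JIT has both an emit_loadimm32() and an emit_loadimm_sext() path. A small stand-alone illustration of the difference (plain C, nothing JIT-specific):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int32_t imm = -1;

        uint64_t mov32 = (uint32_t)imm;          /* BPF_ALU  MOV: upper 32 bits cleared */
        uint64_t mov64 = (uint64_t)(int64_t)imm; /* BPF_ALU64 MOV: sign-extended        */

        printf("alu32 mov: 0x%016" PRIx64 "\n", mov32); /* 0x00000000ffffffff */
        printf("alu64 mov: 0x%016" PRIx64 "\n", mov64); /* 0xffffffffffffffff */
        return 0;
    }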
1075 emit_alu_K(ADD, dst, imm, ctx); in build_insn()
1079 emit_alu_K(SUB, dst, imm, ctx); in build_insn()
1083 emit_alu_K(AND, dst, imm, ctx); in build_insn()
1087 emit_alu_K(OR, dst, imm, ctx); in build_insn()
1091 emit_alu_K(XOR, dst, imm, ctx); in build_insn()
1094 emit_alu_K(MUL, dst, imm, ctx); in build_insn()
1097 emit_alu_K(MULX, dst, imm, ctx); in build_insn()
1100 if (imm == 0) in build_insn()
1104 emit_alu_K(DIV, dst, imm, ctx); in build_insn()
1107 if (imm == 0) in build_insn()
1110 emit_alu_K(UDIVX, dst, imm, ctx); in build_insn()
1117 if (imm == 0) in build_insn()
1126 if (is_simm13(imm)) { in build_insn()
1127 emit(div | IMMED | RS1(dst) | S13(imm) | RD(tmp), ctx); in build_insn()
1128 emit(MULX | IMMED | RS1(tmp) | S13(imm) | RD(tmp), ctx); in build_insn()
1135 emit_set_const_sext(imm, tmp1, ctx); in build_insn()
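Lines 1117-1135 are the modulus-by-constant case: there is no remainder instruction to emit, so after the constant divisor is checked against zero the JIT computes dst % imm as dst - (dst / imm) * imm, i.e. a divide into a temporary, a MULX of that quotient by the same immediate (or by the constant loaded into a second temporary when it does not fit in 13 bits), and a subtract in the code that follows these matches. The same identity in plain C, shown for the unsigned 64-bit flavour:

    #include <assert.h>
    #include <stdint.h>

    /* dst % imm computed the way the JIT emits it: divide, multiply back,
     * subtract. Callers must have ruled out imm == 0, as the JIT does. */
    static uint64_t mod_via_div(uint64_t dst, uint64_t imm)
    {
        uint64_t q = dst / imm;  /* UDIVX (DIV for the 32-bit case) */
        uint64_t p = q * imm;    /* MULX                            */
        return dst - p;          /* the final subtract              */
    }

    int main(void)
    {
        assert(mod_via_div(100, 7) == 100 % 7);
        assert(mod_via_div(42, 42) == 0);
        assert(mod_via_div(5, 9) == 5);
        return 0;
    }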
1143 emit_alu_K(SLL, dst, imm, ctx); in build_insn()
1146 emit_alu_K(SLLX, dst, imm, ctx); in build_insn()
1149 emit_alu_K(SRL, dst, imm, ctx); in build_insn()
1154 emit_alu_K(SRLX, dst, imm, ctx); in build_insn()
1157 emit_alu_K(SRA, dst, imm, ctx); in build_insn()
1160 emit_alu_K(SRAX, dst, imm, ctx); in build_insn()
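Lines 1143-1160 map the shift-by-constant opcodes: SLL/SRL/SRA are the 32-bit forms and SLLX/SRLX/SRAX their 64-bit ("extended") counterparts, with SRL/SRLX shifting zeros in and SRA/SRAX replicating the sign bit. A short reminder of why both right-shift flavours are needed (plain C; on the usual compilers a signed right shift is arithmetic):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int64_t  v  = -16;
        uint64_t uv = (uint64_t)v;

        /* Arithmetic shift (SRA/SRAX): sign bit is copied in from the top. */
        printf("arithmetic: %" PRId64 "\n", v >> 2);        /* -4 */
        /* Logical shift (SRL/SRLX): zeros come in from the top. */
        printf("logical:    0x%016" PRIx64 "\n", uv >> 2);  /* 0x3ffffffffffffffc */
        return 0;
    }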
1207 err = emit_compare_and_branch(code, dst, 0, imm, true, i + off, ctx); in build_insn()
1216 u8 *func = ((u8 *)__bpf_call_base) + imm; in build_insn()
1248 imm64 = (u64)insn1.imm << 32 | (u32)imm; in build_insn()
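Two of the build_insn() matches above resolve constants that cannot live in a single 32-bit imm field. Line 1216 handles BPF_CALL, whose imm is an offset relative to __bpf_call_base rather than an absolute address, so the helper's address is recovered by adding the two. Line 1248 handles BPF_LD | BPF_DW | BPF_IMM, the only two-slot eBPF instruction: the current instruction's imm supplies the low 32 bits and the following instruction's imm the high 32 bits. A sketch of that reassembly (the one-field struct below is a stand-in for struct bpf_insn):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    struct insn_imm { int32_t imm; };   /* only the field this sketch needs */

    /* Equivalent of: imm64 = (u64)insn1.imm << 32 | (u32)imm; */
    static uint64_t ld_imm64(struct insn_imm lo, struct insn_imm hi)
    {
        return ((uint64_t)(uint32_t)hi.imm << 32) | (uint32_t)lo.imm;
    }

    int main(void)
    {
        struct insn_imm lo = { .imm = (int32_t)0xdeadbeef };
        struct insn_imm hi = { .imm = 0x12 };

        printf("imm64 = 0x%016" PRIx64 "\n", ld_imm64(lo, hi)); /* 0x00000012deadbeef */
        return 0;
    }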
1306 emit_loadimm(imm, tmp2, ctx); in build_insn()
1377 if (insn->imm != BPF_ADD) { in build_insn()
1378 pr_err_once("unknown atomic op %02x\n", insn->imm); in build_insn()
1407 if (insn->imm != BPF_ADD) { in build_insn()
1408 pr_err_once("unknown atomic op %02x\n", insn->imm); in build_insn()
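Lines 1377-1408 are the guards in the two atomic-operation cases: this JIT only implements the plain atomic add, so any other atomic opcode carried in insn->imm is reported once via pr_err_once() and the instruction is refused rather than miscompiled. A sketch of that kind of capability check; the helper name is illustrative, and BPF_ADD's value is the one from the uapi BPF encoding:

    #include <stdio.h>

    #define BPF_ADD 0x00    /* as in include/uapi/linux/bpf_common.h */

    /* Accept only the atomic add; report and refuse anything else. */
    static int check_atomic_op(int imm)
    {
        if (imm != BPF_ADD) {
            fprintf(stderr, "unknown atomic op %02x\n", imm);
            return -1;      /* caller gives up on JITing this program */
        }
        return 0;
    }

    int main(void)
    {
        printf("add     -> %d\n", check_atomic_op(BPF_ADD));
        printf("non-add -> %d\n", check_atomic_op(0x40));
        return 0;
    }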