Lines Matching refs:imm
33 #define check_imm(bits, imm) do { \ argument
34 if ((((imm) > 0) && ((imm) >> (bits))) || \
35 (((imm) < 0) && (~(imm) >> (bits)))) { \
37 i, imm, imm); \
41 #define check_imm19(imm) check_imm(19, imm) argument
42 #define check_imm26(imm) check_imm(26, imm) argument
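
The matches at file lines 33-42 above (judging by the helper names, this listing covers the arm64 BPF JIT, arch/arm64/net/bpf_jit_comp.c) belong to a macro that rejects branch offsets too wide for their instruction field. A minimal reconstruction of the whole macro follows; the pr_info report and the error return fill in intermediate lines that do not themselves reference imm and so are not listed, so their exact wording is an assumption:

#define check_imm(bits, imm) do {                                \
        if ((((imm) > 0) && ((imm) >> (bits))) ||                \
            (((imm) < 0) && (~(imm) >> (bits)))) {               \
                /* assumed: report and bail out on overflow */   \
                pr_info("[%2d] imm=%d(0x%x) out of range\n",     \
                        i, imm, imm);                            \
                return -EINVAL;                                  \
        }                                                        \
} while (0)

#define check_imm19(imm) check_imm(19, imm)   /* 19-bit field: conditional branches */
#define check_imm26(imm) check_imm(26, imm)   /* 26-bit field: unconditional branches */
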
216 static bool is_addsub_imm(u32 imm) in is_addsub_imm() argument
219 return !(imm & ~0xfff) || !(imm & ~0xfff000); in is_addsub_imm()
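
is_addsub_imm() checks whether a constant can be encoded directly in an AArch64 ADD/SUB (immediate) instruction, which accepts a 12-bit immediate optionally shifted left by 12 bits. A commented sketch of the test shown above:

static bool is_addsub_imm(u32 imm)
{
        /* Either a plain imm12 (bits 0-11) or an imm12 shifted left by 12. */
        return !(imm & ~0xfff) || !(imm & ~0xfff000);
}
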
459 switch (insn->imm) { in emit_lse_atomic()
497 pr_err_once("unknown atomic op code %02x\n", insn->imm); in emit_lse_atomic()
519 const s32 imm = insn->imm; in emit_ll_sc_atomic() local
533 if (imm == BPF_ADD || imm == BPF_AND || in emit_ll_sc_atomic()
534 imm == BPF_OR || imm == BPF_XOR) { in emit_ll_sc_atomic()
537 if (imm == BPF_ADD) in emit_ll_sc_atomic()
539 else if (imm == BPF_AND) in emit_ll_sc_atomic()
541 else if (imm == BPF_OR) in emit_ll_sc_atomic()
549 } else if (imm == (BPF_ADD | BPF_FETCH) || in emit_ll_sc_atomic()
550 imm == (BPF_AND | BPF_FETCH) || in emit_ll_sc_atomic()
551 imm == (BPF_OR | BPF_FETCH) || in emit_ll_sc_atomic()
552 imm == (BPF_XOR | BPF_FETCH)) { in emit_ll_sc_atomic()
558 if (imm == (BPF_ADD | BPF_FETCH)) in emit_ll_sc_atomic()
560 else if (imm == (BPF_AND | BPF_FETCH)) in emit_ll_sc_atomic()
562 else if (imm == (BPF_OR | BPF_FETCH)) in emit_ll_sc_atomic()
571 } else if (imm == BPF_XCHG) { in emit_ll_sc_atomic()
580 } else if (imm == BPF_CMPXCHG) { in emit_ll_sc_atomic()
596 pr_err_once("unknown atomic op code %02x\n", imm); in emit_ll_sc_atomic()
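
In both emit_lse_atomic() and emit_ll_sc_atomic(), insn->imm does not hold a numeric operand; it selects the BPF atomic operation. A sketch of the decode using the standard encoding from the BPF uapi header (the per-case comments describe the intended semantics, not the exact instructions either JIT path emits):

switch (insn->imm) {
case BPF_ADD:
case BPF_AND:
case BPF_OR:
case BPF_XOR:
        /* plain read-modify-write: *(dst + off) op= src */
        break;
case BPF_ADD | BPF_FETCH:
case BPF_AND | BPF_FETCH:
case BPF_OR | BPF_FETCH:
case BPF_XOR | BPF_FETCH:
        /* as above, but the old memory value is also returned in src */
        break;
case BPF_XCHG:          /* src = atomic_xchg(dst + off, src) */
case BPF_CMPXCHG:       /* r0 = atomic_cmpxchg(dst + off, r0, src) */
        break;
default:
        pr_err_once("unknown atomic op code %02x\n", insn->imm);
        return -EINVAL;
}
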
765 const s32 imm = insn->imm; in build_insn() local
844 switch (imm) { in build_insn()
860 switch (imm) { in build_insn()
877 emit_a64_mov_i(is64, dst, imm, ctx); in build_insn()
882 if (is_addsub_imm(imm)) { in build_insn()
883 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
884 } else if (is_addsub_imm(-imm)) { in build_insn()
885 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
887 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
893 if (is_addsub_imm(imm)) { in build_insn()
894 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
895 } else if (is_addsub_imm(-imm)) { in build_insn()
896 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
898 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
904 a64_insn = A64_AND_I(is64, dst, dst, imm); in build_insn()
908 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
914 a64_insn = A64_ORR_I(is64, dst, dst, imm); in build_insn()
918 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
924 a64_insn = A64_EOR_I(is64, dst, dst, imm); in build_insn()
928 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
934 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
939 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
944 emit_a64_mov_i(is64, tmp2, imm, ctx); in build_insn()
950 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
954 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
958 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
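
The build_insn() ALU matches above share one strategy for BPF_K (immediate) operands: use the AArch64 immediate form when the constant encodes, otherwise materialize it into a scratch register and use the register form. A sketch of the BPF_ADD | BPF_K case under that assumption (the final register-form ADD is filled in; it is not part of the listing):

case BPF_ALU | BPF_ADD | BPF_K:
case BPF_ALU64 | BPF_ADD | BPF_K:
        if (is_addsub_imm(imm)) {
                emit(A64_ADD_I(is64, dst, dst, imm), ctx);   /* imm fits ADD (immediate) */
        } else if (is_addsub_imm(-imm)) {
                emit(A64_SUB_I(is64, dst, dst, -imm), ctx);  /* adding imm == subtracting -imm */
        } else {
                emit_a64_mov_i(is64, tmp, imm, ctx);         /* build imm in a temp register */
                emit(A64_ADD(is64, dst, dst, tmp), ctx);     /* register form */
        }
        break;

The AND/OR/XOR lines follow the same pattern with logical immediates: A64_AND_I/A64_ORR_I/A64_EOR_I can only encode bitmask immediates, and the temp-register fallback is taken when they cannot. Shift amounts (LSL/LSR/ASR) always encode directly, while MUL/DIV/MOD have no immediate form on AArch64, which is why those lines go straight through emit_a64_mov_i().
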
1054 if (is_addsub_imm(imm)) { in build_insn()
1055 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
1056 } else if (is_addsub_imm(-imm)) { in build_insn()
1057 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
1059 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
1065 a64_insn = A64_TST_I(is64, dst, imm); in build_insn()
1069 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
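
The conditional-jump matches use the same encode-or-fall-back idea for comparisons: CMP with the immediate when it fits, CMN with the negated value when -imm fits, and a temp register otherwise; BPF_JSET maps to TST with a logical immediate. A short sketch (the register-form compare in the fallback is an assumption):

if (is_addsub_imm(imm)) {
        emit(A64_CMP_I(is64, dst, imm), ctx);    /* CMP dst, #imm */
} else if (is_addsub_imm(-imm)) {
        emit(A64_CMN_I(is64, dst, -imm), ctx);   /* CMN dst, #-imm: flags for dst - imm */
} else {
        emit_a64_mov_i(is64, tmp, imm, ctx);
        emit(A64_CMP(is64, dst, tmp), ctx);      /* register form */
}
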
1110 imm64 = (u64)insn1.imm << 32 | (u32)imm; in build_insn()
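
A BPF_LD | BPF_DW | BPF_IMM instruction spans two 8-byte slots; the line above assembles the 64-bit constant from the low 32 bits in this slot's imm and the high 32 bits in the following slot (insn1). The (u32) cast keeps a negative s32 low half from sign-extending into the upper half. A worked example with made-up values:

/* first slot:  imm       = 0x89abcdef  (low 32 bits, negative as s32)
 * second slot: insn1.imm = 0x01234567  (high 32 bits)
 */
u64 imm64 = (u64)insn1.imm << 32 | (u32)imm;    /* = 0x0123456789abcdef */
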
1201 emit_a64_mov_i(1, tmp, imm, ctx); in build_insn()
1319 const s32 imm = insn->imm; in find_fpb_offset() local
1327 ((imm == BPF_XCHG || in find_fpb_offset()
1328 imm == (BPF_FETCH | BPF_ADD) || in find_fpb_offset()
1329 imm == (BPF_FETCH | BPF_AND) || in find_fpb_offset()
1330 imm == (BPF_FETCH | BPF_XOR) || in find_fpb_offset()
1331 imm == (BPF_FETCH | BPF_OR)) && in find_fpb_offset()
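
In find_fpb_offset(), this imm test singles out the atomic operations that write a result back to the source register: the four fetch variants plus XCHG (CMPXCHG is absent because its result lands in R0, not src). Expressed as a standalone predicate, with a hypothetical helper name used purely for illustration:

/* Hypothetical helper, not part of the JIT: does this BPF_ATOMIC insn
 * write its result back into the source register? */
static bool bpf_atomic_writes_src(const struct bpf_insn *insn)
{
        const s32 imm = insn->imm;

        return imm == BPF_XCHG ||
               imm == (BPF_FETCH | BPF_ADD) ||
               imm == (BPF_FETCH | BPF_AND) ||
               imm == (BPF_FETCH | BPF_XOR) ||
               imm == (BPF_FETCH | BPF_OR);
}
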