Lines Matching refs:imm

33 #define check_imm(bits, imm) do {				\  argument
34 if ((((imm) > 0) && ((imm) >> (bits))) || \
35 (((imm) < 0) && (~(imm) >> (bits)))) { \
37 i, imm, imm); \
41 #define check_imm19(imm) check_imm(19, imm) argument
42 #define check_imm26(imm) check_imm(26, imm) argument
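These hits appear to come from the Linux arm64 BPF JIT (arch/arm64/net/bpf_jit_comp.c). The truncated check_imm() lines above implement a signed range check used by check_imm19()/check_imm26() for branch offsets: a positive immediate is rejected if it shifts to a nonzero value after `bits` bits, a negative one if its one's complement does. A minimal standalone sketch of the same predicate; the name fits_signed_imm() and the main() harness are illustrative, not from the file.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Same test as the check_imm(bits, imm) macro in the listing: a
 * positive immediate is out of range if any bit at or above `bits`
 * is set, a negative one if any bit at or above `bits` is clear
 * (checked via the one's complement). fits_signed_imm() is a
 * made-up name for illustration.
 */
static bool fits_signed_imm(unsigned int bits, int32_t imm)
{
	if (imm > 0 && (imm >> bits))
		return false;
	if (imm < 0 && (~imm >> bits))
		return false;
	return true;
}

int main(void)
{
	printf("%d\n", fits_signed_imm(19, 0x7ffff));   /* 1: passes the check */
	printf("%d\n", fits_signed_imm(19, 0x80000));   /* 0: rejected         */
	printf("%d\n", fits_signed_imm(19, -0x80000));  /* 1: passes the check */
	return 0;
}
```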
216 static bool is_addsub_imm(u32 imm) in is_addsub_imm() argument
219 return !(imm & ~0xfff) || !(imm & ~0xfff000); in is_addsub_imm()
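is_addsub_imm() at line 216 accepts exactly the immediates that AArch64 ADD/SUB (immediate) instructions can encode: an unsigned 12-bit value, optionally shifted left by 12 bits. The two masks in the return expression test those two forms. A small self-contained demo of the same predicate (the main() harness is added for illustration):

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Copy of the predicate at line 219: true when imm is a plain 12-bit
 * immediate or a 12-bit immediate shifted left by 12, the only two
 * forms AArch64 ADD/SUB (immediate) can encode. */
static bool is_addsub_imm(uint32_t imm)
{
	return !(imm & ~0xfffU) || !(imm & ~0xfff000U);
}

int main(void)
{
	printf("%d\n", is_addsub_imm(0xfff));      /* 1: 12-bit immediate        */
	printf("%d\n", is_addsub_imm(0x123000));   /* 1: 12-bit value << 12      */
	printf("%d\n", is_addsub_imm(0x1001));     /* 0: needs bits from both    */
	printf("%d\n", is_addsub_imm(0x1000000));  /* 0: above the shifted field */
	return 0;
}
```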
465 switch (insn->imm) { in emit_lse_atomic()
503 pr_err_once("unknown atomic op code %02x\n", insn->imm); in emit_lse_atomic()
525 const s32 imm = insn->imm; in emit_ll_sc_atomic() local
539 if (imm == BPF_ADD || imm == BPF_AND || in emit_ll_sc_atomic()
540 imm == BPF_OR || imm == BPF_XOR) { in emit_ll_sc_atomic()
543 if (imm == BPF_ADD) in emit_ll_sc_atomic()
545 else if (imm == BPF_AND) in emit_ll_sc_atomic()
547 else if (imm == BPF_OR) in emit_ll_sc_atomic()
555 } else if (imm == (BPF_ADD | BPF_FETCH) || in emit_ll_sc_atomic()
556 imm == (BPF_AND | BPF_FETCH) || in emit_ll_sc_atomic()
557 imm == (BPF_OR | BPF_FETCH) || in emit_ll_sc_atomic()
558 imm == (BPF_XOR | BPF_FETCH)) { in emit_ll_sc_atomic()
564 if (imm == (BPF_ADD | BPF_FETCH)) in emit_ll_sc_atomic()
566 else if (imm == (BPF_AND | BPF_FETCH)) in emit_ll_sc_atomic()
568 else if (imm == (BPF_OR | BPF_FETCH)) in emit_ll_sc_atomic()
577 } else if (imm == BPF_XCHG) { in emit_ll_sc_atomic()
586 } else if (imm == BPF_CMPXCHG) { in emit_ll_sc_atomic()
602 pr_err_once("unknown atomic op code %02x\n", imm); in emit_ll_sc_atomic()
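Both emit_lse_atomic() and emit_ll_sc_atomic() dispatch on insn->imm, which for BPF_STX | BPF_ATOMIC instructions encodes the atomic operation: plain BPF_ADD/BPF_AND/BPF_OR/BPF_XOR, the same ops with BPF_FETCH set when the old value must be returned, and BPF_XCHG/BPF_CMPXCHG. A standalone decoder of that field, using the opcode values from include/uapi/linux/bpf.h; the decoder itself (atomic_op_name) is illustrative and not part of the JIT:

```c
#include <stdint.h>
#include <stdio.h>

/* Atomic operation encodings carried in insn->imm for
 * BPF_STX | BPF_ATOMIC (values from include/uapi/linux/bpf.h). */
#define BPF_ADD		0x00
#define BPF_OR		0x40
#define BPF_AND		0x50
#define BPF_XOR		0xa0
#define BPF_FETCH	0x01
#define BPF_XCHG	(0xe0 | BPF_FETCH)
#define BPF_CMPXCHG	(0xf0 | BPF_FETCH)

/* Illustrative helper: name the operation the JIT would have to emit. */
static const char *atomic_op_name(int32_t imm)
{
	switch (imm) {
	case BPF_ADD:			return "add";
	case BPF_AND:			return "and";
	case BPF_OR:			return "or";
	case BPF_XOR:			return "xor";
	case BPF_ADD | BPF_FETCH:	return "fetch-add";
	case BPF_AND | BPF_FETCH:	return "fetch-and";
	case BPF_OR | BPF_FETCH:	return "fetch-or";
	case BPF_XOR | BPF_FETCH:	return "fetch-xor";
	case BPF_XCHG:			return "exchange";
	case BPF_CMPXCHG:		return "compare-exchange";
	default:			return "unknown";
	}
}

int main(void)
{
	printf("%s\n", atomic_op_name(BPF_ADD | BPF_FETCH)); /* fetch-add */
	printf("%s\n", atomic_op_name(BPF_XCHG));            /* exchange  */
	return 0;
}
```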
772 const s32 imm = insn->imm; in build_insn() local
872 switch (imm) { in build_insn()
888 switch (imm) { in build_insn()
905 emit_a64_mov_i(is64, dst, imm, ctx); in build_insn()
910 if (is_addsub_imm(imm)) { in build_insn()
911 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
912 } else if (is_addsub_imm(-imm)) { in build_insn()
913 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
915 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
921 if (is_addsub_imm(imm)) { in build_insn()
922 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
923 } else if (is_addsub_imm(-imm)) { in build_insn()
924 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
926 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
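Lines 910-926 show the lowering pattern for BPF add/subtract with an immediate operand: emit a single ADD/SUB (immediate) when imm encodes directly, flip to the opposite instruction when -imm encodes, and otherwise materialize the constant into a temporary register first. A hedged sketch of that decision for the add case, with the real A64_* emit calls and ctx handling replaced by prints:

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static bool is_addsub_imm(uint32_t imm)
{
	return !(imm & ~0xfffU) || !(imm & ~0xfff000U);
}

/*
 * Illustrative version of the pattern at lines 910-915: prefer the
 * immediate form, fall back to the negated immediate with the
 * opposite instruction, and only then spill the constant into a
 * temporary register. The printed strings stand in for the real
 * A64_ADD_I/A64_SUB_I/emit_a64_mov_i calls.
 */
static void lower_add_imm(int32_t imm)
{
	if (is_addsub_imm(imm))
		printf("ADD dst, dst, #%d\n", imm);
	else if (is_addsub_imm(-imm))
		printf("SUB dst, dst, #%d\n", -imm);
	else
		printf("MOV tmp, #%d ; ADD dst, dst, tmp\n", imm);
}

int main(void)
{
	lower_add_imm(42);        /* fits: ADD immediate              */
	lower_add_imm(-42);       /* -imm fits: becomes SUB immediate */
	lower_add_imm(0x12345);   /* neither fits: goes through tmp   */
	return 0;
}
```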
932 a64_insn = A64_AND_I(is64, dst, dst, imm); in build_insn()
936 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
942 a64_insn = A64_ORR_I(is64, dst, dst, imm); in build_insn()
946 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
952 a64_insn = A64_EOR_I(is64, dst, dst, imm); in build_insn()
956 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
962 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
967 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
975 emit_a64_mov_i(is64, tmp2, imm, ctx); in build_insn()
984 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
988 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
992 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
1001 jmp_offset = bpf2a64_offset(i, imm, ctx); in build_insn()
1092 if (is_addsub_imm(imm)) { in build_insn()
1093 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
1094 } else if (is_addsub_imm(-imm)) { in build_insn()
1095 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
1097 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
1103 a64_insn = A64_TST_I(is64, dst, imm); in build_insn()
1107 emit_a64_mov_i(is64, tmp, imm, ctx); in build_insn()
1148 imm64 = (u64)insn1.imm << 32 | (u32)imm; in build_insn()
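Line 1148 is the BPF_LD | BPF_IMM | BPF_DW case: a 64-bit immediate load spans two BPF instructions, with the current instruction's imm holding the low 32 bits and the following instruction's imm (insn1.imm) holding the high 32 bits. A tiny worked example of the same reassembly:

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	/* BPF_LD | BPF_IMM | BPF_DW spreads a 64-bit constant over two
	 * instructions: insn->imm carries the low word, insn1.imm (the
	 * next instruction) carries the high word. */
	int32_t imm  = (int32_t)0x89abcdefU; /* low 32 bits  */
	int32_t imm1 = 0x01234567;           /* high 32 bits */

	uint64_t imm64 = (uint64_t)imm1 << 32 | (uint32_t)imm;
	printf("%#llx\n", (unsigned long long)imm64); /* 0x123456789abcdef */
	return 0;
}
```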
1266 emit_a64_mov_i(1, tmp, imm, ctx); in build_insn()
1384 const s32 imm = insn->imm; in find_fpb_offset() local
1392 ((imm == BPF_XCHG || in find_fpb_offset()
1393 imm == (BPF_FETCH | BPF_ADD) || in find_fpb_offset()
1394 imm == (BPF_FETCH | BPF_AND) || in find_fpb_offset()
1395 imm == (BPF_FETCH | BPF_XOR) || in find_fpb_offset()
1396 imm == (BPF_FETCH | BPF_OR)) && in find_fpb_offset()