Lines matching full:emit (call sites of emit() in the arm64 eBPF JIT, arch/arm64/net/bpf_jit_comp.c)
65 static inline void emit(const u32 insn, struct jit_ctx *ctx) in emit() function
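Line 65 is the definition that every other hit funnels through. A minimal sketch of such a helper, assuming struct jit_ctx carries an image pointer and an instruction index as in the JIT's two-pass scheme (a sizing pass with image == NULL, then a fill pass), would be:

    /* Append one 32-bit AArch64 instruction to the JIT image and advance
     * the index.  During the sizing pass image is NULL, so only idx moves;
     * the fill pass writes the little-endian encoding into the buffer.
     */
    static inline void emit(const u32 insn, struct jit_ctx *ctx)
    {
        if (ctx->image != NULL)
            ctx->image[ctx->idx] = cpu_to_le32(insn);

        ctx->idx++;
    }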
81 emit(A64_MOVN(is64, reg, (u16)~lo, 0), ctx); in emit_a64_mov_i()
83 emit(A64_MOVN(is64, reg, (u16)~hi, 16), ctx); in emit_a64_mov_i()
85 emit(A64_MOVK(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
88 emit(A64_MOVZ(is64, reg, lo, 0), ctx); in emit_a64_mov_i()
90 emit(A64_MOVK(is64, reg, hi, 16), ctx); in emit_a64_mov_i()
116 emit(A64_MOVN(1, reg, (rev_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
118 emit(A64_MOVZ(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
122 emit(A64_MOVK(1, reg, (nrm_tmp >> shift) & 0xffff, shift), ctx); in emit_a64_mov_i64()
138 emit(A64_MOVN(1, reg, ~tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
142 emit(A64_MOVK(1, reg, tmp & 0xffff, shift), ctx); in emit_addr_mov_i64()
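The three mov-immediate helpers above (emit_a64_mov_i at lines 81-90, emit_a64_mov_i64 at 116-122, emit_addr_mov_i64 at 138-142) all build constants from the MOVZ/MOVN/MOVK family, which places one 16-bit halfword per instruction. The kernel versions choose MOVZ or MOVN depending on whether the value or its bitwise complement takes fewer instructions and skip halfwords that are already correct, while emit_addr_mov_i64 (MOVN then MOVK, lines 138-142) keeps a fixed-length sequence so kernel addresses, whose upper bits are all ones, always occupy the same number of slots. A deliberately unoptimized sketch of the underlying pattern (the helper name is hypothetical):

    /* Hypothetical illustration only: MOVZ the low halfword, then MOVK the
     * remaining halfwords into place.  The real helpers listed above emit
     * fewer instructions for values with all-zero or all-one halfwords.
     */
    static void emit_mov_i64_sketch(const int reg, const u64 val,
                                    struct jit_ctx *ctx)
    {
        int shift;

        emit(A64_MOVZ(1, reg, val & 0xffff, 0), ctx);
        for (shift = 16; shift < 64; shift += 16)
            emit(A64_MOVK(1, reg, (val >> shift) & 0xffff, shift), ctx);
    }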
228 emit(A64_BTI_C, ctx); in build_prologue()
231 emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); in build_prologue()
232 emit(A64_MOV(1, A64_FP, A64_SP), ctx); in build_prologue()
235 emit(A64_PUSH(r6, r7, A64_SP), ctx); in build_prologue()
236 emit(A64_PUSH(r8, r9, A64_SP), ctx); in build_prologue()
237 emit(A64_PUSH(fp, tcc, A64_SP), ctx); in build_prologue()
240 emit(A64_MOV(1, fp, A64_SP), ctx); in build_prologue()
244 emit(A64_MOVZ(1, tcc, 0, 0), ctx); in build_prologue()
255 emit(A64_BTI_J, ctx); in build_prologue()
261 emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_prologue()
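Lines 228 through 261 are the prologue, read top to bottom: a BTI "c" landing pad, save FP/LR and establish a frame, save the callee-saved registers backing BPF R6-R9 plus the BPF frame pointer and the tail-call counter, point the BPF frame pointer at the current SP, zero the counter, emit a BTI "j" landing pad for the BR used by tail calls, and reserve the program's stack. A commented sketch of that sequence; the register parameters stand in for the JIT's BPF-to-arm64 mapping, and the BTI and counter-reset steps are shown unconditionally although the real function guards them (BTI config, classic-BPF entry):

    static void build_prologue_sketch(struct jit_ctx *ctx, u8 r6, u8 r7,
                                      u8 r8, u8 r9, u8 fp, u8 tcc)
    {
        emit(A64_BTI_C, ctx);                        /* landing pad for indirect calls */

        emit(A64_PUSH(A64_FP, A64_LR, A64_SP), ctx); /* save frame pointer / link reg  */
        emit(A64_MOV(1, A64_FP, A64_SP), ctx);       /* establish the frame            */

        emit(A64_PUSH(r6, r7, A64_SP), ctx);         /* callee-saved regs for R6-R9,   */
        emit(A64_PUSH(r8, r9, A64_SP), ctx);         /* the BPF frame pointer and the  */
        emit(A64_PUSH(fp, tcc, A64_SP), ctx);        /* tail-call counter              */

        emit(A64_MOV(1, fp, A64_SP), ctx);           /* BPF frame pointer = current SP */
        emit(A64_MOVZ(1, tcc, 0, 0), ctx);           /* tail_call_cnt = 0              */

        emit(A64_BTI_J, ctx);                        /* tail calls branch to here      */

        emit(A64_SUB_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); /* reserve BPF stack */
    }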
285 emit(A64_LDR32(tmp, r2, tmp), ctx); in emit_bpf_tail_call()
286 emit(A64_MOV(0, r3, r3), ctx); in emit_bpf_tail_call()
287 emit(A64_CMP(0, r3, tmp), ctx); in emit_bpf_tail_call()
288 emit(A64_B_(A64_COND_CS, jmp_offset), ctx); in emit_bpf_tail_call()
295 emit(A64_CMP(1, tcc, tmp), ctx); in emit_bpf_tail_call()
296 emit(A64_B_(A64_COND_HI, jmp_offset), ctx); in emit_bpf_tail_call()
297 emit(A64_ADD_I(1, tcc, tcc, 1), ctx); in emit_bpf_tail_call()
305 emit(A64_ADD(1, tmp, r2, tmp), ctx); in emit_bpf_tail_call()
306 emit(A64_LSL(1, prg, r3, 3), ctx); in emit_bpf_tail_call()
307 emit(A64_LDR64(prg, tmp, prg), ctx); in emit_bpf_tail_call()
308 emit(A64_CBZ(1, prg, jmp_offset), ctx); in emit_bpf_tail_call()
313 emit(A64_LDR64(tmp, prg, tmp), ctx); in emit_bpf_tail_call()
314 emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx); in emit_bpf_tail_call()
315 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in emit_bpf_tail_call()
316 emit(A64_BR(tmp), ctx); in emit_bpf_tail_call()
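Lines 285-316 are the heart of bpf_tail_call(): bounds-check the index against array->map.max_entries, enforce the tail-call limit, load array->ptrs[index], bail out if it is NULL, then release this program's stack and jump into the target just past its prologue. The sketch below follows the listed sequence; r2/r3 back BPF R2 (the prog array) and R3 (the index), prg/tmp are scratch registers, tcc is the counter, and out_offset stands for each branch's forward distance to the fall-through path, which the real code computes per branch from ctx->idx and validates. The emit_a64_mov_i64() calls that load the struct offsets are not emit() hits and therefore do not appear in the listing.

    static void emit_bpf_tail_call_sketch(struct jit_ctx *ctx, u8 r2, u8 r3,
                                          u8 prg, u8 tmp, u8 tcc, int out_offset)
    {
        /* if ((u32)index >= array->map.max_entries) goto out; */
        emit_a64_mov_i64(tmp, offsetof(struct bpf_array, map.max_entries), ctx);
        emit(A64_LDR32(tmp, r2, tmp), ctx);          /* tmp = array->map.max_entries */
        emit(A64_MOV(0, r3, r3), ctx);               /* zero-extend the 32-bit index */
        emit(A64_CMP(0, r3, tmp), ctx);
        emit(A64_B_(A64_COND_CS, out_offset), ctx);  /* unsigned >=, goto out        */

        /* if (tail_call_cnt > MAX_TAIL_CALL_CNT) goto out;  tail_call_cnt++; */
        emit_a64_mov_i64(tmp, MAX_TAIL_CALL_CNT, ctx);
        emit(A64_CMP(1, tcc, tmp), ctx);
        emit(A64_B_(A64_COND_HI, out_offset), ctx);  /* unsigned >, goto out         */
        emit(A64_ADD_I(1, tcc, tcc, 1), ctx);

        /* prog = array->ptrs[index];  if (prog == NULL) goto out; */
        emit_a64_mov_i64(tmp, offsetof(struct bpf_array, ptrs), ctx);
        emit(A64_ADD(1, tmp, r2, tmp), ctx);
        emit(A64_LSL(1, prg, r3, 3), ctx);           /* index * sizeof(void *)       */
        emit(A64_LDR64(prg, tmp, prg), ctx);
        emit(A64_CBZ(1, prg, out_offset), ctx);

        /* goto *(prog->bpf_func + prologue size): drop this program's BPF
         * stack, then jump past the target's prologue so the saved state
         * and the tail-call counter are reused rather than re-initialized. */
        emit_a64_mov_i64(tmp, offsetof(struct bpf_prog, bpf_func), ctx);
        emit(A64_LDR64(tmp, prg, tmp), ctx);
        emit(A64_ADD_I(1, tmp, tmp, sizeof(u32) * PROLOGUE_OFFSET), ctx);
        emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx);
        emit(A64_BR(tmp), ctx);
    }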
341 emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); in build_epilogue()
344 emit(A64_POP(fp, A64_R(26), A64_SP), ctx); in build_epilogue()
347 emit(A64_POP(r8, r9, A64_SP), ctx); in build_epilogue()
348 emit(A64_POP(r6, r7, A64_SP), ctx); in build_epilogue()
351 emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx); in build_epilogue()
354 emit(A64_MOV(1, A64_R(0), r0), ctx); in build_epilogue()
356 emit(A64_RET(A64_LR), ctx); in build_epilogue()
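The epilogue at lines 341-356 unwinds the prologue in reverse: drop the BPF stack, pop the saved register pairs, restore FP/LR, copy the register backing BPF R0 into x0 and return. As a sketch (x26 is popped together with the BPF frame pointer because the prologue pushed the fp/tcc pair there):

    static void build_epilogue_sketch(struct jit_ctx *ctx, u8 r0, u8 r6,
                                      u8 r7, u8 r8, u8 r9, u8 fp)
    {
        emit(A64_ADD_I(1, A64_SP, A64_SP, ctx->stack_size), ctx); /* drop BPF stack */

        emit(A64_POP(fp, A64_R(26), A64_SP), ctx);   /* fp / tail-call counter pair */
        emit(A64_POP(r8, r9, A64_SP), ctx);          /* callee-saved regs           */
        emit(A64_POP(r6, r7, A64_SP), ctx);

        emit(A64_POP(A64_FP, A64_LR, A64_SP), ctx);  /* restore frame pointer / LR  */

        emit(A64_MOV(1, A64_R(0), r0), ctx);         /* return value into x0        */
        emit(A64_RET(A64_LR), ctx);
    }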
461 emit(A64_MOV(is64, dst, src), ctx); in build_insn()
466 emit(A64_ADD(is64, dst, dst, src), ctx); in build_insn()
470 emit(A64_SUB(is64, dst, dst, src), ctx); in build_insn()
474 emit(A64_AND(is64, dst, dst, src), ctx); in build_insn()
478 emit(A64_ORR(is64, dst, dst, src), ctx); in build_insn()
482 emit(A64_EOR(is64, dst, dst, src), ctx); in build_insn()
486 emit(A64_MUL(is64, dst, dst, src), ctx); in build_insn()
494 emit(A64_UDIV(is64, dst, dst, src), ctx); in build_insn()
497 emit(A64_UDIV(is64, tmp, dst, src), ctx); in build_insn()
498 emit(A64_MSUB(is64, dst, dst, tmp, src), ctx); in build_insn()
504 emit(A64_LSLV(is64, dst, dst, src), ctx); in build_insn()
508 emit(A64_LSRV(is64, dst, dst, src), ctx); in build_insn()
512 emit(A64_ASRV(is64, dst, dst, src), ctx); in build_insn()
517 emit(A64_NEG(is64, dst, dst), ctx); in build_insn()
531 emit(A64_REV16(is64, dst, dst), ctx); in build_insn()
533 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
536 emit(A64_REV32(is64, dst, dst), ctx); in build_insn()
540 emit(A64_REV64(dst, dst), ctx); in build_insn()
548 emit(A64_UXTH(is64, dst, dst), ctx); in build_insn()
552 emit(A64_UXTW(is64, dst, dst), ctx); in build_insn()
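From line 461 onward the hits sit inside build_insn(), one case per BPF opcode; dst, src, tmp and is64 below keep the meanings they have there (mapped destination/source registers, a scratch register, the 64-bit flag). Most register-source ALU ops map to a single A64 instruction; the exceptions are modulo, which AArch64 lacks, and the byte-swap ops, which pair REV16/REV32/REV64 with a zero-extension when the swapped width is narrower than the register. The modulo case at lines 497-498 derives the remainder from the truncated quotient:

    /* BPF_MOD with a register source:  dst = dst - (dst / src) * src.
     * A64_MSUB follows the Rd = Ra - Rn * Rm convention, so the second
     * pair of operands below multiplies the quotient back out.
     */
    emit(A64_UDIV(is64, tmp, dst, src), ctx);        /* tmp = dst / src        */
    emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);   /* dst = dst - tmp * src  */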
568 emit(A64_ADD_I(is64, dst, dst, imm), ctx); in build_insn()
570 emit(A64_SUB_I(is64, dst, dst, -imm), ctx); in build_insn()
573 emit(A64_ADD(is64, dst, dst, tmp), ctx); in build_insn()
579 emit(A64_SUB_I(is64, dst, dst, imm), ctx); in build_insn()
581 emit(A64_ADD_I(is64, dst, dst, -imm), ctx); in build_insn()
584 emit(A64_SUB(is64, dst, dst, tmp), ctx); in build_insn()
591 emit(a64_insn, ctx); in build_insn()
594 emit(A64_AND(is64, dst, dst, tmp), ctx); in build_insn()
601 emit(a64_insn, ctx); in build_insn()
604 emit(A64_ORR(is64, dst, dst, tmp), ctx); in build_insn()
611 emit(a64_insn, ctx); in build_insn()
614 emit(A64_EOR(is64, dst, dst, tmp), ctx); in build_insn()
620 emit(A64_MUL(is64, dst, dst, tmp), ctx); in build_insn()
625 emit(A64_UDIV(is64, dst, dst, tmp), ctx); in build_insn()
630 emit(A64_UDIV(is64, tmp, dst, tmp2), ctx); in build_insn()
631 emit(A64_MSUB(is64, dst, dst, tmp, tmp2), ctx); in build_insn()
635 emit(A64_LSL(is64, dst, dst, imm), ctx); in build_insn()
639 emit(A64_LSR(is64, dst, dst, imm), ctx); in build_insn()
643 emit(A64_ASR(is64, dst, dst, imm), ctx); in build_insn()
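The immediate (BPF_K) ALU cases at lines 568-643 share one pattern: try to fold the constant into the instruction's immediate field, otherwise materialize it in a scratch register and reuse the register form. ADD/SUB try the 12-bit arithmetic immediate of either sign, AND/OR/XOR try a bitmask-immediate encoding (the precomputed a64_insn emitted at lines 591, 601 and 611), and MUL/DIV/MOD always go through the scratch register. A sketch of the ADD case, where is_addsub_imm() stands for the JIT's check that the value fits the unsigned 12-bit field:

    /* BPF_ADD | BPF_K: prefer the immediate forms, fall back to tmp. */
    if (is_addsub_imm(imm)) {
        emit(A64_ADD_I(is64, dst, dst, imm), ctx);
    } else if (is_addsub_imm(-imm)) {
        emit(A64_SUB_I(is64, dst, dst, -imm), ctx);
    } else {
        emit_a64_mov_i(is64, tmp, imm, ctx);
        emit(A64_ADD(is64, dst, dst, tmp), ctx);
    }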
650 emit(A64_B(jmp_offset), ctx); in build_insn()
673 emit(A64_CMP(is64, dst, src), ctx); in build_insn()
712 emit(A64_B_(jmp_cond, jmp_offset), ctx); in build_insn()
716 emit(A64_TST(is64, dst, src), ctx); in build_insn()
740 emit(A64_CMP_I(is64, dst, imm), ctx); in build_insn()
742 emit(A64_CMN_I(is64, dst, -imm), ctx); in build_insn()
745 emit(A64_CMP(is64, dst, tmp), ctx); in build_insn()
752 emit(a64_insn, ctx); in build_insn()
755 emit(A64_TST(is64, dst, tmp), ctx); in build_insn()
770 emit(A64_BLR(tmp), ctx); in build_insn()
771 emit(A64_MOV(1, r0, A64_R(0)), ctx); in build_insn()
787 emit(A64_B(jmp_offset), ctx); in build_insn()
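The jump cases (lines 673-755) all reduce to a compare, CMP/CMN for ordinary conditions and TST for BPF_JSET, with the same immediate-or-scratch fallback as the ALU cases, followed by A64_B_ with the condition code the BPF opcode maps to. BPF_CALL (lines 770-771) materializes the helper address into a scratch register, branches with BLR, and copies the native return register x0 into the register backing BPF R0; BPF_EXIT (line 787) branches forward to the shared epilogue. A sketch of the call sequence, where func_addr stands for the address the BPF core resolves for this call instruction:

    /* BPF_JMP | BPF_CALL: load the target, branch-and-link, fetch result. */
    emit_addr_mov_i64(tmp, func_addr, ctx);      /* tmp = helper address    */
    emit(A64_BLR(tmp), ctx);                     /* x30 = return addr; jump */
    emit(A64_MOV(1, r0, A64_R(0)), ctx);         /* BPF R0 = x0             */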
814 emit(A64_LDR32(dst, src, tmp), ctx); in build_insn()
817 emit(A64_LDRH(dst, src, tmp), ctx); in build_insn()
820 emit(A64_LDRB(dst, src, tmp), ctx); in build_insn()
823 emit(A64_LDR64(dst, src, tmp), ctx); in build_insn()
842 emit(A64_STR32(tmp, dst, tmp2), ctx); in build_insn()
845 emit(A64_STRH(tmp, dst, tmp2), ctx); in build_insn()
848 emit(A64_STRB(tmp, dst, tmp2), ctx); in build_insn()
851 emit(A64_STR64(tmp, dst, tmp2), ctx); in build_insn()
864 emit(A64_STR32(src, dst, tmp), ctx); in build_insn()
867 emit(A64_STRH(src, dst, tmp), ctx); in build_insn()
870 emit(A64_STRB(src, dst, tmp), ctx); in build_insn()
873 emit(A64_STR64(src, dst, tmp), ctx); in build_insn()
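The memory cases at lines 814-873 always route the 16-bit BPF offset through a scratch register: the offset is materialized with emit_a64_mov_i() and the access is emitted as a register-offset load or store of the requested width. BPF_ST needs a second scratch register because both the immediate value and the offset must be held somewhere. A sketch of the word-sized LDX and ST cases:

    /* BPF_LDX | BPF_MEM | BPF_W:  dst = *(u32 *)(src + off) */
    emit_a64_mov_i(1, tmp, off, ctx);            /* tmp = off                  */
    emit(A64_LDR32(dst, src, tmp), ctx);         /* dst = *(u32 *)(src + tmp)  */

    /* BPF_ST | BPF_MEM | BPF_W:  *(u32 *)(dst + off) = imm */
    emit_a64_mov_i(1, tmp2, off, ctx);           /* tmp2 = off                 */
    emit_a64_mov_i(1, tmp, imm, ctx);            /* tmp  = imm                 */
    emit(A64_STR32(tmp, dst, tmp2), ctx);        /* *(u32 *)(dst + tmp2) = tmp */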
886 emit(A64_ADD(1, tmp, tmp, dst), ctx); in build_insn()
890 emit(A64_STADD(isdw, reg, src), ctx); in build_insn()
892 emit(A64_LDXR(isdw, tmp2, reg), ctx); in build_insn()
893 emit(A64_ADD(isdw, tmp2, tmp2, src), ctx); in build_insn()
894 emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx); in build_insn()
897 emit(A64_CBNZ(0, tmp3, jmp_offset), ctx); in build_insn()
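The last group (lines 886-897) is the atomic add: when the offset is non-zero the effective address is first formed in a scratch register, then either a single STADD is emitted on cores with LSE atomics, or a load-exclusive/add/store-exclusive loop that retries via CBNZ on the store's status register. A sketch of both paths, where lse_available stands for the JIT's CPU-feature check and the CBNZ target is the LDXR three instructions back (the real code computes and range-checks that offset):

    /* Atomic  *(u32/u64 *)(dst + off) += src;  isdw selects word vs dword. */
    if (off) {
        emit_a64_mov_i(1, tmp, off, ctx);        /* tmp = off               */
        emit(A64_ADD(1, tmp, tmp, dst), ctx);    /* tmp = dst + off         */
        reg = tmp;
    } else {
        reg = dst;
    }

    if (lse_available) {
        emit(A64_STADD(isdw, reg, src), ctx);    /* single far atomic add   */
    } else {
        emit(A64_LDXR(isdw, tmp2, reg), ctx);    /* tmp2 = *reg (exclusive) */
        emit(A64_ADD(isdw, tmp2, tmp2, src), ctx);
        emit(A64_STXR(isdw, tmp2, reg, tmp3), ctx); /* tmp3 = 0 on success  */
        emit(A64_CBNZ(0, tmp3, -3), ctx);        /* failed: retry from LDXR */
    }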