Lines Matching refs:nfp_prog
20 nfp_bpf_goto_meta(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in nfp_bpf_goto_meta() argument
28 if (min(forward, backward) > nfp_prog->n_insns - insn_idx - 1) { in nfp_bpf_goto_meta()
29 backward = nfp_prog->n_insns - insn_idx - 1; in nfp_bpf_goto_meta()
30 meta = nfp_prog_last_meta(nfp_prog); in nfp_bpf_goto_meta()
34 meta = nfp_prog_first_meta(nfp_prog); in nfp_bpf_goto_meta()
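
The nfp_bpf_goto_meta() hits above show the lookup being clamped to nfp_prog_first_meta()/nfp_prog_last_meta() whenever a list end is closer than the cached meta, before stepping through the list. A minimal userspace model of that bidirectional walk, assuming one meta per instruction and using made-up struct names (this is a sketch, not the kernel code):

#include <stdio.h>

struct meta {                            /* stand-in for nfp_insn_meta */
        unsigned int n;                  /* index of the insn it annotates */
        struct meta *prev, *next;
};

struct prog {                            /* stand-in for nfp_prog */
        struct meta *first, *last;       /* doubly linked list of metas */
        unsigned int n_insns;
        struct meta *cached;             /* plays the role of verifier_meta */
};

/* Start from whichever of the cached meta, the list head or the list tail
 * is nearest to insn_idx, then step one meta at a time. */
static struct meta *goto_meta(struct prog *p, unsigned int insn_idx)
{
        struct meta *m = p->cached;
        unsigned int from_cached = m->n > insn_idx ? m->n - insn_idx
                                                   : insn_idx - m->n;
        unsigned int from_head = insn_idx;
        unsigned int from_tail = p->n_insns - insn_idx - 1;

        if (from_head < from_cached && from_head <= from_tail)
                m = p->first;
        else if (from_tail < from_cached)
                m = p->last;

        while (m->n != insn_idx)
                m = m->n < insn_idx ? m->next : m->prev;

        p->cached = m;
        return m;
}

int main(void)
{
        struct meta m[5];
        struct prog p = { &m[0], &m[4], 5, &m[2] };
        unsigned int i;

        for (i = 0; i < 5; i++) {
                m[i].n = i;
                m[i].prev = i ? &m[i - 1] : NULL;
                m[i].next = i < 4 ? &m[i + 1] : NULL;
        }
        printf("meta for insn 4 has n=%u\n", goto_meta(&p, 4)->n);
        return 0;
}
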
48 nfp_record_adjust_head(struct nfp_app_bpf *bpf, struct nfp_prog *nfp_prog, in nfp_record_adjust_head() argument
69 if (nfp_prog->adjust_head_location) { in nfp_record_adjust_head()
71 if (nfp_prog->adjust_head_location != meta->n) in nfp_record_adjust_head()
80 nfp_prog->adjust_head_location = location; in nfp_record_adjust_head()
170 nfp_bpf_check_helper_call(struct nfp_prog *nfp_prog, in nfp_bpf_check_helper_call() argument
177 struct nfp_app_bpf *bpf = nfp_prog->bpf; in nfp_bpf_check_helper_call()
191 nfp_record_adjust_head(bpf, nfp_prog, meta, reg2); in nfp_bpf_check_helper_call()
284 dev_warn_once(&nfp_prog->bpf->app->pf->pdev->dev, in nfp_bpf_check_helper_call()
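
The adjust_head lines above read and compare adjust_head_location against meta->n before updating it, with nfp_bpf_check_helper_call() feeding it through nfp_record_adjust_head(); the pattern suggests at most one bpf_xdp_adjust_head() call site can be specialized. A hedged sketch of that bookkeeping (field name, sentinel and the "reject a second site" policy are illustrative, not lifted from the kernel):

#include <stdbool.h>
#include <stdio.h>

struct prog_state {
        unsigned int adjust_head_location;      /* 0 = no call seen yet */
};

/* Remember the instruction index of the first adjust-head style call;
 * refuse to specialize if a second, different call site shows up. */
static bool record_adjust_head(struct prog_state *st, unsigned int insn_idx)
{
        if (st->adjust_head_location && st->adjust_head_location != insn_idx)
                return false;           /* two distinct call sites: give up */

        st->adjust_head_location = insn_idx;
        return true;
}

int main(void)
{
        struct prog_state st = { 0 };

        printf("%d\n", record_adjust_head(&st, 7));     /* 1: first site */
        printf("%d\n", record_adjust_head(&st, 7));     /* 1: same site */
        printf("%d\n", record_adjust_head(&st, 12));    /* 0: second site */
        return 0;
}
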
311 nfp_bpf_check_exit(struct nfp_prog *nfp_prog, in nfp_bpf_check_exit() argument
317 if (nfp_prog->type == BPF_PROG_TYPE_XDP) in nfp_bpf_check_exit()
330 if (nfp_prog->type == BPF_PROG_TYPE_SCHED_CLS && in nfp_bpf_check_exit()
343 nfp_bpf_check_stack_access(struct nfp_prog *nfp_prog, in nfp_bpf_check_stack_access() argument
451 nfp_bpf_check_ptr(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in nfp_bpf_check_ptr() argument
466 err = nfp_bpf_check_stack_access(nfp_prog, meta, reg, env); in nfp_bpf_check_ptr()
502 nfp_bpf_check_store(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in nfp_bpf_check_store() argument
508 if (nfp_prog->type == BPF_PROG_TYPE_XDP) { in nfp_bpf_check_store()
512 if (nfp_prog->bpf->queue_select) in nfp_bpf_check_store()
522 return nfp_bpf_check_ptr(nfp_prog, meta, env, meta->insn.dst_reg); in nfp_bpf_check_store()
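
nfp_bpf_check_store() above special-cases XDP context writes on nfp_prog->bpf->queue_select before falling back to the generic pointer check. A toy model of that shape of check (the capability struct, context offset and return codes are illustrative assumptions):

#include <stdbool.h>
#include <stdio.h>

enum ptr_type { PTR_TO_CTX, PTR_TO_PACKET, PTR_TO_STACK };

struct caps { bool queue_select; };             /* illustrative capability bit */

#define CTX_QUEUE_FIELD_OFF 16                  /* illustrative ctx offset */

static int check_ptr(enum ptr_type t)
{
        /* stand-in for nfp_bpf_check_ptr(): accept what the datapath can
         * address as a plain store target */
        return (t == PTR_TO_PACKET || t == PTR_TO_STACK) ? 0 : -1;
}

static int check_store(const struct caps *caps, bool is_xdp,
                       enum ptr_type dst, int ctx_off)
{
        if (is_xdp && dst == PTR_TO_CTX) {
                /* only the queue-select field may be written, and only if
                 * the firmware advertises the capability */
                if (ctx_off == CTX_QUEUE_FIELD_OFF && caps->queue_select)
                        return 0;
                return -1;
        }
        return check_ptr(dst);
}

int main(void)
{
        struct caps caps = { .queue_select = false };

        printf("%d\n", check_store(&caps, true, PTR_TO_CTX, CTX_QUEUE_FIELD_OFF));
        caps.queue_select = true;
        printf("%d\n", check_store(&caps, true, PTR_TO_CTX, CTX_QUEUE_FIELD_OFF));
        printf("%d\n", check_store(&caps, false, PTR_TO_PACKET, 0));
        return 0;
}
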
526 nfp_bpf_check_xadd(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in nfp_bpf_check_xadd() argument
547 return nfp_bpf_check_ptr(nfp_prog, meta, env, meta->insn.dst_reg); in nfp_bpf_check_xadd()
551 nfp_bpf_check_alu(struct nfp_prog *nfp_prog, struct nfp_insn_meta *meta, in nfp_bpf_check_alu() argument
629 struct nfp_prog *nfp_prog = env->prog->aux->offload->dev_priv; in nfp_verify_insn() local
630 struct nfp_insn_meta *meta = nfp_prog->verifier_meta; in nfp_verify_insn()
632 meta = nfp_bpf_goto_meta(nfp_prog, meta, insn_idx); in nfp_verify_insn()
633 nfp_prog->verifier_meta = meta; in nfp_verify_insn()
648 return nfp_bpf_check_helper_call(nfp_prog, env, meta); in nfp_verify_insn()
650 return nfp_bpf_check_exit(nfp_prog, env); in nfp_verify_insn()
653 return nfp_bpf_check_ptr(nfp_prog, meta, env, in nfp_verify_insn()
656 return nfp_bpf_check_store(nfp_prog, meta, env); in nfp_verify_insn()
659 return nfp_bpf_check_xadd(nfp_prog, meta, env); in nfp_verify_insn()
662 return nfp_bpf_check_alu(nfp_prog, meta, env); in nfp_verify_insn()
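
nfp_verify_insn() above pulls the driver state out of env->prog->aux->offload->dev_priv, re-synchronizes verifier_meta with nfp_bpf_goto_meta(), and then fans out to per-class checkers. A compressed userspace model of that dispatch (the enum, handlers and fields are made up; the real code keys off the BPF opcode classes):

#include <stdio.h>

enum insn_kind { INSN_HELPER_CALL, INSN_EXIT, INSN_LOAD, INSN_STORE,
                 INSN_ATOMIC, INSN_ALU, INSN_OTHER };

struct insn { enum insn_kind kind; };

struct prog_priv {
        struct insn *insns;
        unsigned int n_insns;
        unsigned int cached_idx;        /* plays the role of verifier_meta */
};

static int check_call(struct insn *i) { (void)i; return 0; }
static int check_exit(struct insn *i) { (void)i; return 0; }
static int check_mem(struct insn *i)  { (void)i; return 0; }
static int check_alu(struct insn *i)  { (void)i; return 0; }

/* Per-instruction callback: remember where we are, then dispatch on the
 * instruction kind, mirroring the structure of nfp_verify_insn(). */
static int verify_insn(struct prog_priv *priv, unsigned int insn_idx)
{
        struct insn *insn;

        if (insn_idx >= priv->n_insns)
                return -1;
        priv->cached_idx = insn_idx;
        insn = &priv->insns[insn_idx];

        switch (insn->kind) {
        case INSN_HELPER_CALL:  return check_call(insn);
        case INSN_EXIT:         return check_exit(insn);
        case INSN_LOAD:
        case INSN_STORE:
        case INSN_ATOMIC:       return check_mem(insn);
        case INSN_ALU:          return check_alu(insn);
        default:                return 0;
        }
}

int main(void)
{
        struct insn insns[] = { { INSN_ALU }, { INSN_EXIT } };
        struct prog_priv priv = { insns, 2, 0 };

        printf("%d %d\n", verify_insn(&priv, 0), verify_insn(&priv, 1));
        return 0;
}
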
669 struct nfp_prog *nfp_prog) in nfp_assign_subprog_idx_and_regs() argument
674 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_assign_subprog_idx_and_regs()
681 nfp_prog->subprog[index].needs_reg_push = 1; in nfp_assign_subprog_idx_and_regs()
684 if (index + 1 != nfp_prog->subprog_cnt) { in nfp_assign_subprog_idx_and_regs()
686 index + 1, nfp_prog->subprog_cnt); in nfp_assign_subprog_idx_and_regs()
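
nfp_assign_subprog_idx_and_regs() above tags every instruction with its subprogram index, flags subprograms that need registers pushed, and cross-checks the final index against nfp_prog->subprog_cnt. A self-contained sketch of that pass (the "a call means push registers" rule and the field names are simplifications):

#include <stdbool.h>
#include <stdio.h>

struct insn_info {
        bool starts_subprog;    /* first insn of a (sub)program */
        bool is_call;           /* BPF-to-BPF call */
        int subprog_idx;        /* filled in below */
};

struct subprog_info { bool needs_reg_push; };

static int assign_subprog_idx(struct insn_info *insns, unsigned int n,
                              struct subprog_info *sub, unsigned int subprog_cnt)
{
        int idx = -1;
        unsigned int i;

        for (i = 0; i < n; i++) {
                if (insns[i].starts_subprog)
                        idx++;
                insns[i].subprog_idx = idx;
                if (insns[i].is_call)
                        sub[idx].needs_reg_push = true;
        }

        if ((unsigned int)(idx + 1) != subprog_cnt) {
                fprintf(stderr, "unexpected subprogram count: %d vs %u\n",
                        idx + 1, subprog_cnt);
                return -1;
        }
        return 0;
}

int main(void)
{
        struct insn_info insns[] = {
                { true, false, 0 }, { false, true, 0 },   /* main, with a call */
                { true, false, 0 }, { false, false, 0 },  /* one subprogram    */
        };
        struct subprog_info sub[2] = { { false }, { false } };
        int ret = assign_subprog_idx(insns, 4, sub, 2);

        printf("ret=%d main_needs_push=%d\n", ret, sub[0].needs_reg_push);
        return 0;
}
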
693 static unsigned int nfp_bpf_get_stack_usage(struct nfp_prog *nfp_prog) in nfp_bpf_get_stack_usage() argument
695 struct nfp_insn_meta *meta = nfp_prog_first_meta(nfp_prog); in nfp_bpf_get_stack_usage()
709 frame_depths[frame] = nfp_prog->subprog[idx].stack_depth; in nfp_bpf_get_stack_usage()
715 for (; meta != nfp_prog_last_meta(nfp_prog) && meta->subprog_idx == idx; in nfp_bpf_get_stack_usage()
727 meta = nfp_bpf_goto_meta(nfp_prog, meta, in nfp_bpf_get_stack_usage()
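
nfp_bpf_get_stack_usage() above walks the metas frame by frame, following calls via nfp_bpf_goto_meta(), to find the deepest combined stack of any call chain. The same idea expressed as a tiny call-graph walk (the recursion is safe only because BPF disallows recursive calls, so the graph is acyclic; the depths and edges are made up):

#include <stdio.h>

#define MAX_CALLEES 4

struct subprog {
        unsigned int stack_depth;       /* bytes used by this frame */
        int callees[MAX_CALLEES];       /* -1 terminated call edges */
};

/* Worst-case stack of the chain rooted at subprogram idx: its own frame
 * plus the deepest of its callees' chains. */
static unsigned int chain_depth(const struct subprog *sp, int idx)
{
        unsigned int worst = 0;
        int i;

        for (i = 0; sp[idx].callees[i] >= 0; i++) {
                unsigned int d = chain_depth(sp, sp[idx].callees[i]);

                if (d > worst)
                        worst = d;
        }
        return sp[idx].stack_depth + worst;
}

int main(void)
{
        struct subprog sp[3] = {
                { 64, { 1, 2, -1 } },   /* main calls subprogs 1 and 2 */
                { 32, { -1 } },
                { 96, { -1 } },
        };

        /* 64 + max(32, 96) = 160 bytes on the deepest chain */
        printf("worst-case stack: %u\n", chain_depth(sp, 0));
        return 0;
}
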
747 static void nfp_bpf_insn_flag_zext(struct nfp_prog *nfp_prog, in nfp_bpf_insn_flag_zext() argument
752 list_for_each_entry(meta, &nfp_prog->insns, l) { in nfp_bpf_insn_flag_zext()
761 struct nfp_prog *nfp_prog; in nfp_bpf_finalize() local
766 nfp_prog = env->prog->aux->offload->dev_priv; in nfp_bpf_finalize()
767 nfp_prog->subprog_cnt = env->subprog_cnt; in nfp_bpf_finalize()
768 nfp_prog->subprog = kcalloc(nfp_prog->subprog_cnt, in nfp_bpf_finalize()
769 sizeof(nfp_prog->subprog[0]), GFP_KERNEL); in nfp_bpf_finalize()
770 if (!nfp_prog->subprog) in nfp_bpf_finalize()
773 nfp_assign_subprog_idx_and_regs(env, nfp_prog); in nfp_bpf_finalize()
776 for (i = 0; i < nfp_prog->subprog_cnt; i++) { in nfp_bpf_finalize()
777 nfp_prog->subprog[i].stack_depth = info[i].stack_depth; in nfp_bpf_finalize()
783 nfp_prog->subprog[i].stack_depth += REG_WIDTH; in nfp_bpf_finalize()
785 if (nfp_prog->subprog[i].needs_reg_push) in nfp_bpf_finalize()
786 nfp_prog->subprog[i].stack_depth += BPF_REG_SIZE * 4; in nfp_bpf_finalize()
791 nfp_prog->stack_size = nfp_bpf_get_stack_usage(nfp_prog); in nfp_bpf_finalize()
792 if (nfp_prog->stack_size > max_stack) { in nfp_bpf_finalize()
794 nfp_prog->stack_size, max_stack); in nfp_bpf_finalize()
798 nfp_bpf_insn_flag_zext(nfp_prog, env->insn_aux_data); in nfp_bpf_finalize()
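
nfp_bpf_finalize() above allocates one entry per verifier subprogram, seeds it with the verifier's stack_depth, pads frames by REG_WIDTH (presumably for the saved return address) and by BPF_REG_SIZE * 4 when registers must be pushed, then rejects programs whose nfp_prog->stack_size exceeds the budget. A hedged, simplified model of that sizing (the constants and the summed total are illustrative; the real worst case comes from the deepest chain computed by nfp_bpf_get_stack_usage()):

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

#define RET_ADDR_BYTES  4       /* e.g. one 32-bit word per callee frame */
#define REG_SAVE_BYTES  (8 * 4) /* e.g. four 8-byte registers to push */
#define HW_STACK_LIMIT  512     /* illustrative hardware budget */

struct subprog {
        unsigned int stack_depth;
        bool needs_reg_push;
};

static int finalize_stack(struct subprog *sub,
                          const unsigned int *verifier_depth, unsigned int cnt)
{
        unsigned int total = 0, i;

        for (i = 0; i < cnt; i++) {
                sub[i].stack_depth = verifier_depth[i];
                if (i)                           /* callees keep a return address */
                        sub[i].stack_depth += RET_ADDR_BYTES;
                if (sub[i].needs_reg_push)
                        sub[i].stack_depth += REG_SAVE_BYTES;
                total += sub[i].stack_depth;     /* simplification of the real
                                                  * deepest-chain computation */
        }

        if (total > HW_STACK_LIMIT) {
                fprintf(stderr, "stack too large: %u > %u\n",
                        total, HW_STACK_LIMIT);
                return -1;
        }
        return 0;
}

int main(void)
{
        unsigned int depths[2] = { 128, 64 };
        struct subprog *sub = calloc(2, sizeof(*sub));

        if (!sub)
                return 1;
        sub[0].needs_reg_push = true;            /* main makes a call */
        printf("%d\n", finalize_stack(sub, depths, 2));
        free(sub);
        return 0;
}
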
805 struct nfp_prog *nfp_prog = env->prog->aux->offload->dev_priv; in nfp_bpf_opt_replace_insn() local
807 struct nfp_insn_meta *meta = nfp_prog->verifier_meta; in nfp_bpf_opt_replace_insn()
809 meta = nfp_bpf_goto_meta(nfp_prog, meta, aux_data[off].orig_idx); in nfp_bpf_opt_replace_insn()
810 nfp_prog->verifier_meta = meta; in nfp_bpf_opt_replace_insn()
838 struct nfp_prog *nfp_prog = env->prog->aux->offload->dev_priv; in nfp_bpf_opt_remove_insns() local
840 struct nfp_insn_meta *meta = nfp_prog->verifier_meta; in nfp_bpf_opt_remove_insns()
843 meta = nfp_bpf_goto_meta(nfp_prog, meta, aux_data[off].orig_idx); in nfp_bpf_opt_remove_insns()
846 if (WARN_ON_ONCE(&meta->l == &nfp_prog->insns)) in nfp_bpf_opt_remove_insns()
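
Both rewrite callbacks above translate the verifier's offset through aux_data[off].orig_idx and the cached verifier_meta before touching the driver's per-instruction records, and nfp_bpf_opt_remove_insns() warns if the walk runs off nfp_prog->insns. A simplified model of that offset translation and removal marking (a flat array stands in for the kernel's meta list; the fields are made up):

#include <stdio.h>

struct aux_data { unsigned int orig_idx; };     /* original index per insn */

struct meta { unsigned int n; int deleted; };

static struct meta *find_meta(struct meta *metas, unsigned int n_metas,
                              unsigned int orig_idx)
{
        unsigned int i;

        for (i = 0; i < n_metas; i++)
                if (metas[i].n == orig_idx)
                        return &metas[i];
        return NULL;                    /* analogous to the WARN_ON_ONCE() case */
}

/* Mark cnt instructions, starting at the rewritten offset 'off', as removed. */
static int remove_insns(struct meta *metas, unsigned int n_metas,
                        const struct aux_data *aux, unsigned int off,
                        unsigned int cnt)
{
        struct meta *m = find_meta(metas, n_metas, aux[off].orig_idx);
        unsigned int i;

        if (!m)
                return -1;
        for (i = 0; i < cnt && m < metas + n_metas; i++, m++)
                m->deleted = 1;
        return 0;
}

int main(void)
{
        struct meta metas[3] = { { 0, 0 }, { 1, 0 }, { 2, 0 } };
        struct aux_data aux[3] = { { 0 }, { 1 }, { 2 } };
        int ret = remove_insns(metas, 3, aux, 1, 2);

        printf("ret=%d insn1_deleted=%d\n", ret, metas[1].deleted);
        return 0;
}
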