Lines Matching refs: src_reg (a simplified bounds-tracking sketch follows the listing)

1488 if (insn[i].src_reg != BPF_PSEUDO_CALL) in check_subprogs()
1517 insn[i].src_reg != BPF_PSEUDO_CALL) in check_subprogs()
1626 if (insn->src_reg == BPF_PSEUDO_CALL) in is_reg64()
1813 u32 sreg = 1u << insn->src_reg; in backtrack_insn()
1866 if (insn->src_reg != BPF_REG_FP) in backtrack_insn()
1907 if (insn->src_reg == BPF_PSEUDO_CALL) in backtrack_insn()
3070 if (insn[i].src_reg != BPF_PSEUDO_CALL) in check_max_stack_depth()
3608 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_xadd()
3617 if (is_pointer_value(env, insn->src_reg)) { in check_xadd()
3618 verbose(env, "R%d leaks addr into mem\n", insn->src_reg); in check_xadd()
5463 u32 dst = insn->dst_reg, src = insn->src_reg; in adjust_ptr_min_max_vals()
5700 struct bpf_reg_state *src_reg) in scalar32_min_max_add() argument
5702 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_add()
5703 s32 smax_val = src_reg->s32_max_value; in scalar32_min_max_add()
5704 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_add()
5705 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_add()
5726 struct bpf_reg_state *src_reg) in scalar_min_max_add() argument
5728 s64 smin_val = src_reg->smin_value; in scalar_min_max_add()
5729 s64 smax_val = src_reg->smax_value; in scalar_min_max_add()
5730 u64 umin_val = src_reg->umin_value; in scalar_min_max_add()
5731 u64 umax_val = src_reg->umax_value; in scalar_min_max_add()
5752 struct bpf_reg_state *src_reg) in scalar32_min_max_sub() argument
5754 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_sub()
5755 s32 smax_val = src_reg->s32_max_value; in scalar32_min_max_sub()
5756 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_sub()
5757 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_sub()
5780 struct bpf_reg_state *src_reg) in scalar_min_max_sub() argument
5782 s64 smin_val = src_reg->smin_value; in scalar_min_max_sub()
5783 s64 smax_val = src_reg->smax_value; in scalar_min_max_sub()
5784 u64 umin_val = src_reg->umin_value; in scalar_min_max_sub()
5785 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
5808 struct bpf_reg_state *src_reg) in scalar32_min_max_mul() argument
5810 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_mul()
5811 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_mul()
5812 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_mul()
5840 struct bpf_reg_state *src_reg) in scalar_min_max_mul() argument
5842 s64 smin_val = src_reg->smin_value; in scalar_min_max_mul()
5843 u64 umin_val = src_reg->umin_value; in scalar_min_max_mul()
5844 u64 umax_val = src_reg->umax_value; in scalar_min_max_mul()
5872 struct bpf_reg_state *src_reg) in scalar32_min_max_and() argument
5874 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_and()
5877 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_and()
5878 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_and()
5908 struct bpf_reg_state *src_reg) in scalar_min_max_and() argument
5910 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_and()
5912 s64 smin_val = src_reg->smin_value; in scalar_min_max_and()
5913 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
5943 struct bpf_reg_state *src_reg) in scalar32_min_max_or() argument
5945 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_or()
5948 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_or()
5949 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_or()
5978 struct bpf_reg_state *src_reg) in scalar_min_max_or() argument
5980 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_or()
5982 s64 smin_val = src_reg->smin_value; in scalar_min_max_or()
5983 u64 umin_val = src_reg->umin_value; in scalar_min_max_or()
6013 struct bpf_reg_state *src_reg) in scalar32_min_max_xor() argument
6015 bool src_known = tnum_subreg_is_const(src_reg->var_off); in scalar32_min_max_xor()
6018 s32 smin_val = src_reg->s32_min_value; in scalar32_min_max_xor()
6043 struct bpf_reg_state *src_reg) in scalar_min_max_xor() argument
6045 bool src_known = tnum_is_const(src_reg->var_off); in scalar_min_max_xor()
6047 s64 smin_val = src_reg->smin_value; in scalar_min_max_xor()
6092 struct bpf_reg_state *src_reg) in scalar32_min_max_lsh() argument
6094 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_lsh()
6095 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_lsh()
6140 struct bpf_reg_state *src_reg) in scalar_min_max_lsh() argument
6142 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
6143 u64 umin_val = src_reg->umin_value; in scalar_min_max_lsh()
6155 struct bpf_reg_state *src_reg) in scalar32_min_max_rsh() argument
6158 u32 umax_val = src_reg->u32_max_value; in scalar32_min_max_rsh()
6159 u32 umin_val = src_reg->u32_min_value; in scalar32_min_max_rsh()
6187 struct bpf_reg_state *src_reg) in scalar_min_max_rsh() argument
6189 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
6190 u64 umin_val = src_reg->umin_value; in scalar_min_max_rsh()
6221 struct bpf_reg_state *src_reg) in scalar32_min_max_arsh() argument
6223 u64 umin_val = src_reg->u32_min_value; in scalar32_min_max_arsh()
6244 struct bpf_reg_state *src_reg) in scalar_min_max_arsh() argument
6246 u64 umin_val = src_reg->umin_value; in scalar_min_max_arsh()
6277 struct bpf_reg_state src_reg) in adjust_scalar_min_max_vals() argument
6291 smin_val = src_reg.smin_value; in adjust_scalar_min_max_vals()
6292 smax_val = src_reg.smax_value; in adjust_scalar_min_max_vals()
6293 umin_val = src_reg.umin_value; in adjust_scalar_min_max_vals()
6294 umax_val = src_reg.umax_value; in adjust_scalar_min_max_vals()
6296 s32_min_val = src_reg.s32_min_value; in adjust_scalar_min_max_vals()
6297 s32_max_val = src_reg.s32_max_value; in adjust_scalar_min_max_vals()
6298 u32_min_val = src_reg.u32_min_value; in adjust_scalar_min_max_vals()
6299 u32_max_val = src_reg.u32_max_value; in adjust_scalar_min_max_vals()
6302 src_known = tnum_subreg_is_const(src_reg.var_off); in adjust_scalar_min_max_vals()
6313 src_known = tnum_is_const(src_reg.var_off); in adjust_scalar_min_max_vals()
6352 scalar32_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6353 scalar_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6354 dst_reg->var_off = tnum_add(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
6362 scalar32_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6363 scalar_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6364 dst_reg->var_off = tnum_sub(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
6367 dst_reg->var_off = tnum_mul(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
6368 scalar32_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6369 scalar_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6372 dst_reg->var_off = tnum_and(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
6373 scalar32_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6374 scalar_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6377 dst_reg->var_off = tnum_or(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
6378 scalar32_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6379 scalar_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6382 dst_reg->var_off = tnum_xor(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
6383 scalar32_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6384 scalar_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6395 scalar32_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6397 scalar_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6408 scalar32_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6410 scalar_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6421 scalar32_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6423 scalar_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
6448 struct bpf_reg_state *regs = state->regs, *dst_reg, *src_reg; in adjust_reg_min_max_vals() local
6454 src_reg = NULL; in adjust_reg_min_max_vals()
6463 src_reg = &regs[insn->src_reg]; in adjust_reg_min_max_vals()
6464 if (src_reg->type != SCALAR_VALUE) { in adjust_reg_min_max_vals()
6487 src_reg, dst_reg); in adjust_reg_min_max_vals()
6491 err = mark_chain_precision(env, insn->src_reg); in adjust_reg_min_max_vals()
6495 dst_reg, src_reg); in adjust_reg_min_max_vals()
6503 src_reg = &off_reg; in adjust_reg_min_max_vals()
6506 ptr_reg, src_reg); in adjust_reg_min_max_vals()
6515 if (WARN_ON(!src_reg)) { in adjust_reg_min_max_vals()
6520 return adjust_scalar_min_max_vals(env, insn, dst_reg, *src_reg); in adjust_reg_min_max_vals()
6533 insn->src_reg != BPF_REG_0 || in check_alu_op()
6539 if (insn->src_reg != BPF_REG_0 || insn->off != 0 || in check_alu_op()
6572 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
6576 if (insn->src_reg != BPF_REG_0 || insn->off != 0) { in check_alu_op()
6588 struct bpf_reg_state *src_reg = regs + insn->src_reg; in check_alu_op() local
6595 if (src_reg->type == SCALAR_VALUE && !src_reg->id) in check_alu_op()
6600 src_reg->id = ++env->id_gen; in check_alu_op()
6601 *dst_reg = *src_reg; in check_alu_op()
6606 if (is_pointer_value(env, insn->src_reg)) { in check_alu_op()
6609 insn->src_reg); in check_alu_op()
6611 } else if (src_reg->type == SCALAR_VALUE) { in check_alu_op()
6612 *dst_reg = *src_reg; in check_alu_op()
6654 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_alu_op()
6658 if (insn->src_reg != BPF_REG_0 || insn->off != 0) { in check_alu_op()
7163 static void __reg_combine_min_max(struct bpf_reg_state *src_reg, in __reg_combine_min_max() argument
7166 src_reg->umin_value = dst_reg->umin_value = max(src_reg->umin_value, in __reg_combine_min_max()
7168 src_reg->umax_value = dst_reg->umax_value = min(src_reg->umax_value, in __reg_combine_min_max()
7170 src_reg->smin_value = dst_reg->smin_value = max(src_reg->smin_value, in __reg_combine_min_max()
7172 src_reg->smax_value = dst_reg->smax_value = min(src_reg->smax_value, in __reg_combine_min_max()
7174 src_reg->var_off = dst_reg->var_off = tnum_intersect(src_reg->var_off, in __reg_combine_min_max()
7177 __update_reg_bounds(src_reg); in __reg_combine_min_max()
7180 __reg_deduce_bounds(src_reg); in __reg_combine_min_max()
7183 __reg_bound_offset(src_reg); in __reg_combine_min_max()
7189 __update_reg_bounds(src_reg); in __reg_combine_min_max()
7316 struct bpf_reg_state *src_reg, in try_match_pkt_pointers() argument
7330 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
7332 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
7337 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
7339 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
7341 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
7342 src_reg->type, true); in try_match_pkt_pointers()
7349 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
7351 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
7356 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
7358 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
7360 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
7361 src_reg->type, false); in try_match_pkt_pointers()
7368 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
7370 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
7375 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
7377 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
7379 find_good_pkt_pointers(other_branch, src_reg, in try_match_pkt_pointers()
7380 src_reg->type, false); in try_match_pkt_pointers()
7387 src_reg->type == PTR_TO_PACKET_END) || in try_match_pkt_pointers()
7389 reg_is_init_pkt_pointer(src_reg, PTR_TO_PACKET))) { in try_match_pkt_pointers()
7394 src_reg->type == PTR_TO_PACKET) || in try_match_pkt_pointers()
7396 src_reg->type == PTR_TO_PACKET_META)) { in try_match_pkt_pointers()
7398 find_good_pkt_pointers(this_branch, src_reg, in try_match_pkt_pointers()
7399 src_reg->type, true); in try_match_pkt_pointers()
7441 struct bpf_reg_state *dst_reg, *other_branch_regs, *src_reg = NULL; in check_cond_jmp_op() local
7460 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_cond_jmp_op()
7464 if (is_pointer_value(env, insn->src_reg)) { in check_cond_jmp_op()
7466 insn->src_reg); in check_cond_jmp_op()
7469 src_reg = &regs[insn->src_reg]; in check_cond_jmp_op()
7471 if (insn->src_reg != BPF_REG_0) { in check_cond_jmp_op()
7487 } else if (src_reg->type == SCALAR_VALUE && in check_cond_jmp_op()
7488 is_jmp32 && tnum_is_const(tnum_subreg(src_reg->var_off))) { in check_cond_jmp_op()
7490 tnum_subreg(src_reg->var_off).value, in check_cond_jmp_op()
7493 } else if (src_reg->type == SCALAR_VALUE && in check_cond_jmp_op()
7494 !is_jmp32 && tnum_is_const(src_reg->var_off)) { in check_cond_jmp_op()
7496 src_reg->var_off.value, in check_cond_jmp_op()
7508 err = mark_chain_precision(env, insn->src_reg); in check_cond_jmp_op()
7537 struct bpf_reg_state *src_reg = &regs[insn->src_reg]; in check_cond_jmp_op() local
7540 src_reg->type == SCALAR_VALUE) { in check_cond_jmp_op()
7541 if (tnum_is_const(src_reg->var_off) || in check_cond_jmp_op()
7543 tnum_is_const(tnum_subreg(src_reg->var_off)))) in check_cond_jmp_op()
7546 src_reg->var_off.value, in check_cond_jmp_op()
7547 tnum_subreg(src_reg->var_off).value, in check_cond_jmp_op()
7552 reg_set_min_max_inv(&other_branch_regs[insn->src_reg], in check_cond_jmp_op()
7553 src_reg, in check_cond_jmp_op()
7560 reg_combine_min_max(&other_branch_regs[insn->src_reg], in check_cond_jmp_op()
7562 src_reg, dst_reg, opcode); in check_cond_jmp_op()
7563 if (src_reg->id && in check_cond_jmp_op()
7564 !WARN_ON_ONCE(src_reg->id != other_branch_regs[insn->src_reg].id)) { in check_cond_jmp_op()
7565 find_equal_scalars(this_branch, src_reg); in check_cond_jmp_op()
7566 find_equal_scalars(other_branch, &other_branch_regs[insn->src_reg]); in check_cond_jmp_op()
7596 } else if (!try_match_pkt_pointers(insn, dst_reg, &regs[insn->src_reg], in check_cond_jmp_op()
7631 if (insn->src_reg == 0) { in check_ld_imm()
7639 if (insn->src_reg == BPF_PSEUDO_BTF_ID) { in check_ld_imm()
7662 if (insn->src_reg == BPF_PSEUDO_MAP_VALUE) { in check_ld_imm()
7667 } else if (insn->src_reg == BPF_PSEUDO_MAP_FD) { in check_ld_imm()
7723 (mode == BPF_ABS && insn->src_reg != BPF_REG_0)) { in check_ld_abs()
7756 err = check_reg_arg(env, insn->src_reg, SRC_OP); in check_ld_abs()
8059 if (insns[t].src_reg == BPF_PSEUDO_CALL) { in check_cfg()
9359 err = check_reg_arg(env, insn->src_reg, SRC_OP); in do_check()
9367 src_reg_type = regs[insn->src_reg].type; in do_check()
9372 err = check_mem_access(env, env->insn_idx, insn->src_reg, in do_check()
9411 err = check_reg_arg(env, insn->src_reg, SRC_OP); in do_check()
9424 BPF_WRITE, insn->src_reg, false); in do_check()
9439 insn->src_reg != BPF_REG_0) { in do_check()
9469 (insn->src_reg != BPF_REG_0 && in do_check()
9470 insn->src_reg != BPF_PSEUDO_CALL) || in do_check()
9478 (insn->src_reg == BPF_PSEUDO_CALL || in do_check()
9483 if (insn->src_reg == BPF_PSEUDO_CALL) in do_check()
9493 insn->src_reg != BPF_REG_0 || in do_check()
9506 insn->src_reg != BPF_REG_0 || in do_check()
9816 insn[1].dst_reg != 0 || insn[1].src_reg != 0 || in resolve_pseudo_ldimm64()
9822 if (insn[0].src_reg == 0) in resolve_pseudo_ldimm64()
9826 if (insn[0].src_reg == BPF_PSEUDO_BTF_ID) { in resolve_pseudo_ldimm64()
9837 if ((insn[0].src_reg != BPF_PSEUDO_MAP_FD && in resolve_pseudo_ldimm64()
9838 insn[0].src_reg != BPF_PSEUDO_MAP_VALUE) || in resolve_pseudo_ldimm64()
9839 (insn[0].src_reg == BPF_PSEUDO_MAP_FD && in resolve_pseudo_ldimm64()
9861 if (insn->src_reg == BPF_PSEUDO_MAP_FD) { in resolve_pseudo_ldimm64()
9961 insn->src_reg = 0; in convert_pseudo_ld_imm64()
10367 zext_patch[1].src_reg = insn.dst_reg; in opt_subreg_zext_lo32_rnd_hi32()
10584 insn->src_reg != BPF_PSEUDO_CALL) in jit_subprogs()
10714 insn->src_reg != BPF_PSEUDO_CALL) in jit_subprogs()
10760 insn->src_reg != BPF_PSEUDO_CALL) in jit_subprogs()
10790 insn->src_reg != BPF_PSEUDO_CALL) in jit_subprogs()
10826 insn->src_reg != BPF_PSEUDO_CALL) in fixup_call_args()
10864 BPF_MOV32_REG(insn->src_reg, insn->src_reg), in fixup_bpf_calls()
10866 BPF_JMP_IMM(BPF_JNE, insn->src_reg, 0, 2), in fixup_bpf_calls()
10872 BPF_MOV32_REG(insn->src_reg, insn->src_reg), in fixup_bpf_calls()
10874 BPF_JMP_IMM(BPF_JEQ, insn->src_reg, 0, 1), in fixup_bpf_calls()
10935 off_reg = issrc ? insn->src_reg : insn->dst_reg; in fixup_bpf_calls()
10946 insn->src_reg = BPF_REG_AX; in fixup_bpf_calls()
10971 if (insn->src_reg == BPF_PSEUDO_CALL) in fixup_bpf_calls()
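
The scalar_min_max_add()/scalar32_min_max_add() entries above belong to the verifier's value tracking: when it simulates an ALU add, it combines the signed and unsigned bounds of dst_reg and src_reg, and widens to the full range whenever the addition could overflow. Below is a minimal, self-contained sketch of that pattern, assuming nothing beyond standard C: struct reg_bounds and bounds_add() are hypothetical stand-ins for struct bpf_reg_state and the helpers listed above, and signed_add_overflows() is reimplemented here in the same spirit; this is an illustration, not the kernel's implementation.

/*
 * Simplified sketch (not kernel code) of the bounds tracking behind
 * scalar_min_max_add(): simulate "dst += src" on the known signed and
 * unsigned ranges, widening to "unknown" on possible overflow.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct reg_bounds {
	int64_t  smin, smax;	/* signed value range */
	uint64_t umin, umax;	/* unsigned value range */
};

/* Would a + b overflow the signed 64-bit range? */
static bool signed_add_overflows(int64_t a, int64_t b)
{
	/* Do the add in uint64_t, where wraparound is well defined. */
	int64_t res = (int64_t)((uint64_t)a + (uint64_t)b);

	if (b < 0)
		return res > a;
	return res < a;
}

/* Conservatively update the bounds of dst for "dst += src". */
static void bounds_add(struct reg_bounds *dst, const struct reg_bounds *src)
{
	if (signed_add_overflows(dst->smin, src->smin) ||
	    signed_add_overflows(dst->smax, src->smax)) {
		/* Possible signed overflow: give up on the signed range. */
		dst->smin = INT64_MIN;
		dst->smax = INT64_MAX;
	} else {
		dst->smin += src->smin;
		dst->smax += src->smax;
	}

	if (dst->umin + src->umin < src->umin ||
	    dst->umax + src->umax < src->umax) {
		/* Possible unsigned wraparound: give up on the unsigned range. */
		dst->umin = 0;
		dst->umax = UINT64_MAX;
	} else {
		dst->umin += src->umin;
		dst->umax += src->umax;
	}
}

int main(void)
{
	struct reg_bounds dst = { .smin = 0, .smax = 100, .umin = 0, .umax = 100 };
	struct reg_bounds src = { .smin = 1, .smax = 16,  .umin = 1, .umax = 16  };

	bounds_add(&dst, &src);
	printf("signed [%lld, %lld], unsigned [%llu, %llu]\n",
	       (long long)dst.smin, (long long)dst.smax,
	       (unsigned long long)dst.umin, (unsigned long long)dst.umax);
	return 0;
}

Falling back to the widest possible range on a potential overflow keeps the analysis conservative: the tracked interval may become loose, but it never excludes a value the register could actually hold at runtime.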