Lines Matching refs:dst_reg
1342 return !is_reg64(env, insn, insn->dst_reg, NULL, DST_OP); in insn_has_def32()
1453 u32 dreg = 1u << insn->dst_reg; in backtrack_insn()
1531 if (insn->dst_reg != BPF_REG_FP) in backtrack_insn()
1890 u32 dst_reg = env->prog->insnsi[insn_idx].dst_reg; in check_stack_write() local
1913 if (dst_reg != BPF_REG_FP) { in check_stack_write()
2903 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_xadd()
2912 if (is_ctx_reg(env, insn->dst_reg) || in check_xadd()
2913 is_pkt_reg(env, insn->dst_reg) || in check_xadd()
2914 is_flow_key_reg(env, insn->dst_reg) || in check_xadd()
2915 is_sk_reg(env, insn->dst_reg)) { in check_xadd()
2917 insn->dst_reg, in check_xadd()
2918 reg_type_str[reg_state(env, insn->dst_reg)->type]); in check_xadd()
2923 err = check_mem_access(env, insn_idx, insn->dst_reg, insn->off, in check_xadd()
2929 return check_mem_access(env, insn_idx, insn->dst_reg, insn->off, in check_xadd()
4271 struct bpf_reg_state *dst_reg, in sanitize_ptr_alu() argument
4276 bool ptr_is_dst_reg = ptr_reg == dst_reg; in sanitize_ptr_alu()
4311 tmp = *dst_reg; in sanitize_ptr_alu()
4312 *dst_reg = *ptr_reg; in sanitize_ptr_alu()
4316 *dst_reg = tmp; in sanitize_ptr_alu()
4332 struct bpf_reg_state *regs = state->regs, *dst_reg; in adjust_ptr_min_max_vals() local
4338 u32 dst = insn->dst_reg, src = insn->src_reg; in adjust_ptr_min_max_vals()
4342 dst_reg = &regs[dst]; in adjust_ptr_min_max_vals()
4349 __mark_reg_unknown(dst_reg); in adjust_ptr_min_max_vals()
4381 off_reg == dst_reg ? dst : src); in adjust_ptr_min_max_vals()
4392 dst_reg->type = ptr_reg->type; in adjust_ptr_min_max_vals()
4393 dst_reg->id = ptr_reg->id; in adjust_ptr_min_max_vals()
4401 ret = sanitize_ptr_alu(env, insn, ptr_reg, dst_reg, smin_val < 0); in adjust_ptr_min_max_vals()
4412 dst_reg->smin_value = smin_ptr; in adjust_ptr_min_max_vals()
4413 dst_reg->smax_value = smax_ptr; in adjust_ptr_min_max_vals()
4414 dst_reg->umin_value = umin_ptr; in adjust_ptr_min_max_vals()
4415 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
4416 dst_reg->var_off = ptr_reg->var_off; in adjust_ptr_min_max_vals()
4417 dst_reg->off = ptr_reg->off + smin_val; in adjust_ptr_min_max_vals()
4418 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
4432 dst_reg->smin_value = S64_MIN; in adjust_ptr_min_max_vals()
4433 dst_reg->smax_value = S64_MAX; in adjust_ptr_min_max_vals()
4435 dst_reg->smin_value = smin_ptr + smin_val; in adjust_ptr_min_max_vals()
4436 dst_reg->smax_value = smax_ptr + smax_val; in adjust_ptr_min_max_vals()
4440 dst_reg->umin_value = 0; in adjust_ptr_min_max_vals()
4441 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
4443 dst_reg->umin_value = umin_ptr + umin_val; in adjust_ptr_min_max_vals()
4444 dst_reg->umax_value = umax_ptr + umax_val; in adjust_ptr_min_max_vals()
4446 dst_reg->var_off = tnum_add(ptr_reg->var_off, off_reg->var_off); in adjust_ptr_min_max_vals()
4447 dst_reg->off = ptr_reg->off; in adjust_ptr_min_max_vals()
4448 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
4450 dst_reg->id = ++env->id_gen; in adjust_ptr_min_max_vals()
4452 dst_reg->raw = 0; in adjust_ptr_min_max_vals()
4456 ret = sanitize_ptr_alu(env, insn, ptr_reg, dst_reg, smin_val < 0); in adjust_ptr_min_max_vals()
4461 if (dst_reg == off_reg) { in adjust_ptr_min_max_vals()
4479 dst_reg->smin_value = smin_ptr; in adjust_ptr_min_max_vals()
4480 dst_reg->smax_value = smax_ptr; in adjust_ptr_min_max_vals()
4481 dst_reg->umin_value = umin_ptr; in adjust_ptr_min_max_vals()
4482 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
4483 dst_reg->var_off = ptr_reg->var_off; in adjust_ptr_min_max_vals()
4484 dst_reg->id = ptr_reg->id; in adjust_ptr_min_max_vals()
4485 dst_reg->off = ptr_reg->off - smin_val; in adjust_ptr_min_max_vals()
4486 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
4495 dst_reg->smin_value = S64_MIN; in adjust_ptr_min_max_vals()
4496 dst_reg->smax_value = S64_MAX; in adjust_ptr_min_max_vals()
4498 dst_reg->smin_value = smin_ptr - smax_val; in adjust_ptr_min_max_vals()
4499 dst_reg->smax_value = smax_ptr - smin_val; in adjust_ptr_min_max_vals()
4503 dst_reg->umin_value = 0; in adjust_ptr_min_max_vals()
4504 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
4507 dst_reg->umin_value = umin_ptr - umax_val; in adjust_ptr_min_max_vals()
4508 dst_reg->umax_value = umax_ptr - umin_val; in adjust_ptr_min_max_vals()
4510 dst_reg->var_off = tnum_sub(ptr_reg->var_off, off_reg->var_off); in adjust_ptr_min_max_vals()
4511 dst_reg->off = ptr_reg->off; in adjust_ptr_min_max_vals()
4512 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
4514 dst_reg->id = ++env->id_gen; in adjust_ptr_min_max_vals()
4517 dst_reg->raw = 0; in adjust_ptr_min_max_vals()
4534 if (!check_reg_sane_offset(env, dst_reg, ptr_reg->type)) in adjust_ptr_min_max_vals()
4537 __update_reg_bounds(dst_reg); in adjust_ptr_min_max_vals()
4538 __reg_deduce_bounds(dst_reg); in adjust_ptr_min_max_vals()
4539 __reg_bound_offset(dst_reg); in adjust_ptr_min_max_vals()
4545 if (dst_reg->type == PTR_TO_MAP_VALUE && in adjust_ptr_min_max_vals()
4546 check_map_access(env, dst, dst_reg->off, 1, false)) { in adjust_ptr_min_max_vals()
4550 } else if (dst_reg->type == PTR_TO_STACK && in adjust_ptr_min_max_vals()
4551 check_stack_access(env, dst_reg, dst_reg->off + in adjust_ptr_min_max_vals()
4552 dst_reg->var_off.value, 1)) { in adjust_ptr_min_max_vals()
4568 struct bpf_reg_state *dst_reg, in adjust_scalar_min_max_vals() argument
4577 u32 dst = insn->dst_reg; in adjust_scalar_min_max_vals()
4585 coerce_reg_to_size(dst_reg, 4); in adjust_scalar_min_max_vals()
4594 dst_known = tnum_is_const(dst_reg->var_off); in adjust_scalar_min_max_vals()
4601 __mark_reg_unknown(dst_reg); in adjust_scalar_min_max_vals()
4607 __mark_reg_unknown(dst_reg); in adjust_scalar_min_max_vals()
4618 if (signed_add_overflows(dst_reg->smin_value, smin_val) || in adjust_scalar_min_max_vals()
4619 signed_add_overflows(dst_reg->smax_value, smax_val)) { in adjust_scalar_min_max_vals()
4620 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4621 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4623 dst_reg->smin_value += smin_val; in adjust_scalar_min_max_vals()
4624 dst_reg->smax_value += smax_val; in adjust_scalar_min_max_vals()
4626 if (dst_reg->umin_value + umin_val < umin_val || in adjust_scalar_min_max_vals()
4627 dst_reg->umax_value + umax_val < umax_val) { in adjust_scalar_min_max_vals()
4628 dst_reg->umin_value = 0; in adjust_scalar_min_max_vals()
4629 dst_reg->umax_value = U64_MAX; in adjust_scalar_min_max_vals()
4631 dst_reg->umin_value += umin_val; in adjust_scalar_min_max_vals()
4632 dst_reg->umax_value += umax_val; in adjust_scalar_min_max_vals()
4634 dst_reg->var_off = tnum_add(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
4642 if (signed_sub_overflows(dst_reg->smin_value, smax_val) || in adjust_scalar_min_max_vals()
4643 signed_sub_overflows(dst_reg->smax_value, smin_val)) { in adjust_scalar_min_max_vals()
4645 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4646 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4648 dst_reg->smin_value -= smax_val; in adjust_scalar_min_max_vals()
4649 dst_reg->smax_value -= smin_val; in adjust_scalar_min_max_vals()
4651 if (dst_reg->umin_value < umax_val) { in adjust_scalar_min_max_vals()
4653 dst_reg->umin_value = 0; in adjust_scalar_min_max_vals()
4654 dst_reg->umax_value = U64_MAX; in adjust_scalar_min_max_vals()
4657 dst_reg->umin_value -= umax_val; in adjust_scalar_min_max_vals()
4658 dst_reg->umax_value -= umin_val; in adjust_scalar_min_max_vals()
4660 dst_reg->var_off = tnum_sub(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
4663 dst_reg->var_off = tnum_mul(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
4664 if (smin_val < 0 || dst_reg->smin_value < 0) { in adjust_scalar_min_max_vals()
4666 __mark_reg_unbounded(dst_reg); in adjust_scalar_min_max_vals()
4667 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4673 if (umax_val > U32_MAX || dst_reg->umax_value > U32_MAX) { in adjust_scalar_min_max_vals()
4675 __mark_reg_unbounded(dst_reg); in adjust_scalar_min_max_vals()
4677 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4680 dst_reg->umin_value *= umin_val; in adjust_scalar_min_max_vals()
4681 dst_reg->umax_value *= umax_val; in adjust_scalar_min_max_vals()
4682 if (dst_reg->umax_value > S64_MAX) { in adjust_scalar_min_max_vals()
4684 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4685 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4687 dst_reg->smin_value = dst_reg->umin_value; in adjust_scalar_min_max_vals()
4688 dst_reg->smax_value = dst_reg->umax_value; in adjust_scalar_min_max_vals()
4693 __mark_reg_known(dst_reg, dst_reg->var_off.value & in adjust_scalar_min_max_vals()
4700 dst_reg->var_off = tnum_and(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
4701 dst_reg->umin_value = dst_reg->var_off.value; in adjust_scalar_min_max_vals()
4702 dst_reg->umax_value = min(dst_reg->umax_value, umax_val); in adjust_scalar_min_max_vals()
4703 if (dst_reg->smin_value < 0 || smin_val < 0) { in adjust_scalar_min_max_vals()
4707 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4708 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4713 dst_reg->smin_value = dst_reg->umin_value; in adjust_scalar_min_max_vals()
4714 dst_reg->smax_value = dst_reg->umax_value; in adjust_scalar_min_max_vals()
4717 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4721 __mark_reg_known(dst_reg, dst_reg->var_off.value | in adjust_scalar_min_max_vals()
4728 dst_reg->var_off = tnum_or(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
4729 dst_reg->umin_value = max(dst_reg->umin_value, umin_val); in adjust_scalar_min_max_vals()
4730 dst_reg->umax_value = dst_reg->var_off.value | in adjust_scalar_min_max_vals()
4731 dst_reg->var_off.mask; in adjust_scalar_min_max_vals()
4732 if (dst_reg->smin_value < 0 || smin_val < 0) { in adjust_scalar_min_max_vals()
4736 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4737 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4742 dst_reg->smin_value = dst_reg->umin_value; in adjust_scalar_min_max_vals()
4743 dst_reg->smax_value = dst_reg->umax_value; in adjust_scalar_min_max_vals()
4746 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4753 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
4759 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4760 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4762 if (dst_reg->umax_value > 1ULL << (63 - umax_val)) { in adjust_scalar_min_max_vals()
4763 dst_reg->umin_value = 0; in adjust_scalar_min_max_vals()
4764 dst_reg->umax_value = U64_MAX; in adjust_scalar_min_max_vals()
4766 dst_reg->umin_value <<= umin_val; in adjust_scalar_min_max_vals()
4767 dst_reg->umax_value <<= umax_val; in adjust_scalar_min_max_vals()
4769 dst_reg->var_off = tnum_lshift(dst_reg->var_off, umin_val); in adjust_scalar_min_max_vals()
4771 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4778 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
4795 dst_reg->smin_value = S64_MIN; in adjust_scalar_min_max_vals()
4796 dst_reg->smax_value = S64_MAX; in adjust_scalar_min_max_vals()
4797 dst_reg->var_off = tnum_rshift(dst_reg->var_off, umin_val); in adjust_scalar_min_max_vals()
4798 dst_reg->umin_value >>= umax_val; in adjust_scalar_min_max_vals()
4799 dst_reg->umax_value >>= umin_val; in adjust_scalar_min_max_vals()
4801 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4808 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
4815 dst_reg->smin_value >>= umin_val; in adjust_scalar_min_max_vals()
4816 dst_reg->smax_value >>= umin_val; in adjust_scalar_min_max_vals()
4817 dst_reg->var_off = tnum_arshift(dst_reg->var_off, umin_val); in adjust_scalar_min_max_vals()
4822 dst_reg->umin_value = 0; in adjust_scalar_min_max_vals()
4823 dst_reg->umax_value = U64_MAX; in adjust_scalar_min_max_vals()
4824 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
4827 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
4833 coerce_reg_to_size(dst_reg, 4); in adjust_scalar_min_max_vals()
4836 __reg_deduce_bounds(dst_reg); in adjust_scalar_min_max_vals()
4837 __reg_bound_offset(dst_reg); in adjust_scalar_min_max_vals()
4849 struct bpf_reg_state *regs = state->regs, *dst_reg, *src_reg; in adjust_reg_min_max_vals() local
4854 dst_reg = &regs[insn->dst_reg]; in adjust_reg_min_max_vals()
4856 if (dst_reg->type != SCALAR_VALUE) in adjust_reg_min_max_vals()
4857 ptr_reg = dst_reg; in adjust_reg_min_max_vals()
4861 if (dst_reg->type != SCALAR_VALUE) { in adjust_reg_min_max_vals()
4867 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_reg_min_max_vals()
4871 insn->dst_reg, in adjust_reg_min_max_vals()
4879 err = mark_chain_precision(env, insn->dst_reg); in adjust_reg_min_max_vals()
4883 src_reg, dst_reg); in adjust_reg_min_max_vals()
4891 dst_reg, src_reg); in adjust_reg_min_max_vals()
4916 return adjust_scalar_min_max_vals(env, insn, dst_reg, *src_reg); in adjust_reg_min_max_vals()
4944 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_alu_op()
4948 if (is_pointer_value(env, insn->dst_reg)) { in check_alu_op()
4950 insn->dst_reg); in check_alu_op()
4955 err = check_reg_arg(env, insn->dst_reg, DST_OP); in check_alu_op()
4979 err = check_reg_arg(env, insn->dst_reg, DST_OP_NO_MARK); in check_alu_op()
4985 struct bpf_reg_state *dst_reg = regs + insn->dst_reg; in check_alu_op() local
4991 *dst_reg = *src_reg; in check_alu_op()
4992 dst_reg->live |= REG_LIVE_WRITTEN; in check_alu_op()
4993 dst_reg->subreg_def = DEF_NOT_SUBREG; in check_alu_op()
5002 *dst_reg = *src_reg; in check_alu_op()
5003 dst_reg->live |= REG_LIVE_WRITTEN; in check_alu_op()
5004 dst_reg->subreg_def = env->insn_idx + 1; in check_alu_op()
5007 insn->dst_reg); in check_alu_op()
5009 coerce_reg_to_size(dst_reg, 4); in check_alu_op()
5016 mark_reg_unknown(env, regs, insn->dst_reg); in check_alu_op()
5017 regs[insn->dst_reg].type = SCALAR_VALUE; in check_alu_op()
5019 __mark_reg_known(regs + insn->dst_reg, in check_alu_op()
5022 __mark_reg_known(regs + insn->dst_reg, in check_alu_op()
5050 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_alu_op()
5071 err = check_reg_arg(env, insn->dst_reg, DST_OP_NO_MARK); in check_alu_op()
5082 struct bpf_reg_state *dst_reg, in __find_good_pkt_pointers() argument
5090 if (reg->type == type && reg->id == dst_reg->id) in __find_good_pkt_pointers()
5098 if (reg->type == type && reg->id == dst_reg->id) in __find_good_pkt_pointers()
5104 struct bpf_reg_state *dst_reg, in find_good_pkt_pointers() argument
5111 if (dst_reg->off < 0 || in find_good_pkt_pointers()
5112 (dst_reg->off == 0 && range_right_open)) in find_good_pkt_pointers()
5116 if (dst_reg->umax_value > MAX_PACKET_OFF || in find_good_pkt_pointers()
5117 dst_reg->umax_value + dst_reg->off > MAX_PACKET_OFF) in find_good_pkt_pointers()
5123 new_range = dst_reg->off; in find_good_pkt_pointers()
5175 __find_good_pkt_pointers(vstate->frame[i], dst_reg, type, in find_good_pkt_pointers()
5555 struct bpf_reg_state *dst_reg) in __reg_combine_min_max() argument
5557 src_reg->umin_value = dst_reg->umin_value = max(src_reg->umin_value, in __reg_combine_min_max()
5558 dst_reg->umin_value); in __reg_combine_min_max()
5559 src_reg->umax_value = dst_reg->umax_value = min(src_reg->umax_value, in __reg_combine_min_max()
5560 dst_reg->umax_value); in __reg_combine_min_max()
5561 src_reg->smin_value = dst_reg->smin_value = max(src_reg->smin_value, in __reg_combine_min_max()
5562 dst_reg->smin_value); in __reg_combine_min_max()
5563 src_reg->smax_value = dst_reg->smax_value = min(src_reg->smax_value, in __reg_combine_min_max()
5564 dst_reg->smax_value); in __reg_combine_min_max()
5565 src_reg->var_off = dst_reg->var_off = tnum_intersect(src_reg->var_off, in __reg_combine_min_max()
5566 dst_reg->var_off); in __reg_combine_min_max()
5569 __update_reg_bounds(dst_reg); in __reg_combine_min_max()
5572 __reg_deduce_bounds(dst_reg); in __reg_combine_min_max()
5575 __reg_bound_offset(dst_reg); in __reg_combine_min_max()
5581 __update_reg_bounds(dst_reg); in __reg_combine_min_max()
5693 struct bpf_reg_state *dst_reg, in try_match_pkt_pointers() argument
5707 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
5709 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
5712 find_good_pkt_pointers(this_branch, dst_reg, in try_match_pkt_pointers()
5713 dst_reg->type, false); in try_match_pkt_pointers()
5714 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
5716 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
5726 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
5728 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
5731 find_good_pkt_pointers(other_branch, dst_reg, in try_match_pkt_pointers()
5732 dst_reg->type, true); in try_match_pkt_pointers()
5733 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
5735 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
5745 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
5747 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
5750 find_good_pkt_pointers(this_branch, dst_reg, in try_match_pkt_pointers()
5751 dst_reg->type, true); in try_match_pkt_pointers()
5752 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
5754 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
5764 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
5766 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
5769 find_good_pkt_pointers(other_branch, dst_reg, in try_match_pkt_pointers()
5770 dst_reg->type, false); in try_match_pkt_pointers()
5771 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
5773 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
5795 struct bpf_reg_state *dst_reg, *other_branch_regs, *src_reg = NULL; in check_cond_jmp_op() local
5832 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_cond_jmp_op()
5836 dst_reg = &regs[insn->dst_reg]; in check_cond_jmp_op()
5840 pred = is_branch_taken(dst_reg, insn->imm, in check_cond_jmp_op()
5844 pred = is_branch_taken(dst_reg, src_reg->var_off.value, in check_cond_jmp_op()
5847 err = mark_chain_precision(env, insn->dst_reg); in check_cond_jmp_op()
5879 struct bpf_reg_state lo_reg0 = *dst_reg; in check_cond_jmp_op()
5888 if (dst_reg->type == SCALAR_VALUE && in check_cond_jmp_op()
5892 reg_set_min_max(&other_branch_regs[insn->dst_reg], in check_cond_jmp_op()
5893 dst_reg, in check_cond_jmp_op()
5898 else if (tnum_is_const(dst_reg->var_off) || in check_cond_jmp_op()
5904 : dst_reg->var_off.value, in check_cond_jmp_op()
5910 &other_branch_regs[insn->dst_reg], in check_cond_jmp_op()
5911 src_reg, dst_reg, opcode); in check_cond_jmp_op()
5913 } else if (dst_reg->type == SCALAR_VALUE) { in check_cond_jmp_op()
5914 reg_set_min_max(&other_branch_regs[insn->dst_reg], in check_cond_jmp_op()
5915 dst_reg, insn->imm, opcode, is_jmp32); in check_cond_jmp_op()
5924 reg_type_may_be_null(dst_reg->type)) { in check_cond_jmp_op()
5928 mark_ptr_or_null_regs(this_branch, insn->dst_reg, in check_cond_jmp_op()
5930 mark_ptr_or_null_regs(other_branch, insn->dst_reg, in check_cond_jmp_op()
5932 } else if (!try_match_pkt_pointers(insn, dst_reg, &regs[insn->src_reg], in check_cond_jmp_op()
5934 is_pointer_value(env, insn->dst_reg)) { in check_cond_jmp_op()
5936 insn->dst_reg); in check_cond_jmp_op()
5961 err = check_reg_arg(env, insn->dst_reg, DST_OP); in check_ld_imm()
5968 regs[insn->dst_reg].type = SCALAR_VALUE; in check_ld_imm()
5969 __mark_reg_known(&regs[insn->dst_reg], imm); in check_ld_imm()
5974 mark_reg_known_zero(env, regs, insn->dst_reg); in check_ld_imm()
5975 regs[insn->dst_reg].map_ptr = map; in check_ld_imm()
5978 regs[insn->dst_reg].type = PTR_TO_MAP_VALUE; in check_ld_imm()
5979 regs[insn->dst_reg].off = aux->map_off; in check_ld_imm()
5981 regs[insn->dst_reg].id = ++env->id_gen; in check_ld_imm()
5983 regs[insn->dst_reg].type = CONST_PTR_TO_MAP; in check_ld_imm()
6047 if (insn->dst_reg != BPF_REG_0 || insn->off != 0 || in check_ld_abs()
7590 err = check_reg_arg(env, insn->dst_reg, DST_OP_NO_MARK); in do_check()
7601 BPF_READ, insn->dst_reg, false); in do_check()
7642 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in do_check()
7646 dst_reg_type = regs[insn->dst_reg].type; in do_check()
7649 err = check_mem_access(env, env->insn_idx, insn->dst_reg, in do_check()
7671 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in do_check()
7675 if (is_ctx_reg(env, insn->dst_reg)) { in do_check()
7677 insn->dst_reg, in do_check()
7678 reg_type_str[reg_state(env, insn->dst_reg)->type]); in do_check()
7683 err = check_mem_access(env, env->insn_idx, insn->dst_reg, in do_check()
7698 insn->dst_reg != BPF_REG_0 || in do_check()
7721 insn->dst_reg != BPF_REG_0 || in do_check()
7734 insn->dst_reg != BPF_REG_0 || in do_check()
7923 insn[1].dst_reg != 0 || insn[1].src_reg != 0 || in replace_map_fd_with_map_ptr()
8441 if (is_reg64(env, &insn, insn.dst_reg, NULL, DST_OP)) { in opt_subreg_zext_lo32_rnd_hi32()
8456 rnd_hi32_patch[3].dst_reg = insn.dst_reg; in opt_subreg_zext_lo32_rnd_hi32()
8466 zext_patch[1].dst_reg = insn.dst_reg; in opt_subreg_zext_lo32_rnd_hi32()
8467 zext_patch[1].src_reg = insn.dst_reg; in opt_subreg_zext_lo32_rnd_hi32()
8632 insn->dst_reg, in convert_ctx_accesses()
8634 insn_buf[cnt++] = BPF_ALU32_IMM(BPF_AND, insn->dst_reg, in convert_ctx_accesses()
8639 insn->dst_reg, in convert_ctx_accesses()
8641 insn_buf[cnt++] = BPF_ALU64_IMM(BPF_AND, insn->dst_reg, in convert_ctx_accesses()
8894 BPF_ALU32_REG(BPF_XOR, insn->dst_reg, insn->dst_reg), in fixup_bpf_calls()
8962 off_reg = issrc ? insn->src_reg : insn->dst_reg; in fixup_bpf_calls()