Lines Matching refs:dst_reg in kernel/bpf/verifier.c
2044 return insn->dst_reg; in insn_def_regno()
2051 int dst_reg = insn_def_regno(insn); in insn_has_def32() local
2053 if (dst_reg == -1) in insn_has_def32()
2056 return !is_reg64(env, insn, dst_reg, NULL, DST_OP); in insn_has_def32()
2179 u32 dreg = 1u << insn->dst_reg; in backtrack_insn()
2257 if (insn->dst_reg != BPF_REG_FP) in backtrack_insn()
2651 u32 dst_reg = env->prog->insnsi[insn_idx].dst_reg; in check_stack_write_fixed_off() local
2686 if (dst_reg != BPF_REG_FP) { in check_stack_write_fixed_off()
4361 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_atomic()
4377 if (is_ctx_reg(env, insn->dst_reg) || in check_atomic()
4378 is_pkt_reg(env, insn->dst_reg) || in check_atomic()
4379 is_flow_key_reg(env, insn->dst_reg) || in check_atomic()
4380 is_sk_reg(env, insn->dst_reg)) { in check_atomic()
4382 insn->dst_reg, in check_atomic()
4383 reg_type_str[reg_state(env, insn->dst_reg)->type]); in check_atomic()
4405 err = check_mem_access(env, insn_idx, insn->dst_reg, insn->off, in check_atomic()
4411 err = check_mem_access(env, insn_idx, insn->dst_reg, insn->off, in check_atomic()
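The check_atomic() excerpt above rejects destination pointers the verifier cannot model for read-modify-write (ctx, packet, flow-key, and socket pointers) and then validates the same address twice with check_mem_access(), once as a load and once as a store, because an atomic RMW performs both. Below is a minimal standalone sketch of that double check; check_mem_ok() and check_atomic_sketch() are illustration-only names, and the off >= 0 policy is a placeholder, not the kernel's actual check.

#include <stdbool.h>
#include <stdio.h>

enum access_type { ACC_READ, ACC_WRITE };

/* Placeholder policy standing in for check_mem_access(): here any
 * non-negative offset passes, purely for demonstration. */
static bool check_mem_ok(int dst_reg, int off, enum access_type t)
{
	(void)dst_reg;
	(void)t;
	return off >= 0;
}

/* An atomic op such as BPF_XADD reads the old value and writes the
 * new one, so the address must pass both the load and store checks. */
static bool check_atomic_sketch(int dst_reg, int off)
{
	return check_mem_ok(dst_reg, off, ACC_READ) &&
	       check_mem_ok(dst_reg, off, ACC_WRITE);
}

int main(void)
{
	printf("%d\n", check_atomic_sketch(1, 8)); /* prints 1 */
	return 0;
}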
6725 mark_reg_unknown(env, regs, insn->dst_reg); in sanitize_speculative_path()
6727 mark_reg_unknown(env, regs, insn->dst_reg); in sanitize_speculative_path()
6738 struct bpf_reg_state *dst_reg, in sanitize_ptr_alu() argument
6746 bool ptr_is_dst_reg = ptr_reg == dst_reg; in sanitize_ptr_alu()
6820 tmp = *dst_reg; in sanitize_ptr_alu()
6821 *dst_reg = *ptr_reg; in sanitize_ptr_alu()
6826 *dst_reg = tmp; in sanitize_ptr_alu()
6846 const struct bpf_reg_state *dst_reg) in sanitize_err() argument
6850 u32 dst = insn->dst_reg, src = insn->src_reg; in sanitize_err()
6855 off_reg == dst_reg ? dst : src, err); in sanitize_err()
6859 off_reg == dst_reg ? src : dst, err); in sanitize_err()
6918 const struct bpf_reg_state *dst_reg) in sanitize_check_bounds() argument
6920 u32 dst = insn->dst_reg; in sanitize_check_bounds()
6928 switch (dst_reg->type) { in sanitize_check_bounds()
6930 if (check_stack_access_for_ptr_arithmetic(env, dst, dst_reg, in sanitize_check_bounds()
6931 dst_reg->off + dst_reg->var_off.value)) in sanitize_check_bounds()
6935 if (check_map_access(env, dst, dst_reg->off, 1, false)) { in sanitize_check_bounds()
6960 struct bpf_reg_state *regs = state->regs, *dst_reg; in adjust_ptr_min_max_vals() local
6968 u32 dst = insn->dst_reg; in adjust_ptr_min_max_vals()
6971 		dst_reg = &regs[dst]; in adjust_ptr_min_max_vals()
6978 __mark_reg_unknown(env, dst_reg); in adjust_ptr_min_max_vals()
6985 __mark_reg_unknown(env, dst_reg); in adjust_ptr_min_max_vals()
7023 dst_reg->type = ptr_reg->type; in adjust_ptr_min_max_vals()
7024 dst_reg->id = ptr_reg->id; in adjust_ptr_min_max_vals()
7031 __mark_reg32_unbounded(dst_reg); in adjust_ptr_min_max_vals()
7034 ret = sanitize_ptr_alu(env, insn, ptr_reg, off_reg, dst_reg, in adjust_ptr_min_max_vals()
7037 return sanitize_err(env, insn, ret, off_reg, dst_reg); in adjust_ptr_min_max_vals()
7048 dst_reg->smin_value = smin_ptr; in adjust_ptr_min_max_vals()
7049 dst_reg->smax_value = smax_ptr; in adjust_ptr_min_max_vals()
7050 dst_reg->umin_value = umin_ptr; in adjust_ptr_min_max_vals()
7051 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
7052 dst_reg->var_off = ptr_reg->var_off; in adjust_ptr_min_max_vals()
7053 dst_reg->off = ptr_reg->off + smin_val; in adjust_ptr_min_max_vals()
7054 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
7068 dst_reg->smin_value = S64_MIN; in adjust_ptr_min_max_vals()
7069 dst_reg->smax_value = S64_MAX; in adjust_ptr_min_max_vals()
7071 dst_reg->smin_value = smin_ptr + smin_val; in adjust_ptr_min_max_vals()
7072 dst_reg->smax_value = smax_ptr + smax_val; in adjust_ptr_min_max_vals()
7076 dst_reg->umin_value = 0; in adjust_ptr_min_max_vals()
7077 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
7079 dst_reg->umin_value = umin_ptr + umin_val; in adjust_ptr_min_max_vals()
7080 dst_reg->umax_value = umax_ptr + umax_val; in adjust_ptr_min_max_vals()
7082 dst_reg->var_off = tnum_add(ptr_reg->var_off, off_reg->var_off); in adjust_ptr_min_max_vals()
7083 dst_reg->off = ptr_reg->off; in adjust_ptr_min_max_vals()
7084 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
7086 dst_reg->id = ++env->id_gen; in adjust_ptr_min_max_vals()
7088 memset(&dst_reg->raw, 0, sizeof(dst_reg->raw)); in adjust_ptr_min_max_vals()
7092 if (dst_reg == off_reg) { in adjust_ptr_min_max_vals()
7110 dst_reg->smin_value = smin_ptr; in adjust_ptr_min_max_vals()
7111 dst_reg->smax_value = smax_ptr; in adjust_ptr_min_max_vals()
7112 dst_reg->umin_value = umin_ptr; in adjust_ptr_min_max_vals()
7113 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
7114 dst_reg->var_off = ptr_reg->var_off; in adjust_ptr_min_max_vals()
7115 dst_reg->id = ptr_reg->id; in adjust_ptr_min_max_vals()
7116 dst_reg->off = ptr_reg->off - smin_val; in adjust_ptr_min_max_vals()
7117 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
7126 dst_reg->smin_value = S64_MIN; in adjust_ptr_min_max_vals()
7127 dst_reg->smax_value = S64_MAX; in adjust_ptr_min_max_vals()
7129 dst_reg->smin_value = smin_ptr - smax_val; in adjust_ptr_min_max_vals()
7130 dst_reg->smax_value = smax_ptr - smin_val; in adjust_ptr_min_max_vals()
7134 dst_reg->umin_value = 0; in adjust_ptr_min_max_vals()
7135 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
7138 dst_reg->umin_value = umin_ptr - umax_val; in adjust_ptr_min_max_vals()
7139 dst_reg->umax_value = umax_ptr - umin_val; in adjust_ptr_min_max_vals()
7141 dst_reg->var_off = tnum_sub(ptr_reg->var_off, off_reg->var_off); in adjust_ptr_min_max_vals()
7142 dst_reg->off = ptr_reg->off; in adjust_ptr_min_max_vals()
7143 dst_reg->raw = ptr_reg->raw; in adjust_ptr_min_max_vals()
7145 dst_reg->id = ++env->id_gen; in adjust_ptr_min_max_vals()
7148 memset(&dst_reg->raw, 0, sizeof(dst_reg->raw)); in adjust_ptr_min_max_vals()
7165 if (!check_reg_sane_offset(env, dst_reg, ptr_reg->type)) in adjust_ptr_min_max_vals()
7168 __update_reg_bounds(dst_reg); in adjust_ptr_min_max_vals()
7169 __reg_deduce_bounds(dst_reg); in adjust_ptr_min_max_vals()
7170 __reg_bound_offset(dst_reg); in adjust_ptr_min_max_vals()
7172 if (sanitize_check_bounds(env, insn, dst_reg) < 0) in adjust_ptr_min_max_vals()
7175 ret = sanitize_ptr_alu(env, insn, dst_reg, off_reg, dst_reg, in adjust_ptr_min_max_vals()
7178 return sanitize_err(env, insn, ret, off_reg, dst_reg); in adjust_ptr_min_max_vals()
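The adjust_ptr_min_max_vals() lines show the two shapes of pointer-plus-scalar tracking: a known constant offset folds into the fixed dst_reg->off (lines 7048-7054), while a variable offset leaves off in place and instead widens the value bounds and var_off (lines 7079-7083), handing the result a fresh id (line 7086). A condensed, runnable sketch of that split under those assumptions; struct ptr_bounds and ptr_add() are illustration-only and omit the wrap checks the kernel performs.

#include <stdint.h>
#include <stdio.h>

struct ptr_bounds {
	int32_t  off;        /* fixed, statically known displacement */
	uint64_t umin, umax; /* bounds of the variable displacement  */
};

static struct ptr_bounds ptr_add(struct ptr_bounds p,
				 uint64_t off_umin, uint64_t off_umax)
{
	if (off_umin == off_umax)            /* constant: fold into off */
		p.off += (int32_t)off_umin;
	else {                               /* variable: widen the window */
		p.umin += off_umin;          /* (wrap checks omitted here) */
		p.umax += off_umax;
	}
	return p;
}

int main(void)
{
	struct ptr_bounds p = { .off = 0, .umin = 0, .umax = 0 };

	p = ptr_add(p, 8, 8);   /* map_value + 8       -> off = 8   */
	p = ptr_add(p, 0, 16);  /* + scalar in [0, 16] -> umax = 16 */
	printf("off=%d range=[%llu,%llu]\n", (int)p.off,
	       (unsigned long long)p.umin, (unsigned long long)p.umax);
	return 0;
}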
7184 static void scalar32_min_max_add(struct bpf_reg_state *dst_reg, in scalar32_min_max_add() argument
7192 if (signed_add32_overflows(dst_reg->s32_min_value, smin_val) || in scalar32_min_max_add()
7193 signed_add32_overflows(dst_reg->s32_max_value, smax_val)) { in scalar32_min_max_add()
7194 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_add()
7195 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_add()
7197 dst_reg->s32_min_value += smin_val; in scalar32_min_max_add()
7198 dst_reg->s32_max_value += smax_val; in scalar32_min_max_add()
7200 if (dst_reg->u32_min_value + umin_val < umin_val || in scalar32_min_max_add()
7201 dst_reg->u32_max_value + umax_val < umax_val) { in scalar32_min_max_add()
7202 dst_reg->u32_min_value = 0; in scalar32_min_max_add()
7203 dst_reg->u32_max_value = U32_MAX; in scalar32_min_max_add()
7205 dst_reg->u32_min_value += umin_val; in scalar32_min_max_add()
7206 dst_reg->u32_max_value += umax_val; in scalar32_min_max_add()
7210 static void scalar_min_max_add(struct bpf_reg_state *dst_reg, in scalar_min_max_add() argument
7218 if (signed_add_overflows(dst_reg->smin_value, smin_val) || in scalar_min_max_add()
7219 signed_add_overflows(dst_reg->smax_value, smax_val)) { in scalar_min_max_add()
7220 dst_reg->smin_value = S64_MIN; in scalar_min_max_add()
7221 dst_reg->smax_value = S64_MAX; in scalar_min_max_add()
7223 dst_reg->smin_value += smin_val; in scalar_min_max_add()
7224 dst_reg->smax_value += smax_val; in scalar_min_max_add()
7226 if (dst_reg->umin_value + umin_val < umin_val || in scalar_min_max_add()
7227 dst_reg->umax_value + umax_val < umax_val) { in scalar_min_max_add()
7228 dst_reg->umin_value = 0; in scalar_min_max_add()
7229 dst_reg->umax_value = U64_MAX; in scalar_min_max_add()
7231 dst_reg->umin_value += umin_val; in scalar_min_max_add()
7232 dst_reg->umax_value += umax_val; in scalar_min_max_add()
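scalar_min_max_add() above shows the canonical bounds-add pattern: if either signed endpoint addition overflows, the signed range widens to [S64_MIN, S64_MAX]; on the unsigned side, the test a + b < b detects 64-bit wraparound, which works because unsigned overflow in C is defined modular arithmetic. A standalone demo of the unsigned half (struct range and range_add() are illustration-only names):

#include <stdint.h>
#include <stdio.h>

struct range { uint64_t umin, umax; };

static struct range range_add(struct range d, struct range s)
{
	if (d.umin + s.umin < s.umin ||  /* umin wrapped past 2^64 */
	    d.umax + s.umax < s.umax) {  /* umax wrapped past 2^64 */
		d.umin = 0;
		d.umax = UINT64_MAX;     /* conservatively unknown */
	} else {
		d.umin += s.umin;
		d.umax += s.umax;
	}
	return d;
}

int main(void)
{
	struct range d = { 10, 100 }, s = { 1, UINT64_MAX };
	struct range r = range_add(d, s);  /* umax wraps -> unknown */

	printf("[%llu, %llu]\n",
	       (unsigned long long)r.umin, (unsigned long long)r.umax);
	return 0;
}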
7236 static void scalar32_min_max_sub(struct bpf_reg_state *dst_reg, in scalar32_min_max_sub() argument
7244 if (signed_sub32_overflows(dst_reg->s32_min_value, smax_val) || in scalar32_min_max_sub()
7245 signed_sub32_overflows(dst_reg->s32_max_value, smin_val)) { in scalar32_min_max_sub()
7247 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_sub()
7248 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_sub()
7250 dst_reg->s32_min_value -= smax_val; in scalar32_min_max_sub()
7251 dst_reg->s32_max_value -= smin_val; in scalar32_min_max_sub()
7253 if (dst_reg->u32_min_value < umax_val) { in scalar32_min_max_sub()
7255 dst_reg->u32_min_value = 0; in scalar32_min_max_sub()
7256 dst_reg->u32_max_value = U32_MAX; in scalar32_min_max_sub()
7259 dst_reg->u32_min_value -= umax_val; in scalar32_min_max_sub()
7260 dst_reg->u32_max_value -= umin_val; in scalar32_min_max_sub()
7264 static void scalar_min_max_sub(struct bpf_reg_state *dst_reg, in scalar_min_max_sub() argument
7272 if (signed_sub_overflows(dst_reg->smin_value, smax_val) || in scalar_min_max_sub()
7273 signed_sub_overflows(dst_reg->smax_value, smin_val)) { in scalar_min_max_sub()
7275 dst_reg->smin_value = S64_MIN; in scalar_min_max_sub()
7276 dst_reg->smax_value = S64_MAX; in scalar_min_max_sub()
7278 dst_reg->smin_value -= smax_val; in scalar_min_max_sub()
7279 dst_reg->smax_value -= smin_val; in scalar_min_max_sub()
7281 if (dst_reg->umin_value < umax_val) { in scalar_min_max_sub()
7283 dst_reg->umin_value = 0; in scalar_min_max_sub()
7284 dst_reg->umax_value = U64_MAX; in scalar_min_max_sub()
7287 dst_reg->umin_value -= umax_val; in scalar_min_max_sub()
7288 dst_reg->umax_value -= umin_val; in scalar_min_max_sub()
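Subtraction crosses the endpoints: the new minimum subtracts the largest possible src value and the new maximum subtracts the smallest (lines 7278-7279), and the unsigned range survives only when dst->umin >= umax_val, i.e. no value can underflow (line 7281). The same shape as a standalone sketch (illustration-only names):

#include <stdint.h>
#include <stdio.h>

struct range { uint64_t umin, umax; };

static struct range range_sub(struct range d, struct range s)
{
	if (d.umin < s.umax) {       /* could underflow below zero */
		d.umin = 0;
		d.umax = UINT64_MAX;
	} else {
		d.umin -= s.umax;    /* smallest result: take most away  */
		d.umax -= s.umin;    /* largest result: take least away  */
	}
	return d;
}

int main(void)
{
	struct range d = { 100, 200 }, s = { 10, 50 };
	struct range r = range_sub(d, s);

	printf("[%llu, %llu]\n",  /* [50, 190] */
	       (unsigned long long)r.umin, (unsigned long long)r.umax);
	return 0;
}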
7292 static void scalar32_min_max_mul(struct bpf_reg_state *dst_reg, in scalar32_min_max_mul() argument
7299 if (smin_val < 0 || dst_reg->s32_min_value < 0) { in scalar32_min_max_mul()
7301 __mark_reg32_unbounded(dst_reg); in scalar32_min_max_mul()
7307 if (umax_val > U16_MAX || dst_reg->u32_max_value > U16_MAX) { in scalar32_min_max_mul()
7309 __mark_reg32_unbounded(dst_reg); in scalar32_min_max_mul()
7312 dst_reg->u32_min_value *= umin_val; in scalar32_min_max_mul()
7313 dst_reg->u32_max_value *= umax_val; in scalar32_min_max_mul()
7314 if (dst_reg->u32_max_value > S32_MAX) { in scalar32_min_max_mul()
7316 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_mul()
7317 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_mul()
7319 dst_reg->s32_min_value = dst_reg->u32_min_value; in scalar32_min_max_mul()
7320 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_mul()
7324 static void scalar_min_max_mul(struct bpf_reg_state *dst_reg, in scalar_min_max_mul() argument
7331 if (smin_val < 0 || dst_reg->smin_value < 0) { in scalar_min_max_mul()
7333 __mark_reg64_unbounded(dst_reg); in scalar_min_max_mul()
7339 if (umax_val > U32_MAX || dst_reg->umax_value > U32_MAX) { in scalar_min_max_mul()
7341 __mark_reg64_unbounded(dst_reg); in scalar_min_max_mul()
7344 dst_reg->umin_value *= umin_val; in scalar_min_max_mul()
7345 dst_reg->umax_value *= umax_val; in scalar_min_max_mul()
7346 if (dst_reg->umax_value > S64_MAX) { in scalar_min_max_mul()
7348 dst_reg->smin_value = S64_MIN; in scalar_min_max_mul()
7349 dst_reg->smax_value = S64_MAX; in scalar_min_max_mul()
7351 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_mul()
7352 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_mul()
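The multiply helpers give up unless both operands' maxima fit in half the width (line 7339 for the 64-bit case): if both factors are at most U32_MAX, the product of the maxima is at most (2^32 - 1)^2 = 2^64 - 2^33 + 1, which still fits in a u64, so umin * umin and umax * umax cannot wrap. A one-line check of that bound:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t m = UINT32_MAX;   /* 2^32 - 1 */
	uint64_t prod = m * m;     /* exact: 2^64 - 2^33 + 1, no wrap */

	printf("%llu < %llu\n",
	       (unsigned long long)prod, (unsigned long long)UINT64_MAX);
	return 0;
}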
7356 static void scalar32_min_max_and(struct bpf_reg_state *dst_reg, in scalar32_min_max_and() argument
7360 bool dst_known = tnum_subreg_is_const(dst_reg->var_off); in scalar32_min_max_and()
7361 struct tnum var32_off = tnum_subreg(dst_reg->var_off); in scalar32_min_max_and()
7366 __mark_reg32_known(dst_reg, var32_off.value); in scalar32_min_max_and()
7373 dst_reg->u32_min_value = var32_off.value; in scalar32_min_max_and()
7374 dst_reg->u32_max_value = min(dst_reg->u32_max_value, umax_val); in scalar32_min_max_and()
7375 if (dst_reg->s32_min_value < 0 || smin_val < 0) { in scalar32_min_max_and()
7379 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_and()
7380 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_and()
7385 dst_reg->s32_min_value = dst_reg->u32_min_value; in scalar32_min_max_and()
7386 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_and()
7390 static void scalar_min_max_and(struct bpf_reg_state *dst_reg, in scalar_min_max_and() argument
7394 bool dst_known = tnum_is_const(dst_reg->var_off); in scalar_min_max_and()
7399 __mark_reg_known(dst_reg, dst_reg->var_off.value); in scalar_min_max_and()
7406 dst_reg->umin_value = dst_reg->var_off.value; in scalar_min_max_and()
7407 dst_reg->umax_value = min(dst_reg->umax_value, umax_val); in scalar_min_max_and()
7408 if (dst_reg->smin_value < 0 || smin_val < 0) { in scalar_min_max_and()
7412 dst_reg->smin_value = S64_MIN; in scalar_min_max_and()
7413 dst_reg->smax_value = S64_MAX; in scalar_min_max_and()
7418 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_and()
7419 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_and()
7422 __update_reg_bounds(dst_reg); in scalar_min_max_and()
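For AND the range side is simple: AND can only clear bits, so the result is capped by the smaller umax (line 7407) and floored by the bits known set in both operands, which the tnum supplies as var_off.value (line 7406). The known-bits bookkeeping itself lives in tnum_and() in kernel/bpf/tnum.c; the sketch below (tnum_and_sketch() is an illustration-only reimplementation) shows the rule: a result bit is certainly 1 only if certain in both inputs, and unknown only if it could be 1 in both.

#include <stdint.h>
#include <stdio.h>

struct tnum { uint64_t value, mask; }; /* value: known-1 bits, mask: unknown */

static struct tnum tnum_and_sketch(struct tnum a, struct tnum b)
{
	uint64_t alpha = a.value | a.mask;  /* bits possibly 1 in a */
	uint64_t beta  = b.value | b.mask;  /* bits possibly 1 in b */
	uint64_t v     = a.value & b.value; /* bits certainly 1     */

	return (struct tnum){ .value = v, .mask = (alpha & beta) & ~v };
}

int main(void)
{
	/* a = 0b1x10 (x unknown), b = 0b0111 -> result 0b0x10 */
	struct tnum a = { .value = 0xA, .mask = 0x4 };
	struct tnum b = { .value = 0x7, .mask = 0x0 };
	struct tnum r = tnum_and_sketch(a, b);

	printf("value=%#llx mask=%#llx\n",
	       (unsigned long long)r.value, (unsigned long long)r.mask);
	return 0;
}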
7425 static void scalar32_min_max_or(struct bpf_reg_state *dst_reg, in scalar32_min_max_or() argument
7429 bool dst_known = tnum_subreg_is_const(dst_reg->var_off); in scalar32_min_max_or()
7430 struct tnum var32_off = tnum_subreg(dst_reg->var_off); in scalar32_min_max_or()
7435 __mark_reg32_known(dst_reg, var32_off.value); in scalar32_min_max_or()
7442 dst_reg->u32_min_value = max(dst_reg->u32_min_value, umin_val); in scalar32_min_max_or()
7443 dst_reg->u32_max_value = var32_off.value | var32_off.mask; in scalar32_min_max_or()
7444 if (dst_reg->s32_min_value < 0 || smin_val < 0) { in scalar32_min_max_or()
7448 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_or()
7449 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_or()
7454 dst_reg->s32_min_value = dst_reg->u32_min_value; in scalar32_min_max_or()
7455 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_or()
7459 static void scalar_min_max_or(struct bpf_reg_state *dst_reg, in scalar_min_max_or() argument
7463 bool dst_known = tnum_is_const(dst_reg->var_off); in scalar_min_max_or()
7468 __mark_reg_known(dst_reg, dst_reg->var_off.value); in scalar_min_max_or()
7475 dst_reg->umin_value = max(dst_reg->umin_value, umin_val); in scalar_min_max_or()
7476 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_or()
7477 if (dst_reg->smin_value < 0 || smin_val < 0) { in scalar_min_max_or()
7481 dst_reg->smin_value = S64_MIN; in scalar_min_max_or()
7482 dst_reg->smax_value = S64_MAX; in scalar_min_max_or()
7487 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_or()
7488 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_or()
7491 __update_reg_bounds(dst_reg); in scalar_min_max_or()
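OR is the mirror image: it can only set bits, so umin rises to the larger of the two minima (line 7475) and umax is the everything-unknown-set ceiling value | mask (line 7476). A matching sketch of the rule behind the kernel's tnum_or() (tnum_or_sketch() is an illustration-only name):

#include <stdint.h>
#include <stdio.h>

struct tnum { uint64_t value, mask; };

/* A bit is certainly 1 if certain in either input; it is unknown if
 * unknown in either input and not already certain. */
static struct tnum tnum_or_sketch(struct tnum a, struct tnum b)
{
	uint64_t v  = a.value | b.value;
	uint64_t mu = a.mask | b.mask;

	return (struct tnum){ .value = v, .mask = mu & ~v };
}

int main(void)
{
	/* a = 0b1x10, b = 0b0001 -> result 0b1x11 */
	struct tnum a = { .value = 0xA, .mask = 0x4 };
	struct tnum b = { .value = 0x1, .mask = 0x0 };
	struct tnum r = tnum_or_sketch(a, b);

	printf("value=%#llx mask=%#llx\n",
	       (unsigned long long)r.value, (unsigned long long)r.mask);
	return 0;
}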
7494 static void scalar32_min_max_xor(struct bpf_reg_state *dst_reg, in scalar32_min_max_xor() argument
7498 bool dst_known = tnum_subreg_is_const(dst_reg->var_off); in scalar32_min_max_xor()
7499 struct tnum var32_off = tnum_subreg(dst_reg->var_off); in scalar32_min_max_xor()
7503 __mark_reg32_known(dst_reg, var32_off.value); in scalar32_min_max_xor()
7508 dst_reg->u32_min_value = var32_off.value; in scalar32_min_max_xor()
7509 dst_reg->u32_max_value = var32_off.value | var32_off.mask; in scalar32_min_max_xor()
7511 if (dst_reg->s32_min_value >= 0 && smin_val >= 0) { in scalar32_min_max_xor()
7515 dst_reg->s32_min_value = dst_reg->u32_min_value; in scalar32_min_max_xor()
7516 dst_reg->s32_max_value = dst_reg->u32_max_value; in scalar32_min_max_xor()
7518 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_xor()
7519 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_xor()
7523 static void scalar_min_max_xor(struct bpf_reg_state *dst_reg, in scalar_min_max_xor() argument
7527 bool dst_known = tnum_is_const(dst_reg->var_off); in scalar_min_max_xor()
7532 __mark_reg_known(dst_reg, dst_reg->var_off.value); in scalar_min_max_xor()
7537 dst_reg->umin_value = dst_reg->var_off.value; in scalar_min_max_xor()
7538 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_xor()
7540 if (dst_reg->smin_value >= 0 && smin_val >= 0) { in scalar_min_max_xor()
7544 dst_reg->smin_value = dst_reg->umin_value; in scalar_min_max_xor()
7545 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_xor()
7547 dst_reg->smin_value = S64_MIN; in scalar_min_max_xor()
7548 dst_reg->smax_value = S64_MAX; in scalar_min_max_xor()
7551 __update_reg_bounds(dst_reg); in scalar_min_max_xor()
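XOR keeps signed bounds only when both inputs are provably non-negative (line 7540): XOR of two values with clear sign bits has a clear sign bit, so the unsigned bounds carry over directly; otherwise the sign is unknown and the signed range widens. The bit rule, after the kernel's tnum_xor() (tnum_xor_sketch() is an illustration-only name):

#include <stdint.h>
#include <stdio.h>

struct tnum { uint64_t value, mask; };

/* Any unknown input bit makes the result bit unknown; among the
 * fully known bits, the result is the plain XOR. */
static struct tnum tnum_xor_sketch(struct tnum a, struct tnum b)
{
	uint64_t v  = a.value ^ b.value;
	uint64_t mu = a.mask | b.mask;

	return (struct tnum){ .value = v & ~mu, .mask = mu };
}

int main(void)
{
	/* a = 0b1x10, b = 0b0110 -> result 0b1x00 */
	struct tnum a = { .value = 0xA, .mask = 0x4 };
	struct tnum b = { .value = 0x6, .mask = 0x0 };
	struct tnum r = tnum_xor_sketch(a, b);

	printf("value=%#llx mask=%#llx\n",
	       (unsigned long long)r.value, (unsigned long long)r.mask);
	return 0;
}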
7554 static void __scalar32_min_max_lsh(struct bpf_reg_state *dst_reg, in __scalar32_min_max_lsh() argument
7560 dst_reg->s32_min_value = S32_MIN; in __scalar32_min_max_lsh()
7561 dst_reg->s32_max_value = S32_MAX; in __scalar32_min_max_lsh()
7563 if (umax_val > 31 || dst_reg->u32_max_value > 1ULL << (31 - umax_val)) { in __scalar32_min_max_lsh()
7564 dst_reg->u32_min_value = 0; in __scalar32_min_max_lsh()
7565 dst_reg->u32_max_value = U32_MAX; in __scalar32_min_max_lsh()
7567 dst_reg->u32_min_value <<= umin_val; in __scalar32_min_max_lsh()
7568 dst_reg->u32_max_value <<= umax_val; in __scalar32_min_max_lsh()
7572 static void scalar32_min_max_lsh(struct bpf_reg_state *dst_reg, in scalar32_min_max_lsh() argument
7578 struct tnum subreg = tnum_subreg(dst_reg->var_off); in scalar32_min_max_lsh()
7580 __scalar32_min_max_lsh(dst_reg, umin_val, umax_val); in scalar32_min_max_lsh()
7581 dst_reg->var_off = tnum_subreg(tnum_lshift(subreg, umin_val)); in scalar32_min_max_lsh()
7586 __mark_reg64_unbounded(dst_reg); in scalar32_min_max_lsh()
7587 __update_reg32_bounds(dst_reg); in scalar32_min_max_lsh()
7590 static void __scalar64_min_max_lsh(struct bpf_reg_state *dst_reg, in __scalar64_min_max_lsh() argument
7600 if (umin_val == 32 && umax_val == 32 && dst_reg->s32_max_value >= 0) in __scalar64_min_max_lsh()
7601 dst_reg->smax_value = (s64)dst_reg->s32_max_value << 32; in __scalar64_min_max_lsh()
7603 dst_reg->smax_value = S64_MAX; in __scalar64_min_max_lsh()
7605 if (umin_val == 32 && umax_val == 32 && dst_reg->s32_min_value >= 0) in __scalar64_min_max_lsh()
7606 dst_reg->smin_value = (s64)dst_reg->s32_min_value << 32; in __scalar64_min_max_lsh()
7608 dst_reg->smin_value = S64_MIN; in __scalar64_min_max_lsh()
7611 if (dst_reg->umax_value > 1ULL << (63 - umax_val)) { in __scalar64_min_max_lsh()
7612 dst_reg->umin_value = 0; in __scalar64_min_max_lsh()
7613 dst_reg->umax_value = U64_MAX; in __scalar64_min_max_lsh()
7615 dst_reg->umin_value <<= umin_val; in __scalar64_min_max_lsh()
7616 dst_reg->umax_value <<= umax_val; in __scalar64_min_max_lsh()
7620 static void scalar_min_max_lsh(struct bpf_reg_state *dst_reg, in scalar_min_max_lsh() argument
7627 __scalar64_min_max_lsh(dst_reg, umin_val, umax_val); in scalar_min_max_lsh()
7628 __scalar32_min_max_lsh(dst_reg, umin_val, umax_val); in scalar_min_max_lsh()
7630 dst_reg->var_off = tnum_lshift(dst_reg->var_off, umin_val); in scalar_min_max_lsh()
7632 __update_reg_bounds(dst_reg); in scalar_min_max_lsh()
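The left-shift guard at line 7611 asks whether any possible value could push a bit past bit 63: if dst->umax_value > 1 << (63 - umax_val), the range collapses to unknown; otherwise the minimum shifts by the smallest amount and the maximum by the largest. (Lines 7600-7606 add a nice special case: a shift by exactly 32 lets the 64-bit signed bounds be rebuilt from the 32-bit ones.) A standalone sketch of the guard, with illustration-only names:

#include <stdint.h>
#include <stdio.h>

struct range { uint64_t umin, umax; };

static struct range range_lsh(struct range d, uint64_t smin, uint64_t smax)
{
	if (smax > 63 || d.umax > (1ULL << (63 - smax))) {
		d.umin = 0;              /* a set bit could fall off the top */
		d.umax = UINT64_MAX;
	} else {
		d.umin <<= smin;
		d.umax <<= smax;
	}
	return d;
}

int main(void)
{
	struct range d = { 1, 16 };
	struct range r = range_lsh(d, 1, 3);  /* [2, 128] */

	printf("[%llu, %llu]\n",
	       (unsigned long long)r.umin, (unsigned long long)r.umax);
	return 0;
}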
7635 static void scalar32_min_max_rsh(struct bpf_reg_state *dst_reg, in scalar32_min_max_rsh() argument
7638 struct tnum subreg = tnum_subreg(dst_reg->var_off); in scalar32_min_max_rsh()
7656 dst_reg->s32_min_value = S32_MIN; in scalar32_min_max_rsh()
7657 dst_reg->s32_max_value = S32_MAX; in scalar32_min_max_rsh()
7659 dst_reg->var_off = tnum_rshift(subreg, umin_val); in scalar32_min_max_rsh()
7660 dst_reg->u32_min_value >>= umax_val; in scalar32_min_max_rsh()
7661 dst_reg->u32_max_value >>= umin_val; in scalar32_min_max_rsh()
7663 __mark_reg64_unbounded(dst_reg); in scalar32_min_max_rsh()
7664 __update_reg32_bounds(dst_reg); in scalar32_min_max_rsh()
7667 static void scalar_min_max_rsh(struct bpf_reg_state *dst_reg, in scalar_min_max_rsh() argument
7687 dst_reg->smin_value = S64_MIN; in scalar_min_max_rsh()
7688 dst_reg->smax_value = S64_MAX; in scalar_min_max_rsh()
7689 dst_reg->var_off = tnum_rshift(dst_reg->var_off, umin_val); in scalar_min_max_rsh()
7690 dst_reg->umin_value >>= umax_val; in scalar_min_max_rsh()
7691 dst_reg->umax_value >>= umin_val; in scalar_min_max_rsh()
7697 __mark_reg32_unbounded(dst_reg); in scalar_min_max_rsh()
7698 __update_reg_bounds(dst_reg); in scalar_min_max_rsh()
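Logical right shift zero-fills, so a negative value reinterpreted as unsigned becomes enormous; that is why the listing resets the signed bounds (lines 7687-7688) and shifts only the unsigned endpoints, crossed: the new minimum takes the largest shift and the new maximum the smallest (lines 7690-7691). A tiny demonstration:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t neg = (uint64_t)-8;  /* 0xff...f8 as unsigned */

	/* a "negative" value becomes huge under logical shift,
	 * so signed bounds cannot survive */
	printf("%llu\n", (unsigned long long)(neg >> 1));

	/* range [64, 256] shifted by an amount in [1, 3]: the new
	 * minimum uses the largest shift, the new maximum the smallest */
	printf("[%u, %u]\n", 64u >> 3, 256u >> 1);  /* [8, 128] */
	return 0;
}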
7701 static void scalar32_min_max_arsh(struct bpf_reg_state *dst_reg, in scalar32_min_max_arsh() argument
7709 dst_reg->s32_min_value = (u32)(((s32)dst_reg->s32_min_value) >> umin_val); in scalar32_min_max_arsh()
7710 dst_reg->s32_max_value = (u32)(((s32)dst_reg->s32_max_value) >> umin_val); in scalar32_min_max_arsh()
7712 dst_reg->var_off = tnum_arshift(tnum_subreg(dst_reg->var_off), umin_val, 32); in scalar32_min_max_arsh()
7717 dst_reg->u32_min_value = 0; in scalar32_min_max_arsh()
7718 dst_reg->u32_max_value = U32_MAX; in scalar32_min_max_arsh()
7720 __mark_reg64_unbounded(dst_reg); in scalar32_min_max_arsh()
7721 __update_reg32_bounds(dst_reg); in scalar32_min_max_arsh()
7724 static void scalar_min_max_arsh(struct bpf_reg_state *dst_reg, in scalar_min_max_arsh() argument
7732 dst_reg->smin_value >>= umin_val; in scalar_min_max_arsh()
7733 dst_reg->smax_value >>= umin_val; in scalar_min_max_arsh()
7735 dst_reg->var_off = tnum_arshift(dst_reg->var_off, umin_val, 64); in scalar_min_max_arsh()
7740 dst_reg->umin_value = 0; in scalar_min_max_arsh()
7741 dst_reg->umax_value = U64_MAX; in scalar_min_max_arsh()
7747 __mark_reg32_unbounded(dst_reg); in scalar_min_max_arsh()
7748 __update_reg_bounds(dst_reg); in scalar_min_max_arsh()
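For arithmetic right shift the helpers shift both signed endpoints by the minimum shift amount (lines 7732-7733) and blow away the unsigned range (lines 7740-7741), relying on var_off to refine it afterwards, because the shift preserves sign. A tiny demo of the sign-preserving shift with a constant amount; like the kernel, it assumes the compiler implements signed >> arithmetically, as gcc and clang do:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	int64_t smin = -100, smax = 50;
	unsigned int shift = 2;

	/* arithmetic shift keeps the sign and keeps order */
	printf("[%lld, %lld]\n",
	       (long long)(smin >> shift),   /* -25 */
	       (long long)(smax >> shift));  /*  12 */
	return 0;
}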
7757 struct bpf_reg_state *dst_reg, in adjust_scalar_min_max_vals() argument
7789 __mark_reg_unknown(env, dst_reg); in adjust_scalar_min_max_vals()
7800 __mark_reg_unknown(env, dst_reg); in adjust_scalar_min_max_vals()
7807 __mark_reg_unknown(env, dst_reg); in adjust_scalar_min_max_vals()
7833 scalar32_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7834 scalar_min_max_add(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7835 dst_reg->var_off = tnum_add(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
7838 scalar32_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7839 scalar_min_max_sub(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7840 dst_reg->var_off = tnum_sub(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
7843 dst_reg->var_off = tnum_mul(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
7844 scalar32_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7845 scalar_min_max_mul(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7848 dst_reg->var_off = tnum_and(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
7849 scalar32_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7850 scalar_min_max_and(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7853 dst_reg->var_off = tnum_or(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
7854 scalar32_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7855 scalar_min_max_or(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7858 dst_reg->var_off = tnum_xor(dst_reg->var_off, src_reg.var_off); in adjust_scalar_min_max_vals()
7859 scalar32_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7860 scalar_min_max_xor(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7867 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
7871 scalar32_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7873 scalar_min_max_lsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7880 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
7884 scalar32_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7886 scalar_min_max_rsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7893 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
7897 scalar32_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7899 scalar_min_max_arsh(dst_reg, &src_reg); in adjust_scalar_min_max_vals()
7902 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_scalar_min_max_vals()
7908 zext_32_to_64(dst_reg); in adjust_scalar_min_max_vals()
7910 __update_reg_bounds(dst_reg); in adjust_scalar_min_max_vals()
7911 __reg_deduce_bounds(dst_reg); in adjust_scalar_min_max_vals()
7912 __reg_bound_offset(dst_reg); in adjust_scalar_min_max_vals()
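adjust_scalar_min_max_vals() above follows one shape per opcode: update the known-bits view through the matching tnum_* helper, update both the 32- and 64-bit ranges through the scalar32_/scalar_ helpers, then let the two representations tighten each other (lines 7910-7912). That final refinement is worth a runnable miniature: the tnum's certain bits give a floor (value) and its all-unknown-bits-set form gives a ceiling (value | mask), intersected with the tracked range, as __update_reg_bounds() does:

#include <stdint.h>
#include <stdio.h>

struct tnum { uint64_t value, mask; };

int main(void)
{
	struct tnum t = { .value = 0x4, .mask = 0x3 }; /* 1xx: 4..7 */
	uint64_t umin = 0, umax = 5;                   /* range view */

	if (t.value > umin)                 /* certain bits floor umin */
		umin = t.value;
	if ((t.value | t.mask) < umax)      /* all-unknown-set caps umax */
		umax = t.value | t.mask;

	printf("[%llu, %llu]\n",            /* [4, 5] */
	       (unsigned long long)umin, (unsigned long long)umax);
	return 0;
}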
7924 struct bpf_reg_state *regs = state->regs, *dst_reg, *src_reg; in adjust_reg_min_max_vals() local
7929 	dst_reg = &regs[insn->dst_reg]; in adjust_reg_min_max_vals()
7931 if (dst_reg->type != SCALAR_VALUE) in adjust_reg_min_max_vals()
7932 ptr_reg = dst_reg; in adjust_reg_min_max_vals()
7937 dst_reg->id = 0; in adjust_reg_min_max_vals()
7941 if (dst_reg->type != SCALAR_VALUE) { in adjust_reg_min_max_vals()
7947 mark_reg_unknown(env, regs, insn->dst_reg); in adjust_reg_min_max_vals()
7951 insn->dst_reg, in adjust_reg_min_max_vals()
7959 err = mark_chain_precision(env, insn->dst_reg); in adjust_reg_min_max_vals()
7963 src_reg, dst_reg); in adjust_reg_min_max_vals()
7971 dst_reg, src_reg); in adjust_reg_min_max_vals()
7996 return adjust_scalar_min_max_vals(env, insn, dst_reg, *src_reg); in adjust_reg_min_max_vals()
8024 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_alu_op()
8028 if (is_pointer_value(env, insn->dst_reg)) { in check_alu_op()
8030 insn->dst_reg); in check_alu_op()
8035 err = check_reg_arg(env, insn->dst_reg, DST_OP); in check_alu_op()
8059 err = check_reg_arg(env, insn->dst_reg, DST_OP_NO_MARK); in check_alu_op()
8065 struct bpf_reg_state *dst_reg = regs + insn->dst_reg; in check_alu_op() local
8077 *dst_reg = *src_reg; in check_alu_op()
8078 dst_reg->live |= REG_LIVE_WRITTEN; in check_alu_op()
8079 dst_reg->subreg_def = DEF_NOT_SUBREG; in check_alu_op()
8088 *dst_reg = *src_reg; in check_alu_op()
8093 dst_reg->id = 0; in check_alu_op()
8094 dst_reg->live |= REG_LIVE_WRITTEN; in check_alu_op()
8095 dst_reg->subreg_def = env->insn_idx + 1; in check_alu_op()
8098 insn->dst_reg); in check_alu_op()
8100 zext_32_to_64(dst_reg); in check_alu_op()
8107 mark_reg_unknown(env, regs, insn->dst_reg); in check_alu_op()
8108 regs[insn->dst_reg].type = SCALAR_VALUE; in check_alu_op()
8110 __mark_reg_known(regs + insn->dst_reg, in check_alu_op()
8113 __mark_reg_known(regs + insn->dst_reg, in check_alu_op()
8141 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_alu_op()
8162 err = check_reg_arg(env, insn->dst_reg, DST_OP_NO_MARK); in check_alu_op()
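In the check_alu_op() MOV handling above, a 32-bit move copies the source, drops the id, and marks the subregister definition (lines 8088-8095) before zext_32_to_64() (line 8100), because BPF alu32 writes the low 32 bits and zero-extends to 64. Plain C shows the same effect:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t src = 0xdeadbeefcafebabeULL;
	uint32_t lo = (uint32_t)src;  /* alu32 mov keeps the low half */
	uint64_t dst = lo;            /* implicit zero extension      */

	printf("%#llx\n", (unsigned long long)dst);  /* 0xcafebabe */
	return 0;
}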
8173 struct bpf_reg_state *dst_reg, in __find_good_pkt_pointers() argument
8181 if (reg->type == type && reg->id == dst_reg->id) in __find_good_pkt_pointers()
8189 if (reg->type == type && reg->id == dst_reg->id) in __find_good_pkt_pointers()
8195 struct bpf_reg_state *dst_reg, in find_good_pkt_pointers() argument
8201 if (dst_reg->off < 0 || in find_good_pkt_pointers()
8202 (dst_reg->off == 0 && range_right_open)) in find_good_pkt_pointers()
8206 if (dst_reg->umax_value > MAX_PACKET_OFF || in find_good_pkt_pointers()
8207 dst_reg->umax_value + dst_reg->off > MAX_PACKET_OFF) in find_good_pkt_pointers()
8213 new_range = dst_reg->off; in find_good_pkt_pointers()
8265 __find_good_pkt_pointers(vstate->frame[i], dst_reg, type, in find_good_pkt_pointers()
8471 static int is_pkt_ptr_branch_taken(struct bpf_reg_state *dst_reg, in is_pkt_ptr_branch_taken() argument
8478 pkt = dst_reg; in is_pkt_ptr_branch_taken()
8479 } else if (dst_reg->type == PTR_TO_PACKET_END) { in is_pkt_ptr_branch_taken()
8684 struct bpf_reg_state *dst_reg) in __reg_combine_min_max() argument
8686 src_reg->umin_value = dst_reg->umin_value = max(src_reg->umin_value, in __reg_combine_min_max()
8687 dst_reg->umin_value); in __reg_combine_min_max()
8688 src_reg->umax_value = dst_reg->umax_value = min(src_reg->umax_value, in __reg_combine_min_max()
8689 dst_reg->umax_value); in __reg_combine_min_max()
8690 src_reg->smin_value = dst_reg->smin_value = max(src_reg->smin_value, in __reg_combine_min_max()
8691 dst_reg->smin_value); in __reg_combine_min_max()
8692 src_reg->smax_value = dst_reg->smax_value = min(src_reg->smax_value, in __reg_combine_min_max()
8693 dst_reg->smax_value); in __reg_combine_min_max()
8694 src_reg->var_off = dst_reg->var_off = tnum_intersect(src_reg->var_off, in __reg_combine_min_max()
8695 dst_reg->var_off); in __reg_combine_min_max()
8698 __update_reg_bounds(dst_reg); in __reg_combine_min_max()
8701 __reg_deduce_bounds(dst_reg); in __reg_combine_min_max()
8704 __reg_bound_offset(dst_reg); in __reg_combine_min_max()
8710 __update_reg_bounds(dst_reg); in __reg_combine_min_max()
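__reg_combine_min_max() runs when a conditional jump proves two registers equal (the taken JEQ branch, or the fall-through of JNE): each side's bounds shrink to the intersection, which is exactly what the max()/min() pairs and tnum_intersect() above compute before the usual update/deduce/bound passes on both registers. A standalone demo of the range intersection (illustration-only names):

#include <stdint.h>
#include <stdio.h>

struct range { uint64_t umin, umax; };

static struct range range_intersect(struct range a, struct range b)
{
	struct range r = {
		.umin = a.umin > b.umin ? a.umin : b.umin,
		.umax = a.umax < b.umax ? a.umax : b.umax,
	};
	return r;
}

int main(void)
{
	struct range a = { 0, 100 }, b = { 40, 4000 };
	struct range r = range_intersect(a, b);

	printf("[%llu, %llu]\n",  /* [40, 100] */
	       (unsigned long long)r.umin, (unsigned long long)r.umax);
	return 0;
}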
8811 struct bpf_reg_state *dst_reg, in try_match_pkt_pointers() argument
8825 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
8827 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
8830 find_good_pkt_pointers(this_branch, dst_reg, in try_match_pkt_pointers()
8831 dst_reg->type, false); in try_match_pkt_pointers()
8832 mark_pkt_end(other_branch, insn->dst_reg, true); in try_match_pkt_pointers()
8833 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
8835 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
8846 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
8848 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
8851 find_good_pkt_pointers(other_branch, dst_reg, in try_match_pkt_pointers()
8852 dst_reg->type, true); in try_match_pkt_pointers()
8853 mark_pkt_end(this_branch, insn->dst_reg, false); in try_match_pkt_pointers()
8854 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
8856 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
8867 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
8869 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
8872 find_good_pkt_pointers(this_branch, dst_reg, in try_match_pkt_pointers()
8873 dst_reg->type, true); in try_match_pkt_pointers()
8874 mark_pkt_end(other_branch, insn->dst_reg, false); in try_match_pkt_pointers()
8875 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
8877 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
8888 if ((dst_reg->type == PTR_TO_PACKET && in try_match_pkt_pointers()
8890 (dst_reg->type == PTR_TO_PACKET_META && in try_match_pkt_pointers()
8893 find_good_pkt_pointers(other_branch, dst_reg, in try_match_pkt_pointers()
8894 dst_reg->type, false); in try_match_pkt_pointers()
8895 mark_pkt_end(this_branch, insn->dst_reg, true); in try_match_pkt_pointers()
8896 } else if ((dst_reg->type == PTR_TO_PACKET_END && in try_match_pkt_pointers()
8898 (reg_is_init_pkt_pointer(dst_reg, PTR_TO_PACKET) && in try_match_pkt_pointers()
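The try_match_pkt_pointers() cases above recognize comparisons such as "if (pkt + N <= pkt_end)" in either operand order and, on the branch where the comparison holds, call find_good_pkt_pointers() to grant every register carrying the same packet-pointer id a verified access range. A toy model of that idea; struct pkt_ptr, grant(), and pkt_range_ok() are illustration-only names, not the kernel's API:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct pkt_ptr { int id; int off; };

static int granted_range[16];  /* per-id proven range, toy version */

static void grant(struct pkt_ptr p, int range)
{
	granted_range[p.id] = range;  /* applies to every copy with this id */
}

static bool pkt_range_ok(struct pkt_ptr p, int access_size)
{
	return p.off + access_size <= granted_range[p.id];
}

int main(void)
{
	struct pkt_ptr p = { .id = 1, .off = 0 };

	grant(p, 14);                           /* proved pkt + 14 <= end */
	printf("%d %d\n", pkt_range_ok(p, 14),  /* 1: within proven range */
	       pkt_range_ok(p, 20));            /* 0: beyond the proof    */
	return 0;
}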
8945 struct bpf_reg_state *dst_reg, *other_branch_regs, *src_reg = NULL; in check_cond_jmp_op() local
8982 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in check_cond_jmp_op()
8986 	dst_reg = &regs[insn->dst_reg]; in check_cond_jmp_op()
8990 pred = is_branch_taken(dst_reg, insn->imm, opcode, is_jmp32); in check_cond_jmp_op()
8993 pred = is_branch_taken(dst_reg, in check_cond_jmp_op()
8999 pred = is_branch_taken(dst_reg, in check_cond_jmp_op()
9003 } else if (reg_is_pkt_pointer_any(dst_reg) && in check_cond_jmp_op()
9006 pred = is_pkt_ptr_branch_taken(dst_reg, src_reg, opcode); in check_cond_jmp_op()
9013 if (!__is_pointer_value(false, dst_reg)) in check_cond_jmp_op()
9014 err = mark_chain_precision(env, insn->dst_reg); in check_cond_jmp_op()
9062 if (dst_reg->type == SCALAR_VALUE && in check_cond_jmp_op()
9067 reg_set_min_max(&other_branch_regs[insn->dst_reg], in check_cond_jmp_op()
9068 dst_reg, in check_cond_jmp_op()
9072 else if (tnum_is_const(dst_reg->var_off) || in check_cond_jmp_op()
9074 tnum_is_const(tnum_subreg(dst_reg->var_off)))) in check_cond_jmp_op()
9077 dst_reg->var_off.value, in check_cond_jmp_op()
9078 tnum_subreg(dst_reg->var_off).value, in check_cond_jmp_op()
9084 &other_branch_regs[insn->dst_reg], in check_cond_jmp_op()
9085 src_reg, dst_reg, opcode); in check_cond_jmp_op()
9093 } else if (dst_reg->type == SCALAR_VALUE) { in check_cond_jmp_op()
9094 reg_set_min_max(&other_branch_regs[insn->dst_reg], in check_cond_jmp_op()
9095 dst_reg, insn->imm, (u32)insn->imm, in check_cond_jmp_op()
9099 if (dst_reg->type == SCALAR_VALUE && dst_reg->id && in check_cond_jmp_op()
9100 !WARN_ON_ONCE(dst_reg->id != other_branch_regs[insn->dst_reg].id)) { in check_cond_jmp_op()
9101 find_equal_scalars(this_branch, dst_reg); in check_cond_jmp_op()
9102 find_equal_scalars(other_branch, &other_branch_regs[insn->dst_reg]); in check_cond_jmp_op()
9111 reg_type_may_be_null(dst_reg->type)) { in check_cond_jmp_op()
9115 mark_ptr_or_null_regs(this_branch, insn->dst_reg, in check_cond_jmp_op()
9117 mark_ptr_or_null_regs(other_branch, insn->dst_reg, in check_cond_jmp_op()
9119 	} else if (!try_match_pkt_pointers(insn, dst_reg, &regs[insn->src_reg], in check_cond_jmp_op()
9121 is_pointer_value(env, insn->dst_reg)) { in check_cond_jmp_op()
9123 insn->dst_reg); in check_cond_jmp_op()
9136 struct bpf_reg_state *dst_reg; in check_ld_imm() local
9149 err = check_reg_arg(env, insn->dst_reg, DST_OP); in check_ld_imm()
9153 	dst_reg = &regs[insn->dst_reg]; in check_ld_imm()
9157 dst_reg->type = SCALAR_VALUE; in check_ld_imm()
9158 		__mark_reg_known(&regs[insn->dst_reg], imm); in check_ld_imm()
9163 mark_reg_known_zero(env, regs, insn->dst_reg); in check_ld_imm()
9165 dst_reg->type = aux->btf_var.reg_type; in check_ld_imm()
9166 switch (dst_reg->type) { in check_ld_imm()
9168 dst_reg->mem_size = aux->btf_var.mem_size; in check_ld_imm()
9172 dst_reg->btf = aux->btf_var.btf; in check_ld_imm()
9173 dst_reg->btf_id = aux->btf_var.btf_id; in check_ld_imm()
9195 dst_reg->type = PTR_TO_FUNC; in check_ld_imm()
9196 dst_reg->subprogno = subprogno; in check_ld_imm()
9201 mark_reg_known_zero(env, regs, insn->dst_reg); in check_ld_imm()
9202 dst_reg->map_ptr = map; in check_ld_imm()
9206 dst_reg->type = PTR_TO_MAP_VALUE; in check_ld_imm()
9207 dst_reg->off = aux->map_off; in check_ld_imm()
9209 dst_reg->id = ++env->id_gen; in check_ld_imm()
9212 dst_reg->type = CONST_PTR_TO_MAP; in check_ld_imm()
9265 if (insn->dst_reg != BPF_REG_0 || insn->off != 0 || in check_ld_abs()
10969 err = check_reg_arg(env, insn->dst_reg, DST_OP_NO_MARK); in do_check()
10980 BPF_READ, insn->dst_reg, false); in do_check()
11026 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in do_check()
11030 dst_reg_type = regs[insn->dst_reg].type; in do_check()
11033 err = check_mem_access(env, env->insn_idx, insn->dst_reg, in do_check()
11055 err = check_reg_arg(env, insn->dst_reg, SRC_OP); in do_check()
11059 if (is_ctx_reg(env, insn->dst_reg)) { in do_check()
11061 insn->dst_reg, in do_check()
11062 reg_type_str[reg_state(env, insn->dst_reg)->type]); in do_check()
11067 err = check_mem_access(env, env->insn_idx, insn->dst_reg, in do_check()
11083 insn->dst_reg != BPF_REG_0 || in do_check()
11107 insn->dst_reg != BPF_REG_0 || in do_check()
11120 insn->dst_reg != BPF_REG_0 || in do_check()
11514 insn[1].dst_reg != 0 || insn[1].src_reg != 0 || in resolve_pseudo_ldimm64()
12106 rnd_hi32_patch[3].dst_reg = load_reg; in opt_subreg_zext_lo32_rnd_hi32()
12131 zext_patch[1].dst_reg = load_reg; in opt_subreg_zext_lo32_rnd_hi32()
12312 insn->dst_reg, in convert_ctx_accesses()
12314 insn_buf[cnt++] = BPF_ALU32_IMM(BPF_AND, insn->dst_reg, in convert_ctx_accesses()
12319 insn->dst_reg, in convert_ctx_accesses()
12321 insn_buf[cnt++] = BPF_ALU64_IMM(BPF_AND, insn->dst_reg, in convert_ctx_accesses()
12668 BPF_ALU32_REG(BPF_XOR, insn->dst_reg, insn->dst_reg), in do_misc_fixups()
12679 BPF_MOV32_REG(insn->dst_reg, insn->dst_reg), in do_misc_fixups()
12735 off_reg = issrc ? insn->src_reg : insn->dst_reg; in do_misc_fixups()
12749 *patch++ = BPF_MOV64_REG(insn->dst_reg, insn->src_reg); in do_misc_fixups()