Lines matching refs: umax_value

668 				if (reg->smax_value != reg->umax_value &&  in print_verifier_state()
675 if (reg->umax_value != U64_MAX) in print_verifier_state()
677 (unsigned long long)reg->umax_value); in print_verifier_state()
696 if (reg->u32_max_value != reg->umax_value && in print_verifier_state()
1079 reg->umax_value = imm; in ___mark_reg_known()
1214 reg->umax_value = U64_MAX; in __mark_reg_unbounded()
1227 reg->umax_value = U64_MAX; in __mark_reg64_unbounded()
1262 reg->umax_value = min(reg->umax_value, in __update_reg64_bounds()
1317 reg->smax_value = reg->umax_value = min_t(u64, reg->smax_value, in __reg64_deduce_bounds()
1318 reg->umax_value); in __reg64_deduce_bounds()
1324 if ((s64)reg->umax_value >= 0) { in __reg64_deduce_bounds()
1329 reg->smax_value = reg->umax_value = min_t(u64, reg->smax_value, in __reg64_deduce_bounds()
1330 reg->umax_value); in __reg64_deduce_bounds()
1337 reg->smax_value = reg->umax_value; in __reg64_deduce_bounds()
1352 reg->umax_value)); in __reg_bound_offset()
1363 reg->umax_value = reg->u32_max_value; in __reg_assign_32_into_64()
1425 if (__reg64_bound_u32(reg->umin_value) && __reg64_bound_u32(reg->umax_value)) { in __reg_combine_64_into_32()
1427 reg->u32_max_value = (u32)reg->umax_value; in __reg_combine_64_into_32()
2609 reg->umin_value == 0 && reg->umax_value == U64_MAX && in __is_scalar_unbounded()
3232 if (reg->umax_value >= BPF_MAX_VAR_OFF) { in check_mem_region_access()
3237 err = __check_mem_access(env, regno, reg->umax_value + off, size, in check_mem_region_access()
3272 lock < reg->umax_value + off + size) { in check_map_access()
3281 t < reg->umax_value + off + size) { in check_map_access()
3375 off + reg->umax_value + size - 1); in check_packet_access()
3866 if ((reg->umin_value & ~mask) == (reg->umax_value & ~mask)) { in coerce_reg_to_size()
3868 reg->umax_value &= mask; in coerce_reg_to_size()
3871 reg->umax_value = mask; in coerce_reg_to_size()
3874 reg->smax_value = reg->umax_value; in coerce_reg_to_size()
5151 meta->msize_max_value = reg->umax_value; in check_func_arg()
5178 if (reg->umax_value >= BPF_MAX_VAR_SIZ) { in check_func_arg()
5184 reg->umax_value, in check_func_arg()
6657 ptr_reg->umax_value) + ptr_reg->off; in retrieve_ptr_limit()
6964 u64 umin_val = off_reg->umin_value, umax_val = off_reg->umax_value, in adjust_ptr_min_max_vals()
6965 umin_ptr = ptr_reg->umin_value, umax_ptr = ptr_reg->umax_value; in adjust_ptr_min_max_vals()
7051 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
7077 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
7080 dst_reg->umax_value = umax_ptr + umax_val; in adjust_ptr_min_max_vals()
7113 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
7135 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
7139 dst_reg->umax_value = umax_ptr - umin_val; in adjust_ptr_min_max_vals()
7216 u64 umax_val = src_reg->umax_value; in scalar_min_max_add()
7227 dst_reg->umax_value + umax_val < umax_val) { in scalar_min_max_add()
7229 dst_reg->umax_value = U64_MAX; in scalar_min_max_add()
7232 dst_reg->umax_value += umax_val; in scalar_min_max_add()
7270 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
7284 dst_reg->umax_value = U64_MAX; in scalar_min_max_sub()
7288 dst_reg->umax_value -= umin_val; in scalar_min_max_sub()
7329 u64 umax_val = src_reg->umax_value; in scalar_min_max_mul()
7339 if (umax_val > U32_MAX || dst_reg->umax_value > U32_MAX) { in scalar_min_max_mul()
7345 dst_reg->umax_value *= umax_val; in scalar_min_max_mul()
7346 if (dst_reg->umax_value > S64_MAX) { in scalar_min_max_mul()
7352 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_mul()
7396 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
7407 dst_reg->umax_value = min(dst_reg->umax_value, umax_val); in scalar_min_max_and()
7419 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_and()
7476 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_or()
7488 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_or()
7538 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_xor()
7545 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_xor()
7611 if (dst_reg->umax_value > 1ULL << (63 - umax_val)) { in __scalar64_min_max_lsh()
7613 dst_reg->umax_value = U64_MAX; in __scalar64_min_max_lsh()
7616 dst_reg->umax_value <<= umax_val; in __scalar64_min_max_lsh()
7623 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
7670 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
7691 dst_reg->umax_value >>= umin_val; in scalar_min_max_rsh()
7741 dst_reg->umax_value = U64_MAX; in scalar_min_max_arsh()
7774 umax_val = src_reg.umax_value; in adjust_scalar_min_max_vals()
8206 if (dst_reg->umax_value > MAX_PACKET_OFF || in find_good_pkt_pointers()
8207 dst_reg->umax_value + dst_reg->off > MAX_PACKET_OFF) in find_good_pkt_pointers()
8365 else if (reg->umax_value <= val) in is_branch64_taken()
8375 if (reg->umax_value < val) in is_branch64_taken()
8389 else if (reg->umax_value < val) in is_branch64_taken()
8399 if (reg->umax_value <= val) in is_branch64_taken()
8586 false_reg->umax_value = min(false_reg->umax_value, false_umax); in reg_set_min_max()
8625 true_reg->umax_value = min(true_reg->umax_value, true_umax); in reg_set_min_max()
8688 src_reg->umax_value = dst_reg->umax_value = min(src_reg->umax_value, in __reg_combine_min_max()
8689 dst_reg->umax_value); in __reg_combine_min_max()
10063 old->umax_value >= cur->umax_value && in range_within()
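
The scalar_min_max_add() matches above show the overflow-guarded pattern the verifier uses to keep umax_value sound when two tracked scalars are added: if either unsigned bound wraps, the destination range is widened to [0, U64_MAX]; otherwise both bounds are simply added. Below is a minimal standalone sketch of that pattern. struct scalar_bounds and bounds_add() are hypothetical names used only for illustration, not the kernel's types, and the sketch tracks just the unsigned 64-bit bounds.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical, simplified register state: only the unsigned 64-bit bounds
 * (umin_value/umax_value), not the kernel's struct bpf_reg_state. */
struct scalar_bounds {
	uint64_t umin_value;
	uint64_t umax_value;
};

/* Overflow-guarded bound update in the spirit of scalar_min_max_add():
 * unsigned addition in C wraps, so "a + b < b" detects the wrap; when a
 * bound can wrap, nothing is known about the result and the range is
 * widened to [0, UINT64_MAX]. Otherwise the bounds are added directly. */
static void bounds_add(struct scalar_bounds *dst, const struct scalar_bounds *src)
{
	if (dst->umin_value + src->umin_value < src->umin_value ||
	    dst->umax_value + src->umax_value < src->umax_value) {
		dst->umin_value = 0;
		dst->umax_value = UINT64_MAX;
	} else {
		dst->umin_value += src->umin_value;
		dst->umax_value += src->umax_value;
	}
}

int main(void)
{
	struct scalar_bounds dst = { .umin_value = 1, .umax_value = UINT64_MAX - 1 };
	struct scalar_bounds src = { .umin_value = 0, .umax_value = 16 };

	/* umax_value + 16 wraps, so the range collapses to [0, UINT64_MAX]. */
	bounds_add(&dst, &src);
	printf("umin=%llu umax=%llu\n",
	       (unsigned long long)dst.umin_value,
	       (unsigned long long)dst.umax_value);
	return 0;
}

Running the sketch shows the overflow case widening the range to the full u64 span, which mirrors why the other scalar_min_max_*() helpers in the listing fall back to U64_MAX whenever a result could wrap.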