Lines matching refs: umax_value. All hits below are from the BPF verifier, kernel/bpf/verifier.c; the leading number on each hit is its line number in that file.

900 if (reg->smax_value != reg->umax_value && in print_verifier_state()
905 if (reg->umax_value != U64_MAX) in print_verifier_state()
906 verbose_a("umax=%llu", (unsigned long long)reg->umax_value); in print_verifier_state()
922 if (reg->u32_max_value != reg->umax_value && in print_verifier_state()
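The print_verifier_state hits above (900-922) share one goal: keep verifier logs short by printing a bound only when it carries information, i.e. when it differs from the type's extreme and from a bound already printed (smax vs umax at 900, u32_max vs umax at 922). A minimal user-space sketch of that filtering, on a simplified stand-in struct (names here are illustrative, not the kernel's):

    #include <stdio.h>
    #include <stdint.h>

    struct bounds { int64_t smax; uint64_t umax; };

    static void print_bounds(const struct bounds *b)
    {
        /* smax only adds information if it differs from umax and is not
         * the trivial INT64_MAX. */
        if ((uint64_t)b->smax != b->umax && b->smax != INT64_MAX)
            printf("smax=%lld ", (long long)b->smax);
        /* umax only adds information if the register is not unbounded. */
        if (b->umax != UINT64_MAX)
            printf("umax=%llu ", (unsigned long long)b->umax);
    }
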
1337 reg->umax_value = imm; in ___mark_reg_known()
1450 reg->umax_value = U64_MAX; in __mark_reg_unbounded()
1463 reg->umax_value = U64_MAX; in __mark_reg64_unbounded()
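___mark_reg_known (1337) and the unbounded helpers (1450, 1463) are the two extremes of the bounds lifecycle: a known constant collapses every view of the register to a single value, an unknown value widens every view to its full range. A sketch on a simplified stand-in for the bounds half of struct bpf_reg_state:

    #include <stdint.h>

    struct bounds {
        int64_t  smin, smax;   /* signed 64-bit range   */
        uint64_t umin, umax;   /* unsigned 64-bit range */
    };

    /* Constant: every view of the register is exactly `imm`. */
    static void mark_known(struct bounds *b, uint64_t imm)
    {
        b->smin = (int64_t)imm;
        b->smax = (int64_t)imm;
        b->umin = imm;
        b->umax = imm;
    }

    /* Unknown: widen every view to its full range. */
    static void mark_unbounded(struct bounds *b)
    {
        b->smin = INT64_MIN;
        b->smax = INT64_MAX;
        b->umin = 0;
        b->umax = UINT64_MAX;
    }
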
1498 reg->umax_value = min(reg->umax_value, in __update_reg64_bounds()
1553 reg->smax_value = reg->umax_value = min_t(u64, reg->smax_value, in __reg64_deduce_bounds()
1554 reg->umax_value); in __reg64_deduce_bounds()
1560 if ((s64)reg->umax_value >= 0) { in __reg64_deduce_bounds()
1565 reg->smax_value = reg->umax_value = min_t(u64, reg->smax_value, in __reg64_deduce_bounds()
1566 reg->umax_value); in __reg64_deduce_bounds()
1573 reg->smax_value = reg->umax_value; in __reg64_deduce_bounds()
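Line 1498 tightens umax against what the tnum allows; the __reg64_deduce_bounds hits (1553-1573) then let the signed and unsigned ranges tighten each other. A hedged reconstruction of that case analysis on the simplified struct above; it follows the visible source lines, but treat it as a sketch rather than the verifier itself:

    #include <stdint.h>

    struct bounds { int64_t smin, smax; uint64_t umin, umax; };

    static uint64_t min_u64(uint64_t a, uint64_t b) { return a < b ? a : b; }
    static uint64_t max_u64(uint64_t a, uint64_t b) { return a > b ? a : b; }

    static void deduce_bounds(struct bounds *b)
    {
        /* If the signed range cannot cross the sign boundary, signed and
         * unsigned orderings agree, so each pair tightens the other. */
        if (b->smin >= 0 || b->smax < 0) {
            b->umin = max_u64((uint64_t)b->smin, b->umin);
            b->umax = min_u64((uint64_t)b->smax, b->umax);
            b->smin = (int64_t)b->umin;
            b->smax = (int64_t)b->umax;
            return;
        }
        /* Signed range straddles zero: learn the sign from the unsigned
         * side instead. */
        if ((int64_t)b->umax >= 0) {
            /* Whole unsigned range lies in the non-negative half. */
            b->smin = (int64_t)b->umin;
            b->umax = min_u64((uint64_t)b->smax, b->umax);
            b->smax = (int64_t)b->umax;
        } else if ((int64_t)b->umin < 0) {
            /* Whole unsigned range lies in the negative half. */
            b->umin = max_u64((uint64_t)b->smin, b->umin);
            b->smin = (int64_t)b->umin;
            b->smax = (int64_t)b->umax;
        }
    }
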
1588 reg->umax_value)); in __reg_bound_offset()
1619 reg->umax_value = reg->u32_max_value; in __reg_assign_32_into_64()
1673 if (__reg64_bound_u32(reg->umin_value) && __reg64_bound_u32(reg->umax_value)) { in __reg_combine_64_into_32()
1675 reg->u32_max_value = (u32)reg->umax_value; in __reg_combine_64_into_32()
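Lines 1619 and 1673-1675 move bounds between the 32-bit subregister view and the full 64-bit view: 32 -> 64 is always safe because subregister writes zero-extend, while 64 -> 32 is only taken when both 64-bit edges already fit in 32 bits. A sketch with simplified, illustrative field names:

    #include <stdint.h>
    #include <stdbool.h>

    struct bounds {
        uint64_t umin, umax;         /* 64-bit view              */
        uint32_t u32_min, u32_max;   /* 32-bit subregister view  */
    };

    static bool fits_u32(uint64_t v) { return v <= UINT32_MAX; }

    /* Subregister writes zero-extend, so 32-bit bounds carry over. */
    static void assign_32_into_64(struct bounds *b)
    {
        b->umin = b->u32_min;
        b->umax = b->u32_max;
    }

    /* Only narrow 64 -> 32 when truncation cannot change the range. */
    static void combine_64_into_32(struct bounds *b)
    {
        if (fits_u32(b->umin) && fits_u32(b->umax)) {
            b->u32_min = (uint32_t)b->umin;
            b->u32_max = (uint32_t)b->umax;
        } else {
            b->u32_min = 0;
            b->u32_max = UINT32_MAX;
        }
    }
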
2984 reg->umin_value == 0 && reg->umax_value == U64_MAX && in __is_scalar_unbounded()
3631 if (reg->umax_value >= BPF_MAX_VAR_OFF) { in check_mem_region_access()
3636 err = __check_mem_access(env, regno, reg->umax_value + off, size, in check_mem_region_access()
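check_mem_region_access (3631-3636) shows the variable-offset pattern used throughout the access checks: cap the variable part (BPF_MAX_VAR_OFF), then verify the access at both range edges, in particular the worst case umax + off. The same worst-case sum reappears in check_map_access (3837-3859) and check_packet_access (3965). A hedged sketch of that shape; access_ok and the parameter names are hypothetical:

    #include <stdint.h>
    #include <stdbool.h>

    /* Hypothetical helper: is [off, off + size) inside `len` bytes? */
    static bool access_ok(uint64_t off, uint64_t size, uint64_t len)
    {
        return size > 0 && off + size >= off && off + size <= len;
    }

    /* Variable-offset access: the register's range is assumed already
     * capped (the kernel rejects umax >= BPF_MAX_VAR_OFF first), and the
     * access must be safe at both edges. */
    static bool region_access_ok(uint64_t umin, uint64_t umax,
                                 uint64_t off, uint64_t size, uint64_t len)
    {
        return access_ok(umin + off, size, len) &&
               access_ok(umax + off, size, len);
    }
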
3837 lock < reg->umax_value + off + size) { in check_map_access()
3846 t < reg->umax_value + off + size) { in check_map_access()
3859 p < reg->umax_value + off + size) { in check_map_access()
3965 off + reg->umax_value + size - 1); in check_packet_access()
4432 if ((reg->umin_value & ~mask) == (reg->umax_value & ~mask)) { in coerce_reg_to_size()
4434 reg->umax_value &= mask; in coerce_reg_to_size()
4437 reg->umax_value = mask; in coerce_reg_to_size()
4440 reg->smax_value = reg->umax_value; in coerce_reg_to_size()
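The coerce_reg_to_size cluster (4432-4440) is how bounds survive a narrowing to 1, 2 or 4 bytes: if umin and umax agree on every bit that truncation discards (4432), the masked bounds stay exact; otherwise the value can wrap inside the window and only [0, mask] is safe. A simplified sketch:

    #include <stdint.h>

    struct bounds { int64_t smin, smax; uint64_t umin, umax; };

    /* Truncate a register's bounds to `size` bytes (1, 2 or 4). */
    static void coerce_to_size(struct bounds *b, int size)
    {
        uint64_t mask = ~0ULL >> (64 - size * 8);

        if ((b->umin & ~mask) == (b->umax & ~mask)) {
            /* High bits identical: masking keeps the range exact. */
            b->umin &= mask;
            b->umax &= mask;
        } else {
            /* Range spans a truncation boundary: anything is possible. */
            b->umin = 0;
            b->umax = mask;
        }
        /* mask <= UINT32_MAX here, so the same pair is a valid signed range. */
        b->smin = (int64_t)b->umin;
        b->smax = (int64_t)b->umax;
    }
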
5305 meta->msize_max_value = reg->umax_value; in check_mem_size_reg()
5332 if (reg->umax_value >= BPF_MAX_VAR_SIZ) { in check_mem_size_reg()
5338 reg->umax_value, in check_mem_size_reg()
7907 ptr_reg->umax_value) + ptr_reg->off; in retrieve_ptr_limit()
8214 u64 umin_val = off_reg->umin_value, umax_val = off_reg->umax_value, in adjust_ptr_min_max_vals()
8215 umin_ptr = ptr_reg->umin_value, umax_ptr = ptr_reg->umax_value; in adjust_ptr_min_max_vals()
8300 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
8326 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
8329 dst_reg->umax_value = umax_ptr + umax_val; in adjust_ptr_min_max_vals()
8362 dst_reg->umax_value = umax_ptr; in adjust_ptr_min_max_vals()
8384 dst_reg->umax_value = U64_MAX; in adjust_ptr_min_max_vals()
8388 dst_reg->umax_value = umax_ptr - umin_val; in adjust_ptr_min_max_vals()
8461 u64 umax_val = src_reg->umax_value; in scalar_min_max_add()
8472 dst_reg->umax_value + umax_val < umax_val) { in scalar_min_max_add()
8474 dst_reg->umax_value = U64_MAX; in scalar_min_max_add()
8477 dst_reg->umax_value += umax_val; in scalar_min_max_add()
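scalar_min_max_add (8461-8477) uses the classic wraparound test for unsigned addition: if a + b < b, the sum overflowed. The same test guards pointer arithmetic in adjust_ptr_min_max_vals above (8326-8329). A standalone sketch:

    #include <stdint.h>

    struct bounds { uint64_t umin, umax; };

    /* dst += src, propagating unsigned bounds conservatively. */
    static void bounds_add(struct bounds *dst, const struct bounds *src)
    {
        if (dst->umin + src->umin < src->umin ||
            dst->umax + src->umax < src->umax) {
            /* Either edge may wrap: give up and go unbounded. */
            dst->umin = 0;
            dst->umax = UINT64_MAX;
        } else {
            dst->umin += src->umin;
            dst->umax += src->umax;
        }
    }
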
8515 u64 umax_val = src_reg->umax_value; in scalar_min_max_sub()
8529 dst_reg->umax_value = U64_MAX; in scalar_min_max_sub()
8533 dst_reg->umax_value -= umin_val; in scalar_min_max_sub()
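Subtraction (8515-8533) is asymmetric: the smallest result is dst_min - src_max, the largest is dst_max - src_min, and underflow is possible as soon as dst_min < src_max. A sketch:

    #include <stdint.h>

    struct bounds { uint64_t umin, umax; };

    /* dst -= src, propagating unsigned bounds conservatively. */
    static void bounds_sub(struct bounds *dst, const struct bounds *src)
    {
        if (dst->umin < src->umax) {
            /* The low edge can underflow: range is unknown. */
            dst->umin = 0;
            dst->umax = UINT64_MAX;
        } else {
            /* No wrap possible while the tracked bounds are consistent. */
            dst->umin -= src->umax;   /* smallest minus largest */
            dst->umax -= src->umin;   /* largest minus smallest */
        }
    }
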
8574 u64 umax_val = src_reg->umax_value; in scalar_min_max_mul()
8584 if (umax_val > U32_MAX || dst_reg->umax_value > U32_MAX) { in scalar_min_max_mul()
8590 dst_reg->umax_value *= umax_val; in scalar_min_max_mul()
8591 if (dst_reg->umax_value > S64_MAX) { in scalar_min_max_mul()
8597 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_mul()
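Multiplication (8574-8597) sidesteps 64-bit overflow analysis entirely: unless both maxima fit in 32 bits the product may wrap and the register goes unbounded (8584), and even then the signed bounds are only copied over while the product stays at or below S64_MAX (8591). A sketch:

    #include <stdint.h>

    struct bounds { int64_t smin, smax; uint64_t umin, umax; };

    static void bounds_mul(struct bounds *dst, const struct bounds *src)
    {
        if (src->umax > UINT32_MAX || dst->umax > UINT32_MAX) {
            /* A 64x64 product could wrap: know nothing. */
            dst->smin = INT64_MIN; dst->smax = INT64_MAX;
            dst->umin = 0;         dst->umax = UINT64_MAX;
            return;
        }
        /* Both maxima fit in 32 bits, so both products fit in 64 bits. */
        dst->umin *= src->umin;
        dst->umax *= src->umax;
        if (dst->umax > (uint64_t)INT64_MAX) {
            /* Fine as u64, but would read negative as s64. */
            dst->smin = INT64_MIN;
            dst->smax = INT64_MAX;
        } else {
            dst->smin = (int64_t)dst->umin;
            dst->smax = (int64_t)dst->umax;
        }
    }
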
8641 u64 umax_val = src_reg->umax_value; in scalar_min_max_and()
8652 dst_reg->umax_value = min(dst_reg->umax_value, umax_val); in scalar_min_max_and()
8664 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_and()
8721 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_or()
8733 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_or()
8783 dst_reg->umax_value = dst_reg->var_off.value | dst_reg->var_off.mask; in scalar_min_max_xor()
8790 dst_reg->smax_value = dst_reg->umax_value; in scalar_min_max_xor()
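The bitwise updates mix two sources of truth: AND bounds its result by the operands themselves (8652: x & y can never exceed either input), while OR and XOR read the maximum off the tnum (8721, 8783): with known-one bits in value and unknown bits in mask, the largest reachable value is value | mask and the smallest is value alone. A minimal illustration with a tnum stand-in:

    #include <stdint.h>

    /* Minimal stand-in for the kernel's tnum: `value` holds the bits
     * known to be 1, `mask` the bits whose state is unknown. */
    struct tnum { uint64_t value, mask; };

    /* Largest value consistent with the tnum: set every unknown bit. */
    static uint64_t tnum_umax(struct tnum t) { return t.value | t.mask; }

    /* Smallest value consistent with the tnum: clear every unknown bit. */
    static uint64_t tnum_umin(struct tnum t) { return t.value; }

    /* AND can only clear bits, so either operand's max bounds the result. */
    static uint64_t and_umax(uint64_t a_umax, uint64_t b_umax)
    {
        return a_umax < b_umax ? a_umax : b_umax;
    }
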
8856 if (dst_reg->umax_value > 1ULL << (63 - umax_val)) { in __scalar64_min_max_lsh()
8858 dst_reg->umax_value = U64_MAX; in __scalar64_min_max_lsh()
8861 dst_reg->umax_value <<= umax_val; in __scalar64_min_max_lsh()
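The left-shift guard (8856-8861) asks whether the top bit could be shifted out: once umax exceeds 1 << (63 - shift), shifting may lose bits and the range collapses. A sketch for the constant-shift case (the kernel handles a [umin, umax] shift range and rejects shifts >= 64 earlier, in adjust_scalar_min_max_vals):

    #include <stdint.h>

    struct bounds { uint64_t umin, umax; };

    /* dst <<= shift, for a shift amount already checked to be 0..63. */
    static void bounds_lsh(struct bounds *dst, unsigned int shift)
    {
        if (dst->umax > 1ULL << (63 - shift)) {
            /* The top bit could be shifted out: know nothing. */
            dst->umin = 0;
            dst->umax = UINT64_MAX;
        } else {
            dst->umin <<= shift;
            dst->umax <<= shift;
        }
    }
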
8868 u64 umax_val = src_reg->umax_value; in scalar_min_max_lsh()
8915 u64 umax_val = src_reg->umax_value; in scalar_min_max_rsh()
8936 dst_reg->umax_value >>= umin_val; in scalar_min_max_rsh()
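Logical right shift (8915-8936) can never overflow, but note the cross-pairing: the smallest result comes from shifting the minimum by the largest shift, the largest from shifting the maximum by the smallest shift (8936). A sketch, assuming the caller already rejected shift amounts >= 64 as the verifier does:

    #include <stdint.h>

    struct bounds { uint64_t umin, umax; };

    /* dst >>= src, where the shift amount itself has a [umin, umax] range. */
    static void bounds_rsh(struct bounds *dst, const struct bounds *shift)
    {
        dst->umin >>= shift->umax;   /* smallest value, shifted the most  */
        dst->umax >>= shift->umin;   /* largest value, shifted the least  */
    }
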
8986 dst_reg->umax_value = U64_MAX; in scalar_min_max_arsh()
9019 umax_val = src_reg.umax_value; in adjust_scalar_min_max_vals()
9429 if (dst_reg->umax_value > MAX_PACKET_OFF || in find_good_pkt_pointers()
9430 dst_reg->umax_value + dst_reg->off > MAX_PACKET_OFF) in find_good_pkt_pointers()
9590 else if (reg->umax_value <= val) in is_branch64_taken()
9600 if (reg->umax_value < val) in is_branch64_taken()
9614 else if (reg->umax_value < val) in is_branch64_taken()
9624 if (reg->umax_value <= val) in is_branch64_taken()
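is_branch64_taken (9590-9624) turns bounds into dead-branch elimination: a comparison against a constant is decided statically whenever the whole [umin, umax] interval falls on one side of it. A sketch of the unsigned greater-than case; the other opcodes follow the same pattern with the inequalities visible above:

    #include <stdint.h>

    /* Decide `reg > val` from tracked bounds: 1 if always taken,
     * 0 if never taken, -1 if the bounds straddle val. */
    static int branch_jgt_taken(uint64_t umin, uint64_t umax, uint64_t val)
    {
        if (umin > val)
            return 1;
        if (umax <= val)
            return 0;
        return -1;
    }
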
9818 false_reg->umax_value = min(false_reg->umax_value, false_umax); in reg_set_min_max()
9857 true_reg->umax_value = min(true_reg->umax_value, true_umax); in reg_set_min_max()
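reg_set_min_max (9818, 9857) is the converse of branch pruning: when a conditional cannot be decided, each outgoing edge still narrows the register. Falling through `if (r > val)`, for instance, proves r <= val. A sketch of that single case:

    #include <stdint.h>

    struct bounds { uint64_t umin, umax; };

    /* On the false edge of `if (r > val)` we learn r <= val. */
    static void refine_false_jgt(struct bounds *r, uint64_t val)
    {
        if (val < r->umax)
            r->umax = val;   /* umax = min(umax, val) */
    }
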
9920 src_reg->umax_value = dst_reg->umax_value = min(src_reg->umax_value, in __reg_combine_min_max()
9921 dst_reg->umax_value); in __reg_combine_min_max()
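__reg_combine_min_max (9920-9921) handles equality: after a taken JEQ both registers hold the same value, so each range shrinks to the intersection of the two. A sketch:

    #include <stdint.h>

    struct bounds { uint64_t umin, umax; };

    /* After a taken `if (r1 == r2)`, intersect the two ranges. */
    static void combine_on_jeq(struct bounds *a, struct bounds *b)
    {
        uint64_t umin = a->umin > b->umin ? a->umin : b->umin;
        uint64_t umax = a->umax < b->umax ? a->umax : b->umax;

        a->umin = b->umin = umin;
        a->umax = b->umax = umax;
    }
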
11362 old->umax_value >= cur->umax_value && in range_within()
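range_within (11362) is the state-pruning test: an already-verified (old) register state is only reusable if it covers everything the current register can be, so old's range must contain cur's. On the unsigned bounds alone:

    #include <stdint.h>
    #include <stdbool.h>

    struct bounds { uint64_t umin, umax; };

    /* true if every value cur can hold was already covered by old. */
    static bool range_within(const struct bounds *old,
                             const struct bounds *cur)
    {
        return old->umin <= cur->umin && old->umax >= cur->umax;
    }
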