Lines matching refs: BPF_REG_SIZE
351 for (i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) { in print_verifier_state()
354 (-i - 1) * BPF_REG_SIZE); in print_verifier_state()
360 verbose(env, " fp%d=0", (-i - 1) * BPF_REG_SIZE); in print_verifier_state()
376 sizeof(*src->stack) * (src->allocated_stack / BPF_REG_SIZE)); in copy_stack_state()
392 int slot = size / BPF_REG_SIZE; in realloc_func_state()
397 state->allocated_stack = slot * BPF_REG_SIZE; in realloc_func_state()
411 sizeof(*new_stack) * (old_size / BPF_REG_SIZE)); in realloc_func_state()
412 memset(new_stack + old_size / BPF_REG_SIZE, 0, in realloc_func_state()
413 sizeof(*new_stack) * (size - old_size) / BPF_REG_SIZE); in realloc_func_state()
415 state->allocated_stack = slot * BPF_REG_SIZE; in realloc_func_state()
1017 int i, slot = -off - 1, spi = slot / BPF_REG_SIZE, err; in check_stack_write()
1020 err = realloc_func_state(state, round_up(slot + 1, BPF_REG_SIZE), in check_stack_write()
1029 size != BPF_REG_SIZE) { in check_stack_write()
1039 if (size != BPF_REG_SIZE) { in check_stack_write()
1053 for (i = 0; i < BPF_REG_SIZE; i++) { in check_stack_write()
1057 int soff = (-spi - 1) * BPF_REG_SIZE; in check_stack_write()
1094 if (size == BPF_REG_SIZE) in check_stack_write()
1103 state->stack[spi].slot_type[(slot - i) % BPF_REG_SIZE] = in check_stack_write()
1145 if (parent->frame[frameno]->allocated_stack <= slot * BPF_REG_SIZE) in mark_stack_slot_read()
1170 int i, slot = -off - 1, spi = slot / BPF_REG_SIZE; in check_stack_read()
1181 if (size != BPF_REG_SIZE) { in check_stack_read()
1185 for (i = 1; i < BPF_REG_SIZE; i++) { in check_stack_read()
1186 if (stype[(slot - i) % BPF_REG_SIZE] != STACK_SPILL) { in check_stack_read()
1208 if (stype[(slot - i) % BPF_REG_SIZE] == STACK_MISC) in check_stack_read()
1210 if (stype[(slot - i) % BPF_REG_SIZE] == STACK_ZERO) { in check_stack_read()
1787 if (!err && size < BPF_REG_SIZE && value_regno >= 0 && t == BPF_READ && in check_mem_access()
1892 spi = slot / BPF_REG_SIZE; in check_stack_boundary()
1895 stype = &state->stack[spi].slot_type[slot % BPF_REG_SIZE]; in check_stack_boundary()
2308 for (i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) { in __clear_all_pkt_pointers()
3458 for (i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) { in find_good_pkt_pointers()
3715 for (i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) { in mark_map_regs()
4336 #define ID_MAP_SIZE (MAX_BPF_REG + MAX_BPF_STACK / BPF_REG_SIZE)
4492 spi = i / BPF_REG_SIZE; in stacksafe()
4498 if (old->stack[spi].slot_type[i % BPF_REG_SIZE] == STACK_INVALID) in stacksafe()
4504 if (old->stack[spi].slot_type[i % BPF_REG_SIZE] == STACK_MISC && in stacksafe()
4505 cur->stack[spi].slot_type[i % BPF_REG_SIZE] == STACK_ZERO) in stacksafe()
4507 if (old->stack[spi].slot_type[i % BPF_REG_SIZE] != in stacksafe()
4508 cur->stack[spi].slot_type[i % BPF_REG_SIZE]) in stacksafe()
4515 if (i % BPF_REG_SIZE) in stacksafe()
4645 for (i = 0; i < state->allocated_stack / BPF_REG_SIZE && in propagate_liveness()
4646 i < parent->allocated_stack / BPF_REG_SIZE; i++) { in propagate_liveness()
4725 for (i = 0; i < frame->allocated_stack / BPF_REG_SIZE; i++) in is_state_visited()
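The matches above keep repeating the same index arithmetic: a negative stack offset off (relative to the frame pointer) is folded into a slot index (slot = -off - 1), a spill index (spi = slot / BPF_REG_SIZE) selecting a stack[] entry, and a per-byte slot_type index ((slot - i) % BPF_REG_SIZE). The following is a minimal standalone sketch of that arithmetic only, assuming BPF_REG_SIZE is 8 bytes (sizeof(u64)) as in the kernel; the demo_stack_index() helper and main() are hypothetical illustrations, not code from verifier.c.

/*
 * Standalone sketch of the verifier's stack-slot index math.
 * Assumption: BPF_REG_SIZE == 8, as in the kernel headers.
 */
#include <stdio.h>

#define BPF_REG_SIZE	8

/* Hypothetical helper: show how a write of 'size' bytes at frame-pointer
 * offset 'off' maps to slot, spi and slot_type[] byte indices, mirroring
 * the expressions seen in check_stack_write()/check_stack_read().
 */
static void demo_stack_index(int off, int size)
{
	int slot = -off - 1;			/* slot index, counted from fp downward */
	int spi = slot / BPF_REG_SIZE;		/* which stack[] entry (spill slot) */
	int i;

	printf("off=%d size=%d -> slot=%d spi=%d\n", off, size, slot, spi);
	for (i = 0; i < size; i++)		/* byte index into stack[spi].slot_type[] */
		printf("  byte %d -> slot_type[%d]\n",
		       i, (slot - i) % BPF_REG_SIZE);
}

int main(void)
{
	demo_stack_index(-8, BPF_REG_SIZE);	/* aligned 8-byte spill at fp-8 */
	demo_stack_index(-12, 4);		/* 4-byte write at fp-12 */
	return 0;
}

For fp-8 this yields slot 7 and spi 0, touching slot_type[7] down to slot_type[0]; for a 4-byte write at fp-12 it yields slot 11 and spi 1, touching slot_type[3] down to slot_type[0], which matches the per-byte marking loops in the listing.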