Lines Matching refs:va_block

357 struct hl_vm_va_block *va_block, *tmp; in clear_va_list_locked() local
359 list_for_each_entry_safe(va_block, tmp, va_list, node) { in clear_va_list_locked()
360 list_del(&va_block->node); in clear_va_list_locked()
361 kfree(va_block); in clear_va_list_locked()
380 struct hl_vm_va_block *va_block; in print_va_list_locked() local
384 list_for_each_entry(va_block, va_list, node) in print_va_list_locked()
387 va_block->start, va_block->end, va_block->size); in print_va_list_locked()
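
Together, the first two groups are straightforward traversals of the VA free list: clear_va_list_locked() walks it with the safe iterator so each node can be freed mid-walk, and print_va_list_locked() only dumps each block's start, end and size. Below is a minimal userspace sketch of the same two patterns; it uses a plain singly linked list and an illustrative struct va_block instead of the driver's struct hl_vm_va_block and struct list_head, so the names and layout are assumptions, not the driver's.

#include <stdio.h>
#include <stdlib.h>

/* Illustrative stand-in for struct hl_vm_va_block. */
struct va_block {
	unsigned long long start, end, size;
	struct va_block *next;
};

/* Free every block; the successor is cached before freeing, which is
 * what the list_for_each_entry_safe() loop in clear_va_list_locked()
 * buys the driver. */
static void clear_va_list(struct va_block **head)
{
	struct va_block *blk = *head, *tmp;

	while (blk) {
		tmp = blk->next;
		free(blk);
		blk = tmp;
	}
	*head = NULL;
}

/* Dump each block, mirroring the debug print in print_va_list_locked(). */
static void print_va_list(const struct va_block *head)
{
	const struct va_block *blk;

	for (blk = head; blk; blk = blk->next)
		printf("va block, start: 0x%llx, end: 0x%llx, size: %llu\n",
		       blk->start, blk->end, blk->size);
}
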
405 struct list_head *va_list, struct hl_vm_va_block *va_block) in merge_va_blocks_locked() argument
409 prev = list_prev_entry(va_block, node); in merge_va_blocks_locked()
410 if (&prev->node != va_list && prev->end + 1 == va_block->start) { in merge_va_blocks_locked()
411 prev->end = va_block->end; in merge_va_blocks_locked()
413 list_del(&va_block->node); in merge_va_blocks_locked()
414 kfree(va_block); in merge_va_blocks_locked()
415 va_block = prev; in merge_va_blocks_locked()
418 next = list_next_entry(va_block, node); in merge_va_blocks_locked()
419 if (&next->node != va_list && va_block->end + 1 == next->start) { in merge_va_blocks_locked()
420 next->start = va_block->start; in merge_va_blocks_locked()
422 list_del(&va_block->node); in merge_va_blocks_locked()
423 kfree(va_block); in merge_va_blocks_locked()
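
merge_va_blocks_locked() coalesces a block with its neighbours on the address-ordered free list: if the previous block ends exactly one address before this block starts, the previous block absorbs it, and the same adjacency test is then run against the next block (which can also absorb the result of the first merge). A hypothetical reconstruction of that logic over a plain doubly linked list follows; it assumes end is inclusive, as prev->end + 1 == va_block->start suggests, and the size bookkeeping is an assumption because the listing omits those lines.

#include <stdlib.h>

struct va_block {
	unsigned long long start, end, size;
	struct va_block *prev, *next;
};

struct va_free_list {
	struct va_block *first;
};

/* Unlink blk from the list (helper for the sketch below). */
static void unlink_block(struct va_free_list *list, struct va_block *blk)
{
	if (blk->prev)
		blk->prev->next = blk->next;
	else
		list->first = blk->next;
	if (blk->next)
		blk->next->prev = blk->prev;
}

/* Coalesce blk with adjacent neighbours, mirroring merge_va_blocks_locked():
 * first let the previous block swallow blk, then let the next block swallow
 * whatever blk now points at. */
static void merge_va_blocks(struct va_free_list *list, struct va_block *blk)
{
	struct va_block *prev = blk->prev;
	struct va_block *next;

	if (prev && prev->end + 1 == blk->start) {
		prev->end = blk->end;
		prev->size += blk->size;	/* assumed bookkeeping */
		unlink_block(list, blk);
		free(blk);
		blk = prev;
	}

	next = blk->next;
	if (next && blk->end + 1 == next->start) {
		next->start = blk->start;
		next->size += blk->size;	/* assumed bookkeeping */
		unlink_block(list, blk);
		free(blk);
	}
}

Because the list is kept sorted by address, adjacency is a pure neighbour comparison, so the merge itself is O(1) once the block has been linked in the right place.
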
444 struct hl_vm_va_block *va_block, *res = NULL; in add_va_block_locked() local
449 list_for_each_entry(va_block, va_list, node) { in add_va_block_locked()
451 if (hl_mem_area_crosses_range(start, size, va_block->start, in add_va_block_locked()
452 va_block->end)) { in add_va_block_locked()
455 va_block->start, va_block->end); in add_va_block_locked()
459 if (va_block->end < start) in add_va_block_locked()
460 res = va_block; in add_va_block_locked()
463 va_block = kmalloc(sizeof(*va_block), GFP_KERNEL); in add_va_block_locked()
464 if (!va_block) in add_va_block_locked()
467 va_block->start = start; in add_va_block_locked()
468 va_block->end = end; in add_va_block_locked()
469 va_block->size = size; in add_va_block_locked()
472 list_add(&va_block->node, va_list); in add_va_block_locked()
474 list_add(&va_block->node, &res->node); in add_va_block_locked()
476 merge_va_blocks_locked(hdev, va_list, va_block); in add_va_block_locked()
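
add_va_block_locked() is the path that returns a freed range to the list: it rejects ranges that overlap an existing free block (the hl_mem_area_crosses_range() check), remembers the last block that ends below the new start, allocates a node, links it either at the head or right after that block so the list stays address-ordered, and finally hands it to the merge helper. A sketch of that flow with the same illustrative structures; the overlap helper, the inclusive-end size convention and the error values are assumptions, not the driver's.

#include <stdbool.h>
#include <stdlib.h>

struct va_block {
	unsigned long long start, end, size;
	struct va_block *prev, *next;
};

struct va_free_list {
	struct va_block *first;
};

/* Does [start, start + size - 1] overlap [blk_start, blk_end]? */
static bool area_crosses_range(unsigned long long start, unsigned long long size,
			       unsigned long long blk_start,
			       unsigned long long blk_end)
{
	return start <= blk_end && start + size - 1 >= blk_start;
}

/* Return a freed range to the list, keeping it sorted by address. */
static int add_va_block(struct va_free_list *list,
			unsigned long long start, unsigned long long end)
{
	unsigned long long size = end - start + 1;	/* end treated as inclusive */
	struct va_block *blk, *res = NULL;

	/* Refuse ranges that collide with a block already on the free list
	 * and remember the last block lying entirely below the new one. */
	for (blk = list->first; blk; blk = blk->next) {
		if (area_crosses_range(start, size, blk->start, blk->end))
			return -1;
		if (blk->end < start)
			res = blk;
	}

	blk = malloc(sizeof(*blk));
	if (!blk)
		return -1;

	blk->start = start;
	blk->end = end;
	blk->size = size;

	if (!res) {			/* new lowest block: link at the head */
		blk->prev = NULL;
		blk->next = list->first;
		if (list->first)
			list->first->prev = blk;
		list->first = blk;
	} else {			/* link right after the block below it */
		blk->prev = res;
		blk->next = res->next;
		if (res->next)
			res->next->prev = blk;
		res->next = blk;
	}

	/* merge_va_blocks(list, blk): coalesce with neighbours, as sketched above. */
	return 0;
}
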
525 struct hl_vm_va_block *va_block, *new_va_block = NULL; in get_va_block() local
548 list_for_each_entry(va_block, &va_range->list, node) { in get_va_block()
550 valid_start = va_block->start; in get_va_block()
556 if (valid_start > va_block->end) in get_va_block()
560 valid_size = va_block->end - valid_start; in get_va_block()
565 new_va_block = va_block; in get_va_block()
571 ((hint_addr + size) <= va_block->end)) { in get_va_block()
572 new_va_block = va_block; in get_va_block()
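
get_va_block() is the allocation side: it scans the free list for a block that can hold the requested size (the omitted lines align valid_start up to the page size before the checks), keeps a candidate in new_va_block, and prefers a block in which the caller's hint address plus the size still fits below the block's end. A simplified sketch of that selection loop follows; the alignment helper, the first-fit policy and the hint precondition are assumptions, since the deciding lines are not in the listing, and the splitting of the chosen block is omitted entirely.

#include <stddef.h>

struct va_block {
	unsigned long long start, end, size;
	struct va_block *prev, *next;
};

struct va_free_list {
	struct va_block *first;
};

/* Round addr up to the next multiple of page_size (assumed power of two). */
static unsigned long long align_up(unsigned long long addr,
				   unsigned long long page_size)
{
	return (addr + page_size - 1) & ~(page_size - 1);
}

/* Pick a free block that can hold size bytes, preferring one that can
 * honour hint_addr.  Returns the chosen block, or NULL if nothing fits. */
static struct va_block *pick_va_block(struct va_free_list *list,
				      unsigned long long size,
				      unsigned long long hint_addr,
				      unsigned long long page_size)
{
	struct va_block *blk, *best = NULL;

	for (blk = list->first; blk; blk = blk->next) {
		unsigned long long valid_start = align_up(blk->start, page_size);
		unsigned long long valid_size;

		if (valid_start > blk->end)
			continue;	/* alignment pushed us past the block */

		valid_size = blk->end - valid_start;
		if (valid_size >= size && !best)
			best = blk;	/* first fit; the driver's policy may differ */

		if (hint_addr && hint_addr >= valid_start &&
		    hint_addr + size <= blk->end)
			return blk;	/* the hinted address fits entirely here */
	}

	return best;
}
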
1552 struct hl_vm_va_block *va_block; in hl_va_range_fini() local
1566 va_block = list_first_entry(&va_range->list, typeof(*va_block), node); in hl_va_range_fini()
1568 if (va_block->start != va_range->start_addr || in hl_va_range_fini()
1569 va_block->end != va_range->end_addr) { in hl_va_range_fini()
1572 va_block->start, va_block->end); in hl_va_range_fini()
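
hl_va_range_fini() is a teardown-time consistency check: once every allocation has been released and merged, the free list should have collapsed back into a single block covering the whole range, so the function compares the first entry's start and end against the range boundaries and warns about a leak when they differ. A sketch of that check with the illustrative structures above; the single-entry test and the message wording are assumptions, as the listing only shows the boundary comparison.

#include <stdio.h>

struct va_block {
	unsigned long long start, end, size;
	struct va_block *prev, *next;
};

struct va_range {
	struct va_block *first;			/* free list of the range */
	unsigned long long start_addr, end_addr;
};

/* Return 0 when the free list holds exactly one block spanning the whole
 * range, mirroring the check in hl_va_range_fini(). */
static int check_va_range_clean(const struct va_range *range)
{
	const struct va_block *blk = range->first;

	if (!blk || blk->next ||
	    blk->start != range->start_addr || blk->end != range->end_addr) {
		fprintf(stderr,
			"va range not fully released: expected [0x%llx, 0x%llx], first block [0x%llx, 0x%llx]\n",
			range->start_addr, range->end_addr,
			blk ? blk->start : 0, blk ? blk->end : 0);
		return -1;
	}

	return 0;
}
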