Lines matching refs: nvbo (uses of the buffer-object pointer nvbo in drivers/gpu/drm/nouveau/nouveau_bo.c). Each entry shows the source line number, the matching code, and the enclosing function; "local" marks the declaration, "argument" a function parameter.
137 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_del_ttm() local
139 if (unlikely(nvbo->gem.filp)) in nouveau_bo_del_ttm()
141 WARN_ON(nvbo->pin_refcnt > 0); in nouveau_bo_del_ttm()
142 nv10_bo_put_tile_region(dev, nvbo->tile, NULL); in nouveau_bo_del_ttm()
143 kfree(nvbo); in nouveau_bo_del_ttm()
155 nouveau_bo_fixup_align(struct nouveau_bo *nvbo, u32 flags, in nouveau_bo_fixup_align() argument
158 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_fixup_align()
162 if (nvbo->mode) { in nouveau_bo_fixup_align()
165 *size = roundup_64(*size, 64 * nvbo->mode); in nouveau_bo_fixup_align()
169 *size = roundup_64(*size, 64 * nvbo->mode); in nouveau_bo_fixup_align()
173 *size = roundup_64(*size, 64 * nvbo->mode); in nouveau_bo_fixup_align()
177 *size = roundup_64(*size, 32 * nvbo->mode); in nouveau_bo_fixup_align()
181 *size = roundup_64(*size, (1 << nvbo->page)); in nouveau_bo_fixup_align()
182 *align = max((1 << nvbo->page), *align); in nouveau_bo_fixup_align()
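Side note on the rounding above: for tiled objects on pre-NV50 chips the size is padded to a whole number of tile rows (64 or 32 bytes times the tile mode, depending on chipset family), while on NV50+ it is padded to the GPU page size chosen for the object and the allocation alignment is raised to match. A standalone sketch of that last branch, assuming only that roundup_64() rounds up to the next multiple (the page-shift value here is made up for the example):

    #include <stdint.h>
    #include <stdio.h>

    /* Same contract as the kernel helper used above: round x up to a
     * multiple of y. */
    static uint64_t roundup_64(uint64_t x, uint64_t y)
    {
            return (x + y - 1) / y * y;
    }

    int main(void)
    {
            uint64_t size  = 12345;    /* requested object size, bytes */
            uint32_t align = 4096;     /* requested alignment, bytes */
            unsigned page  = 16;       /* hypothetical 64 KiB page shift */

            size = roundup_64(size, 1ULL << page);   /* whole GPU pages */
            if ((1U << page) > align)                /* raise alignment too */
                    align = 1U << page;

            printf("size=%llu align=%u\n", (unsigned long long)size, align);
            return 0;
    }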
195 struct nouveau_bo *nvbo; in nouveau_bo_new() local
210 nvbo = kzalloc(sizeof(struct nouveau_bo), GFP_KERNEL); in nouveau_bo_new()
211 if (!nvbo) in nouveau_bo_new()
213 INIT_LIST_HEAD(&nvbo->head); in nouveau_bo_new()
214 INIT_LIST_HEAD(&nvbo->entry); in nouveau_bo_new()
215 INIT_LIST_HEAD(&nvbo->vma_list); in nouveau_bo_new()
216 nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_new()
227 nvbo->force_coherent = true; in nouveau_bo_new()
231 nvbo->kind = (tile_flags & 0x0000ff00) >> 8; in nouveau_bo_new()
232 if (!nvif_mmu_kind_valid(mmu, nvbo->kind)) { in nouveau_bo_new()
233 kfree(nvbo); in nouveau_bo_new()
237 nvbo->comp = mmu->kind[nvbo->kind] != nvbo->kind; in nouveau_bo_new()
240 nvbo->kind = (tile_flags & 0x00007f00) >> 8; in nouveau_bo_new()
241 nvbo->comp = (tile_flags & 0x00030000) >> 16; in nouveau_bo_new()
242 if (!nvif_mmu_kind_valid(mmu, nvbo->kind)) { in nouveau_bo_new()
243 kfree(nvbo); in nouveau_bo_new()
247 nvbo->zeta = (tile_flags & 0x00000007); in nouveau_bo_new()
249 nvbo->mode = tile_mode; in nouveau_bo_new()
250 nvbo->contig = !(tile_flags & NOUVEAU_GEM_TILE_NONCONTIG); in nouveau_bo_new()
272 if (pi < 0 || !nvbo->comp || vmm->page[i].comp) in nouveau_bo_new()
284 if (nvbo->comp && !vmm->page[pi].comp) { in nouveau_bo_new()
286 nvbo->kind = mmu->kind[nvbo->kind]; in nouveau_bo_new()
287 nvbo->comp = 0; in nouveau_bo_new()
289 nvbo->page = vmm->page[pi].shift; in nouveau_bo_new()
291 nouveau_bo_fixup_align(nvbo, flags, &align, &size); in nouveau_bo_new()
292 nvbo->bo.mem.num_pages = size >> PAGE_SHIFT; in nouveau_bo_new()
293 nouveau_bo_placement_set(nvbo, flags, 0); in nouveau_bo_new()
298 ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size, in nouveau_bo_new()
299 type, &nvbo->placement, in nouveau_bo_new()
307 *pnvbo = nvbo; in nouveau_bo_new()
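Taken together, nouveau_bo_new() decodes the tiling/kind words, picks a GPU page size, fixes up size and alignment, and hands the object to ttm_bo_init(). A hedged sketch of a typical in-kernel caller in this era of the driver (the exact parameter list varies between kernel versions; verify against your tree, and note that "cli" is an assumed, valid struct nouveau_cli):

    /* Sketch only: allocate a 64 KiB, linear, VRAM-backed buffer object. */
    struct nouveau_bo *nvbo = NULL;
    int ret;

    ret = nouveau_bo_new(cli, 64 * 1024, 0 /* align: driver default */,
                         TTM_PL_FLAG_VRAM, 0 /* tile_mode */,
                         0 /* tile_flags: linear */,
                         NULL /* sg */, NULL /* resv */, &nvbo);
    if (ret)
            return ret;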
325 set_placement_range(struct nouveau_bo *nvbo, uint32_t type) in set_placement_range() argument
327 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in set_placement_range()
332 nvbo->mode && (type & TTM_PL_FLAG_VRAM) && in set_placement_range()
333 nvbo->bo.mem.num_pages < vram_pages / 4) { in set_placement_range()
340 if (nvbo->zeta) { in set_placement_range()
347 for (i = 0; i < nvbo->placement.num_placement; ++i) { in set_placement_range()
348 nvbo->placements[i].fpfn = fpfn; in set_placement_range()
349 nvbo->placements[i].lpfn = lpfn; in set_placement_range()
351 for (i = 0; i < nvbo->placement.num_busy_placement; ++i) { in set_placement_range()
352 nvbo->busy_placements[i].fpfn = fpfn; in set_placement_range()
353 nvbo->busy_placements[i].lpfn = lpfn; in set_placement_range()
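The fpfn/lpfn pair set above are first/last page-frame offsets within the VRAM domain. On the old chips this path targets, small tiled buffers are confined to one half of VRAM so that colour and depth (zeta) buffers land on different memory-controller units. A standalone illustration of the split:

    #include <stdint.h>
    #include <stdio.h>

    /* Depth (zeta) buffers are pushed to the upper half of VRAM,
     * colour buffers to the lower half.  fpfn/lpfn are first/last
     * page-frame numbers; the VRAM size below is made up. */
    static void vram_range(int zeta, uint32_t vram_pages,
                           uint32_t *fpfn, uint32_t *lpfn)
    {
            if (zeta) {
                    *fpfn = vram_pages / 2;   /* upper half only */
                    *lpfn = ~0u;              /* no upper bound */
            } else {
                    *fpfn = 0;                /* lower half only */
                    *lpfn = vram_pages / 2;
            }
    }

    int main(void)
    {
            uint32_t fpfn, lpfn;

            vram_range(1, (128u << 20) >> 12, &fpfn, &lpfn); /* 128 MiB card */
            printf("zeta: fpfn=%u lpfn=%u\n", fpfn, lpfn);
            return 0;
    }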
359 nouveau_bo_placement_set(struct nouveau_bo *nvbo, uint32_t type, uint32_t busy) in nouveau_bo_placement_set() argument
361 struct ttm_placement *pl = &nvbo->placement; in nouveau_bo_placement_set()
362 uint32_t flags = (nvbo->force_coherent ? TTM_PL_FLAG_UNCACHED : in nouveau_bo_placement_set()
364 (nvbo->pin_refcnt ? TTM_PL_FLAG_NO_EVICT : 0); in nouveau_bo_placement_set()
366 pl->placement = nvbo->placements; in nouveau_bo_placement_set()
367 set_placement_list(nvbo->placements, &pl->num_placement, in nouveau_bo_placement_set()
370 pl->busy_placement = nvbo->busy_placements; in nouveau_bo_placement_set()
371 set_placement_list(nvbo->busy_placements, &pl->num_busy_placement, in nouveau_bo_placement_set()
374 set_placement_range(nvbo, type); in nouveau_bo_placement_set()
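nouveau_bo_placement_set() builds both TTM placement lists from domain masks: "type" is where the buffer should live, "busy" is the fallback set tried under memory pressure. A minimal usage sketch, mirroring how the driver's validate paths combine it with nouveau_bo_validate():

    /* Prefer VRAM; allow placement in GART when VRAM is contended. */
    nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_VRAM, TTM_PL_FLAG_TT);
    ret = nouveau_bo_validate(nvbo, true /* interruptible */,
                              false /* no_wait_gpu */);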
378 nouveau_bo_pin(struct nouveau_bo *nvbo, uint32_t memtype, bool contig) in nouveau_bo_pin() argument
380 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_pin()
381 struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_bo_pin()
391 if (!nvbo->contig) { in nouveau_bo_pin()
392 nvbo->contig = true; in nouveau_bo_pin()
398 if (nvbo->pin_refcnt) { in nouveau_bo_pin()
405 nvbo->pin_refcnt++; in nouveau_bo_pin()
410 nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_TT, 0); in nouveau_bo_pin()
411 ret = nouveau_bo_validate(nvbo, false, false); in nouveau_bo_pin()
416 nvbo->pin_refcnt++; in nouveau_bo_pin()
417 nouveau_bo_placement_set(nvbo, memtype, 0); in nouveau_bo_pin()
423 nvbo->pin_refcnt--; in nouveau_bo_pin()
424 ret = nouveau_bo_validate(nvbo, false, false); in nouveau_bo_pin()
427 nvbo->pin_refcnt++; in nouveau_bo_pin()
442 nvbo->contig = false; in nouveau_bo_pin()
448 nouveau_bo_unpin(struct nouveau_bo *nvbo) in nouveau_bo_unpin() argument
450 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_unpin()
451 struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_bo_unpin()
458 ref = --nvbo->pin_refcnt; in nouveau_bo_unpin()
463 nouveau_bo_placement_set(nvbo, bo->mem.placement, 0); in nouveau_bo_unpin()
465 ret = nouveau_bo_validate(nvbo, false, false); in nouveau_bo_unpin()
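Pinning is refcounted: the first nouveau_bo_pin() validates the object into the requested domain and marks it TTM_PL_FLAG_NO_EVICT; later pins only bump pin_refcnt, and only the final nouveau_bo_unpin() clears the flag so TTM may move the object again. A hedged sketch of the usual pairing, e.g. around scanout:

    /* Sketch: pin contiguously into VRAM for display scanout. */
    ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, true /* contig */);
    if (ret)
            return ret;

    /* ... object now stays put; program scanout from its offset ... */

    nouveau_bo_unpin(nvbo);   /* drops NO_EVICT on the last unpin */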
485 nouveau_bo_map(struct nouveau_bo *nvbo) in nouveau_bo_map() argument
489 ret = ttm_bo_reserve(&nvbo->bo, false, false, NULL); in nouveau_bo_map()
493 ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.mem.num_pages, &nvbo->kmap); in nouveau_bo_map()
495 ttm_bo_unreserve(&nvbo->bo); in nouveau_bo_map()
500 nouveau_bo_unmap(struct nouveau_bo *nvbo) in nouveau_bo_unmap() argument
502 if (!nvbo) in nouveau_bo_unmap()
505 ttm_bo_kunmap(&nvbo->kmap); in nouveau_bo_unmap()
509 nouveau_bo_sync_for_device(struct nouveau_bo *nvbo) in nouveau_bo_sync_for_device() argument
511 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_sync_for_device()
512 struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm; in nouveau_bo_sync_for_device()
519 if (nvbo->force_coherent) in nouveau_bo_sync_for_device()
529 nouveau_bo_sync_for_cpu(struct nouveau_bo *nvbo) in nouveau_bo_sync_for_cpu() argument
531 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_sync_for_cpu()
532 struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm; in nouveau_bo_sync_for_cpu()
539 if (nvbo->force_coherent) in nouveau_bo_sync_for_cpu()
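Both sync helpers walk the buffer's DMA-mapped backing pages and bracket ownership transfers for non-coherent mappings; force_coherent objects skip the dance entirely, as the checks above show. A hedged reconstruction of the device-direction loop (the _for_cpu variant is the mirror image using dma_sync_single_for_cpu()):

    int i;

    for (i = 0; i < ttm_dma->ttm.num_pages; i++)
            dma_sync_single_for_device(drm->dev->dev,
                                       ttm_dma->dma_address[i],
                                       PAGE_SIZE, DMA_BIDIRECTIONAL);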
548 nouveau_bo_validate(struct nouveau_bo *nvbo, bool interruptible, in nouveau_bo_validate() argument
554 ret = ttm_bo_validate(&nvbo->bo, &nvbo->placement, &ctx); in nouveau_bo_validate()
558 nouveau_bo_sync_for_device(nvbo); in nouveau_bo_validate()
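nouveau_bo_validate() is a thin wrapper: it packs its two flags into a ttm_operation_ctx, asks TTM to (re)place the buffer according to nvbo->placement, and flushes CPU caches toward the device on success. A hedged reconstruction of the core:

    struct ttm_operation_ctx ctx = { interruptible, no_wait_gpu };
    int ret;

    ret = ttm_bo_validate(&nvbo->bo, &nvbo->placement, &ctx);
    if (ret == 0)
            nouveau_bo_sync_for_device(nvbo);

    return ret;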
564 nouveau_bo_wr16(struct nouveau_bo *nvbo, unsigned index, u16 val) in nouveau_bo_wr16() argument
567 u16 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem); in nouveau_bo_wr16()
578 nouveau_bo_rd32(struct nouveau_bo *nvbo, unsigned index) in nouveau_bo_rd32() argument
581 u32 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem); in nouveau_bo_rd32()
592 nouveau_bo_wr32(struct nouveau_bo *nvbo, unsigned index, u32 val) in nouveau_bo_wr32() argument
595 u32 *mem = ttm_kmap_obj_virtual(&nvbo->kmap, &is_iomem); in nouveau_bo_wr32()
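The rd/wr accessors go through the persistent kmap set up by nouveau_bo_map(); ttm_kmap_obj_virtual() reports whether the mapping is I/O memory, in which case the accessors use ioread/iowrite rather than plain loads and stores. Typical usage, sketched (the index is in units of the access width, so dwords for rd32/wr32):

    ret = nouveau_bo_map(nvbo);
    if (ret)
            return ret;

    nouveau_bo_wr32(nvbo, 0, 0xdeadbeef);   /* write the first dword */
    val = nouveau_bo_rd32(nvbo, 0);         /* read it back */

    nouveau_bo_unmap(nvbo);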
692 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_evict_flags() local
696 nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_TT, in nouveau_bo_evict_flags()
700 nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_SYSTEM, 0); in nouveau_bo_evict_flags()
704 *pl = nvbo->placement; in nouveau_bo_evict_flags()
1276 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_move_ntfy() local
1284 mem->mem.page == nvbo->page) { in nouveau_bo_move_ntfy()
1285 list_for_each_entry(vma, &nvbo->vma_list, head) { in nouveau_bo_move_ntfy()
1289 list_for_each_entry(vma, &nvbo->vma_list, head) { in nouveau_bo_move_ntfy()
1302 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_vm_bind() local
1311 nvbo->mode, nvbo->zeta); in nouveau_bo_vm_bind()
1336 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_move() local
1345 if (nvbo->pin_refcnt) in nouveau_bo_move()
1346 NV_WARN(drm, "Moving pinned object %p!\n", nvbo); in nouveau_bo_move()
1390 nouveau_bo_vm_cleanup(bo, new_tile, &nvbo->tile); in nouveau_bo_move()
1399 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_verify_access() local
1401 return drm_vma_node_verify_access(&nvbo->gem.vma_node, in nouveau_bo_verify_access()
1509 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_ttm_fault_reserve_notify() local
1519 !nvbo->kind) in nouveau_ttm_fault_reserve_notify()
1523 nouveau_bo_placement_set(nvbo, TTM_PL_TT, 0); in nouveau_ttm_fault_reserve_notify()
1525 ret = nouveau_bo_validate(nvbo, false, false); in nouveau_ttm_fault_reserve_notify()
1537 for (i = 0; i < nvbo->placement.num_placement; ++i) { in nouveau_ttm_fault_reserve_notify()
1538 nvbo->placements[i].fpfn = 0; in nouveau_ttm_fault_reserve_notify()
1539 nvbo->placements[i].lpfn = mappable; in nouveau_ttm_fault_reserve_notify()
1542 for (i = 0; i < nvbo->placement.num_busy_placement; ++i) { in nouveau_ttm_fault_reserve_notify()
1543 nvbo->busy_placements[i].fpfn = 0; in nouveau_ttm_fault_reserve_notify()
1544 nvbo->busy_placements[i].lpfn = mappable; in nouveau_ttm_fault_reserve_notify()
1547 nouveau_bo_placement_set(nvbo, TTM_PL_FLAG_VRAM, 0); in nouveau_ttm_fault_reserve_notify()
1548 return nouveau_bo_validate(nvbo, false, false); in nouveau_ttm_fault_reserve_notify()
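Everything from line 692 onward is not called directly but registered with TTM as driver callbacks. A hedged, abridged reconstruction of how nouveau_bo.c wires them up:

    struct ttm_bo_driver nouveau_bo_driver = {
            /* ... ttm_tt hooks ... */
            .evict_flags = nouveau_bo_evict_flags,
            .move_notify = nouveau_bo_move_ntfy,
            .move = nouveau_bo_move,
            .verify_access = nouveau_bo_verify_access,
            .fault_reserve_notify = &nouveau_ttm_fault_reserve_notify,
            /* ... io_mem handlers omitted ... */
    };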
1653 nouveau_bo_fence(struct nouveau_bo *nvbo, struct nouveau_fence *fence, bool exclusive) in nouveau_bo_fence() argument
1655 struct reservation_object *resv = nvbo->bo.resv; in nouveau_bo_fence()
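nouveau_bo_fence() publishes a completed-work fence on the buffer's reservation object so other users can wait on it: exclusive for writes, shared for reads. A hedged reconstruction of the body (assumes struct nouveau_fence embeds its dma_fence as "base", as it did in this era):

    if (exclusive)
            reservation_object_add_excl_fence(resv, &fence->base);
    else if (fence)
            reservation_object_add_shared_fence(resv, &fence->base);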