/Linux-v5.15/drivers/gpu/drm/radeon/

radeon_gem.c
     85  struct radeon_bo *robj = gem_to_radeon_bo(gobj);  in radeon_gem_object_free()  local
     87  if (robj) {  in radeon_gem_object_free()
     88  radeon_mn_unregister(robj);  in radeon_gem_object_free()
     89  radeon_bo_unref(&robj);  in radeon_gem_object_free()
     98  struct radeon_bo *robj;  in radeon_gem_object_create()  local
    120  flags, NULL, NULL, &robj);  in radeon_gem_object_create()
    132  *obj = &robj->tbo.base;  in radeon_gem_object_create()
    134  robj->pid = task_pid_nr(current);  in radeon_gem_object_create()
    137  list_add_tail(&robj->list, &rdev->gem.objects);  in radeon_gem_object_create()
    146  struct radeon_bo *robj;  in radeon_gem_set_domain()  local
    [all …]

radeon_gart.c
    132  if (rdev->gart.robj == NULL) {  in radeon_gart_table_vram_alloc()
    135  0, NULL, NULL, &rdev->gart.robj);  in radeon_gart_table_vram_alloc()
    158  r = radeon_bo_reserve(rdev->gart.robj, false);  in radeon_gart_table_vram_pin()
    161  r = radeon_bo_pin(rdev->gart.robj,  in radeon_gart_table_vram_pin()
    164  radeon_bo_unreserve(rdev->gart.robj);  in radeon_gart_table_vram_pin()
    167  r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);  in radeon_gart_table_vram_pin()
    169  radeon_bo_unpin(rdev->gart.robj);  in radeon_gart_table_vram_pin()
    170  radeon_bo_unreserve(rdev->gart.robj);  in radeon_gart_table_vram_pin()
    200  if (rdev->gart.robj == NULL) {  in radeon_gart_table_vram_unpin()
    203  r = radeon_bo_reserve(rdev->gart.robj, false);  in radeon_gart_table_vram_unpin()
    [all …]

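The radeon_gart.c hits show the usual radeon BO bring-up sequence for the GART table: reserve, pin into VRAM, kmap for CPU access, then unreserve. Below is a minimal sketch of that sequence, assuming a valid struct radeon_device *rdev whose gart.robj has already been allocated; the function name is illustrative, and the real radeon_gart_table_vram_pin() additionally records the pinned address in rdev->gart.table_addr.

```c
/* Sketch only: mirrors the reserve/pin/kmap pattern referenced above. */
#include "radeon.h"

static int example_gart_table_vram_pin(struct radeon_device *rdev)
{
	u64 gpu_addr;	/* the real code records this in rdev->gart */
	int r;

	r = radeon_bo_reserve(rdev->gart.robj, false);	/* lock the BO */
	if (r)
		return r;
	r = radeon_bo_pin(rdev->gart.robj, RADEON_GEM_DOMAIN_VRAM, &gpu_addr);
	if (r) {
		radeon_bo_unreserve(rdev->gart.robj);
		return r;
	}
	r = radeon_bo_kmap(rdev->gart.robj, &rdev->gart.ptr);	/* CPU mapping */
	if (r)
		radeon_bo_unpin(rdev->gart.robj);
	radeon_bo_unreserve(rdev->gart.robj);
	return r;
}
```
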
r100_track.h
     16  struct radeon_bo *robj;  member
     23  struct radeon_bo *robj;  member
     28  struct radeon_bo *robj;  member
     39  struct radeon_bo *robj;  member

evergreen_cs.c
   1221  track->db_z_read_bo = reloc->robj;  in evergreen_cs_handle_reg()
   1233  track->db_z_write_bo = reloc->robj;  in evergreen_cs_handle_reg()
   1245  track->db_s_read_bo = reloc->robj;  in evergreen_cs_handle_reg()
   1257  track->db_s_write_bo = reloc->robj;  in evergreen_cs_handle_reg()
   1281  track->vgt_strmout_bo[tmp] = reloc->robj;  in evergreen_cs_handle_reg()
   1505  track->cb_color_fmask_bo[tmp] = reloc->robj;  in evergreen_cs_handle_reg()
   1522  track->cb_color_cmask_bo[tmp] = reloc->robj;  in evergreen_cs_handle_reg()
   1563  track->cb_color_bo[tmp] = reloc->robj;  in evergreen_cs_handle_reg()
   1579  track->cb_color_bo[tmp] = reloc->robj;  in evergreen_cs_handle_reg()
   1591  track->htile_bo = reloc->robj;  in evergreen_cs_handle_reg()
    [all …]

radeon_cursor.c
    287  struct radeon_bo *robj;  in radeon_crtc_cursor_set2()  local
    309  robj = gem_to_radeon_bo(obj);  in radeon_crtc_cursor_set2()
    310  ret = radeon_bo_reserve(robj, false);  in radeon_crtc_cursor_set2()
    316  ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM,  in radeon_crtc_cursor_set2()
    319  radeon_bo_unreserve(robj);  in radeon_crtc_cursor_set2()
    351  struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);  in radeon_crtc_cursor_set2()  local
    352  ret = radeon_bo_reserve(robj, false);  in radeon_crtc_cursor_set2()
    354  radeon_bo_unpin(robj);  in radeon_crtc_cursor_set2()
    355  radeon_bo_unreserve(robj);  in radeon_crtc_cursor_set2()

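radeon_crtc_cursor_set2() pins the new cursor BO with radeon_bo_pin_restricted() and later unpins the previously bound cursor BO with the mirror-image bracket. A hedged sketch of both halves follows, assuming robj was obtained via gem_to_radeon_bo(); the function names and the max_offset parameter are illustrative (the driver derives the placement cap from the ASIC generation).

```c
#include "radeon.h"

/* Pin a cursor BO into VRAM, optionally capped at max_offset (0 = no cap). */
static int example_pin_cursor(struct radeon_bo *robj, u64 max_offset,
			      u64 *cursor_addr)
{
	int ret;

	ret = radeon_bo_reserve(robj, false);
	if (ret)
		return ret;
	ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM,
				       max_offset, cursor_addr);
	radeon_bo_unreserve(robj);
	return ret;
}

/* Drop the previously bound cursor BO (lines 351-355 above). */
static void example_unpin_cursor(struct radeon_bo *robj)
{
	if (radeon_bo_reserve(robj, false) == 0) {
		radeon_bo_unpin(robj);
		radeon_bo_unreserve(robj);
	}
}
```
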
r600_cs.c
   1085  track->vgt_strmout_bo[tmp] = reloc->robj;  in r600_cs_check_reg()
   1212  track->cb_color_frag_bo[tmp] = reloc->robj;  in r600_cs_check_reg()
   1243  track->cb_color_tile_bo[tmp] = reloc->robj;  in r600_cs_check_reg()
   1283  track->cb_color_bo[tmp] = reloc->robj;  in r600_cs_check_reg()
   1296  track->db_bo = reloc->robj;  in r600_cs_check_reg()
   1309  track->htile_bo = reloc->robj;  in r600_cs_check_reg()
   1810  if ((tmp + size) > radeon_bo_size(reloc->robj)) {  in r600_packet3_check()
   1812  tmp + size, radeon_bo_size(reloc->robj));  in r600_packet3_check()
   1840  if ((tmp + size) > radeon_bo_size(reloc->robj)) {  in r600_packet3_check()
   1842  tmp + size, radeon_bo_size(reloc->robj));  in r600_packet3_check()
    [all …]

r200.c
    188  track->zb.robj = reloc->robj;  in r200_packet0_check()
    201  track->cb[0].robj = reloc->robj;  in r200_packet0_check()
    231  track->textures[i].robj = reloc->robj;  in r200_packet0_check()
    275  track->textures[i].cube_info[face - 1].robj = reloc->robj;  in r200_packet0_check()

r100.c
   1341  track->arrays[i + 0].robj = reloc->robj;  in r100_packet3_load_vbpntr()
   1351  track->arrays[i + 1].robj = reloc->robj;  in r100_packet3_load_vbpntr()
   1365  track->arrays[i + 0].robj = reloc->robj;  in r100_packet3_load_vbpntr()
   1602  track->zb.robj = reloc->robj;  in r100_packet0_check()
   1615  track->cb[0].robj = reloc->robj;  in r100_packet0_check()
   1642  track->textures[i].robj = reloc->robj;  in r100_packet0_check()
   1660  track->textures[0].cube_info[i].robj = reloc->robj;  in r100_packet0_check()
   1678  track->textures[1].cube_info[i].robj = reloc->robj;  in r100_packet0_check()
   1696  track->textures[2].cube_info[i].robj = reloc->robj;  in r100_packet0_check()
   1901  struct radeon_bo *robj)  in r100_cs_track_check_pkt3_indx_buffer()  argument
    [all …]

radeon_device.c
   1587  struct radeon_bo *robj;  in radeon_suspend_kms()  local
   1590  struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);  in radeon_suspend_kms()  local
   1591  r = radeon_bo_reserve(robj, false);  in radeon_suspend_kms()
   1593  radeon_bo_unpin(robj);  in radeon_suspend_kms()
   1594  radeon_bo_unreserve(robj);  in radeon_suspend_kms()
   1601  robj = gem_to_radeon_bo(fb->obj[0]);  in radeon_suspend_kms()
   1603  if (!radeon_fbdev_robj_is_fb(rdev, robj)) {  in radeon_suspend_kms()
   1604  r = radeon_bo_reserve(robj, false);  in radeon_suspend_kms()
   1606  radeon_bo_unpin(robj);  in radeon_suspend_kms()
   1607  radeon_bo_unreserve(robj);  in radeon_suspend_kms()
    [all …]

radeon_cs.c
    116  p->relocs[i].robj = gem_to_radeon_bo(gobj);  in radeon_cs_parser_relocs()
    162  if (radeon_ttm_tt_has_userptr(p->rdev, p->relocs[i].robj->tbo.ttm)) {  in radeon_cs_parser_relocs()
    176  if (p->relocs[i].robj->prime_shared_count) {  in radeon_cs_parser_relocs()
    185  p->relocs[i].tv.bo = &p->relocs[i].robj->tbo;  in radeon_cs_parser_relocs()
    259  resv = reloc->robj->tbo.base.resv;  in radeon_cs_sync_rings()
    403  return (int)la->robj->tbo.resource->num_pages -  in cmp_size_smaller_first()
    404  (int)lb->robj->tbo.resource->num_pages;  in cmp_size_smaller_first()
    443  struct radeon_bo *bo = parser->relocs[i].robj;  in radeon_cs_parser_fini()
    526  bo = p->relocs[i].robj;  in radeon_bo_vm_update_pte()

r300.c
    134  if (rdev->gart.robj) {  in rv370_pcie_gart_init()
    157  if (rdev->gart.robj == NULL) {  in rv370_pcie_gart_enable()
    671  track->cb[i].robj = reloc->robj;  in r300_packet0_check()
    684  track->zb.robj = reloc->robj;  in r300_packet0_check()
    729  track->textures[i].robj = reloc->robj;  in r300_packet0_check()
   1128  track->aa.robj = reloc->robj;  in r300_packet0_check()
   1198  r = r100_cs_track_check_pkt3_indx_buffer(p, pkt, reloc->robj);  in r300_packet3_check()

radeon_fb.c
    391  bool radeon_fbdev_robj_is_fb(struct radeon_device *rdev, struct radeon_bo *robj)  in radeon_fbdev_robj_is_fb()  argument
    396  if (robj == gem_to_radeon_bo(rdev->mode_info.rfbdev->fb.obj[0]))  in radeon_fbdev_robj_is_fb()

r600.c
   1116  if (rdev->gart.robj) {  in r600_pcie_gart_init()
   1133  if (rdev->gart.robj == NULL) {  in r600_pcie_gart_enable()
   1511  if (rdev->vram_scratch.robj == NULL) {  in r600_vram_scratch_init()
   1514  0, NULL, NULL, &rdev->vram_scratch.robj);  in r600_vram_scratch_init()
   1520  r = radeon_bo_reserve(rdev->vram_scratch.robj, false);  in r600_vram_scratch_init()
   1523  r = radeon_bo_pin(rdev->vram_scratch.robj,  in r600_vram_scratch_init()
   1526  radeon_bo_unreserve(rdev->vram_scratch.robj);  in r600_vram_scratch_init()
   1529  r = radeon_bo_kmap(rdev->vram_scratch.robj,  in r600_vram_scratch_init()
   1532  radeon_bo_unpin(rdev->vram_scratch.robj);  in r600_vram_scratch_init()
   1533  radeon_bo_unreserve(rdev->vram_scratch.robj);  in r600_vram_scratch_init()
    [all …]

radeon_object.c
    502  struct radeon_bo *bo = lobj->robj;  in radeon_bo_list_validate()
    549  lobj->gpu_offset = radeon_bo_gpu_offset(lobj->robj);  in radeon_bo_list_validate()
    550  lobj->tiling_flags = lobj->robj->tiling_flags;  in radeon_bo_list_validate()

radeon_vm.c
    142  list[0].robj = vm->page_directory;  in radeon_vm_get_bos()
    154  list[idx].robj = vm->page_tables[i].bo;  in radeon_vm_get_bos()
    157  list[idx].tv.bo = &list[idx].robj->tbo;  in radeon_vm_get_bos()

/Linux-v5.15/drivers/gpu/drm/amd/amdgpu/

amdgpu_gem.c
     87  struct amdgpu_bo *robj = gem_to_amdgpu_bo(gobj);  in amdgpu_gem_object_free()  local
     89  if (robj) {  in amdgpu_gem_object_free()
     90  amdgpu_mn_unregister(robj);  in amdgpu_gem_object_free()
     91  amdgpu_bo_unref(&robj);  in amdgpu_gem_object_free()
    465  struct amdgpu_bo *robj;  in amdgpu_mode_dumb_mmap()  local
    471  robj = gem_to_amdgpu_bo(gobj);  in amdgpu_mode_dumb_mmap()
    472  if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) ||  in amdgpu_mode_dumb_mmap()
    473  (robj->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) {  in amdgpu_mode_dumb_mmap()
    477  *offset_p = amdgpu_bo_mmap_offset(robj);  in amdgpu_mode_dumb_mmap()
    524  struct amdgpu_bo *robj;  in amdgpu_gem_wait_idle_ioctl()  local
    [all …]

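The amdgpu_mode_dumb_mmap() hits show the handle-to-BO lookup and the checks that refuse a CPU mapping for user-pointer and NO_CPU_ACCESS BOs. A minimal sketch of that path, assuming a valid drm_file and GEM handle; the function name is illustrative, the calls are the ones listed above.

```c
#include "amdgpu.h"

/* Hedged sketch of the lookup/check/offset path in amdgpu_mode_dumb_mmap(). */
static int example_dumb_mmap_offset(struct drm_file *filp, u32 handle,
				    u64 *offset_p)
{
	struct drm_gem_object *gobj;
	struct amdgpu_bo *robj;

	gobj = drm_gem_object_lookup(filp, handle);
	if (!gobj)
		return -ENOENT;

	robj = gem_to_amdgpu_bo(gobj);
	/* Userptr BOs and BOs created without CPU access cannot be mmapped. */
	if (amdgpu_ttm_tt_get_usermm(robj->tbo.ttm) ||
	    (robj->flags & AMDGPU_GEM_CREATE_NO_CPU_ACCESS)) {
		drm_gem_object_put(gobj);
		return -EPERM;
	}

	*offset_p = amdgpu_bo_mmap_offset(robj);
	drm_gem_object_put(gobj);
	return 0;
}
```
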
amdgpu_fb.c
    370  struct amdgpu_bo *robj;  in amdgpu_fbdev_total_size()  local
    376  robj = gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.base.obj[0]);  in amdgpu_fbdev_total_size()
    377  size += amdgpu_bo_size(robj);  in amdgpu_fbdev_total_size()
    381  bool amdgpu_fbdev_robj_is_fb(struct amdgpu_device *adev, struct amdgpu_bo *robj)  in amdgpu_fbdev_robj_is_fb()  argument
    385  if (robj == gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.base.obj[0]))  in amdgpu_fbdev_robj_is_fb()

amdgpu_display.c
   1591  struct amdgpu_bo *robj;  in amdgpu_display_suspend_helper()  local
   1605  robj = gem_to_amdgpu_bo(fb->obj[0]);  in amdgpu_display_suspend_helper()
   1607  if (!amdgpu_fbdev_robj_is_fb(adev, robj)) {  in amdgpu_display_suspend_helper()
   1608  r = amdgpu_bo_reserve(robj, true);  in amdgpu_display_suspend_helper()
   1610  amdgpu_bo_unpin(robj);  in amdgpu_display_suspend_helper()
   1611  amdgpu_bo_unreserve(robj);  in amdgpu_display_suspend_helper()

/Linux-v5.15/drivers/gpu/drm/nouveau/

nouveau_prime.c
     45  struct dma_resv *robj = attach->dmabuf->resv;  in nouveau_gem_prime_import_sg_table()  local
     50  dma_resv_lock(robj, NULL);  in nouveau_gem_prime_import_sg_table()
     72  sg, robj);  in nouveau_gem_prime_import_sg_table()
     82  dma_resv_unlock(robj);  in nouveau_gem_prime_import_sg_table()

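In nouveau_gem_prime_import_sg_table(), robj is the exporting dma-buf's reservation object, held locked while the imported BO is created so that the new BO shares it. A sketch of that bracket; nouveau_alloc_imported_bo() is a hypothetical stand-in for the nouveau_bo_new()/nouveau_bo_init() calls that receive sg and robj.

```c
#include <linux/dma-buf.h>
#include <linux/dma-resv.h>
#include <linux/err.h>
#include <drm/drm_gem.h>

/* Hypothetical helper standing in for nouveau_bo_new()/nouveau_bo_init(). */
struct drm_gem_object *nouveau_alloc_imported_bo(struct drm_device *dev,
						 size_t size,
						 struct sg_table *sg,
						 struct dma_resv *robj);

static struct drm_gem_object *
example_prime_import(struct drm_device *dev, struct dma_buf_attachment *attach,
		     struct sg_table *sg)
{
	struct dma_resv *robj = attach->dmabuf->resv;
	struct drm_gem_object *obj;
	int ret;

	ret = dma_resv_lock(robj, NULL);	/* hold the exporter's resv */
	if (ret)
		return ERR_PTR(ret);

	obj = nouveau_alloc_imported_bo(dev, attach->dmabuf->size, sg, robj);

	dma_resv_unlock(robj);
	return obj;
}
```
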
nouveau_bo.h
     77  struct sg_table *sg, struct dma_resv *robj);
     80  struct dma_resv *robj,

nouveau_bo.c
    302  struct sg_table *sg, struct dma_resv *robj)  in nouveau_bo_init()  argument
    312  robj, nouveau_bo_del_ttm);  in nouveau_bo_init()
    324  struct sg_table *sg, struct dma_resv *robj,  in nouveau_bo_new()  argument
    339  ret = nouveau_bo_init(nvbo, size, align, domain, sg, robj);  in nouveau_bo_new()

/Linux-v5.15/drivers/gpu/drm/etnaviv/

etnaviv_gem_submit.c
    180  struct dma_resv *robj = bo->obj->base.resv;  in submit_fence_sync()  local
    183  ret = dma_resv_reserve_shared(robj, 1);  in submit_fence_sync()
    192  ret = dma_resv_get_fences(robj, &bo->excl,  in submit_fence_sync()
    198  bo->excl = dma_resv_get_excl_unlocked(robj);  in submit_fence_sync()

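submit_fence_sync() reserves a shared-fence slot for the new job on each BO's reservation object and then snapshots the fences the job has to wait for. A hedged sketch of that shape; the write flag stands in for the driver's ETNA_SUBMIT_BO_WRITE check, and the output pointers mirror bo->excl / bo->nr_shared / bo->shared.

```c
#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

/* Sketch of the per-BO fence snapshot done by submit_fence_sync(). */
static int example_fence_sync(struct dma_resv *robj, bool write,
			      struct dma_fence **excl,
			      unsigned int *nr_shared,
			      struct dma_fence ***shared)
{
	int ret;

	/* Make room for the fence this submit will add later. */
	ret = dma_resv_reserve_shared(robj, 1);
	if (ret)
		return ret;

	if (write)
		/* Writers must wait for all fences, shared and exclusive. */
		return dma_resv_get_fences(robj, excl, nr_shared, shared);

	/* Readers only need to wait for the exclusive (write) fence. */
	*excl = dma_resv_get_excl_unlocked(robj);
	return 0;
}
```
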
etnaviv_gem.c
    441  struct dma_resv *robj = obj->resv;  in etnaviv_gem_describe()  local
    452  fobj = dma_resv_shared_list(robj);  in etnaviv_gem_describe()
    462  fence = dma_resv_excl_fence(robj);  in etnaviv_gem_describe()

/Linux-v5.15/drivers/dma-buf/

dma-buf.c
   1370  struct dma_resv *robj;  in dma_buf_debug_show()  local
   1400  robj = buf_obj->resv;  in dma_buf_debug_show()
   1401  fence = dma_resv_excl_fence(robj);  in dma_buf_debug_show()
   1408  fobj = rcu_dereference_protected(robj->fence,  in dma_buf_debug_show()
   1409  dma_resv_held(robj));  in dma_buf_debug_show()
   1413  dma_resv_held(robj));  in dma_buf_debug_show()

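dma_buf_debug_show() walks each buffer's reservation object with the resv lock held, which is why robj->fence can be dereferenced via rcu_dereference_protected(..., dma_resv_held(robj)). A sketch of that walk, assuming the caller already holds dma_resv_lock() on robj; the printout format is illustrative.

```c
#include <linux/dma-fence.h>
#include <linux/dma-resv.h>

/* Dump the exclusive and shared fences of a locked reservation object. */
static void example_dump_fences(struct dma_resv *robj)
{
	struct dma_resv_list *fobj;
	struct dma_fence *fence;
	unsigned int i;

	fence = dma_resv_excl_fence(robj);
	if (fence)
		pr_info("excl fence: %s %s\n",
			fence->ops->get_driver_name(fence),
			fence->ops->get_timeline_name(fence));

	/* Safe without RCU because the resv lock is held by the caller. */
	fobj = rcu_dereference_protected(robj->fence, dma_resv_held(robj));
	for (i = 0; fobj && i < fobj->shared_count; i++) {
		fence = rcu_dereference_protected(fobj->shared[i],
						  dma_resv_held(robj));
		pr_info("shared fence: %s %s\n",
			fence->ops->get_driver_name(fence),
			fence->ops->get_timeline_name(fence));
	}
}
```
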
/Linux-v5.15/drivers/gpu/drm/msm/

msm_gem.c
    882  struct dma_resv *robj = obj->resv;  in msm_gem_describe()  local
    959  fobj = dma_resv_shared_list(robj);  in msm_gem_describe()
    969  fence = dma_resv_excl_fence(robj);  in msm_gem_describe()