Lines Matching refs:vce

159 r = request_firmware(&adev->vce.fw, fw_name, adev->dev); in amdgpu_vce_sw_init()
166 r = amdgpu_ucode_validate(adev->vce.fw); in amdgpu_vce_sw_init()
170 release_firmware(adev->vce.fw); in amdgpu_vce_sw_init()
171 adev->vce.fw = NULL; in amdgpu_vce_sw_init()
175 hdr = (const struct common_firmware_header *)adev->vce.fw->data; in amdgpu_vce_sw_init()
183 adev->vce.fw_version = ((version_major << 24) | (version_minor << 16) | in amdgpu_vce_sw_init()
187 AMDGPU_GEM_DOMAIN_VRAM, &adev->vce.vcpu_bo, in amdgpu_vce_sw_init()
188 &adev->vce.gpu_addr, &adev->vce.cpu_addr); in amdgpu_vce_sw_init()
195 atomic_set(&adev->vce.handles[i], 0); in amdgpu_vce_sw_init()
196 adev->vce.filp[i] = NULL; in amdgpu_vce_sw_init()
199 INIT_DELAYED_WORK(&adev->vce.idle_work, amdgpu_vce_idle_work_handler); in amdgpu_vce_sw_init()
200 mutex_init(&adev->vce.idle_mutex); in amdgpu_vce_sw_init()
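
Taken together, the amdgpu_vce_sw_init() lines above follow the usual amdgpu firmware-load pattern: request the blob, validate it, derive vce.fw_version from the common header, back the image with a VCPU buffer object in VRAM, and reset the per-client session state. A hedged reconstruction of that sequence follows; the helper name vce_sw_init_sketch, the exact error handling, and the ucode_version bit layout are illustrative assumptions, not taken from the listing (all sketches below assume the driver's own headers, amdgpu.h and amdgpu_vce.h, are in scope).

/* Sketch only: condensed from the amdgpu_vce_sw_init() lines above. */
static int vce_sw_init_sketch(struct amdgpu_device *adev,
			      const char *fw_name, unsigned long size)
{
	const struct common_firmware_header *hdr;
	unsigned int version_major, version_minor, binary_id;
	int i, r;

	r = request_firmware(&adev->vce.fw, fw_name, adev->dev);
	if (r)
		return r;

	r = amdgpu_ucode_validate(adev->vce.fw);
	if (r) {
		release_firmware(adev->vce.fw);
		adev->vce.fw = NULL;
		return r;
	}

	/* fw_version packs major/minor/binary id (assumed layout) */
	hdr = (const struct common_firmware_header *)adev->vce.fw->data;
	version_major = (le32_to_cpu(hdr->ucode_version) >> 20) & 0xfff;
	version_minor = (le32_to_cpu(hdr->ucode_version) >> 8) & 0xfff;
	binary_id = le32_to_cpu(hdr->ucode_version) & 0xff;
	adev->vce.fw_version = (version_major << 24) | (version_minor << 16) |
			       (binary_id << 8);

	/* VCPU buffer object that will hold the firmware image in VRAM */
	r = amdgpu_bo_create_kernel(adev, size, PAGE_SIZE,
				    AMDGPU_GEM_DOMAIN_VRAM, &adev->vce.vcpu_bo,
				    &adev->vce.gpu_addr, &adev->vce.cpu_addr);
	if (r)
		return r;

	/* no sessions are open yet */
	for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; i++) {
		atomic_set(&adev->vce.handles[i], 0);
		adev->vce.filp[i] = NULL;
	}

	INIT_DELAYED_WORK(&adev->vce.idle_work, amdgpu_vce_idle_work_handler);
	mutex_init(&adev->vce.idle_mutex);
	return 0;
}
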
216 if (adev->vce.vcpu_bo == NULL) in amdgpu_vce_sw_fini()
219 cancel_delayed_work_sync(&adev->vce.idle_work); in amdgpu_vce_sw_fini()
220 drm_sched_entity_destroy(&adev->vce.entity); in amdgpu_vce_sw_fini()
222 amdgpu_bo_free_kernel(&adev->vce.vcpu_bo, &adev->vce.gpu_addr, in amdgpu_vce_sw_fini()
223 (void **)&adev->vce.cpu_addr); in amdgpu_vce_sw_fini()
225 for (i = 0; i < adev->vce.num_rings; i++) in amdgpu_vce_sw_fini()
226 amdgpu_ring_fini(&adev->vce.ring[i]); in amdgpu_vce_sw_fini()
228 release_firmware(adev->vce.fw); in amdgpu_vce_sw_fini()
229 mutex_destroy(&adev->vce.idle_mutex); in amdgpu_vce_sw_fini()
246 ring = &adev->vce.ring[0]; in amdgpu_vce_entity_init()
248 r = drm_sched_entity_init(&adev->vce.entity, DRM_SCHED_PRIORITY_NORMAL, in amdgpu_vce_entity_init()
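
The two amdgpu_vce_entity_init() lines bind the VCE scheduler entity to the first ring's GPU scheduler. A minimal sketch of what the function amounts to, assuming the drm_sched_entity_init() variant that takes a scheduler list (which the DRM_SCHED_PRIORITY_NORMAL argument above implies):

/* Sketch: scheduler-entity setup implied by the lines above. */
static int vce_entity_init_sketch(struct amdgpu_device *adev)
{
	struct amdgpu_ring *ring = &adev->vce.ring[0];
	struct drm_gpu_scheduler *sched = &ring->sched;
	int r;

	r = drm_sched_entity_init(&adev->vce.entity, DRM_SCHED_PRIORITY_NORMAL,
				  &sched, 1, NULL);
	if (r)
		DRM_ERROR("Failed setting up VCE run queue.\n");
	return r;
}
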
268 cancel_delayed_work_sync(&adev->vce.idle_work); in amdgpu_vce_suspend()
270 if (adev->vce.vcpu_bo == NULL) in amdgpu_vce_suspend()
274 if (atomic_read(&adev->vce.handles[i])) in amdgpu_vce_suspend()
297 if (adev->vce.vcpu_bo == NULL) in amdgpu_vce_resume()
300 r = amdgpu_bo_reserve(adev->vce.vcpu_bo, false); in amdgpu_vce_resume()
306 r = amdgpu_bo_kmap(adev->vce.vcpu_bo, &cpu_addr); in amdgpu_vce_resume()
308 amdgpu_bo_unreserve(adev->vce.vcpu_bo); in amdgpu_vce_resume()
313 hdr = (const struct common_firmware_header *)adev->vce.fw->data; in amdgpu_vce_resume()
315 memcpy_toio(cpu_addr, adev->vce.fw->data + offset, in amdgpu_vce_resume()
316 adev->vce.fw->size - offset); in amdgpu_vce_resume()
318 amdgpu_bo_kunmap(adev->vce.vcpu_bo); in amdgpu_vce_resume()
320 amdgpu_bo_unreserve(adev->vce.vcpu_bo); in amdgpu_vce_resume()
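
amdgpu_vce_suspend() refuses to power down while any session handle is still live, and amdgpu_vce_resume() re-populates the VCPU buffer object: reserve and kmap it, then copy the firmware image (everything past the common header) back in with memcpy_toio(), since the BO sits in VRAM. A hedged sketch of the resume path; using hdr->ucode_array_offset_bytes for the elided 'offset' computation is an assumption:

/* Sketch of the amdgpu_vce_resume() copy shown in lines 297-320 above. */
static int vce_resume_sketch(struct amdgpu_device *adev)
{
	const struct common_firmware_header *hdr;
	unsigned int offset;
	void *cpu_addr;
	int r;

	if (adev->vce.vcpu_bo == NULL)
		return -EINVAL;

	r = amdgpu_bo_reserve(adev->vce.vcpu_bo, false);
	if (r)
		return r;

	r = amdgpu_bo_kmap(adev->vce.vcpu_bo, &cpu_addr);
	if (r) {
		amdgpu_bo_unreserve(adev->vce.vcpu_bo);
		return r;
	}

	hdr = (const struct common_firmware_header *)adev->vce.fw->data;
	offset = le32_to_cpu(hdr->ucode_array_offset_bytes);	/* assumed */
	/* the BO lives in VRAM, so use the io-safe copy */
	memcpy_toio(cpu_addr, adev->vce.fw->data + offset,
		    adev->vce.fw->size - offset);

	amdgpu_bo_kunmap(adev->vce.vcpu_bo);
	amdgpu_bo_unreserve(adev->vce.vcpu_bo);
	return 0;
}
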
335 container_of(work, struct amdgpu_device, vce.idle_work.work); in amdgpu_vce_idle_work_handler()
338 for (i = 0; i < adev->vce.num_rings; i++) in amdgpu_vce_idle_work_handler()
339 count += amdgpu_fence_count_emitted(&adev->vce.ring[i]); in amdgpu_vce_idle_work_handler()
352 schedule_delayed_work(&adev->vce.idle_work, VCE_IDLE_TIMEOUT); in amdgpu_vce_idle_work_handler()
371 mutex_lock(&adev->vce.idle_mutex); in amdgpu_vce_ring_begin_use()
372 set_clocks = !cancel_delayed_work_sync(&adev->vce.idle_work); in amdgpu_vce_ring_begin_use()
385 mutex_unlock(&adev->vce.idle_mutex); in amdgpu_vce_ring_begin_use()
398 schedule_delayed_work(&ring->adev->vce.idle_work, VCE_IDLE_TIMEOUT); in amdgpu_vce_ring_end_use()
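
Lines 335-398 implement the usual amdgpu idle power-gating dance: the delayed work counts fences still emitted on the VCE rings and gates the block if it finds none, amdgpu_vce_ring_begin_use() cancels that work (ungating only when the cancel found nothing pending, i.e. the block was already gated), and amdgpu_vce_ring_end_use() re-arms it. A condensed sketch of the pattern; the actual clock/power-gating calls are not in the listing, so a hypothetical vce_set_clocks_sketch() stands in for them:

static void vce_set_clocks_sketch(struct amdgpu_device *adev, bool on)
{
	/* hypothetical placeholder for the elided clock/power gating */
}

static void vce_idle_work_sketch(struct work_struct *work)
{
	struct amdgpu_device *adev =
		container_of(work, struct amdgpu_device, vce.idle_work.work);
	unsigned int i, count = 0;

	for (i = 0; i < adev->vce.num_rings; i++)
		count += amdgpu_fence_count_emitted(&adev->vce.ring[i]);

	if (count == 0)
		vce_set_clocks_sketch(adev, false);	/* nothing in flight */
	else
		schedule_delayed_work(&adev->vce.idle_work, VCE_IDLE_TIMEOUT);
}

static void vce_ring_begin_use_sketch(struct amdgpu_ring *ring)
{
	struct amdgpu_device *adev = ring->adev;
	bool set_clocks;

	mutex_lock(&adev->vce.idle_mutex);
	/* work not pending means the idle handler already gated the block */
	set_clocks = !cancel_delayed_work_sync(&adev->vce.idle_work);
	if (set_clocks)
		vce_set_clocks_sketch(adev, true);
	mutex_unlock(&adev->vce.idle_mutex);
}

static void vce_ring_end_use_sketch(struct amdgpu_ring *ring)
{
	schedule_delayed_work(&ring->adev->vce.idle_work, VCE_IDLE_TIMEOUT);
}
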
411 struct amdgpu_ring *ring = &adev->vce.ring[0]; in amdgpu_vce_free_handles()
414 uint32_t handle = atomic_read(&adev->vce.handles[i]); in amdgpu_vce_free_handles()
416 if (!handle || adev->vce.filp[i] != filp) in amdgpu_vce_free_handles()
423 adev->vce.filp[i] = NULL; in amdgpu_vce_free_handles()
424 atomic_set(&adev->vce.handles[i], 0); in amdgpu_vce_free_handles()
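
amdgpu_vce_free_handles() walks the session table when a client's file is closed: every handle still owned by that filp gets a destroy message submitted on the first ring, then its slot is released. A hedged sketch of that loop; the amdgpu_vce_get_destroy_msg() arguments used here (synchronous destroy, no fence returned) are assumed rather than read from the listing:

/* Sketch of the teardown loop shown in lines 411-424 above. */
static void vce_free_handles_sketch(struct amdgpu_device *adev,
				    struct drm_file *filp)
{
	struct amdgpu_ring *ring = &adev->vce.ring[0];
	int i, r;

	for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; i++) {
		uint32_t handle = atomic_read(&adev->vce.handles[i]);

		if (!handle || adev->vce.filp[i] != filp)
			continue;

		/* assumed arguments: synchronous destroy, no fence wanted */
		r = amdgpu_vce_get_destroy_msg(ring, handle, false, NULL);
		if (r)
			DRM_ERROR("Error destroying VCE handle (%d)!\n", r);

		adev->vce.filp[i] = NULL;
		atomic_set(&adev->vce.handles[i], 0);
	}
}
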
464 if ((ring->adev->vce.fw_version >> 24) >= 52) in amdgpu_vce_get_create_msg()
479 if ((ring->adev->vce.fw_version >> 24) >= 52) { in amdgpu_vce_get_create_msg()
560 r = amdgpu_job_submit(job, &ring->adev->vce.entity, in amdgpu_vce_get_destroy_msg()
690 if (atomic_read(&p->adev->vce.handles[i]) == handle) { in amdgpu_vce_validate_handle()
691 if (p->adev->vce.filp[i] != p->filp) { in amdgpu_vce_validate_handle()
701 if (!atomic_cmpxchg(&p->adev->vce.handles[i], 0, handle)) { in amdgpu_vce_validate_handle()
702 p->adev->vce.filp[i] = p->filp; in amdgpu_vce_validate_handle()
703 p->adev->vce.img_size[i] = 0; in amdgpu_vce_validate_handle()
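
amdgpu_vce_validate_handle() resolves a session handle coming from a command stream: first it looks for an existing slot holding the same handle and rejects it if that slot belongs to another filp; otherwise it claims a free slot with atomic_cmpxchg(), records the owner and clears the stored image size. A condensed sketch of that allocation pattern; the function signature and the bool 'allocated' out-parameter are simplified assumptions (the driver tracks newly allocated slots differently):

/* Sketch of the handle lookup/claim pattern in lines 690-703 above. */
static int vce_validate_handle_sketch(struct amdgpu_cs_parser *p,
				      uint32_t handle, bool *allocated)
{
	struct amdgpu_device *adev = p->adev;
	int i;

	/* already-open session: must belong to the submitting client */
	for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; i++) {
		if (atomic_read(&adev->vce.handles[i]) == handle) {
			if (adev->vce.filp[i] != p->filp) {
				DRM_ERROR("VCE handle collision detected!\n");
				return -EINVAL;
			}
			return i;
		}
	}

	/* new session: claim the first free slot atomically */
	for (i = 0; i < AMDGPU_MAX_VCE_HANDLES; i++) {
		if (!atomic_cmpxchg(&adev->vce.handles[i], 0, handle)) {
			adev->vce.filp[i] = p->filp;
			adev->vce.img_size[i] = 0;
			*allocated = true;
			return i;
		}
	}

	DRM_ERROR("No more free VCE handles!\n");
	return -EINVAL;
}
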
814 size = &p->adev->vce.img_size[session_idx]; in amdgpu_vce_ring_parse_cs()
944 atomic_set(&p->adev->vce.handles[i], 0); in amdgpu_vce_ring_parse_cs()
1034 atomic_set(&p->adev->vce.handles[i], 0); in amdgpu_vce_ring_parse_cs_vm()
1128 if (ring != &ring->adev->vce.ring[0]) in amdgpu_vce_ring_test_ib()