Lines matching refs: a5xx_gpu

24 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_flush() local
42 if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu)) in a5xx_flush()
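
These first matches show the pattern repeated throughout the file: to_a5xx_gpu() is a container_of()-style downcast from the generic adreno_gpu to the chip-specific struct a5xx_gpu, and the hardware write pointer is only touched when this ring is the one the CP is executing and no preemption is in flight. A minimal sketch of how that guard sits in a5xx_flush(), assuming the upstream shape of this era (get_wptr() is the file's local wptr-wrapping helper; locking details may differ between kernel versions):

static void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        unsigned long flags;
        uint32_t wptr;

        /* Update the software copy of the write pointer under the ring lock */
        spin_lock_irqsave(&ring->lock, flags);
        ring->cur = ring->next;
        wptr = get_wptr(ring);
        spin_unlock_irqrestore(&ring->lock, flags);

        /* Make sure everything is posted before making a decision */
        mb();

        /*
         * Only poke REG_A5XX_CP_RB_WPTR if this ring is current and no
         * preemption is pending; otherwise the preemption code restores
         * the pointer itself when it switches back to this ring.
         */
        if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu))
                gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);
}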
110 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_submit() local
130 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
131 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
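
In a5xx_submit() these two writes are the address dwords of the CP_CONTEXT_SWITCH_YIELD packet emitted after the command stream. Per the driver's own comment on this packet, a non-zero address tells the CP where to write the packet's data dword when a preemption completes; here it points at the per-ring preemption record. A hedged sketch of the whole packet (the trailing dword values follow the usual upstream pattern and may differ in this tree):

/* Yield the floor on command completion */
OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
/* Address the CP writes the data dword to on preemption complete */
OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id]));
OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id]));
/* Data value to write to that address */
OUT_RING(ring, 0x01);
/* Bit 0 set: raise an interrupt when the preemption completes */
OUT_RING(ring, 0x01);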
371 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_preempt_start() local
383 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
384 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
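
a5xx_preempt_start() emits the same yield sequence, but as a priming submission: it is run once per ring so that the CP always finds a valid preemption record when it switches to a ring that has never executed real work. The only difference from the a5xx_submit() case is that the ring is named directly via ring->id. A short sketch, assuming the packet mirrors the one above:

OUT_PKT7(ring, CP_YIELD_ENABLE, 1);
OUT_RING(ring, 0x01);

OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id]));
OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id]));
OUT_RING(ring, 0x01);
OUT_RING(ring, 0x01);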
414 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_ucode_init() local
417 if (!a5xx_gpu->pm4_bo) { in a5xx_ucode_init()
418 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
419 adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova); in a5xx_ucode_init()
422 if (IS_ERR(a5xx_gpu->pm4_bo)) { in a5xx_ucode_init()
423 ret = PTR_ERR(a5xx_gpu->pm4_bo); in a5xx_ucode_init()
424 a5xx_gpu->pm4_bo = NULL; in a5xx_ucode_init()
430 msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw"); in a5xx_ucode_init()
433 if (!a5xx_gpu->pfp_bo) { in a5xx_ucode_init()
434 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
435 adreno_gpu->fw[ADRENO_FW_PFP], &a5xx_gpu->pfp_iova); in a5xx_ucode_init()
437 if (IS_ERR(a5xx_gpu->pfp_bo)) { in a5xx_ucode_init()
438 ret = PTR_ERR(a5xx_gpu->pfp_bo); in a5xx_ucode_init()
439 a5xx_gpu->pfp_bo = NULL; in a5xx_ucode_init()
445 msm_gem_object_set_name(a5xx_gpu->pfp_bo, "pfpfw"); in a5xx_ucode_init()
449 REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova); in a5xx_ucode_init()
452 REG_A5XX_CP_PFP_INSTR_BASE_HI, a5xx_gpu->pfp_iova); in a5xx_ucode_init()
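
The a5xx_ucode_init() matches trace a lazy-allocation pattern for the two CP microcode blobs: each buffer object is created only on first init, the pointer is reset to NULL on failure so a later retry does not mistake the stale ERR_PTR for a valid BO, the object is named for debug output, and finally the CP is pointed at the firmware through a 64-bit LO/HI register pair. One blob's worth of that pattern, reconstructed from the matches above (the PFP side is identical with pfp_bo, pfp_iova, "pfpfw" and the PFP registers):

if (!a5xx_gpu->pm4_bo) {
        a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu,
                adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova);

        if (IS_ERR(a5xx_gpu->pm4_bo)) {
                ret = PTR_ERR(a5xx_gpu->pm4_bo);
                /* Clear the ERR_PTR so a retry can attempt the
                 * allocation again */
                a5xx_gpu->pm4_bo = NULL;
                return ret;
        }

        msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw");
}

/* Tell the CP microengine where to fetch its firmware; the 64-bit
 * iova is split across the LO/HI pair */
gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO,
        REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova);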
765 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_destroy() local
771 if (a5xx_gpu->pm4_bo) { in a5xx_destroy()
772 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
773 drm_gem_object_put_unlocked(a5xx_gpu->pm4_bo); in a5xx_destroy()
776 if (a5xx_gpu->pfp_bo) { in a5xx_destroy()
777 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
778 drm_gem_object_put_unlocked(a5xx_gpu->pfp_bo); in a5xx_destroy()
781 if (a5xx_gpu->gpmu_bo) { in a5xx_destroy()
782 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
783 drm_gem_object_put_unlocked(a5xx_gpu->gpmu_bo); in a5xx_destroy()
787 kfree(a5xx_gpu); in a5xx_destroy()
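
a5xx_destroy() unwinds each buffer in two steps: unpin the iova mapping from the GPU address space, then drop the GEM reference so the object can be freed. Every BO is NULL-checked because initialization can fail before any given buffer exists. (drm_gem_object_put_unlocked() was renamed to drm_gem_object_put() upstream around v5.9, which roughly dates this listing.) One arm of the pattern:

if (a5xx_gpu->pm4_bo) {
        /* Drop the GPU address space mapping first... */
        msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace);
        /* ...then release the reference taken at creation */
        drm_gem_object_put_unlocked(a5xx_gpu->pm4_bo);
}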
806 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_idle() local
808 if (ring != a5xx_gpu->cur_ring) { in a5xx_idle()
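
The a5xx_idle() guard encodes an invariant of the preemption design: idling is only meaningful for the ring the CP is actually executing, so being asked about any other ring is a caller bug. Sketched against the upstream shape:

if (ring != a5xx_gpu->cur_ring) {
        WARN(1, "Tried to idle a non-current ringbuffer\n");
        return false;
}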
1346 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_active_ring() local
1348 return a5xx_gpu->cur_ring; in a5xx_active_ring()
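
a5xx_active_ring() is the accessor the core msm_gpu code uses to learn which ring is current; with preemption the active ring is driver state (cur_ring) rather than a fixed ring 0. Judging by the matches, the whole function is just:

static struct msm_ringbuffer *a5xx_active_ring(struct msm_gpu *gpu)
{
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);

        return a5xx_gpu->cur_ring;
}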
1417 struct a5xx_gpu *a5xx_gpu = NULL; in a5xx_gpu_init() local
1427 a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL); in a5xx_gpu_init()
1428 if (!a5xx_gpu) in a5xx_gpu_init()
1431 adreno_gpu = &a5xx_gpu->base; in a5xx_gpu_init()
1437 a5xx_gpu->lm_leakage = 0x4E001A; in a5xx_gpu_init()
1443 a5xx_destroy(&(a5xx_gpu->base.base)); in a5xx_gpu_init()
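
Finally, a5xx_gpu_init() shows the standard allocate/embed/unwind shape: struct a5xx_gpu embeds adreno_gpu, which embeds msm_gpu, so a single kzalloc() covers all three levels and the error path funnels through a5xx_destroy() on the innermost base. A hedged sketch of the shape suggested by the matches (dev, pdev, funcs and the ring count are taken from the surrounding upstream function and may differ here; lm_leakage is a limits-management calibration constant used by the a5xx power code):

a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL);
if (!a5xx_gpu)
        return ERR_PTR(-ENOMEM);

adreno_gpu = &a5xx_gpu->base;
gpu = &adreno_gpu->base;

a5xx_gpu->lm_leakage = 0x4E001A;

ret = adreno_gpu_init(dev, pdev, adreno_gpu, &funcs, 4);
if (ret) {
        a5xx_destroy(&(a5xx_gpu->base.base));
        return ERR_PTR(ret);
}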