Lines matching refs: a5xx_gpu
125 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_flush() local
143 if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu)) in a5xx_flush()
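Every function in these hits starts by downcasting the generic adreno_gpu pointer to the chip-specific wrapper. A minimal sketch of that idiom, assuming the usual container_of()-based helper from a5xx_gpu.h (the struct layout is abridged here for illustration):

	struct a5xx_gpu {
		struct adreno_gpu base;		/* embedded generic Adreno state */
		struct msm_ringbuffer *cur_ring;
		/* ... firmware BOs, preemption state, ... */
	};

	#define to_a5xx_gpu(x) container_of(x, struct a5xx_gpu, base)

The a5xx_flush() hit at line 143 then uses this wrapper state to gate the flush: the write pointer is only pushed to hardware when the ring being flushed is current and no preemption is in flight. The same cached cur_ring is consulted by a5xx_idle() (line 883) and returned directly by a5xx_active_ring() (line 1436).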
210 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_submit() local
230 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
231 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
464 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_preempt_start() local
476 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
477 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
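Both a5xx_submit() (lines 230/231) and a5xx_preempt_start() (lines 476/477) emit the per-ring preemption record address into the command stream the same way: the 64-bit IOVA is split into two 32-bit dwords. A hypothetical helper capturing the pattern (emit_qword() is named here for illustration only; the driver open-codes the two OUT_RING() calls):

	static void emit_qword(struct msm_ringbuffer *ring, uint64_t iova)
	{
		OUT_RING(ring, lower_32_bits(iova));	/* low dword first */
		OUT_RING(ring, upper_32_bits(iova));
	}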
507 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_ucode_init() local
510 if (!a5xx_gpu->pm4_bo) { in a5xx_ucode_init()
511 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
512 adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova); in a5xx_ucode_init()
514 if (IS_ERR(a5xx_gpu->pm4_bo)) { in a5xx_ucode_init()
515 ret = PTR_ERR(a5xx_gpu->pm4_bo); in a5xx_ucode_init()
516 a5xx_gpu->pm4_bo = NULL; in a5xx_ucode_init()
523 if (!a5xx_gpu->pfp_bo) { in a5xx_ucode_init()
524 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
525 adreno_gpu->fw[ADRENO_FW_PFP], &a5xx_gpu->pfp_iova); in a5xx_ucode_init()
527 if (IS_ERR(a5xx_gpu->pfp_bo)) { in a5xx_ucode_init()
528 ret = PTR_ERR(a5xx_gpu->pfp_bo); in a5xx_ucode_init()
529 a5xx_gpu->pfp_bo = NULL; in a5xx_ucode_init()
537 REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova); in a5xx_ucode_init()
540 REG_A5XX_CP_PFP_INSTR_BASE_HI, a5xx_gpu->pfp_iova); in a5xx_ucode_init()
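Assembled from the fragments above, a5xx_ucode_init() creates each firmware buffer lazily and caches it in the wrapper; the PM4 path (lines 510-516) and the PFP path (lines 523-529) are identical. A condensed sketch of the PM4 side, assuming adreno_fw_create_bo() returns a GEM object or an ERR_PTR and that the function bails out with ret on failure (the return itself is not captured by this search):

	if (!a5xx_gpu->pm4_bo) {
		a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu,
			adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova);

		if (IS_ERR(a5xx_gpu->pm4_bo)) {
			ret = PTR_ERR(a5xx_gpu->pm4_bo);
			a5xx_gpu->pm4_bo = NULL;	/* reset so a retry starts clean */
			return ret;
		}
	}

Once both buffers exist, the hits at lines 537 and 540 program their IOVAs into the CP_ME_INSTR_BASE and CP_PFP_INSTR_BASE register pairs.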
837 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_destroy() local
843 if (a5xx_gpu->pm4_bo) { in a5xx_destroy()
844 if (a5xx_gpu->pm4_iova) in a5xx_destroy()
845 msm_gem_put_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
846 drm_gem_object_put_unlocked(a5xx_gpu->pm4_bo); in a5xx_destroy()
849 if (a5xx_gpu->pfp_bo) { in a5xx_destroy()
850 if (a5xx_gpu->pfp_iova) in a5xx_destroy()
851 msm_gem_put_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
852 drm_gem_object_put_unlocked(a5xx_gpu->pfp_bo); in a5xx_destroy()
855 if (a5xx_gpu->gpmu_bo) { in a5xx_destroy()
856 if (a5xx_gpu->gpmu_iova) in a5xx_destroy()
857 msm_gem_put_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
858 drm_gem_object_put_unlocked(a5xx_gpu->gpmu_bo); in a5xx_destroy()
862 kfree(a5xx_gpu); in a5xx_destroy()
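The destructor repeats the same two-step teardown for each cached buffer (pm4, pfp, gpmu): release the IOVA mapping if one was ever set up, then drop the GEM reference. A sketch of that repeated shape as a hypothetical helper (put_bo() does not exist in the driver; it is named here for illustration):

	static void put_bo(struct drm_gem_object *bo, uint64_t iova,
			struct msm_gem_address_space *aspace)
	{
		if (!bo)
			return;
		if (iova)
			msm_gem_put_iova(bo, aspace);	/* unmap from the GPU address space */
		drm_gem_object_put_unlocked(bo);	/* drop the GEM reference */
	}

The final kfree() at line 862 then frees the wrapper allocated in a5xx_gpu_init().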
881 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_idle() local
883 if (ring != a5xx_gpu->cur_ring) { in a5xx_idle()
1434 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_active_ring() local
1436 return a5xx_gpu->cur_ring; in a5xx_active_ring()
1495 struct a5xx_gpu *a5xx_gpu = NULL; in a5xx_gpu_init() local
1505 a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL); in a5xx_gpu_init()
1506 if (!a5xx_gpu) in a5xx_gpu_init()
1509 adreno_gpu = &a5xx_gpu->base; in a5xx_gpu_init()
1515 a5xx_gpu->lm_leakage = 0x4E001A; in a5xx_gpu_init()
1521 a5xx_destroy(&(a5xx_gpu->base.base)); in a5xx_gpu_init()
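The constructor hits show the usual allocate-then-unwind shape: zero-allocate the wrapper, point adreno_gpu at the embedded base, fill in chip-specific defaults such as the LM leakage value, and on any later failure tear down through a5xx_destroy() so partially created buffers are released too, not just the allocation. A condensed sketch, with the hardware bring-up between allocation and the error path elided (ret is assumed to come from an init call not captured by this search):

	a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL);
	if (!a5xx_gpu)
		return ERR_PTR(-ENOMEM);

	adreno_gpu = &a5xx_gpu->base;
	a5xx_gpu->lm_leakage = 0x4E001A;	/* default limits-management leakage */

	/* ... bring-up elided ... */
	if (ret) {
		a5xx_destroy(&a5xx_gpu->base.base);
		return ERR_PTR(ret);
	}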