Lines Matching refs: a5xx_gpu
24 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in update_shadow_rptr() local
26 if (a5xx_gpu->has_whereami) { in update_shadow_rptr()
28 OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring))); in update_shadow_rptr()
29 OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring))); in update_shadow_rptr()
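
The fragments above come from update_shadow_rptr(), which emits a CP packet asking the firmware to write its current read pointer to a per-ring shadow slot in memory. A minimal reconstruction, assuming the CP_WHERE_AM_I packet header (elided by the listing) and a shadowptr() helper that indexes the shadow buffer by ring id:

	/* Sketch reconstructed from the listing; the OUT_PKT7() header and the
	 * shadowptr() helper are assumptions based on the visible fragments. */
	#define shadowptr(a5xx_gpu, ring) \
		((a5xx_gpu)->shadow_iova + ((ring)->id * sizeof(u32)))

	static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
	{
		struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
		struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);

		if (a5xx_gpu->has_whereami) {
			/* Ask the CP to write its read pointer to the shadow slot */
			OUT_PKT7(ring, CP_WHERE_AM_I, 2);
			OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring)));
			OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring)));
		}
	}
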
37 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_flush() local
62 if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu)) in a5xx_flush()
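
In a5xx_flush() (line 62) the write pointer is only pushed to the hardware when the ring being flushed is the one the CP is currently executing and no preemption is in flight; otherwise the WPTR write could land on the wrong ring. A sketch of that guard, where wptr is the ring write pointer computed earlier in the function and the register write is an assumption (the listing shows only the condition):

	/* Only poke the hardware WPTR if this ring is active and stable; a
	 * preemption in flight means the CP may switch rings underneath us. */
	if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu))
		gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);
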
127 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_submit() local
146 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
147 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
510 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_preempt_start() local
522 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
523 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
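
Both a5xx_submit() (lines 146-147) and a5xx_preempt_start() (lines 522-523) program the per-ring preemption save record address as a low/high dword pair. A sketch of the shared pattern, assuming a PKT4 write to REG_A5XX_CP_CONTEXT_SWITCH_SAVE_ADDR_LO that the listing elides; a5xx_submit() indexes via submit->ring->id instead of ring->id:

	/* Point the CP at the preemption record for this ring so it knows
	 * where to save state on a context switch. The PKT4 header is an
	 * assumption; only the two address dwords appear in the listing. */
	OUT_PKT4(ring, REG_A5XX_CP_CONTEXT_SWITCH_SAVE_ADDR_LO, 2);
	OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id]));
	OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id]));
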
551 static void a5xx_ucode_check_version(struct a5xx_gpu *a5xx_gpu, in a5xx_ucode_check_version() argument
565 a5xx_gpu->has_whereami = true; in a5xx_ucode_check_version()
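
a5xx_ucode_check_version() takes the a5xx_gpu as an argument rather than deriving it from an msm_gpu, and sets has_whereami after inspecting the loaded PFP object. The version test itself is not visible in the listing; a minimal sketch assuming the firmware buffer is mapped and checked by a hypothetical predicate:

	static void a5xx_ucode_check_version(struct a5xx_gpu *a5xx_gpu,
			struct drm_gem_object *obj)
	{
		u32 *buf = msm_gem_get_vaddr(obj);

		if (IS_ERR(buf))
			return;

		/* pfp_supports_whereami() is a hypothetical stand-in for the
		 * real version comparison, which the listing does not show. */
		if (pfp_supports_whereami(buf))
			a5xx_gpu->has_whereami = true;

		msm_gem_put_vaddr(obj);
	}
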
573 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_ucode_load() local
576 if (!a5xx_gpu->pm4_bo) { in a5xx_ucode_load()
577 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_load()
578 adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova); in a5xx_ucode_load()
581 if (IS_ERR(a5xx_gpu->pm4_bo)) { in a5xx_ucode_load()
582 ret = PTR_ERR(a5xx_gpu->pm4_bo); in a5xx_ucode_load()
583 a5xx_gpu->pm4_bo = NULL; in a5xx_ucode_load()
589 msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw"); in a5xx_ucode_load()
592 if (!a5xx_gpu->pfp_bo) { in a5xx_ucode_load()
593 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_load()
594 adreno_gpu->fw[ADRENO_FW_PFP], &a5xx_gpu->pfp_iova); in a5xx_ucode_load()
596 if (IS_ERR(a5xx_gpu->pfp_bo)) { in a5xx_ucode_load()
597 ret = PTR_ERR(a5xx_gpu->pfp_bo); in a5xx_ucode_load()
598 a5xx_gpu->pfp_bo = NULL; in a5xx_ucode_load()
604 msm_gem_object_set_name(a5xx_gpu->pfp_bo, "pfpfw"); in a5xx_ucode_load()
605 a5xx_ucode_check_version(a5xx_gpu, a5xx_gpu->pfp_bo); in a5xx_ucode_load()
608 if (a5xx_gpu->has_whereami) { in a5xx_ucode_load()
609 if (!a5xx_gpu->shadow_bo) { in a5xx_ucode_load()
610 a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in a5xx_ucode_load()
613 gpu->aspace, &a5xx_gpu->shadow_bo, in a5xx_ucode_load()
614 &a5xx_gpu->shadow_iova); in a5xx_ucode_load()
616 if (IS_ERR(a5xx_gpu->shadow)) in a5xx_ucode_load()
617 return PTR_ERR(a5xx_gpu->shadow); in a5xx_ucode_load()
619 msm_gem_object_set_name(a5xx_gpu->shadow_bo, "shadow"); in a5xx_ucode_load()
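
The shadow buffer allocated here holds one u32 read-pointer slot per ring; lines 611-612 (the size and flag arguments to msm_gem_kernel_new()) are elided by the listing. A sketch of the full call, with the size and flags as assumptions:

	/* One shadow rptr slot per ring. The size and flag arguments are
	 * assumptions; the listing shows only the first and last arguments. */
	a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev,
		sizeof(u32) * gpu->nr_rings,
		MSM_BO_WC | MSM_BO_MAP_PRIV,
		gpu->aspace, &a5xx_gpu->shadow_bo,
		&a5xx_gpu->shadow_iova);

	if (IS_ERR(a5xx_gpu->shadow))
		return PTR_ERR(a5xx_gpu->shadow);
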
686 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_hw_init() local
918 gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO, a5xx_gpu->pm4_iova); in a5xx_hw_init()
919 gpu_write64(gpu, REG_A5XX_CP_PFP_INSTR_BASE_LO, a5xx_gpu->pfp_iova); in a5xx_hw_init()
934 if (a5xx_gpu->shadow_bo) { in a5xx_hw_init()
936 shadowptr(a5xx_gpu, gpu->rb[0])); in a5xx_hw_init()
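
During a5xx_hw_init() the microcode iovas pinned in a5xx_ucode_load() are handed to the CP (lines 918-919), and if the shadow buffer exists the CP is told where to mirror its read pointer. A sketch of the shadow setup; the register name is an assumption, since line 935 is elided by the listing:

	/* Configure the RPTR shadow if present. Ring 0 is used because the
	 * CP starts executing on the first ring. */
	if (a5xx_gpu->shadow_bo) {
		gpu_write64(gpu, REG_A5XX_CP_RB_RPTR_ADDR_LO,
			shadowptr(a5xx_gpu, gpu->rb[0]));
	}
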
1026 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_destroy() local
1032 if (a5xx_gpu->pm4_bo) { in a5xx_destroy()
1033 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
1034 drm_gem_object_put(a5xx_gpu->pm4_bo); in a5xx_destroy()
1037 if (a5xx_gpu->pfp_bo) { in a5xx_destroy()
1038 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
1039 drm_gem_object_put(a5xx_gpu->pfp_bo); in a5xx_destroy()
1042 if (a5xx_gpu->gpmu_bo) { in a5xx_destroy()
1043 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
1044 drm_gem_object_put(a5xx_gpu->gpmu_bo); in a5xx_destroy()
1047 if (a5xx_gpu->shadow_bo) { in a5xx_destroy()
1048 msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace); in a5xx_destroy()
1049 drm_gem_object_put(a5xx_gpu->shadow_bo); in a5xx_destroy()
1053 kfree(a5xx_gpu); in a5xx_destroy()
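
a5xx_destroy() repeats the same guard/unpin/put idiom for each of the four BOs before freeing the container with kfree(). A hypothetical helper (not in the driver) makes the pattern explicit:

	/* Hypothetical factoring of the repeated teardown idiom shown above;
	 * the driver open-codes this once per BO. */
	static void a5xx_release_bo(struct drm_gem_object *bo,
			struct msm_gem_address_space *aspace)
	{
		if (!bo)
			return;

		msm_gem_unpin_iova(bo, aspace);	/* drop the GPU mapping */
		drm_gem_object_put(bo);		/* drop the object reference */
	}
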
1072 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_idle() local
1074 if (ring != a5xx_gpu->cur_ring) { in a5xx_idle()
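
Idling only makes sense on the ring the CP is currently executing, so a5xx_idle() rejects any other ring. The body of the branch is elided by the listing; a sketch assuming it warns and bails out:

	if (ring != a5xx_gpu->cur_ring) {
		/* Elided body is an assumption: refuse to idle a ring the
		 * CP is not executing. */
		WARN(1, "Tried to idle a non-current ringbuffer\n");
		return false;
	}
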
1390 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_pm_suspend() local
1419 if (a5xx_gpu->has_whereami) in a5xx_pm_suspend()
1421 a5xx_gpu->shadow[i] = 0; in a5xx_pm_suspend()
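
On suspend the shadow values become stale once the GPU powers down, so a5xx_pm_suspend() zeroes them; line 1421 shows the assignment inside what is presumably a loop over the rings. A sketch with the loop filled in as an assumption:

	/* The shadow slots are written by the CP, which is now powering off;
	 * clear them so stale read pointers are not trusted after resume.
	 * The for-loop is an assumption; only the assignment is listed. */
	if (a5xx_gpu->has_whereami)
		for (i = 0; i < gpu->nr_rings; i++)
			a5xx_gpu->shadow[i] = 0;
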
1656 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_active_ring() local
1658 return a5xx_gpu->cur_ring; in a5xx_active_ring()
1674 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_get_rptr() local
1676 if (a5xx_gpu->has_whereami) in a5xx_get_rptr()
1677 return a5xx_gpu->shadow[ring->id]; in a5xx_get_rptr()
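
a5xx_get_rptr() is where the shadow pays off: with WHEREAMI-capable firmware the read pointer comes from memory instead of a register access. A reconstruction, with the register-read fallback as an assumption:

	static uint32_t a5xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
	{
		struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
		struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);

		/* Prefer the memory shadow kept current by CP_WHERE_AM_I */
		if (a5xx_gpu->has_whereami)
			return a5xx_gpu->shadow[ring->id];

		/* Fallback is an assumption: read the CP RPTR register */
		return gpu_read(gpu, REG_A5XX_CP_RB_RPTR);
	}
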
1747 struct a5xx_gpu *a5xx_gpu = NULL; in a5xx_gpu_init() local
1758 a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL); in a5xx_gpu_init()
1759 if (!a5xx_gpu) in a5xx_gpu_init()
1762 adreno_gpu = &a5xx_gpu->base; in a5xx_gpu_init()
1767 a5xx_gpu->lm_leakage = 0x4E001A; in a5xx_gpu_init()
1778 a5xx_destroy(&(a5xx_gpu->base.base)); in a5xx_gpu_init()
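
The init path allocates the outermost container with kzalloc() and hands the embedded adreno_gpu to the common layer; on failure it destroys through the innermost msm_gpu (a5xx_gpu->base.base). The to_a5xx_gpu() downcast used throughout the file is consistent with the usual container_of() idiom; a sketch of the layout the listing implies, with the field list abridged to what actually appears above:

	/* Layout implied by the listing (fields abridged); to_a5xx_gpu() is
	 * assumed to be the standard container_of() downcast. */
	struct a5xx_gpu {
		struct adreno_gpu base;		/* embeds struct msm_gpu as .base */

		struct drm_gem_object *pm4_bo, *pfp_bo, *gpmu_bo, *shadow_bo;
		uint64_t pm4_iova, pfp_iova, shadow_iova;
		uint64_t preempt_iova[MSM_GPU_MAX_RINGS];

		uint32_t *shadow;		/* kernel mapping of shadow_bo */
		struct msm_ringbuffer *cur_ring;
		bool has_whereami;
		u32 lm_leakage;
	};

	#define to_a5xx_gpu(x) container_of(x, struct a5xx_gpu, base)
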