Lines Matching refs:a5xx_gpu
25 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_flush() local
33 if (a5xx_gpu->has_whereami && sync) { in a5xx_flush()
35 OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring))); in a5xx_flush()
36 OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring))); in a5xx_flush()
53 if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu)) in a5xx_flush()
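The flush references above combine the rptr shadow with preemption awareness. A minimal sketch of the surrounding function, reconstructed from the matched lines; get_wptr() and the CP_WHERE_AM_I opcode come from the wider driver and are assumptions here, not lines in this listing:

    static void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring,
            bool sync)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        uint32_t wptr;

        /* Ask the CP to write its read pointer into this ring's shadow slot
         * once it reaches this point in the command stream.
         */
        if (a5xx_gpu->has_whereami && sync) {
            OUT_PKT7(ring, CP_WHERE_AM_I, 2);
            OUT_RING(ring, lower_32_bits(shadowptr(a5xx_gpu, ring)));
            OUT_RING(ring, upper_32_bits(shadowptr(a5xx_gpu, ring)));
        }

        wptr = get_wptr(ring); /* local helper assumed: publishes ring->next
                                * and returns the new write-pointer offset */

        /* Only poke the hardware WPTR for the ring the CP is actually
         * executing, and never while a preemption is in flight.
         */
        if (a5xx_gpu->cur_ring == ring && !a5xx_in_preempt(a5xx_gpu))
            gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);
    }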
119 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_submit() local
139 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
140 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id])); in a5xx_submit()
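In the submit path the per-ring preemption record address is written into the command stream as a yield point, so the CP knows where to save state if it is asked to switch rings. A hedged sketch of that tail end of a5xx_submit; the CP_CONTEXT_SWITCH_YIELD opcode and the two trailing control dwords are assumptions from the a5xx packet set rather than lines in this listing:

        /* Yield point: tell the CP where this ring's preemption record lives */
        OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4);
        OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[submit->ring->id]));
        OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[submit->ring->id]));
        OUT_RING(ring, 0x01); /* allow the CP to yield here */
        OUT_RING(ring, 0x01); /* signal when the switch completes */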
385 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_preempt_start() local
397 OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
398 OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id])); in a5xx_preempt_start()
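a5xx_preempt_start primes each ring with the same yield packet; the only difference from the submit path is that the preemption record is indexed by the ring being bootstrapped rather than by the submitting ring:

        OUT_PKT7(ring, CP_CONTEXT_SWITCH_YIELD, 4); /* opcode assumed, as above */
        OUT_RING(ring, lower_32_bits(a5xx_gpu->preempt_iova[ring->id]));
        OUT_RING(ring, upper_32_bits(a5xx_gpu->preempt_iova[ring->id]));
        /* ... the same two control dwords follow ... */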
426 static void a5xx_ucode_check_version(struct a5xx_gpu *a5xx_gpu, in a5xx_ucode_check_version() argument
440 a5xx_gpu->has_whereami = true; in a5xx_ucode_check_version()
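The version check maps the PFP firmware BO and sets has_whereami only when the microcode is new enough to implement CP_WHERE_AM_I. A sketch of the shape of that check, assuming the usual msm_gem vmap helpers; the actual version decoding is driver-specific and is represented by a hypothetical helper here:

    static void a5xx_ucode_check_version(struct a5xx_gpu *a5xx_gpu,
            struct drm_gem_object *obj)
    {
        u32 *buf = msm_gem_get_vaddr(obj);

        if (IS_ERR(buf))
            return;

        /* Placeholder: the real driver decodes the PFP version dwords here
         * and only enables the feature for sufficiently new microcode.
         */
        if (pfp_supports_whereami(buf)) /* hypothetical helper */
            a5xx_gpu->has_whereami = true;

        msm_gem_put_vaddr(obj);
    }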
448 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_ucode_init() local
451 if (!a5xx_gpu->pm4_bo) { in a5xx_ucode_init()
452 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
453 adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova); in a5xx_ucode_init()
456 if (IS_ERR(a5xx_gpu->pm4_bo)) { in a5xx_ucode_init()
457 ret = PTR_ERR(a5xx_gpu->pm4_bo); in a5xx_ucode_init()
458 a5xx_gpu->pm4_bo = NULL; in a5xx_ucode_init()
464 msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw"); in a5xx_ucode_init()
467 if (!a5xx_gpu->pfp_bo) { in a5xx_ucode_init()
468 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
469 adreno_gpu->fw[ADRENO_FW_PFP], &a5xx_gpu->pfp_iova); in a5xx_ucode_init()
471 if (IS_ERR(a5xx_gpu->pfp_bo)) { in a5xx_ucode_init()
472 ret = PTR_ERR(a5xx_gpu->pfp_bo); in a5xx_ucode_init()
473 a5xx_gpu->pfp_bo = NULL; in a5xx_ucode_init()
479 msm_gem_object_set_name(a5xx_gpu->pfp_bo, "pfpfw"); in a5xx_ucode_init()
480 a5xx_ucode_check_version(a5xx_gpu, a5xx_gpu->pfp_bo); in a5xx_ucode_init()
484 REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova); in a5xx_ucode_init()
487 REG_A5XX_CP_PFP_INSTR_BASE_HI, a5xx_gpu->pfp_iova); in a5xx_ucode_init()
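The microcode init references show a lazy-allocation pattern: each firmware BO is created once, named for debugfs, and its IOVA is reprogrammed into the CP instruction base registers on every init. A condensed sketch for the PM4 image (the PFP image follows the same shape; the _LO register names are inferred from the _HI names in the listing):

    static int a5xx_ucode_init(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
        int ret;

        if (!a5xx_gpu->pm4_bo) {
            a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu,
                adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova);

            if (IS_ERR(a5xx_gpu->pm4_bo)) {
                ret = PTR_ERR(a5xx_gpu->pm4_bo);
                a5xx_gpu->pm4_bo = NULL; /* don't leave an ERR_PTR in the struct */
                return ret;
            }

            msm_gem_object_set_name(a5xx_gpu->pm4_bo, "pm4fw");
        }

        /* ... the PFP image is created and named the same way ... */

        a5xx_ucode_check_version(a5xx_gpu, a5xx_gpu->pfp_bo);

        gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO,
            REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova);
        gpu_write64(gpu, REG_A5XX_CP_PFP_INSTR_BASE_LO,
            REG_A5XX_CP_PFP_INSTR_BASE_HI, a5xx_gpu->pfp_iova);

        return 0;
    }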
540 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_hw_init() local
759 if (!a5xx_gpu->has_whereami && gpu->nr_rings > 1) { in a5xx_hw_init()
764 if (!a5xx_gpu->shadow_bo) { in a5xx_hw_init()
765 a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in a5xx_hw_init()
768 gpu->aspace, &a5xx_gpu->shadow_bo, in a5xx_hw_init()
769 &a5xx_gpu->shadow_iova); in a5xx_hw_init()
771 if (IS_ERR(a5xx_gpu->shadow)) in a5xx_hw_init()
772 return PTR_ERR(a5xx_gpu->shadow); in a5xx_hw_init()
776 REG_A5XX_CP_RB_RPTR_ADDR_HI, shadowptr(a5xx_gpu, gpu->rb[0])); in a5xx_hw_init()
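In hw_init the driver allocates one shadow slot per ring (an array of u32 the CP writes its read pointer into) and points the RPTR address registers at ring 0's slot; if the microcode cannot maintain the shadow, multi-ring operation is dropped instead. A sketch of that block inside a5xx_hw_init, with the BO flags and the _LO register name inferred rather than shown in the listing:

    if (!a5xx_gpu->has_whereami && gpu->nr_rings > 1) {
        /* Old microcode can't keep the shadow: fall back to a single ring
         * (preemption state teardown elided).
         */
        gpu->nr_rings = 1;
    } else if (a5xx_gpu->has_whereami) {
        if (!a5xx_gpu->shadow_bo) {
            a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev,
                sizeof(u32) * gpu->nr_rings,
                MSM_BO_WC | MSM_BO_MAP_PRIV, /* flags assumed */
                gpu->aspace, &a5xx_gpu->shadow_bo,
                &a5xx_gpu->shadow_iova);

            if (IS_ERR(a5xx_gpu->shadow))
                return PTR_ERR(a5xx_gpu->shadow);
        }

        gpu_write64(gpu, REG_A5XX_CP_RB_RPTR_ADDR,
            REG_A5XX_CP_RB_RPTR_ADDR_HI, shadowptr(a5xx_gpu, gpu->rb[0]));
    }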
866 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_destroy() local
872 if (a5xx_gpu->pm4_bo) { in a5xx_destroy()
873 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
874 drm_gem_object_put(a5xx_gpu->pm4_bo); in a5xx_destroy()
877 if (a5xx_gpu->pfp_bo) { in a5xx_destroy()
878 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
879 drm_gem_object_put(a5xx_gpu->pfp_bo); in a5xx_destroy()
882 if (a5xx_gpu->gpmu_bo) { in a5xx_destroy()
883 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
884 drm_gem_object_put(a5xx_gpu->gpmu_bo); in a5xx_destroy()
887 if (a5xx_gpu->shadow_bo) { in a5xx_destroy()
888 msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace); in a5xx_destroy()
889 drm_gem_object_put(a5xx_gpu->shadow_bo); in a5xx_destroy()
893 kfree(a5xx_gpu); in a5xx_destroy()
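Teardown is uniform across the four BOs: each buffer pinned into the GPU address space has its IOVA unpinned before the GEM reference is dropped, and the wrapper struct is freed last. The pattern, shown once:

    if (a5xx_gpu->pm4_bo) {
        msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); /* drop the GPU mapping */
        drm_gem_object_put(a5xx_gpu->pm4_bo);              /* drop the object reference */
    }

    /* ... pfp_bo, gpmu_bo and shadow_bo follow the same three lines ... */

    kfree(a5xx_gpu); /* frees the embedded adreno_gpu/msm_gpu along with it */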
912 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_idle() local
914 if (ring != a5xx_gpu->cur_ring) { in a5xx_idle()
1459 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_active_ring() local
1461 return a5xx_gpu->cur_ring; in a5xx_active_ring()
1491 struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu); in a5xx_get_rptr() local
1493 if (a5xx_gpu->has_whereami) in a5xx_get_rptr()
1494 return a5xx_gpu->shadow[ring->id]; in a5xx_get_rptr()
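The read-pointer accessor is where the shadow pays off: when the microcode maintains it, the driver reads a coherent kernel buffer instead of the hardware register. A sketch; the register-read fallback is an assumption, since the listing only shows the shadow branch:

    static u32 a5xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
    {
        struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
        struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);

        /* One u32 slot per ring, kept current by CP_WHERE_AM_I packets */
        if (a5xx_gpu->has_whereami)
            return a5xx_gpu->shadow[ring->id];

        /* Older microcode: read the register directly (name assumed) */
        return gpu_read(gpu, REG_A5XX_CP_RB_RPTR);
    }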
1561 struct a5xx_gpu *a5xx_gpu = NULL; in a5xx_gpu_init() local
1571 a5xx_gpu = kzalloc(sizeof(*a5xx_gpu), GFP_KERNEL); in a5xx_gpu_init()
1572 if (!a5xx_gpu) in a5xx_gpu_init()
1575 adreno_gpu = &a5xx_gpu->base; in a5xx_gpu_init()
1580 a5xx_gpu->lm_leakage = 0x4E001A; in a5xx_gpu_init()
1586 a5xx_destroy(&(a5xx_gpu->base.base)); in a5xx_gpu_init()
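Taken together, the references outline the driver object itself: struct a5xx_gpu embeds struct adreno_gpu, which in turn embeds struct msm_gpu (hence &a5xx_gpu->base.base in the error path), and to_a5xx_gpu() is the usual container_of downcast. A field sketch limited to what the listing actually touches; the full definition in a5xx_gpu.h has additional members:

    struct a5xx_gpu {
        struct adreno_gpu base;            /* base.base is the msm_gpu */

        struct drm_gem_object *pm4_bo;     /* PM4 microcode */
        uint64_t pm4_iova;
        struct drm_gem_object *pfp_bo;     /* PFP microcode */
        uint64_t pfp_iova;
        struct drm_gem_object *gpmu_bo;    /* GPMU power-management firmware */

        uint32_t lm_leakage;               /* limits-management calibration value */

        struct msm_ringbuffer *cur_ring;   /* ring the CP is currently executing */
        uint64_t preempt_iova[MSM_GPU_MAX_RINGS]; /* per-ring preemption records */

        struct drm_gem_object *shadow_bo;  /* rptr shadow: one u32 per ring */
        uint64_t shadow_iova;
        uint32_t *shadow;

        bool has_whereami;                 /* microcode supports CP_WHERE_AM_I */
    };

    #define to_a5xx_gpu(x) container_of(x, struct a5xx_gpu, base)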