Lines matching refs: a6xx_gpu (each entry shows the source line number, the matching line, and the enclosing function; "local" and "argument" mark the hits that declare or receive the name)

18 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in _a6xx_check_idle() local
21 if (!a6xx_gmu_isidle(&a6xx_gpu->gmu)) in _a6xx_check_idle()
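Most of the hits in this listing start with the same upcast: to_a6xx_gpu() converts the embedded struct adreno_gpu back into the wrapping struct a6xx_gpu. A minimal standalone sketch of that container_of pattern, with illustrative struct members rather than the driver's real layout:

    #include <stddef.h>
    #include <stdio.h>

    struct adreno_gpu { int chip_id; };

    struct a6xx_gpu {
        struct adreno_gpu base;     /* embedded base object, as in the driver */
        int has_whereami;
    };

    /* container_of: recover the wrapper from a pointer to its embedded member. */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))
    #define to_a6xx_gpu(x) container_of(x, struct a6xx_gpu, base)

    int main(void)
    {
        struct a6xx_gpu gpu = { .base = { .chip_id = 0x630 } };
        struct adreno_gpu *adreno_gpu = &gpu.base;

        /* The same conversion the listing shows at the top of most functions. */
        struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

        printf("chip 0x%x, has_whereami=%d\n",
               a6xx_gpu->base.chip_id, a6xx_gpu->has_whereami);
        return 0;
    }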
55 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_flush() local
60 if (a6xx_gpu->has_whereami && !adreno_gpu->base.hw_apriv) { in a6xx_flush()
61 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_flush() local
64 OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring))); in a6xx_flush()
65 OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring))); in a6xx_flush()
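Lines 60-65 are the WHERE_AM_I emission: when the firmware supports it and expanded APRIV is not in use, the flush path asks the CP to write its read pointer to a per-ring slot whose GPU address comes from shadowptr(). A standalone sketch of that address arithmetic and the 32-bit split, assuming one u32 slot per ring (the slot size is an assumption here):

    #include <stdint.h>
    #include <stdio.h>

    /* Assumed layout: one 32-bit rptr slot per ringbuffer inside the shadow BO. */
    static uint64_t shadowptr(uint64_t shadow_iova, unsigned int ring_id)
    {
        return shadow_iova + ring_id * sizeof(uint32_t);
    }

    int main(void)
    {
        uint64_t iova = shadowptr(0x100001000ull, 2);

        /* The flush code splits the 64-bit address into two ring words,
         * matching the lower_32_bits()/upper_32_bits() calls in the listing. */
        printf("lo=0x%08x hi=0x%08x\n",
               (unsigned int)(iova & 0xffffffff), (unsigned int)(iova >> 32));
        return 0;
    }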
95 static void a6xx_set_pagetable(struct a6xx_gpu *a6xx_gpu, in a6xx_set_pagetable() argument
102 if (ctx == a6xx_gpu->cur_ctx) in a6xx_set_pagetable()
135 a6xx_gpu->cur_ctx = ctx; in a6xx_set_pagetable()
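Lines 102 and 135 bracket a small cache: if the submitting context's pagetable is already current, the switch is skipped entirely; otherwise the switch packets are emitted and cur_ctx is updated so back-to-back submits from the same context stay cheap. A reduced sketch of that guard (the context type and the packet emission are placeholders):

    struct submit_ctx;                  /* stand-in for the driver's ctx type */

    struct pt_state {
        struct submit_ctx *cur_ctx;
    };

    /* Returns 1 if a pagetable switch was emitted, 0 if it could be skipped. */
    static int set_pagetable(struct pt_state *st, struct submit_ctx *ctx)
    {
        if (ctx == st->cur_ctx)         /* already current: nothing to emit */
            return 0;

        /* ... emit the CP packets that reprogram the pagetable here ... */

        st->cur_ctx = ctx;              /* remember for the next submit */
        return 1;
    }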
143 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_submit() local
147 a6xx_set_pagetable(a6xx_gpu, ring, submit->queue->ctx); in a6xx_submit()
206 gmu_read64(&a6xx_gpu->gmu, REG_A6XX_GMU_ALWAYS_ON_COUNTER_L, in a6xx_submit()
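The gmu_read64() hit at line 206 samples a GMU always-on counter as a _L/_H register pair. Presumably the helper just composes the two 32-bit reads; a sketch of that composition, with the register-read callback as a stand-in:

    #include <stdint.h>

    /* Assumed behaviour of a read64-style helper: read the low word, then the
     * high word, and compose them into one 64-bit value. */
    static uint64_t read64(uint32_t (*rd)(unsigned int reg),
                           unsigned int lo_reg, unsigned int hi_reg)
    {
        uint32_t lo = rd(lo_reg);
        uint32_t hi = rd(hi_reg);

        return ((uint64_t)hi << 32) | lo;
    }

Read this way, the value can tear if the low word rolls over between the two reads; for a timestamp fed to tracing that is usually tolerable.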
430 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_set_hwcg() local
431 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_set_hwcg()
522 static void a6xx_ucode_check_version(struct a6xx_gpu *a6xx_gpu, in a6xx_ucode_check_version() argument
539 a6xx_gpu->has_whereami = true; in a6xx_ucode_check_version()
541 a6xx_gpu->has_whereami = true; in a6xx_ucode_check_version()
549 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_ucode_init() local
551 if (!a6xx_gpu->sqe_bo) { in a6xx_ucode_init()
552 a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu, in a6xx_ucode_init()
553 adreno_gpu->fw[ADRENO_FW_SQE], &a6xx_gpu->sqe_iova); in a6xx_ucode_init()
555 if (IS_ERR(a6xx_gpu->sqe_bo)) { in a6xx_ucode_init()
556 int ret = PTR_ERR(a6xx_gpu->sqe_bo); in a6xx_ucode_init()
558 a6xx_gpu->sqe_bo = NULL; in a6xx_ucode_init()
565 msm_gem_object_set_name(a6xx_gpu->sqe_bo, "sqefw"); in a6xx_ucode_init()
566 a6xx_ucode_check_version(a6xx_gpu, a6xx_gpu->sqe_bo); in a6xx_ucode_init()
570 REG_A6XX_CP_SQE_INSTR_BASE_HI, a6xx_gpu->sqe_iova); in a6xx_ucode_init()
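Lines 551-558 show the lazy creation of the SQE firmware BO together with the kernel's ERR_PTR convention: the helper returns either a valid object pointer or a pointer that encodes a negative errno, distinguished with IS_ERR()/PTR_ERR(). A standalone sketch of that convention (the macros below are simplified reimplementations, not the kernel's err.h):

    #include <stdio.h>

    #define MAX_ERRNO   4095

    /* Simplified stand-ins for the kernel's err.h helpers. */
    #define ERR_PTR(err)    ((void *)(long)(err))
    #define PTR_ERR(ptr)    ((long)(ptr))
    #define IS_ERR(ptr)     ((unsigned long)(ptr) >= (unsigned long)-MAX_ERRNO)

    /* Hypothetical allocator that fails with -ENOMEM (-12) on request. */
    static void *create_fw_bo(int fail)
    {
        static int dummy_bo;

        return fail ? ERR_PTR(-12) : (void *)&dummy_bo;
    }

    int main(void)
    {
        void *sqe_bo = create_fw_bo(1);

        /* Same shape as the error path at lines 555-558. */
        if (IS_ERR(sqe_bo)) {
            long ret = PTR_ERR(sqe_bo);

            sqe_bo = NULL;
            printf("could not create firmware BO: %ld\n", ret);
            return 1;
        }
        return 0;
    }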
604 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_hw_init() local
608 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in a6xx_hw_init()
797 if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) { in a6xx_hw_init()
798 if (!a6xx_gpu->shadow_bo) { in a6xx_hw_init()
799 a6xx_gpu->shadow = msm_gem_kernel_new_locked(gpu->dev, in a6xx_hw_init()
802 gpu->aspace, &a6xx_gpu->shadow_bo, in a6xx_hw_init()
803 &a6xx_gpu->shadow_iova); in a6xx_hw_init()
805 if (IS_ERR(a6xx_gpu->shadow)) in a6xx_hw_init()
806 return PTR_ERR(a6xx_gpu->shadow); in a6xx_hw_init()
811 shadowptr(a6xx_gpu, gpu->rb[0])); in a6xx_hw_init()
815 a6xx_gpu->cur_ring = gpu->rb[0]; in a6xx_hw_init()
817 a6xx_gpu->cur_ctx = NULL; in a6xx_hw_init()
861 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in a6xx_hw_init()
863 if (a6xx_gpu->gmu.legacy) { in a6xx_hw_init()
865 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER); in a6xx_hw_init()
884 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_recover() local
900 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 0); in a6xx_recover()
967 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_fault_detect_irq() local
976 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1); in a6xx_fault_detect_irq()
1028 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_pm_resume() local
1035 ret = a6xx_gmu_resume(a6xx_gpu); in a6xx_pm_resume()
1047 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_pm_suspend() local
1053 return a6xx_gmu_stop(a6xx_gpu); in a6xx_pm_suspend()
1059 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_get_timestamp() local
1062 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in a6xx_get_timestamp()
1067 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in a6xx_get_timestamp()
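Lines 1062 and 1067 (like 608 and 861 in a6xx_hw_init) show the same bracket: take the GMU's "GPU set" out-of-band vote before the CPU touches GPU registers and release it afterwards, so the GMU keeps the GPU powered for the duration. The shape, as a non-runnable fragment with the actual register access elided:

    a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);

    /* ... read or program GPU registers while the vote is held ... */

    a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET);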
1074 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_active_ring() local
1076 return a6xx_gpu->cur_ring; in a6xx_active_ring()
1082 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_destroy() local
1084 if (a6xx_gpu->sqe_bo) { in a6xx_destroy()
1085 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_destroy()
1086 drm_gem_object_put(a6xx_gpu->sqe_bo); in a6xx_destroy()
1089 if (a6xx_gpu->shadow_bo) { in a6xx_destroy()
1090 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->aspace); in a6xx_destroy()
1091 drm_gem_object_put(a6xx_gpu->shadow_bo); in a6xx_destroy()
1094 a6xx_gmu_remove(a6xx_gpu); in a6xx_destroy()
1097 kfree(a6xx_gpu); in a6xx_destroy()
1103 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gpu_busy() local
1108 if (pm_runtime_get_if_in_use(a6xx_gpu->gmu.dev) == 0) in a6xx_gpu_busy()
1111 busy_cycles = gmu_read64(&a6xx_gpu->gmu, in a6xx_gpu_busy()
1120 pm_runtime_put(a6xx_gpu->gmu.dev); in a6xx_gpu_busy()
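Lines 1108-1120 show the sampling guard for busy accounting: pm_runtime_get_if_in_use() returns 0 when the GMU is not currently in use, in which case the driver skips the counter read rather than waking the hardware just to sample it. A kernel-style sketch of that guard, with the counter read elided and the zero fallback an assumption about intent:

    #include <linux/pm_runtime.h>
    #include <linux/types.h>

    /* Sketch only: don't wake the GMU just to sample a busy counter. */
    static u64 sample_busy_cycles(struct device *gmu_dev)
    {
        u64 cycles;

        if (pm_runtime_get_if_in_use(gmu_dev) == 0)
            return 0;               /* not powered: report nothing new */

        cycles = 0;                 /* ... gmu_read64() of the busy counter ... */

        pm_runtime_put(gmu_dev);    /* drop the usage count we just took */
        return cycles;
    }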
1145 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_get_rptr() local
1147 if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) in a6xx_get_rptr()
1148 return a6xx_gpu->shadow[ring->id]; in a6xx_get_rptr()
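Lines 1147-1148 are the read side of the shadow rptr: with expanded APRIV or WHERE_AM_I-capable firmware, the ring's read pointer comes from the CPU-visible shadow array (written by the CP) instead of an MMIO register read. A standalone sketch of that gate, with the register fallback as a stand-in callback:

    #include <stdint.h>

    struct ring { unsigned int id; };

    /* Assumed shape: shadow is the CPU mapping of the shadow BO, one u32 slot
     * per ringbuffer (matching the shadowptr() sketch above). */
    static uint32_t get_rptr(int hw_apriv, int has_whereami,
                             const uint32_t *shadow, const struct ring *ring,
                             uint32_t (*read_rptr_reg)(const struct ring *))
    {
        if (hw_apriv || has_whereami)
            return shadow[ring->id];    /* CP updated it via CP_WHERE_AM_I */

        return read_rptr_reg(ring);     /* otherwise fall back to MMIO */
    }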
1188 struct a6xx_gpu *a6xx_gpu; in a6xx_gpu_init() local
1193 a6xx_gpu = kzalloc(sizeof(*a6xx_gpu), GFP_KERNEL); in a6xx_gpu_init()
1194 if (!a6xx_gpu) in a6xx_gpu_init()
1197 adreno_gpu = &a6xx_gpu->base; in a6xx_gpu_init()
1214 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
1224 ret = a6xx_gmu_init(a6xx_gpu, node); in a6xx_gpu_init()
1226 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
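Lines 1193-1226 show the init/teardown pairing: the wrapper is kzalloc'd once, and any later failure (for example a6xx_gmu_init() failing at line 1224) hands the embedded msm_gpu at &a6xx_gpu->base.base to a6xx_destroy(), which recovers the wrapper and releases whatever was set up. A standalone sketch of that round trip (struct contents are illustrative):

    #include <stddef.h>
    #include <stdlib.h>

    struct msm_gpu { int dummy; };
    struct adreno_gpu { struct msm_gpu base; };
    struct a6xx_gpu { struct adreno_gpu base; };

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    /* Mirrors a6xx_destroy(): take the innermost embedded msm_gpu, recover the
     * wrapper, release whatever exists, then free the whole allocation. */
    static void destroy(struct msm_gpu *gpu)
    {
        struct adreno_gpu *adreno_gpu = container_of(gpu, struct adreno_gpu, base);
        struct a6xx_gpu *a6xx_gpu = container_of(adreno_gpu, struct a6xx_gpu, base);

        free(a6xx_gpu);
    }

    /* Mirrors the error paths at lines 1214/1226: on any failure after the
     * allocation, hand the embedded base to the common destroy and bail out. */
    static struct msm_gpu *init(int fail)
    {
        struct a6xx_gpu *a6xx_gpu = calloc(1, sizeof(*a6xx_gpu));

        if (!a6xx_gpu)
            return NULL;

        if (fail) {
            destroy(&a6xx_gpu->base.base);
            return NULL;
        }
        return &a6xx_gpu->base.base;
    }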