Lines Matching refs: a6xx_gpu

21 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in _a6xx_check_idle() local
24 if (!adreno_has_gmu_wrapper(adreno_gpu) && !a6xx_gmu_isidle(&a6xx_gpu->gmu)) in _a6xx_check_idle()
58 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in update_shadow_rptr() local
61 if (a6xx_gpu->has_whereami && !adreno_gpu->base.hw_apriv) { in update_shadow_rptr()
63 OUT_RING(ring, lower_32_bits(shadowptr(a6xx_gpu, ring))); in update_shadow_rptr()
64 OUT_RING(ring, upper_32_bits(shadowptr(a6xx_gpu, ring))); in update_shadow_rptr()
102 static void a6xx_set_pagetable(struct a6xx_gpu *a6xx_gpu, in a6xx_set_pagetable() argument
105 bool sysprof = refcount_read(&a6xx_gpu->base.base.sysprof_active) > 1; in a6xx_set_pagetable()
110 if (ctx->seqno == a6xx_gpu->base.base.cur_ctx_seqno) in a6xx_set_pagetable()
176 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_submit() local
180 a6xx_set_pagetable(a6xx_gpu, ring, submit->queue->ctx); in a6xx_submit()
701 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_set_hwcg() local
702 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_set_hwcg()
1056 static bool a6xx_ucode_check_version(struct a6xx_gpu *a6xx_gpu, in a6xx_ucode_check_version() argument
1059 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_ucode_check_version()
1091 a6xx_gpu->has_whereami = true; in a6xx_ucode_check_version()
1122 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_ucode_load() local
1124 if (!a6xx_gpu->sqe_bo) { in a6xx_ucode_load()
1125 a6xx_gpu->sqe_bo = adreno_fw_create_bo(gpu, in a6xx_ucode_load()
1126 adreno_gpu->fw[ADRENO_FW_SQE], &a6xx_gpu->sqe_iova); in a6xx_ucode_load()
1128 if (IS_ERR(a6xx_gpu->sqe_bo)) { in a6xx_ucode_load()
1129 int ret = PTR_ERR(a6xx_gpu->sqe_bo); in a6xx_ucode_load()
1131 a6xx_gpu->sqe_bo = NULL; in a6xx_ucode_load()
1138 msm_gem_object_set_name(a6xx_gpu->sqe_bo, "sqefw"); in a6xx_ucode_load()
1139 if (!a6xx_ucode_check_version(a6xx_gpu, a6xx_gpu->sqe_bo)) { in a6xx_ucode_load()
1140 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_ucode_load()
1141 drm_gem_object_put(a6xx_gpu->sqe_bo); in a6xx_ucode_load()
1143 a6xx_gpu->sqe_bo = NULL; in a6xx_ucode_load()
1152 if ((adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) && in a6xx_ucode_load()
1153 !a6xx_gpu->shadow_bo) { in a6xx_ucode_load()
1154 a6xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in a6xx_ucode_load()
1157 gpu->aspace, &a6xx_gpu->shadow_bo, in a6xx_ucode_load()
1158 &a6xx_gpu->shadow_iova); in a6xx_ucode_load()
1160 if (IS_ERR(a6xx_gpu->shadow)) in a6xx_ucode_load()
1161 return PTR_ERR(a6xx_gpu->shadow); in a6xx_ucode_load()
1163 msm_gem_object_set_name(a6xx_gpu->shadow_bo, "shadow"); in a6xx_ucode_load()
1198 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in hw_init() local
1199 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in hw_init()
1204 ret = a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in hw_init()
1390 gpu_write64(gpu, REG_A6XX_CP_SQE_INSTR_BASE, a6xx_gpu->sqe_iova); in hw_init()
1406 if (a6xx_gpu->shadow_bo) { in hw_init()
1408 shadowptr(a6xx_gpu, gpu->rb[0])); in hw_init()
1412 a6xx_gpu->cur_ring = gpu->rb[0]; in hw_init()
1460 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in hw_init()
1462 if (a6xx_gpu->gmu.legacy) { in hw_init()
1464 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER); in hw_init()
1473 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_hw_init() local
1476 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_hw_init()
1478 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_hw_init()
1493 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_recover() local
1494 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_recover()
1510 a6xx_gpu->hung = true; in a6xx_recover()
1563 a6xx_gpu->hung = false; in a6xx_recover()
1670 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_fault_detect_irq() local
1687 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_GMU_PWR_COL_KEEPALIVE, 1); in a6xx_fault_detect_irq()
1740 static void a6xx_llc_deactivate(struct a6xx_gpu *a6xx_gpu) in a6xx_llc_deactivate() argument
1742 llcc_slice_deactivate(a6xx_gpu->llc_slice); in a6xx_llc_deactivate()
1743 llcc_slice_deactivate(a6xx_gpu->htw_llc_slice); in a6xx_llc_deactivate()
1746 static void a6xx_llc_activate(struct a6xx_gpu *a6xx_gpu) in a6xx_llc_activate() argument
1748 struct adreno_gpu *adreno_gpu = &a6xx_gpu->base; in a6xx_llc_activate()
1752 if (IS_ERR(a6xx_gpu->llc_mmio)) in a6xx_llc_activate()
1755 if (!llcc_slice_activate(a6xx_gpu->llc_slice)) { in a6xx_llc_activate()
1756 u32 gpu_scid = llcc_get_slice_id(a6xx_gpu->llc_slice); in a6xx_llc_activate()
1774 if (!llcc_slice_activate(a6xx_gpu->htw_llc_slice)) { in a6xx_llc_activate()
1775 if (!a6xx_gpu->have_mmu500) { in a6xx_llc_activate()
1776 u32 gpuhtw_scid = llcc_get_slice_id(a6xx_gpu->htw_llc_slice); in a6xx_llc_activate()
1790 if (!a6xx_gpu->have_mmu500) { in a6xx_llc_activate()
1791 a6xx_llc_write(a6xx_gpu, in a6xx_llc_activate()
1798 a6xx_llc_rmw(a6xx_gpu, in a6xx_llc_activate()
1806 static void a6xx_llc_slices_destroy(struct a6xx_gpu *a6xx_gpu) in a6xx_llc_slices_destroy() argument
1809 if (adreno_has_gmu_wrapper(&a6xx_gpu->base)) in a6xx_llc_slices_destroy()
1812 llcc_slice_putd(a6xx_gpu->llc_slice); in a6xx_llc_slices_destroy()
1813 llcc_slice_putd(a6xx_gpu->htw_llc_slice); in a6xx_llc_slices_destroy()
1817 struct a6xx_gpu *a6xx_gpu) in a6xx_llc_slices_init() argument
1822 if (adreno_has_gmu_wrapper(&a6xx_gpu->base)) in a6xx_llc_slices_init()
1830 a6xx_gpu->have_mmu500 = (phandle && in a6xx_llc_slices_init()
1834 if (a6xx_gpu->have_mmu500) in a6xx_llc_slices_init()
1835 a6xx_gpu->llc_mmio = NULL; in a6xx_llc_slices_init()
1837 a6xx_gpu->llc_mmio = msm_ioremap(pdev, "cx_mem"); in a6xx_llc_slices_init()
1839 a6xx_gpu->llc_slice = llcc_slice_getd(LLCC_GPU); in a6xx_llc_slices_init()
1840 a6xx_gpu->htw_llc_slice = llcc_slice_getd(LLCC_GPUHTW); in a6xx_llc_slices_init()
1842 if (IS_ERR_OR_NULL(a6xx_gpu->llc_slice) && IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice)) in a6xx_llc_slices_init()
1843 a6xx_gpu->llc_mmio = ERR_PTR(-EINVAL); in a6xx_llc_slices_init()
1908 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gmu_pm_resume() local
1915 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_resume()
1916 ret = a6xx_gmu_resume(a6xx_gpu); in a6xx_gmu_pm_resume()
1917 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_resume()
1923 a6xx_llc_activate(a6xx_gpu); in a6xx_gmu_pm_resume()
1931 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_pm_resume() local
1932 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_pm_resume()
1941 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_pm_resume()
1971 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_pm_resume()
1982 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gmu_pm_suspend() local
1987 a6xx_llc_deactivate(a6xx_gpu); in a6xx_gmu_pm_suspend()
1991 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_suspend()
1992 ret = a6xx_gmu_stop(a6xx_gpu); in a6xx_gmu_pm_suspend()
1993 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gmu_pm_suspend()
1997 if (a6xx_gpu->shadow_bo) in a6xx_gmu_pm_suspend()
1999 a6xx_gpu->shadow[i] = 0; in a6xx_gmu_pm_suspend()
2009 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_pm_suspend() local
2010 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_pm_suspend()
2017 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_pm_suspend()
2031 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_pm_suspend()
2033 if (a6xx_gpu->shadow_bo) in a6xx_pm_suspend()
2035 a6xx_gpu->shadow[i] = 0; in a6xx_pm_suspend()
2045 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gmu_get_timestamp() local
2047 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gmu_get_timestamp()
2050 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET); in a6xx_gmu_get_timestamp()
2054 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_PERFCOUNTER_SET); in a6xx_gmu_get_timestamp()
2056 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gmu_get_timestamp()
2070 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_active_ring() local
2072 return a6xx_gpu->cur_ring; in a6xx_active_ring()
2078 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_destroy() local
2080 if (a6xx_gpu->sqe_bo) { in a6xx_destroy()
2081 msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace); in a6xx_destroy()
2082 drm_gem_object_put(a6xx_gpu->sqe_bo); in a6xx_destroy()
2085 if (a6xx_gpu->shadow_bo) { in a6xx_destroy()
2086 msm_gem_unpin_iova(a6xx_gpu->shadow_bo, gpu->aspace); in a6xx_destroy()
2087 drm_gem_object_put(a6xx_gpu->shadow_bo); in a6xx_destroy()
2090 a6xx_llc_slices_destroy(a6xx_gpu); in a6xx_destroy()
2092 a6xx_gmu_remove(a6xx_gpu); in a6xx_destroy()
2096 kfree(a6xx_gpu); in a6xx_destroy()
2102 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gpu_busy() local
2108 busy_cycles = gmu_read64(&a6xx_gpu->gmu, in a6xx_gpu_busy()
2119 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_gpu_set_freq() local
2121 mutex_lock(&a6xx_gpu->gmu.lock); in a6xx_gpu_set_freq()
2123 mutex_unlock(&a6xx_gpu->gmu.lock); in a6xx_gpu_set_freq()
2130 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_create_address_space() local
2137 if (!IS_ERR_OR_NULL(a6xx_gpu->htw_llc_slice) && in a6xx_create_address_space()
2162 struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu); in a6xx_get_rptr() local
2164 if (adreno_gpu->base.hw_apriv || a6xx_gpu->has_whereami) in a6xx_get_rptr()
2165 return a6xx_gpu->shadow[ring->id]; in a6xx_get_rptr()
2316 struct a6xx_gpu *a6xx_gpu; in a6xx_gpu_init() local
2321 a6xx_gpu = kzalloc(sizeof(*a6xx_gpu), GFP_KERNEL); in a6xx_gpu_init()
2322 if (!a6xx_gpu) in a6xx_gpu_init()
2325 adreno_gpu = &a6xx_gpu->base; in a6xx_gpu_init()
2328 mutex_init(&a6xx_gpu->gmu.lock); in a6xx_gpu_init()
2342 a6xx_llc_slices_init(pdev, a6xx_gpu); in a6xx_gpu_init()
2346 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
2355 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
2367 ret = a6xx_gmu_wrapper_init(a6xx_gpu, node); in a6xx_gpu_init()
2369 ret = a6xx_gmu_init(a6xx_gpu, node); in a6xx_gpu_init()
2372 a6xx_destroy(&(a6xx_gpu->base.base)); in a6xx_gpu_init()
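
Almost every match above starts by recovering the struct a6xx_gpu container from a generic struct adreno_gpu pointer via to_a6xx_gpu(), then reaches GPU or GMU state through the nested base members (e.g. a6xx_gpu->base.base.cur_ctx_seqno). The sketch below illustrates that container_of() upcast in a standalone userspace form; the struct layouts, field contents, and the check_idle() helper are mock stand-ins chosen only to mirror the shape of the references above, not the real kernel definitions.

    /*
     * Illustrative userspace sketch of the container_of() upcast used by
     * to_a6xx_gpu() in the listing above. The struct layouts are mock
     * stand-ins, not the real kernel definitions.
     */
    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct msm_gpu    { const char *name; };
    struct adreno_gpu { struct msm_gpu base; };
    struct a6xx_gpu   { struct adreno_gpu base; int has_whereami; };

    /* Mirrors the to_a6xx_gpu() helper implied by the references above. */
    #define to_a6xx_gpu(x) container_of(x, struct a6xx_gpu, base)

    static void check_idle(struct adreno_gpu *adreno_gpu)
    {
            /* Same shape as "struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);" */
            struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);

            printf("%s: has_whereami=%d\n",
                   a6xx_gpu->base.base.name, a6xx_gpu->has_whereami);
    }

    int main(void)
    {
            struct a6xx_gpu gpu = {
                    .base.base.name = "a6xx",
                    .has_whereami = 1,
            };

            check_idle(&gpu.base);
            return 0;
    }

Once the containing a6xx_gpu is recovered this way, the listing also shows its embedded GMU state being serialised through a single mutex: a6xx_hw_init(), a6xx_gmu_pm_resume(), a6xx_gpu_set_freq() and a6xx_gmu_get_timestamp() all bracket their GMU calls with mutex_lock()/mutex_unlock() on a6xx_gpu->gmu.lock.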