Lines matching refs:gpu (references to the identifier "gpu" in the msm Adreno A5XX GPU driver)

30 static void a5xx_dump(struct msm_gpu *gpu);
34 static int zap_shader_load_mdt(struct msm_gpu *gpu, const char *fwname) in zap_shader_load_mdt() argument
36 struct device *dev = &gpu->pdev->dev; in zap_shader_load_mdt()
64 fw = adreno_request_fw(to_adreno_gpu(gpu), fwname); in zap_shader_load_mdt()
93 if (to_adreno_gpu(gpu)->fwloc == FW_LOCATION_LEGACY) { in zap_shader_load_mdt()
122 static void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a5xx_flush() argument
124 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_flush()
144 gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr); in a5xx_flush()
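The flush path above (lines 122-144) boils down to publishing a new write pointer to the CP. A minimal sketch of that pattern follows; get_wptr() and the ring spinlock are assumed helpers/fields, and the real function additionally skips the register write while a preemption switch is in flight.

	/* Sketch only: mirrors the pattern visible in a5xx_flush(). */
	static void a5xx_flush_sketch(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
	{
		unsigned long flags;
		uint32_t wptr;

		spin_lock_irqsave(&ring->lock, flags);
		/* Publish the shadow write pointer and convert it to a dword index. */
		ring->cur = ring->next;
		wptr = get_wptr(ring);	/* assumed helper: offset of cur within the ring */
		spin_unlock_irqrestore(&ring->lock, flags);

		/* Kick the CP: it fetches everything up to the new write pointer. */
		gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);
	}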
147 static void a5xx_submit_in_rb(struct msm_gpu *gpu, struct msm_gem_submit *submit, in a5xx_submit_in_rb() argument
150 struct msm_drm_private *priv = gpu->dev->dev_private; in a5xx_submit_in_rb()
194 a5xx_flush(gpu, ring); in a5xx_submit_in_rb()
195 a5xx_preempt_trigger(gpu); in a5xx_submit_in_rb()
201 a5xx_idle(gpu, ring); in a5xx_submit_in_rb()
203 msm_gpu_retire(gpu); in a5xx_submit_in_rb()
206 static void a5xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit, in a5xx_submit() argument
209 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_submit()
211 struct msm_drm_private *priv = gpu->dev->dev_private; in a5xx_submit()
217 a5xx_submit_in_rb(gpu, submit, ctx); in a5xx_submit()
307 a5xx_flush(gpu, ring); in a5xx_submit()
310 a5xx_preempt_trigger(gpu); in a5xx_submit()
411 void a5xx_set_hwcg(struct msm_gpu *gpu, bool state) in a5xx_set_hwcg() argument
416 gpu_write(gpu, a5xx_hwcg[i].offset, in a5xx_set_hwcg()
419 gpu_write(gpu, REG_A5XX_RBBM_CLOCK_CNTL, state ? 0xAAA8AA00 : 0); in a5xx_set_hwcg()
420 gpu_write(gpu, REG_A5XX_RBBM_ISDB_CNT, state ? 0x182 : 0x180); in a5xx_set_hwcg()
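a5xx_set_hwcg() (lines 411-420) walks a table of hardware clock-gating registers and then toggles the master controls. A sketch of that table-driven loop; the a5xx_hwcg[] table itself (offset/value pairs, field names assumed) is defined elsewhere in the driver.

	/* Sketch of the HWCG enable/disable loop seen at lines 411-420. */
	void a5xx_set_hwcg_sketch(struct msm_gpu *gpu, bool state)
	{
		unsigned int i;

		for (i = 0; i < ARRAY_SIZE(a5xx_hwcg); i++)
			gpu_write(gpu, a5xx_hwcg[i].offset,
				state ? a5xx_hwcg[i].value : 0);	/* .value assumed */

		/* Master clock control and ISDB counters follow the same on/off toggle. */
		gpu_write(gpu, REG_A5XX_RBBM_CLOCK_CNTL, state ? 0xAAA8AA00 : 0);
		gpu_write(gpu, REG_A5XX_RBBM_ISDB_CNT, state ? 0x182 : 0x180);
	}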
423 static int a5xx_me_init(struct msm_gpu *gpu) in a5xx_me_init() argument
425 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_me_init()
426 struct msm_ringbuffer *ring = gpu->rb[0]; in a5xx_me_init()
457 gpu->funcs->flush(gpu, ring); in a5xx_me_init()
458 return a5xx_idle(gpu, ring) ? 0 : -EINVAL; in a5xx_me_init()
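a5xx_me_init() (lines 423-458) follows a pattern used repeatedly in this file: emit packets into ring 0, flush, then synchronously wait for the CP to drain. A sketch of that pattern; the CP_ME_INIT payload shown here is illustrative, not the driver's actual initialization words.

	/* Sketch of the submit-and-wait pattern shared by a5xx_me_init() and
	 * a5xx_preempt_start(); packet contents are placeholders. */
	static int a5xx_ring_init_sketch(struct msm_gpu *gpu)
	{
		struct msm_ringbuffer *ring = gpu->rb[0];

		OUT_PKT7(ring, CP_ME_INIT, 8);	/* opcode + dword count (assumed) */
		OUT_RING(ring, 0x0000002F);	/* remaining payload words elided */

		gpu->funcs->flush(gpu, ring);

		/* Convert the boolean idle result into the usual errno convention. */
		return a5xx_idle(gpu, ring) ? 0 : -EINVAL;
	}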
461 static int a5xx_preempt_start(struct msm_gpu *gpu) in a5xx_preempt_start() argument
463 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_start()
465 struct msm_ringbuffer *ring = gpu->rb[0]; in a5xx_preempt_start()
467 if (gpu->nr_rings == 1) in a5xx_preempt_start()
499 gpu->funcs->flush(gpu, ring); in a5xx_preempt_start()
501 return a5xx_idle(gpu, ring) ? 0 : -EINVAL; in a5xx_preempt_start()
504 static int a5xx_ucode_init(struct msm_gpu *gpu) in a5xx_ucode_init() argument
506 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_ucode_init()
511 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
517 dev_err(gpu->dev->dev, "could not allocate PM4: %d\n", in a5xx_ucode_init()
524 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
530 dev_err(gpu->dev->dev, "could not allocate PFP: %d\n", in a5xx_ucode_init()
536 gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO, in a5xx_ucode_init()
539 gpu_write64(gpu, REG_A5XX_CP_PFP_INSTR_BASE_LO, in a5xx_ucode_init()
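The ucode setup at lines 536-539 programs 64-bit instruction base addresses through paired LO/HI registers; gpu_write64() hides the split. A minimal sketch of such a helper (the driver's own version lives in msm_gpu.h and is essentially this):

	/* Sketch: split a 64-bit value across a LO/HI register pair. */
	static inline void gpu_write64_sketch(struct msm_gpu *gpu, u32 lo, u32 hi, u64 val)
	{
		gpu_write(gpu, lo, lower_32_bits(val));
		gpu_write(gpu, hi, upper_32_bits(val));
	}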
547 static int a5xx_zap_shader_resume(struct msm_gpu *gpu) in a5xx_zap_shader_resume() argument
554 gpu->name, ret); in a5xx_zap_shader_resume()
559 static int a5xx_zap_shader_init(struct msm_gpu *gpu) in a5xx_zap_shader_init() argument
562 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_zap_shader_init()
563 struct platform_device *pdev = gpu->pdev; in a5xx_zap_shader_init()
571 return a5xx_zap_shader_resume(gpu); in a5xx_zap_shader_init()
586 ret = zap_shader_load_mdt(gpu, adreno_gpu->info->zapfw); in a5xx_zap_shader_init()
606 static int a5xx_hw_init(struct msm_gpu *gpu) in a5xx_hw_init() argument
608 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_hw_init()
611 gpu_write(gpu, REG_A5XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003); in a5xx_hw_init()
614 gpu_write(gpu, REG_A5XX_RBBM_PERFCTR_GPU_BUSY_MASKED, 0xFFFFFFFF); in a5xx_hw_init()
617 gpu_write(gpu, REG_A5XX_RBBM_AHB_CNTL0, 0x00000001); in a5xx_hw_init()
625 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL11, in a5xx_hw_init()
627 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL12, in a5xx_hw_init()
629 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL13, in a5xx_hw_init()
631 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL14, in a5xx_hw_init()
633 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL15, in a5xx_hw_init()
635 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL16, in a5xx_hw_init()
637 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL17, in a5xx_hw_init()
639 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL18, in a5xx_hw_init()
644 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_INT_CNTL, in a5xx_hw_init()
648 gpu_write(gpu, REG_A5XX_RBBM_PERFCTR_CNTL, 0x01); in a5xx_hw_init()
651 gpu_write(gpu, REG_A5XX_CP_PERFCTR_CP_SEL_0, PERF_CP_ALWAYS_COUNT); in a5xx_hw_init()
654 gpu_write(gpu, REG_A5XX_RBBM_PERFCTR_RBBM_SEL_0, 6); in a5xx_hw_init()
657 gpu_write(gpu, REG_A5XX_UCHE_CACHE_WAYS, 0x02); in a5xx_hw_init()
660 gpu_write(gpu, REG_A5XX_UCHE_TRAP_BASE_LO, 0xFFFF0000); in a5xx_hw_init()
661 gpu_write(gpu, REG_A5XX_UCHE_TRAP_BASE_HI, 0x0001FFFF); in a5xx_hw_init()
662 gpu_write(gpu, REG_A5XX_UCHE_WRITE_THRU_BASE_LO, 0xFFFF0000); in a5xx_hw_init()
663 gpu_write(gpu, REG_A5XX_UCHE_WRITE_THRU_BASE_HI, 0x0001FFFF); in a5xx_hw_init()
666 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MIN_LO, 0x00100000); in a5xx_hw_init()
667 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MIN_HI, 0x00000000); in a5xx_hw_init()
668 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MAX_LO, in a5xx_hw_init()
670 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MAX_HI, 0x00000000); in a5xx_hw_init()
672 gpu_write(gpu, REG_A5XX_CP_MEQ_THRESHOLDS, 0x40); in a5xx_hw_init()
673 gpu_write(gpu, REG_A5XX_CP_MERCIU_SIZE, 0x40); in a5xx_hw_init()
674 gpu_write(gpu, REG_A5XX_CP_ROQ_THRESHOLDS_2, 0x80000060); in a5xx_hw_init()
675 gpu_write(gpu, REG_A5XX_CP_ROQ_THRESHOLDS_1, 0x40201B16); in a5xx_hw_init()
677 gpu_write(gpu, REG_A5XX_PC_DBG_ECO_CNTL, (0x400 << 11 | 0x300 << 22)); in a5xx_hw_init()
680 gpu_rmw(gpu, REG_A5XX_PC_DBG_ECO_CNTL, 0, (1 << 8)); in a5xx_hw_init()
682 gpu_write(gpu, REG_A5XX_PC_DBG_ECO_CNTL, 0xc0200100); in a5xx_hw_init()
685 gpu_write(gpu, REG_A5XX_CP_CHICKEN_DBG, 0x02000000); in a5xx_hw_init()
688 gpu_write(gpu, REG_A5XX_RBBM_AHB_CNTL1, 0xA6FFFFFF); in a5xx_hw_init()
691 a5xx_set_hwcg(gpu, true); in a5xx_hw_init()
693 gpu_write(gpu, REG_A5XX_RBBM_AHB_CNTL2, 0x0000003F); in a5xx_hw_init()
696 gpu_write(gpu, REG_A5XX_TPL1_MODE_CNTL, 2 << 7); in a5xx_hw_init()
697 gpu_write(gpu, REG_A5XX_RB_MODE_CNTL, 2 << 1); in a5xx_hw_init()
700 gpu_write(gpu, REG_A5XX_CP_PROTECT_CNTL, 0x00000007); in a5xx_hw_init()
703 gpu_write(gpu, REG_A5XX_CP_PROTECT(0), ADRENO_PROTECT_RW(0x04, 4)); in a5xx_hw_init()
704 gpu_write(gpu, REG_A5XX_CP_PROTECT(1), ADRENO_PROTECT_RW(0x08, 8)); in a5xx_hw_init()
705 gpu_write(gpu, REG_A5XX_CP_PROTECT(2), ADRENO_PROTECT_RW(0x10, 16)); in a5xx_hw_init()
706 gpu_write(gpu, REG_A5XX_CP_PROTECT(3), ADRENO_PROTECT_RW(0x20, 32)); in a5xx_hw_init()
707 gpu_write(gpu, REG_A5XX_CP_PROTECT(4), ADRENO_PROTECT_RW(0x40, 64)); in a5xx_hw_init()
708 gpu_write(gpu, REG_A5XX_CP_PROTECT(5), ADRENO_PROTECT_RW(0x80, 64)); in a5xx_hw_init()
711 gpu_write(gpu, REG_A5XX_CP_PROTECT(6), in a5xx_hw_init()
714 gpu_write(gpu, REG_A5XX_CP_PROTECT(7), in a5xx_hw_init()
718 gpu_write(gpu, REG_A5XX_CP_PROTECT(8), ADRENO_PROTECT_RW(0x800, 64)); in a5xx_hw_init()
719 gpu_write(gpu, REG_A5XX_CP_PROTECT(9), ADRENO_PROTECT_RW(0x840, 8)); in a5xx_hw_init()
720 gpu_write(gpu, REG_A5XX_CP_PROTECT(10), ADRENO_PROTECT_RW(0x880, 32)); in a5xx_hw_init()
721 gpu_write(gpu, REG_A5XX_CP_PROTECT(11), ADRENO_PROTECT_RW(0xAA0, 1)); in a5xx_hw_init()
724 gpu_write(gpu, REG_A5XX_CP_PROTECT(12), ADRENO_PROTECT_RW(0xCC0, 1)); in a5xx_hw_init()
725 gpu_write(gpu, REG_A5XX_CP_PROTECT(13), ADRENO_PROTECT_RW(0xCF0, 2)); in a5xx_hw_init()
728 gpu_write(gpu, REG_A5XX_CP_PROTECT(14), ADRENO_PROTECT_RW(0xE68, 8)); in a5xx_hw_init()
729 gpu_write(gpu, REG_A5XX_CP_PROTECT(15), ADRENO_PROTECT_RW(0xE70, 4)); in a5xx_hw_init()
732 gpu_write(gpu, REG_A5XX_CP_PROTECT(16), ADRENO_PROTECT_RW(0xE80, 16)); in a5xx_hw_init()
735 gpu_write(gpu, REG_A5XX_CP_PROTECT(17), in a5xx_hw_init()
738 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TSB_CNTL, 0); in a5xx_hw_init()
744 gpu_write64(gpu, REG_A5XX_RBBM_SECVID_TSB_TRUSTED_BASE_LO, in a5xx_hw_init()
746 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TSB_TRUSTED_SIZE, 0x00000000); in a5xx_hw_init()
748 ret = adreno_hw_init(gpu); in a5xx_hw_init()
752 a5xx_preempt_hw_init(gpu); in a5xx_hw_init()
754 a5xx_gpmu_ucode_init(gpu); in a5xx_hw_init()
756 ret = a5xx_ucode_init(gpu); in a5xx_hw_init()
761 gpu_write(gpu, REG_A5XX_RBBM_INT_0_MASK, A5XX_INT_MASK); in a5xx_hw_init()
764 gpu_write(gpu, REG_A5XX_CP_PFP_ME_CNTL, 0); in a5xx_hw_init()
765 ret = a5xx_me_init(gpu); in a5xx_hw_init()
769 ret = a5xx_power_init(gpu); in a5xx_hw_init()
778 OUT_PKT7(gpu->rb[0], CP_EVENT_WRITE, 1); in a5xx_hw_init()
779 OUT_RING(gpu->rb[0], 0x0F); in a5xx_hw_init()
781 gpu->funcs->flush(gpu, gpu->rb[0]); in a5xx_hw_init()
782 if (!a5xx_idle(gpu, gpu->rb[0])) in a5xx_hw_init()
793 ret = a5xx_zap_shader_init(gpu); in a5xx_hw_init()
795 OUT_PKT7(gpu->rb[0], CP_SET_SECURE_MODE, 1); in a5xx_hw_init()
796 OUT_RING(gpu->rb[0], 0x00000000); in a5xx_hw_init()
798 gpu->funcs->flush(gpu, gpu->rb[0]); in a5xx_hw_init()
799 if (!a5xx_idle(gpu, gpu->rb[0])) in a5xx_hw_init()
803 dev_warn_once(gpu->dev->dev, in a5xx_hw_init()
805 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TRUST_CNTL, 0x0); in a5xx_hw_init()
809 a5xx_preempt_start(gpu); in a5xx_hw_init()
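The tail of a5xx_hw_init() (lines 793-809) switches the GPU out of secure mode: if a zap shader is available it is loaded and the CP itself is asked to drop secure mode; if the zap firmware is simply absent, the driver falls back to clearing the trust bit from the CPU side. A condensed sketch of that branch, with error messages paraphrased:

	/* Sketch of the secure-mode handoff at the end of a5xx_hw_init(). */
	ret = a5xx_zap_shader_init(gpu);
	if (!ret) {
		/* Zap shader loaded: ask the CP to leave secure mode. */
		OUT_PKT7(gpu->rb[0], CP_SET_SECURE_MODE, 1);
		OUT_RING(gpu->rb[0], 0x00000000);

		gpu->funcs->flush(gpu, gpu->rb[0]);
		if (!a5xx_idle(gpu, gpu->rb[0]))
			return -EINVAL;
	} else if (ret == -ENODEV) {
		/* No zap firmware on this platform: clear the trust bit directly
		 * (only works if the bootloader left the register writable). */
		dev_warn_once(gpu->dev->dev,
			"Zap shader not enabled - using SECVID_TRUST_CNTL instead\n");
		gpu_write(gpu, REG_A5XX_RBBM_SECVID_TRUST_CNTL, 0x0);
	} else {
		return ret;
	}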
814 static void a5xx_recover(struct msm_gpu *gpu) in a5xx_recover() argument
818 adreno_dump_info(gpu); in a5xx_recover()
822 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(i))); in a5xx_recover()
826 a5xx_dump(gpu); in a5xx_recover()
828 gpu_write(gpu, REG_A5XX_RBBM_SW_RESET_CMD, 1); in a5xx_recover()
829 gpu_read(gpu, REG_A5XX_RBBM_SW_RESET_CMD); in a5xx_recover()
830 gpu_write(gpu, REG_A5XX_RBBM_SW_RESET_CMD, 0); in a5xx_recover()
831 adreno_recover(gpu); in a5xx_recover()
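The recovery path (lines 814-831) dumps the CP scratch registers, pulses the software reset bit, and then hands off to the generic adreno recovery. A condensed sketch; the loop bound and log format are paraphrased, and the full register dump is gated on a debug option in the real code.

	/* Sketch of a5xx_recover(): dump state, soft-reset, let the common
	 * adreno code rebuild the rings and re-run hw_init. */
	static void a5xx_recover_sketch(struct msm_gpu *gpu)
	{
		int i;

		adreno_dump_info(gpu);

		for (i = 0; i < 8; i++)		/* bound assumed */
			dev_info(gpu->dev->dev, "CP_SCRATCH_REG%d: %u\n", i,
				gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(i)));

		a5xx_dump(gpu);			/* debug-only in the real code */

		/* Pulse the soft reset; the dummy read flushes the posted write. */
		gpu_write(gpu, REG_A5XX_RBBM_SW_RESET_CMD, 1);
		gpu_read(gpu, REG_A5XX_RBBM_SW_RESET_CMD);
		gpu_write(gpu, REG_A5XX_RBBM_SW_RESET_CMD, 0);

		adreno_recover(gpu);
	}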
834 static void a5xx_destroy(struct msm_gpu *gpu) in a5xx_destroy() argument
836 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_destroy()
839 DBG("%s", gpu->name); in a5xx_destroy()
841 a5xx_preempt_fini(gpu); in a5xx_destroy()
845 msm_gem_put_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
851 msm_gem_put_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
857 msm_gem_put_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
865 static inline bool _a5xx_check_idle(struct msm_gpu *gpu) in _a5xx_check_idle() argument
867 if (gpu_read(gpu, REG_A5XX_RBBM_STATUS) & ~A5XX_RBBM_STATUS_HI_BUSY) in _a5xx_check_idle()
874 return !(gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS) & in _a5xx_check_idle()
878 bool a5xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a5xx_idle() argument
880 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_idle()
889 if (!adreno_idle(gpu, ring)) in a5xx_idle()
892 if (spin_until(_a5xx_check_idle(gpu))) { in a5xx_idle()
894 gpu->name, __builtin_return_address(0), in a5xx_idle()
895 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_idle()
896 gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS), in a5xx_idle()
897 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_idle()
898 gpu_read(gpu, REG_A5XX_CP_RB_WPTR)); in a5xx_idle()
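a5xx_idle() (lines 878-898) layers an A5XX-specific busy/IRQ check on top of the generic adreno_idle() ringbuffer drain, spinning with spin_until() and logging RBBM and CP state on timeout. An outline, with the error message condensed (the real one also prints the caller address):

	/* Sketch of the idle wait: drain the ring, then poll until the RBBM
	 * status and pending-interrupt registers both look quiet. */
	bool a5xx_idle_sketch(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
	{
		/* Wait for the CP read pointer to catch up with what was submitted. */
		if (!adreno_idle(gpu, ring))
			return false;

		if (spin_until(_a5xx_check_idle(gpu))) {
			DRM_ERROR("%s: timeout waiting for GPU to idle: status %8.8X irq %8.8X rptr/wptr %d/%d\n",
				gpu->name,
				gpu_read(gpu, REG_A5XX_RBBM_STATUS),
				gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS),
				gpu_read(gpu, REG_A5XX_CP_RB_RPTR),
				gpu_read(gpu, REG_A5XX_CP_RB_WPTR));
			return false;
		}

		return true;
	}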
907 struct msm_gpu *gpu = arg; in a5xx_fault_handler() local
910 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(4)), in a5xx_fault_handler()
911 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(5)), in a5xx_fault_handler()
912 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(6)), in a5xx_fault_handler()
913 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(7))); in a5xx_fault_handler()
918 static void a5xx_cp_err_irq(struct msm_gpu *gpu) in a5xx_cp_err_irq() argument
920 u32 status = gpu_read(gpu, REG_A5XX_CP_INTERRUPT_STATUS); in a5xx_cp_err_irq()
925 gpu_write(gpu, REG_A5XX_CP_PFP_STAT_ADDR, 0); in a5xx_cp_err_irq()
932 gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA); in a5xx_cp_err_irq()
933 val = gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA); in a5xx_cp_err_irq()
935 dev_err_ratelimited(gpu->dev->dev, "CP | opcode error | possible opcode=0x%8.8X\n", in a5xx_cp_err_irq()
940 dev_err_ratelimited(gpu->dev->dev, "CP | HW fault | status=0x%8.8X\n", in a5xx_cp_err_irq()
941 gpu_read(gpu, REG_A5XX_CP_HW_FAULT)); in a5xx_cp_err_irq()
944 dev_err_ratelimited(gpu->dev->dev, "CP | DMA error\n"); in a5xx_cp_err_irq()
947 u32 val = gpu_read(gpu, REG_A5XX_CP_PROTECT_STATUS); in a5xx_cp_err_irq()
949 dev_err_ratelimited(gpu->dev->dev, in a5xx_cp_err_irq()
956 u32 status = gpu_read(gpu, REG_A5XX_CP_AHB_FAULT); in a5xx_cp_err_irq()
962 dev_err_ratelimited(gpu->dev->dev, in a5xx_cp_err_irq()
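For CP opcode errors (lines 920-935) the handler pulls the offending opcode back out of the PFP statistics FIFO: point STAT_ADDR at entry 0, discard one read to advance the FIFO, then read the value of interest. A sketch of just that branch; the interrupt bit name used here is a placeholder for the driver's own CP interrupt definition.

	if (status & CP_OPCODE_ERROR_BIT) {	/* hypothetical name */
		u32 val;

		gpu_write(gpu, REG_A5XX_CP_PFP_STAT_ADDR, 0);

		/* The first read is discarded; the second returns the opcode. */
		gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA);
		val = gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA);

		dev_err_ratelimited(gpu->dev->dev,
			"CP | opcode error | possible opcode=0x%8.8X\n", val);
	}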
969 static void a5xx_rbbm_err_irq(struct msm_gpu *gpu, u32 status) in a5xx_rbbm_err_irq() argument
972 u32 val = gpu_read(gpu, REG_A5XX_RBBM_AHB_ERROR_STATUS); in a5xx_rbbm_err_irq()
974 dev_err_ratelimited(gpu->dev->dev, in a5xx_rbbm_err_irq()
981 gpu_write(gpu, REG_A5XX_RBBM_AHB_CMD, (1 << 4)); in a5xx_rbbm_err_irq()
984 gpu_write(gpu, REG_A5XX_RBBM_INT_CLEAR_CMD, in a5xx_rbbm_err_irq()
989 dev_err_ratelimited(gpu->dev->dev, "RBBM | AHB transfer timeout\n"); in a5xx_rbbm_err_irq()
992 dev_err_ratelimited(gpu->dev->dev, "RBBM | ME master split | status=0x%X\n", in a5xx_rbbm_err_irq()
993 gpu_read(gpu, REG_A5XX_RBBM_AHB_ME_SPLIT_STATUS)); in a5xx_rbbm_err_irq()
996 dev_err_ratelimited(gpu->dev->dev, "RBBM | PFP master split | status=0x%X\n", in a5xx_rbbm_err_irq()
997 gpu_read(gpu, REG_A5XX_RBBM_AHB_PFP_SPLIT_STATUS)); in a5xx_rbbm_err_irq()
1000 dev_err_ratelimited(gpu->dev->dev, "RBBM | ETS master split | status=0x%X\n", in a5xx_rbbm_err_irq()
1001 gpu_read(gpu, REG_A5XX_RBBM_AHB_ETS_SPLIT_STATUS)); in a5xx_rbbm_err_irq()
1004 dev_err_ratelimited(gpu->dev->dev, "RBBM | ATB ASYNC overflow\n"); in a5xx_rbbm_err_irq()
1007 dev_err_ratelimited(gpu->dev->dev, "RBBM | ATB bus overflow\n"); in a5xx_rbbm_err_irq()
1010 static void a5xx_uche_err_irq(struct msm_gpu *gpu) in a5xx_uche_err_irq() argument
1012 uint64_t addr = (uint64_t) gpu_read(gpu, REG_A5XX_UCHE_TRAP_LOG_HI); in a5xx_uche_err_irq()
1014 addr |= gpu_read(gpu, REG_A5XX_UCHE_TRAP_LOG_LO); in a5xx_uche_err_irq()
1016 dev_err_ratelimited(gpu->dev->dev, "UCHE | Out of bounds access | addr=0x%llX\n", in a5xx_uche_err_irq()
1020 static void a5xx_gpmu_err_irq(struct msm_gpu *gpu) in a5xx_gpmu_err_irq() argument
1022 dev_err_ratelimited(gpu->dev->dev, "GPMU | voltage droop\n"); in a5xx_gpmu_err_irq()
1025 static void a5xx_fault_detect_irq(struct msm_gpu *gpu) in a5xx_fault_detect_irq() argument
1027 struct drm_device *dev = gpu->dev; in a5xx_fault_detect_irq()
1029 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); in a5xx_fault_detect_irq()
1033 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_fault_detect_irq()
1034 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_fault_detect_irq()
1035 gpu_read(gpu, REG_A5XX_CP_RB_WPTR), in a5xx_fault_detect_irq()
1036 gpu_read64(gpu, REG_A5XX_CP_IB1_BASE, REG_A5XX_CP_IB1_BASE_HI), in a5xx_fault_detect_irq()
1037 gpu_read(gpu, REG_A5XX_CP_IB1_BUFSZ), in a5xx_fault_detect_irq()
1038 gpu_read64(gpu, REG_A5XX_CP_IB2_BASE, REG_A5XX_CP_IB2_BASE_HI), in a5xx_fault_detect_irq()
1039 gpu_read(gpu, REG_A5XX_CP_IB2_BUFSZ)); in a5xx_fault_detect_irq()
1042 del_timer(&gpu->hangcheck_timer); in a5xx_fault_detect_irq()
1044 queue_work(priv->wq, &gpu->recover_work); in a5xx_fault_detect_irq()
1055 static irqreturn_t a5xx_irq(struct msm_gpu *gpu) in a5xx_irq() argument
1057 u32 status = gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS); in a5xx_irq()
1063 gpu_write(gpu, REG_A5XX_RBBM_INT_CLEAR_CMD, in a5xx_irq()
1068 a5xx_rbbm_err_irq(gpu, status); in a5xx_irq()
1071 a5xx_cp_err_irq(gpu); in a5xx_irq()
1074 a5xx_fault_detect_irq(gpu); in a5xx_irq()
1077 a5xx_uche_err_irq(gpu); in a5xx_irq()
1080 a5xx_gpmu_err_irq(gpu); in a5xx_irq()
1083 a5xx_preempt_trigger(gpu); in a5xx_irq()
1084 msm_gpu_retire(gpu); in a5xx_irq()
1088 a5xx_preempt_irq(gpu); in a5xx_irq()
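The top-level interrupt handler (lines 1055-1088) reads RBBM_INT_0_STATUS once, acks the bits, and fans out to the per-block error handlers above. A condensed sketch of that dispatch; the interrupt bit names follow the pattern of the driver's generated register headers, and the grouping of RBBM error bits is simplified.

	/* Sketch of the a5xx_irq() fan-out. */
	static irqreturn_t a5xx_irq_sketch(struct msm_gpu *gpu)
	{
		u32 status = gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS);

		/* Acknowledge everything we are about to handle. */
		gpu_write(gpu, REG_A5XX_RBBM_INT_CLEAR_CMD, status);

		if (status & A5XX_RBBM_INT_0_MASK_RBBM_AHB_ERROR)
			a5xx_rbbm_err_irq(gpu, status);

		if (status & A5XX_RBBM_INT_0_MASK_CP_HW_ERROR)
			a5xx_cp_err_irq(gpu);

		if (status & A5XX_RBBM_INT_0_MASK_MISC_HANG_DETECT)
			a5xx_fault_detect_irq(gpu);

		if (status & A5XX_RBBM_INT_0_MASK_UCHE_OOB_ACCESS)
			a5xx_uche_err_irq(gpu);

		if (status & A5XX_RBBM_INT_0_MASK_GPMU_VOLTAGE_DROOP)
			a5xx_gpmu_err_irq(gpu);

		if (status & A5XX_RBBM_INT_0_MASK_CP_CACHE_FLUSH_TS) {
			a5xx_preempt_trigger(gpu);
			msm_gpu_retire(gpu);
		}

		if (status & A5XX_RBBM_INT_0_MASK_CP_SW)
			a5xx_preempt_irq(gpu);

		return IRQ_HANDLED;
	}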
1135 static void a5xx_dump(struct msm_gpu *gpu) in a5xx_dump() argument
1137 dev_info(gpu->dev->dev, "status: %08x\n", in a5xx_dump()
1138 gpu_read(gpu, REG_A5XX_RBBM_STATUS)); in a5xx_dump()
1139 adreno_dump(gpu); in a5xx_dump()
1142 static int a5xx_pm_resume(struct msm_gpu *gpu) in a5xx_pm_resume() argument
1147 ret = msm_gpu_pm_resume(gpu); in a5xx_pm_resume()
1152 gpu_write(gpu, REG_A5XX_GPMU_RBCCU_POWER_CNTL, 0x778000); in a5xx_pm_resume()
1157 ret = spin_usecs(gpu, 20, REG_A5XX_GPMU_RBCCU_PWR_CLK_STATUS, in a5xx_pm_resume()
1161 gpu->name, in a5xx_pm_resume()
1162 gpu_read(gpu, REG_A5XX_GPMU_RBCCU_PWR_CLK_STATUS)); in a5xx_pm_resume()
1167 gpu_write(gpu, REG_A5XX_GPMU_SP_POWER_CNTL, 0x778000); in a5xx_pm_resume()
1168 ret = spin_usecs(gpu, 20, REG_A5XX_GPMU_SP_PWR_CLK_STATUS, in a5xx_pm_resume()
1172 gpu->name); in a5xx_pm_resume()
1177 static int a5xx_pm_suspend(struct msm_gpu *gpu) in a5xx_pm_suspend() argument
1180 gpu_write(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL0, 0xF); in a5xx_pm_suspend()
1181 spin_until((gpu_read(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL1) & 0xF) == 0xF); in a5xx_pm_suspend()
1183 gpu_write(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL0, 0); in a5xx_pm_suspend()
1189 gpu_write(gpu, REG_A5XX_RBBM_BLOCK_SW_RESET_CMD, 0x003C0000); in a5xx_pm_suspend()
1190 gpu_write(gpu, REG_A5XX_RBBM_BLOCK_SW_RESET_CMD, 0x00000000); in a5xx_pm_suspend()
1192 return msm_gpu_pm_suspend(gpu); in a5xx_pm_suspend()
1195 static int a5xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a5xx_get_timestamp() argument
1197 *value = gpu_read64(gpu, REG_A5XX_RBBM_PERFCTR_CP_0_LO, in a5xx_get_timestamp()
1214 #define gpu_poll_timeout(gpu, addr, val, cond, interval, timeout) \ argument
1215 readl_poll_timeout((gpu)->mmio + ((addr) << 2), val, cond, \
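gpu_poll_timeout() (lines 1214-1215) is a thin wrapper around readl_poll_timeout() from <linux/iopoll.h>, shifting the register index into a byte offset within the MMIO mapping. A usage sketch modeled on the crash-dumper wait below; the completion bit and timeout values are illustrative.

	u32 val;
	int ret;

	/* Poll REG_A5XX_CP_CRASH_DUMP_CNTL every 100 us, for at most 10 ms,
	 * until the hardware sets the assumed "dump complete" bit. */
	ret = gpu_poll_timeout(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, val,
		val & 0x04, 100, 10000);
	if (ret)
		/* Timed out; val holds the last value read from the register. */
		dev_err(gpu->dev->dev, "crashdumper timed out\n");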
1218 static int a5xx_crashdumper_init(struct msm_gpu *gpu, in a5xx_crashdumper_init() argument
1221 dumper->ptr = msm_gem_kernel_new_locked(gpu->dev, in a5xx_crashdumper_init()
1222 SZ_1M, MSM_BO_UNCACHED, gpu->aspace, in a5xx_crashdumper_init()
1231 static void a5xx_crashdumper_free(struct msm_gpu *gpu, in a5xx_crashdumper_free() argument
1234 msm_gem_put_iova(dumper->bo, gpu->aspace); in a5xx_crashdumper_free()
1240 static int a5xx_crashdumper_run(struct msm_gpu *gpu, in a5xx_crashdumper_run() argument
1248 gpu_write64(gpu, REG_A5XX_CP_CRASH_SCRIPT_BASE_LO, in a5xx_crashdumper_run()
1251 gpu_write(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, 1); in a5xx_crashdumper_run()
1253 return gpu_poll_timeout(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, val, in a5xx_crashdumper_run()
1284 static void a5xx_gpu_state_get_hlsq_regs(struct msm_gpu *gpu, in a5xx_gpu_state_get_hlsq_regs() argument
1292 if (a5xx_crashdumper_init(gpu, &dumper)) in a5xx_gpu_state_get_hlsq_regs()
1330 if (a5xx_crashdumper_run(gpu, &dumper)) { in a5xx_gpu_state_get_hlsq_regs()
1332 a5xx_crashdumper_free(gpu, &dumper); in a5xx_gpu_state_get_hlsq_regs()
1340 a5xx_crashdumper_free(gpu, &dumper); in a5xx_gpu_state_get_hlsq_regs()
1343 static struct msm_gpu_state *a5xx_gpu_state_get(struct msm_gpu *gpu) in a5xx_gpu_state_get() argument
1352 a5xx_set_hwcg(gpu, false); in a5xx_gpu_state_get()
1355 adreno_gpu_state_get(gpu, &(a5xx_state->base)); in a5xx_gpu_state_get()
1357 a5xx_state->base.rbbm_status = gpu_read(gpu, REG_A5XX_RBBM_STATUS); in a5xx_gpu_state_get()
1360 a5xx_gpu_state_get_hlsq_regs(gpu, a5xx_state); in a5xx_gpu_state_get()
1362 a5xx_set_hwcg(gpu, true); in a5xx_gpu_state_get()
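a5xx_gpu_state_get() (lines 1343-1362) brackets the capture with clock gating disabled so that the HLSQ registers can be read via the crash dumper. An outline, with allocation details assumed:

	/* Sketch of the state-capture bracket: HWCG off, snapshot, HWCG back on. */
	static struct msm_gpu_state *a5xx_gpu_state_get_sketch(struct msm_gpu *gpu)
	{
		struct a5xx_gpu_state *a5xx_state =
			kzalloc(sizeof(*a5xx_state), GFP_KERNEL);

		if (!a5xx_state)
			return ERR_PTR(-ENOMEM);

		/* Clock gating interferes with reading some register blocks. */
		a5xx_set_hwcg(gpu, false);

		/* Common adreno capture, plus the A5XX-specific extras. */
		adreno_gpu_state_get(gpu, &a5xx_state->base);
		a5xx_state->base.rbbm_status = gpu_read(gpu, REG_A5XX_RBBM_STATUS);

		/* HLSQ registers are fetched through the crash dumper. */
		a5xx_gpu_state_get_hlsq_regs(gpu, a5xx_state);

		a5xx_set_hwcg(gpu, true);

		return &a5xx_state->base;
	}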
1390 void a5xx_show(struct msm_gpu *gpu, struct msm_gpu_state *state, in a5xx_show() argument
1401 adreno_show(gpu, state, p); in a5xx_show()
1431 static struct msm_ringbuffer *a5xx_active_ring(struct msm_gpu *gpu) in a5xx_active_ring() argument
1433 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_active_ring()
1439 static int a5xx_gpu_busy(struct msm_gpu *gpu, uint64_t *value) in a5xx_gpu_busy() argument
1441 *value = gpu_read64(gpu, REG_A5XX_RBBM_PERFCTR_RBBM_0_LO, in a5xx_gpu_busy()
1497 struct msm_gpu *gpu; in a5xx_gpu_init() local
1510 gpu = &adreno_gpu->base; in a5xx_gpu_init()
1525 if (gpu->aspace) in a5xx_gpu_init()
1526 msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, a5xx_fault_handler); in a5xx_gpu_init()
1529 a5xx_preempt_init(gpu); in a5xx_gpu_init()
1531 return gpu; in a5xx_gpu_init()