Lines Matching full:gpu (drivers/gpu/drm/msm/adreno/a5xx_gpu.c)
17 static void a5xx_dump(struct msm_gpu *gpu);
21 static void update_shadow_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in update_shadow_rptr() argument
23 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in update_shadow_rptr()
33 void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring, in a5xx_flush() argument
36 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_flush()
46 update_shadow_rptr(gpu, ring); in a5xx_flush()
63 gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr); in a5xx_flush()
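The a5xx_flush() matches above cover the write-pointer publish path. For context, a condensed sketch of the whole function, reconstructed from the listed lines plus my reading of the upstream msm driver (the unlisted lines are from memory and may differ by kernel version):

    void a5xx_flush(struct msm_gpu *gpu, struct msm_ringbuffer *ring,
                    bool sync)
    {
            struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
            struct a5xx_gpu *a5xx_gpu = to_a5xx_gpu(adreno_gpu);
            unsigned long flags;
            uint32_t wptr;

            /* When the shadow rptr is in use, ask the CP to report its
             * read pointer so retire logic sees up-to-date progress. */
            if (sync)
                    update_shadow_rptr(gpu, ring);

            spin_lock_irqsave(&ring->preempt_lock, flags);
            ring->cur = ring->next;     /* commit the staged commands */
            wptr = get_wptr(ring);      /* compute the (wrapped) wptr */
            spin_unlock_irqrestore(&ring->preempt_lock, flags);

            mb();   /* post the ring contents before the doorbell write */

            /* Skip the doorbell mid-preemption; the new wptr is picked
             * up when the ring is restored. */
            if (!a5xx_in_preempt(a5xx_gpu))
                    gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr);
    }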
66 static void a5xx_submit_in_rb(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a5xx_submit_in_rb() argument
68 struct msm_drm_private *priv = gpu->dev->dev_private; in a5xx_submit_in_rb()
113 a5xx_flush(gpu, ring, true); in a5xx_submit_in_rb()
114 a5xx_preempt_trigger(gpu); in a5xx_submit_in_rb()
120 a5xx_idle(gpu, ring); in a5xx_submit_in_rb()
122 msm_gpu_retire(gpu); in a5xx_submit_in_rb()
125 static void a5xx_submit(struct msm_gpu *gpu, struct msm_gem_submit *submit) in a5xx_submit() argument
127 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_submit()
129 struct msm_drm_private *priv = gpu->dev->dev_private; in a5xx_submit()
135 a5xx_submit_in_rb(gpu, submit); in a5xx_submit()
189 update_shadow_rptr(gpu, ring); in a5xx_submit()
238 a5xx_flush(gpu, ring, false); in a5xx_submit()
241 a5xx_preempt_trigger(gpu); in a5xx_submit()
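a5xx_submit() turns each user command buffer into a CP_INDIRECT_BUFFER_PFE packet and refreshes the shadow rptr every few IBs (the line-189 match) so long submissions still show partial progress. A sketch of that loop, assuming the upstream packet helpers (OUT_PKT7/OUT_RING) and simplified by dropping the same-context restore skip:

    unsigned int i, ibs = 0;

    for (i = 0; i < submit->nr_cmds; i++) {
            switch (submit->cmd[i].type) {
            case MSM_SUBMIT_CMD_IB_TARGET_BUF:
                    break;                          /* kernel-only buffer */
            case MSM_SUBMIT_CMD_CTX_RESTORE_BUF:    /* skipped if same ctx */
            case MSM_SUBMIT_CMD_BUF:
                    /* Reference the IB by GPU address and dword count */
                    OUT_PKT7(ring, CP_INDIRECT_BUFFER_PFE, 3);
                    OUT_RING(ring, lower_32_bits(submit->cmd[i].iova));
                    OUT_RING(ring, upper_32_bits(submit->cmd[i].iova));
                    OUT_RING(ring, submit->cmd[i].size);
                    ibs++;
                    break;
            }

            /* Refresh the shadow rptr periodically so retire logic is
             * not stuck looking at a stale value for huge submits. */
            if ((ibs % 32) == 0)
                    update_shadow_rptr(gpu, ring);
    }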
438 void a5xx_set_hwcg(struct msm_gpu *gpu, bool state) in a5xx_set_hwcg() argument
440 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_set_hwcg()
456 gpu_write(gpu, regs[i].offset, in a5xx_set_hwcg()
460 gpu_write(gpu, REG_A5XX_RBBM_CLOCK_DELAY_GPMU, state ? 0x00000770 : 0); in a5xx_set_hwcg()
461 gpu_write(gpu, REG_A5XX_RBBM_CLOCK_HYST_GPMU, state ? 0x00000004 : 0); in a5xx_set_hwcg()
464 gpu_write(gpu, REG_A5XX_RBBM_CLOCK_CNTL, state ? 0xAAA8AA00 : 0); in a5xx_set_hwcg()
465 gpu_write(gpu, REG_A5XX_RBBM_ISDB_CNT, state ? 0x182 : 0x180); in a5xx_set_hwcg()
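a5xx_set_hwcg() is table driven: each supported chip has an array of {offset, value} pairs (the regs[i].offset match at line 456), and disabling clock gating writes zero in place of the tuned value. A minimal sketch of the pattern; the table name and the single entry shown are illustrative:

    static const struct {
            u32 offset;
            u32 value;
    } a5xx_hwcg[] = {
            { REG_A5XX_RBBM_CLOCK_CNTL_SP0, 0x02222222 },
            /* ... one entry per clock-gating control register ... */
    };

    for (i = 0; i < ARRAY_SIZE(a5xx_hwcg); i++)
            gpu_write(gpu, a5xx_hwcg[i].offset,
                      state ? a5xx_hwcg[i].value : 0);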
468 static int a5xx_me_init(struct msm_gpu *gpu) in a5xx_me_init() argument
470 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_me_init()
471 struct msm_ringbuffer *ring = gpu->rb[0]; in a5xx_me_init()
505 a5xx_flush(gpu, ring, true); in a5xx_me_init()
506 return a5xx_idle(gpu, ring) ? 0 : -EINVAL; in a5xx_me_init()
509 static int a5xx_preempt_start(struct msm_gpu *gpu) in a5xx_preempt_start() argument
511 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_preempt_start()
513 struct msm_ringbuffer *ring = gpu->rb[0]; in a5xx_preempt_start()
515 if (gpu->nr_rings == 1) in a5xx_preempt_start()
548 a5xx_flush(gpu, ring, false); in a5xx_preempt_start()
550 return a5xx_idle(gpu, ring) ? 0 : -EINVAL; in a5xx_preempt_start()
572 static int a5xx_ucode_init(struct msm_gpu *gpu) in a5xx_ucode_init() argument
574 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_ucode_init()
579 a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
586 DRM_DEV_ERROR(gpu->dev->dev, "could not allocate PM4: %d\n", in a5xx_ucode_init()
595 a5xx_gpu->pfp_bo = adreno_fw_create_bo(gpu, in a5xx_ucode_init()
601 DRM_DEV_ERROR(gpu->dev->dev, "could not allocate PFP: %d\n", in a5xx_ucode_init()
610 gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO, in a5xx_ucode_init()
613 gpu_write64(gpu, REG_A5XX_CP_PFP_INSTR_BASE_LO, in a5xx_ucode_init()
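a5xx_ucode_init() loads the PM4 and PFP microcode images into GEM buffers and points the CP at them. A sketch of the PM4 half, per my reading of the driver (the PFP half at lines 595-613 is symmetrical):

    /* adreno_fw_create_bo() copies the request_firmware() payload into
     * a GEM buffer mapped into the GPU address space and returns the
     * BO, filling in its iova. */
    a5xx_gpu->pm4_bo = adreno_fw_create_bo(gpu,
            adreno_gpu->fw[ADRENO_FW_PM4], &a5xx_gpu->pm4_iova);
    if (IS_ERR(a5xx_gpu->pm4_bo)) {
            ret = PTR_ERR(a5xx_gpu->pm4_bo);
            a5xx_gpu->pm4_bo = NULL;
            DRM_DEV_ERROR(gpu->dev->dev, "could not allocate PM4: %d\n",
                    ret);
            return ret;
    }

    /* Program the instruction base; gpu_write64() splits the value
     * across the _LO/_HI register pair. */
    gpu_write64(gpu, REG_A5XX_CP_ME_INSTR_BASE_LO,
            REG_A5XX_CP_ME_INSTR_BASE_HI, a5xx_gpu->pm4_iova);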
621 static int a5xx_zap_shader_resume(struct msm_gpu *gpu) in a5xx_zap_shader_resume() argument
628 gpu->name, ret); in a5xx_zap_shader_resume()
633 static int a5xx_zap_shader_init(struct msm_gpu *gpu) in a5xx_zap_shader_init() argument
643 return a5xx_zap_shader_resume(gpu); in a5xx_zap_shader_init()
645 ret = adreno_zap_shader_load(gpu, GPU_PAS_ID); in a5xx_zap_shader_init()
664 static int a5xx_hw_init(struct msm_gpu *gpu) in a5xx_hw_init() argument
666 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_hw_init()
671 gpu_write(gpu, REG_A5XX_VBIF_ROUND_ROBIN_QOS_ARB, 0x00000003); in a5xx_hw_init()
675 gpu_write(gpu, REG_A5XX_VBIF_GATE_OFF_WRREQ_EN, 0x00000009); in a5xx_hw_init()
677 /* Make all blocks contribute to the GPU BUSY perf counter */ in a5xx_hw_init()
678 gpu_write(gpu, REG_A5XX_RBBM_PERFCTR_GPU_BUSY_MASKED, 0xFFFFFFFF); in a5xx_hw_init()
681 gpu_write(gpu, REG_A5XX_RBBM_AHB_CNTL0, 0x00000001); in a5xx_hw_init()
689 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL11, in a5xx_hw_init()
691 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL12, in a5xx_hw_init()
693 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL13, in a5xx_hw_init()
695 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL14, in a5xx_hw_init()
697 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL15, in a5xx_hw_init()
699 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL16, in a5xx_hw_init()
701 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL17, in a5xx_hw_init()
703 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_MASK_CNTL18, in a5xx_hw_init()
708 gpu_write(gpu, REG_A5XX_RBBM_INTERFACE_HANG_INT_CNTL, in a5xx_hw_init()
712 gpu_write(gpu, REG_A5XX_RBBM_PERFCTR_CNTL, 0x01); in a5xx_hw_init()
715 gpu_write(gpu, REG_A5XX_CP_PERFCTR_CP_SEL_0, PERF_CP_ALWAYS_COUNT); in a5xx_hw_init()
718 gpu_write(gpu, REG_A5XX_RBBM_PERFCTR_RBBM_SEL_0, 6); in a5xx_hw_init()
721 gpu_write(gpu, REG_A5XX_UCHE_CACHE_WAYS, 0x02); in a5xx_hw_init()
724 gpu_write(gpu, REG_A5XX_UCHE_TRAP_BASE_LO, 0xFFFF0000); in a5xx_hw_init()
725 gpu_write(gpu, REG_A5XX_UCHE_TRAP_BASE_HI, 0x0001FFFF); in a5xx_hw_init()
726 gpu_write(gpu, REG_A5XX_UCHE_WRITE_THRU_BASE_LO, 0xFFFF0000); in a5xx_hw_init()
727 gpu_write(gpu, REG_A5XX_UCHE_WRITE_THRU_BASE_HI, 0x0001FFFF); in a5xx_hw_init()
729 /* Set the GMEM VA range (0 to gpu->gmem) */ in a5xx_hw_init()
730 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MIN_LO, 0x00100000); in a5xx_hw_init()
731 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MIN_HI, 0x00000000); in a5xx_hw_init()
732 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MAX_LO, in a5xx_hw_init()
734 gpu_write(gpu, REG_A5XX_UCHE_GMEM_RANGE_MAX_HI, 0x00000000); in a5xx_hw_init()
737 gpu_write(gpu, REG_A5XX_CP_MEQ_THRESHOLDS, 0x20); in a5xx_hw_init()
739 gpu_write(gpu, REG_A5XX_CP_MERCIU_SIZE, 0x400); in a5xx_hw_init()
741 gpu_write(gpu, REG_A5XX_CP_MERCIU_SIZE, 0x20); in a5xx_hw_init()
742 gpu_write(gpu, REG_A5XX_CP_ROQ_THRESHOLDS_2, 0x40000030); in a5xx_hw_init()
743 gpu_write(gpu, REG_A5XX_CP_ROQ_THRESHOLDS_1, 0x20100D0A); in a5xx_hw_init()
745 gpu_write(gpu, REG_A5XX_CP_MEQ_THRESHOLDS, 0x40); in a5xx_hw_init()
747 gpu_write(gpu, REG_A5XX_CP_MERCIU_SIZE, 0x40); in a5xx_hw_init()
749 gpu_write(gpu, REG_A5XX_CP_MERCIU_SIZE, 0x400); in a5xx_hw_init()
750 gpu_write(gpu, REG_A5XX_CP_ROQ_THRESHOLDS_2, 0x80000060); in a5xx_hw_init()
751 gpu_write(gpu, REG_A5XX_CP_ROQ_THRESHOLDS_1, 0x40201B16); in a5xx_hw_init()
755 gpu_write(gpu, REG_A5XX_PC_DBG_ECO_CNTL, in a5xx_hw_init()
759 gpu_write(gpu, REG_A5XX_PC_DBG_ECO_CNTL, in a5xx_hw_init()
762 gpu_write(gpu, REG_A5XX_PC_DBG_ECO_CNTL, in a5xx_hw_init()
766 gpu_rmw(gpu, REG_A5XX_PC_DBG_ECO_CNTL, 0, (1 << 8)); in a5xx_hw_init()
774 gpu_rmw(gpu, REG_A5XX_RB_DBG_ECO_CNTL, 0, (1 << 9)); in a5xx_hw_init()
777 gpu_write(gpu, REG_A5XX_UCHE_MODE_CNTL, BIT(29)); in a5xx_hw_init()
780 gpu_write(gpu, REG_A5XX_CP_CHICKEN_DBG, 0x02000000); in a5xx_hw_init()
783 gpu_write(gpu, REG_A5XX_RBBM_AHB_CNTL1, 0xA6FFFFFF); in a5xx_hw_init()
790 * CCU to be interpreted differently. This can cause gpu fault. This in a5xx_hw_init()
796 gpu_rmw(gpu, REG_A5XX_RB_DBG_ECO_CNTL, (1 << 11), 0); in a5xx_hw_init()
799 a5xx_set_hwcg(gpu, true); in a5xx_hw_init()
801 gpu_write(gpu, REG_A5XX_RBBM_AHB_CNTL2, 0x0000003F); in a5xx_hw_init()
809 gpu_write(gpu, REG_A5XX_TPL1_MODE_CNTL, regbit << 7); in a5xx_hw_init()
810 gpu_write(gpu, REG_A5XX_RB_MODE_CNTL, regbit << 1); in a5xx_hw_init()
814 gpu_write(gpu, REG_A5XX_UCHE_DBG_ECO_CNTL_2, regbit); in a5xx_hw_init()
817 gpu_rmw(gpu, REG_A5XX_VPC_DBG_ECO_CNTL, 0, (1 << 10)); in a5xx_hw_init()
820 gpu_write(gpu, REG_A5XX_CP_PROTECT_CNTL, 0x00000007); in a5xx_hw_init()
823 gpu_write(gpu, REG_A5XX_CP_PROTECT(0), ADRENO_PROTECT_RW(0x04, 4)); in a5xx_hw_init()
824 gpu_write(gpu, REG_A5XX_CP_PROTECT(1), ADRENO_PROTECT_RW(0x08, 8)); in a5xx_hw_init()
825 gpu_write(gpu, REG_A5XX_CP_PROTECT(2), ADRENO_PROTECT_RW(0x10, 16)); in a5xx_hw_init()
826 gpu_write(gpu, REG_A5XX_CP_PROTECT(3), ADRENO_PROTECT_RW(0x20, 32)); in a5xx_hw_init()
827 gpu_write(gpu, REG_A5XX_CP_PROTECT(4), ADRENO_PROTECT_RW(0x40, 64)); in a5xx_hw_init()
828 gpu_write(gpu, REG_A5XX_CP_PROTECT(5), ADRENO_PROTECT_RW(0x80, 64)); in a5xx_hw_init()
831 gpu_write(gpu, REG_A5XX_CP_PROTECT(6), in a5xx_hw_init()
834 gpu_write(gpu, REG_A5XX_CP_PROTECT(7), in a5xx_hw_init()
838 gpu_write(gpu, REG_A5XX_CP_PROTECT(8), ADRENO_PROTECT_RW(0x800, 64)); in a5xx_hw_init()
839 gpu_write(gpu, REG_A5XX_CP_PROTECT(9), ADRENO_PROTECT_RW(0x840, 8)); in a5xx_hw_init()
840 gpu_write(gpu, REG_A5XX_CP_PROTECT(10), ADRENO_PROTECT_RW(0x880, 32)); in a5xx_hw_init()
841 gpu_write(gpu, REG_A5XX_CP_PROTECT(11), ADRENO_PROTECT_RW(0xAA0, 1)); in a5xx_hw_init()
844 gpu_write(gpu, REG_A5XX_CP_PROTECT(12), ADRENO_PROTECT_RW(0xCC0, 1)); in a5xx_hw_init()
845 gpu_write(gpu, REG_A5XX_CP_PROTECT(13), ADRENO_PROTECT_RW(0xCF0, 2)); in a5xx_hw_init()
848 gpu_write(gpu, REG_A5XX_CP_PROTECT(14), ADRENO_PROTECT_RW(0xE68, 8)); in a5xx_hw_init()
849 gpu_write(gpu, REG_A5XX_CP_PROTECT(15), ADRENO_PROTECT_RW(0xE70, 16)); in a5xx_hw_init()
852 gpu_write(gpu, REG_A5XX_CP_PROTECT(16), ADRENO_PROTECT_RW(0xE80, 16)); in a5xx_hw_init()
857 gpu_write(gpu, REG_A5XX_CP_PROTECT(17), in a5xx_hw_init()
860 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TSB_CNTL, 0); in a5xx_hw_init()
866 gpu_write64(gpu, REG_A5XX_RBBM_SECVID_TSB_TRUSTED_BASE_LO, in a5xx_hw_init()
868 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TSB_TRUSTED_SIZE, 0x00000000); in a5xx_hw_init()
870 /* Put the GPU into 64 bit by default */ in a5xx_hw_init()
871 gpu_write(gpu, REG_A5XX_CP_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
872 gpu_write(gpu, REG_A5XX_VSC_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
873 gpu_write(gpu, REG_A5XX_GRAS_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
874 gpu_write(gpu, REG_A5XX_RB_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
875 gpu_write(gpu, REG_A5XX_PC_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
876 gpu_write(gpu, REG_A5XX_HLSQ_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
877 gpu_write(gpu, REG_A5XX_VFD_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
878 gpu_write(gpu, REG_A5XX_VPC_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
879 gpu_write(gpu, REG_A5XX_UCHE_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
880 gpu_write(gpu, REG_A5XX_SP_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
881 gpu_write(gpu, REG_A5XX_TPL1_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
882 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TSB_ADDR_MODE_CNTL, 0x1); in a5xx_hw_init()
890 gpu_rmw(gpu, REG_A5XX_VPC_DBG_ECO_CNTL, 0, BIT(23)); in a5xx_hw_init()
891 gpu_rmw(gpu, REG_A5XX_HLSQ_DBG_ECO_CNTL, BIT(18), 0); in a5xx_hw_init()
894 ret = adreno_hw_init(gpu); in a5xx_hw_init()
900 a5xx_gpmu_ucode_init(gpu); in a5xx_hw_init()
902 ret = a5xx_ucode_init(gpu); in a5xx_hw_init()
907 gpu_write64(gpu, REG_A5XX_CP_RB_BASE, REG_A5XX_CP_RB_BASE_HI, in a5xx_hw_init()
908 gpu->rb[0]->iova); in a5xx_hw_init()
916 gpu_write(gpu, REG_A5XX_CP_RB_CNTL, in a5xx_hw_init()
922 a5xx_gpu->shadow = msm_gem_kernel_new(gpu->dev, in a5xx_hw_init()
923 sizeof(u32) * gpu->nr_rings, in a5xx_hw_init()
925 gpu->aspace, &a5xx_gpu->shadow_bo, in a5xx_hw_init()
932 gpu_write64(gpu, REG_A5XX_CP_RB_RPTR_ADDR, in a5xx_hw_init()
933 REG_A5XX_CP_RB_RPTR_ADDR_HI, shadowptr(a5xx_gpu, gpu->rb[0])); in a5xx_hw_init()
934 } else if (gpu->nr_rings > 1) { in a5xx_hw_init()
936 a5xx_preempt_fini(gpu); in a5xx_hw_init()
937 gpu->nr_rings = 1; in a5xx_hw_init()
940 a5xx_preempt_hw_init(gpu); in a5xx_hw_init()
943 gpu_write(gpu, REG_A5XX_RBBM_INT_0_MASK, A5XX_INT_MASK); in a5xx_hw_init()
946 gpu_write(gpu, REG_A5XX_CP_PFP_ME_CNTL, 0); in a5xx_hw_init()
947 ret = a5xx_me_init(gpu); in a5xx_hw_init()
951 ret = a5xx_power_init(gpu); in a5xx_hw_init()
960 OUT_PKT7(gpu->rb[0], CP_EVENT_WRITE, 1); in a5xx_hw_init()
961 OUT_RING(gpu->rb[0], CP_EVENT_WRITE_0_EVENT(STAT_EVENT)); in a5xx_hw_init()
963 a5xx_flush(gpu, gpu->rb[0], true); in a5xx_hw_init()
964 if (!a5xx_idle(gpu, gpu->rb[0])) in a5xx_hw_init()
976 ret = a5xx_zap_shader_init(gpu); in a5xx_hw_init()
978 OUT_PKT7(gpu->rb[0], CP_SET_SECURE_MODE, 1); in a5xx_hw_init()
979 OUT_RING(gpu->rb[0], 0x00000000); in a5xx_hw_init()
981 a5xx_flush(gpu, gpu->rb[0], true); in a5xx_hw_init()
982 if (!a5xx_idle(gpu, gpu->rb[0])) in a5xx_hw_init()
991 dev_warn_once(gpu->dev->dev, in a5xx_hw_init()
993 gpu_write(gpu, REG_A5XX_RBBM_SECVID_TRUST_CNTL, 0x0); in a5xx_hw_init()
999 a5xx_preempt_start(gpu); in a5xx_hw_init()
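a5xx_hw_init() mixes plain gpu_write() with gpu_rmw() (e.g. lines 766, 796, 890). gpu_rmw(gpu, reg, mask, or) is the msm core's read-modify-write helper; functionally it behaves like this sketch:

    /* Clear the bits in 'mask', then set the bits in 'or'. So
     * gpu_rmw(gpu, REG, 0, BIT(8)) sets bit 8 without disturbing the
     * rest of the register, and gpu_rmw(gpu, REG, BIT(11), 0) clears
     * bit 11, matching the usage above. */
    static inline void gpu_rmw(struct msm_gpu *gpu, u32 reg,
                               u32 mask, u32 or)
    {
            u32 val = gpu_read(gpu, reg);

            val &= ~mask;
            gpu_write(gpu, reg, val | or);
    }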
1004 static void a5xx_recover(struct msm_gpu *gpu) in a5xx_recover() argument
1008 adreno_dump_info(gpu); in a5xx_recover()
1012 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(i))); in a5xx_recover()
1016 a5xx_dump(gpu); in a5xx_recover()
1018 gpu_write(gpu, REG_A5XX_RBBM_SW_RESET_CMD, 1); in a5xx_recover()
1019 gpu_read(gpu, REG_A5XX_RBBM_SW_RESET_CMD); in a5xx_recover()
1020 gpu_write(gpu, REG_A5XX_RBBM_SW_RESET_CMD, 0); in a5xx_recover()
1021 adreno_recover(gpu); in a5xx_recover()
1024 static void a5xx_destroy(struct msm_gpu *gpu) in a5xx_destroy() argument
1026 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_destroy()
1029 DBG("%s", gpu->name); in a5xx_destroy()
1031 a5xx_preempt_fini(gpu); in a5xx_destroy()
1034 msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace); in a5xx_destroy()
1039 msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace); in a5xx_destroy()
1044 msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace); in a5xx_destroy()
1049 msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace); in a5xx_destroy()
1057 static inline bool _a5xx_check_idle(struct msm_gpu *gpu) in _a5xx_check_idle() argument
1059 if (gpu_read(gpu, REG_A5XX_RBBM_STATUS) & ~A5XX_RBBM_STATUS_HI_BUSY) in _a5xx_check_idle()
1063 * Nearly every abnormality ends up pausing the GPU and triggering a in _a5xx_check_idle()
1066 return !(gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS) & in _a5xx_check_idle()
1070 bool a5xx_idle(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a5xx_idle() argument
1072 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_idle()
1081 if (!adreno_idle(gpu, ring)) in a5xx_idle()
1084 if (spin_until(_a5xx_check_idle(gpu))) { in a5xx_idle()
1085 DRM_ERROR("%s: %ps: timeout waiting for GPU to idle: status %8.8X irq %8.8X rptr/wptr %d/%d\n", in a5xx_idle()
1086 gpu->name, __builtin_return_address(0), in a5xx_idle()
1087 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_idle()
1088 gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS), in a5xx_idle()
1089 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_idle()
1090 gpu_read(gpu, REG_A5XX_CP_RB_WPTR)); in a5xx_idle()
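spin_until() (line 1084) is a busy-wait macro from the adreno core: poll a condition until it holds or the idle timeout expires, returning 0 on success and -ETIMEDOUT otherwise. A sketch of its shape, from my reading of adreno_gpu.h:

    #define spin_until(X) ({                                   \
            int __ret = -ETIMEDOUT;                            \
            unsigned long __t = jiffies + ADRENO_IDLE_TIMEOUT; \
            do {                                               \
                    if (X) {                                   \
                            __ret = 0;                         \
                            break;                             \
                    }                                          \
            } while (time_before(jiffies, __t));               \
            __ret;                                             \
    })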
1099 struct msm_gpu *gpu = arg; in a5xx_fault_handler() local
1100 pr_warn_ratelimited("*** gpu fault: iova=%08lx, flags=%d (%u,%u,%u,%u)\n", in a5xx_fault_handler()
1102 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(4)), in a5xx_fault_handler()
1103 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(5)), in a5xx_fault_handler()
1104 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(6)), in a5xx_fault_handler()
1105 gpu_read(gpu, REG_A5XX_CP_SCRATCH_REG(7))); in a5xx_fault_handler()
1110 static void a5xx_cp_err_irq(struct msm_gpu *gpu) in a5xx_cp_err_irq() argument
1112 u32 status = gpu_read(gpu, REG_A5XX_CP_INTERRUPT_STATUS); in a5xx_cp_err_irq()
1117 gpu_write(gpu, REG_A5XX_CP_PFP_STAT_ADDR, 0); in a5xx_cp_err_irq()
1124 gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA); in a5xx_cp_err_irq()
1125 val = gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA); in a5xx_cp_err_irq()
1127 dev_err_ratelimited(gpu->dev->dev, "CP | opcode error | possible opcode=0x%8.8X\n", in a5xx_cp_err_irq()
1132 dev_err_ratelimited(gpu->dev->dev, "CP | HW fault | status=0x%8.8X\n", in a5xx_cp_err_irq()
1133 gpu_read(gpu, REG_A5XX_CP_HW_FAULT)); in a5xx_cp_err_irq()
1136 dev_err_ratelimited(gpu->dev->dev, "CP | DMA error\n"); in a5xx_cp_err_irq()
1139 u32 val = gpu_read(gpu, REG_A5XX_CP_PROTECT_STATUS); in a5xx_cp_err_irq()
1141 dev_err_ratelimited(gpu->dev->dev, in a5xx_cp_err_irq()
1148 u32 status = gpu_read(gpu, REG_A5XX_CP_AHB_FAULT); in a5xx_cp_err_irq()
1154 dev_err_ratelimited(gpu->dev->dev, in a5xx_cp_err_irq()
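The back-to-back reads of REG_A5XX_CP_PFP_STAT_DATA (lines 1124-1125) are deliberate: the STAT registers are indexed, so each data-port read returns one word and advances the index. A sketch of the access pattern as I understand it:

    /* Select index 0, then read twice: the first read returns (and
     * discards) word 0, the second returns word 1, which holds the
     * offending opcode reported in the error message. */
    gpu_write(gpu, REG_A5XX_CP_PFP_STAT_ADDR, 0);
    gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA);       /* word 0: discarded */
    val = gpu_read(gpu, REG_A5XX_CP_PFP_STAT_DATA); /* word 1: opcode */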
1161 static void a5xx_rbbm_err_irq(struct msm_gpu *gpu, u32 status) in a5xx_rbbm_err_irq() argument
1164 u32 val = gpu_read(gpu, REG_A5XX_RBBM_AHB_ERROR_STATUS); in a5xx_rbbm_err_irq()
1166 dev_err_ratelimited(gpu->dev->dev, in a5xx_rbbm_err_irq()
1173 gpu_write(gpu, REG_A5XX_RBBM_AHB_CMD, (1 << 4)); in a5xx_rbbm_err_irq()
1176 gpu_write(gpu, REG_A5XX_RBBM_INT_CLEAR_CMD, in a5xx_rbbm_err_irq()
1181 dev_err_ratelimited(gpu->dev->dev, "RBBM | AHB transfer timeout\n"); in a5xx_rbbm_err_irq()
1184 dev_err_ratelimited(gpu->dev->dev, "RBBM | ME master split | status=0x%X\n", in a5xx_rbbm_err_irq()
1185 gpu_read(gpu, REG_A5XX_RBBM_AHB_ME_SPLIT_STATUS)); in a5xx_rbbm_err_irq()
1188 dev_err_ratelimited(gpu->dev->dev, "RBBM | PFP master split | status=0x%X\n", in a5xx_rbbm_err_irq()
1189 gpu_read(gpu, REG_A5XX_RBBM_AHB_PFP_SPLIT_STATUS)); in a5xx_rbbm_err_irq()
1192 dev_err_ratelimited(gpu->dev->dev, "RBBM | ETS master split | status=0x%X\n", in a5xx_rbbm_err_irq()
1193 gpu_read(gpu, REG_A5XX_RBBM_AHB_ETS_SPLIT_STATUS)); in a5xx_rbbm_err_irq()
1196 dev_err_ratelimited(gpu->dev->dev, "RBBM | ATB ASYNC overflow\n"); in a5xx_rbbm_err_irq()
1199 dev_err_ratelimited(gpu->dev->dev, "RBBM | ATB bus overflow\n"); in a5xx_rbbm_err_irq()
1202 static void a5xx_uche_err_irq(struct msm_gpu *gpu) in a5xx_uche_err_irq() argument
1204 uint64_t addr = (uint64_t) gpu_read(gpu, REG_A5XX_UCHE_TRAP_LOG_HI); in a5xx_uche_err_irq()
1206 addr |= gpu_read(gpu, REG_A5XX_UCHE_TRAP_LOG_LO); in a5xx_uche_err_irq()
1208 dev_err_ratelimited(gpu->dev->dev, "UCHE | Out of bounds access | addr=0x%llX\n", in a5xx_uche_err_irq()
1212 static void a5xx_gpmu_err_irq(struct msm_gpu *gpu) in a5xx_gpmu_err_irq() argument
1214 dev_err_ratelimited(gpu->dev->dev, "GPMU | voltage droop\n"); in a5xx_gpmu_err_irq()
1217 static void a5xx_fault_detect_irq(struct msm_gpu *gpu) in a5xx_fault_detect_irq() argument
1219 struct drm_device *dev = gpu->dev; in a5xx_fault_detect_irq()
1220 struct msm_ringbuffer *ring = gpu->funcs->active_ring(gpu); in a5xx_fault_detect_irq()
1223 * If stalled on SMMU fault, we could trip the GPU's hang detection, in a5xx_fault_detect_irq()
1228 if (gpu_read(gpu, REG_A5XX_RBBM_STATUS3) & BIT(24)) in a5xx_fault_detect_irq()
1231 DRM_DEV_ERROR(dev->dev, "gpu fault ring %d fence %x status %8.8X rb %4.4x/%4.4x ib1 %16.16llX/%4.4x ib2 %16.16llX/%4.4x\n", in a5xx_fault_detect_irq()
1233 gpu_read(gpu, REG_A5XX_RBBM_STATUS), in a5xx_fault_detect_irq()
1234 gpu_read(gpu, REG_A5XX_CP_RB_RPTR), in a5xx_fault_detect_irq()
1235 gpu_read(gpu, REG_A5XX_CP_RB_WPTR), in a5xx_fault_detect_irq()
1236 gpu_read64(gpu, REG_A5XX_CP_IB1_BASE, REG_A5XX_CP_IB1_BASE_HI), in a5xx_fault_detect_irq()
1237 gpu_read(gpu, REG_A5XX_CP_IB1_BUFSZ), in a5xx_fault_detect_irq()
1238 gpu_read64(gpu, REG_A5XX_CP_IB2_BASE, REG_A5XX_CP_IB2_BASE_HI), in a5xx_fault_detect_irq()
1239 gpu_read(gpu, REG_A5XX_CP_IB2_BUFSZ)); in a5xx_fault_detect_irq()
1242 del_timer(&gpu->hangcheck_timer); in a5xx_fault_detect_irq()
1244 kthread_queue_work(gpu->worker, &gpu->recover_work); in a5xx_fault_detect_irq()
1255 static irqreturn_t a5xx_irq(struct msm_gpu *gpu) in a5xx_irq() argument
1257 u32 status = gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS); in a5xx_irq()
1263 gpu_write(gpu, REG_A5XX_RBBM_INT_CLEAR_CMD, in a5xx_irq()
1268 a5xx_rbbm_err_irq(gpu, status); in a5xx_irq()
1271 a5xx_cp_err_irq(gpu); in a5xx_irq()
1274 a5xx_fault_detect_irq(gpu); in a5xx_irq()
1277 a5xx_uche_err_irq(gpu); in a5xx_irq()
1280 a5xx_gpmu_err_irq(gpu); in a5xx_irq()
1283 a5xx_preempt_trigger(gpu); in a5xx_irq()
1284 msm_gpu_retire(gpu); in a5xx_irq()
1288 a5xx_preempt_irq(gpu); in a5xx_irq()
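a5xx_irq() is a read-ack-dispatch top half. The one subtlety is at line 1263: every source except the RBBM AHB error is acked immediately, because clearing that interrupt before the underlying error is cleared would make it storm. Condensed sketch:

    u32 status = gpu_read(gpu, REG_A5XX_RBBM_INT_0_STATUS);

    /* Ack everything except the AHB error; a5xx_rbbm_err_irq() clears
     * that one itself after clearing the error source. */
    gpu_write(gpu, REG_A5XX_RBBM_INT_CLEAR_CMD,
            status & ~A5XX_RBBM_INT_0_MASK_RBBM_AHB_ERROR);

    if (status & A5XX_RBBM_INT_0_MASK_RBBM_AHB_ERROR)
            a5xx_rbbm_err_irq(gpu, status);
    if (status & A5XX_RBBM_INT_0_MASK_CP_HW_ERROR)
            a5xx_cp_err_irq(gpu);
    /* ... the remaining sources dispatch the same way ... */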
1324 static void a5xx_dump(struct msm_gpu *gpu) in a5xx_dump() argument
1326 DRM_DEV_INFO(gpu->dev->dev, "status: %08x\n", in a5xx_dump()
1327 gpu_read(gpu, REG_A5XX_RBBM_STATUS)); in a5xx_dump()
1328 adreno_dump(gpu); in a5xx_dump()
1331 static int a5xx_pm_resume(struct msm_gpu *gpu) in a5xx_pm_resume() argument
1333 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_pm_resume()
1337 ret = msm_gpu_pm_resume(gpu); in a5xx_pm_resume()
1344 gpu_write(gpu, REG_A5XX_RBBM_CLOCK_CNTL, 0x00000055); in a5xx_pm_resume()
1345 a5xx_set_hwcg(gpu, true); in a5xx_pm_resume()
1347 gpu_rmw(gpu, REG_A5XX_RBBM_CLOCK_CNTL, 0xff, 0); in a5xx_pm_resume()
1352 gpu_write(gpu, REG_A5XX_GPMU_RBCCU_POWER_CNTL, 0x778000); in a5xx_pm_resume()
1357 ret = spin_usecs(gpu, 20, REG_A5XX_GPMU_RBCCU_PWR_CLK_STATUS, in a5xx_pm_resume()
1361 gpu->name, in a5xx_pm_resume()
1362 gpu_read(gpu, REG_A5XX_GPMU_RBCCU_PWR_CLK_STATUS)); in a5xx_pm_resume()
1367 gpu_write(gpu, REG_A5XX_GPMU_SP_POWER_CNTL, 0x778000); in a5xx_pm_resume()
1368 ret = spin_usecs(gpu, 20, REG_A5XX_GPMU_SP_PWR_CLK_STATUS, in a5xx_pm_resume()
1372 gpu->name); in a5xx_pm_resume()
1377 static int a5xx_pm_suspend(struct msm_gpu *gpu) in a5xx_pm_suspend() argument
1379 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_pm_suspend()
1389 gpu_write(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL0, mask); in a5xx_pm_suspend()
1390 spin_until((gpu_read(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL1) & in a5xx_pm_suspend()
1393 gpu_write(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL0, 0); in a5xx_pm_suspend()
1400 gpu_write(gpu, REG_A5XX_RBBM_BLOCK_SW_RESET_CMD, 0x003C0000); in a5xx_pm_suspend()
1401 gpu_write(gpu, REG_A5XX_RBBM_BLOCK_SW_RESET_CMD, 0x00000000); in a5xx_pm_suspend()
1404 ret = msm_gpu_pm_suspend(gpu); in a5xx_pm_suspend()
1409 for (i = 0; i < gpu->nr_rings; i++) in a5xx_pm_suspend()
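The suspend path drains the VBIF before power collapse (lines 1389-1393): request a halt on the XIN ports, poll until every requested port acknowledges, then drop the request. The pattern in isolation:

    /* 'mask' selects the XIN ports to halt (chip dependent). CTRL0 is
     * the halt request, CTRL1 the per-port acknowledge status. */
    gpu_write(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL0, mask);
    spin_until((gpu_read(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL1) &
                mask) == mask);
    gpu_write(gpu, REG_A5XX_VBIF_XIN_HALT_CTRL0, 0);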
1415 static int a5xx_get_timestamp(struct msm_gpu *gpu, uint64_t *value) in a5xx_get_timestamp() argument
1417 *value = gpu_read64(gpu, REG_A5XX_RBBM_ALWAYSON_COUNTER_LO, in a5xx_get_timestamp()
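The always-on counter spans a _LO/_HI register pair, combined by gpu_read64(). Its behavior is equivalent to this sketch (the msm core reads the halves separately because many LO registers are not 8-byte aligned, so a readq is not safe):

    static inline u64 gpu_read64(struct msm_gpu *gpu, u32 lo, u32 hi)
    {
            u64 val = gpu_read(gpu, lo);

            /* Non-atomic: a carry between the two reads can race, which
             * callers either tolerate or avoid by reading while idle. */
            val |= (u64) gpu_read(gpu, hi) << 32;
            return val;
    }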
1434 static int a5xx_crashdumper_init(struct msm_gpu *gpu, in a5xx_crashdumper_init() argument
1437 dumper->ptr = msm_gem_kernel_new(gpu->dev, in a5xx_crashdumper_init()
1438 SZ_1M, MSM_BO_WC, gpu->aspace, in a5xx_crashdumper_init()
1447 static int a5xx_crashdumper_run(struct msm_gpu *gpu, in a5xx_crashdumper_run() argument
1455 gpu_write64(gpu, REG_A5XX_CP_CRASH_SCRIPT_BASE_LO, in a5xx_crashdumper_run()
1458 gpu_write(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, 1); in a5xx_crashdumper_run()
1460 return gpu_poll_timeout(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, val, in a5xx_crashdumper_run()
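The crashdumper is a CP facility that executes a script of register-dump records and DMAs the results into the buffer allocated in a5xx_crashdumper_init(). Kicking it is a three-step pattern; the completion-bit value here is from my reading of the driver and may differ by version:

    /* Point the dumper at its script, start it, then poll the control
     * register for completion. gpu_poll_timeout() wraps
     * readl_poll_timeout() over the GPU mmio space (sleep interval and
     * timeout in microseconds). */
    gpu_write64(gpu, REG_A5XX_CP_CRASH_SCRIPT_BASE_LO,
            REG_A5XX_CP_CRASH_SCRIPT_BASE_HI, dumper->iova);
    gpu_write(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, 1);

    return gpu_poll_timeout(gpu, REG_A5XX_CP_CRASH_DUMP_CNTL, val,
            val & 0x04, 100, 10000);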
1491 static void a5xx_gpu_state_get_hlsq_regs(struct msm_gpu *gpu, in a5xx_gpu_state_get_hlsq_regs() argument
1499 if (a5xx_crashdumper_init(gpu, &dumper)) in a5xx_gpu_state_get_hlsq_regs()
1537 if (a5xx_crashdumper_run(gpu, &dumper)) { in a5xx_gpu_state_get_hlsq_regs()
1539 msm_gem_kernel_put(dumper.bo, gpu->aspace); in a5xx_gpu_state_get_hlsq_regs()
1547 msm_gem_kernel_put(dumper.bo, gpu->aspace); in a5xx_gpu_state_get_hlsq_regs()
1550 static struct msm_gpu_state *a5xx_gpu_state_get(struct msm_gpu *gpu) in a5xx_gpu_state_get() argument
1554 bool stalled = !!(gpu_read(gpu, REG_A5XX_RBBM_STATUS3) & BIT(24)); in a5xx_gpu_state_get()
1560 a5xx_set_hwcg(gpu, false); in a5xx_gpu_state_get()
1563 adreno_gpu_state_get(gpu, &(a5xx_state->base)); in a5xx_gpu_state_get()
1565 a5xx_state->base.rbbm_status = gpu_read(gpu, REG_A5XX_RBBM_STATUS); in a5xx_gpu_state_get()
1573 a5xx_gpu_state_get_hlsq_regs(gpu, a5xx_state); in a5xx_gpu_state_get()
1575 a5xx_set_hwcg(gpu, true); in a5xx_gpu_state_get()
1603 static void a5xx_show(struct msm_gpu *gpu, struct msm_gpu_state *state, in a5xx_show() argument
1614 adreno_show(gpu, state, p); in a5xx_show()
1644 static struct msm_ringbuffer *a5xx_active_ring(struct msm_gpu *gpu) in a5xx_active_ring() argument
1646 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_active_ring()
1652 static unsigned long a5xx_gpu_busy(struct msm_gpu *gpu) in a5xx_gpu_busy() argument
1656 /* Only read the gpu busy if the hardware is already active */ in a5xx_gpu_busy()
1657 if (pm_runtime_get_if_in_use(&gpu->pdev->dev) == 0) in a5xx_gpu_busy()
1660 busy_cycles = gpu_read64(gpu, REG_A5XX_RBBM_PERFCTR_RBBM_0_LO, in a5xx_gpu_busy()
1663 busy_time = busy_cycles - gpu->devfreq.busy_cycles; in a5xx_gpu_busy()
1664 do_div(busy_time, clk_get_rate(gpu->core_clk) / 1000000); in a5xx_gpu_busy()
1666 gpu->devfreq.busy_cycles = busy_cycles; in a5xx_gpu_busy()
1668 pm_runtime_put(&gpu->pdev->dev); in a5xx_gpu_busy()
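The devfreq busy metric is the delta of the RBBM busy-cycle counter divided by the core clock in cycles per microsecond, yielding microseconds of busy time. For example, 42,000,000 new cycles at a 600 MHz core clock is 42e6 / 600 = 70,000 us busy. In code, as listed above:

    busy_time = busy_cycles - gpu->devfreq.busy_cycles;
    do_div(busy_time, clk_get_rate(gpu->core_clk) / 1000000);
    gpu->devfreq.busy_cycles = busy_cycles;  /* save for next sample */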
1676 static uint32_t a5xx_get_rptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring) in a5xx_get_rptr() argument
1678 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu); in a5xx_get_rptr()
1684 return ring->memptrs->rptr = gpu_read(gpu, REG_A5XX_CP_RB_RPTR); in a5xx_get_rptr()
1751 struct msm_gpu *gpu; in a5xx_gpu_init() local
1764 gpu = &adreno_gpu->base; in a5xx_gpu_init()
1778 if (gpu->aspace) in a5xx_gpu_init()
1779 msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, a5xx_fault_handler); in a5xx_gpu_init()
1782 a5xx_preempt_init(gpu); in a5xx_gpu_init()
1784 return gpu; in a5xx_gpu_init()