Home
last modified time | relevance | path

Searched refs:gmu (Results 1 – 5 of 5) sorted by relevance

/Linux-v4.19/drivers/gpu/drm/msm/adreno/
a6xx_gmu.c:14 struct a6xx_gmu *gmu = data; in a6xx_gmu_irq() local
17 status = gmu_read(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_STATUS); in a6xx_gmu_irq()
18 gmu_write(gmu, REG_A6XX_GMU_AO_HOST_INTERRUPT_CLR, status); in a6xx_gmu_irq()
21 dev_err_ratelimited(gmu->dev, "GMU watchdog expired\n"); in a6xx_gmu_irq()
28 dev_err_ratelimited(gmu->dev, "GMU AHB bus error\n"); in a6xx_gmu_irq()
31 dev_err_ratelimited(gmu->dev, "GMU fence error: 0x%x\n", in a6xx_gmu_irq()
32 gmu_read(gmu, REG_A6XX_GMU_AHB_FENCE_STATUS)); in a6xx_gmu_irq()
39 struct a6xx_gmu *gmu = data; in a6xx_hfi_irq() local
42 status = gmu_read(gmu, REG_A6XX_GMU_GMU2HOST_INTR_INFO); in a6xx_hfi_irq()
43 gmu_write(gmu, REG_A6XX_GMU_GMU2HOST_INTR_CLR, status); in a6xx_hfi_irq()
[all …]
a6xx_hfi.c:54 static int a6xx_hfi_queue_write(struct a6xx_gmu *gmu, in a6xx_hfi_queue_write() argument
78 gmu_write(gmu, REG_A6XX_GMU_HOST2GMU_INTR_SET, 0x01); in a6xx_hfi_queue_write()
99 static void a6xx_hfi_handle_ack(struct a6xx_gmu *gmu, in a6xx_hfi_handle_ack() argument
123 dev_err(gmu->dev, "Nobody was waiting for HFI message %d\n", seqnum); in a6xx_hfi_handle_ack()
126 static void a6xx_hfi_handle_error(struct a6xx_gmu *gmu, in a6xx_hfi_handle_error() argument
131 dev_err(gmu->dev, "GMU firmware error %d\n", error->code); in a6xx_hfi_handle_error()
136 struct a6xx_gmu *gmu = (struct a6xx_gmu *) data; in a6xx_hfi_task() local
137 struct a6xx_hfi_queue *queue = &gmu->queues[HFI_RESPONSE_QUEUE]; in a6xx_hfi_task()
148 dev_err(gmu->dev, in a6xx_hfi_task()
156 a6xx_hfi_handle_ack(gmu, &resp); in a6xx_hfi_task()
[all …]
a6xx_gmu.h:82 static inline u32 gmu_read(struct a6xx_gmu *gmu, u32 offset) in gmu_read() argument
84 return msm_readl(gmu->mmio + (offset << 2)); in gmu_read()
87 static inline void gmu_write(struct a6xx_gmu *gmu, u32 offset, u32 value) in gmu_write() argument
89 return msm_writel(value, gmu->mmio + (offset << 2)); in gmu_write()
92 static inline void pdc_write(struct a6xx_gmu *gmu, u32 offset, u32 value) in pdc_write() argument
94 return msm_writel(value, gmu->pdc_mmio + (offset << 2)); in pdc_write()
97 static inline void gmu_rmw(struct a6xx_gmu *gmu, u32 reg, u32 mask, u32 or) in gmu_rmw() argument
99 u32 val = gmu_read(gmu, reg); in gmu_rmw()
103 gmu_write(gmu, reg, val | or); in gmu_rmw()
106 #define gmu_poll_timeout(gmu, addr, val, cond, interval, timeout) \ argument
[all …]
a6xx_gpu.h:23 struct a6xx_gmu gmu; member
52 bool a6xx_gmu_isidle(struct a6xx_gmu *gmu);
54 int a6xx_gmu_set_oob(struct a6xx_gmu *gmu, enum a6xx_gmu_oob_state state);
55 void a6xx_gmu_clear_oob(struct a6xx_gmu *gmu, enum a6xx_gmu_oob_state state);
a6xx_gpu.c:16 if (!a6xx_gmu_isidle(&a6xx_gpu->gmu)) in _a6xx_check_idle()
231 struct a6xx_gmu *gmu = &a6xx_gpu->gmu; in a6xx_set_hwcg() local
242 gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 1, 0); in a6xx_set_hwcg()
249 gmu_rmw(gmu, REG_A6XX_GPU_GMU_GX_SPTPRAC_CLOCK_CONTROL, 0, 1); in a6xx_set_hwcg()
328 a6xx_gmu_set_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in a6xx_hw_init()
389 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GPU_GMU_AO_GPU_CX_BUSY_MASK, in a6xx_hw_init()
391 gmu_rmw(&a6xx_gpu->gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_SELECT_0, in a6xx_hw_init()
393 gmu_write(&a6xx_gpu->gmu, REG_A6XX_GMU_CX_GMU_POWER_COUNTER_ENABLE, in a6xx_hw_init()
474 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_GPU_SET); in a6xx_hw_init()
477 a6xx_gmu_clear_oob(&a6xx_gpu->gmu, GMU_OOB_BOOT_SLUMBER); in a6xx_hw_init()
[all …]