/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Copyright 2013-2014 Freescale Semiconductor, Inc.
 * Copyright 2018 Angelo Dureghello <angelo@sysam.it>
 */
#ifndef _FSL_EDMA_COMMON_H_
#define _FSL_EDMA_COMMON_H_

#include <linux/dma-direction.h>
#include <linux/platform_device.h>
#include "virt-dma.h"

#define EDMA_CR_EDBG		BIT(1)
#define EDMA_CR_ERCA		BIT(2)
#define EDMA_CR_ERGA		BIT(3)
#define EDMA_CR_HOE		BIT(4)
#define EDMA_CR_HALT		BIT(5)
#define EDMA_CR_CLM		BIT(6)
#define EDMA_CR_EMLM		BIT(7)
#define EDMA_CR_ECX		BIT(16)
#define EDMA_CR_CX		BIT(17)

#define EDMA_SEEI_SEEI(x)	((x) & GENMASK(4, 0))
#define EDMA_CEEI_CEEI(x)	((x) & GENMASK(4, 0))
#define EDMA_CINT_CINT(x)	((x) & GENMASK(4, 0))
#define EDMA_CERR_CERR(x)	((x) & GENMASK(4, 0))

#define EDMA_TCD_ATTR_DSIZE(x)		(((x) & GENMASK(2, 0)))
#define EDMA_TCD_ATTR_DMOD(x)		(((x) & GENMASK(4, 0)) << 3)
#define EDMA_TCD_ATTR_SSIZE(x)		(((x) & GENMASK(2, 0)) << 8)
#define EDMA_TCD_ATTR_SMOD(x)		(((x) & GENMASK(4, 0)) << 11)

#define EDMA_TCD_CITER_CITER(x)		((x) & GENMASK(14, 0))
#define EDMA_TCD_BITER_BITER(x)		((x) & GENMASK(14, 0))

#define EDMA_TCD_CSR_START		BIT(0)
#define EDMA_TCD_CSR_INT_MAJOR		BIT(1)
#define EDMA_TCD_CSR_INT_HALF		BIT(2)
#define EDMA_TCD_CSR_D_REQ		BIT(3)
#define EDMA_TCD_CSR_E_SG		BIT(4)
#define EDMA_TCD_CSR_E_LINK		BIT(5)
#define EDMA_TCD_CSR_ACTIVE		BIT(6)
#define EDMA_TCD_CSR_DONE		BIT(7)

#define EDMA_V3_TCD_NBYTES_MLOFF_NBYTES(x) ((x) & GENMASK(9, 0))
#define EDMA_V3_TCD_NBYTES_MLOFF(x)        ((x) << 10)
#define EDMA_V3_TCD_NBYTES_DMLOE           BIT(30)
#define EDMA_V3_TCD_NBYTES_SMLOE           BIT(31)

#define EDMAMUX_CHCFG_DIS		0x0
#define EDMAMUX_CHCFG_ENBL		0x80
#define EDMAMUX_CHCFG_SOURCE(n)		((n) & 0x3F)
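
/*
 * Illustrative sketch only (not part of this header's API): a DMAMUX channel
 * is routed by writing the request source together with the enable bit to
 * that channel's configuration byte. "muxaddr", "ch_off" and "slot" are
 * hypothetical names; the real programming is done by fsl_edma_chan_mux().
 *
 *	iowrite8(EDMAMUX_CHCFG_ENBL | EDMAMUX_CHCFG_SOURCE(slot),
 *		 muxaddr + ch_off);
 */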

#define DMAMUX_NR	2

#define EDMA_TCD                0x1000

#define FSL_EDMA_BUSWIDTHS	(BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | \
				 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) | \
				 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) | \
				 BIT(DMA_SLAVE_BUSWIDTH_8_BYTES))

#define EDMA_V3_CH_SBR_RD          BIT(22)
#define EDMA_V3_CH_SBR_WR          BIT(21)
#define EDMA_V3_CH_CSR_ERQ         BIT(0)
#define EDMA_V3_CH_CSR_EARQ        BIT(1)
#define EDMA_V3_CH_CSR_EEI         BIT(2)
#define EDMA_V3_CH_CSR_DONE        BIT(30)
#define EDMA_V3_CH_CSR_ACTIVE      BIT(31)

enum fsl_edma_pm_state {
	RUNNING = 0,
	SUSPENDED,
};

struct fsl_edma_hw_tcd {
	__le32	saddr;
	__le16	soff;
	__le16	attr;
	__le32	nbytes;
	__le32	slast;
	__le32	daddr;
	__le16	doff;
	__le16	citer;
	__le32	dlast_sga;
	__le16	csr;
	__le16	biter;
};
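
/*
 * A minimal illustration (hedged, not part of the API) of how the
 * EDMA_TCD_* field macros above combine when a software TCD is filled for a
 * 32-bit, interrupt-on-completion transfer.  "src", "dst", "nbytes" and
 * "iter" are hypothetical values, and SSIZE/DSIZE = 2 assumes the usual
 * eDMA encoding for 32-bit accesses; the driver's real TCD setup also
 * handles endianness quirks, linking and scatter-gather.
 *
 *	tcd->saddr  = cpu_to_le32(src);
 *	tcd->daddr  = cpu_to_le32(dst);
 *	tcd->attr   = cpu_to_le16(EDMA_TCD_ATTR_SSIZE(2) | EDMA_TCD_ATTR_DSIZE(2));
 *	tcd->soff   = cpu_to_le16(4);
 *	tcd->doff   = cpu_to_le16(4);
 *	tcd->nbytes = cpu_to_le32(nbytes);
 *	tcd->citer  = cpu_to_le16(EDMA_TCD_CITER_CITER(iter));
 *	tcd->biter  = cpu_to_le16(EDMA_TCD_BITER_BITER(iter));
 *	tcd->csr    = cpu_to_le16(EDMA_TCD_CSR_INT_MAJOR | EDMA_TCD_CSR_D_REQ);
 */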

struct fsl_edma3_ch_reg {
	__le32	ch_csr;
	__le32	ch_es;
	__le32	ch_int;
	__le32	ch_sbr;
	__le32	ch_pri;
	__le32	ch_mux;
	__le32  ch_mattr; /* edma4, reserved for edma3 */
	__le32  ch_reserved;
	struct fsl_edma_hw_tcd tcd;
} __packed;

/*
 * These are iomem pointers, for both v32 and v64.
 */
struct edma_regs {
	void __iomem *cr;
	void __iomem *es;
	void __iomem *erqh;
	void __iomem *erql;	/* aka erq on v32 */
	void __iomem *eeih;
	void __iomem *eeil;	/* aka eei on v32 */
	void __iomem *seei;
	void __iomem *ceei;
	void __iomem *serq;
	void __iomem *cerq;
	void __iomem *cint;
	void __iomem *cerr;
	void __iomem *ssrt;
	void __iomem *cdne;
	void __iomem *inth;
	void __iomem *intl;
	void __iomem *errh;
	void __iomem *errl;
};

struct fsl_edma_sw_tcd {
	dma_addr_t			ptcd;
	struct fsl_edma_hw_tcd		*vtcd;
};

struct fsl_edma_chan {
	struct virt_dma_chan		vchan;
	enum dma_status			status;
	enum fsl_edma_pm_state		pm_state;
	bool				idle;
	u32				slave_id;
	struct fsl_edma_engine		*edma;
	struct fsl_edma_desc		*edesc;
	struct dma_slave_config		cfg;
	u32				attr;
	bool				is_sw;
	struct dma_pool			*tcd_pool;
	dma_addr_t			dma_dev_addr;
	u32				dma_dev_size;
	enum dma_data_direction		dma_dir;
	char				chan_name[32];
	struct fsl_edma_hw_tcd __iomem *tcd;
	u32				real_count;
	struct work_struct		issue_worker;
	struct platform_device		*pdev;
	struct device			*pd_dev;
	u32				srcid;
	struct clk			*clk;
	int				priority;
	int				hw_chanid;
	int				txirq;
	bool				is_rxchan;
	bool				is_remote;
	bool				is_multi_fifo;
};

struct fsl_edma_desc {
	struct virt_dma_desc		vdesc;
	struct fsl_edma_chan		*echan;
	bool				iscyclic;
	enum dma_transfer_direction	dirn;
	unsigned int			n_tcds;
	struct fsl_edma_sw_tcd		tcd[];
};
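
/*
 * Since fsl_edma_desc ends in a flexible array of software TCDs, a
 * descriptor covering "sg_len" segments would typically be sized with
 * struct_size().  Sketch only; "sg_len" and "fsl_desc" are placeholders:
 *
 *	fsl_desc = kzalloc(struct_size(fsl_desc, tcd, sg_len), GFP_NOWAIT);
 *	fsl_desc->n_tcds = sg_len;
 */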

#define FSL_EDMA_DRV_HAS_DMACLK		BIT(0)
#define FSL_EDMA_DRV_MUX_SWAP		BIT(1)
#define FSL_EDMA_DRV_CONFIG32		BIT(2)
#define FSL_EDMA_DRV_WRAP_IO		BIT(3)
#define FSL_EDMA_DRV_EDMA64		BIT(4)
#define FSL_EDMA_DRV_HAS_PD		BIT(5)
#define FSL_EDMA_DRV_HAS_CHCLK		BIT(6)
#define FSL_EDMA_DRV_HAS_CHMUX		BIT(7)
/* i.MX8 QM audio eDMA: remote/local swapped */
#define FSL_EDMA_DRV_QUIRK_SWAPPED	BIT(8)
/* channel control/status registers are in the TCD address space (eDMA3 register layout) */
#define FSL_EDMA_DRV_SPLIT_REG		BIT(9)
#define FSL_EDMA_DRV_BUS_8BYTE		BIT(10)
#define FSL_EDMA_DRV_DEV_TO_DEV		BIT(11)
#define FSL_EDMA_DRV_ALIGN_64BYTE	BIT(12)
/* need to clear CHn_CSR[DONE] before enabling the TCD's E_SG bit */
#define FSL_EDMA_DRV_CLEAR_DONE_E_SG	BIT(13)
/* need to clear CHn_CSR[DONE] before enabling the TCD's MAJORELINK bit */
#define FSL_EDMA_DRV_CLEAR_DONE_E_LINK	BIT(14)

#define FSL_EDMA_DRV_EDMA3	(FSL_EDMA_DRV_SPLIT_REG |	\
				 FSL_EDMA_DRV_BUS_8BYTE |	\
				 FSL_EDMA_DRV_DEV_TO_DEV |	\
				 FSL_EDMA_DRV_ALIGN_64BYTE |	\
				 FSL_EDMA_DRV_CLEAR_DONE_E_SG |	\
				 FSL_EDMA_DRV_CLEAR_DONE_E_LINK)

#define FSL_EDMA_DRV_EDMA4	(FSL_EDMA_DRV_SPLIT_REG |	\
				 FSL_EDMA_DRV_BUS_8BYTE |	\
				 FSL_EDMA_DRV_DEV_TO_DEV |	\
				 FSL_EDMA_DRV_ALIGN_64BYTE |	\
				 FSL_EDMA_DRV_CLEAR_DONE_E_LINK)

struct fsl_edma_drvdata {
	u32			dmamuxs; /* only used before v3 */
	u32			chreg_off;
	u32			chreg_space_sz;
	u32			flags;
	int			(*setup_irq)(struct platform_device *pdev,
					     struct fsl_edma_engine *fsl_edma);
};

struct fsl_edma_engine {
	struct dma_device	dma_dev;
	void __iomem		*membase;
	void __iomem		*muxbase[DMAMUX_NR];
	struct clk		*muxclk[DMAMUX_NR];
	struct clk		*dmaclk;
	struct clk		*chclk;
	struct mutex		fsl_edma_mutex;
	const struct fsl_edma_drvdata *drvdata;
	u32			n_chans;
	int			txirq;
	int			errirq;
	bool			big_endian;
	struct edma_regs	regs;
	u64			chan_masked;
	struct fsl_edma_chan	chans[];
};

#define edma_read_tcdreg(chan, __name)				\
(sizeof(chan->tcd->__name) == sizeof(u32) ?			\
	edma_readl(chan->edma, &chan->tcd->__name) :		\
	edma_readw(chan->edma, &chan->tcd->__name))

#define edma_write_tcdreg(chan, val, __name)			\
(sizeof(chan->tcd->__name) == sizeof(u32) ?			\
	edma_writel(chan->edma, (u32 __force)(val), &chan->tcd->__name) :	\
	edma_writew(chan->edma, (u16 __force)(val), &chan->tcd->__name))
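
/*
 * The two accessors above pick the 32-bit or 16-bit MMIO helper from the
 * width of the named TCD field, so callers only name the field.  A hedged
 * usage sketch, with "fsl_chan" standing in for any struct fsl_edma_chan *:
 *
 *	edma_write_tcdreg(fsl_chan, 0, csr);
 *	citer = edma_read_tcdreg(fsl_chan, citer);
 */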

#define edma_readl_chreg(chan, __name)				\
	edma_readl(chan->edma,					\
		   (void __iomem *)&(container_of(chan->tcd, struct fsl_edma3_ch_reg, tcd)->__name))

#define edma_writel_chreg(chan, val, __name)			\
	edma_writel(chan->edma, val,				\
		   (void __iomem *)&(container_of(chan->tcd, struct fsl_edma3_ch_reg, tcd)->__name))
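
/*
 * These two rely on chan->tcd pointing at the tcd member embedded in a
 * struct fsl_edma3_ch_reg (the eDMA3/eDMA4 "split register" layout), so
 * container_of() recovers the per-channel control registers.  Illustrative
 * use only, e.g. a read-modify-write of the channel CSR:
 *
 *	csr = edma_readl_chreg(fsl_chan, ch_csr);
 *	edma_writel_chreg(fsl_chan, csr | EDMA_V3_CH_CSR_ERQ, ch_csr);
 */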

/*
 * R/W functions for big- or little-endian registers:
 * The eDMA controller's endianness is independent of the CPU core's
 * endianness. For the big-endian IP module, the offsets of 8-bit and
 * 16-bit registers are also swapped relative to the little-endian IP.
 */
static inline u32 edma_readl(struct fsl_edma_engine *edma, void __iomem *addr)
{
	if (edma->big_endian)
		return ioread32be(addr);
	else
		return ioread32(addr);
}

static inline u16 edma_readw(struct fsl_edma_engine *edma, void __iomem *addr)
{
	if (edma->big_endian)
		return ioread16be(addr);
	else
		return ioread16(addr);
}

static inline void edma_writeb(struct fsl_edma_engine *edma,
			       u8 val, void __iomem *addr)
{
	/* swap the reg offset for these in big-endian mode */
	if (edma->big_endian)
		iowrite8(val, (void __iomem *)((unsigned long)addr ^ 0x3));
	else
		iowrite8(val, addr);
}

static inline void edma_writew(struct fsl_edma_engine *edma,
			       u16 val, void __iomem *addr)
{
	/* swap the reg offset for these in big-endian mode */
	if (edma->big_endian)
		iowrite16be(val, (void __iomem *)((unsigned long)addr ^ 0x2));
	else
		iowrite16(val, addr);
}
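
/*
 * Worked example of the offset swap above: a byte-wide register that the
 * little-endian IP places at offset 0x3 within a 32-bit word sits at
 * offset 0x0 on the big-endian IP (and vice versa), hence addr ^ 0x3;
 * 16-bit registers swap halves of the word, hence addr ^ 0x2.
 */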

static inline void edma_writel(struct fsl_edma_engine *edma,
			       u32 val, void __iomem *addr)
{
	if (edma->big_endian)
		iowrite32be(val, addr);
	else
		iowrite32(val, addr);
}

static inline struct fsl_edma_chan *to_fsl_edma_chan(struct dma_chan *chan)
{
	return container_of(chan, struct fsl_edma_chan, vchan.chan);
}

static inline u32 fsl_edma_drvflags(struct fsl_edma_chan *fsl_chan)
{
	return fsl_chan->edma->drvdata->flags;
}
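
/*
 * Drivers typically gate version-specific behaviour on the drvdata flags,
 * for example (sketch only, "val" is a placeholder):
 *
 *	if (fsl_edma_drvflags(fsl_chan) & FSL_EDMA_DRV_SPLIT_REG)
 *		edma_writel_chreg(fsl_chan, val, ch_sbr);
 */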

static inline struct fsl_edma_desc *to_fsl_edma_desc(struct virt_dma_desc *vd)
{
	return container_of(vd, struct fsl_edma_desc, vdesc);
}

static inline void fsl_edma_err_chan_handler(struct fsl_edma_chan *fsl_chan)
{
	fsl_chan->status = DMA_ERROR;
	fsl_chan->idle = true;
}

void fsl_edma_tx_chan_handler(struct fsl_edma_chan *fsl_chan);
void fsl_edma_disable_request(struct fsl_edma_chan *fsl_chan);
void fsl_edma_chan_mux(struct fsl_edma_chan *fsl_chan,
			unsigned int slot, bool enable);
void fsl_edma_free_desc(struct virt_dma_desc *vdesc);
int fsl_edma_terminate_all(struct dma_chan *chan);
int fsl_edma_pause(struct dma_chan *chan);
int fsl_edma_resume(struct dma_chan *chan);
int fsl_edma_slave_config(struct dma_chan *chan,
				 struct dma_slave_config *cfg);
enum dma_status fsl_edma_tx_status(struct dma_chan *chan,
		dma_cookie_t cookie, struct dma_tx_state *txstate);
struct dma_async_tx_descriptor *fsl_edma_prep_dma_cyclic(
		struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
		size_t period_len, enum dma_transfer_direction direction,
		unsigned long flags);
struct dma_async_tx_descriptor *fsl_edma_prep_slave_sg(
		struct dma_chan *chan, struct scatterlist *sgl,
		unsigned int sg_len, enum dma_transfer_direction direction,
		unsigned long flags, void *context);
struct dma_async_tx_descriptor *fsl_edma_prep_memcpy(
		struct dma_chan *chan, dma_addr_t dma_dst, dma_addr_t dma_src,
		size_t len, unsigned long flags);
void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan);
void fsl_edma_issue_pending(struct dma_chan *chan);
int fsl_edma_alloc_chan_resources(struct dma_chan *chan);
void fsl_edma_free_chan_resources(struct dma_chan *chan);
void fsl_edma_cleanup_vchan(struct dma_device *dmadev);
void fsl_edma_setup_regs(struct fsl_edma_engine *edma);

#endif /* _FSL_EDMA_COMMON_H_ */