/*
 * Copyright (c) 2022 TOKITA Hiroshi <tokita.hiroshi@gmail.com>
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr/device.h>
#include <zephyr/drivers/clock_control.h>
#include <zephyr/drivers/clock_control/gd32.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/drivers/reset.h>
#include <zephyr/logging/log.h>

#include <gd32_dma.h>
#include <zephyr/irq.h>

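/*
 * Two devicetree compatibles are supported: "gd,gd32-dma-v1" for the
 * newer controllers with per-channel peripheral selection and a FIFO
 * (e.g. GD32F4xx-class parts), and "gd,gd32-dma" for the legacy
 * controllers. Only one variant is expected to be enabled in a build;
 * DT_DRV_COMPAT is set to whichever has a node with status "okay".
 */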
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
#define DT_DRV_COMPAT gd_gd32_dma_v1
#elif DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma)
#define DT_DRV_COMPAT gd_gd32_dma
#endif

#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
#define CHXCTL_PERIEN_OFFSET	  ((uint32_t)25U)
#define GD32_DMA_CHXCTL_DIR	  BIT(6)
#define GD32_DMA_CHXCTL_M2M	  BIT(7)
#define GD32_DMA_INTERRUPT_ERRORS (DMA_CHXCTL_SDEIE | DMA_CHXCTL_TAEIE)
#define GD32_DMA_FLAG_ERRORS	  (DMA_FLAG_SDE | DMA_FLAG_TAE)
#else
#define GD32_DMA_CHXCTL_DIR	  BIT(4)
#define GD32_DMA_CHXCTL_M2M	  BIT(14)
#define GD32_DMA_INTERRUPT_ERRORS DMA_CHXCTL_ERRIE
#define GD32_DMA_FLAG_ERRORS	  DMA_FLAG_ERR
#endif

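/*
 * The GD32F3X0 HAL defines its DMA register accessors against the fixed
 * base address of the single DMA controller on those parts. Redefine
 * them here so every accessor takes the controller base address as a
 * parameter, matching the multi-controller series.
 */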
#ifdef CONFIG_SOC_SERIES_GD32F3X0
#undef DMA_INTF
#undef DMA_INTC
#undef DMA_CHCTL
#undef DMA_CHCNT
#undef DMA_CHPADDR
#undef DMA_CHMADDR

#define DMA_INTF(dma)	     REG32(dma + 0x00UL)
#define DMA_INTC(dma)	     REG32(dma + 0x04UL)
#define DMA_CHCTL(dma, ch)   REG32((dma + 0x08UL) + 0x14UL * (uint32_t)(ch))
#define DMA_CHCNT(dma, ch)   REG32((dma + 0x0CUL) + 0x14UL * (uint32_t)(ch))
#define DMA_CHPADDR(dma, ch) REG32((dma + 0x10UL) + 0x14UL * (uint32_t)(ch))
#define DMA_CHMADDR(dma, ch) REG32((dma + 0x14UL) + 0x14UL * (uint32_t)(ch))
#endif

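/*
 * Uniform, instance-aware spellings of the HAL register accessors so
 * the rest of the driver does not depend on per-series macro names.
 */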
#define GD32_DMA_INTF(dma)	  DMA_INTF(dma)
#define GD32_DMA_INTC(dma)	  DMA_INTC(dma)
#define GD32_DMA_CHCTL(dma, ch)	  DMA_CHCTL((dma), (ch))
#define GD32_DMA_CHCNT(dma, ch)	  DMA_CHCNT((dma), (ch))
#define GD32_DMA_CHPADDR(dma, ch) DMA_CHPADDR((dma), (ch))
#define GD32_DMA_CHMADDR(dma, ch) DMA_CHMADDR((dma), (ch))

LOG_MODULE_REGISTER(dma_gd32, CONFIG_DMA_LOG_LEVEL);

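/* Per-instance constant configuration, filled from devicetree. */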
struct dma_gd32_config {
	uint32_t reg;
	uint32_t channels;
	uint16_t clkid;
	bool mem2mem;
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	struct reset_dt_spec reset;
#endif
	void (*irq_configure)(void);
};

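/* Runtime state of a single DMA channel. */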
struct dma_gd32_channel {
	dma_callback_t callback;
	void *user_data;
	uint32_t direction;
	bool busy;
};

struct dma_gd32_data {
	struct dma_context ctx;
	struct dma_gd32_channel *channels;
};

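/* Normalized view of one side (source or destination) of a transfer. */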
struct dma_gd32_srcdst_config {
	uint32_t addr;
	uint32_t adj;
	uint32_t width;
};

/*
 * Register access functions
 */

static inline void
gd32_dma_periph_increase_enable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) |= DMA_CHXCTL_PNAGA;
}

static inline void
gd32_dma_periph_increase_disable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~DMA_CHXCTL_PNAGA;
}

static inline void
gd32_dma_transfer_set_memory_to_memory(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) |= GD32_DMA_CHXCTL_M2M;
	GD32_DMA_CHCTL(reg, ch) &= ~GD32_DMA_CHXCTL_DIR;
}

static inline void
gd32_dma_transfer_set_memory_to_periph(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~GD32_DMA_CHXCTL_M2M;
	GD32_DMA_CHCTL(reg, ch) |= GD32_DMA_CHXCTL_DIR;
}

static inline void
gd32_dma_transfer_set_periph_to_memory(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~GD32_DMA_CHXCTL_M2M;
	GD32_DMA_CHCTL(reg, ch) &= ~GD32_DMA_CHXCTL_DIR;
}

static inline void
gd32_dma_memory_increase_enable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) |= DMA_CHXCTL_MNAGA;
}

static inline void
gd32_dma_memory_increase_disable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~DMA_CHXCTL_MNAGA;
}

static inline void
gd32_dma_circulation_enable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) |= DMA_CHXCTL_CMEN;
}

static inline void
gd32_dma_circulation_disable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~DMA_CHXCTL_CMEN;
}

static inline void gd32_dma_channel_enable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) |= DMA_CHXCTL_CHEN;
}

static inline void gd32_dma_channel_disable(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~DMA_CHXCTL_CHEN;
}

static inline void
gd32_dma_interrupt_enable(uint32_t reg, dma_channel_enum ch, uint32_t source)
{
	GD32_DMA_CHCTL(reg, ch) |= source;
}

static inline void
gd32_dma_interrupt_disable(uint32_t reg, dma_channel_enum ch, uint32_t source)
{
	GD32_DMA_CHCTL(reg, ch) &= ~source;
}

static inline void
gd32_dma_priority_config(uint32_t reg, dma_channel_enum ch, uint32_t priority)
{
	uint32_t ctl = GD32_DMA_CHCTL(reg, ch);

	GD32_DMA_CHCTL(reg, ch) = (ctl & (~DMA_CHXCTL_PRIO)) | priority;
}

static inline void
gd32_dma_memory_width_config(uint32_t reg, dma_channel_enum ch, uint32_t mwidth)
{
	uint32_t ctl = GD32_DMA_CHCTL(reg, ch);

	GD32_DMA_CHCTL(reg, ch) = (ctl & (~DMA_CHXCTL_MWIDTH)) | mwidth;
}

static inline void
gd32_dma_periph_width_config(uint32_t reg, dma_channel_enum ch, uint32_t pwidth)
{
	uint32_t ctl = GD32_DMA_CHCTL(reg, ch);

	GD32_DMA_CHCTL(reg, ch) = (ctl & (~DMA_CHXCTL_PWIDTH)) | pwidth;
}

#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
static inline void
gd32_dma_channel_subperipheral_select(uint32_t reg, dma_channel_enum ch,
				      dma_subperipheral_enum sub_periph)
{
	uint32_t ctl = GD32_DMA_CHCTL(reg, ch);

	GD32_DMA_CHCTL(reg, ch) =
		(ctl & (~DMA_CHXCTL_PERIEN)) |
		((uint32_t)sub_periph << CHXCTL_PERIEN_OFFSET);
}
#endif

static inline void
gd32_dma_periph_address_config(uint32_t reg, dma_channel_enum ch, uint32_t addr)
{
	GD32_DMA_CHPADDR(reg, ch) = addr;
}

static inline void
gd32_dma_memory_address_config(uint32_t reg, dma_channel_enum ch, uint32_t addr)
{
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	DMA_CHM0ADDR(reg, ch) = addr;
#else
	GD32_DMA_CHMADDR(reg, ch) = addr;
#endif
}

static inline void
gd32_dma_transfer_number_config(uint32_t reg, dma_channel_enum ch, uint32_t num)
{
	GD32_DMA_CHCNT(reg, ch) = (num & DMA_CHXCNT_CNT);
}

static inline uint32_t
gd32_dma_transfer_number_get(uint32_t reg, dma_channel_enum ch)
{
	return GD32_DMA_CHCNT(reg, ch);
}

static inline void
gd32_dma_interrupt_flag_clear(uint32_t reg, dma_channel_enum ch, uint32_t flag)
{
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	if (ch < DMA_CH4) {
		DMA_INTC0(reg) |= DMA_FLAG_ADD(flag, ch);
	} else {
		DMA_INTC1(reg) |= DMA_FLAG_ADD(flag, ch - DMA_CH4);
	}
#else
	GD32_DMA_INTC(reg) |= DMA_FLAG_ADD(flag, ch);
#endif
}

static inline void
gd32_dma_flag_clear(uint32_t reg, dma_channel_enum ch, uint32_t flag)
{
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	if (ch < DMA_CH4) {
		DMA_INTC0(reg) |= DMA_FLAG_ADD(flag, ch);
	} else {
		DMA_INTC1(reg) |= DMA_FLAG_ADD(flag, ch - DMA_CH4);
	}
#else
	GD32_DMA_INTC(reg) |= DMA_FLAG_ADD(flag, ch);
#endif
}

static inline uint32_t
gd32_dma_interrupt_flag_get(uint32_t reg, dma_channel_enum ch, uint32_t flag)
{
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	if (ch < DMA_CH4) {
		return (DMA_INTF0(reg) & DMA_FLAG_ADD(flag, ch));
	} else {
		return (DMA_INTF1(reg) & DMA_FLAG_ADD(flag, ch - DMA_CH4));
	}
#else
	return (GD32_DMA_INTF(reg) & DMA_FLAG_ADD(flag, ch));
#endif
}

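/*
 * Return the channel to its reset state: disable it, restore the reset
 * values of its registers, and clear any pending interrupt flags.
 */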
static inline void gd32_dma_deinit(uint32_t reg, dma_channel_enum ch)
{
	GD32_DMA_CHCTL(reg, ch) &= ~DMA_CHXCTL_CHEN;

	GD32_DMA_CHCTL(reg, ch) = DMA_CHCTL_RESET_VALUE;
	GD32_DMA_CHCNT(reg, ch) = DMA_CHCNT_RESET_VALUE;
	GD32_DMA_CHPADDR(reg, ch) = DMA_CHPADDR_RESET_VALUE;
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	DMA_CHM0ADDR(reg, ch) = DMA_CHMADDR_RESET_VALUE;
	DMA_CHFCTL(reg, ch) = DMA_CHFCTL_RESET_VALUE;
	if (ch < DMA_CH4) {
		DMA_INTC0(reg) |= DMA_FLAG_ADD(DMA_CHINTF_RESET_VALUE, ch);
	} else {
		DMA_INTC1(reg) |=
			DMA_FLAG_ADD(DMA_CHINTF_RESET_VALUE, ch - DMA_CH4);
	}
#else
	GD32_DMA_CHMADDR(reg, ch) = DMA_CHMADDR_RESET_VALUE;
	GD32_DMA_INTC(reg) |= DMA_FLAG_ADD(DMA_CHINTF_RESET_VALUE, ch);
#endif
}

/*
 * Utility functions
 */

static inline uint32_t dma_gd32_priority(uint32_t prio)
{
	return CHCTL_PRIO(prio);
}

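/*
 * Map a transfer width in bytes to the CHXCTL MWIDTH/PWIDTH encoding
 * (0 = 8-bit, 1 = 16-bit, 2 = 32-bit). Callers validate the width
 * beforehand, so anything else falls back to 8-bit.
 */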
static inline uint32_t dma_gd32_memory_width(uint32_t width)
{
	switch (width) {
	case 4:
		return CHCTL_MWIDTH(2);
	case 2:
		return CHCTL_MWIDTH(1);
	default:
		return CHCTL_MWIDTH(0);
	}
}

static inline uint32_t dma_gd32_periph_width(uint32_t width)
{
	switch (width) {
	case 4:
		return CHCTL_PWIDTH(2);
	case 2:
		return CHCTL_PWIDTH(1);
	default:
		return CHCTL_PWIDTH(0);
	}
}

/*
 * API functions
 */

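/*
 * Validate and program a single-block transfer on @channel. Chained
 * (multi-block) transfers, decrementing addresses and circular mode
 * are not supported. The channel is left disabled; dma_gd32_start()
 * enables it.
 */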
static int dma_gd32_config(const struct device *dev, uint32_t channel,
			   struct dma_config *dma_cfg)
{
	const struct dma_gd32_config *cfg = dev->config;
	struct dma_gd32_data *data = dev->data;
	struct dma_gd32_srcdst_config src_cfg;
	struct dma_gd32_srcdst_config dst_cfg;
	struct dma_gd32_srcdst_config *memory_cfg = NULL;
	struct dma_gd32_srcdst_config *periph_cfg = NULL;

	if (channel >= cfg->channels) {
		LOG_ERR("channel must be < %" PRIu32 " (%" PRIu32 ")",
			cfg->channels, channel);
		return -EINVAL;
	}

	if (dma_cfg->block_count != 1) {
		LOG_ERR("chained block transfer not supported");
		return -ENOTSUP;
	}

	if (dma_cfg->channel_priority > 3) {
		LOG_ERR("channel_priority must be < 4 (%" PRIu32 ")",
			dma_cfg->channel_priority);
		return -EINVAL;
	}

	if (dma_cfg->head_block->source_addr_adj == DMA_ADDR_ADJ_DECREMENT) {
		LOG_ERR("DMA_ADDR_ADJ_DECREMENT not supported for source_addr_adj");
		return -ENOTSUP;
	}

	if (dma_cfg->head_block->dest_addr_adj == DMA_ADDR_ADJ_DECREMENT) {
		LOG_ERR("DMA_ADDR_ADJ_DECREMENT not supported for dest_addr_adj");
		return -ENOTSUP;
	}

	if (dma_cfg->head_block->source_addr_adj != DMA_ADDR_ADJ_INCREMENT &&
	    dma_cfg->head_block->source_addr_adj != DMA_ADDR_ADJ_NO_CHANGE) {
		LOG_ERR("invalid source_addr_adj %" PRIu16,
			dma_cfg->head_block->source_addr_adj);
		return -ENOTSUP;
	}
	if (dma_cfg->head_block->dest_addr_adj != DMA_ADDR_ADJ_INCREMENT &&
	    dma_cfg->head_block->dest_addr_adj != DMA_ADDR_ADJ_NO_CHANGE) {
		LOG_ERR("invalid dest_addr_adj %" PRIu16,
			dma_cfg->head_block->dest_addr_adj);
		return -ENOTSUP;
	}

	if (dma_cfg->source_data_size != 1 && dma_cfg->source_data_size != 2 &&
	    dma_cfg->source_data_size != 4) {
		LOG_ERR("source_data_size must be 1, 2, or 4 (%" PRIu32 ")",
			dma_cfg->source_data_size);
		return -EINVAL;
	}

	if (dma_cfg->dest_data_size != 1 && dma_cfg->dest_data_size != 2 &&
	    dma_cfg->dest_data_size != 4) {
		LOG_ERR("dest_data_size must be 1, 2, or 4 (%" PRIu32 ")",
			dma_cfg->dest_data_size);
		return -EINVAL;
	}

	if (dma_cfg->channel_direction > PERIPHERAL_TO_MEMORY) {
		LOG_ERR("channel_direction must be MEMORY_TO_MEMORY, "
			"MEMORY_TO_PERIPHERAL or PERIPHERAL_TO_MEMORY (%" PRIu32
			")",
			dma_cfg->channel_direction);
		return -ENOTSUP;
	}

	if (dma_cfg->channel_direction == MEMORY_TO_MEMORY && !cfg->mem2mem) {
		LOG_ERR("MEMORY_TO_MEMORY not supported on this controller");
		return -ENOTSUP;
	}

#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	/* The PERIEN field is 3 bits wide, so only sub-peripherals 0-7 fit. */
	if (dma_cfg->dma_slot > 0x7) {
		LOG_ERR("dma_slot must be < 8 (%" PRIu32 ")",
			dma_cfg->dma_slot);
		return -EINVAL;
	}
#endif

	gd32_dma_deinit(cfg->reg, channel);

	src_cfg.addr = dma_cfg->head_block->source_address;
	src_cfg.adj = dma_cfg->head_block->source_addr_adj;
	src_cfg.width = dma_cfg->source_data_size;

	dst_cfg.addr = dma_cfg->head_block->dest_address;
	dst_cfg.adj = dma_cfg->head_block->dest_addr_adj;
	dst_cfg.width = dma_cfg->dest_data_size;

	switch (dma_cfg->channel_direction) {
	case MEMORY_TO_MEMORY:
		gd32_dma_transfer_set_memory_to_memory(cfg->reg, channel);
		memory_cfg = &dst_cfg;
		periph_cfg = &src_cfg;
		break;
	case PERIPHERAL_TO_MEMORY:
		gd32_dma_transfer_set_periph_to_memory(cfg->reg, channel);
		memory_cfg = &dst_cfg;
		periph_cfg = &src_cfg;
		break;
	case MEMORY_TO_PERIPHERAL:
		gd32_dma_transfer_set_memory_to_periph(cfg->reg, channel);
		memory_cfg = &src_cfg;
		periph_cfg = &dst_cfg;
		break;
	}

	gd32_dma_memory_address_config(cfg->reg, channel, memory_cfg->addr);
	if (memory_cfg->adj == DMA_ADDR_ADJ_INCREMENT) {
		gd32_dma_memory_increase_enable(cfg->reg, channel);
	} else {
		gd32_dma_memory_increase_disable(cfg->reg, channel);
	}

	gd32_dma_periph_address_config(cfg->reg, channel, periph_cfg->addr);
	if (periph_cfg->adj == DMA_ADDR_ADJ_INCREMENT) {
		gd32_dma_periph_increase_enable(cfg->reg, channel);
	} else {
		gd32_dma_periph_increase_disable(cfg->reg, channel);
	}

	gd32_dma_transfer_number_config(cfg->reg, channel,
					dma_cfg->head_block->block_size);
	gd32_dma_priority_config(cfg->reg, channel,
				 dma_gd32_priority(dma_cfg->channel_priority));
	gd32_dma_memory_width_config(cfg->reg, channel,
				     dma_gd32_memory_width(memory_cfg->width));
	gd32_dma_periph_width_config(cfg->reg, channel,
				     dma_gd32_periph_width(periph_cfg->width));
	gd32_dma_circulation_disable(cfg->reg, channel);
#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	if (dma_cfg->channel_direction != MEMORY_TO_MEMORY) {
		gd32_dma_channel_subperipheral_select(cfg->reg, channel,
						      dma_cfg->dma_slot);
	}
#endif

	data->channels[channel].callback = dma_cfg->dma_callback;
	data->channels[channel].user_data = dma_cfg->user_data;
	data->channels[channel].direction = dma_cfg->channel_direction;

	return 0;
}

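/*
 * Re-target a previously configured channel at new addresses and a new
 * transfer count, then re-enable it. Fails with -EBUSY while a
 * transfer is still in flight.
 */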
static int dma_gd32_reload(const struct device *dev, uint32_t ch, uint32_t src,
			   uint32_t dst, size_t size)
{
	const struct dma_gd32_config *cfg = dev->config;
	struct dma_gd32_data *data = dev->data;

	if (ch >= cfg->channels) {
		LOG_ERR("reload channel must be < %" PRIu32 " (%" PRIu32 ")",
			cfg->channels, ch);
		return -EINVAL;
	}

	if (data->channels[ch].busy) {
		return -EBUSY;
	}

	gd32_dma_channel_disable(cfg->reg, ch);

	gd32_dma_transfer_number_config(cfg->reg, ch, size);

	switch (data->channels[ch].direction) {
	case MEMORY_TO_MEMORY:
	case PERIPHERAL_TO_MEMORY:
		gd32_dma_memory_address_config(cfg->reg, ch, dst);
		gd32_dma_periph_address_config(cfg->reg, ch, src);
		break;
	case MEMORY_TO_PERIPHERAL:
		gd32_dma_memory_address_config(cfg->reg, ch, src);
		gd32_dma_periph_address_config(cfg->reg, ch, dst);
		break;
	}

	gd32_dma_channel_enable(cfg->reg, ch);

	return 0;
}

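/* Enable transfer-complete and error interrupts and start the channel. */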
static int dma_gd32_start(const struct device *dev, uint32_t ch)
{
	const struct dma_gd32_config *cfg = dev->config;
	struct dma_gd32_data *data = dev->data;

	if (ch >= cfg->channels) {
		LOG_ERR("start channel must be < %" PRIu32 " (%" PRIu32 ")",
			cfg->channels, ch);
		return -EINVAL;
	}

	gd32_dma_interrupt_enable(cfg->reg, ch,
				  DMA_CHXCTL_FTFIE | GD32_DMA_INTERRUPT_ERRORS);
	gd32_dma_channel_enable(cfg->reg, ch);
	data->channels[ch].busy = true;

	return 0;
}

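/* Disable the channel and its interrupts, clearing any stale flags. */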
static int dma_gd32_stop(const struct device *dev, uint32_t ch)
{
	const struct dma_gd32_config *cfg = dev->config;
	struct dma_gd32_data *data = dev->data;

	if (ch >= cfg->channels) {
		LOG_ERR("stop channel must be < %" PRIu32 " (%" PRIu32 ")",
			cfg->channels, ch);
		return -EINVAL;
	}

	gd32_dma_interrupt_disable(
		cfg->reg, ch, DMA_CHXCTL_FTFIE | GD32_DMA_INTERRUPT_ERRORS);
	gd32_dma_interrupt_flag_clear(cfg->reg, ch,
				      DMA_FLAG_FTF | GD32_DMA_FLAG_ERRORS);
	gd32_dma_channel_disable(cfg->reg, ch);
	data->channels[ch].busy = false;

	return 0;
}

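/*
 * Report channel status. pending_length is the remaining count read
 * back from CHCNT, i.e. in units of the configured data width rather
 * than necessarily bytes.
 */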
static int dma_gd32_get_status(const struct device *dev, uint32_t ch,
			       struct dma_status *stat)
{
	const struct dma_gd32_config *cfg = dev->config;
	struct dma_gd32_data *data = dev->data;

	if (ch >= cfg->channels) {
		LOG_ERR("channel must be < %" PRIu32 " (%" PRIu32 ")",
			cfg->channels, ch);
		return -EINVAL;
	}

	stat->pending_length = gd32_dma_transfer_number_get(cfg->reg, ch);
	stat->dir = data->channels[ch].direction;
	stat->busy = data->channels[ch].busy;

	return 0;
}

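/*
 * Channel filter for dma_request_channel(): filter_param points to a
 * uint32_t bitmask of acceptable channels.
 */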
static bool dma_gd32_api_chan_filter(const struct device *dev, int ch,
				     void *filter_param)
{
	uint32_t filter;

	if (!filter_param) {
		LOG_ERR("filter_param must not be NULL");
		return false;
	}

	filter = *((uint32_t *)filter_param);

	return (filter & BIT(ch));
}

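/*
 * Enable the controller clock (and, on v1 hardware, pulse its reset
 * line), then bring every channel to a known-idle state before
 * connecting the interrupts.
 */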
static int dma_gd32_init(const struct device *dev)
{
	const struct dma_gd32_config *cfg = dev->config;

	(void)clock_control_on(GD32_CLOCK_CONTROLLER,
			       (clock_control_subsys_t)&cfg->clkid);

#if DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1)
	(void)reset_line_toggle_dt(&cfg->reset);
#endif

	for (uint32_t i = 0; i < cfg->channels; i++) {
		gd32_dma_interrupt_disable(cfg->reg, i,
			   DMA_CHXCTL_FTFIE | GD32_DMA_INTERRUPT_ERRORS);
		gd32_dma_deinit(cfg->reg, i);
	}

	cfg->irq_configure();

	return 0;
}

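/*
 * Shared ISR for all channels of one controller. Channels with a
 * pending full-transfer-finish or error flag are acknowledged, marked
 * idle, and their callback is invoked with 0 or -EIO respectively.
 */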
static void dma_gd32_isr(const struct device *dev)
{
	const struct dma_gd32_config *cfg = dev->config;
	struct dma_gd32_data *data = dev->data;
	uint32_t errflag, ftfflag;

	for (uint32_t i = 0; i < cfg->channels; i++) {
		/*
		 * Reset per channel so an error on one channel is not
		 * reported to the callback of a later channel handled
		 * in the same pass.
		 */
		int err = 0;

		errflag = gd32_dma_interrupt_flag_get(cfg->reg, i,
						      GD32_DMA_FLAG_ERRORS);
		ftfflag =
			gd32_dma_interrupt_flag_get(cfg->reg, i, DMA_FLAG_FTF);

		if (errflag == 0 && ftfflag == 0) {
			continue;
		}

		if (errflag) {
			err = -EIO;
		}

		gd32_dma_interrupt_flag_clear(
			cfg->reg, i, DMA_FLAG_FTF | GD32_DMA_FLAG_ERRORS);
		data->channels[i].busy = false;

		if (data->channels[i].callback) {
			data->channels[i].callback(
				dev, data->channels[i].user_data, i, err);
		}
	}
}

static const struct dma_driver_api dma_gd32_driver_api = {
	.config = dma_gd32_config,
	.reload = dma_gd32_reload,
	.start = dma_gd32_start,
	.stop = dma_gd32_stop,
	.get_status = dma_gd32_get_status,
	.chan_filter = dma_gd32_api_chan_filter,
};

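/*
 * One controller may expose several IRQ lines; connect and enable
 * every IRQ listed for the instance in devicetree, all routed to the
 * shared ISR above.
 */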
#define IRQ_CONFIGURE(n, inst)                                                 \
	IRQ_CONNECT(DT_INST_IRQ_BY_IDX(inst, n, irq),                          \
		    DT_INST_IRQ_BY_IDX(inst, n, priority), dma_gd32_isr,       \
		    DEVICE_DT_INST_GET(inst), 0);                              \
	irq_enable(DT_INST_IRQ_BY_IDX(inst, n, irq));

#define CONFIGURE_ALL_IRQS(inst, n) LISTIFY(n, IRQ_CONFIGURE, (), inst)

#define GD32_DMA_INIT(inst)                                                    \
	static void dma_gd32##inst##_irq_configure(void)                       \
	{                                                                      \
		CONFIGURE_ALL_IRQS(inst, DT_NUM_IRQS(DT_DRV_INST(inst)));      \
	}                                                                      \
	static const struct dma_gd32_config dma_gd32##inst##_config = {        \
		.reg = DT_INST_REG_ADDR(inst),                                 \
		.channels = DT_INST_PROP(inst, dma_channels),                  \
		.clkid = DT_INST_CLOCKS_CELL(inst, id),                        \
		.mem2mem = DT_INST_PROP(inst, gd_mem2mem),                     \
		IF_ENABLED(DT_HAS_COMPAT_STATUS_OKAY(gd_gd32_dma_v1),          \
			   (.reset = RESET_DT_SPEC_INST_GET(inst),))           \
		.irq_configure = dma_gd32##inst##_irq_configure,               \
	};                                                                     \
                                                                               \
	static struct dma_gd32_channel                                         \
		dma_gd32##inst##_channels[DT_INST_PROP(inst, dma_channels)];   \
	ATOMIC_DEFINE(dma_gd32_atomic##inst,                                   \
		      DT_INST_PROP(inst, dma_channels));                       \
	static struct dma_gd32_data dma_gd32##inst##_data = {                  \
		.ctx = {                                                       \
			.magic = DMA_MAGIC,                                    \
			.atomic = dma_gd32_atomic##inst,                       \
			.dma_channels = DT_INST_PROP(inst, dma_channels),      \
		},                                                             \
		.channels = dma_gd32##inst##_channels,                         \
	};                                                                     \
                                                                               \
	DEVICE_DT_INST_DEFINE(inst, &dma_gd32_init, NULL,                      \
			      &dma_gd32##inst##_data,                          \
			      &dma_gd32##inst##_config, POST_KERNEL,           \
			      CONFIG_DMA_INIT_PRIORITY, &dma_gd32_driver_api);

DT_INST_FOREACH_STATUS_OKAY(GD32_DMA_INIT)