1 /*
2 * Copyright (c) 2023 Analog Devices, Inc.
3 *
4 * SPDX-License-Identifier: Apache-2.0
5 */
6
7 #include <zephyr/device.h>
8 #include <zephyr/drivers/clock_control.h>
9 #include <zephyr/drivers/dma.h>
10 #include <zephyr/logging/log.h>
11 #include <zephyr/irq.h>
12 #include <zephyr/drivers/clock_control/adi_max32_clock_control.h>
13
14 #include <wrap_max32_dma.h>
15
16 #define DT_DRV_COMPAT adi_max32_dma
17
18 LOG_MODULE_REGISTER(max32_dma, CONFIG_DMA_LOG_LEVEL);
19
/* Static (ROM) per-instance configuration, filled from devicetree. */
struct max32_dma_config {
	mxc_dma_regs_t *regs;        /* DMA controller register base address */
	const struct device *clock;  /* Clock controller feeding this DMA block */
	struct max32_perclk perclk;  /* Peripheral-clock bus/bit used to gate the block */
	uint8_t channels;            /* Channel count (DT "dma-channels" property) */
	void (*irq_configure)(void); /* Connects and enables all IRQs of this instance */
};
27
/* Mutable per-channel state; the driver's data is an array of these,
 * one entry per channel, indexed by the Zephyr (instance-relative) channel number.
 */
struct max32_dma_data {
	dma_callback_t callback; /* User completion/error callback (may be NULL) */
	void *cb_data;           /* Opaque pointer passed back to the callback */
	uint32_t err_cb_dis;     /* Non-zero: suppress the callback on errors */
};
33
max32_dma_ch_prio_valid(uint32_t ch_prio)34 static inline bool max32_dma_ch_prio_valid(uint32_t ch_prio)
35 {
36 /* mxc_dma_priority_t is limited to values 0-3 */
37 if (!(ch_prio >= 0 && ch_prio <= 3)) {
38 LOG_ERR("Invalid DMA priority - must be type mxc_dma_priority_t (0-3)");
39 return false;
40 }
41 return true;
42 }
43
/**
 * Translate a transfer width in bytes to the MSDK width enumerator.
 *
 * @param width Width in bytes (1, 2 or 4).
 * @return MXC_DMA_WIDTH_* value, or -EINVAL for any other width.
 */
static inline int max32_dma_width(uint32_t width)
{
	if (width == 1U) {
		return MXC_DMA_WIDTH_BYTE;
	}
	if (width == 2U) {
		return MXC_DMA_WIDTH_HALFWORD;
	}
	if (width == 4U) {
		return MXC_DMA_WIDTH_WORD;
	}

	LOG_ERR("Invalid DMA width - must be byte (1), halfword (2) or word (4)");
	return -EINVAL;
}
58
/**
 * Translate a Zephyr address-adjust setting to the MSDK increment-enable flag.
 *
 * The MAX32 DMA only supports "no change" and "increment"; any other value
 * is logged and treated as "no change".
 *
 * @param addr_adj One of the DMA_ADDR_ADJ_* values.
 * @return 1 to enable address increment, 0 otherwise.
 */
static inline int max32_dma_addr_adj(uint16_t addr_adj)
{
	if (addr_adj == DMA_ADDR_ADJ_INCREMENT) {
		return 1;
	}

	if (addr_adj != DMA_ADDR_ADJ_NO_CHANGE) {
		LOG_ERR("Invalid DMA address adjust - must be NO_CHANGE (0) or INCREMENT (1)");
	}

	return 0;
}
71
/**
 * Convert an instance-relative channel number into the global MSDK channel
 * index, which is shared across all DMA controller instances.
 *
 * @param dma Register base of the owning DMA instance.
 * @param ch  Channel number relative to that instance.
 * @return Global channel index usable with the MXC_DMA_* channel API.
 */
static inline int max32_dma_ch_index(mxc_dma_regs_t *dma, uint8_t ch)
{
	const int ch_per_inst = MXC_DMA_CHANNELS / MXC_DMA_INSTANCES;

	return MXC_DMA_GET_IDX(dma) * ch_per_inst + ch;
}
76
/**
 * Configure a DMA channel for a transfer (dma_config API).
 *
 * Programs channel request source, transfer widths, address increment,
 * priority, burst size, source/destination addresses and length, then
 * enables the channel's interrupts.
 *
 * @param dev     DMA controller device.
 * @param channel Instance-relative channel number.
 * @param config  Zephyr DMA configuration; only the head block is used
 *                (no scatter-gather / linked-list support here).
 * @return 0 on success, -EINVAL on bad parameters, or an MSDK error code.
 */
static int max32_dma_config(const struct device *dev, uint32_t channel, struct dma_config *config)
{
	int ret = 0;
	const struct max32_dma_config *cfg = dev->config;
	struct max32_dma_data *data = dev->data;
	uint32_t ch;
	int srcwd, dstwd;

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid DMA channel - must be < %" PRIu32 " (%" PRIu32 ")", cfg->channels,
			channel);
		return -EINVAL;
	}

	/* Translate to the global MSDK channel index */
	ch = max32_dma_ch_index(cfg->regs, channel);

	/* Validate both widths once; the old code called max32_dma_width()
	 * twice per width, duplicating work and error logs on failure.
	 */
	srcwd = max32_dma_width(config->source_data_size);
	dstwd = max32_dma_width(config->dest_data_size);
	if ((srcwd < 0) || (dstwd < 0)) {
		return -EINVAL;
	}

	/* DMA Channel Config */
	mxc_dma_config_t mxc_dma_cfg;

	mxc_dma_cfg.ch = ch;
	mxc_dma_cfg.reqsel = config->dma_slot << ADI_MAX32_DMA_CFG_REQ_POS;
	mxc_dma_cfg.srcwd = srcwd;
	mxc_dma_cfg.dstwd = dstwd;
	mxc_dma_cfg.srcinc_en = max32_dma_addr_adj(config->head_block->source_addr_adj);
	mxc_dma_cfg.dstinc_en = max32_dma_addr_adj(config->head_block->dest_addr_adj);

	/* DMA Channel Advanced Config */
	mxc_dma_adv_config_t mxc_dma_cfg_adv;

	mxc_dma_cfg_adv.ch = ch;
	if (!max32_dma_ch_prio_valid(config->channel_priority)) {
		return -EINVAL;
	}
	mxc_dma_cfg_adv.prio = config->channel_priority;
	mxc_dma_cfg_adv.reqwait_en = 0;
	mxc_dma_cfg_adv.tosel = MXC_DMA_TIMEOUT_4_CLK;
	mxc_dma_cfg_adv.pssel = MXC_DMA_PRESCALE_DISABLE;
	mxc_dma_cfg_adv.burst_size = config->source_burst_length;

	/* DMA Transfer Config */
	mxc_dma_srcdst_t txfer;

	txfer.ch = ch;
	txfer.source = (void *)config->head_block->source_address;
	txfer.dest = (void *)config->head_block->dest_address;
	txfer.len = config->head_block->block_size;

	ret = MXC_DMA_ConfigChannel(mxc_dma_cfg, txfer);
	if (ret != E_NO_ERROR) {
		return ret;
	}

	ret = MXC_DMA_AdvConfigChannel(mxc_dma_cfg_adv);
	if (ret) {
		return ret;
	}

	/* Enable interrupts for the DMA peripheral */
	ret = MXC_DMA_EnableInt(ch);
	if (ret != E_NO_ERROR) {
		return ret;
	}

	/* Enable complete and count-to-zero interrupts for the channel */
	ret = MXC_DMA_ChannelEnableInt(ch, ADI_MAX32_DMA_CTRL_DIS_IE | ADI_MAX32_DMA_CTRL_CTZIEN);
	if (ret != E_NO_ERROR) {
		return ret;
	}

	/* Per-channel callback state is indexed by the Zephyr channel number */
	data[channel].callback = config->dma_callback;
	data[channel].cb_data = config->user_data;
	data[channel].err_cb_dis = config->error_callback_dis;

	return ret;
}
155
/**
 * Update source/destination/length of an already-configured channel
 * (dma_reload API). Refuses to touch a channel that is still running.
 *
 * @param dev     DMA controller device.
 * @param channel Instance-relative channel number.
 * @param src     New source address.
 * @param dst     New destination address.
 * @param size    New transfer length in bytes.
 * @return 0 on success, -EINVAL on bad channel, -EBUSY if active,
 *         or an MSDK error code.
 */
static int max32_dma_reload(const struct device *dev, uint32_t channel, uint32_t src, uint32_t dst,
			    size_t size)
{
	const struct max32_dma_config *cfg = dev->config;
	uint32_t hw_ch;

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid DMA channel - must be < %" PRIu32 " (%" PRIu32 ")", cfg->channels,
			channel);
		return -EINVAL;
	}

	hw_ch = max32_dma_ch_index(cfg->regs, channel);

	/* Reject the reload while the channel's status bit reports it active */
	if (MXC_DMA_ChannelGetFlags(hw_ch) & ADI_MAX32_DMA_STATUS_ST) {
		return -EBUSY;
	}

	mxc_dma_srcdst_t reload = {
		.ch = hw_ch,
		.source = (void *)src,
		.dest = (void *)dst,
		.len = size,
	};

	return MXC_DMA_SetSrcDst(reload);
}
181
/**
 * Start a previously configured transfer (dma_start API).
 *
 * @param dev     DMA controller device.
 * @param channel Instance-relative channel number.
 * @return 0 on success, -EINVAL on bad channel, -EBUSY if already running,
 *         or an MSDK error code.
 */
static int max32_dma_start(const struct device *dev, uint32_t channel)
{
	const struct max32_dma_config *cfg = dev->config;
	uint32_t hw_ch;

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid DMA channel - must be < %" PRIu32 " (%" PRIu32 ")", cfg->channels,
			channel);
		return -EINVAL;
	}

	hw_ch = max32_dma_ch_index(cfg->regs, channel);

	/* Don't restart a channel that is already mid-transfer */
	if (MXC_DMA_ChannelGetFlags(hw_ch) & ADI_MAX32_DMA_STATUS_ST) {
		return -EBUSY;
	}

	return MXC_DMA_Start(hw_ch);
}
201
/**
 * Stop an active transfer (dma_stop API).
 *
 * @param dev     DMA controller device.
 * @param channel Instance-relative channel number.
 * @return 0 on success, -EINVAL on bad channel, or an MSDK error code.
 */
static int max32_dma_stop(const struct device *dev, uint32_t channel)
{
	const struct max32_dma_config *cfg = dev->config;

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid DMA channel - must be < %" PRIu32 " (%" PRIu32 ")", cfg->channels,
			channel);
		return -EINVAL;
	}

	return MXC_DMA_Stop(max32_dma_ch_index(cfg->regs, channel));
}
216
/**
 * Report channel status (dma_get_status API): busy flag and bytes remaining.
 *
 * @param dev     DMA controller device.
 * @param channel Instance-relative channel number.
 * @param stat    Output; only busy and pending_length are filled in.
 * @return 0 on success, -EINVAL on bad channel, or an MSDK error code.
 */
static int max32_dma_get_status(const struct device *dev, uint32_t channel, struct dma_status *stat)
{
	const struct max32_dma_config *cfg = dev->config;
	mxc_dma_srcdst_t txfer;
	int flags;
	int ret;

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid DMA channel - must be < %" PRIu32 " (%" PRIu32 ")", cfg->channels,
			channel);
		return -EINVAL;
	}

	channel = max32_dma_ch_index(cfg->regs, channel);
	txfer.ch = channel;

	flags = MXC_DMA_ChannelGetFlags(channel);

	ret = MXC_DMA_GetSrcDst(&txfer);
	if (ret != E_NO_ERROR) {
		return ret;
	}

	/* Status (enable) bit set means the channel is still transferring */
	stat->busy = (flags & ADI_MAX32_DMA_STATUS_ST) != 0;
	/* Remaining length as reported by the channel's count register */
	stat->pending_length = txfer.len;

	return ret;
}
246
max32_dma_isr(const struct device * dev)247 static void max32_dma_isr(const struct device *dev)
248 {
249 const struct max32_dma_config *cfg = dev->config;
250 struct max32_dma_data *data = dev->data;
251 mxc_dma_regs_t *regs = cfg->regs;
252 int ch, c;
253 int flags;
254 int status = 0;
255
256 uint8_t channel_base = max32_dma_ch_index(cfg->regs, 0);
257
258 for (ch = channel_base, c = 0; c < cfg->channels; ch++, c++) {
259 flags = MXC_DMA_ChannelGetFlags(ch);
260
261 /* Check if channel is in use, if not, move to next channel */
262 if (flags <= 0) {
263 continue;
264 }
265
266 /* Check for error interrupts */
267 if (flags & (ADI_MAX32_DMA_STATUS_BUS_ERR | ADI_MAX32_DMA_STATUS_TO_IF)) {
268 status = -EIO;
269 }
270
271 MXC_DMA_ChannelClearFlags(ch, flags);
272
273 if (data[c].callback) {
274 /* Only call error callback if enabled during DMA config */
275 if (status < 0 && (data[c].err_cb_dis)) {
276 break;
277 }
278 data[c].callback(dev, data[c].cb_data, c, status);
279 }
280
281 /* No need to check rest of the channels if no interrupt flags set */
282 if (MXC_DMA_GetIntFlags(regs) == 0) {
283 break;
284 }
285 }
286 }
287
max32_dma_init(const struct device * dev)288 static int max32_dma_init(const struct device *dev)
289 {
290 int ret = 0;
291 const struct max32_dma_config *cfg = dev->config;
292
293 if (!device_is_ready(cfg->clock)) {
294 return -ENODEV;
295 }
296
297 /* Enable peripheral clock */
298 ret = clock_control_on(cfg->clock, (clock_control_subsys_t) &(cfg->perclk));
299 if (ret) {
300 return ret;
301 }
302
303 ret = Wrap_MXC_DMA_Init(cfg->regs);
304 if (ret) {
305 return ret;
306 }
307
308 /* Acquire all channels so they are available to Zephyr application */
309 for (int i = 0; i < cfg->channels; i++) {
310 ret = Wrap_MXC_DMA_AcquireChannel(cfg->regs);
311 if (ret < 0) {
312 break;
313 } /* Channels already acquired */
314 }
315
316 cfg->irq_configure();
317
318 return 0;
319 }
320
/* Zephyr DMA driver API vtable for this driver (suspend/resume and
 * scatter-gather entries are intentionally not implemented).
 */
static DEVICE_API(dma, max32_dma_driver_api) = {
	.config = max32_dma_config,
	.reload = max32_dma_reload,
	.start = max32_dma_start,
	.stop = max32_dma_stop,
	.get_status = max32_dma_get_status,
};
328
/* Connect and enable the n-th IRQ of DMA instance `inst`, routing it to the
 * shared ISR with the device pointer as argument.
 */
#define MAX32_DMA_IRQ_CONNECT(n, inst)                                                             \
	IRQ_CONNECT(DT_INST_IRQ_BY_IDX(inst, n, irq), DT_INST_IRQ_BY_IDX(inst, n, priority),       \
		    max32_dma_isr, DEVICE_DT_INST_GET(inst), 0);                                   \
	irq_enable(DT_INST_IRQ_BY_IDX(inst, n, irq));

/* Expand MAX32_DMA_IRQ_CONNECT for every one of the instance's n IRQs. */
#define CONFIGURE_ALL_IRQS(inst, n) LISTIFY(n, MAX32_DMA_IRQ_CONNECT, (), inst)

/* Per-instance instantiation: data array (one entry per channel), IRQ setup
 * function, devicetree-derived config, and the device definition itself.
 */
#define MAX32_DMA_INIT(inst)                                                                       \
	static struct max32_dma_data dma##inst##_data[DT_INST_PROP(inst, dma_channels)];           \
	static void max32_dma##inst##_irq_configure(void)                                          \
	{                                                                                          \
		CONFIGURE_ALL_IRQS(inst, DT_NUM_IRQS(DT_DRV_INST(inst)));                          \
	}                                                                                          \
	static const struct max32_dma_config dma##inst##_cfg = {                                   \
		.regs = (mxc_dma_regs_t *)DT_INST_REG_ADDR(inst),                                  \
		.clock = DEVICE_DT_GET(DT_INST_CLOCKS_CTLR(inst)),                                 \
		.perclk.bus = DT_INST_CLOCKS_CELL(inst, offset),                                   \
		.perclk.bit = DT_INST_CLOCKS_CELL(inst, bit),                                      \
		.channels = DT_INST_PROP(inst, dma_channels),                                      \
		.irq_configure = max32_dma##inst##_irq_configure,                                  \
	};                                                                                         \
	DEVICE_DT_INST_DEFINE(inst, &max32_dma_init, NULL, &dma##inst##_data, &dma##inst##_cfg,    \
			      PRE_KERNEL_1, CONFIG_DMA_INIT_PRIORITY, &max32_dma_driver_api);

/* Instantiate the driver for every enabled adi,max32-dma node. */
DT_INST_FOREACH_STATUS_OKAY(MAX32_DMA_INIT)
354