Lines Matching +full:dma +full:- +full:channel +full:- +full:mask

4  * SPDX-License-Identifier: Apache-2.0
14 #include <zephyr/drivers/dma.h>
29 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_isr()
30 struct dw_dma_dev_data *const dev_data = dev->data; in dw_dma_isr()
37 uint32_t channel; in dw_dma_isr() local
39 status_intr = dw_read(dev_cfg->base, DW_INTR_STATUS); in dw_dma_isr()
41 LOG_ERR("%s: status_intr = %d", dev->name, status_intr); in dw_dma_isr()
45 status_block = dw_read(dev_cfg->base, DW_STATUS_BLOCK); in dw_dma_isr()
46 status_tfr = dw_read(dev_cfg->base, DW_STATUS_TFR); in dw_dma_isr()
49 status_err = dw_read(dev_cfg->base, DW_STATUS_ERR); in dw_dma_isr()
51 LOG_ERR("%s: status_err = %d\n", dev->name, status_err); in dw_dma_isr()
52 dw_write(dev_cfg->base, DW_CLEAR_ERR, status_err); in dw_dma_isr()
56 dw_write(dev_cfg->base, DW_CLEAR_BLOCK, status_block); in dw_dma_isr()
57 dw_write(dev_cfg->base, DW_CLEAR_TFR, status_tfr); in dw_dma_isr()
61 channel = find_lsb_set(status_block) - 1; in dw_dma_isr()
62 status_block &= ~(1 << channel); in dw_dma_isr()
63 chan_data = &dev_data->chan[channel]; in dw_dma_isr()
65 if (chan_data->dma_blkcallback) { in dw_dma_isr()
66 LOG_DBG("%s: Dispatching block complete callback for channel %d", dev->name, in dw_dma_isr()
67 channel); in dw_dma_isr()
69 /* Ensure the linked list (chan_data->lli) is in dw_dma_isr()
73 chan_data->dma_blkcallback(dev, in dw_dma_isr()
74 chan_data->blkuser_data, in dw_dma_isr()
75 channel, DMA_STATUS_BLOCK); in dw_dma_isr()
80 channel = find_lsb_set(status_tfr) - 1; in dw_dma_isr()
81 status_tfr &= ~(1 << channel); in dw_dma_isr()
82 chan_data = &dev_data->chan[channel]; in dw_dma_isr()
84 /* Transfer complete, channel now idle, a reload in dw_dma_isr()
88 chan_data->state = DW_DMA_IDLE; in dw_dma_isr()
90 if (chan_data->dma_tfrcallback) { in dw_dma_isr()
91 LOG_DBG("%s: Dispatching transfer callback for channel %d", dev->name, in dw_dma_isr()
92 channel); in dw_dma_isr()
93 chan_data->dma_tfrcallback(dev, in dw_dma_isr()
94 chan_data->tfruser_data, in dw_dma_isr()
95 channel, DMA_STATUS_COMPLETE); in dw_dma_isr()
101 /* mask address for dma to identify memory space. */
105 lli_desc->sar = block_cfg->source_address; in dw_dma_mask_address()
106 lli_desc->dar = block_cfg->dest_address; in dw_dma_mask_address()
110 lli_desc->sar |= CONFIG_DMA_DW_HOST_MASK; in dw_dma_mask_address()
113 lli_desc->dar |= CONFIG_DMA_DW_HOST_MASK; in dw_dma_mask_address()
116 lli_desc->sar |= CONFIG_DMA_DW_HOST_MASK; in dw_dma_mask_address()
117 lli_desc->dar |= CONFIG_DMA_DW_HOST_MASK; in dw_dma_mask_address()
124 int dw_dma_config(const struct device *dev, uint32_t channel, in dw_dma_config() argument
127 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_config()
128 struct dw_dma_dev_data *const dev_data = dev->data; in dw_dma_config()
138 if (channel >= DW_CHAN_COUNT) { in dw_dma_config()
139 LOG_ERR("%s: invalid dma channel %d", dev->name, channel); in dw_dma_config()
140 ret = -EINVAL; in dw_dma_config()
144 struct dw_dma_chan_data *chan_data = &dev_data->chan[channel]; in dw_dma_config()
146 if (chan_data->state != DW_DMA_IDLE && chan_data->state != DW_DMA_PREPARED) { in dw_dma_config()
147 LOG_ERR("%s: channel %d must be inactive to reconfigure, currently %d", dev->name, in dw_dma_config()
148 channel, chan_data->state); in dw_dma_config()
149 ret = -EBUSY; in dw_dma_config()
153 LOG_DBG("%s: channel %d config", dev->name, channel); in dw_dma_config()
155 __ASSERT_NO_MSG(cfg->source_data_size == cfg->dest_data_size); in dw_dma_config()
156 __ASSERT_NO_MSG(cfg->source_burst_length == cfg->dest_burst_length); in dw_dma_config()
157 __ASSERT_NO_MSG(cfg->block_count > 0); in dw_dma_config()
158 __ASSERT_NO_MSG(cfg->head_block != NULL); in dw_dma_config()
160 if (cfg->source_data_size != 1 && cfg->source_data_size != 2 && in dw_dma_config()
161 cfg->source_data_size != 4 && cfg->source_data_size != 8 && in dw_dma_config()
162 cfg->source_data_size != 16) { in dw_dma_config()
163 LOG_ERR("%s: channel %d 'invalid source_data_size' value %d", dev->name, channel, in dw_dma_config()
164 cfg->source_data_size); in dw_dma_config()
165 ret = -EINVAL; in dw_dma_config()
169 if (cfg->block_count > CONFIG_DMA_DW_LLI_POOL_SIZE) { in dw_dma_config()
170 LOG_ERR("%s: channel %d scatter gather list larger than" in dw_dma_config()
172 dev->name, channel); in dw_dma_config()
173 ret = -EINVAL; in dw_dma_config()
178 msize = find_msb_set(cfg->source_burst_length) - 1; in dw_dma_config()
179 LOG_DBG("%s: channel %d m_size=%d", dev->name, channel, msize); in dw_dma_config()
182 /* default channel config */ in dw_dma_config()
183 chan_data->direction = cfg->channel_direction; in dw_dma_config()
184 chan_data->cfg_lo = 0; in dw_dma_config()
185 chan_data->cfg_hi = 0; in dw_dma_config()
188 chan_data->lli = &dev_data->lli_pool[channel][0]; /* TODO allocate here */ in dw_dma_config()
189 chan_data->lli_count = cfg->block_count; in dw_dma_config()
192 memset(chan_data->lli, 0, sizeof(struct dw_lli) * chan_data->lli_count); in dw_dma_config()
193 lli_desc = chan_data->lli; in dw_dma_config()
194 lli_desc_head = &chan_data->lli[0]; in dw_dma_config()
195 lli_desc_tail = &chan_data->lli[chan_data->lli_count - 1]; in dw_dma_config()
197 chan_data->ptr_data.buffer_bytes = 0; in dw_dma_config()
200 block_cfg = cfg->head_block; in dw_dma_config()
201 for (int i = 0; i < cfg->block_count; i++) { in dw_dma_config()
203 LOG_DBG("%s: copying block_cfg %p to lli_desc %p", dev->name, block_cfg, lli_desc); in dw_dma_config()
206 switch (cfg->source_data_size) { in dw_dma_config()
209 lli_desc->ctrl_lo |= DW_CTLL_SRC_WIDTH(0); in dw_dma_config()
213 switch (cfg->channel_direction) { in dw_dma_config()
216 lli_desc->ctrl_lo |= DW_CTLL_SRC_WIDTH(2); in dw_dma_config()
220 lli_desc->ctrl_lo |= DW_CTLL_SRC_WIDTH(1); in dw_dma_config()
226 lli_desc->ctrl_lo |= DW_CTLL_SRC_WIDTH(2); in dw_dma_config()
229 LOG_ERR("%s: channel %d invalid src width %d", dev->name, channel, in dw_dma_config()
230 cfg->source_data_size); in dw_dma_config()
231 ret = -EINVAL; in dw_dma_config()
235 LOG_DBG("%s: source data size: lli_desc %p, ctrl_lo %x", dev->name, in dw_dma_config()
236 lli_desc, lli_desc->ctrl_lo); in dw_dma_config()
238 switch (cfg->dest_data_size) { in dw_dma_config()
241 lli_desc->ctrl_lo |= DW_CTLL_DST_WIDTH(0); in dw_dma_config()
245 switch (cfg->channel_direction) { in dw_dma_config()
248 lli_desc->ctrl_lo |= DW_CTLL_DST_WIDTH(2); in dw_dma_config()
252 lli_desc->ctrl_lo |= DW_CTLL_DST_WIDTH(1); in dw_dma_config()
258 lli_desc->ctrl_lo |= DW_CTLL_DST_WIDTH(2); in dw_dma_config()
261 LOG_ERR("%s: channel %d invalid dest width %d", dev->name, channel, in dw_dma_config()
262 cfg->dest_data_size); in dw_dma_config()
263 ret = -EINVAL; in dw_dma_config()
267 LOG_DBG("%s: dest data size: lli_desc %p, ctrl_lo %x", dev->name, in dw_dma_config()
268 lli_desc, lli_desc->ctrl_lo); in dw_dma_config()
270 lli_desc->ctrl_lo |= DW_CTLL_SRC_MSIZE(msize) | in dw_dma_config()
273 if (cfg->dma_callback) { in dw_dma_config()
274 lli_desc->ctrl_lo |= DW_CTLL_INT_EN; /* enable interrupt */ in dw_dma_config()
277 LOG_DBG("%s: msize, int_en: lli_desc %p, ctrl_lo %x", dev->name, in dw_dma_config()
278 lli_desc, lli_desc->ctrl_lo); in dw_dma_config()
283 switch (cfg->channel_direction) { in dw_dma_config()
285 lli_desc->ctrl_lo |= DW_CTLL_FC_M2M | DW_CTLL_SRC_INC | in dw_dma_config()
288 LOG_DBG("%s: setting LLP_D_EN, LLP_S_EN in lli_desc->ctrl_lo %x", dev->name, in dw_dma_config()
289 lli_desc->ctrl_lo); in dw_dma_config()
290 lli_desc->ctrl_lo |= in dw_dma_config()
292 LOG_DBG("%s: lli_desc->ctrl_lo %x", dev->name, lli_desc->ctrl_lo); in dw_dma_config()
295 chan_data->cfg_lo |= DW_CFGL_SRC_SW_HS; in dw_dma_config()
296 chan_data->cfg_lo |= DW_CFGL_DST_SW_HS; in dw_dma_config()
300 lli_desc->ctrl_lo |= DW_CTLL_FC_M2P | DW_CTLL_SRC_INC | in dw_dma_config()
303 lli_desc->ctrl_lo |= DW_CTLL_LLP_S_EN; in dw_dma_config()
304 chan_data->cfg_lo |= DW_CFGL_RELOAD_DST; in dw_dma_config()
306 /* Assign a hardware handshake interface (0-15) to the in dw_dma_config()
307 * destination of the channel in dw_dma_config()
309 chan_data->cfg_hi |= DW_CFGH_DST(cfg->dma_slot); in dw_dma_config()
311 chan_data->cfg_lo |= DW_CFGL_SRC_SW_HS; in dw_dma_config()
315 lli_desc->ctrl_lo |= DW_CTLL_FC_P2M | DW_CTLL_SRC_FIX | in dw_dma_config()
318 if (!block_cfg->dest_scatter_en) { in dw_dma_config()
319 lli_desc->ctrl_lo |= DW_CTLL_LLP_D_EN; in dw_dma_config()
321 /* Use contiguous auto-reload. Line 3 in in dw_dma_config()
322 * table 3-3 in dw_dma_config()
324 lli_desc->ctrl_lo |= DW_CTLL_D_SCAT_EN; in dw_dma_config()
326 chan_data->cfg_lo |= DW_CFGL_RELOAD_SRC; in dw_dma_config()
328 /* Assign a hardware handshake interface (0-15) to the in dw_dma_config()
329 * source of the channel in dw_dma_config()
331 chan_data->cfg_hi |= DW_CFGH_SRC(cfg->dma_slot); in dw_dma_config()
333 chan_data->cfg_lo |= DW_CFGL_DST_SW_HS; in dw_dma_config()
337 LOG_ERR("%s: channel %d invalid direction %d", dev->name, channel, in dw_dma_config()
338 cfg->channel_direction); in dw_dma_config()
339 ret = -EINVAL; in dw_dma_config()
343 LOG_DBG("%s: direction: lli_desc %p, ctrl_lo %x, cfg_hi %x, cfg_lo %x", dev->name, in dw_dma_config()
344 lli_desc, lli_desc->ctrl_lo, chan_data->cfg_hi, chan_data->cfg_lo); in dw_dma_config()
346 dw_dma_mask_address(block_cfg, lli_desc, cfg->channel_direction); in dw_dma_config()
348 LOG_DBG("%s: mask address: lli_desc %p, ctrl_lo %x, cfg_hi %x, cfg_lo %x", in dw_dma_config()
349 dev->name, lli_desc, lli_desc->ctrl_lo, chan_data->cfg_hi, in dw_dma_config()
350 chan_data->cfg_lo); in dw_dma_config()
352 if (block_cfg->block_size > DW_CTLH_BLOCK_TS_MASK) { in dw_dma_config()
353 LOG_ERR("%s: channel %d block size too big %d", dev->name, channel, in dw_dma_config()
354 block_cfg->block_size); in dw_dma_config()
355 ret = -EINVAL; in dw_dma_config()
360 lli_desc->ctrl_hi |= DW_CTLH_CLASS(dev_data->channel_data->chan[channel].class) | in dw_dma_config()
361 (block_cfg->block_size & DW_CTLH_BLOCK_TS_MASK); in dw_dma_config()
364 dev->name, lli_desc, lli_desc->ctrl_lo, chan_data->cfg_hi, in dw_dma_config()
365 chan_data->cfg_lo); in dw_dma_config()
367 chan_data->ptr_data.buffer_bytes += block_cfg->block_size; in dw_dma_config()
370 lli_desc->llp = (uintptr_t)(lli_desc + 1); in dw_dma_config()
372 LOG_DBG("%s: lli_desc llp %x", dev->name, lli_desc->llp); in dw_dma_config()
377 block_cfg = block_cfg->next_block; in dw_dma_config()
381 chan_data->cfg_lo |= DW_CFGL_CTL_HI_UPD_EN; in dw_dma_config()
385 if (cfg->cyclic) { in dw_dma_config()
386 lli_desc_tail->llp = (uintptr_t)lli_desc_head; in dw_dma_config()
388 lli_desc_tail->llp = 0; in dw_dma_config()
390 LOG_DBG("%s: Clearing LLP_S_EN, LLP_D_EN from tail LLI %x", dev->name, in dw_dma_config()
391 lli_desc_tail->ctrl_lo); in dw_dma_config()
392 lli_desc_tail->ctrl_lo &= ~(DW_CTLL_LLP_S_EN | DW_CTLL_LLP_D_EN); in dw_dma_config()
393 LOG_DBG("%s: ctrl_lo %x", dev->name, lli_desc_tail->ctrl_lo); in dw_dma_config()
397 /* set the initial lli, mark the channel as prepared (ready to be started) */ in dw_dma_config()
398 chan_data->state = DW_DMA_PREPARED; in dw_dma_config()
399 chan_data->lli_current = chan_data->lli; in dw_dma_config()
402 chan_data->ptr_data.start_ptr = DW_DMA_LLI_ADDRESS(chan_data->lli, in dw_dma_config()
403 chan_data->direction); in dw_dma_config()
404 chan_data->ptr_data.end_ptr = chan_data->ptr_data.start_ptr + in dw_dma_config()
405 chan_data->ptr_data.buffer_bytes; in dw_dma_config()
406 chan_data->ptr_data.current_ptr = chan_data->ptr_data.start_ptr; in dw_dma_config()
407 chan_data->ptr_data.hw_ptr = chan_data->ptr_data.start_ptr; in dw_dma_config()
413 if (cfg->complete_callback_en) { in dw_dma_config()
414 chan_data->dma_blkcallback = cfg->dma_callback; in dw_dma_config()
415 chan_data->blkuser_data = cfg->user_data; in dw_dma_config()
416 dw_write(dev_cfg->base, DW_MASK_BLOCK, DW_CHAN_UNMASK(channel)); in dw_dma_config()
418 chan_data->dma_tfrcallback = cfg->dma_callback; in dw_dma_config()
419 chan_data->tfruser_data = cfg->user_data; in dw_dma_config()
420 dw_write(dev_cfg->base, DW_MASK_TFR, DW_CHAN_UNMASK(channel)); in dw_dma_config()
423 dw_write(dev_cfg->base, DW_MASK_ERR, DW_CHAN_UNMASK(channel)); in dw_dma_config()
425 /* write interrupt clear registers for the channel in dw_dma_config()
428 dw_write(dev_cfg->base, DW_CLEAR_TFR, 0x1 << channel); in dw_dma_config()
429 dw_write(dev_cfg->base, DW_CLEAR_BLOCK, 0x1 << channel); in dw_dma_config()
430 dw_write(dev_cfg->base, DW_CLEAR_SRC_TRAN, 0x1 << channel); in dw_dma_config()
431 dw_write(dev_cfg->base, DW_CLEAR_DST_TRAN, 0x1 << channel); in dw_dma_config()
432 dw_write(dev_cfg->base, DW_CLEAR_ERR, 0x1 << channel); in dw_dma_config()
439 bool dw_dma_is_enabled(const struct device *dev, uint32_t channel) in dw_dma_is_enabled() argument
441 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_is_enabled()
443 return dw_read(dev_cfg->base, DW_DMA_CHAN_EN) & DW_CHAN(channel); in dw_dma_is_enabled()
446 int dw_dma_start(const struct device *dev, uint32_t channel) in dw_dma_start() argument
448 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_start()
449 struct dw_dma_dev_data *dev_data = dev->data; in dw_dma_start()
452 /* validate channel */ in dw_dma_start()
453 if (channel >= DW_CHAN_COUNT) { in dw_dma_start()
454 ret = -EINVAL; in dw_dma_start()
458 if (dw_dma_is_enabled(dev, channel)) { in dw_dma_start()
462 struct dw_dma_chan_data *chan_data = &dev_data->chan[channel]; in dw_dma_start()
464 /* validate channel state */ in dw_dma_start()
465 if (chan_data->state != DW_DMA_PREPARED) { in dw_dma_start()
466 LOG_ERR("%s: channel %d not ready ena 0x%x status 0x%x", dev->name, channel, in dw_dma_start()
467 dw_read(dev_cfg->base, DW_DMA_CHAN_EN), chan_data->state); in dw_dma_start()
468 ret = -EBUSY; in dw_dma_start()
473 if (!chan_data->lli) { in dw_dma_start()
474 LOG_ERR("%s: channel %d invalid stream", dev->name, channel); in dw_dma_start()
475 ret = -EINVAL; in dw_dma_start()
479 LOG_INF("%s: channel %d start", dev->name, channel); in dw_dma_start()
481 struct dw_lli *lli = chan_data->lli_current; in dw_dma_start()
484 /* LLP mode - write LLP pointer */ in dw_dma_start()
486 uint32_t masked_ctrl_lo = lli->ctrl_lo & (DW_CTLL_LLP_D_EN | DW_CTLL_LLP_S_EN); in dw_dma_start()
491 LOG_DBG("%s: Setting llp", dev->name); in dw_dma_start()
493 dw_write(dev_cfg->base, DW_LLP(channel), llp); in dw_dma_start()
494 LOG_DBG("%s: ctrl_lo %x, masked ctrl_lo %x, LLP %x", dev->name, in dw_dma_start()
495 lli->ctrl_lo, masked_ctrl_lo, dw_read(dev_cfg->base, DW_LLP(channel))); in dw_dma_start()
498 /* channel needs to start from scratch, so write SAR and DAR */ in dw_dma_start()
500 dw_write(dev_cfg->base, DW_SAR(channel), (uint32_t)(lli->sar & DW_ADDR_MASK_32)); in dw_dma_start()
501 dw_write(dev_cfg->base, DW_SAR_HI(channel), (uint32_t)(lli->sar >> DW_ADDR_RIGHT_SHIFT)); in dw_dma_start()
502 dw_write(dev_cfg->base, DW_DAR(channel), (uint32_t)(lli->dar & DW_ADDR_MASK_32)); in dw_dma_start()
503 dw_write(dev_cfg->base, DW_DAR_HI(channel), (uint32_t)(lli->dar >> DW_ADDR_RIGHT_SHIFT)); in dw_dma_start()
505 dw_write(dev_cfg->base, DW_SAR(channel), lli->sar); in dw_dma_start()
506 dw_write(dev_cfg->base, DW_DAR(channel), lli->dar); in dw_dma_start()
510 dw_write(dev_cfg->base, DW_CTRL_LOW(channel), lli->ctrl_lo); in dw_dma_start()
511 dw_write(dev_cfg->base, DW_CTRL_HIGH(channel), lli->ctrl_hi); in dw_dma_start()
514 dw_write(dev_cfg->base, DW_CFG_LOW(channel), chan_data->cfg_lo); in dw_dma_start()
515 dw_write(dev_cfg->base, DW_CFG_HIGH(channel), chan_data->cfg_hi); in dw_dma_start()
519 dev->name, lli->sar, lli->dar, lli->ctrl_lo, lli->ctrl_hi, chan_data->cfg_lo, in dw_dma_start()
520 chan_data->cfg_hi, dw_read(dev_cfg->base, DW_LLP(channel)) in dw_dma_start()
524 dev->name, lli->sar, lli->dar, lli->ctrl_lo, lli->ctrl_hi, chan_data->cfg_lo, in dw_dma_start()
525 chan_data->cfg_hi, dw_read(dev_cfg->base, DW_LLP(channel)) in dw_dma_start()
530 if (lli->ctrl_lo & DW_CTLL_D_SCAT_EN) { in dw_dma_start()
531 LOG_DBG("%s: configuring DW_DSR", dev->name); in dw_dma_start()
532 uint32_t words_per_tfr = (lli->ctrl_hi & DW_CTLH_BLOCK_TS_MASK) >> in dw_dma_start()
533 ((lli->ctrl_lo & DW_CTLL_DST_WIDTH_MASK) >> DW_CTLL_DST_WIDTH_SHIFT); in dw_dma_start()
534 dw_write(dev_cfg->base, DW_DSR(channel), in dw_dma_start()
539 chan_data->state = DW_DMA_ACTIVE; in dw_dma_start()
541 /* enable the channel */ in dw_dma_start()
542 dw_write(dev_cfg->base, DW_DMA_CHAN_EN, DW_CHAN_UNMASK(channel)); in dw_dma_start()
549 int dw_dma_stop(const struct device *dev, uint32_t channel) in dw_dma_stop() argument
551 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_stop()
552 struct dw_dma_dev_data *dev_data = dev->data; in dw_dma_stop()
553 struct dw_dma_chan_data *chan_data = &dev_data->chan[channel]; in dw_dma_stop()
557 if (channel >= DW_CHAN_COUNT) { in dw_dma_stop()
558 ret = -EINVAL; in dw_dma_stop()
572 if (!dw_dma_is_enabled(dev, channel) && chan_data->state != DW_DMA_SUSPENDED) { in dw_dma_stop()
578 struct dw_lli *lli = chan_data->lli; in dw_dma_stop()
582 LOG_INF("%s: channel %d stop", dev->name, channel); in dw_dma_stop()
584 /* Validate the channel state */ in dw_dma_stop()
585 if (chan_data->state != DW_DMA_ACTIVE && in dw_dma_stop()
586 chan_data->state != DW_DMA_SUSPENDED) { in dw_dma_stop()
587 ret = -EINVAL; in dw_dma_stop()
592 /* channel cannot be disabled right away, so first we need to) in dw_dma_stop()
595 dw_write(dev_cfg->base, DW_CFG_LOW(channel), in dw_dma_stop()
596 chan_data->cfg_lo | DW_CFGL_SUSPEND | DW_CFGL_DRAIN); in dw_dma_stop()
599 bool fifo_empty = WAIT_FOR(dw_read(dev_cfg->base, DW_CFG_LOW(channel)) & DW_CFGL_FIFO_EMPTY, in dw_dma_stop()
602 LOG_WRN("%s: channel %d drain time out", dev->name, channel); in dw_dma_stop()
604 /* Continue even if draining timed out to make sure that the channel is going to be in dw_dma_stop()
606 * The same channel might be requested for other purpose (or for same) next time in dw_dma_stop()
607 * which will fail if the channel has been left enabled. in dw_dma_stop()
612 dw_write(dev_cfg->base, DW_DMA_CHAN_EN, DW_CHAN_MASK(channel)); in dw_dma_stop()
614 /* now we wait for channel to be disabled */ in dw_dma_stop()
615 bool is_disabled = WAIT_FOR(!(dw_read(dev_cfg->base, DW_DMA_CHAN_EN) & DW_CHAN(channel)), in dw_dma_stop()
618 LOG_ERR("%s: channel %d disable timeout", dev->name, channel); in dw_dma_stop()
619 return -ETIMEDOUT; in dw_dma_stop()
623 for (i = 0; i < chan_data->lli_count; i++) { in dw_dma_stop()
624 lli->ctrl_hi &= ~DW_CTLH_DONE(1); in dw_dma_stop()
628 chan_data->state = DW_DMA_IDLE; in dw_dma_stop()
634 int dw_dma_resume(const struct device *dev, uint32_t channel) in dw_dma_resume() argument
636 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_resume()
637 struct dw_dma_dev_data *dev_data = dev->data; in dw_dma_resume()
640 /* Validate channel index */ in dw_dma_resume()
641 if (channel >= DW_CHAN_COUNT) { in dw_dma_resume()
642 ret = -EINVAL; in dw_dma_resume()
646 struct dw_dma_chan_data *chan_data = &dev_data->chan[channel]; in dw_dma_resume()
648 /* Validate channel state */ in dw_dma_resume()
649 if (chan_data->state != DW_DMA_SUSPENDED) { in dw_dma_resume()
650 ret = -EINVAL; in dw_dma_resume()
654 LOG_DBG("%s: channel %d resume", dev->name, channel); in dw_dma_resume()
656 dw_write(dev_cfg->base, DW_CFG_LOW(channel), chan_data->cfg_lo); in dw_dma_resume()
658 /* Channel is now active */ in dw_dma_resume()
659 chan_data->state = DW_DMA_ACTIVE; in dw_dma_resume()
665 int dw_dma_suspend(const struct device *dev, uint32_t channel) in dw_dma_suspend() argument
667 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_suspend()
668 struct dw_dma_dev_data *dev_data = dev->data; in dw_dma_suspend()
671 /* Validate channel index */ in dw_dma_suspend()
672 if (channel >= DW_CHAN_COUNT) { in dw_dma_suspend()
673 ret = -EINVAL; in dw_dma_suspend()
677 struct dw_dma_chan_data *chan_data = &dev_data->chan[channel]; in dw_dma_suspend()
679 /* Validate channel state */ in dw_dma_suspend()
680 if (chan_data->state != DW_DMA_ACTIVE) { in dw_dma_suspend()
681 ret = -EINVAL; in dw_dma_suspend()
686 LOG_DBG("%s: channel %d suspend", dev->name, channel); in dw_dma_suspend()
688 dw_write(dev_cfg->base, DW_CFG_LOW(channel), in dw_dma_suspend()
689 chan_data->cfg_lo | DW_CFGL_SUSPEND); in dw_dma_suspend()
691 /* Channel is now suspended */ in dw_dma_suspend()
692 chan_data->state = DW_DMA_SUSPENDED; in dw_dma_suspend()
702 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_setup()
707 if (dw_read(dev_cfg->base, DW_DMA_CFG) != 0) { in dw_dma_setup()
708 dw_write(dev_cfg->base, DW_DMA_CFG, 0x0); in dw_dma_setup()
711 for (i = DW_DMA_CFG_TRIES; i > 0; i--) { in dw_dma_setup()
712 if (!dw_read(dev_cfg->base, DW_DMA_CFG)) { in dw_dma_setup()
718 LOG_ERR("%s: setup failed", dev->name); in dw_dma_setup()
719 ret = -EIO; in dw_dma_setup()
723 LOG_DBG("%s: ENTER", dev->name); in dw_dma_setup()
726 dw_read(dev_cfg->base, DW_DMA_CHAN_EN); in dw_dma_setup()
730 /* enable the DMA controller */ in dw_dma_setup()
731 dw_write(dev_cfg->base, DW_DMA_CFG, 1); in dw_dma_setup()
733 /* mask all interrupts for all 8 channels */ in dw_dma_setup()
734 dw_write(dev_cfg->base, DW_MASK_TFR, DW_CHAN_MASK_ALL); in dw_dma_setup()
735 dw_write(dev_cfg->base, DW_MASK_BLOCK, DW_CHAN_MASK_ALL); in dw_dma_setup()
736 dw_write(dev_cfg->base, DW_MASK_SRC_TRAN, DW_CHAN_MASK_ALL); in dw_dma_setup()
737 dw_write(dev_cfg->base, DW_MASK_DST_TRAN, DW_CHAN_MASK_ALL); in dw_dma_setup()
738 dw_write(dev_cfg->base, DW_MASK_ERR, DW_CHAN_MASK_ALL); in dw_dma_setup()
741 /* allocate FIFO partitions for each channel */ in dw_dma_setup()
742 dw_write(dev_cfg->base, DW_FIFO_PART1_HI, in dw_dma_setup()
744 dw_write(dev_cfg->base, DW_FIFO_PART1_LO, in dw_dma_setup()
746 dw_write(dev_cfg->base, DW_FIFO_PART0_HI, in dw_dma_setup()
748 dw_write(dev_cfg->base, DW_FIFO_PART0_LO, in dw_dma_setup()
760 uint32_t channel) in dw_dma_avail_data_size() argument
762 int32_t read_ptr = chan_data->ptr_data.current_ptr; in dw_dma_avail_data_size()
763 int32_t write_ptr = dw_read(base, DW_DAR(channel)); in dw_dma_avail_data_size()
764 int32_t delta = write_ptr - chan_data->ptr_data.hw_ptr; in dw_dma_avail_data_size()
767 chan_data->ptr_data.hw_ptr = write_ptr; in dw_dma_avail_data_size()
769 size = write_ptr - read_ptr; in dw_dma_avail_data_size()
772 size += chan_data->ptr_data.buffer_bytes; in dw_dma_avail_data_size()
775 * Buffer is either full or empty. If the DMA pointer has in dw_dma_avail_data_size()
776 * changed, then the DMA has filled the buffer. in dw_dma_avail_data_size()
779 size = chan_data->ptr_data.buffer_bytes; in dw_dma_avail_data_size()
781 LOG_DBG("%s: channel %d: size is 0!", dev->name, channel); in dw_dma_avail_data_size()
785 LOG_DBG("%s: channel %d: DAR %x reader 0x%x free 0x%x avail 0x%x", dev->name, channel, in dw_dma_avail_data_size()
786 write_ptr, read_ptr, chan_data->ptr_data.buffer_bytes - size, size); in dw_dma_avail_data_size()
793 uint32_t channel) in dw_dma_free_data_size() argument
795 int32_t read_ptr = dw_read(base, DW_SAR(channel)); in dw_dma_free_data_size()
796 int32_t write_ptr = chan_data->ptr_data.current_ptr; in dw_dma_free_data_size()
797 int32_t delta = read_ptr - chan_data->ptr_data.hw_ptr; in dw_dma_free_data_size()
800 chan_data->ptr_data.hw_ptr = read_ptr; in dw_dma_free_data_size()
802 size = read_ptr - write_ptr; in dw_dma_free_data_size()
804 size += chan_data->ptr_data.buffer_bytes; in dw_dma_free_data_size()
807 * Buffer is either full or empty. If the DMA pointer has in dw_dma_free_data_size()
808 * changed, then the DMA has emptied the buffer. in dw_dma_free_data_size()
811 size = chan_data->ptr_data.buffer_bytes; in dw_dma_free_data_size()
813 LOG_DBG("%s: channel %d: size is 0!", dev->name, channel); in dw_dma_free_data_size()
817 LOG_DBG("%s: channel %d: SAR %x writer 0x%x free 0x%x avail 0x%x", dev->name, channel, in dw_dma_free_data_size()
818 read_ptr, write_ptr, size, chan_data->ptr_data.buffer_bytes - size); in dw_dma_free_data_size()
823 int dw_dma_get_status(const struct device *dev, uint32_t channel, in dw_dma_get_status() argument
826 struct dw_dma_dev_data *const dev_data = dev->data; in dw_dma_get_status()
827 const struct dw_dma_dev_cfg *const dev_cfg = dev->config; in dw_dma_get_status()
830 if (channel >= DW_CHAN_COUNT) { in dw_dma_get_status()
831 return -EINVAL; in dw_dma_get_status()
834 chan_data = &dev_data->chan[channel]; in dw_dma_get_status()
836 if (chan_data->direction == MEMORY_TO_MEMORY || in dw_dma_get_status()
837 chan_data->direction == PERIPHERAL_TO_MEMORY) { in dw_dma_get_status()
838 stat->pending_length = dw_dma_avail_data_size(dev, dev_cfg->base, chan_data, in dw_dma_get_status()
839 channel); in dw_dma_get_status()
840 stat->free = chan_data->ptr_data.buffer_bytes - stat->pending_length; in dw_dma_get_status()
843 stat->free = dw_dma_free_data_size(dev, dev_cfg->base, chan_data, channel); in dw_dma_get_status()
844 stat->pending_length = chan_data->ptr_data.buffer_bytes - stat->free; in dw_dma_get_status()
847 if (!(dw_read(dev_cfg->base, DW_DMA_CHAN_EN) & DW_CHAN(channel))) { in dw_dma_get_status()
848 LOG_ERR("%s: xrun detected", dev->name); in dw_dma_get_status()
849 return -EPIPE; in dw_dma_get_status()