Lines Matching +full:enable +full:- +full:channel

4  * SPDX-License-Identifier: Apache-2.0
17 #define DEV_CFG(_dev) ((const struct dma_dw_axi_dev_cfg *)(_dev)->config)
18 #define DEV_DATA(_dev) ((struct dma_dw_axi_dev_data *const)(_dev)->data)
29 #define DMA_DW_AXI_GET_MSIZE(blen) ((blen == 1) ? (0U) : (find_msb_set(blen) - 2U))
44 /* Channel enable by setting ch_en and ch_en_we */
46 /* Channel suspend by setting ch_susp and ch_susp_we */
48 /* Channel abort by setting ch_abort and ch_abort_we */
51 /* channel susp/resume write enable pos */
53 /* channel resume bit pos */
58 /* source address register for a channel */
60 /* destination address register for a channel */
62 /* block transfer size register for a channel */
64 /* channel control register */
66 /* channel configuration register */
70 /* channel status register */
72 /* channel software handshake source register */
74 /* channel software handshake destination register */
76 /* channel block transfer resume request register */
78 /* channel AXI ID register */
80 /* channel AXI QOS register */
82 /* channel interrupt status enable register */
84 /* channel interrupt status register */
86 /* channel interrupt signal enable register */
88 /* channel interrupt clear register */
91 /* bitfield configuration for multi-block transfer */
113 /* source status enable bit */
115 /* destination status enable bit */
117 /* source burst length enable */
119 /* source burst length(considered when corresponding enable bit is set) */
121 /* destination burst length enable */
123 /* destination burst length(considered when corresponding enable bit is set) */
137 /* enable block transfer completion interrupt */
139 /* enable transfer completion interrupt */
141 /* enable interrupts on any dma transfer error */
144 /* global enable bit for dma controller */
146 /* global enable bit for interrupt */
163 /* status of the channel */
194 /* type of multi-block transfer */
202 /* dma driver channel specific information */
208 /* dma channel state */
235 /* pointer to store channel specific info */
255 * @brief get current status of the channel
258 * @param channel channel number
260 * @retval status of the channel
270 /* channel is active/busy in the dma transfer */ in dma_dw_axi_get_ch_status()
276 /* channel is currently suspended */ in dma_dw_axi_get_ch_status()
282 /* channel is idle */ in dma_dw_axi_get_ch_status()
288 unsigned int channel; in dma_dw_axi_isr() local
295 /* read interrupt status register to find interrupt is for which channel */ in dma_dw_axi_isr()
297 channel = find_lsb_set(status) - 1; in dma_dw_axi_isr()
298 if (channel < 0) { in dma_dw_axi_isr()
299 LOG_ERR("Spurious interrupt received channel:%u\n", channel); in dma_dw_axi_isr()
303 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_isr()
304 LOG_ERR("Interrupt received on invalid channel:%d\n", channel); in dma_dw_axi_isr()
308 /* retrieve channel specific data pointer for a channel */ in dma_dw_axi_isr()
309 chan_data = &dw_dev_data->chan[channel]; in dma_dw_axi_isr()
312 ch_status = sys_read64(reg_base + DMA_DW_AXI_CH_INTSTATUS(channel)); in dma_dw_axi_isr()
321 reg_base + DMA_DW_AXI_CH_INTCLEARREG(channel)); in dma_dw_axi_isr()
322 LOG_ERR("DMA Error: Channel:%d Channel interrupt status:0x%llx\n", in dma_dw_axi_isr()
323 channel, ch_status); in dma_dw_axi_isr()
324 ret_status = -(ch_status & DMA_DW_AXI_IRQ_ALL_ERR); in dma_dw_axi_isr()
330 reg_base + DMA_DW_AXI_CH_INTCLEARREG(channel)); in dma_dw_axi_isr()
332 if (chan_data->dma_blk_xfer_callback) { in dma_dw_axi_isr()
333 chan_data->dma_blk_xfer_callback(dev, in dma_dw_axi_isr()
334 chan_data->priv_data_blk_tfr, channel, ret_status); in dma_dw_axi_isr()
341 reg_base + DMA_DW_AXI_CH_INTCLEARREG(channel)); in dma_dw_axi_isr()
343 if (chan_data->dma_xfer_callback) { in dma_dw_axi_isr()
344 chan_data->dma_xfer_callback(dev, chan_data->priv_data_xfer, in dma_dw_axi_isr()
345 channel, ret_status); in dma_dw_axi_isr()
346 chan_data->ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_isr()
358 * @retval 0 on success, -ENOTSUP if the data width is not supported
366 return -ENOTSUP; in dma_dw_axi_set_data_width()
372 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_8); in dma_dw_axi_set_data_width()
375 /* 2-bytes transfer width */ in dma_dw_axi_set_data_width()
376 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_16); in dma_dw_axi_set_data_width()
379 /* 4-bytes transfer width */ in dma_dw_axi_set_data_width()
380 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_32); in dma_dw_axi_set_data_width()
383 /* 8-bytes transfer width */ in dma_dw_axi_set_data_width()
384 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_64); in dma_dw_axi_set_data_width()
387 /* 16-bytes transfer width */ in dma_dw_axi_set_data_width()
388 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_128); in dma_dw_axi_set_data_width()
391 /* 32-bytes transfer width */ in dma_dw_axi_set_data_width()
392 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_256); in dma_dw_axi_set_data_width()
395 /* 64-bytes transfer width */ in dma_dw_axi_set_data_width()
396 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_512); in dma_dw_axi_set_data_width()
400 return -ENOTSUP; in dma_dw_axi_set_data_width()
406 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_8); in dma_dw_axi_set_data_width()
409 /* 2-bytes transfer width */ in dma_dw_axi_set_data_width()
410 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_16); in dma_dw_axi_set_data_width()
413 /* 4-bytes transfer width */ in dma_dw_axi_set_data_width()
414 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_32); in dma_dw_axi_set_data_width()
417 /* 8-bytes transfer width */ in dma_dw_axi_set_data_width()
418 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_64); in dma_dw_axi_set_data_width()
421 /* 16-bytes transfer width */ in dma_dw_axi_set_data_width()
422 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_128); in dma_dw_axi_set_data_width()
425 /* 32-bytes transfer width */ in dma_dw_axi_set_data_width()
426 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_256); in dma_dw_axi_set_data_width()
429 /* 64-bytes transfer width */ in dma_dw_axi_set_data_width()
430 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_512); in dma_dw_axi_set_data_width()
434 return -ENOTSUP; in dma_dw_axi_set_data_width()
440 static int dma_dw_axi_config(const struct device *dev, uint32_t channel, in dma_dw_axi_config() argument
453 return -ENODATA; in dma_dw_axi_config()
456 /* check if the channel is valid */ in dma_dw_axi_config()
457 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_config()
458 LOG_ERR("invalid dma channel %d", channel); in dma_dw_axi_config()
459 return -EINVAL; in dma_dw_axi_config()
462 /* return if the channel is not idle */ in dma_dw_axi_config()
463 ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_config()
465 LOG_ERR("DMA channel:%d is not idle(status:%d)", channel, ch_state); in dma_dw_axi_config()
466 return -EBUSY; in dma_dw_axi_config()
469 if (!cfg->block_count) { in dma_dw_axi_config()
471 return -EINVAL; in dma_dw_axi_config()
475 if (cfg->block_count > CONFIG_DMA_DW_AXI_MAX_DESC) { in dma_dw_axi_config()
476 LOG_ERR("dma:%s channel %d descriptor block count: %d larger than" in dma_dw_axi_config()
477 " max descriptors in pool: %d", dev->name, channel, in dma_dw_axi_config()
478 cfg->block_count, CONFIG_DMA_DW_AXI_MAX_DESC); in dma_dw_axi_config()
479 return -EINVAL; in dma_dw_axi_config()
482 if (cfg->source_burst_length > CONFIG_DMA_DW_AXI_MAX_BURST_TXN_LEN || in dma_dw_axi_config()
483 cfg->dest_burst_length > CONFIG_DMA_DW_AXI_MAX_BURST_TXN_LEN || in dma_dw_axi_config()
484 cfg->source_burst_length == 0 || cfg->dest_burst_length == 0) { in dma_dw_axi_config()
485 LOG_ERR("dma:%s burst length not supported", dev->name); in dma_dw_axi_config()
486 return -ENOTSUP; in dma_dw_axi_config()
489 /* get channel specific data pointer */ in dma_dw_axi_config()
490 chan_data = &dw_dev_data->chan[channel]; in dma_dw_axi_config()
492 /* check if the channel is currently idle */ in dma_dw_axi_config()
493 if (chan_data->ch_state != DMA_DW_AXI_CH_IDLE) { in dma_dw_axi_config()
494 LOG_ERR("DMA channel:%d is busy", channel); in dma_dw_axi_config()
495 return -EBUSY; in dma_dw_axi_config()
499 msize_src = DMA_DW_AXI_GET_MSIZE(cfg->source_burst_length); in dma_dw_axi_config()
500 msize_dst = DMA_DW_AXI_GET_MSIZE(cfg->dest_burst_length); in dma_dw_axi_config()
502 chan_data->cfg = 0; in dma_dw_axi_config()
503 chan_data->irq_unmask = 0; in dma_dw_axi_config()
505 chan_data->direction = cfg->channel_direction; in dma_dw_axi_config()
507 chan_data->lli_desc_base = in dma_dw_axi_config()
508 &dw_dev_data->dma_desc_pool[channel * CONFIG_DMA_DW_AXI_MAX_DESC]; in dma_dw_axi_config()
509 chan_data->lli_desc_count = cfg->block_count; in dma_dw_axi_config()
510 memset(chan_data->lli_desc_base, 0, in dma_dw_axi_config()
511 sizeof(struct dma_lli) * chan_data->lli_desc_count); in dma_dw_axi_config()
513 lli_desc = chan_data->lli_desc_base; in dma_dw_axi_config()
514 blk_cfg = cfg->head_block; in dma_dw_axi_config()
516 /* max channel priority can be MAX_CHANNEL - 1 */ in dma_dw_axi_config()
517 if (cfg->channel_priority < dw_dev_data->dma_ctx.dma_channels) { in dma_dw_axi_config()
518 chan_data->cfg |= DMA_DW_AXI_CFG_PRIORITY(cfg->channel_priority); in dma_dw_axi_config()
522 for (i = 0; i < cfg->block_count; i++) { in dma_dw_axi_config()
524 ret = dma_dw_axi_set_data_width(lli_desc, cfg->source_data_size, in dma_dw_axi_config()
525 cfg->dest_data_size); in dma_dw_axi_config()
530 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_STAT_EN | in dma_dw_axi_config()
533 lli_desc->sar = blk_cfg->source_address; in dma_dw_axi_config()
534 lli_desc->dar = blk_cfg->dest_address; in dma_dw_axi_config()
537 lli_desc->block_ts_lo = (blk_cfg->block_size / cfg->source_data_size) - 1; in dma_dw_axi_config()
538 if (lli_desc->block_ts_lo > CONFIG_DMA_DW_AXI_MAX_BLOCK_TS) { in dma_dw_axi_config()
541 return -ENOTSUP; in dma_dw_axi_config()
544 /* configuration based on channel direction */ in dma_dw_axi_config()
545 if (cfg->channel_direction == MEMORY_TO_MEMORY) { in dma_dw_axi_config()
546 chan_data->cfg |= DMA_DW_AXI_CFG_TT_FC(M2M_DMAC); in dma_dw_axi_config()
548 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_MSIZE(msize_src) | in dma_dw_axi_config()
551 } else if (cfg->channel_direction == MEMORY_TO_PERIPHERAL) { in dma_dw_axi_config()
553 chan_data->cfg |= DMA_DW_AXI_CFG_TT_FC(M2P_DMAC); in dma_dw_axi_config()
554 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_MSIZE(msize_src) | in dma_dw_axi_config()
556 WRITE_BIT(chan_data->cfg, DMA_DW_AXI_CFG_HW_HS_DST_BIT_POS, 0); in dma_dw_axi_config()
559 chan_data->cfg |= DMA_DW_AXI_CFG_DST_PER(cfg->dma_slot); in dma_dw_axi_config()
561 } else if (cfg->channel_direction == PERIPHERAL_TO_MEMORY) { in dma_dw_axi_config()
562 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_MSIZE(msize_src) | in dma_dw_axi_config()
564 chan_data->cfg |= DMA_DW_AXI_CFG_TT_FC(P2M_DMAC); in dma_dw_axi_config()
565 WRITE_BIT(chan_data->cfg, DMA_DW_AXI_CFG_HW_HS_SRC_BIT_POS, 0); in dma_dw_axi_config()
568 chan_data->cfg |= DMA_DW_AXI_CFG_SRC_PER(cfg->dma_slot); in dma_dw_axi_config()
571 LOG_ERR("%s: dma %s channel %d invalid direction %d", in dma_dw_axi_config()
572 __func__, dev->name, channel, cfg->channel_direction); in dma_dw_axi_config()
574 return -EINVAL; in dma_dw_axi_config()
578 lli_desc->llp = ((uint64_t)(lli_desc + 1)); in dma_dw_axi_config()
582 chan_data->cfg |= DMA_DW_AXI_CFG_SRC_MULTBLK_TYPE(MULTI_BLK_LLI) | in dma_dw_axi_config()
585 lli_desc->ctl |= DMA_DW_AXI_CTL_LLI_VALID; in dma_dw_axi_config()
587 if ((i + 1) == chan_data->lli_desc_count) { in dma_dw_axi_config()
588 lli_desc->ctl |= DMA_DW_AXI_CTL_LLI_LAST | DMA_DW_AXI_CTL_LLI_VALID; in dma_dw_axi_config()
589 lli_desc->llp = 0; in dma_dw_axi_config()
592 /* configure multi-block transfer as contiguous mode */ in dma_dw_axi_config()
593 chan_data->cfg |= DMA_DW_AXI_CFG_SRC_MULTBLK_TYPE(MULTI_BLK_CONTIGUOUS) | in dma_dw_axi_config()
599 blk_cfg = blk_cfg->next_block; in dma_dw_axi_config()
602 arch_dcache_flush_range((void *)chan_data->lli_desc_base, in dma_dw_axi_config()
603 sizeof(struct dma_lli) * cfg->block_count); in dma_dw_axi_config()
605 chan_data->lli_desc_current = chan_data->lli_desc_base; in dma_dw_axi_config()
607 /* enable an interrupt depending on whether the callback is requested after dma transfer in dma_dw_axi_config()
612 if (cfg->dma_callback && cfg->complete_callback_en) { in dma_dw_axi_config()
613 chan_data->dma_blk_xfer_callback = cfg->dma_callback; in dma_dw_axi_config()
614 chan_data->priv_data_blk_tfr = cfg->user_data; in dma_dw_axi_config()
616 chan_data->irq_unmask = DMA_DW_AXI_IRQ_BLOCK_TFR | DMA_DW_AXI_IRQ_DMA_TFR; in dma_dw_axi_config()
617 } else if (cfg->dma_callback && !cfg->complete_callback_en) { in dma_dw_axi_config()
618 chan_data->dma_xfer_callback = cfg->dma_callback; in dma_dw_axi_config()
619 chan_data->priv_data_xfer = cfg->user_data; in dma_dw_axi_config()
621 chan_data->irq_unmask = DMA_DW_AXI_IRQ_DMA_TFR; in dma_dw_axi_config()
623 chan_data->irq_unmask = DMA_DW_AXI_IRQ_NONE; in dma_dw_axi_config()
627 if (!cfg->error_callback_dis) { in dma_dw_axi_config()
628 chan_data->irq_unmask |= DMA_DW_AXI_IRQ_ALL_ERR; in dma_dw_axi_config()
632 chan_data->ch_state = DMA_DW_AXI_CH_PREPARED; in dma_dw_axi_config()
637 static int dma_dw_axi_start(const struct device *dev, uint32_t channel) in dma_dw_axi_start() argument
645 /* validate channel number */ in dma_dw_axi_start()
646 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_start()
647 LOG_ERR("invalid dma channel %d", channel); in dma_dw_axi_start()
648 return -EINVAL; in dma_dw_axi_start()
651 /* check whether channel is idle before initiating DMA transfer */ in dma_dw_axi_start()
652 ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_start()
654 LOG_ERR("DMA channel:%d is not idle", channel); in dma_dw_axi_start()
655 return -EBUSY; in dma_dw_axi_start()
658 /* get channel specific data pointer */ in dma_dw_axi_start()
659 chan_data = &dw_dev_data->chan[channel]; in dma_dw_axi_start()
661 if (chan_data->ch_state != DMA_DW_AXI_CH_PREPARED) { in dma_dw_axi_start()
663 return -EINVAL; in dma_dw_axi_start()
666 /* enable dma controller and global interrupt bit */ in dma_dw_axi_start()
669 sys_write64(chan_data->cfg, reg_base + DMA_DW_AXI_CH_CFG(channel)); in dma_dw_axi_start()
671 sys_write64(chan_data->irq_unmask, in dma_dw_axi_start()
672 reg_base + DMA_DW_AXI_CH_INTSTATUS_ENABLEREG(channel)); in dma_dw_axi_start()
673 sys_write64(chan_data->irq_unmask, in dma_dw_axi_start()
674 reg_base + DMA_DW_AXI_CH_INTSIGNAL_ENABLEREG(channel)); in dma_dw_axi_start()
676 lli_desc = chan_data->lli_desc_current; in dma_dw_axi_start()
679 sys_write64(((uint64_t)lli_desc), reg_base + DMA_DW_AXI_CH_LLP(channel)); in dma_dw_axi_start()
682 sys_write64(lli_desc->sar, reg_base + DMA_DW_AXI_CH_SAR(channel)); in dma_dw_axi_start()
683 sys_write64(lli_desc->dar, reg_base + DMA_DW_AXI_CH_DAR(channel)); in dma_dw_axi_start()
685 sys_write64(lli_desc->block_ts_lo & BLOCK_TS_MASK, in dma_dw_axi_start()
686 reg_base + DMA_DW_AXI_CH_BLOCK_TS(channel)); in dma_dw_axi_start()
689 sys_write64(lli_desc->ctl, reg_base + DMA_DW_AXI_CH_CTL(channel)); in dma_dw_axi_start()
692 /* Enable the channel which will initiate DMA transfer */ in dma_dw_axi_start()
693 sys_write64(CH_EN(channel), reg_base + DMA_DW_AXI_CHENREG); in dma_dw_axi_start()
695 chan_data->ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_start()
700 static int dma_dw_axi_stop(const struct device *dev, uint32_t channel) in dma_dw_axi_stop() argument
707 /* channel should be valid */ in dma_dw_axi_stop()
708 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_stop()
709 LOG_ERR("invalid dma channel %d", channel); in dma_dw_axi_stop()
710 return -EINVAL; in dma_dw_axi_stop()
713 /* return if the channel is idle as there is nothing to stop */ in dma_dw_axi_stop()
714 ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_stop()
716 /* channel is already idle */ in dma_dw_axi_stop()
720 /* To stop transfer or abort the channel in case of abnormal state: in dma_dw_axi_stop()
721 * 1. To disable channel, first suspend channel and drain the FIFO in dma_dw_axi_stop()
722 * 2. Disable the channel. Channel may get hung and can't be disabled in dma_dw_axi_stop()
724 * 3. If channel is not disabled, Abort the channel. Aborting channel will in dma_dw_axi_stop()
728 sys_write64(CH_SUSP(channel), reg_base + DMA_DW_AXI_CHENREG); in dma_dw_axi_stop()
730 /* Try to disable the channel */ in dma_dw_axi_stop()
731 sys_clear_bit(reg_base + DMA_DW_AXI_CHENREG, channel); in dma_dw_axi_stop()
733 is_channel_busy = WAIT_FOR((sys_read64(reg_base + DMA_DW_AXI_CHENREG)) & (BIT(channel)), in dma_dw_axi_stop()
736 LOG_WRN("No response from handshaking interface... Aborting a channel..."); in dma_dw_axi_stop()
737 sys_write64(CH_ABORT(channel), reg_base + DMA_DW_AXI_CHENREG); in dma_dw_axi_stop()
740 (BIT(channel)), CONFIG_DMA_CHANNEL_STATUS_TIMEOUT, in dma_dw_axi_stop()
743 LOG_ERR("Channel abort failed"); in dma_dw_axi_stop()
744 return -EBUSY; in dma_dw_axi_stop()
751 static int dma_dw_axi_resume(const struct device *dev, uint32_t channel) in dma_dw_axi_resume() argument
758 /* channel should be valid */ in dma_dw_axi_resume()
759 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_resume()
760 LOG_ERR("invalid dma channel %d", channel); in dma_dw_axi_resume()
761 return -EINVAL; in dma_dw_axi_resume()
764 ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_resume()
766 LOG_INF("channel %u is not in suspended state so cannot resume channel", channel); in dma_dw_axi_resume()
771 /* channel susp write enable bit has to be asserted */ in dma_dw_axi_resume()
772 WRITE_BIT(reg, CH_RESUME_WE(channel), 1); in dma_dw_axi_resume()
774 /* channel susp bit must be cleared to resume a channel */ in dma_dw_axi_resume()
774 WRITE_BIT(reg, CH_RESUME(channel), 0); in dma_dw_axi_resume()
775 /* resume a channel by writing 0: ch_susp and 1: ch_susp_we */ in dma_dw_axi_resume()
781 /* suspend a dma channel */
782 static int dma_dw_axi_suspend(const struct device *dev, uint32_t channel) in dma_dw_axi_suspend() argument
789 /* channel should be valid */ in dma_dw_axi_suspend()
790 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_suspend()
791 LOG_ERR("invalid dma channel %u", channel); in dma_dw_axi_suspend()
792 return -EINVAL; in dma_dw_axi_suspend()
795 ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_suspend()
797 LOG_INF("nothing to suspend as dma channel %u is not busy", channel); in dma_dw_axi_suspend()
802 sys_write64(CH_SUSP(channel), reg_base + DMA_DW_AXI_CHENREG); in dma_dw_axi_suspend()
804 ret = WAIT_FOR(dma_dw_axi_get_ch_status(dev, channel) & in dma_dw_axi_suspend()
808 LOG_ERR("channel suspend failed"); in dma_dw_axi_suspend()
825 if (dw_dma_config->reset.dev != NULL) { in dma_dw_axi_init()
827 if (!device_is_ready(dw_dma_config->reset.dev)) { in dma_dw_axi_init()
829 return -ENODEV; in dma_dw_axi_init()
832 /* assert and de-assert dma controller */ in dma_dw_axi_init()
833 ret = reset_line_toggle(dw_dma_config->reset.dev, dw_dma_config->reset.id); in dma_dw_axi_init()
841 /* initialize channel state variable */ in dma_dw_axi_init()
842 for (i = 0; i < dw_dev_data->dma_ctx.dma_channels; i++) { in dma_dw_axi_init()
843 chan_data = &dw_dev_data->chan[i]; in dma_dw_axi_init()
844 /* initialize channel state */ in dma_dw_axi_init()
845 chan_data->ch_state = DMA_DW_AXI_CH_IDLE; in dma_dw_axi_init()
848 /* configure and enable interrupt lines */ in dma_dw_axi_init()
849 dw_dma_config->irq_config(); in dma_dw_axi_init()
862 /* enable irq lines */