Lines Matching +full:count +full:- +full:width

4  * SPDX-License-Identifier: Apache-2.0
17 #define DEV_CFG(_dev) ((const struct dma_dw_axi_dev_cfg *)(_dev)->config)
18 #define DEV_DATA(_dev) ((struct dma_dw_axi_dev_data *const)(_dev)->data)
29 #define DMA_DW_AXI_GET_MSIZE(blen) (((blen) == 1) ? (0U) : (find_msb_set(blen) - 2U))
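
The macro above encodes a power-of-two burst length into the controller's MSIZE field: 0 requests a burst of 1 item, and each value n >= 1 requests a burst of 2^(n+1) items (4, 8, 16, ...). A minimal sketch of the mapping, assuming find_msb_set() returns the 1-based index of the most significant set bit, as Zephyr's sys/util.h helper does:

#include <stdint.h>
#include <stdio.h>

/* stand-in for Zephyr's find_msb_set(): 1-based MSB index, 0 for input 0 */
static unsigned int find_msb_set(uint32_t value)
{
	return value ? 32U - (unsigned int)__builtin_clz(value) : 0U;
}

#define DMA_DW_AXI_GET_MSIZE(blen) (((blen) == 1) ? (0U) : (find_msb_set(blen) - 2U))

int main(void)
{
	const uint32_t bursts[] = {1U, 4U, 8U, 16U, 32U};

	/* expected output: 1->0, 4->1, 8->2, 16->3, 32->4 */
	for (size_t i = 0U; i < sizeof(bursts) / sizeof(bursts[0]); i++) {
		printf("burst %u -> MSIZE %u\n", bursts[i], DMA_DW_AXI_GET_MSIZE(bursts[i]));
	}
	return 0;
}
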
91 /* bitfield configuration for multi-block transfer */
130 /* source transfer width */
132 /* destination transfer width */
171 /* source and destination transfer width */
194 /* type of multi-block transfer */
297 channel = find_lsb_set(status) - 1; in dma_dw_axi_isr()
303 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_isr()
309 chan_data = &dw_dev_data->chan[channel]; in dma_dw_axi_isr()
324 ret_status = -(ch_status & DMA_DW_AXI_IRQ_ALL_ERR); in dma_dw_axi_isr()
332 if (chan_data->dma_blk_xfer_callback) { in dma_dw_axi_isr()
333 chan_data->dma_blk_xfer_callback(dev, in dma_dw_axi_isr()
334 chan_data->priv_data_blk_tfr, channel, ret_status); in dma_dw_axi_isr()
343 if (chan_data->dma_xfer_callback) { in dma_dw_axi_isr()
344 chan_data->dma_xfer_callback(dev, chan_data->priv_data_xfer, in dma_dw_axi_isr()
346 chan_data->ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_isr()
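
The ISR fragments above locate the lowest pending channel with find_lsb_set(), bound-check it against the channel count, and report errors as a negated mask of the error bits. A condensed, self-contained sketch of that dispatch pattern; the find_lsb_set() stand-in and the channel count are assumptions, not the driver's code:

#include <stdint.h>
#include <stdio.h>

#define NUM_CHANNELS 8U /* hypothetical channel count */

/* stand-in for Zephyr's find_lsb_set(): 1-based LSB index, 0 for input 0 */
static unsigned int find_lsb_set(uint32_t value)
{
	return value ? (unsigned int)__builtin_ctz(value) + 1U : 0U;
}

int main(void)
{
	uint32_t status = 0x0AU; /* pretend channels 1 and 3 are pending */

	while (status != 0U) {
		unsigned int channel = find_lsb_set(status) - 1U;

		if (channel > NUM_CHANNELS - 1U) {
			break; /* spurious bit beyond the last channel */
		}
		printf("servicing channel %u\n", channel);
		status &= ~(1U << channel); /* ack the serviced channel */
	}
	return 0;
}
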
352  * @brief set source and destination transfer data width
355 * @param src_data_width source data width
356 * @param dest_data_width destination data width
358 * @retval 0 on success, -ENOTSUP if the data width is not supported
365 LOG_ERR("transfer width more than %u not supported", CONFIG_DMA_DW_AXI_DATA_WIDTH); in dma_dw_axi_set_data_width()
366 return -ENOTSUP; in dma_dw_axi_set_data_width()
372 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_8); in dma_dw_axi_set_data_width()
375 /* 2-byte transfer width */ in dma_dw_axi_set_data_width()
376 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_16); in dma_dw_axi_set_data_width()
379 /* 4-byte transfer width */ in dma_dw_axi_set_data_width()
380 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_32); in dma_dw_axi_set_data_width()
383 /* 8-byte transfer width */ in dma_dw_axi_set_data_width()
384 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_64); in dma_dw_axi_set_data_width()
387 /* 16-byte transfer width */ in dma_dw_axi_set_data_width()
388 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_128); in dma_dw_axi_set_data_width()
391 /* 32-byte transfer width */ in dma_dw_axi_set_data_width()
392 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_256); in dma_dw_axi_set_data_width()
395 /* 64-byte transfer width */ in dma_dw_axi_set_data_width()
396 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_WIDTH(BITS_512); in dma_dw_axi_set_data_width()
399 LOG_ERR("Source transfer width not supported"); in dma_dw_axi_set_data_width()
400 return -ENOTSUP; in dma_dw_axi_set_data_width()
406 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_8); in dma_dw_axi_set_data_width()
409 /* 2-byte transfer width */ in dma_dw_axi_set_data_width()
410 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_16); in dma_dw_axi_set_data_width()
413 /* 4-byte transfer width */ in dma_dw_axi_set_data_width()
414 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_32); in dma_dw_axi_set_data_width()
417 /* 8-byte transfer width */ in dma_dw_axi_set_data_width()
418 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_64); in dma_dw_axi_set_data_width()
421 /* 16-byte transfer width */ in dma_dw_axi_set_data_width()
422 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_128); in dma_dw_axi_set_data_width()
425 /* 32-byte transfer width */ in dma_dw_axi_set_data_width()
426 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_256); in dma_dw_axi_set_data_width()
429 /* 64-byte transfer width */ in dma_dw_axi_set_data_width()
430 lli_desc->ctl |= DMA_DW_AXI_CTL_DST_WIDTH(BITS_512); in dma_dw_axi_set_data_width()
433 LOG_ERR("Destination transfer width not supported"); in dma_dw_axi_set_data_width()
434 return -ENOTSUP; in dma_dw_axi_set_data_width()
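
Both switches implement the same log2 mapping from byte width to encoding: 1 byte selects BITS_8, and each doubling steps to the next enumerator up to BITS_512 at 64 bytes. A hypothetical helper that collapses the lookup, assuming BITS_8..BITS_512 are the consecutive values 0..6:

#include <errno.h>
#include <stdint.h>

/* hypothetical helper, not the driver's API: map a power-of-two byte
 * width to the CTL transfer-width encoding, assuming BITS_8..BITS_512
 * are the consecutive values 0..6 */
static int width_to_tr_width(uint32_t width_bytes, uint32_t *enc)
{
	if (width_bytes == 0U || width_bytes > 64U ||
	    (width_bytes & (width_bytes - 1U)) != 0U) {
		return -ENOTSUP; /* only power-of-two widths up to 64 bytes */
	}

	*enc = (uint32_t)__builtin_ctz(width_bytes); /* 1 -> 0, 2 -> 1, ..., 64 -> 6 */
	return 0;
}

The driver keeps the explicit switches, which read more directly against the databook's width names.
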
453 return -ENODATA; in dma_dw_axi_config()
457 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_config()
459 return -EINVAL; in dma_dw_axi_config()
466 return -EBUSY; in dma_dw_axi_config()
469 if (!cfg->block_count) { in dma_dw_axi_config()
471 return -EINVAL; in dma_dw_axi_config()
475 if (cfg->block_count > CONFIG_DMA_DW_AXI_MAX_DESC) { in dma_dw_axi_config()
476 LOG_ERR("dma:%s channel %d descriptor block count: %d larger than" in dma_dw_axi_config()
477 " max descriptors in pool: %d", dev->name, channel, in dma_dw_axi_config()
478 cfg->block_count, CONFIG_DMA_DW_AXI_MAX_DESC); in dma_dw_axi_config()
479 return -EINVAL; in dma_dw_axi_config()
482 if (cfg->source_burst_length > CONFIG_DMA_DW_AXI_MAX_BURST_TXN_LEN || in dma_dw_axi_config()
483 cfg->dest_burst_length > CONFIG_DMA_DW_AXI_MAX_BURST_TXN_LEN || in dma_dw_axi_config()
484 cfg->source_burst_length == 0 || cfg->dest_burst_length == 0) { in dma_dw_axi_config()
485 LOG_ERR("dma:%s burst length not supported", dev->name); in dma_dw_axi_config()
486 return -ENOTSUP; in dma_dw_axi_config()
490 chan_data = &dw_dev_data->chan[channel]; in dma_dw_axi_config()
493 if (chan_data->ch_state != DMA_DW_AXI_CH_IDLE) { in dma_dw_axi_config()
495 return -EBUSY; in dma_dw_axi_config()
499 msize_src = DMA_DW_AXI_GET_MSIZE(cfg->source_burst_length); in dma_dw_axi_config()
500 msize_dst = DMA_DW_AXI_GET_MSIZE(cfg->dest_burst_length); in dma_dw_axi_config()
502 chan_data->cfg = 0; in dma_dw_axi_config()
503 chan_data->irq_unmask = 0; in dma_dw_axi_config()
505 chan_data->direction = cfg->channel_direction; in dma_dw_axi_config()
507 chan_data->lli_desc_base = in dma_dw_axi_config()
508 &dw_dev_data->dma_desc_pool[channel * CONFIG_DMA_DW_AXI_MAX_DESC]; in dma_dw_axi_config()
509 chan_data->lli_desc_count = cfg->block_count; in dma_dw_axi_config()
510 memset(chan_data->lli_desc_base, 0, in dma_dw_axi_config()
511 sizeof(struct dma_lli) * chan_data->lli_desc_count); in dma_dw_axi_config()
513 lli_desc = chan_data->lli_desc_base; in dma_dw_axi_config()
514 blk_cfg = cfg->head_block; in dma_dw_axi_config()
516 /* maximum channel priority is (number of channels - 1) */ in dma_dw_axi_config()
517 if (cfg->channel_priority < dw_dev_data->dma_ctx.dma_channels) { in dma_dw_axi_config()
518 chan_data->cfg |= DMA_DW_AXI_CFG_PRIORITY(cfg->channel_priority); in dma_dw_axi_config()
522 for (i = 0; i < cfg->block_count; i++) { in dma_dw_axi_config()
524 ret = dma_dw_axi_set_data_width(lli_desc, cfg->source_data_size, in dma_dw_axi_config()
525 cfg->dest_data_size); in dma_dw_axi_config()
530 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_STAT_EN | in dma_dw_axi_config()
533 lli_desc->sar = blk_cfg->source_address; in dma_dw_axi_config()
534 lli_desc->dar = blk_cfg->dest_address; in dma_dw_axi_config()
537 lli_desc->block_ts_lo = (blk_cfg->block_size / cfg->source_data_size) - 1; in dma_dw_axi_config()
538 if (lli_desc->block_ts_lo > CONFIG_DMA_DW_AXI_MAX_BLOCK_TS) { in dma_dw_axi_config()
541 return -ENOTSUP; in dma_dw_axi_config()
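
BLOCK_TS counts source-width data items minus one, so the divide-and-subtract above must stay within CONFIG_DMA_DW_AXI_MAX_BLOCK_TS. A worked example with hypothetical numbers:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t block_size = 4096U;     /* bytes in one block (hypothetical) */
	uint32_t source_data_size = 4U;  /* bytes per source beat */

	/* the register field holds the item count minus one */
	uint32_t block_ts = (block_size / source_data_size) - 1U; /* 1023 */

	printf("BLOCK_TS = %u\n", block_ts);
	return 0;
}
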
545 if (cfg->channel_direction == MEMORY_TO_MEMORY) { in dma_dw_axi_config()
546 chan_data->cfg |= DMA_DW_AXI_CFG_TT_FC(M2M_DMAC); in dma_dw_axi_config()
548 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_MSIZE(msize_src) | in dma_dw_axi_config()
551 } else if (cfg->channel_direction == MEMORY_TO_PERIPHERAL) { in dma_dw_axi_config()
553 chan_data->cfg |= DMA_DW_AXI_CFG_TT_FC(M2P_DMAC); in dma_dw_axi_config()
554 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_MSIZE(msize_src) | in dma_dw_axi_config()
556 WRITE_BIT(chan_data->cfg, DMA_DW_AXI_CFG_HW_HS_DST_BIT_POS, 0); in dma_dw_axi_config()
559 chan_data->cfg |= DMA_DW_AXI_CFG_DST_PER(cfg->dma_slot); in dma_dw_axi_config()
561 } else if (cfg->channel_direction == PERIPHERAL_TO_MEMORY) { in dma_dw_axi_config()
562 lli_desc->ctl |= DMA_DW_AXI_CTL_SRC_MSIZE(msize_src) | in dma_dw_axi_config()
564 chan_data->cfg |= DMA_DW_AXI_CFG_TT_FC(P2M_DMAC); in dma_dw_axi_config()
565 WRITE_BIT(chan_data->cfg, DMA_DW_AXI_CFG_HW_HS_SRC_BIT_POS, 0); in dma_dw_axi_config()
568 chan_data->cfg |= DMA_DW_AXI_CFG_SRC_PER(cfg->dma_slot); in dma_dw_axi_config()
572 __func__, dev->name, channel, cfg->channel_direction); in dma_dw_axi_config()
574 return -EINVAL; in dma_dw_axi_config()
578 lli_desc->llp = ((uint64_t)(lli_desc + 1)); in dma_dw_axi_config()
582 chan_data->cfg |= DMA_DW_AXI_CFG_SRC_MULTBLK_TYPE(MULTI_BLK_LLI) | in dma_dw_axi_config()
585 lli_desc->ctl |= DMA_DW_AXI_CTL_LLI_VALID; in dma_dw_axi_config()
587 if ((i + 1) == chan_data->lli_desc_count) { in dma_dw_axi_config()
588 lli_desc->ctl |= DMA_DW_AXI_CTL_LLI_LAST | DMA_DW_AXI_CTL_LLI_VALID; in dma_dw_axi_config()
589 lli_desc->llp = 0; in dma_dw_axi_config()
592 /* configure multi-block transfer in contiguous mode */ in dma_dw_axi_config()
593 chan_data->cfg |= DMA_DW_AXI_CFG_SRC_MULTBLK_TYPE(MULTI_BLK_CONTIGUOUS) | in dma_dw_axi_config()
599 blk_cfg = blk_cfg->next_block; in dma_dw_axi_config()
602 arch_dcache_flush_range((void *)chan_data->lli_desc_base, in dma_dw_axi_config()
603 sizeof(struct dma_lli) * cfg->block_count); in dma_dw_axi_config()
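
Within the loop, each descriptor's llp points at the next pool entry and is marked valid; the last one is flagged LLI_LAST with a null llp, and the finished pool is then flushed from the data cache because the engine fetches descriptors directly from memory. A minimal chaining sketch under an assumed dma_lli layout (field names and flag positions are placeholders, not the driver's real ones):

#include <stdint.h>

/* assumed descriptor layout; the real struct dma_lli and flag positions
 * live in the driver's private header */
struct dma_lli {
	uint64_t sar;  /* source address */
	uint64_t dar;  /* destination address */
	uint64_t llp;  /* next descriptor, 0 terminates the chain */
	uint32_t ctl;
};

#define CTL_LLI_VALID (1U << 31) /* placeholder bit positions */
#define CTL_LLI_LAST  (1U << 30)

static void chain_descriptors(struct dma_lli *desc, unsigned int count)
{
	for (unsigned int i = 0U; i < count; i++) {
		desc[i].ctl |= CTL_LLI_VALID;
		if (i + 1U == count) {
			desc[i].ctl |= CTL_LLI_LAST; /* last block of the transfer */
			desc[i].llp = 0U;
		} else {
			desc[i].llp = (uint64_t)(uintptr_t)&desc[i + 1U];
		}
	}
	/* the driver then flushes this range (arch_dcache_flush_range) so
	 * the engine, which fetches descriptors from memory, sees them */
}
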
605 chan_data->lli_desc_current = chan_data->lli_desc_base; in dma_dw_axi_config()
612 if (cfg->dma_callback && cfg->complete_callback_en) { in dma_dw_axi_config()
613 chan_data->dma_blk_xfer_callback = cfg->dma_callback; in dma_dw_axi_config()
614 chan_data->priv_data_blk_tfr = cfg->user_data; in dma_dw_axi_config()
616 chan_data->irq_unmask = DMA_DW_AXI_IRQ_BLOCK_TFR | DMA_DW_AXI_IRQ_DMA_TFR; in dma_dw_axi_config()
617 } else if (cfg->dma_callback && !cfg->complete_callback_en) { in dma_dw_axi_config()
618 chan_data->dma_xfer_callback = cfg->dma_callback; in dma_dw_axi_config()
619 chan_data->priv_data_xfer = cfg->user_data; in dma_dw_axi_config()
621 chan_data->irq_unmask = DMA_DW_AXI_IRQ_DMA_TFR; in dma_dw_axi_config()
623 chan_data->irq_unmask = DMA_DW_AXI_IRQ_NONE; in dma_dw_axi_config()
627 if (!cfg->error_callback_dis) { in dma_dw_axi_config()
628 chan_data->irq_unmask |= DMA_DW_AXI_IRQ_ALL_ERR; in dma_dw_axi_config()
632 chan_data->ch_state = DMA_DW_AXI_CH_PREPARED; in dma_dw_axi_config()
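
The callback plumbing distinguishes a per-block callback (complete_callback_en set, block and transfer interrupts unmasked) from a transfer-complete-only callback. A caller-side sketch against Zephyr's generic DMA API showing how those fields are populated; the values are hypothetical:

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>

static void xfer_done(const struct device *dev, void *user_data,
		      uint32_t channel, int status)
{
	/* a negative status carries the masked error bits, per the ISR above */
}

static int setup_channel(const struct device *dma_dev, uint32_t channel,
			 struct dma_block_config *blk)
{
	struct dma_config cfg = {
		.channel_direction = MEMORY_TO_MEMORY,
		.source_data_size = 4U,     /* bytes per beat, must be a supported width */
		.dest_data_size = 4U,
		.source_burst_length = 8U,  /* within CONFIG_DMA_DW_AXI_MAX_BURST_TXN_LEN */
		.dest_burst_length = 8U,
		.block_count = 1U,
		.head_block = blk,
		.dma_callback = xfer_done,
		.complete_callback_en = 0U, /* 1 would add a callback per block */
		.user_data = NULL,
	};

	return dma_config(dma_dev, channel, &cfg);
}
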
646 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_start()
648 return -EINVAL; in dma_dw_axi_start()
655 return -EBUSY; in dma_dw_axi_start()
659 chan_data = &dw_dev_data->chan[channel]; in dma_dw_axi_start()
661 if (chan_data->ch_state != DMA_DW_AXI_CH_PREPARED) { in dma_dw_axi_start()
663 return -EINVAL; in dma_dw_axi_start()
669 sys_write64(chan_data->cfg, reg_base + DMA_DW_AXI_CH_CFG(channel)); in dma_dw_axi_start()
671 sys_write64(chan_data->irq_unmask, in dma_dw_axi_start()
673 sys_write64(chan_data->irq_unmask, in dma_dw_axi_start()
676 lli_desc = chan_data->lli_desc_current; in dma_dw_axi_start()
682 sys_write64(lli_desc->sar, reg_base + DMA_DW_AXI_CH_SAR(channel)); in dma_dw_axi_start()
683 sys_write64(lli_desc->dar, reg_base + DMA_DW_AXI_CH_DAR(channel)); in dma_dw_axi_start()
685 sys_write64(lli_desc->block_ts_lo & BLOCK_TS_MASK, in dma_dw_axi_start()
689 sys_write64(lli_desc->ctl, reg_base + DMA_DW_AXI_CH_CTL(channel)); in dma_dw_axi_start()
695 chan_data->ch_state = dma_dw_axi_get_ch_status(dev, channel); in dma_dw_axi_start()
708 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_stop()
710 return -EINVAL; in dma_dw_axi_stop()
744 return -EBUSY; in dma_dw_axi_stop()
759 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_resume()
761 return -EINVAL; in dma_dw_axi_resume()
790 if (channel > (dw_dev_data->dma_ctx.dma_channels - 1)) { in dma_dw_axi_suspend()
792 return -EINVAL; in dma_dw_axi_suspend()
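
dma_dw_axi_start(), _stop(), _resume(), and _suspend() all validate the channel index the same way before touching the hardware. Continuing the caller-side sketch with the matching generic API calls:

#include <zephyr/drivers/dma.h>

static int run_transfer(const struct device *dma_dev, uint32_t channel)
{
	int ret = dma_start(dma_dev, channel);

	if (ret != 0) {
		return ret;
	}

	/* an in-flight transfer may be paused and picked up again */
	(void)dma_suspend(dma_dev, channel);
	(void)dma_resume(dma_dev, channel);

	return dma_stop(dma_dev, channel);
}
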
825 if (dw_dma_config->reset.dev != NULL) { in dma_dw_axi_init()
827 if (!device_is_ready(dw_dma_config->reset.dev)) { in dma_dw_axi_init()
829 return -ENODEV; in dma_dw_axi_init()
832 /* assert and then de-assert the DMA controller reset line */ in dma_dw_axi_init()
833 ret = reset_line_toggle(dw_dma_config->reset.dev, dw_dma_config->reset.id); in dma_dw_axi_init()
842 for (i = 0; i < dw_dev_data->dma_ctx.dma_channels; i++) { in dma_dw_axi_init()
843 chan_data = &dw_dev_data->chan[i]; in dma_dw_axi_init()
845 chan_data->ch_state = DMA_DW_AXI_CH_IDLE; in dma_dw_axi_init()
849 dw_dma_config->irq_config(); in dma_dw_axi_init()
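
dma_dw_axi_init() only exercises the reset controller when devicetree describes one, then marks every channel idle and hooks the IRQ. A minimal sketch of that optional reset handshake, assuming the reset_dt_spec comes from devicetree as in the fragments above:

#include <errno.h>
#include <zephyr/device.h>
#include <zephyr/drivers/reset.h>

static int toggle_dma_reset(const struct reset_dt_spec *reset)
{
	if (reset->dev == NULL) {
		return 0; /* no reset line in devicetree; nothing to do */
	}

	if (!device_is_ready(reset->dev)) {
		return -ENODEV;
	}

	/* assert, then de-assert, the controller's reset line */
	return reset_line_toggle(reset->dev, reset->id);
}
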