Lines Matching +full:dma +full:- +full:channel +full:- +full:mask

 * SPDX-License-Identifier: Apache-2.0

#include <zephyr/drivers/dma.h>

#include <zephyr/drivers/dma/dma_smartbond.h>

#define DMA_CHANNELS_COUNT DT_PROP(DT_NODELABEL(dma), dma_channels)
#define DMA_BLOCK_COUNT DT_PROP(DT_NODELABEL(dma), block_count)

#define DMA_CHN2REG(_idx) (&((struct channel_regs *)DMA)[(_idx)])

        DMA->DMA_REQ_MUX_REG = \
                (DMA->DMA_REQ_MUX_REG & ~(0xf << DMA_MUX_SHIFT((_idx)))) | \

        ((DMA->DMA_REQ_MUX_REG >> DMA_MUX_SHIFT((_idx))) & 0xf)

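/*
 * Illustration (not from the driver): the two macros above treat
 * DMA_REQ_MUX_REG as an array of 4-bit trigger fields, one per channel
 * pair. Assuming DMA_MUX_SHIFT(_idx) expands to (((_idx) >> 1) * 4),
 * channels 0/1 share bits [3:0], channels 2/3 bits [7:4], and so on,
 * which is why a trigger source always applies to a channel pair.
 */
#define EXAMPLE_DMA_MUX_SHIFT(_idx) (((_idx) >> 1) * 4) /* hypothetical helper */

static uint32_t example_mux_set(uint32_t reg, uint32_t idx, uint32_t trig)
{
        reg &= ~(0xf << EXAMPLE_DMA_MUX_SHIFT(idx)); /* clear the pair's nibble */
        reg |= (trig & 0xf) << EXAMPLE_DMA_MUX_SHIFT(idx); /* program the new trigger */
        return reg;
}
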
 * DMA channel priority level. The smaller the value the lower the priority granted to a channel

 * inherent mechanism is applied in which the lower the channel number the higher the priority.

        DMA_SMARTBOND_CHANNEL_PRIO_0 = 0x0, /* Lowest channel priority */

        DMA_SMARTBOND_CHANNEL_PRIO_7, /* Highest channel priority */

 * DMA bus width indicating how many bytes are retrieved/written per transfer.

/* User callbacks and data to be stored per channel */

/* True if there is any DMA activity on any channel, false otherwise. */

/* in dma_smartbond_is_dma_active() */
        if (DMA_CTRL_REG_GET_FIELD(DMA_ON, regs->DMA_CTRL_REG)) {

static void dma_smartbond_set_channel_status(const struct device *dev,
                                             uint32_t channel, bool status)

        struct channel_regs *regs = DMA_CHN2REG(channel);

                /* Make sure the status register for the requested channel is cleared. */
                DMA->DMA_CLEAR_INT_REG |= BIT(channel);
                /* Enable interrupts for the requested channel. */
                DMA->DMA_INT_MASK_REG |= BIT(channel);

                /* Check if this is the first attempt to enable DMA interrupts. */

                /* Prevent sleep as long as DMA operations are ongoing */

                DMA_CTRL_REG_SET_FIELD(DMA_ON, regs->DMA_CTRL_REG, 0x1);

                DMA_CTRL_REG_SET_FIELD(DMA_ON, regs->DMA_CTRL_REG, 0x0);

                /*
                 * It might happen that DMA is already in progress. Make sure the current
                 * on-going transfer is complete (cannot be interrupted).
                 */
                while (DMA_CTRL_REG_GET_FIELD(DMA_ON, regs->DMA_CTRL_REG)) {

                /* Disable interrupts for the requested channel */
                DMA->DMA_INT_MASK_REG &= ~(BIT(channel));
                /* Clear the status register; the requested channel should be considered obsolete */
                DMA->DMA_CLEAR_INT_REG |= BIT(channel);

                /* DMA interrupts should be disabled only if all channels are disabled. */

                /* Allow entering sleep once all DMA channels are inactive */

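/*
 * Usage sketch (application side), assuming a devicetree node labeled
 * "dma": dma_start()/dma_stop() funnel into
 * dma_smartbond_set_channel_status(), which also manages the interrupt
 * mask and the sleep-prevention policy shown above.
 */
#include <zephyr/drivers/dma.h>

static const struct device *const example_dma_dev = DEVICE_DT_GET(DT_NODELABEL(dma));

static int example_start_stop(uint32_t channel)
{
        int ret = dma_start(example_dma_dev, channel); /* enables channel and its interrupt */

        if (ret == 0) {
                /* stop first waits out any on-going transfer, as shown above */
                ret = dma_stop(example_dma_dev, channel);
        }
        return ret;
}
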
static bool dma_channel_dst_addr_check_and_adjust(uint32_t channel, uint32_t *dst)

        secure_boot_reg = CRG_TOP->SECURE_BOOT_REG;

         * then only the secure channel #7 can be used to transfer data to AES key buffer.

            (channel != DMA_SECURE_CHANNEL))) {
                LOG_ERR("Keys are protected. Only secure channel #7 can be employed.");

static bool dma_channel_src_addr_check_and_adjust(uint32_t channel, uint32_t *src)

        /* DMA can only access physical addresses, not remapped. */

                phy_address += (MCU_QSPIF_M_BASE - MCU_QSPIF_M_CACHED_BASE);

                phy_address += (MCU_OTP_M_P_BASE - MCU_OTP_M_BASE);

        secure_boot_reg = CRG_TOP->SECURE_BOOT_REG;

         * secure channel #7 can be used to fetch secure keys data.

            (channel != DMA_SECURE_CHANNEL)) {
                LOG_ERR("Keys are protected. Only secure channel #7 can be employed.");

/* in dma_channel_update_dreq_mode() */
        /* DMA channel starts immediately */

        /* DMA channel is started by the peripheral's DMA request */

/* in dma_channel_update_req_sense() */
                                         uint32_t channel, uint32_t *dma_ctrl_reg)

        /* Odd channel numbers should reflect TX path */
        if (channel & BIT(0)) {

static void dma_set_mux_request(enum dma_smartbond_trig_mux trig_mux, uint32_t channel)

        DMA_REQ_MUX_REG_SET(channel, trig_mux);

         * for DMA access.

         * the DMA acknowledge signal driven to the selected peripheral. Make sure

         * higher priorities (dma channels of lower indexing). It's OK if a
         * channel of higher indexing defines the same peripheral request source

        switch (channel) {

                /* fall-through */

                /* fall-through */

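/*
 * Illustration of the pairing convention referenced above, assuming the
 * even channel of a pair serves the RX path and the odd one the TX path
 * of the same peripheral trigger (helper names are made up).
 */
static inline uint32_t example_rx_channel(uint32_t pair) { return pair * 2U; }
static inline uint32_t example_tx_channel(uint32_t pair) { return pair * 2U + 1U; }
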
static int dma_smartbond_config(const struct device *dev, uint32_t channel, struct dma_config *cfg)

        struct dma_smartbond_data *data = dev->data;

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

        regs = DMA_CHN2REG(channel);

        dma_ctrl_reg = regs->DMA_CTRL_REG;

                LOG_ERR("Requested channel is enabled. It should first be disabled");
                return -EIO;

        if (cfg == NULL || cfg->head_block == NULL) {

                return -EINVAL;

        if (!cfg->error_callback_dis) {

                if (!cfg->complete_callback_en) {
                        data->channel_data[channel].cb = cfg->dma_callback;
                        data->channel_data[channel].user_data = cfg->user_data;

                data->channel_data[channel].cb = NULL;
                data->channel_data[channel].user_data = NULL;

        data->channel_data[channel].dir = cfg->channel_direction;

        if (cfg->block_count > DMA_BLOCK_COUNT) {

        if (cfg->channel_priority >= DMA_SMARTBOND_CHANNEL_PRIO_MAX) {
                cfg->channel_priority = DMA_SMARTBOND_CHANNEL_PRIO_7;
                LOG_WRN("Channel priority exceeded max. Setting to highest valid level");

        DMA_CTRL_REG_SET_FIELD(DMA_PRIO, dma_ctrl_reg, cfg->channel_priority);

        if (((cfg->source_burst_length != cfg->dest_burst_length) ||
             !dma_channel_update_burst_mode(cfg->source_burst_length, &dma_ctrl_reg))) {

                return -EINVAL;

        data->channel_data[channel].burst_len = cfg->source_burst_length;

        if (cfg->source_data_size != cfg->dest_data_size ||
            !dma_channel_update_bus_width(cfg->source_data_size, &dma_ctrl_reg)) {

                return -EINVAL;

        data->channel_data[channel].bus_width = cfg->source_data_size;

        if (cfg->source_chaining_en || cfg->dest_chaining_en ||
            cfg->head_block->source_gather_en || cfg->head_block->dest_scatter_en ||
            cfg->head_block->source_reload_en || cfg->head_block->dest_reload_en) {

        if (!dma_channel_update_src_addr_adj(cfg->head_block->source_addr_adj,

                return -EINVAL;

        if (!dma_channel_update_dst_addr_adj(cfg->head_block->dest_addr_adj, &dma_ctrl_reg)) {

                return -EINVAL;

        if (!dma_channel_update_dreq_mode(cfg->channel_direction, &dma_ctrl_reg)) {
573 LOG_ERR("Inavlid channel direction"); in dma_smartbond_config()
574 return -EINVAL; in dma_smartbond_config()
        if (cfg->cyclic && DMA_CTRL_REG_GET_FIELD(DREQ_MODE, dma_ctrl_reg) != DREQ_MODE_HW) {
                LOG_ERR("Circular mode is only supported for non memory-to-memory transfers");
                return -EINVAL;

        DMA_CTRL_REG_SET_FIELD(CIRCULAR, dma_ctrl_reg, cfg->cyclic);

         * cannot be interrupted by other DMA channels.

        dma_channel_update_req_sense(cfg->dma_slot, channel, &dma_ctrl_reg);

        regs->DMA_CTRL_REG = dma_ctrl_reg;

        src_dst_address = cfg->head_block->source_address;
        if (!dma_channel_src_addr_check_and_adjust(channel, &src_dst_address)) {
                return -EINVAL;

        if (src_dst_address % cfg->source_data_size) {

                return -EINVAL;

        regs->DMA_A_START = src_dst_address;

        src_dst_address = cfg->head_block->dest_address;
        if (!dma_channel_dst_addr_check_and_adjust(channel, &src_dst_address)) {
                return -EINVAL;

        if (src_dst_address % cfg->dest_data_size) {

                return -EINVAL;

        regs->DMA_B_START = src_dst_address;

        if (cfg->head_block->block_size % (cfg->source_data_size * cfg->source_burst_length)) {

                return -EINVAL;

        regs->DMA_LEN_REG = (cfg->head_block->block_size / cfg->source_data_size) - 1;

        regs->DMA_INT_REG = (cfg->head_block->block_size / cfg->source_data_size) - 1;

        if ((cfg->source_handshake != cfg->dest_handshake) ||
            (cfg->source_handshake != 0)/*HW*/) {

                return -EINVAL;

        dma_set_mux_request(cfg->dma_slot, channel);

        /* Designate that channel has been configured */
        data->channel_data[channel].is_dma_configured = true;

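/*
 * Usage sketch for dma_smartbond_config() via the generic API: a
 * single-block memory-to-memory transfer. DMA_SMARTBOND_TRIG_MUX_NONE is
 * assumed to be the correct dma_slot value when no peripheral trigger is
 * involved; buffer names and sizes are made up. Per the checks above,
 * source/dest data sizes and burst lengths must match, and block_size
 * must be a multiple of their product.
 */
static void example_cb(const struct device *dev, void *user_data,
                       uint32_t channel, int status)
{
        /* Invoked from the ISR below with DMA_STATUS_COMPLETE on success. */
}

static int example_m2m(const struct device *dma, uint32_t channel,
                       uint32_t *src, uint32_t *dst, size_t len)
{
        struct dma_block_config block = {
                .source_address = (uint32_t)src,
                .dest_address = (uint32_t)dst,
                .block_size = len,                        /* multiple of 4 bytes here */
                .source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
                .dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
        };
        struct dma_config cfg = {
                .dma_slot = DMA_SMARTBOND_TRIG_MUX_NONE,  /* assumption: no HW trigger */
                .channel_direction = MEMORY_TO_MEMORY,
                .channel_priority = DMA_SMARTBOND_CHANNEL_PRIO_7,
                .source_data_size = 4, .dest_data_size = 4,       /* must match */
                .source_burst_length = 1, .dest_burst_length = 1, /* must match */
                .block_count = 1,
                .head_block = &block,
                .dma_callback = example_cb,
        };
        int ret = dma_config(dma, channel, &cfg);

        return ret ? ret : dma_start(dma, channel);
}
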
static int dma_smartbond_reload(const struct device *dev, uint32_t channel, uint32_t src,

        struct dma_smartbond_data *data = dev->data;

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

        regs = DMA_CHN2REG(channel);

        if (!data->channel_data[channel].is_dma_configured) {
                LOG_ERR("Requested DMA channel should first be configured");
                return -EINVAL;

                return -EINVAL;

        if (DMA_CTRL_REG_GET_FIELD(DMA_ON, regs->DMA_CTRL_REG)) {
                LOG_ERR("Channel is busy, settings cannot be changed mid-transfer");
                return -EBUSY;

        if (src % data->channel_data[channel].bus_width) {

                return -EINVAL;

        if (!dma_channel_src_addr_check_and_adjust(channel, &src)) {
                return -EINVAL;

        regs->DMA_A_START = src;

        if (dst % data->channel_data[channel].bus_width) {

                return -EINVAL;

        if (!dma_channel_dst_addr_check_and_adjust(channel, &dst)) {
                return -EINVAL;

        regs->DMA_B_START = dst;

        if (size % (data->channel_data[channel].burst_len *
                    data->channel_data[channel].bus_width)) {

                return -EINVAL;

        regs->DMA_LEN_REG = (size / data->channel_data[channel].bus_width) - 1;

        regs->DMA_INT_REG = (size / data->channel_data[channel].bus_width) - 1;

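/*
 * Re-arm sketch: once a channel has been configured, dma_reload() swaps
 * buffers without a full dma_config(); the alignment rules above (bus
 * width, burst length) still apply to src, dst and size.
 */
static int example_rearm(const struct device *dma, uint32_t channel,
                         uint32_t src, uint32_t dst, size_t size)
{
        int ret = dma_reload(dma, channel, src, dst, size);

        return ret ? ret : dma_start(dma, channel);
}
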
static int dma_smartbond_start(const struct device *dev, uint32_t channel)

        struct dma_smartbond_data *data = dev->data;

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

        regs = DMA_CHN2REG(channel);

        if (!data->channel_data[channel].is_dma_configured) {
                LOG_ERR("Requested DMA channel should first be configured");
                return -EINVAL;

        /* Should return success if the requested channel is already started. */
        if (DMA_CTRL_REG_GET_FIELD(DMA_ON, regs->DMA_CTRL_REG)) {

        dma_smartbond_set_channel_status(dev, channel, true);

static int dma_smartbond_stop(const struct device *dev, uint32_t channel)

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

        regs = DMA_CHN2REG(channel);

         * the corresponding register mask and disable NVIC if there is no other
         * channel in use.

        dma_smartbond_set_channel_status(dev, channel, false);

static int dma_smartbond_suspend(const struct device *dev, uint32_t channel)

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

         * Freezing the DMA engine is valid for memory-to-memory operations.

        LOG_WRN("DMA is frozen globally");

        /*
         * Freezing the DMA engine can be done universally and not per channel!
         * An attempt to disable the channel would result in resetting the IDX
         * register the next time the channel is re-enabled.
         */
        GPREG->SET_FREEZE_REG = GPREG_SET_FREEZE_REG_FRZ_DMA_Msk;

static int dma_smartbond_resume(const struct device *dev, uint32_t channel)

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

        LOG_WRN("DMA is unfrozen globally");

        /* Unfreezing the DMA engine can be done universally and not per channel! */
        GPREG->RESET_FREEZE_REG = GPREG_RESET_FREEZE_REG_FRZ_DMA_Msk;

static int dma_smartbond_get_status(const struct device *dev, uint32_t channel,

        struct dma_smartbond_data *data = dev->data;

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return -EINVAL;

        if (!data->channel_data[channel].is_dma_configured) {
                LOG_ERR("Requested DMA channel should first be configured");
                return -EINVAL;

        regs = DMA_CHN2REG(channel);

         * The DMA is running in parallel with the CPU and so it might happen that an on-going transfer

         * the channel register values.

        dma_ctrl_reg = regs->DMA_CTRL_REG;
        dma_idx_reg = regs->DMA_IDX_REG;
        dma_len_reg = regs->DMA_LEN_REG;

        stat->total_copied = dma_idx_reg * bus_width;
        stat->pending_length = (dma_len_reg - dma_idx_reg) * bus_width;
        stat->busy = DMA_CTRL_REG_GET_FIELD(DMA_ON, dma_ctrl_reg);
        stat->dir = data->channel_data[channel].dir;

        /* DMA does not support circular buffer functionality */
        stat->free = 0;
        stat->read_position = 0;
        stat->write_position = 0;

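/*
 * Progress-polling sketch: total_copied and pending_length are the byte
 * counts derived from DMA_IDX_REG and DMA_LEN_REG as computed above.
 */
static int example_poll(const struct device *dma, uint32_t channel)
{
        struct dma_status stat;
        int ret = dma_get_status(dma, channel, &stat);

        if (ret == 0 && stat.busy) {
                LOG_INF("copied %u bytes, %u pending",
                        stat.total_copied, stat.pending_length);
        }
        return ret;
}
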
/* in dma_smartbond_get_attribute() */
                return -EINVAL;

         * Source and destination addresses should be a multiple of a channel's bus width.

         * channel could be requested.

         * Buffer size should be a multiple of a channel's bus width multiplied by burst length.
         * This info could be provided at runtime given that attributes of a specific channel

                return -ENOSYS;

                return -EINVAL;

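/*
 * Attribute-query sketch: per the comments above, buffer alignment is
 * channel-specific here, so those queries return -ENOSYS;
 * DMA_ATTR_MAX_BLOCK_COUNT is assumed to be an attribute this driver
 * can answer through the generic dma_get_attribute() call.
 */
static int example_attr(const struct device *dma)
{
        uint32_t max_blocks;

        return dma_get_attribute(dma, DMA_ATTR_MAX_BLOCK_COUNT, &max_blocks);
}
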
static bool dma_smartbond_chan_filter(const struct device *dev, int channel, void *filter_param)

        if (channel >= DMA_CHANNELS_COUNT) {
                LOG_ERR("Invalid DMA channel index");
                return false; /* bool filter: reject an invalid channel index */

        /* If user does not provide any channel request explicitly, return true. */

        if (channel == requested_channel) {

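/*
 * Allocation sketch: dma_request_channel() walks the free channels and
 * calls the filter above; filter_param is assumed to be a pointer to the
 * desired channel index, with NULL accepting any free channel.
 */
static int example_acquire(const struct device *dma)
{
        uint32_t wanted = 2; /* hypothetical: insist on channel 2 */
        int ch = dma_request_channel(dma, &wanted);

        if (ch >= 0) {
                dma_release_channel(dma, ch); /* give it back when done */
        }
        return ch;
}
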
static DEVICE_API(dma, dma_smartbond_driver_api) = {

/* in smartbond_dma_isr() */
        struct dma_smartbond_data *data = ((const struct device *)arg)->data;

         * A single interrupt line is generated for all channels and so each channel

        for (i = 0, dma_int_status_reg = DMA->DMA_INT_STATUS_REG;

                /* Check if the selected channel has raised the interrupt line */

                        if (data->channel_data[i].cb) {
                                data->channel_data[i].cb((const struct device *)arg,
                                        data->channel_data[i].user_data, i, DMA_STATUS_COMPLETE);

                        /* Channel line should be cleared, otherwise the ISR will keep firing! */
                        DMA->DMA_CLEAR_INT_REG = BIT(i);

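/*
 * Callback contract illustrated: the ISR above runs in interrupt context
 * and invokes the registered callback with DMA_STATUS_COMPLETE, so a
 * handler should only do lightweight signalling (the semaphore passed
 * through user_data is a made-up example).
 */
static void example_done_cb(const struct device *dev, void *user_data,
                            uint32_t channel, int status)
{
        if (status == DMA_STATUS_COMPLETE) {
                k_sem_give((struct k_sem *)user_data);
        }
}
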
/* in dma_smartbond_is_sleep_allowed() */
        struct dma_smartbond_data *data = dev->data;

        for (int i = 0; i < data->dma_ctx.dma_channels; i++) {
                if (atomic_test_bit(data->dma_ctx.atomic, i)) {
                        /* Abort sleeping if at least one dma channel is acquired */

/* in dma_smartbond_pm_action() */
        /*
         * When we reach this point there should be no ongoing DMA transfers.
         * However, a DMA channel can still be acquired and so the configured
         * channel(s) should be retained. To avoid reconfiguring DMA or
         * reading/writing DMA channels' registers, we assume that sleep is not
         * allowed unless all DMA channels are released.
         */
        ret = -EBUSY;

         * No need to perform any actions here as the DMA engine

         * No need to perform any actions here as the DMA engine

        return -ENOTSUP;

1010 LOG_ERR("64-bit addressing mode is not supported\n"); in dma_smartbond_init()
1011 return -ENOSYS; in dma_smartbond_init()
1017 data = dev->data; in dma_smartbond_init()
1018 data->dma_ctx.magic = DMA_MAGIC; in dma_smartbond_init()
1019 data->dma_ctx.dma_channels = DMA_CHANNELS_COUNT; in dma_smartbond_init()
1020 data->dma_ctx.atomic = data->channels_atomic; in dma_smartbond_init()
1025 data->channel_data[idx].is_dma_configured = false; in dma_smartbond_init()