
 * @brief Driver for Xilinx AXI DMA.
 * SPDX-License-Identifier: Apache-2.0
#include <zephyr/drivers/dma.h>
/* internal DMA error, e.g., 0-length transfer */
/* masks for DMA registers */
/* interrupt timeout - trigger interrupt after X cycles when no transfer; unit is 125 * clock period */
/* irqthreshold - this can be used to generate interrupts after X completed packets */
/* DMA ignores completed bit in SG descriptor and overwrites descriptors */
/* use AXI fixed burst instead of incrementing burst for TX transfers, e.g., useful for reading a FIFO */
/* run-stop */
/* DMA decode error */
/* DMA slave error */
/* DMA internal error */
/* DMA channel is idle, i.e., DMA operations completed; writing tail restarts operation */
/* RS (run-stop) in DMACR is 0 and operations completed; writing tail does nothing */
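/*
 * Editor's sketch (not part of the driver): how the two states above differ
 * for restart purposes. The DMASR mask names below are assumptions modeled on
 * these comments; only an "idle" channel can be kicked by a tail-pointer write.
 */
static inline bool dma_channel_accepts_tail_write(uint32_t dmasr)
{
	/* halted (RS == 0): tail writes are ignored until RS is set again */
	if (dmasr & XILINX_AXI_DMA_REGS_DMASR_HALTED) {
		return false;
	}
	/* idle (RS == 1, all queued work done): a tail write restarts the channel */
	return (dmasr & XILINX_AXI_DMA_REGS_DMASR_IDLE) != 0;
}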
/* in-memory descriptor, read by the DMA, that instructs it how many bytes to transfer from which address */
/* next descriptor[31:6], bits 5-0 reserved */
/* application-specific fields used, e.g., to enable checksum offloading */
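/*
 * Editor's sketch of the descriptor layout implied by the fields this driver
 * touches below (nxtdesc, buffer_address, control, status, app0..app4). The
 * two reserved words and the "sketch" name are assumptions; the hardware
 * requires 64-byte alignment, hence the trailing attribute.
 */
struct __packed dma_xilinx_axi_dma_sg_descriptor_sketch {
	uint32_t nxtdesc;            /* next descriptor[31:6] */
	uint32_t nxtdesc_msb;        /* next descriptor[63:32] */
	uint32_t buffer_address;     /* buffer address[31:0] */
	uint32_t buffer_address_msb; /* buffer address[63:32] */
	uint32_t reserved1;
	uint32_t reserved2;
	uint32_t control;            /* transfer length plus SOF/EOF flags */
	uint32_t status;             /* completed/error flags plus transferred length */
	uint32_t app0;               /* application-specific, e.g., checksum offload */
	uint32_t app1;
	uint32_t app2;
	uint32_t app3;
	uint32_t app4;
} __aligned(64);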
/* DMA control register */
/* DMA status register */
/* global configuration per DMA device */
/* this should always be 2 - one for TX, one for RX */
/* number of this channel's IRQ */
/* ISR that normally handles the channel's interrupts */
/* per-channel state */
/* global state for device and array of per-channel states */
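/*
 * Editor's sketch of the shapes implied by the uses below (member order and
 * the "sketch" names are assumptions; the real definitions live in this file):
 */
struct dma_xilinx_axi_dma_config_sketch {
	void *reg;                   /* base of the memory-mapped register space */
	uint32_t channels;           /* always 2: one TX, one RX */
	void (*irq_configure)(void); /* hooks up the per-channel ISRs */
	uint32_t irq0_channels[2];   /* IRQ number per channel */
};

struct dma_xilinx_axi_dma_data_sketch {
	bool device_has_been_reset;
	struct dma_xilinx_axi_dma_channel channels[2]; /* indexed TX = 0, RX = 1 */
};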
	/* TX is 0, RX is 1 */
	ret = irq_is_enabled(cfg->irq0_channels[0]) ? 1 : 0;
	ret |= (irq_is_enabled(cfg->irq0_channels[1]) ? 1 : 0) << 1;

	LOG_DBG("DMA IRQ state: %x TX IRQN: %" PRIu32 " RX IRQN: %" PRIu32, ret,
		cfg->irq0_channels[0], cfg->irq0_channels[1]);

	irq_disable(cfg->irq0_channels[0]);
	irq_disable(cfg->irq0_channels[1]);
	/* TX was enabled */
	irq_enable(cfg->irq0_channels[0]);

	irq_enable(cfg->irq0_channels[1]);
	ret = irq_is_enabled(cfg->irq0_channels[channel_num]);

	LOG_DBG("DMA IRQ state: %x", ret);

	irq_disable(cfg->irq0_channels[channel_num]);

	irq_enable(cfg->irq0_channels[channel_num]);
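/*
 * Editor's note: every caller below brackets descriptor-ring manipulation in
 * this lock/unlock pair, following the pattern visible in
 * dma_xilinx_axi_dma_start():
 *
 *     const int irq_key = dma_xilinx_axi_dma_lock_irq(cfg, channel);
 *     ...modify descriptors and registers...
 *     dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key);
 *
 * The key encodes which IRQs were enabled beforehand, so unlock restores the
 * exact prior state instead of unconditionally enabling both channels.
 */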
	const struct dma_xilinx_axi_dma_data *data = dev->data;

	return data->channels[XILINX_AXI_DMA_RX_CHANNEL_NUM].last_rx_size;
#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	/* this overwrites the DMA control register */
	uint32_t dmacr = dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmacr);

	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->dmacr, dmacr);
#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		LOG_ERR("DMA has internal error, DMASR = %" PRIx32,
			dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr));

	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		LOG_ERR("DMA has slave error, DMASR = %" PRIx32,
			dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr));

	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		LOG_ERR("DMA has decode error, DMASR = %" PRIx32,
			dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr));

	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		LOG_ERR("DMA has SG internal error, DMASR = %" PRIx32,
			dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr));

	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		LOG_ERR("DMA has SG slave error, DMASR = %" PRIx32,
			dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr));

	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		LOG_ERR("DMA has SG decode error, DMASR = %" PRIx32,
			dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr));

	if (descriptor->status & XILINX_AXI_DMA_SG_DESCRIPTOR_STATUS_DEC_ERR_MASK) {
		LOG_ERR("Descriptor has SG decode error, status=%" PRIx32, descriptor->status);

	if (descriptor->status & XILINX_AXI_DMA_SG_DESCRIPTOR_STATUS_SLV_ERR_MASK) {
		LOG_ERR("Descriptor has SG slave error, status=%" PRIx32, descriptor->status);

	if (descriptor->status & XILINX_AXI_DMA_SG_DESCRIPTOR_STATUS_INT_ERR_MASK) {
		LOG_ERR("Descriptor has SG internal error, status=%" PRIx32, descriptor->status);
		&channel_data->descriptors[channel_data->current_transfer_end_index];

	while (current_descriptor->status & XILINX_AXI_DMA_SG_DESCRIPTOR_STATUS_COMPLETE_MASK ||
	       current_descriptor->status & ~XILINX_AXI_DMA_SG_DESCRIPTOR_STATUS_TRANSFERRED_MASK) {
		/* descriptor completed or errored out - need to call callback */

		/* this is meaningless / ignored for TX channel */
		channel_data->last_rx_size = current_descriptor->status &

			LOG_ERR("Channel / descriptor error on %s chan!", chan_name);
			retval = -EFAULT;

		if (channel_data->check_csum_in_isr) {
			uint32_t checksum_status = current_descriptor->app2;

				retval = -EFAULT;

				retval = -EFAULT;

				retval = -EFAULT;

				retval = -EFAULT;

			/* as we do not have per-skb flags for checksum status */
		/* clear the flags so that the DMA neither transfers the descriptor twice nor raises an error */
		current_descriptor->control = current_descriptor->status = 0;

		channel_data->current_transfer_end_index++;
		if (channel_data->current_transfer_end_index >= channel_data->num_descriptors) {
			channel_data->current_transfer_end_index = 0;

		if (channel_data->completion_callback) {
			LOG_DBG("Received packet with %u bytes!", channel_data->last_rx_size);
			channel_data->completion_callback(
				dev, channel_data->completion_callback_user_data,

			&channel_data->descriptors[channel_data->current_transfer_end_index];

#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->dmasr, 0xffffffff);
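/*
 * Editor's sketch: the cleanup loop above walks the descriptor ring by
 * advancing current_transfer_end_index and wrapping at num_descriptors,
 * equivalent to this helper (name hypothetical):
 */
static inline size_t dma_xilinx_axi_dma_ring_next(size_t index, size_t num_descriptors)
{
	return (index + 1 < num_descriptors) ? (index + 1) : 0;
}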
	struct dma_xilinx_axi_dma_data *data = dev->data;
		&data->channels[XILINX_AXI_DMA_TX_CHANNEL_NUM];

	processed_packets = dma_xilinx_axi_dma_clean_up_sg_descriptors(dev, channel_data, "TX");

	struct dma_xilinx_axi_dma_data *data = dev->data;
		&data->channels[XILINX_AXI_DMA_RX_CHANNEL_NUM];
	LOG_DBG("Cleaned up %u RX packets in this ISR!", processed_packets);
static int dma_xilinx_axi_dma_start(const struct device *dev, uint32_t channel)
	const struct dma_xilinx_axi_dma_config *cfg = dev->config;
	struct dma_xilinx_axi_dma_data *data = dev->data;
	struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel];

	const int irq_key = dma_xilinx_axi_dma_lock_irq(cfg, channel);

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel,
			cfg->channels);
		dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key);
		return -EINVAL;

	tail_descriptor = channel_data->current_transfer_start_index++;

	if (channel_data->current_transfer_start_index >= channel_data->num_descriptors) {
			channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? "TX" : "RX");
		channel_data->current_transfer_start_index = 0;

	current_descriptor = &channel_data->descriptors[tail_descriptor];
		&channel_data->descriptors[channel_data->current_transfer_end_index];

	LOG_DBG("Starting DMA on %s channel with tail ptr %zu start ptr %zu",
		channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? "TX" : "RX", tail_descriptor,
		channel_data->current_transfer_end_index);

#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	if (dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &

		LOG_DBG("AXI DMA is halted - restart operation!");

			&channel_data->channel_regs->curdesc,
			&channel_data->channel_regs->curdesc_msb,
		dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->curdesc,

	/* for Eth DMA, this should never be used */
	/* no cyclic mode - we use the completed bit to control which */

#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->dmacr, new_control);

	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->taildesc,
	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->taildesc_msb,

	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->taildesc,

	dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key);
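/*
 * Editor's sketch (hypothetical caller, not part of the driver): starting the
 * RX channel through the generic Zephyr DMA API, which dispatches to
 * dma_xilinx_axi_dma_start(). The devicetree node label "axi_dma0" is an
 * assumption for illustration.
 */
static void example_start_rx(void)
{
	const struct device *dma_dev = DEVICE_DT_GET(DT_NODELABEL(axi_dma0));
	int err = dma_start(dma_dev, XILINX_AXI_DMA_RX_CHANNEL_NUM);

	if (err != 0) {
		LOG_ERR("Failed to start RX channel: %d", err);
	}
}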
static int dma_xilinx_axi_dma_stop(const struct device *dev, uint32_t channel)
	const struct dma_xilinx_axi_dma_config *cfg = dev->config;
	struct dma_xilinx_axi_dma_data *data = dev->data;
	struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel];

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel,
			cfg->channels);
		return -EINVAL;

	k_timer_stop(&channel_data->polling_timer);

	new_control = channel_data->channel_regs->dmacr;
	/* RS = 0 --> DMA will complete ongoing transactions and then go into hold */

#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	dma_xilinx_axi_dma_write_reg(&channel_data->channel_regs->dmacr, new_control);
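/*
 * Editor's sketch (hypothetical caller): a graceful TX shutdown. Because only
 * RS is cleared above, in-flight transactions complete before the channel
 * halts; a later dma_start() sets RS again.
 */
static int example_stop_tx(const struct device *dma_dev)
{
	return dma_stop(dma_dev, XILINX_AXI_DMA_TX_CHANNEL_NUM);
}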
static int dma_xilinx_axi_dma_get_status(const struct device *dev, uint32_t channel,
	const struct dma_xilinx_axi_dma_config *cfg = dev->config;
	struct dma_xilinx_axi_dma_data *data = dev->data;
	struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel];

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel,
			cfg->channels);
		return -EINVAL;

#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
	stat->busy = !(dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &
		       !(dma_xilinx_axi_dma_read_reg(&channel_data->channel_regs->dmasr) &

	stat->dir = channel_data->last_transfer_direction;

	/* FIXME fill hardware-specific fields */
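/*
 * Editor's sketch (hypothetical caller): the busy flag computed above from the
 * halted/idle status bits is the usual way to wait for channel quiescence via
 * the generic API.
 */
static bool example_tx_is_idle(const struct device *dma_dev)
{
	struct dma_status stat;

	if (dma_get_status(dma_dev, XILINX_AXI_DMA_TX_CHANNEL_NUM, &stat) != 0) {
		return false;
	}
	return !stat.busy;
}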
 * Transfers a single buffer through the DMA
						uint32_t channel,

	const int irq_key = dma_xilinx_axi_dma_lock_irq(cfg, channel);

	current_descriptor = &channel_data->descriptors[channel_data->current_transfer_start_index];

	current_descriptor->buffer_address = (uint32_t)buffer_addr & 0xffffffff;
	current_descriptor->buffer_address_msb = (uint32_t)(buffer_addr >> 32);

	current_descriptor->buffer_address = buffer_addr;

	current_descriptor->app0 = channel_data->sg_desc_app0;

		dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key);

		return -EINVAL;

	current_descriptor->control = (uint32_t)block_size;

		current_descriptor->control |= XILINX_AXI_DMA_SG_DESCRIPTOR_CTRL_SOF_MASK;

		current_descriptor->control |= XILINX_AXI_DMA_SG_DESCRIPTOR_CTRL_EOF_MASK;

	dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key);
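/*
 * Editor's note: a frame that fits into a single buffer gets both flags on the
 * same descriptor, i.e., the two branches above combine to (sketch):
 *
 *     current_descriptor->control = (uint32_t)block_size |
 *                                   XILINX_AXI_DMA_SG_DESCRIPTOR_CTRL_SOF_MASK |
 *                                   XILINX_AXI_DMA_SG_DESCRIPTOR_CTRL_EOF_MASK;
 */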
static inline int dma_xilinx_axi_dma_config_reload(const struct device *dev, uint32_t channel,

static inline int dma_xilinx_axi_dma_config_reload(const struct device *dev, uint32_t channel,

	const struct dma_xilinx_axi_dma_config *cfg = dev->config;
	struct dma_xilinx_axi_dma_data *data = dev->data;
	struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel];

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel,
			cfg->channels);
		return -EINVAL;
	/* one-block-at-a-time transfer */
		cfg, channel, channel_data, channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? src : dst,
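/*
 * Editor's sketch (hypothetical caller): re-arming the RX channel with a fresh
 * buffer via the generic reload API; as the call above shows, only dst is
 * honored for RX (src for TX).
 */
static int example_rearm_rx(const struct device *dma_dev, uint8_t *buf, size_t len)
{
	return dma_reload(dma_dev, XILINX_AXI_DMA_RX_CHANNEL_NUM,
			  0 /* src is ignored for RX */,
			  (uint32_t)(uintptr_t)buf, len);
}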
/* as interrupts are level-sensitive, interrupts can be missed on certain platforms */
	struct dma_xilinx_axi_dma_channel *channel =
	const struct device *dev = channel->polling_timer_params.dev;
	const unsigned int irq_number = channel->polling_timer_params.irq_number;

	channel->polling_timer_params.isr(dev);
static int dma_xilinx_axi_dma_configure(const struct device *dev, uint32_t channel,
	const struct dma_xilinx_axi_dma_config *cfg = dev->config;
	struct dma_xilinx_axi_dma_data *data = dev->data;
	struct dma_block_config *current_block = dma_cfg->head_block;

		(struct dma_xilinx_axi_dma_register_space *)cfg->reg;

	if (channel >= cfg->channels) {
		LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel,
			cfg->channels);
		return -EINVAL;

	if (cfg->channels != XILINX_AXI_DMA_NUM_CHANNELS) {
			") - Xilinx AXI DMA must have %" PRIu32 " channels!",
			cfg->channels, XILINX_AXI_DMA_NUM_CHANNELS);
		return -EINVAL;

	if (dma_cfg->head_block->source_addr_adj == DMA_ADDR_ADJ_DECREMENT) {
		LOG_ERR("Xilinx AXI DMA only supports incrementing addresses!");
		return -ENOTSUP;

	if (dma_cfg->head_block->dest_addr_adj == DMA_ADDR_ADJ_DECREMENT) {
		LOG_ERR("Xilinx AXI DMA only supports incrementing addresses!");
		return -ENOTSUP;

	if (dma_cfg->head_block->source_addr_adj != DMA_ADDR_ADJ_INCREMENT &&
	    dma_cfg->head_block->source_addr_adj != DMA_ADDR_ADJ_NO_CHANGE) {
		LOG_ERR("invalid source_addr_adj %" PRIu16, dma_cfg->head_block->source_addr_adj);
		return -ENOTSUP;
	if (dma_cfg->head_block->dest_addr_adj != DMA_ADDR_ADJ_INCREMENT &&
	    dma_cfg->head_block->dest_addr_adj != DMA_ADDR_ADJ_NO_CHANGE) {
		LOG_ERR("invalid dest_addr_adj %" PRIu16, dma_cfg->head_block->dest_addr_adj);
		return -ENOTSUP;

	if (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM &&
	    dma_cfg->channel_direction != MEMORY_TO_PERIPHERAL) {
		LOG_ERR("TX channel must be used with MEMORY_TO_PERIPHERAL!");
		return -ENOTSUP;

	if (channel == XILINX_AXI_DMA_RX_CHANNEL_NUM &&
	    dma_cfg->channel_direction != PERIPHERAL_TO_MEMORY) {
		LOG_ERR("RX channel must be used with PERIPHERAL_TO_MEMORY!");
		return -ENOTSUP;
	k_timer_init(&data->channels[channel].polling_timer, polling_timer_handler, NULL);

	data->channels[channel].polling_timer_params.dev = dev;
	data->channels[channel].polling_timer_params.irq_number = cfg->irq0_channels[channel];
	data->channels[channel].polling_timer_params.isr =
		(channel == XILINX_AXI_DMA_TX_CHANNEL_NUM) ? dma_xilinx_axi_dma_tx_isr

	data->channels[channel].last_transfer_direction = dma_cfg->channel_direction;

	if (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM) {
		data->channels[channel].descriptors = descriptors_tx;
		data->channels[channel].num_descriptors = ARRAY_SIZE(descriptors_tx);

		data->channels[channel].channel_regs = &regs->mm2s_registers;

		data->channels[channel].descriptors = descriptors_rx;
		data->channels[channel].num_descriptors = ARRAY_SIZE(descriptors_rx);

		data->channels[channel].channel_regs = &regs->s2mm_registers;

	LOG_DBG("Resetting DMA channel!");

	if (!data->device_has_been_reset) {
		LOG_INF("Soft-resetting the DMA core!");
#pragma GCC diagnostic ignored "-Waddress-of-packed-member"
		/* this resets BOTH RX and TX channels, although it is triggered in per-channel registers */
		dma_xilinx_axi_dma_write_reg(&data->channels[channel].channel_regs->dmacr,

		data->device_has_been_reset = true;

	LOG_DBG("Configuring %zu DMA descriptors for %s", data->channels[channel].num_descriptors,
		channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? "TX" : "RX");
	/* only configure fields whose defaults are not 0, as descriptors are in zero-initialized memory */
	data->channels[channel].current_transfer_start_index =
		data->channels[channel].current_transfer_end_index = 0;
	for (int i = 0; i < data->channels[channel].num_descriptors; i++) {

		if (i + 1 < data->channels[channel].num_descriptors) {
			nextdesc = (uintptr_t)&data->channels[channel].descriptors[i + 1];
			nextdesc = (uintptr_t)&data->channels[channel].descriptors[0];

		/* SG descriptors have 64-byte alignment requirements */
			"SG descriptor address %p (offset %u) was not aligned to 64-byte boundary!",

		data->channels[channel].descriptors[i].nxtdesc = low_bytes;

		data->channels[channel].descriptors[i].nxtdesc_msb = high_bytes;
	data->channels[channel].check_csum_in_isr = false;

	/* the DMA passes the app fields through to the AXI4-Stream-connected device */

	switch (dma_cfg->linked_channel) {
		if (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM) {
			/* for the TX channel, we need to indicate that we would like to use */
			data->channels[channel].sg_desc_app0 =
			/* for the RX channel, the Ethernet core will indicate to us that it has */
			data->channels[channel].check_csum_in_isr = true;

		data->channels[channel].sg_desc_app0 =

		LOG_ERR("Linked channel invalid! Valid values: %u for full Ethernet checksum "
		return -EINVAL;

	data->channels[channel].completion_callback = dma_cfg->dma_callback;
	data->channels[channel].completion_callback_user_data = dma_cfg->user_data;
	LOG_INF("Completed configuration of AXI DMA - Starting transfer!");

		dma_xilinx_axi_dma_transfer_block(cfg, channel, &data->channels[channel],
						  channel == XILINX_AXI_DMA_TX_CHANNEL_NUM
							  ? current_block->source_address
							  : current_block->dest_address,
						  current_block->block_size, block_count == 0,
						  current_block->next_block == NULL);

	} while ((current_block = current_block->next_block) && ret == 0);

	k_timer_start(&data->channels[channel].polling_timer,
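/*
 * Editor's sketch (hypothetical caller): a minimal TX configuration that
 * passes the checks above - MEMORY_TO_PERIPHERAL on the TX channel, a single
 * block, and an incrementing source address. linked_channel is left at its
 * default here; valid values depend on the checksum-offload options above.
 */
static void example_tx_done(const struct device *dev, void *user_data,
			    uint32_t channel, int status)
{
	/* status < 0 reports channel/descriptor errors from the cleanup path */
}

static int example_configure_tx(const struct device *dma_dev, uint8_t *frame, size_t len)
{
	struct dma_block_config block = {
		.source_address = (uint32_t)(uintptr_t)frame,
		.block_size = len,
		.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
	};
	struct dma_config dma_cfg = {
		.channel_direction = MEMORY_TO_PERIPHERAL,
		.block_count = 1,
		.head_block = &block,
		.dma_callback = example_tx_done,
	};

	return dma_config(dma_dev, XILINX_AXI_DMA_TX_CHANNEL_NUM, &dma_cfg);
}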
static bool dma_xilinx_axi_dma_chan_filter(const struct device *dev, int channel,

	if (strcmp(filter_str, "tx") == 0) {
		return channel == XILINX_AXI_DMA_TX_CHANNEL_NUM;

		return channel == XILINX_AXI_DMA_RX_CHANNEL_NUM;
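/*
 * Editor's sketch (hypothetical caller): the filter above lets users resolve
 * channel numbers by name through the generic request API.
 */
static int example_request_tx_channel(const struct device *dma_dev)
{
	/* returns the TX channel number, or a negative errno if unavailable */
	return dma_request_channel(dma_dev, "tx");
}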
/* DMA API callbacks */
static DEVICE_API(dma, dma_xilinx_axi_dma_driver_api) = {

	const struct dma_xilinx_axi_dma_config *cfg = dev->config;

	cfg->irq_configure();

/* first IRQ is TX */

/* two different compatibles match the very same Xilinx AXI DMA, */