Lines Matching refs:channel

580 static int dma_xilinx_axi_dma_start(const struct device *dev, uint32_t channel) in dma_xilinx_axi_dma_start() argument
584 struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel]; in dma_xilinx_axi_dma_start()
592 const int irq_key = dma_xilinx_axi_dma_lock_irq(cfg, channel); in dma_xilinx_axi_dma_start()
594 if (channel >= cfg->channels) { in dma_xilinx_axi_dma_start()
595 LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel, in dma_xilinx_axi_dma_start()
597 dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key); in dma_xilinx_axi_dma_start()
605 channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? "TX" : "RX"); in dma_xilinx_axi_dma_start()
615 channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? "TX" : "RX", tail_descriptor, in dma_xilinx_axi_dma_start()
692 dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key); in dma_xilinx_axi_dma_start()
700 static int dma_xilinx_axi_dma_stop(const struct device *dev, uint32_t channel) in dma_xilinx_axi_dma_stop() argument
704 struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel]; in dma_xilinx_axi_dma_stop()
708 if (channel >= cfg->channels) { in dma_xilinx_axi_dma_stop()
709 LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel, in dma_xilinx_axi_dma_stop()
731 static int dma_xilinx_axi_dma_get_status(const struct device *dev, uint32_t channel, in dma_xilinx_axi_dma_get_status() argument
736 struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel]; in dma_xilinx_axi_dma_get_status()
738 if (channel >= cfg->channels) { in dma_xilinx_axi_dma_get_status()
739 LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel, in dma_xilinx_axi_dma_get_status()
765 uint32_t channel, in dma_xilinx_axi_dma_transfer_block() argument
773 const int irq_key = dma_xilinx_axi_dma_lock_irq(cfg, channel); in dma_xilinx_axi_dma_transfer_block()
791 dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key); in dma_xilinx_axi_dma_transfer_block()
812 dma_xilinx_axi_dma_unlock_irq(cfg, channel, irq_key); in dma_xilinx_axi_dma_transfer_block()
818 static inline int dma_xilinx_axi_dma_config_reload(const struct device *dev, uint32_t channel, in dma_xilinx_axi_dma_config_reload() argument
821 static inline int dma_xilinx_axi_dma_config_reload(const struct device *dev, uint32_t channel, in dma_xilinx_axi_dma_config_reload()
827 struct dma_xilinx_axi_dma_channel *channel_data = &data->channels[channel]; in dma_xilinx_axi_dma_config_reload()
829 if (channel >= cfg->channels) { in dma_xilinx_axi_dma_config_reload()
830 LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel, in dma_xilinx_axi_dma_config_reload()
836 cfg, channel, channel_data, channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? src : dst, in dma_xilinx_axi_dma_config_reload()
844 struct dma_xilinx_axi_dma_channel *channel = in polling_timer_handler() local
846 const struct device *dev = channel->polling_timer_params.dev; in polling_timer_handler()
847 const unsigned int irq_number = channel->polling_timer_params.irq_number; in polling_timer_handler()
854 channel->polling_timer_params.isr(dev); in polling_timer_handler()
861 static int dma_xilinx_axi_dma_configure(const struct device *dev, uint32_t channel, in dma_xilinx_axi_dma_configure() argument
873 if (channel >= cfg->channels) { in dma_xilinx_axi_dma_configure()
874 LOG_ERR("Invalid channel %" PRIu32 " - must be < %" PRIu32 "!", channel, in dma_xilinx_axi_dma_configure()
907 if (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM && in dma_xilinx_axi_dma_configure()
913 if (channel == XILINX_AXI_DMA_RX_CHANNEL_NUM && in dma_xilinx_axi_dma_configure()
919 k_timer_init(&data->channels[channel].polling_timer, polling_timer_handler, NULL); in dma_xilinx_axi_dma_configure()
921 data->channels[channel].polling_timer_params.dev = dev; in dma_xilinx_axi_dma_configure()
922 data->channels[channel].polling_timer_params.irq_number = cfg->irq0_channels[channel]; in dma_xilinx_axi_dma_configure()
923 data->channels[channel].polling_timer_params.isr = in dma_xilinx_axi_dma_configure()
924 (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM) ? dma_xilinx_axi_dma_tx_isr in dma_xilinx_axi_dma_configure()
927 data->channels[channel].last_transfer_direction = dma_cfg->channel_direction; in dma_xilinx_axi_dma_configure()
931 if (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM) { in dma_xilinx_axi_dma_configure()
932 data->channels[channel].descriptors = descriptors_tx; in dma_xilinx_axi_dma_configure()
933 data->channels[channel].num_descriptors = ARRAY_SIZE(descriptors_tx); in dma_xilinx_axi_dma_configure()
935 data->channels[channel].channel_regs = &regs->mm2s_registers; in dma_xilinx_axi_dma_configure()
937 data->channels[channel].descriptors = descriptors_rx; in dma_xilinx_axi_dma_configure()
938 data->channels[channel].num_descriptors = ARRAY_SIZE(descriptors_rx); in dma_xilinx_axi_dma_configure()
940 data->channels[channel].channel_regs = &regs->s2mm_registers; in dma_xilinx_axi_dma_configure()
952 dma_xilinx_axi_dma_write_reg(&data->channels[channel].channel_regs->dmacr, in dma_xilinx_axi_dma_configure()
958 LOG_DBG("Configuring %zu DMA descriptors for %s", data->channels[channel].num_descriptors, in dma_xilinx_axi_dma_configure()
959 channel == XILINX_AXI_DMA_TX_CHANNEL_NUM ? "TX" : "RX"); in dma_xilinx_axi_dma_configure()
963 data->channels[channel].current_transfer_start_index = in dma_xilinx_axi_dma_configure()
964 data->channels[channel].current_transfer_end_index = 0; in dma_xilinx_axi_dma_configure()
965 for (int i = 0; i < data->channels[channel].num_descriptors; i++) { in dma_xilinx_axi_dma_configure()
971 if (i + 1 < data->channels[channel].num_descriptors) { in dma_xilinx_axi_dma_configure()
972 nextdesc = (uintptr_t)&data->channels[channel].descriptors[i + 1]; in dma_xilinx_axi_dma_configure()
974 nextdesc = (uintptr_t)&data->channels[channel].descriptors[0]; in dma_xilinx_axi_dma_configure()
984 data->channels[channel].descriptors[i].nxtdesc = low_bytes; in dma_xilinx_axi_dma_configure()
988 data->channels[channel].descriptors[i].nxtdesc_msb = high_bytes; in dma_xilinx_axi_dma_configure()
994 data->channels[channel].check_csum_in_isr = false; in dma_xilinx_axi_dma_configure()
1001 if (channel == XILINX_AXI_DMA_TX_CHANNEL_NUM) { in dma_xilinx_axi_dma_configure()
1004 data->channels[channel].sg_desc_app0 = in dma_xilinx_axi_dma_configure()
1010 data->channels[channel].check_csum_in_isr = true; in dma_xilinx_axi_dma_configure()
1014 data->channels[channel].sg_desc_app0 = in dma_xilinx_axi_dma_configure()
1025 data->channels[channel].completion_callback = dma_cfg->dma_callback; in dma_xilinx_axi_dma_configure()
1026 data->channels[channel].completion_callback_user_data = dma_cfg->user_data; in dma_xilinx_axi_dma_configure()
1032 dma_xilinx_axi_dma_transfer_block(cfg, channel, &data->channels[channel], in dma_xilinx_axi_dma_configure()
1033 channel == XILINX_AXI_DMA_TX_CHANNEL_NUM in dma_xilinx_axi_dma_configure()
1041 k_timer_start(&data->channels[channel].polling_timer, in dma_xilinx_axi_dma_configure()
1048 static bool dma_xilinx_axi_dma_chan_filter(const struct device *dev, int channel, in dma_xilinx_axi_dma_chan_filter() argument
1054 return channel == XILINX_AXI_DMA_TX_CHANNEL_NUM; in dma_xilinx_axi_dma_chan_filter()
1057 return channel == XILINX_AXI_DMA_RX_CHANNEL_NUM; in dma_xilinx_axi_dma_chan_filter()
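
The references above span the driver's Zephyr DMA API entry points (configure, start, stop, get_status, reload, chan_filter) and show that every one of them bounds-checks the channel argument and distinguishes the TX (MM2S) and RX (S2MM) channels. A minimal caller-side sketch follows, using only the generic Zephyr DMA API (dma_config()/dma_start()); the devicetree label axi_dma_0, the buffer size, and the literal TX channel number 0 (standing in for XILINX_AXI_DMA_TX_CHANNEL_NUM from the driver's public header) are assumptions, not taken from this source.

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>

#define AXI_DMA_TX_CHANNEL 0 /* assumed value of XILINX_AXI_DMA_TX_CHANNEL_NUM */

static uint8_t tx_buf[256];

/* Completion callback; the driver invokes it from its TX ISR path. */
static void tx_done(const struct device *dev, void *user_data,
		    uint32_t channel, int status)
{
	ARG_UNUSED(dev);
	ARG_UNUSED(user_data);
	ARG_UNUSED(channel);
	ARG_UNUSED(status);
}

int start_tx_transfer(void)
{
	/* Hypothetical devicetree node label for the AXI DMA instance. */
	const struct device *dma = DEVICE_DT_GET(DT_NODELABEL(axi_dma_0));
	struct dma_block_config block = {
		.source_address = (uintptr_t)tx_buf,
		.block_size = sizeof(tx_buf),
	};
	struct dma_config cfg = {
		.channel_direction = MEMORY_TO_PERIPHERAL, /* TX / MM2S side */
		.block_count = 1,
		.head_block = &block,
		.dma_callback = tx_done,
	};
	int ret;

	/* dma_xilinx_axi_dma_configure() rejects channel >= cfg->channels. */
	ret = dma_config(dma, AXI_DMA_TX_CHANNEL, &cfg);
	if (ret < 0) {
		return ret;
	}

	/* dma_xilinx_axi_dma_start() then kicks off the descriptor ring. */
	return dma_start(dma, AXI_DMA_TX_CHANNEL);
}

The RX (S2MM) side would use the corresponding RX channel number with PERIPHERAL_TO_MEMORY; dma_stop() and dma_get_status() map onto dma_xilinx_axi_dma_stop() and dma_xilinx_axi_dma_get_status() with the same channel bounds check seen in the listing.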