Lines matching "girq-id"
 * SPDX-License-Identifier: Apache-2.0

#include <zephyr/dt-bindings/interrupt-controller/mchp-xec-ecia.h>
uint8_t gid;  /* GIRQ id [8, 26] */
uint8_t gpos; /* bit position in GIRQ [0, 31] */
/* in xec_chan_irq_info() */
return &devcfg->irq_info_list[channel];
/* in is_data_aligned() */
if ((src | dest) & (unitsz - 1U)) {
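This test rejects the pair when either address is misaligned with respect to the (power-of-two) unit size: OR-ing the two addresses merges their low bits, and masking with unitsz - 1 exposes any bit below the alignment boundary. A minimal standalone sketch of the same idea; the helper name and scaffolding are illustrative, not from the driver:

#include <stdbool.h>
#include <stdint.h>

/* True if both addresses are aligned to unitsz, where unitsz is a
 * power of two (1, 2, or 4 bytes for this DMA block).
 */
static bool both_aligned(uint32_t src, uint32_t dest, uint32_t unitsz)
{
	/* A set low bit in either address survives the OR and is
	 * caught by the mask (unitsz - 1).
	 */
	return ((src | dest) & (unitsz - 1U)) == 0U;
}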
/* in xec_dma_chan_clr() */
chregs->actv = 0;
chregs->control = 0;
chregs->mem_addr = 0;
chregs->mem_addr_end = 0;
chregs->dev_addr = 0;
chregs->control = 0;
chregs->ienable = 0;
chregs->istatus = 0xffu;
mchp_xec_ecia_girq_src_clr(info->gid, info->gpos);
/* in is_dma_config_valid() */
const struct dma_xec_config * const devcfg = dev->config;

if (config->dma_slot >= (uint32_t)devcfg->dma_requests) {

if (config->source_data_size != config->dest_data_size) {

if (!((config->channel_direction == MEMORY_TO_MEMORY) ||
      (config->channel_direction == MEMORY_TO_PERIPHERAL) ||
      (config->channel_direction == PERIPHERAL_TO_MEMORY))) {

if (!is_dma_data_size_valid(config->source_data_size)) {

if (config->block_count != 1) {
/* in check_blocks() */
return -EINVAL;

chdata->total_req_xfr_len = 0;

if ((block->source_addr_adj == DMA_ADDR_ADJ_DECREMENT) ||
    (block->dest_addr_adj == DMA_ADDR_ADJ_DECREMENT)) {
	return -EINVAL;

if (!is_data_aligned(block->source_address, block->dest_address, unit_size)) {
	return -EINVAL;

chdata->total_req_xfr_len += block->block_size;
/*
 * struct dma_config members:
 * dma_slot - peripheral source/target ID. Not used for Mem2Mem.
 * channel_direction - HW supports Mem2Mem, Mem2Periph, and Periph2Mem.
 * complete_callback_en - if true, invoke callback on completion (no error).
 * error_callback_dis - if true, disable callback on error.
 * source_handshake - 0=HW, 1=SW
 * dest_handshake - 0=HW, 1=SW
 * channel_priority - 4-bit field. HW implements round-robin only.
 * source_chaining_en - chaining channels together; HW does not support it.
 * dest_chaining_en - HW does not support channel chaining.
 * linked_channel - HW does not support.
 * cyclic - HW does not support cyclic buffers; would have to emulate in SW.
 * source_data_size - unit size of source data. HW supports 1, 2, or 4 bytes.
 * dest_data_size - unit size of dest data. HW requires same as source_data_size.
 * source_burst_length - HW does not support.
 * dest_burst_length - HW does not support.
 * block_count - number of blocks; this driver requires exactly one.
 * user_data - passed back to the application in the callback.
 * dma_callback - invoked from the ISR on completion or error.
 * head_block - pointer to struct dma_block_config
 *
 * struct dma_block_config members:
 * source_address - block source address
 * source_gather_interval - N/A
 * dest_address - block destination address
 * dest_scatter_interval - N/A
 * dest_scatter_count - N/A
 * source_gather_count - N/A
 *
 * config - flags:
 * source_gather_en - N/A
 * dest_scatter_en - N/A
 * source_addr_adj - 0 (increment), 1 (decrement), 2 (no change)
 * dest_addr_adj - 0 (increment), 1 (decrement), 2 (no change)
 * source_reload_en - reload source address at end of block
 * dest_reload_en - reload destination address at end of block
 * fifo_mode_control - N/A
 * flow_control_mode - 0 (source requests service when data is available); this is the HW behavior.
 */
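Taken together, these constraints imply a configuration shape like the sketch below for a single-block memory-to-memory transfer: equal source/dest data sizes, a block_count of 1, incrementing addresses on both ends. This is a hedged example, not driver source; the buffers, wrapper name, channel number, and device handle are illustrative, and it assumes the controller device was obtained elsewhere (e.g. from devicetree).

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>

static uint8_t src_buf[64] __aligned(4);
static uint8_t dst_buf[64] __aligned(4);

static void xfer_done_cb(const struct device *dev, void *user_data,
			 uint32_t channel, int status)
{
	/* status is 0 on clean completion, negative on error */
}

int start_mem2mem(const struct device *dma_dev, uint32_t channel)
{
	struct dma_block_config blk = {
		.source_address = (uint32_t)src_buf,
		.dest_address = (uint32_t)dst_buf,
		.block_size = sizeof(src_buf),
		.source_addr_adj = DMA_ADDR_ADJ_INCREMENT,
		.dest_addr_adj = DMA_ADDR_ADJ_INCREMENT,
	};
	struct dma_config cfg = {
		.channel_direction = MEMORY_TO_MEMORY,
		.source_data_size = 4,	/* HW supports 1, 2, or 4 bytes */
		.dest_data_size = 4,	/* must equal source_data_size */
		.block_count = 1,	/* this driver requires exactly one block */
		.head_block = &blk,
		.complete_callback_en = 1,
		.dma_callback = xfer_done_cb,
	};
	int ret = dma_config(dma_dev, channel, &cfg);

	if (ret) {
		return ret;
	}
	return dma_start(dma_dev, channel);
}

Note the __aligned(4) on the buffers: with 4-byte units, is_data_aligned() rejects any source/destination pair that is not 4-byte aligned.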
/* in dma_xec_configure() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_regs * const regs = devcfg->regs;
struct dma_xec_data * const data = dev->data;

if (!config || (channel >= (uint32_t)devcfg->dma_channels)) {
	return -EINVAL;

struct dma_xec_channel *chdata = &data->channels[channel];

chdata->total_req_xfr_len = 0;
chdata->total_curr_xfr_len = 0;

	return -EINVAL;

struct dma_block_config *block = config->head_block;

ret = check_blocks(chdata, block, config->block_count, config->source_data_size);

unit_size = config->source_data_size;
chdata->unit_size = unit_size;
chdata->head = block;
chdata->curr = block;
chdata->block_count = config->block_count;
chdata->dir = config->channel_direction;

chdata->flags = 0;
chdata->cb = config->dma_callback;
chdata->user_data = config->user_data;

if (config->complete_callback_en) {
	chdata->flags |= BIT(DMA_XEC_CHAN_FLAGS_CB_EOB_POS);
if (config->error_callback_dis) { /* disable callback on errors? */
	chdata->flags |= BIT(DMA_XEC_CHAN_FLAGS_CB_ERR_DIS_POS);

if (config->channel_direction == MEMORY_TO_MEMORY) {

	ctrl |= XEC_DMA_HWFL_DEV_VAL(config->dma_slot);

if (config->channel_direction == PERIPHERAL_TO_MEMORY) {
	mstart = block->dest_address;
	mend = block->dest_address + block->block_size;
	dstart = block->source_address;
	if (block->source_addr_adj == DMA_ADDR_ADJ_INCREMENT) {
	if (block->dest_addr_adj == DMA_ADDR_ADJ_INCREMENT) {

	mstart = block->source_address;
	mend = block->source_address + block->block_size;
	dstart = block->dest_address;

	if (block->source_addr_adj == DMA_ADDR_ADJ_INCREMENT) {
	if (block->dest_addr_adj == DMA_ADDR_ADJ_INCREMENT) {

chdata->control = ctrl;
chdata->mstart = mstart;
chdata->mend = mend;
chdata->dstart = dstart;

chregs->actv &= ~BIT(XEC_DMA_CHAN_ACTV_EN_POS);
chregs->mem_addr = mstart;
chregs->mem_addr_end = mend;
chregs->dev_addr = dstart;

chregs->control = ctrl;
chregs->ienable = BIT(XEC_DMA_CHAN_IES_BERR_POS) | BIT(XEC_DMA_CHAN_IES_DONE_POS);
chregs->actv |= BIT(XEC_DMA_CHAN_ACTV_EN_POS);
/* in dma_xec_reload() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_data * const data = dev->data;
struct dma_xec_regs * const regs = devcfg->regs;

if (channel >= (uint32_t)devcfg->dma_channels) {
	return -EINVAL;

struct dma_xec_channel *chdata = &data->channels[channel];

if (chregs->control & BIT(XEC_DMA_CHAN_CTRL_BUSY_POS)) {
	return -EBUSY;

ctrl = chregs->control & ~(BIT(XEC_DMA_CHAN_CTRL_HWFL_RUN_POS)
chregs->ienable = 0;
chregs->control = 0;
chregs->istatus = 0xffu;

	chdata->mstart = src;
	chdata->dstart = dst;

	chdata->mstart = dst;
	chdata->dstart = src;

chdata->mend = chdata->mstart + size;
chdata->total_req_xfr_len = size;
chdata->total_curr_xfr_len = 0;

chregs->mem_addr = chdata->mstart;
chregs->mem_addr_end = chdata->mend;
chregs->dev_addr = chdata->dstart;
chregs->control = ctrl;
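At the Zephyr API level this path is entered through dma_reload(), which keeps the control settings from the last dma_config() call and only swaps addresses and length. A hedged usage sketch; the wrapper and its arguments are illustrative:

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>

/* Re-arm a previously configured channel with fresh addresses. */
int rearm(const struct device *dma_dev, uint32_t channel,
	  uint32_t src, uint32_t dst, size_t len)
{
	int ret = dma_reload(dma_dev, channel, src, dst, len);

	if (ret) {
		return ret; /* -EBUSY while the channel is still running */
	}
	return dma_start(dma_dev, channel);
}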
/* in dma_xec_start() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_regs * const regs = devcfg->regs;

if (channel >= (uint32_t)devcfg->dma_channels) {
	return -EINVAL;

if (chregs->control & BIT(XEC_DMA_CHAN_CTRL_BUSY_POS)) {
	return -EBUSY;

chregs->ienable = 0u;
chregs->istatus = 0xffu;
chan_ctrl = chregs->control;

chregs->ienable = BIT(XEC_DMA_CHAN_IES_BERR_POS) | BIT(XEC_DMA_CHAN_IES_DONE_POS);
chregs->control = chan_ctrl;
chregs->actv |= BIT(XEC_DMA_CHAN_ACTV_EN_POS);
/* in dma_xec_stop() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_regs * const regs = devcfg->regs;

if (channel >= (uint32_t)devcfg->dma_channels) {
	return -EINVAL;

chregs->ienable = 0;

if (chregs->control & BIT(XEC_DMA_CHAN_CTRL_BUSY_POS)) {
	chregs->ienable = 0;
	chregs->control |= BIT(XEC_DMA_CHAN_CTRL_ABORT_POS);

	if (!(chregs->control & BIT(XEC_DMA_CHAN_CTRL_BUSY_POS))) {

} while (wait_loops--);

chregs->mem_addr = chregs->mem_addr_end;
chregs->fsm = 0; /* delay */
chregs->control = 0;
chregs->istatus = 0xffu;
chregs->actv = 0;
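The stop sequence above uses a bounded spin: request an abort, then poll the BUSY bit for a fixed number of iterations rather than waiting indefinitely. A generic sketch of that pattern; the loop bound and helper are illustrative, not the driver's values:

#include <stdbool.h>
#include <stdint.h>

#define ABORT_WAIT_LOOPS 64u /* illustrative bound */

/* Poll busy_fn() until it reports idle or the bound expires;
 * returns true on idle, false on timeout.
 */
static bool wait_until_idle(bool (*busy_fn)(void))
{
	uint32_t wait_loops = ABORT_WAIT_LOOPS;

	do {
		if (!busy_fn()) {
			return true;
		}
	} while (wait_loops--);

	return false;
}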
/*
 * struct dma_status members:
 * busy - is the current DMA transfer busy or idle?
 * dir - DMA transfer direction
 * pending_length - data length pending transfer, in bytes
 *
 * free - free buffer space
 * write_position - write position in a circular DMA buffer
 * read_position - read position in a circular DMA buffer
 */
/* in dma_xec_get_status() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_data * const data = dev->data;
struct dma_xec_regs * const regs = devcfg->regs;

if ((channel >= (uint32_t)devcfg->dma_channels) || (!status)) {
	return -EINVAL;

struct dma_xec_channel *chan_data = &data->channels[channel];

chan_ctrl = chregs->control;

	status->busy = true;

	status->pending_length = chan_data->total_req_xfr_len -
				 (chregs->mem_addr_end - chregs->mem_addr);

	status->pending_length = chan_data->total_req_xfr_len -
				 chan_data->total_curr_xfr_len;
	status->busy = false;

	status->dir = MEMORY_TO_MEMORY;
	status->dir = MEMORY_TO_PERIPHERAL;
	status->dir = PERIPHERAL_TO_MEMORY;

status->total_copied = chan_data->total_curr_xfr_len;
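Applications reach this through dma_get_status(); a short hedged sketch of polling progress (the wrapper name is illustrative):

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>

/* Return the bytes not yet transferred on a channel, or <0 on error. */
int bytes_remaining(const struct device *dma_dev, uint32_t channel)
{
	struct dma_status st;
	int ret = dma_get_status(dma_dev, channel, &st);

	if (ret) {
		return ret; /* -EINVAL for a bad channel or NULL status */
	}
	/* st.busy says whether the channel is still running;
	 * st.pending_length counts bytes not yet transferred.
	 */
	return (int)st.pending_length;
}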
/* in xec_dma_get_attribute() */
return -EINVAL;
/* in dma_xec_chan_filter() */
const struct dma_xec_config * const devcfg = dev->config;

if (!filter_param && devcfg->dma_channels) {
	filter = GENMASK(devcfg->dma_channels - 1U, 0);
/* API - HW does not support suspend/resume */

/* TODO - DMA block has one PCR SLP_EN and one CLK_REQ.
/* in dmac_xec_pm_action() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_regs * const regs = devcfg->regs;

	regs->mctrl |= BIT(XEC_DMA_MAIN_CTRL_EN_POS);

	/* regs->mctrl &= ~BIT(XEC_DMA_MAIN_CTRL_EN_POS); */

	ret = -ENOTSUP;
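This hook is driven by Zephyr's device power-management core. A hedged sketch of invoking it from application code, assuming CONFIG_PM_DEVICE is enabled; the wrapper name is illustrative:

#include <zephyr/device.h>
#include <zephyr/pm/device.h>

/* Ask the PM core to run the driver's suspend or resume action;
 * dmac_xec_pm_action() returns -ENOTSUP for anything else.
 */
int dma_pm_set(const struct device *dma_dev, bool suspend)
{
	enum pm_device_action act = suspend ? PM_DEVICE_ACTION_SUSPEND
					    : PM_DEVICE_ACTION_RESUME;

	return pm_device_action_run(dma_dev, act);
}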
/* in dma_xec_irq_handler() */
const struct dma_xec_config * const devcfg = dev->config;
const struct dma_xec_irq_info *info = devcfg->irq_info_list;
struct dma_xec_data * const data = dev->data;
struct dma_xec_channel *chan_data = &data->channels[channel];
struct dma_xec_chan_regs * const regs = xec_chan_regs(devcfg->regs, channel);
uint32_t sts = regs->istatus;

	channel_isr_ctrl[channel][idx] = regs->control;

LOG_DBG("maddr=0x%08x mend=0x%08x daddr=0x%08x ctrl=0x%08x sts=0x%02x", regs->mem_addr,
	regs->mem_addr_end, regs->dev_addr, regs->control, sts);

regs->ienable = 0u;
regs->istatus = 0xffu;

chan_data->isr_hw_status = sts;
chan_data->total_curr_xfr_len += (regs->mem_addr - chan_data->mstart);

if (!(chan_data->flags & BIT(DMA_XEC_CHAN_FLAGS_CB_ERR_DIS_POS))) {
	cb_status = -EIO;

if (chan_data->cb) {
	chan_data->cb(dev, chan_data->user_data, channel, cb_status);
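The ISR reports the result through the standard dma_callback_t signature: status is 0 on a clean completion and -EIO on a bus error, unless error callbacks were disabled via error_callback_dis. A hedged sketch of a matching callback; the semaphore and names are illustrative:

#include <zephyr/device.h>
#include <zephyr/drivers/dma.h>
#include <zephyr/kernel.h>

static K_SEM_DEFINE(xfer_sem, 0, 1);

/* Matches dma_callback_t; runs in ISR context, so keep it short. */
static void dma_done(const struct device *dev, void *user_data,
		     uint32_t channel, int status)
{
	/* status == -EIO maps to a bus error flagged on the channel */
	k_sem_give(&xfer_sem);
}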
/* in dma_xec_init() */
const struct dma_xec_config * const devcfg = dev->config;
struct dma_xec_regs * const regs = devcfg->regs;

z_mchp_xec_pcr_periph_sleep(devcfg->pcr_idx, devcfg->pcr_pos, 0);

/* soft reset, self-clearing */
regs->mctrl = BIT(XEC_DMA_MAIN_CTRL_SRST_POS);
regs->mpkt = 0u; /* I/O delay, write to read-only register */
regs->mctrl = BIT(XEC_DMA_MAIN_CTRL_EN_POS);

devcfg->irq_connect();
/* n = node-id, p = property, i = index */

/* n = node-id, p = property, i = index (channel?) */
BUILD_ASSERT(DT_INST_PROP(i, dma_channels) <= 16, "XEC DMA dma-channels > 16"); \
BUILD_ASSERT(DT_INST_PROP(i, dma_requests) <= 16, "XEC DMA dma-requests > 16"); \