Lines matching +full:pd +full:- +full:disable
(Each entry below is one matched line from what appears to be Zephyr's Broadcom iProc PAX v2 DMA driver, prefixed with its line number in the source file and suffixed with the enclosing function.)
4 * SPDX-License-Identifier: Apache-2.0
36 return ring->pkt_id = 0x0; in reset_pkt_id()
41 ring->pkt_id = (ring->pkt_id + 1) % 32; in alloc_pkt_id()
42 return ring->pkt_id; in alloc_pkt_id()
47 return ring->pkt_id; in curr_pkt_id()
52 return ring->curr.toggle; in curr_toggle_val()
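Note: the helpers above (reset_pkt_id/alloc_pkt_id/curr_pkt_id/curr_toggle_val) form a small opaque-ID allocator: each submitted packet is stamped with a modulo-32 ID so its completion can be matched later. A minimal sketch of the same pattern, with simplified types rather than the driver's exact structs:

#include <stdint.h>

struct ring_state {
	uint32_t pkt_id;   /* opaque ID stamped into each submitted packet */
	uint32_t toggle;   /* current descriptor-validity toggle */
};

/* Restart the allocator at zero so software and hardware agree. */
static inline uint32_t reset_pkt_id(struct ring_state *r)
{
	return r->pkt_id = 0;
}

/* Hand out the next ID; wraps at 32 to fit the descriptor's opaque field. */
static inline uint32_t alloc_pkt_id(struct ring_state *r)
{
	r->pkt_id = (r->pkt_id + 1) % 32;
	return r->pkt_id;
}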
64 r->opq = opq; in rm_write_header_desc()
65 r->bdf = 0x0; in rm_write_header_desc()
66 r->res1 = 0x0; in rm_write_header_desc()
68 r->bdcount = bdcount; in rm_write_header_desc()
69 r->prot = 0x0; in rm_write_header_desc()
70 r->res2 = 0x0; in rm_write_header_desc()
72 r->start = 1; in rm_write_header_desc()
73 r->end = 1; in rm_write_header_desc()
75 r->type = PAX_DMA_TYPE_RM_HEADER; in rm_write_header_desc()
76 r->pcie_addr_msb = PAX_DMA_PCI_ADDR_HI_MSB8(pci_addr); in rm_write_header_desc()
77 r->res3 = 0x0; in rm_write_header_desc()
78 r->res4 = 0x0; in rm_write_header_desc()
80 r->toggle = toggle; in rm_write_header_desc()
82 r->toggle = 0; in rm_write_header_desc()
95 pcie->pcie_addr_lsb = pci_addr; in rm_write_pcie_desc()
96 pcie->res1 = 0x0; in rm_write_pcie_desc()
98 pcie->type = PAX_DMA_TYPE_PCIE_DESC; in rm_write_pcie_desc()
100 pcie->toggle = toggle; in rm_write_pcie_desc()
102 pcie->toggle = 0; in rm_write_pcie_desc()
119 desc->axi_addr = axi_addr; in rm_write_src_dst_desc()
120 desc->length = size; in rm_write_src_dst_desc()
122 desc->toggle = toggle; in rm_write_src_dst_desc()
124 desc->toggle = 0; in rm_write_src_dst_desc()
128 desc->type = is_mega ? in rm_write_src_dst_desc()
131 desc->type = is_mega ? in rm_write_src_dst_desc()
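Note: all three descriptor writers follow the same shape: zero the reserved fields, fill the payload, set the type tag, and write the toggle bit that marks the BD valid. A condensed sketch of a source/destination descriptor writer; the field layout and type values below are illustrative, the real ones are hardware-defined:

#include <stdint.h>
#include <stdbool.h>

/* Illustrative layout; the real BDs are packed hardware bitfields. */
struct xfer_desc {
	uint64_t axi_addr;
	uint32_t length;
	uint32_t type;
	uint32_t toggle;
};

/* Placeholder type tags; the real values live in the driver header. */
#define TYPE_SRC       0x1
#define TYPE_DST       0x2
#define TYPE_MEGA_SRC  0x3
#define TYPE_MEGA_DST  0x4

static void write_xfer_desc(struct xfer_desc *d, bool is_mega, bool is_src,
			    uint32_t toggle, uint64_t axi_addr, uint32_t size)
{
	d->axi_addr = axi_addr;
	d->length = size;
	/* "mega" descriptors cover the larger per-BD transfer size */
	d->type = is_mega ? (is_src ? TYPE_MEGA_SRC : TYPE_MEGA_DST)
			  : (is_src ? TYPE_SRC : TYPE_DST);
	/* toggle last: this is what flips the BD from stale to valid */
	d->toggle = toggle;
}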
141 r->toggle = toggle; in init_toggle()
154 curr = (uintptr_t)ring->curr.write_ptr; in get_curr_desc_addr()
157 if (nxt->type == PAX_DMA_TYPE_NEXT_PTR) { in get_curr_desc_addr()
159 curr, nxt->toggle, (uintptr_t)nxt->addr); in get_curr_desc_addr()
160 uintptr_t last = (uintptr_t)ring->bd + in get_curr_desc_addr()
162 nxt->toggle = ring->curr.toggle; in get_curr_desc_addr()
163 ring->curr.toggle = (ring->curr.toggle == 0) ? 1 : 0; in get_curr_desc_addr()
167 curr = (uintptr_t)ring->bd; in get_curr_desc_addr()
171 ring->descs_inflight++; in get_curr_desc_addr()
174 ring->curr.write_ptr = (void *)(curr + PAX_DMA_RM_DESC_BDWIDTH); in get_curr_desc_addr()
175 ring->descs_inflight++; in get_curr_desc_addr()
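Note: get_curr_desc_addr() is the heart of the multi-buffer ring. When the write pointer lands on a next-pointer BD it hands the current toggle to that BD, flips the ring-wide toggle, and follows the chain (wrapping to the first buffer at the end), counting the jump BD as in-flight. A sketch of that wrap logic with simplified types:

#include <stdint.h>

#define TYPE_NEXT_PTR 0xF   /* placeholder tag */
#define BD_WIDTH      8     /* placeholder BD stride */

struct next_ptr_desc { uint64_t addr; uint32_t type; uint32_t toggle; };

struct bd_ring {
	void *write_ptr;
	uint32_t toggle;
	uint32_t descs_inflight;
};

static void *advance_write_ptr(struct bd_ring *r)
{
	uintptr_t curr = (uintptr_t)r->write_ptr;
	struct next_ptr_desc *nxt = (struct next_ptr_desc *)curr;

	if (nxt->type == TYPE_NEXT_PTR) {
		/* Give the jump BD the live toggle, then flip the ring
		 * toggle so BDs left in the next buffer from the previous
		 * lap read as stale.
		 */
		nxt->toggle = r->toggle;
		r->toggle ^= 1;
		curr = (uintptr_t)nxt->addr;   /* follow the chain */
		r->descs_inflight++;           /* the jump BD counts too */
	}

	r->write_ptr = (void *)(curr + BD_WIDTH);
	r->descs_inflight++;
	return (void *)curr;
}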
188 nxt->addr = (uintptr_t)next_ptr; in rm_write_next_table_desc()
189 nxt->type = PAX_DMA_TYPE_NEXT_PTR; in rm_write_next_table_desc()
190 nxt->toggle = toggle; in rm_write_next_table_desc()
202 memset(ring->bd, 0x0, PAX_DMA_RM_DESC_RING_SIZE * PAX_DMA_NUM_BD_BUFFS); in prepare_ring()
203 memset(ring->cmpl, 0x0, PAX_DMA_RM_CMPL_RING_SIZE); in prepare_ring()
209 curr = (uintptr_t)ring->bd; in prepare_ring()
232 next = (uintptr_t)ring->bd; in prepare_ring()
235 } while (--buff_count); in prepare_ring()
240 ring->curr.write_ptr = ring->bd; in prepare_ring()
242 ring->curr.toggle = 1; in prepare_ring()
244 ring->curr.cmpl_rd_offs = 0; in prepare_ring()
246 ring->descs_inflight = 0; in prepare_ring()
249 ring->curr.sync_data.signature = PAX_DMA_WRITE_SYNC_SIGNATURE; in prepare_ring()
250 ring->curr.sync_data.ring = ring->idx; in prepare_ring()
252 ring->curr.sync_data.opaque = 0x0; in prepare_ring()
254 ring->curr.sync_data.total_pkts = 0x0; in prepare_ring()
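Note: prepare_ring() zeroes the BD and completion areas, links the BD buffers into a circle with next-table descriptors (the last buffer points back to the first), and resets the software state: write pointer at the first BD, toggle 1, completion read offset 0, and the host-visible sync record seeded with a signature and the ring index. A sketch of the chaining loop, reusing the next_ptr_desc type from the previous sketch; the buffer count and sizes are placeholders:

#define NUM_BD_BUFFS 2
#define BD_RING_SIZE 4096

static void write_next_table_desc(struct next_ptr_desc *nxt, void *next,
				  uint32_t toggle)
{
	nxt->addr = (uintptr_t)next;
	nxt->type = TYPE_NEXT_PTR;
	nxt->toggle = toggle;
}

static void chain_bd_buffs(uint8_t *bd, uint32_t toggle)
{
	int buff_count = NUM_BD_BUFFS;
	uintptr_t curr = (uintptr_t)bd;

	do {
		/* The last BD slot of each buffer jumps to the next buffer;
		 * the final buffer wraps back to the first.
		 */
		uintptr_t next = (buff_count == 1) ? (uintptr_t)bd
						   : curr + BD_RING_SIZE;
		write_next_table_desc(
			(struct next_ptr_desc *)(curr + BD_RING_SIZE - BD_WIDTH),
			(void *)next, toggle);
		curr += BD_RING_SIZE;
	} while (--buff_count);
}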
257 static int init_rm(struct dma_iproc_pax_data *pd) in init_rm() argument
259 int ret = -ETIMEDOUT, timeout = 1000; in init_rm()
261 k_mutex_lock(&pd->dma_lock, K_FOREVER); in init_rm()
265 if ((sys_read32(RM_COMM_REG(pd, RM_COMM_MAIN_HW_INIT_DONE)) & in init_rm()
271 } while (--timeout); in init_rm()
272 k_mutex_unlock(&pd->dma_lock); in init_rm()
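Note: init_rm() is a bounded poll: under the DMA mutex it spins on the MAIN_HW_INIT_DONE status until the bit sets or 1000 iterations elapse, returning -ETIMEDOUT on failure. The same pattern in isolation, assuming Zephyr's sys_read32() and a placeholder register/mask:

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/sys/sys_io.h>

static int wait_hw_init_done(mem_addr_t status_reg, uint32_t done_mask)
{
	int timeout = 1000;

	do {
		if (sys_read32(status_reg) & done_mask) {
			return 0;
		}
		k_busy_wait(1);   /* illustrative delay between polls */
	} while (--timeout);

	return -ETIMEDOUT;   /* engine never reported init done */
}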
283 static void rm_cfg_start(struct dma_iproc_pax_data *pd) in rm_cfg_start() argument
287 k_mutex_lock(&pd->dma_lock, K_FOREVER); in rm_cfg_start()
290 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
292 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
303 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
305 RM_COMM_REG(pd, RM_COMM_MSI_DISABLE)); in rm_cfg_start()
307 val = sys_read32(RM_COMM_REG(pd, RM_COMM_AXI_READ_BURST_THRESHOLD)); in rm_cfg_start()
312 sys_write32(val, RM_COMM_REG(pd, RM_COMM_AXI_READ_BURST_THRESHOLD)); in rm_cfg_start()
314 val = sys_read32(RM_COMM_REG(pd, RM_COMM_FIFO_FULL_THRESHOLD)); in rm_cfg_start()
324 sys_write32(val, RM_COMM_REG(pd, RM_COMM_FIFO_FULL_THRESHOLD)); in rm_cfg_start()
327 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
329 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
333 RM_COMM_REG(pd, RM_COMM_AE_TIMEOUT)); in rm_cfg_start()
334 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
336 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_start()
339 val = sys_read32(RM_COMM_REG(pd, RM_AE0_AE_CONTROL)); in rm_cfg_start()
341 sys_write32(val, RM_COMM_REG(pd, RM_AE0_AE_CONTROL)); in rm_cfg_start()
343 sys_write32(val, RM_COMM_REG(pd, RM_AE0_AE_CONTROL)); in rm_cfg_start()
346 val = sys_read32(RM_COMM_REG(pd, RM_COMM_AXI_CONTROL)); in rm_cfg_start()
348 sys_write32(val, RM_COMM_REG(pd, RM_COMM_AXI_CONTROL)); in rm_cfg_start()
352 RM_COMM_REG(pd, RM_COMM_TIMER_CONTROL_0)); in rm_cfg_start()
354 RM_COMM_REG(pd, RM_COMM_TIMER_CONTROL_1)); in rm_cfg_start()
355 val = sys_read32(RM_COMM_REG(pd, RM_COMM_BURST_LENGTH)); in rm_cfg_start()
360 sys_write32(val, RM_COMM_REG(pd, RM_COMM_BURST_LENGTH)); in rm_cfg_start()
362 val = sys_read32(RM_COMM_REG(pd, RM_COMM_BD_FETCH_MODE_CONTROL)); in rm_cfg_start()
365 sys_write32(val, RM_COMM_REG(pd, RM_COMM_BD_FETCH_MODE_CONTROL)); in rm_cfg_start()
368 val = sys_read32(RM_COMM_REG(pd, RM_COMM_MASK_SEQUENCE_MAX_COUNT)); in rm_cfg_start()
370 sys_write32(val, RM_COMM_REG(pd, RM_COMM_MASK_SEQUENCE_MAX_COUNT)); in rm_cfg_start()
372 k_mutex_unlock(&pd->dma_lock); in rm_cfg_start()
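Note: rm_cfg_start() is one long run of read-modify-write register updates (control mode, MSI disable, burst and FIFO thresholds, AE timeout, timers, BD fetch mode). The recurring pattern, expressed as a helper the driver effectively open-codes at each site:

#include <zephyr/sys/sys_io.h>

/* Read-modify-write: clear a field, set its new value, write back. */
static inline void reg_update(mem_addr_t reg, uint32_t clear, uint32_t set)
{
	uint32_t val = sys_read32(reg);

	val &= ~clear;
	val |= set;
	sys_write32(val, reg);
}

/* e.g. switching the engine into config mode before touching thresholds:
 *   reg_update(RM_COMM_REG(pd, RM_COMM_CONTROL), MODE_MASK, MODE_CONFIG);
 * (MODE_MASK / MODE_CONFIG stand in for the driver's field macros)
 */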
375 static void rm_ring_clear_stats(struct dma_iproc_pax_data *pd, in rm_ring_clear_stats() argument
379 sys_read32(RM_RING_REG(pd, idx, RING_NUM_REQ_RECV_LS)); in rm_ring_clear_stats()
380 sys_read32(RM_RING_REG(pd, idx, RING_NUM_REQ_RECV_MS)); in rm_ring_clear_stats()
381 sys_read32(RM_RING_REG(pd, idx, RING_NUM_REQ_TRANS_LS)); in rm_ring_clear_stats()
382 sys_read32(RM_RING_REG(pd, idx, RING_NUM_REQ_TRANS_MS)); in rm_ring_clear_stats()
383 sys_read32(RM_RING_REG(pd, idx, RING_NUM_REQ_OUTSTAND)); in rm_ring_clear_stats()
386 static void rm_cfg_finish(struct dma_iproc_pax_data *pd) in rm_cfg_finish() argument
390 k_mutex_lock(&pd->dma_lock, K_FOREVER); in rm_cfg_finish()
393 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_finish()
395 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in rm_cfg_finish()
397 k_mutex_unlock(&pd->dma_lock); in rm_cfg_finish()
400 static inline void write_doorbell(struct dma_iproc_pax_data *pd, in write_doorbell() argument
403 struct dma_iproc_pax_ring_data *ring = &(pd->ring[idx]); in write_doorbell()
405 sys_write32(ring->descs_inflight, in write_doorbell()
406 RM_RING_REG(pd, idx, RING_DOORBELL_BD_WRITE_COUNT)); in write_doorbell()
407 ring->descs_inflight = 0; in write_doorbell()
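Note: write_doorbell() publishes work in batches. descs_inflight counts every BD written since the last doorbell (including next-pointer jumps), and a single register write tells hardware how many new BDs to fetch. Sketch, with a placeholder register field:

#include <stdint.h>
#include <zephyr/sys/sys_io.h>

struct db_ring {
	uint32_t descs_inflight;   /* BDs written since the last doorbell */
	mem_addr_t doorbell_reg;   /* RING_DOORBELL_BD_WRITE_COUNT equivalent */
};

static inline void ring_doorbell(struct db_ring *r)
{
	sys_write32(r->descs_inflight, r->doorbell_reg);
	r->descs_inflight = 0;   /* start counting the next batch */
}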
410 static inline void set_ring_active(struct dma_iproc_pax_data *pd, in set_ring_active() argument
416 val = sys_read32(RM_RING_REG(pd, idx, RING_CONTROL)); in set_ring_active()
422 sys_write32(val, RM_RING_REG(pd, idx, RING_CONTROL)); in set_ring_active()
425 static int init_ring(struct dma_iproc_pax_data *pd, enum ring_idx idx) in init_ring() argument
428 uintptr_t desc = (uintptr_t)pd->ring[idx].bd; in init_ring()
429 uintptr_t cmpl = (uintptr_t)pd->ring[idx].cmpl; in init_ring()
432 k_mutex_lock(&pd->dma_lock, K_FOREVER); in init_ring()
435 sys_read32(RM_RING_REG(pd, idx, RING_CMPL_WRITE_PTR)); in init_ring()
438 sys_write32(0x0, RM_RING_REG(pd, idx, RING_CONTROL)); in init_ring()
441 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CONTROL)); in init_ring()
443 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in init_ring()
445 sys_write32(RING_CONTROL_FLUSH, RM_RING_REG(pd, idx, in init_ring()
448 if (sys_read32(RM_RING_REG(pd, idx, RING_FLUSH_DONE)) & RING_FLUSH_DONE_MASK) { in init_ring()
452 } while (--timeout); in init_ring()
456 ret = -ETIMEDOUT; in init_ring()
461 sys_write32(0x0, RM_RING_REG(pd, idx, RING_CONTROL)); in init_ring()
464 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CONTROL)); in init_ring()
466 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CONTROL)); in init_ring()
468 val = sys_read32(RM_COMM_REG(pd, RM_COMM_CTRL_REG(idx))); in init_ring()
470 sys_write32(val, RM_COMM_REG(pd, RM_COMM_CTRL_REG(idx))); in init_ring()
477 sys_write32(val, RM_RING_REG(pd, idx, RING_CMPL_WR_PTR_DDR_CONTROL)); in init_ring()
478 /* Disable Ring MSI Timeout */ in init_ring()
480 RM_RING_REG(pd, idx, RING_DISABLE_MSI_TIMEOUT)); in init_ring()
483 sys_write32((uint32_t)desc, RM_RING_REG(pd, idx, RING_BD_START_ADDR)); in init_ring()
484 sys_write32((uint32_t)cmpl, RM_RING_REG(pd, idx, RING_CMPL_START_ADDR)); in init_ring()
485 val = sys_read32(RM_RING_REG(pd, idx, RING_BD_READ_PTR)); in init_ring()
489 set_ring_active(pd, idx, false); in init_ring()
491 set_ring_active(pd, idx, true); in init_ring()
496 sys_write32(0x0, RM_RING_REG(pd, idx, in init_ring()
499 rm_ring_clear_stats(pd, idx); in init_ring()
501 k_mutex_unlock(&pd->dma_lock); in init_ring()
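Note: init_ring() quiesces before programming: flush the ring and poll FLUSH_DONE with a bounded retry, drop out of flush mode, then write the BD and completion base addresses and cycle the ring inactive/active. The flush handshake in isolation; register names are placeholders:

#include <errno.h>
#include <zephyr/kernel.h>
#include <zephyr/sys/sys_io.h>

static int ring_flush(mem_addr_t control_reg, mem_addr_t done_reg,
		      uint32_t flush_bit, uint32_t done_mask)
{
	int timeout = 1000;

	sys_write32(flush_bit, control_reg);
	do {
		if (sys_read32(done_reg) & done_mask) {
			sys_write32(0, control_reg);   /* leave flush mode */
			return 0;
		}
		k_busy_wait(1);
	} while (--timeout);

	return -ETIMEDOUT;
}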
509 const struct dma_iproc_pax_cfg *cfg = dev->config; in poll_on_write_sync()
517 sent = &(ring->curr.sync_data); in poll_on_write_sync()
520 pci32[0] = ring->sync_pci.addr_lo; in poll_on_write_sync()
521 pci32[1] = ring->sync_pci.addr_hi; in poll_on_write_sync()
525 ret = pcie_ep_xfer_data_memcpy(cfg->pcie_dev, pci_addr, in poll_on_write_sync()
531 ret = pcie_ep_xfer_data_memcpy(cfg->pcie_dev, pci_addr, in poll_on_write_sync()
540 } while (--timeout); in poll_on_write_sync()
543 LOG_ERR("[ring %d]: not recvd write sync!\n", ring->idx); in poll_on_write_sync()
544 ret = -ETIMEDOUT; in poll_on_write_sync()
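Note: poll_on_write_sync() confirms completion end-to-end: the engine DMA-writes a 4-byte sync record into host memory, and the driver reads it back over the PCIe EP interface (the pcie_ep_xfer_data_memcpy() calls above) until it matches the locally kept copy or the poll times out. A sketch with the PCIe read-back abstracted behind a hypothetical read_host_word() helper:

#include <errno.h>
#include <stdint.h>
#include <zephyr/kernel.h>

/* Hypothetical stand-in for the driver's PCIe read-back path. */
extern int read_host_word(uint64_t pci_addr, uint32_t *out);

static int poll_write_sync(uint64_t pci_addr, uint32_t expected)
{
	int timeout = 1000;
	uint32_t got;

	do {
		if (read_host_word(pci_addr, &got) == 0 && got == expected) {
			return 0;   /* host saw the sync write: transfer landed */
		}
		k_busy_wait(1);
	} while (--timeout);

	return -ETIMEDOUT;   /* the "not recvd write sync" case above */
}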
553 struct dma_iproc_pax_data *pd = dev->data; in process_cmpl_event() local
555 struct dma_iproc_pax_ring_data *ring = &(pd->ring[idx]); in process_cmpl_event()
560 rd_offs = ring->curr.cmpl_rd_offs; in process_cmpl_event()
562 wr_offs = sys_read32(RM_RING_REG(pd, idx, in process_cmpl_event()
566 ring->curr.cmpl_rd_offs = wr_offs; in process_cmpl_event()
579 c = (struct cmpl_pkt *)((uintptr_t)ring->cmpl + in process_cmpl_event()
583 idx, wr_offs, c->opq, c->rm_status, c->dma_status); in process_cmpl_event()
585 is_outstanding = sys_read32(RM_RING_REG(pd, idx, in process_cmpl_event()
587 if ((ring->curr.opq != c->opq) && (is_outstanding != 0)) { in process_cmpl_event()
589 idx, ring->curr.opq, c->opq, is_outstanding); in process_cmpl_event()
590 ret = -EIO; in process_cmpl_event()
593 if (c->rm_status == RM_COMPLETION_AE_TIMEOUT) { in process_cmpl_event()
595 idx, wr_offs, c->rm_status); in process_cmpl_event()
598 ret = -ETIMEDOUT; in process_cmpl_event()
601 if (ring->dma_callback) { in process_cmpl_event()
602 ring->dma_callback(dev, ring->callback_arg, idx, ret); in process_cmpl_event()
606 ring->total_pkt_count = 0; in process_cmpl_event()
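Note: process_cmpl_event() walks the completion ring from the cached read offset to the hardware write offset and maps each packet to a status: an opaque-ID mismatch while requests are still outstanding is -EIO, an RM_COMPLETION_AE_TIMEOUT status is -ETIMEDOUT, and the result is delivered through the registered callback. The status mapping in isolation; the timeout constant's value here is a placeholder:

#include <errno.h>
#include <stdint.h>

struct cmpl_pkt { uint32_t opq; uint32_t rm_status; uint32_t dma_status; };

#define RM_COMPLETION_AE_TIMEOUT 0x3   /* placeholder; real value is in the driver header */

static int cmpl_to_errno(const struct cmpl_pkt *c, uint32_t expected_opq,
			 uint32_t outstanding)
{
	if ((c->opq != expected_opq) && (outstanding != 0)) {
		return -EIO;        /* completion doesn't match the submitted packet */
	}
	if (c->rm_status == RM_COMPLETION_AE_TIMEOUT) {
		return -ETIMEDOUT;  /* engine timed out on this packet */
	}
	return 0;
}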
615 struct dma_iproc_pax_data *pd = dev->data; in peek_ring_cmpl() local
617 struct dma_iproc_pax_ring_data *ring = &(pd->ring[idx]); in peek_ring_cmpl()
620 rd_offs = ring->curr.cmpl_rd_offs; in peek_ring_cmpl()
624 wr_offs = sys_read32(RM_RING_REG(pd, idx, in peek_ring_cmpl()
630 } while (--timeout); in peek_ring_cmpl()
637 return -ETIMEDOUT; in peek_ring_cmpl()
646 struct dma_iproc_pax_data *pd = dev->data; in rm_isr() local
649 sys_read32(RM_COMM_REG(pd, in rm_isr()
652 RM_COMM_REG(pd, in rm_isr()
658 sys_read32(RM_RING_REG(pd, idx, in rm_isr()
661 RM_RING_REG(pd, idx, in rm_isr()
664 k_sem_give(&pd->ring[idx].alert); in rm_isr()
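Note: rm_isr() follows the ack-then-notify convention: read the common and per-ring interrupt status registers, write the values back to clear them, and give the per-ring semaphore that wait_for_pkt_completion() blocks on. Sketch, assuming write-back-to-clear semantics as the read/write pairs above suggest:

#include <zephyr/kernel.h>
#include <zephyr/sys/sys_io.h>

struct ring_irq {
	mem_addr_t status_reg;   /* per-ring MSI interrupt status */
	struct k_sem alert;      /* given to wake the waiting thread */
};

static void ring_isr(struct ring_irq *r)
{
	uint32_t status = sys_read32(r->status_reg);

	sys_write32(status, r->status_reg);   /* ack by writing status back */
	k_sem_give(&r->alert);
}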
672 const struct dma_iproc_pax_cfg *cfg = dev->config; in dma_iproc_pax_init()
673 struct dma_iproc_pax_data *pd = dev->data; in dma_iproc_pax_init() local
677 if (!device_is_ready(cfg->pcie_dev)) { in dma_iproc_pax_init()
679 return -ENODEV; in dma_iproc_pax_init()
682 pd->dma_base = cfg->dma_base; in dma_iproc_pax_init()
683 pd->rm_comm_base = cfg->rm_comm_base; in dma_iproc_pax_init()
684 pd->used_rings = (cfg->use_rings < PAX_DMA_RINGS_MAX) ? in dma_iproc_pax_init()
685 cfg->use_rings : PAX_DMA_RINGS_MAX; in dma_iproc_pax_init()
688 k_mutex_init(&pd->dma_lock); in dma_iproc_pax_init()
691 if (init_rm(pd)) { in dma_iproc_pax_init()
692 return -ETIMEDOUT; in dma_iproc_pax_init()
696 rm_cfg_start(pd); in dma_iproc_pax_init()
699 for (r = 0; r < pd->used_rings; r++) { in dma_iproc_pax_init()
700 /* per-ring mutex lock */ in dma_iproc_pax_init()
701 k_mutex_init(&pd->ring[r].lock); in dma_iproc_pax_init()
703 k_sem_init(&pd->ring[r].alert, 0, 1); in dma_iproc_pax_init()
705 pd->ring[r].idx = r; in dma_iproc_pax_init()
706 pd->ring[r].ring_base = cfg->rm_base + in dma_iproc_pax_init()
708 LOG_DBG("RING%d,VERSION:0x%x\n", pd->ring[r].idx, in dma_iproc_pax_init()
709 sys_read32(RM_RING_REG(pd, r, RING_VER))); in dma_iproc_pax_init()
712 pd->ring[r].ring_mem = (void *)((uintptr_t)cfg->bd_memory_base + in dma_iproc_pax_init()
714 if (!pd->ring[r].ring_mem) { in dma_iproc_pax_init()
716 return -ENOMEM; in dma_iproc_pax_init()
719 mem_aligned = ((uintptr_t)pd->ring[r].ring_mem + in dma_iproc_pax_init()
720 PAX_DMA_RING_ALIGN - 1) & in dma_iproc_pax_init()
721 ~(PAX_DMA_RING_ALIGN - 1); in dma_iproc_pax_init()
723 pd->ring[r].cmpl = (void *)mem_aligned; in dma_iproc_pax_init()
724 pd->ring[r].bd = (void *)(mem_aligned + in dma_iproc_pax_init()
726 pd->ring[r].sync_loc = (void *)((uintptr_t)pd->ring[r].bd + in dma_iproc_pax_init()
731 pd->ring[r].idx, in dma_iproc_pax_init()
732 pd->ring[r].ring_mem, in dma_iproc_pax_init()
735 pd->ring[r].idx, in dma_iproc_pax_init()
736 pd->ring[r].bd, in dma_iproc_pax_init()
737 pd->ring[r].cmpl, in dma_iproc_pax_init()
738 pd->ring[r].sync_loc); in dma_iproc_pax_init()
741 prepare_ring(&(pd->ring[r])); in dma_iproc_pax_init()
744 init_ring(pd, r); in dma_iproc_pax_init()
748 rm_cfg_finish(pd); in dma_iproc_pax_init()
759 LOG_INF("%s PAX DMA rings in poll mode!\n", dev->name); in dma_iproc_pax_init()
761 LOG_INF("%s RM setup %d rings\n", dev->name, pd->used_rings); in dma_iproc_pax_init()
778 ring->current_hdr = (uintptr_t)get_curr_desc_addr(ring); in dma_iproc_pax_gen_desc()
779 rm_write_header_desc((void *)ring->current_hdr, in dma_iproc_pax_gen_desc()
784 ring->total_pkt_count++; in dma_iproc_pax_gen_desc()
796 hdr = (struct rm_header *)ring->current_hdr; in dma_iproc_pax_gen_desc()
797 hdr->bdcount = *non_hdr_bd_count; in dma_iproc_pax_gen_desc()
819 pci_addr = config->dest_address; in dma_iproc_pax_gen_packets()
820 axi_addr = config->source_address; in dma_iproc_pax_gen_packets()
824 axi_addr = config->dest_address; in dma_iproc_pax_gen_packets()
825 pci_addr = config->source_address; in dma_iproc_pax_gen_packets()
830 return -EINVAL; in dma_iproc_pax_gen_packets()
833 outstanding = config->block_size; in dma_iproc_pax_gen_packets()
859 outstanding = outstanding - curr; in dma_iproc_pax_gen_packets()
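Note: dma_iproc_pax_gen_packets() first orients the transfer, mapping the PCIe and AXI sides onto the destination/source addresses depending on channel direction (unsupported directions return -EINVAL), then carves block_size into per-packet chunks, decrementing outstanding until the block is consumed. A sketch of the carving loop; MAX_CHUNK and queue_chunk() are placeholders:

#include <stdint.h>
#include <zephyr/sys/util.h>

#define MAX_CHUNK (1024 * 1024)   /* placeholder per-BD size limit */

/* Hypothetical per-chunk descriptor writer. */
extern void queue_chunk(uint64_t pci, uint64_t axi, uint32_t len);

static void gen_packets(uint64_t pci, uint64_t axi, uint32_t block_size)
{
	uint32_t outstanding = block_size;

	while (outstanding) {
		uint32_t curr = MIN(outstanding, MAX_CHUNK);

		queue_chunk(pci, axi, curr);
		pci += curr;
		axi += curr;
		outstanding -= curr;   /* "outstanding = outstanding - curr" above */
	}
}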
885 struct dma_iproc_pax_data *pd = dev->data; in set_pkt_count() local
889 val = sys_read32(RM_RING_REG(pd, idx, in set_pkt_count()
893 sys_write32(val, RM_RING_REG(pd, idx, in set_pkt_count()
901 struct dma_iproc_pax_data *pd = dev->data; in wait_for_pkt_completion() local
904 ring = &(pd->ring[idx]); in wait_for_pkt_completion()
906 if (k_sem_take(&ring->alert, K_MSEC(PAX_DMA_TIMEOUT)) != 0) { in wait_for_pkt_completion()
908 return -ETIMEDOUT; in wait_for_pkt_completion()
919 struct dma_iproc_pax_data *pd = dev->data; in dma_iproc_pax_process_dma_blocks() local
920 const struct dma_iproc_pax_cfg *cfg = dev->config; in dma_iproc_pax_process_dma_blocks()
926 struct dma_block_config *block_config = config->head_block; in dma_iproc_pax_process_dma_blocks()
930 return -EINVAL; in dma_iproc_pax_process_dma_blocks()
933 ring = &(pd->ring[idx]); in dma_iproc_pax_process_dma_blocks()
936 * Host sync buffer isn't ready at zephyr/driver init-time in dma_iproc_pax_process_dma_blocks()
940 if ((ring->sync_pci.addr_lo == 0x0) && in dma_iproc_pax_process_dma_blocks()
941 (ring->sync_pci.addr_hi == 0x0)) { in dma_iproc_pax_process_dma_blocks()
943 LOG_DBG("sync addr loc 0x%x\n", cfg->scr_addr_loc); in dma_iproc_pax_process_dma_blocks()
944 sync.addr_lo = sys_read32(cfg->scr_addr_loc + 4); in dma_iproc_pax_process_dma_blocks()
945 sync.addr_hi = sys_read32(cfg->scr_addr_loc); in dma_iproc_pax_process_dma_blocks()
946 ring->sync_pci.addr_lo = sync.addr_lo + idx * 4; in dma_iproc_pax_process_dma_blocks()
947 ring->sync_pci.addr_hi = sync.addr_hi; in dma_iproc_pax_process_dma_blocks()
949 ring->sync_pci.addr_hi, in dma_iproc_pax_process_dma_blocks()
950 ring->sync_pci.addr_lo); in dma_iproc_pax_process_dma_blocks()
954 ring->curr.sync_data.opaque = ring->curr.opq; in dma_iproc_pax_process_dma_blocks()
955 ring->curr.sync_data.total_pkts = config->block_count; in dma_iproc_pax_process_dma_blocks()
956 memcpy((void *)ring->sync_loc, in dma_iproc_pax_process_dma_blocks()
957 (void *)&(ring->curr.sync_data), 4); in dma_iproc_pax_process_dma_blocks()
958 sync_pl.dest_address = ring->sync_pci.addr_lo | in dma_iproc_pax_process_dma_blocks()
959 (uint64_t)ring->sync_pci.addr_hi << 32; in dma_iproc_pax_process_dma_blocks()
960 sync_pl.source_address = (uintptr_t)ring->sync_loc; in dma_iproc_pax_process_dma_blocks()
961 sync_pl.block_size = 4; /* 4-bytes */ in dma_iproc_pax_process_dma_blocks()
964 toggle_bit = ring->curr.toggle; in dma_iproc_pax_process_dma_blocks()
966 ring->curr.opq = curr_pkt_id(ring); in dma_iproc_pax_process_dma_blocks()
971 config->channel_direction, in dma_iproc_pax_process_dma_blocks()
977 block_config = block_config->next_block; in dma_iproc_pax_process_dma_blocks()
986 ring->non_hdr_bd_count = 0; in dma_iproc_pax_process_dma_blocks()
998 struct dma_iproc_pax_data *pd = dev->data; in dma_iproc_pax_configure() local
1004 return -EINVAL; in dma_iproc_pax_configure()
1007 ring = &(pd->ring[channel]); in dma_iproc_pax_configure()
1008 k_mutex_lock(&ring->lock, K_FOREVER); in dma_iproc_pax_configure()
1010 if (ring->ring_active) { in dma_iproc_pax_configure()
1011 ret = -EBUSY; in dma_iproc_pax_configure()
1015 if (cfg->block_count >= RM_V2_MAX_BLOCK_COUNT) { in dma_iproc_pax_configure()
1017 cfg->block_count, RM_V2_MAX_BLOCK_COUNT); in dma_iproc_pax_configure()
1018 ret = -ENOTSUP; in dma_iproc_pax_configure()
1022 ring->ring_active = 1; in dma_iproc_pax_configure()
1026 ring->ring_active = 0; in dma_iproc_pax_configure()
1030 ring->dma_callback = cfg->dma_callback; in dma_iproc_pax_configure()
1031 ring->callback_arg = cfg->user_data; in dma_iproc_pax_configure()
1033 k_mutex_unlock(&ring->lock); in dma_iproc_pax_configure()
1041 struct dma_iproc_pax_data *pd = dev->data; in dma_iproc_pax_transfer_start() local
1046 return -EINVAL; in dma_iproc_pax_transfer_start()
1049 ring = &(pd->ring[channel]); in dma_iproc_pax_transfer_start()
1050 set_pkt_count(dev, channel, ring->total_pkt_count); in dma_iproc_pax_transfer_start()
1053 write_doorbell(pd, channel); in dma_iproc_pax_transfer_start()
1056 set_ring_active(pd, channel, true); in dma_iproc_pax_transfer_start()
1059 ret = wait_for_pkt_completion(dev, channel, ring->total_pkt_count); in dma_iproc_pax_transfer_start()
1067 k_mutex_lock(&ring->lock, K_FOREVER); in dma_iproc_pax_transfer_start()
1068 ring->ring_active = 0; in dma_iproc_pax_transfer_start()
1069 k_mutex_unlock(&ring->lock); in dma_iproc_pax_transfer_start()
1073 set_ring_active(pd, channel, false); in dma_iproc_pax_transfer_start()
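Note: dma_iproc_pax_transfer_start() ties the pieces together in a fixed order: program the expected packet count, ring the doorbell to publish the batched BDs, activate the ring, then block on completion; on failure the ring is deactivated and ring_active is cleared under the per-ring lock. The sequence as a sketch; all helpers are stand-ins for the driver functions above:

#include <stdbool.h>
#include <stdint.h>

struct dev_ctx;

extern void set_pkt_count(struct dev_ctx *dev, int ch, uint32_t pkts);
extern void write_doorbell(struct dev_ctx *dev, int ch);
extern void set_ring_active(struct dev_ctx *dev, int ch, bool active);
extern int wait_for_pkt_completion(struct dev_ctx *dev, int ch, uint32_t pkts);

static int transfer_start(struct dev_ctx *dev, int ch, uint32_t pkts)
{
	int ret;

	set_pkt_count(dev, ch, pkts);    /* HW knows how many packets to expect */
	write_doorbell(dev, ch);         /* publish the batched descriptors */
	set_ring_active(dev, ch, true);  /* kick the engine */

	ret = wait_for_pkt_completion(dev, ch, pkts);
	if (ret) {
		set_ring_active(dev, ch, false);   /* quiesce on error */
	}
	return ret;
}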