Lines Matching full:sdma

104 	/* protect SDMA with concurrent access from multiple CPUs */
109 struct prestera_sdma sdma; member
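
Collating the fields these lines touch (rx_ring, tx_ring, sw, desc_pool, tx_work, rx_napi, napi_dev, map_addr, dma_mask, tx_lock), the per-switch SDMA state looks roughly like the sketch below; exact field types, ordering and the queue count are assumptions reconstructed from usage, not copied from the source.

/* Reconstructed from the references in this listing; types, ordering and
 * PRESTERA_SDMA_RX_QUEUE_NUM are assumptions.
 */
struct prestera_sdma {
	struct prestera_rx_ring rx_ring[PRESTERA_SDMA_RX_QUEUE_NUM];
	struct prestera_tx_ring tx_ring;
	struct prestera_switch *sw;
	struct dma_pool *desc_pool;	/* descriptors shared by RX and TX */
	struct work_struct tx_work;	/* deferred TX buffer recycling */
	struct napi_struct rx_napi;
	struct net_device napi_dev;	/* dummy netdev backing rx_napi */
	u32 map_addr;			/* SDMA-engine view of host DMA addresses */
	u64 dma_mask;
	/* protect SDMA with concurrent access from multiple CPUs */
	spinlock_t tx_lock;
};
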
112 static int prestera_sdma_buf_init(struct prestera_sdma *sdma, in prestera_sdma_buf_init() argument
118 desc = dma_pool_alloc(sdma->desc_pool, GFP_DMA | GFP_KERNEL, &dma); in prestera_sdma_buf_init()
130 static u32 prestera_sdma_map(struct prestera_sdma *sdma, dma_addr_t pa) in prestera_sdma_map() argument
132 return sdma->map_addr + pa; in prestera_sdma_map()
135 static void prestera_sdma_rx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_init() argument
144 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_rx_desc_init()
152 static void prestera_sdma_rx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_rx_desc_set_next() argument
156 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_rx_desc_set_next()
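
prestera_sdma_map() rebases a CPU-side dma_addr_t into the SDMA engine's view of host memory by adding the window base map_addr, and the descriptor helpers store those rebased addresses little-endian into the shared descriptor. A descriptor layout consistent with the desc->buff and desc->next accesses is sketched below; the two leading control words and the 16-byte alignment are assumptions.

/* Sketch of the hardware descriptor as these helpers use it: word1/word2
 * are assumed control/status words, buff and next carry SDMA-view
 * (map_addr-rebased) addresses.
 */
struct prestera_sdma_desc {
	__le32 word1;
	__le32 word2;
	__le32 buff;	/* cpu_to_le32(prestera_sdma_map(sdma, buf->buf_dma)) */
	__le32 next;	/* cpu_to_le32(prestera_sdma_map(sdma, next_desc_dma)) */
} __packed __aligned(16);
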
159 static int prestera_sdma_rx_skb_alloc(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_alloc() argument
162 struct device *dev = sdma->sw->dev->dev; in prestera_sdma_rx_skb_alloc()
189 static struct sk_buff *prestera_sdma_rx_skb_get(struct prestera_sdma *sdma, in prestera_sdma_rx_skb_get() argument
197 err = prestera_sdma_rx_skb_alloc(sdma, buf); in prestera_sdma_rx_skb_get()
209 prestera_sdma_rx_desc_init(sdma, buf->desc, buf->buf_dma); in prestera_sdma_rx_skb_get()
214 static int prestera_rxtx_process_skb(struct prestera_sdma *sdma, in prestera_rxtx_process_skb() argument
232 port = prestera_port_find_by_hwid(sdma->sw, dev_id, hw_port); in prestera_rxtx_process_skb()
234 dev_warn_ratelimited(prestera_dev(sdma->sw), "received pkt for non-existent port(%u, %u)\n", in prestera_rxtx_process_skb()
274 struct prestera_sdma *sdma; in prestera_sdma_rx_poll() local
285 sdma = container_of(napi, struct prestera_sdma, rx_napi); in prestera_sdma_rx_poll()
289 struct prestera_rx_ring *ring = &sdma->rx_ring[q]; in prestera_sdma_rx_poll()
309 skb = prestera_sdma_rx_skb_get(sdma, buf); in prestera_sdma_rx_poll()
313 if (unlikely(prestera_rxtx_process_skb(sdma, skb))) in prestera_sdma_rx_poll()
323 prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG, in prestera_sdma_rx_poll()
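
prestera_sdma_rx_poll() follows the usual NAPI contract: drain at most `budget` packets from the RX rings, hand each skb to prestera_rxtx_process_skb(), and re-arm the RX interrupt mask only once the burst is finished. A minimal sketch of that shape, where ring_next_buf() and the re-arm mask value are simplified placeholders rather than the driver's actual bookkeeping:

/* Minimal sketch of the poll loop's shape; ring_next_buf() is a
 * hypothetical helper yielding the next hardware-completed buffer (or
 * NULL), and the re-arm mask value is a placeholder.
 */
static int prestera_sdma_rx_poll(struct napi_struct *napi, int budget)
{
	struct prestera_sdma *sdma = container_of(napi, struct prestera_sdma,
						  rx_napi);
	int done = 0;
	int q;

	for (q = 0; q < PRESTERA_SDMA_RX_QUEUE_NUM && done < budget; q++) {
		struct prestera_rx_ring *ring = &sdma->rx_ring[q];
		struct prestera_rx_buf *buf;

		while (done < budget && (buf = ring_next_buf(ring))) {
			struct sk_buff *skb;

			skb = prestera_sdma_rx_skb_get(sdma, buf);
			if (skb && unlikely(prestera_rxtx_process_skb(sdma, skb)))
				kfree_skb(skb);

			done++;
		}
	}

	/* Re-enable RX interrupts only when the quota was not exhausted. */
	if (done < budget && napi_complete_done(napi, done))
		prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG,
			       GENMASK(31, 0));

	return done;
}

Keeping the mask closed for the whole burst is what turns a packet flood into a single interrupt followed by polled draining.
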
331 static void prestera_sdma_rx_fini(struct prestera_sdma *sdma) in prestera_sdma_rx_fini() argument
337 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_STATUS_REG, in prestera_sdma_rx_fini()
341 struct prestera_rx_ring *ring = &sdma->rx_ring[q]; in prestera_sdma_rx_fini()
350 dma_pool_free(sdma->desc_pool, buf->desc, in prestera_sdma_rx_fini()
357 dma_unmap_single(sdma->sw->dev->dev, in prestera_sdma_rx_fini()
365 static int prestera_sdma_rx_init(struct prestera_sdma *sdma) in prestera_sdma_rx_init() argument
373 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_STATUS_REG, in prestera_sdma_rx_init()
378 struct prestera_rx_ring *ring = &sdma->rx_ring[q]; in prestera_sdma_rx_init()
392 err = prestera_sdma_buf_init(sdma, next); in prestera_sdma_rx_init()
396 err = prestera_sdma_rx_skb_alloc(sdma, next); in prestera_sdma_rx_init()
400 prestera_sdma_rx_desc_init(sdma, next->desc, in prestera_sdma_rx_init()
403 prestera_sdma_rx_desc_set_next(sdma, prev->desc, in prestera_sdma_rx_init()
411 prestera_sdma_rx_desc_set_next(sdma, tail->desc, head->desc_dma); in prestera_sdma_rx_init()
413 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_DESC_REG(q), in prestera_sdma_rx_init()
414 prestera_sdma_map(sdma, head->desc_dma)); in prestera_sdma_rx_init()
420 prestera_write(sdma->sw, PRESTERA_SDMA_RX_QUEUE_STATUS_REG, in prestera_sdma_rx_init()
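
The RX init path allocates each descriptor from the shared dma_pool, attaches a mapped skb, links every descriptor's next pointer to its successor, and closes the loop by pointing the tail back at the head before programming the queue's descriptor-base register with the rebased head address. The sketch below isolates just the circular chaining; ring->bufs and PRESTERA_SDMA_RX_DESC_PER_Q are assumed names, and the skb/buffer setup from the surrounding lines is omitted.

/* Sketch of the per-queue circular chaining; error unwinding and the skb
 * allocation that fills buf->buf_dma are omitted for brevity.
 */
static int sdma_rx_ring_chain(struct prestera_sdma *sdma,
			      struct prestera_rx_ring *ring)
{
	struct prestera_rx_buf *head = &ring->bufs[0];
	struct prestera_rx_buf *prev = NULL;
	int i, err;

	for (i = 0; i < PRESTERA_SDMA_RX_DESC_PER_Q; i++) {
		struct prestera_rx_buf *buf = &ring->bufs[i];

		err = prestera_sdma_buf_init(sdma, buf);	/* dma_pool_alloc() */
		if (err)
			return err;

		if (prev)
			prestera_sdma_rx_desc_set_next(sdma, prev->desc,
						       buf->desc_dma);
		prev = buf;
	}

	/* Close the ring: the last descriptor points back at the first. */
	prestera_sdma_rx_desc_set_next(sdma, prev->desc, head->desc_dma);

	return 0;
}
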
426 static void prestera_sdma_tx_desc_init(struct prestera_sdma *sdma, in prestera_sdma_tx_desc_init() argument
433 static void prestera_sdma_tx_desc_set_next(struct prestera_sdma *sdma, in prestera_sdma_tx_desc_set_next() argument
437 desc->next = cpu_to_le32(prestera_sdma_map(sdma, next)); in prestera_sdma_tx_desc_set_next()
440 static void prestera_sdma_tx_desc_set_buf(struct prestera_sdma *sdma, in prestera_sdma_tx_desc_set_buf() argument
448 desc->buff = cpu_to_le32(prestera_sdma_map(sdma, buf)); in prestera_sdma_tx_desc_set_buf()
464 static int prestera_sdma_tx_buf_map(struct prestera_sdma *sdma, in prestera_sdma_tx_buf_map() argument
468 struct device *dma_dev = sdma->sw->dev->dev; in prestera_sdma_tx_buf_map()
481 static void prestera_sdma_tx_buf_unmap(struct prestera_sdma *sdma, in prestera_sdma_tx_buf_unmap() argument
484 struct device *dma_dev = sdma->sw->dev->dev; in prestera_sdma_tx_buf_unmap()
493 struct prestera_sdma *sdma; in prestera_sdma_tx_recycle_work_fn() local
496 sdma = container_of(work, struct prestera_sdma, tx_work); in prestera_sdma_tx_recycle_work_fn()
498 tx_ring = &sdma->tx_ring; in prestera_sdma_tx_recycle_work_fn()
509 prestera_sdma_tx_buf_unmap(sdma, buf); in prestera_sdma_tx_recycle_work_fn()
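
TX completion is handled out of line: tx_work (scheduled from prestera_sdma_tx_start() and from the xmit path when the ring is busy) walks the TX ring, unmapping buffers the hardware has finished with and consuming their skbs. A minimal sketch of that recycle loop, where tx_desc_done(), buf->is_xmit and PRESTERA_SDMA_TX_DESC_PER_Q are assumed stand-ins for the driver's real ownership tracking:

/* Sketch of the deferred TX clean-up; tx_desc_done() stands in for
 * whatever DONE/ownership bit the real descriptor exposes.
 */
static void sdma_tx_recycle(struct work_struct *work)
{
	struct prestera_sdma *sdma = container_of(work, struct prestera_sdma,
						  tx_work);
	struct prestera_tx_ring *tx_ring = &sdma->tx_ring;
	int i;

	for (i = 0; i < PRESTERA_SDMA_TX_DESC_PER_Q; i++) {
		struct prestera_tx_buf *buf = &tx_ring->bufs[i];

		if (!buf->is_xmit || !tx_desc_done(buf->desc))
			continue;

		prestera_sdma_tx_buf_unmap(sdma, buf);	/* dma_unmap_single() */
		dev_consume_skb_any(buf->skb);
		buf->skb = NULL;
		buf->is_xmit = false;
	}
}
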
520 static int prestera_sdma_tx_init(struct prestera_sdma *sdma) in prestera_sdma_tx_init() argument
523 struct prestera_tx_ring *tx_ring = &sdma->tx_ring; in prestera_sdma_tx_init()
527 INIT_WORK(&sdma->tx_work, prestera_sdma_tx_recycle_work_fn); in prestera_sdma_tx_init()
528 spin_lock_init(&sdma->tx_lock); in prestera_sdma_tx_init()
544 err = prestera_sdma_buf_init(sdma, next); in prestera_sdma_tx_init()
550 prestera_sdma_tx_desc_init(sdma, next->desc); in prestera_sdma_tx_init()
552 prestera_sdma_tx_desc_set_next(sdma, prev->desc, in prestera_sdma_tx_init()
560 prestera_sdma_tx_desc_set_next(sdma, tail->desc, head->desc_dma); in prestera_sdma_tx_init()
565 prestera_write(sdma->sw, PRESTERA_SDMA_TX_QUEUE_DESC_REG, in prestera_sdma_tx_init()
566 prestera_sdma_map(sdma, head->desc_dma)); in prestera_sdma_tx_init()
571 static void prestera_sdma_tx_fini(struct prestera_sdma *sdma) in prestera_sdma_tx_fini() argument
573 struct prestera_tx_ring *ring = &sdma->tx_ring; in prestera_sdma_tx_fini()
577 cancel_work_sync(&sdma->tx_work); in prestera_sdma_tx_fini()
586 dma_pool_free(sdma->desc_pool, buf->desc, in prestera_sdma_tx_fini()
592 dma_unmap_single(sdma->sw->dev->dev, buf->buf_dma, in prestera_sdma_tx_fini()
603 struct prestera_sdma *sdma = arg; in prestera_rxtx_handle_event() local
608 prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG, 0); in prestera_rxtx_handle_event()
609 napi_schedule(&sdma->rx_napi); in prestera_rxtx_handle_event()
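
The event callback is the other half of the NAPI handshake sketched above: it silences further RX interrupts by zeroing the mask register and schedules the poll routine, which re-arms the mask once the burst has been drained. Roughly, with the callback prototype and the event id check assumed rather than taken from this listing:

/* Sketch of the RX event callback; the prototype and the event id check
 * are assumptions based on the `arg` usage above.
 */
static void sdma_handle_rx_event(struct prestera_switch *sw,
				 struct prestera_event *evt, void *arg)
{
	struct prestera_sdma *sdma = arg;

	if (evt->id != PRESTERA_RXTX_EVENT_RCV_PKT)	/* assumed event id */
		return;

	/* Mask RX interrupts until the NAPI poll finishes the burst. */
	prestera_write(sdma->sw, PRESTERA_SDMA_RX_INTR_MASK_REG, 0);
	napi_schedule(&sdma->rx_napi);
}
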
614 struct prestera_sdma *sdma = &sw->rxtx->sdma; in prestera_sdma_switch_init() local
627 sdma->dma_mask = dma_get_mask(dev); in prestera_sdma_switch_init()
628 sdma->map_addr = p.map_addr; in prestera_sdma_switch_init()
629 sdma->sw = sw; in prestera_sdma_switch_init()
631 sdma->desc_pool = dma_pool_create("desc_pool", dev, in prestera_sdma_switch_init()
634 if (!sdma->desc_pool) in prestera_sdma_switch_init()
637 err = prestera_sdma_rx_init(sdma); in prestera_sdma_switch_init()
643 err = prestera_sdma_tx_init(sdma); in prestera_sdma_switch_init()
651 sdma); in prestera_sdma_switch_init()
655 init_dummy_netdev(&sdma->napi_dev); in prestera_sdma_switch_init()
657 netif_napi_add(&sdma->napi_dev, &sdma->rx_napi, prestera_sdma_rx_poll, 64); in prestera_sdma_switch_init()
658 napi_enable(&sdma->rx_napi); in prestera_sdma_switch_init()
664 prestera_sdma_tx_fini(sdma); in prestera_sdma_switch_init()
666 prestera_sdma_rx_fini(sdma); in prestera_sdma_switch_init()
668 dma_pool_destroy(sdma->desc_pool); in prestera_sdma_switch_init()
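
prestera_sdma_switch_init() follows the usual goto-unwind pattern: create the descriptor pool, bring up the RX and TX rings, register the event handler, then attach and enable NAPI on a dummy netdev, with each failure point tearing down exactly what was set up before it (tx_fini, rx_fini, dma_pool_destroy, in that order, as the lines above show). A skeleton of that ordering; register_rx_event() abbreviates the event registration whose exact API is not part of this listing, and the map_addr hand-off from the firmware/PCI layer is elided.

static int prestera_sdma_switch_init(struct prestera_switch *sw)
{
	struct prestera_sdma *sdma = &sw->rxtx->sdma;
	struct device *dev = sw->dev->dev;
	int err;

	sdma->sw = sw;
	sdma->dma_mask = dma_get_mask(dev);
	/* sdma->map_addr is taken from the firmware/PCI layer (elided) */

	sdma->desc_pool = dma_pool_create("desc_pool", dev,
					  sizeof(struct prestera_sdma_desc),
					  16, 0);
	if (!sdma->desc_pool)
		return -ENOMEM;

	err = prestera_sdma_rx_init(sdma);
	if (err)
		goto err_rx_init;

	err = prestera_sdma_tx_init(sdma);
	if (err)
		goto err_tx_init;

	/* register_rx_event(): placeholder for the real registration call */
	err = register_rx_event(sw, sdma_handle_rx_event, sdma);
	if (err)
		goto err_evt_register;

	init_dummy_netdev(&sdma->napi_dev);
	netif_napi_add(&sdma->napi_dev, &sdma->rx_napi,
		       prestera_sdma_rx_poll, 64);
	napi_enable(&sdma->rx_napi);

	return 0;

err_evt_register:
	prestera_sdma_tx_fini(sdma);
err_tx_init:
	prestera_sdma_rx_fini(sdma);
err_rx_init:
	dma_pool_destroy(sdma->desc_pool);
	return err;
}
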
674 struct prestera_sdma *sdma = &sw->rxtx->sdma; in prestera_sdma_switch_fini() local
676 napi_disable(&sdma->rx_napi); in prestera_sdma_switch_fini()
677 netif_napi_del(&sdma->rx_napi); in prestera_sdma_switch_fini()
680 prestera_sdma_tx_fini(sdma); in prestera_sdma_switch_fini()
681 prestera_sdma_rx_fini(sdma); in prestera_sdma_switch_fini()
682 dma_pool_destroy(sdma->desc_pool); in prestera_sdma_switch_fini()
685 static bool prestera_sdma_is_ready(struct prestera_sdma *sdma) in prestera_sdma_is_ready() argument
687 return !(prestera_read(sdma->sw, PRESTERA_SDMA_TX_QUEUE_START_REG) & 1); in prestera_sdma_is_ready()
690 static int prestera_sdma_tx_wait(struct prestera_sdma *sdma, in prestera_sdma_tx_wait() argument
696 if (prestera_sdma_is_ready(sdma)) in prestera_sdma_tx_wait()
705 static void prestera_sdma_tx_start(struct prestera_sdma *sdma) in prestera_sdma_tx_start() argument
707 prestera_write(sdma->sw, PRESTERA_SDMA_TX_QUEUE_START_REG, 1); in prestera_sdma_tx_start()
708 schedule_work(&sdma->tx_work); in prestera_sdma_tx_start()
711 static netdev_tx_t prestera_sdma_xmit(struct prestera_sdma *sdma, in prestera_sdma_xmit() argument
714 struct device *dma_dev = sdma->sw->dev->dev; in prestera_sdma_xmit()
720 spin_lock(&sdma->tx_lock); in prestera_sdma_xmit()
722 tx_ring = &sdma->tx_ring; in prestera_sdma_xmit()
726 schedule_work(&sdma->tx_work); in prestera_sdma_xmit()
733 err = prestera_sdma_tx_buf_map(sdma, buf, skb); in prestera_sdma_xmit()
737 prestera_sdma_tx_desc_set_buf(sdma, buf->desc, buf->buf_dma, skb->len); in prestera_sdma_xmit()
747 err = prestera_sdma_tx_wait(sdma, tx_ring); in prestera_sdma_xmit()
756 prestera_sdma_tx_start(sdma); in prestera_sdma_xmit()
761 prestera_sdma_tx_buf_unmap(sdma, buf); in prestera_sdma_xmit()
767 spin_unlock(&sdma->tx_lock); in prestera_sdma_xmit()
819 return prestera_sdma_xmit(&port->sw->rxtx->sdma, skb); in prestera_rxtx_xmit()
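
The transmit path is serialized by tx_lock: prestera_rxtx_xmit() forwards the skb to prestera_sdma_xmit(), which maps the buffer, points the next free descriptor at it, waits for the engine to report idle (prestera_sdma_tx_wait() polls the low bit of the TX start register via prestera_sdma_is_ready()), then writes the start bit and schedules tx_work to reclaim completed buffers. A condensed sketch of that sequence, with ring bookkeeping reduced to a hypothetical next_tx_buf() helper; on an unrecoverable error the sketch drops the skb and still returns NETDEV_TX_OK so the stack does not retry.

/* Condensed sketch of the TX submit path; next_tx_buf() is a stand-in for
 * the driver's ring-slot bookkeeping.
 */
static netdev_tx_t sdma_xmit(struct prestera_sdma *sdma, struct sk_buff *skb)
{
	struct prestera_tx_ring *tx_ring = &sdma->tx_ring;
	struct prestera_tx_buf *buf;
	netdev_tx_t ret = NETDEV_TX_OK;

	spin_lock(&sdma->tx_lock);

	buf = next_tx_buf(tx_ring);
	if (!buf) {
		/* Ring full: kick the recycle work and ask the stack to retry. */
		schedule_work(&sdma->tx_work);
		ret = NETDEV_TX_BUSY;
		goto out;
	}

	if (prestera_sdma_tx_buf_map(sdma, buf, skb))
		goto drop;

	prestera_sdma_tx_desc_set_buf(sdma, buf->desc, buf->buf_dma, skb->len);

	if (prestera_sdma_tx_wait(sdma, tx_ring)) {
		prestera_sdma_tx_buf_unmap(sdma, buf);
		goto drop;
	}

	prestera_sdma_tx_start(sdma);	/* write start bit + schedule tx_work */
	goto out;

drop:
	dev_kfree_skb_any(skb);
out:
	spin_unlock(&sdma->tx_lock);
	return ret;
}
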