Lines Matching full:rx

79 static void sparx5_fdma_rx_add_dcb(struct sparx5_rx *rx,  in sparx5_fdma_rx_add_dcb()  argument
93 rx->last_entry->nextptr = nextptr; in sparx5_fdma_rx_add_dcb()
94 rx->last_entry = dcb; in sparx5_fdma_rx_add_dcb()
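
The add_dcb fragment links a new DCB onto the tail of the RX ring: the previous tail's nextptr is pointed at the new DCB's DMA address, then last_entry is advanced. A minimal user-space sketch of that linking step, with simplified stand-in types (demo_dcb, demo_rx and the addresses are illustrative, not the driver's definitions):

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_DCB_MAX 4

    /* Simplified stand-in for a hardware DCB: only the link field matters here. */
    struct demo_dcb {
            uint64_t nextptr;       /* DMA address of the next DCB in the chain */
    };

    struct demo_rx {
            struct demo_dcb dcbs[DEMO_DCB_MAX];
            struct demo_dcb *last_entry;    /* current tail of the chain */
            uint64_t dma;                   /* DMA base address of dcbs[] */
    };

    /* Same shape as the add_dcb fragment: patch the old tail, then advance it. */
    static void demo_rx_add_dcb(struct demo_rx *rx, struct demo_dcb *dcb,
                                uint64_t nextptr)
    {
            rx->last_entry->nextptr = nextptr;
            rx->last_entry = dcb;
    }

    int main(void)
    {
            struct demo_rx rx = { .dma = 0x10000000ULL };

            rx.last_entry = &rx.dcbs[0];
            for (int i = 1; i < DEMO_DCB_MAX; i++)
                    demo_rx_add_dcb(&rx, &rx.dcbs[i],
                                    rx.dma + i * sizeof(struct demo_dcb));

            for (int i = 0; i < DEMO_DCB_MAX; i++)
                    printf("dcb[%d].nextptr = 0x%llx\n", i,
                           (unsigned long long)rx.dcbs[i].nextptr);
            return 0;
    }

Each call only touches the old tail, so the chain grows without rewriting earlier DCBs.
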
113 static void sparx5_fdma_rx_activate(struct sparx5 *sparx5, struct sparx5_rx *rx) in sparx5_fdma_rx_activate() argument
116 spx5_wr(((u64)rx->dma) & GENMASK(31, 0), sparx5, in sparx5_fdma_rx_activate()
117 FDMA_DCB_LLP(rx->channel_id)); in sparx5_fdma_rx_activate()
118 spx5_wr(((u64)rx->dma) >> 32, sparx5, FDMA_DCB_LLP1(rx->channel_id)); in sparx5_fdma_rx_activate()
120 /* Set the number of RX DBs to be used, and DB end-of-frame interrupt */ in sparx5_fdma_rx_activate()
124 sparx5, FDMA_CH_CFG(rx->channel_id)); in sparx5_fdma_rx_activate()
126 /* Set the RX Watermark to max */ in sparx5_fdma_rx_activate()
131 /* Start RX fdma */ in sparx5_fdma_rx_activate()
135 /* Enable RX channel DB interrupt */ in sparx5_fdma_rx_activate()
136 spx5_rmw(BIT(rx->channel_id), in sparx5_fdma_rx_activate()
137 BIT(rx->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA, in sparx5_fdma_rx_activate()
140 /* Activate the RX channel */ in sparx5_fdma_rx_activate()
141 spx5_wr(BIT(rx->channel_id), sparx5, FDMA_CH_ACTIVATE); in sparx5_fdma_rx_activate()
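
The activate fragment programs the 64-bit DMA address of the first DCB into two 32-bit registers: the low half (masked with GENMASK(31, 0)) into FDMA_DCB_LLP and the high half (shifted right by 32) into FDMA_DCB_LLP1. A small stand-alone sketch of that split, with llp_reg/llp1_reg standing in for the hardware registers written through spx5_wr():

    #include <stdint.h>
    #include <stdio.h>

    /* User-space stand-in for the kernel's GENMASK(31, 0). */
    #define DEMO_GENMASK32(h, l) ((~0U >> (31 - (h))) & (~0U << (l)))

    /* Models of the two 32-bit link-list-pointer registers written via spx5_wr(). */
    static uint32_t llp_reg, llp1_reg;

    static void demo_program_llp(uint64_t dma)
    {
            /* Low 32 bits of the first DCB's DMA address go into LLP... */
            llp_reg = (uint32_t)(dma & DEMO_GENMASK32(31, 0));
            /* ...and the upper 32 bits into LLP1, as in the activate fragment. */
            llp1_reg = (uint32_t)(dma >> 32);
    }

    int main(void)
    {
            demo_program_llp(0x0000000123456780ULL);
            printf("LLP  = 0x%08x\nLLP1 = 0x%08x\n",
                   (unsigned)llp_reg, (unsigned)llp1_reg);
            return 0;
    }
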
144 static void sparx5_fdma_rx_deactivate(struct sparx5 *sparx5, struct sparx5_rx *rx) in sparx5_fdma_rx_deactivate() argument
146 /* Deactivate the RX channel */ in sparx5_fdma_rx_deactivate()
147 spx5_rmw(0, BIT(rx->channel_id) & FDMA_CH_ACTIVATE_CH_ACTIVATE, in sparx5_fdma_rx_deactivate()
150 /* Disable RX channel DB interrupt */ in sparx5_fdma_rx_deactivate()
151 spx5_rmw(0, BIT(rx->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA, in sparx5_fdma_rx_deactivate()
154 /* Stop RX fdma */ in sparx5_fdma_rx_deactivate()
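
Both the interrupt-enable line in activate and the disable lines here pass a (value, mask) pair to spx5_rmw(), with the channel bit ANDed against the register's field mask; enabling writes the bit under that mask, disabling writes 0 under the same mask. A sketch of that read-modify-write semantic, assuming spx5_rmw() behaves like a conventional masked register update (the channel number below is only an example):

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_BIT(n) (1U << (n))

    /* Model of a (value, mask) read-modify-write as used through spx5_rmw():
     * only the bits selected by mask are replaced, all other bits are kept.
     */
    static void demo_rmw(uint32_t val, uint32_t mask, uint32_t *reg)
    {
            *reg = (*reg & ~mask) | (val & mask);
    }

    int main(void)
    {
            uint32_t intr_db_ena = 0;
            int channel_id = 6;     /* example channel, not taken from the fragments */

            /* Enable: the channel bit is written under a mask of the same bit. */
            demo_rmw(DEMO_BIT(channel_id), DEMO_BIT(channel_id), &intr_db_ena);
            printf("after enable:  0x%08x\n", (unsigned)intr_db_ena);

            /* Disable: value 0 under the same channel-bit mask clears only that bit. */
            demo_rmw(0, DEMO_BIT(channel_id), &intr_db_ena);
            printf("after disable: 0x%08x\n", (unsigned)intr_db_ena);
            return 0;
    }
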
187 static void sparx5_fdma_rx_reload(struct sparx5 *sparx5, struct sparx5_rx *rx) in sparx5_fdma_rx_reload() argument
189 /* Reload the RX channel */ in sparx5_fdma_rx_reload()
190 spx5_wr(BIT(rx->channel_id), sparx5, FDMA_CH_RELOAD); in sparx5_fdma_rx_reload()
199 static struct sk_buff *sparx5_fdma_rx_alloc_skb(struct sparx5_rx *rx) in sparx5_fdma_rx_alloc_skb() argument
201 return __netdev_alloc_skb(rx->ndev, FDMA_XTR_BUFFER_SIZE, in sparx5_fdma_rx_alloc_skb()
205 static bool sparx5_fdma_rx_get_frame(struct sparx5 *sparx5, struct sparx5_rx *rx) in sparx5_fdma_rx_get_frame() argument
216 db_hw = &rx->dcb_entries[rx->dcb_index].db[rx->db_index]; in sparx5_fdma_rx_get_frame()
219 skb = rx->skb[rx->dcb_index][rx->db_index]; in sparx5_fdma_rx_get_frame()
221 new_skb = sparx5_fdma_rx_alloc_skb(rx); in sparx5_fdma_rx_get_frame()
226 rx->skb[rx->dcb_index][rx->db_index] = new_skb; in sparx5_fdma_rx_get_frame()
253 rx->packets++; in sparx5_fdma_rx_get_frame()
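
The get_frame fragment swaps buffers rather than copying: a replacement skb is allocated first, and only then is the filled skb taken out of the rx->skb[dcb_index][db_index] slot and delivered, with rx->packets incremented. A simplified sketch of that swap-on-receive pattern; the behaviour on allocation failure (keeping the old buffer in the slot and not delivering a frame) is an assumption, since that path is not visible in the matched lines:

    #include <stdio.h>
    #include <stdlib.h>

    #define DEMO_BUF_SIZE 2048      /* stand-in size, not FDMA_XTR_BUFFER_SIZE */

    /* Buffer currently owned by one ring slot (rx->skb[dcb][db] in the driver). */
    static void *ring_slot;

    /* Swap-on-receive: allocate the replacement first, and only hand the filled
     * buffer up if that allocation succeeded; otherwise keep the old buffer in
     * the slot (assumed failure handling, not shown in the matched lines).
     */
    static void *demo_rx_take_frame(void)
    {
            void *filled = ring_slot;
            void *replacement = malloc(DEMO_BUF_SIZE); /* rx_alloc_skb() stand-in */

            if (!replacement)
                    return NULL;

            ring_slot = replacement;
            return filled;          /* caller delivers this buffer to the stack */
    }

    int main(void)
    {
            ring_slot = malloc(DEMO_BUF_SIZE);

            void *frame = demo_rx_take_frame();
            printf("delivered %p, slot refilled with %p\n", frame, ring_slot);
            free(frame);
            free(ring_slot);
            return 0;
    }
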
260 struct sparx5_rx *rx = container_of(napi, struct sparx5_rx, napi); in sparx5_fdma_napi_callback() local
261 struct sparx5 *sparx5 = container_of(rx, struct sparx5, rx); in sparx5_fdma_napi_callback()
264 while (counter < weight && sparx5_fdma_rx_get_frame(sparx5, rx)) { in sparx5_fdma_napi_callback()
267 rx->db_index++; in sparx5_fdma_napi_callback()
270 if (rx->db_index != FDMA_RX_DCB_MAX_DBS) in sparx5_fdma_napi_callback()
275 rx->db_index = 0; in sparx5_fdma_napi_callback()
276 old_dcb = &rx->dcb_entries[rx->dcb_index]; in sparx5_fdma_napi_callback()
277 rx->dcb_index++; in sparx5_fdma_napi_callback()
278 rx->dcb_index &= FDMA_DCB_MAX - 1; in sparx5_fdma_napi_callback()
279 sparx5_fdma_rx_add_dcb(rx, old_dcb, in sparx5_fdma_napi_callback()
280 rx->dma + in sparx5_fdma_napi_callback()
282 (unsigned long)rx->dcb_entries)); in sparx5_fdma_napi_callback()
285 napi_complete_done(&rx->napi, counter); in sparx5_fdma_napi_callback()
286 spx5_rmw(BIT(rx->channel_id), in sparx5_fdma_napi_callback()
287 BIT(rx->channel_id) & FDMA_INTR_DB_ENA_INTR_DB_ENA, in sparx5_fdma_napi_callback()
291 sparx5_fdma_rx_reload(sparx5, rx); in sparx5_fdma_napi_callback()
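
The NAPI callback walks the ring with two indices: db_index steps through the DBs of the current DCB, and only when all of them are consumed is the DCB recycled via rx_add_dcb and dcb_index advanced with a power-of-two wrap (& (FDMA_DCB_MAX - 1)). A small sketch of just that index arithmetic; the sizes below are examples, not the driver's real FDMA_RX_DCB_MAX_DBS / FDMA_DCB_MAX values:

    #include <stdio.h>

    /* Example geometry only; the real FDMA_RX_DCB_MAX_DBS and FDMA_DCB_MAX are
     * not visible in the fragments.  The DCB count must be a power of two for
     * the '& (count - 1)' wrap used in the poll loop to be correct.
     */
    #define DEMO_DBS_PER_DCB 15
    #define DEMO_DCB_MAX     64

    int main(void)
    {
            unsigned int db_index = 0, dcb_index = 0;

            /* Walk frames the way the napi callback fragment does: step through
             * the DBs of the current DCB first, and only when they are all
             * consumed recycle the DCB and advance (with wrap) to the next one.
             */
            for (int frame = 0; frame < 40; frame++) {
                    db_index++;
                    if (db_index != DEMO_DBS_PER_DCB)
                            continue;

                    db_index = 0;
                    dcb_index++;
                    dcb_index &= DEMO_DCB_MAX - 1;
            }
            printf("db_index=%u dcb_index=%u\n", db_index, dcb_index);
            return 0;
    }

When the budget is not exhausted, the fragment completes NAPI, re-enables the per-channel DB interrupt with the same masked write shown above, and reloads the channel.
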
346 struct sparx5_rx *rx = &sparx5->rx; in sparx5_fdma_rx_alloc() local
353 rx->dcb_entries = devm_kzalloc(sparx5->dev, size, GFP_KERNEL); in sparx5_fdma_rx_alloc()
354 if (!rx->dcb_entries) in sparx5_fdma_rx_alloc()
356 rx->dma = virt_to_phys(rx->dcb_entries); in sparx5_fdma_rx_alloc()
357 rx->last_entry = rx->dcb_entries; in sparx5_fdma_rx_alloc()
358 rx->db_index = 0; in sparx5_fdma_rx_alloc()
359 rx->dcb_index = 0; in sparx5_fdma_rx_alloc()
362 dcb = &rx->dcb_entries[idx]; in sparx5_fdma_rx_alloc()
373 skb = sparx5_fdma_rx_alloc_skb(rx); in sparx5_fdma_rx_alloc()
380 rx->skb[idx][jdx] = skb; in sparx5_fdma_rx_alloc()
382 sparx5_fdma_rx_add_dcb(rx, dcb, rx->dma + sizeof(*dcb) * idx); in sparx5_fdma_rx_alloc()
384 netif_napi_add_weight(rx->ndev, &rx->napi, sparx5_fdma_napi_callback, in sparx5_fdma_rx_alloc()
386 napi_enable(&rx->napi); in sparx5_fdma_rx_alloc()
387 sparx5_fdma_rx_activate(sparx5, rx); in sparx5_fdma_rx_alloc()
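
rx_alloc builds the whole ring up front: one contiguous dcb_entries array whose physical base becomes rx->dma, one receive skb per DB slot, and each DCB chained to the next at rx->dma + sizeof(*dcb) * idx, before the NAPI poll is registered and the channel activated. A compact user-space model of that layout and address arithmetic (the geometry and the virt_to_phys() stand-in are illustrative only):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Example geometry; the fragments do not show the real ring constants. */
    #define DEMO_DCB_MAX     8
    #define DEMO_DBS_PER_DCB 3
    #define DEMO_BUF_SIZE    2048

    struct demo_db  { uint64_t dataptr; };                          /* one buffer */
    struct demo_dcb { uint64_t nextptr; struct demo_db db[DEMO_DBS_PER_DCB]; };

    int main(void)
    {
            /* One contiguous DCB array, as in the rx_alloc fragment. */
            struct demo_dcb *dcb_entries = calloc(DEMO_DCB_MAX, sizeof(*dcb_entries));
            uint64_t dma;

            if (!dcb_entries)
                    return 1;
            dma = (uint64_t)(uintptr_t)dcb_entries; /* virt_to_phys() stand-in */

            for (int idx = 0; idx < DEMO_DCB_MAX; idx++) {
                    struct demo_dcb *dcb = &dcb_entries[idx];

                    /* One receive buffer per DB slot of this DCB. */
                    for (int jdx = 0; jdx < DEMO_DBS_PER_DCB; jdx++)
                            dcb->db[jdx].dataptr =
                                    (uint64_t)(uintptr_t)malloc(DEMO_BUF_SIZE);

                    /* Chain the previous DCB to this one: base + idx * sizeof(*dcb). */
                    if (idx > 0)
                            dcb_entries[idx - 1].nextptr = dma + sizeof(*dcb) * idx;
            }
            printf("ring of %d DCBs at 0x%llx\n", DEMO_DCB_MAX, (unsigned long long)dma);

            for (int idx = 0; idx < DEMO_DCB_MAX; idx++)
                    for (int jdx = 0; jdx < DEMO_DBS_PER_DCB; jdx++)
                            free((void *)(uintptr_t)dcb_entries[idx].db[jdx].dataptr);
            free(dcb_entries);
            return 0;
    }

A real driver would register the NAPI poll and activate the channel only after the ring is fully populated, as the fragment's ordering shows.
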
440 struct sparx5_rx *rx, int channel) in sparx5_fdma_rx_init() argument
444 rx->channel_id = channel; in sparx5_fdma_rx_init()
450 rx->ndev = port->ndev; in sparx5_fdma_rx_init()
473 napi_schedule(&sparx5->rx.napi); in sparx5_fdma_handler()
565 sparx5_fdma_rx_init(sparx5, &sparx5->rx, FDMA_XTR_CHANNEL); in sparx5_fdma_start()
569 dev_err(sparx5->dev, "Could not allocate RX buffers: %d\n", err); in sparx5_fdma_start()
589 napi_disable(&sparx5->rx.napi); in sparx5_fdma_stop()
591 sparx5_fdma_rx_deactivate(sparx5, &sparx5->rx); in sparx5_fdma_stop()
593 /* Wait for the RX channel to stop */ in sparx5_fdma_stop()
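
The stop path disables NAPI, deactivates the channel and then waits for the hardware to report the RX channel stopped. The wait itself is not among the matched lines, so the following is only a generic poll-with-timeout illustration of what such a wait typically looks like, with demo_channel_active() as a hypothetical status read:

    #include <stdbool.h>
    #include <stdio.h>

    /* Hypothetical status read; in the driver this would be a register read,
     * which the matched lines do not show.
     */
    static bool demo_channel_active(void)
    {
            static int countdown = 3;
            return --countdown > 0;
    }

    /* Generic poll-until-idle-with-timeout pattern suggested by the
     * "Wait for the RX channel to stop" comment.
     */
    static int demo_wait_channel_stopped(int max_tries)
    {
            for (int i = 0; i < max_tries; i++) {
                    if (!demo_channel_active())
                            return 0;       /* channel went idle */
                    /* a real implementation would sleep or delay between reads */
            }
            return -1;                      /* timed out */
    }

    int main(void)
    {
            printf("wait result: %d\n", demo_wait_channel_stopped(10));
            return 0;
    }
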