Lines matching refs: put_tx_ctx
826 struct nv_skb_map *get_tx_ctx, *put_tx_ctx; member
1945 np->put_tx_ctx = np->tx_skb; in nv_init_tx()
2077 return (u32)(np->tx_ring_size - ((np->tx_ring_size + (np->put_tx_ctx - np->get_tx_ctx)) % np->tx_ring_size)); in nv_get_empty_tx_slots()
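
The expression at line 2077 computes how many descriptors are still free: np->put_tx_ctx - np->get_tx_ctx is the pointer distance between producer and consumer, which can be negative once the producer wraps; adding np->tx_ring_size before the modulo normalizes it to the number of in-flight slots, and subtracting that from the ring size gives the free count (an empty ring reports the full ring size). A minimal userspace sketch of the same arithmetic, using simplified stand-in names rather than the driver's types:

/* Minimal userspace sketch of the free-slot arithmetic in
 * nv_get_empty_tx_slots(); names and types are simplified stand-ins,
 * not the driver's real structures.
 */
#include <assert.h>
#include <stdio.h>

static unsigned int empty_tx_slots(int ring_size, int put_idx, int get_idx)
{
	/* (put - get) may be negative after the producer wraps past the end
	 * of the ring; adding ring_size before the modulo keeps the result
	 * in [0, ring_size). That value is the number of slots in flight,
	 * and ring_size minus it is the number still free.
	 */
	int in_flight = (ring_size + (put_idx - get_idx)) % ring_size;

	return (unsigned int)(ring_size - in_flight);
}

int main(void)
{
	/* The producer has wrapped: put = 2, get = 250 on a 256-entry ring,
	 * so 8 descriptors are outstanding and 248 slots are free.
	 */
	assert(empty_tx_slots(256, 2, 250) == 248);
	printf("free slots: %u\n", empty_tx_slots(256, 2, 250));
	return 0;
}
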
2234 np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev, in nv_start_xmit()
2238 np->put_tx_ctx->dma))) { in nv_start_xmit()
2246 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2247 np->put_tx_ctx->dma_single = 1; in nv_start_xmit()
2248 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2256 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2257 np->put_tx_ctx = np->tx_skb; in nv_start_xmit()
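
Lines 2256-2257 (repeated at 2302-2303, 2414-2415 and 2460-2461) advance the producer context through the np->tx_skb array and wrap it back to the first entry once the last one has been used. A short sketch of that post-increment-and-wrap idiom, with simplified stand-in types in place of struct nv_skb_map and struct fe_priv:

/* Sketch of the post-increment-and-wrap idiom on the tx context array;
 * struct tx_ctx and struct tx_state are simplified stand-ins for the
 * driver's struct nv_skb_map entries and struct fe_priv bookkeeping.
 */
struct tx_ctx {
	unsigned long long dma;     /* mapped buffer address */
	unsigned int dma_len;       /* mapped length */
	int dma_single;             /* 1: dma_map_single(), 0: fragment */
};

struct tx_state {
	struct tx_ctx *tx_skb;      /* base of the context array */
	struct tx_ctx *last_tx_ctx; /* last valid entry */
	struct tx_ctx *put_tx_ctx;  /* next entry to fill */
};

static void advance_put_ctx(struct tx_state *s)
{
	/* Use the current entry, then step forward; if the entry just used
	 * was the last one, wrap back to the start of the array, mirroring
	 * "if (np->put_tx_ctx++ == np->last_tx_ctx) np->put_tx_ctx = np->tx_skb;".
	 */
	if (s->put_tx_ctx++ == s->last_tx_ctx)
		s->put_tx_ctx = s->tx_skb;
}
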
2268 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit()
2271 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit()
2277 np->put_tx_ctx->dma))) { in nv_start_xmit()
2284 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit()
2286 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit()
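
Lines 2268-2286 show the recovery path when skb_frag_dma_map() fails partway through a packet's fragments: the loop start is remembered in start_tx_ctx, every mapping made since then is undone while walking forward with the same wrap rule as above, and put_tx_ctx is rewound so the slots can be reused. A standalone model of that unwind, with stand-in names in place of the driver's structures and DMA helpers:

/* Standalone model of the unwind-on-failure walk: remember where this
 * packet's fragment mappings began, undo everything mapped so far when a
 * mapping fails, then rewind put_tx_ctx. All names are stand-ins for the
 * driver's structures and DMA helpers, and the model assumes put_tx_ctx
 * has advanced at least one entry past start_tx_ctx.
 */
#define RING_SIZE 8

struct tx_ctx {
	unsigned long long dma;
	unsigned int dma_len;
};

struct tx_state {
	struct tx_ctx ring[RING_SIZE];
	struct tx_ctx *tx_skb;      /* == ring */
	struct tx_ctx *last_tx_ctx; /* == ring + RING_SIZE - 1 */
	struct tx_ctx *put_tx_ctx;  /* next entry to fill */
};

static void unmap_ctx(struct tx_ctx *ctx)
{
	ctx->dma = 0;               /* stand-in for dma_unmap_single()/page() */
	ctx->dma_len = 0;
}

static void unwind_fragments(struct tx_state *s, struct tx_ctx *start_tx_ctx)
{
	struct tx_ctx *tmp_tx_ctx = start_tx_ctx;

	/* Walk from the first fragment mapped for this packet up to, but
	 * not including, the entry whose mapping failed, using the same
	 * wrap rule as the mapping loop itself.
	 */
	do {
		unmap_ctx(tmp_tx_ctx);
		if (tmp_tx_ctx++ == s->last_tx_ctx)
			tmp_tx_ctx = s->tx_skb;
	} while (tmp_tx_ctx != s->put_tx_ctx);

	/* Rewind so the next packet starts from a clean slot. */
	s->put_tx_ctx = start_tx_ctx;
}
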
2293 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2294 np->put_tx_ctx->dma_single = 0; in nv_start_xmit()
2295 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2302 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2303 np->put_tx_ctx = np->tx_skb; in nv_start_xmit()
2312 if (unlikely(np->put_tx_ctx == np->tx_skb)) in nv_start_xmit()
2315 prev_tx_ctx = np->put_tx_ctx - 1; in nv_start_xmit()
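
Lines 2312-2315 (and 2470-2473 in the optimized path) step back to the context that was just filled so the last-fragment flags can be set on it: if put_tx_ctx has already wrapped to the start of the array, the previous entry is last_tx_ctx, otherwise it is simply put_tx_ctx - 1. Sketched with a stand-in type:

/* Sketch of the step back to the entry just filled; struct tx_ctx is a
 * stand-in for struct nv_skb_map.
 */
struct tx_ctx {
	unsigned int dma_len;
};

static struct tx_ctx *prev_ctx(struct tx_ctx *put_tx_ctx,
			       struct tx_ctx *tx_skb,
			       struct tx_ctx *last_tx_ctx)
{
	/* put_tx_ctx already points at the next free entry, so the context
	 * of the descriptor just written is one step behind it, wrapping
	 * to last_tx_ctx when put_tx_ctx sits at the start of the array.
	 */
	return (put_tx_ctx == tx_skb) ? last_tx_ctx : put_tx_ctx - 1;
}
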
2386 start_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2391 np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev, in nv_start_xmit_optimized()
2395 np->put_tx_ctx->dma))) { in nv_start_xmit_optimized()
2403 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2404 np->put_tx_ctx->dma_single = 1; in nv_start_xmit_optimized()
2405 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2406 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
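
In the optimized ring format the mapped bus address is 64 bits wide, so lines 2405-2406 (and 2452-2453) split it into the bufhigh/buflow descriptor fields through the driver's dma_high()/dma_low() helpers before the cpu_to_le32() conversion. A sketch of that split using plain C99 types instead of dma_addr_t:

/* Sketch of splitting a 64-bit bus address into the bufhigh/buflow
 * descriptor fields of the optimized ring format. dma_high()/dma_low()
 * here mirror the driver's helpers of the same names but use plain C99
 * types; in the driver they take a dma_addr_t and the results are then
 * byte-swapped with cpu_to_le32() for the little-endian descriptors.
 */
#include <stdint.h>

static inline uint32_t dma_low(uint64_t addr)
{
	return (uint32_t)addr;            /* lower 32 bits */
}

static inline uint32_t dma_high(uint64_t addr)
{
	return (uint32_t)(addr >> 32);    /* upper 32 bits; 0 for 32-bit addresses */
}
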
2414 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2415 np->put_tx_ctx = np->tx_skb; in nv_start_xmit_optimized()
2427 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2428 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit_optimized()
2435 np->put_tx_ctx->dma))) { in nv_start_xmit_optimized()
2442 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit_optimized()
2444 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit_optimized()
2450 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2451 np->put_tx_ctx->dma_single = 0; in nv_start_xmit_optimized()
2452 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2453 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2460 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2461 np->put_tx_ctx = np->tx_skb; in nv_start_xmit_optimized()
2470 if (unlikely(np->put_tx_ctx == np->tx_skb)) in nv_start_xmit_optimized()
2473 prev_tx_ctx = np->put_tx_ctx - 1; in nv_start_xmit_optimized()
2509 start_tx_ctx->next_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2510 np->tx_end_flip = np->put_tx_ctx; in nv_start_xmit_optimized()
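
Lines 2509-2510 record, for a packet queued beyond the in-flight limit, where the next packet's contexts begin (next_tx_ctx) and where the chain of deferred packets ends (tx_end_flip); such a packet is built with the VALID bit withheld on its first descriptor so the NIC does not pick it up yet. The model below shows how a completion handler could consume those fields; tx_change_owner, first_tx_desc and the VALID flag do not appear in this listing and are assumptions about the surrounding driver code, simplified into standalone types:

/* Hedged model of how the completion side might consume next_tx_ctx and
 * tx_end_flip: follow the chain from the oldest deferred packet, hand
 * its first descriptor to the NIC by setting the withheld VALID bit,
 * and stop once tx_end_flip is reached. This is a paraphrase, not the
 * driver's verbatim code.
 */
#include <stdint.h>
#include <stddef.h>

#define TX_VALID 0x80000000u            /* stand-in for the descriptor's VALID flag */

struct tx_desc {
	uint32_t flaglen;
};

struct tx_ctx {
	struct tx_desc *first_tx_desc;  /* first descriptor of a deferred packet */
	struct tx_ctx *next_tx_ctx;     /* where the next packet's contexts begin */
};

struct tx_state {
	struct tx_ctx *tx_change_owner; /* oldest deferred packet, or NULL */
	struct tx_ctx *tx_end_flip;     /* end of the deferred chain */
};

static void flip_one_deferred(struct tx_state *s)
{
	if (!s->tx_change_owner)
		return;

	/* Hand the deferred packet's first descriptor to the NIC. */
	s->tx_change_owner->first_tx_desc->flaglen |= TX_VALID;

	/* Advance to the next deferred packet, if any remain. */
	s->tx_change_owner = s->tx_change_owner->next_tx_ctx;
	if (s->tx_change_owner == s->tx_end_flip)
		s->tx_change_owner = NULL;
}
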