Lines Matching refs:put_tx_ctx

826 	struct nv_skb_map *get_tx_ctx, *put_tx_ctx;  member
1963 np->put_tx_ctx = np->tx_skb; in nv_init_tx()
2095 return (u32)(np->tx_ring_size - ((np->tx_ring_size + (np->put_tx_ctx - np->get_tx_ctx)) % np->tx_ring_size)); in nv_get_empty_tx_slots()
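
The hit at 2095 is the ring's free-slot calculation. Below is a minimal standalone sketch of that arithmetic, assuming the usual ring-buffer layout; empty_tx_slots(), put_idx, get_idx and ring_size are illustrative stand-ins (the driver works directly with the np->put_tx_ctx and np->get_tx_ctx pointers), so this models the computation rather than reproducing the driver source.

#include <stdio.h>

/* put_idx/get_idx model the offsets of np->put_tx_ctx/np->get_tx_ctx within
 * np->tx_skb; ring_size models np->tx_ring_size.  Adding ring_size before the
 * modulo keeps the dividend non-negative once the put pointer has wrapped
 * behind the get pointer. */
static unsigned int empty_tx_slots(int put_idx, int get_idx, int ring_size)
{
	return (unsigned int)(ring_size - ((ring_size + (put_idx - get_idx)) % ring_size));
}

int main(void)
{
	printf("%u\n", empty_tx_slots(5, 2, 16));  /* 3 descriptors in flight -> 13 free */
	printf("%u\n", empty_tx_slots(2, 5, 16));  /* put wrapped behind get  -> 3 free  */
	return 0;
}
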
2258 np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev, in nv_start_xmit()
2262 np->put_tx_ctx->dma))) { in nv_start_xmit()
2273 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2274 np->put_tx_ctx->dma_single = 1; in nv_start_xmit()
2275 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2283 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2284 np->put_tx_ctx = np->tx_skb; in nv_start_xmit()
2295 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit()
2298 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit()
2304 np->put_tx_ctx->dma))) { in nv_start_xmit()
2311 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit()
2313 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit()
2323 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2324 np->put_tx_ctx->dma_single = 0; in nv_start_xmit()
2325 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2332 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2333 np->put_tx_ctx = np->tx_skb; in nv_start_xmit()
2342 if (unlikely(np->put_tx_ctx == np->tx_skb)) in nv_start_xmit()
2345 prev_tx_ctx = np->put_tx_ctx - 1; in nv_start_xmit()
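
Two idioms recur through the nv_start_xmit() hits above: the post-increment-and-wrap of put_tx_ctx (2283/2284, 2332/2333) and the unwind loop (2295-2313) that releases the fragment mappings made so far when a later skb_frag_dma_map() fails and then rewinds put_tx_ctx. The standalone sketch below models both under simplified assumptions; struct tx_ctx, struct tx_ring, advance_put() and unwind_frags() are illustrative stand-ins, not the driver's own types or helpers, and the real unwind calls the driver's unmap routine instead of clearing a field.

#include <stdio.h>

/* Illustrative types only; they model struct nv_skb_map and the fe_priv
 * fields touched in the hits above. */
struct tx_ctx {
	unsigned long dma;
	unsigned int  dma_len;
	int           dma_single;
};

struct tx_ring {
	struct tx_ctx *tx_skb;      /* first context in the ring */
	struct tx_ctx *last_tx_ctx; /* last context in the ring  */
	struct tx_ctx *put_tx_ctx;  /* next context to fill      */
};

/* Models "if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) np->put_tx_ctx = np->tx_skb;" */
static void advance_put(struct tx_ring *r)
{
	if (r->put_tx_ctx++ == r->last_tx_ctx)
		r->put_tx_ctx = r->tx_skb;
}

/* Models the unwind at 2295-2313: release every mapping made since
 * start_tx_ctx, then rewind put_tx_ctx so the contexts can be reused.
 * Assumes at least one mapping succeeded, i.e. start_tx_ctx != put_tx_ctx. */
static void unwind_frags(struct tx_ring *r, struct tx_ctx *start_tx_ctx)
{
	struct tx_ctx *tmp_tx_ctx = start_tx_ctx;

	do {
		tmp_tx_ctx->dma = 0;  /* placeholder for the driver's unmap call */
		if (tmp_tx_ctx++ == r->last_tx_ctx)
			tmp_tx_ctx = r->tx_skb;
	} while (tmp_tx_ctx != r->put_tx_ctx);

	r->put_tx_ctx = start_tx_ctx;
}

int main(void)
{
	struct tx_ctx ctx[8] = { 0 };
	struct tx_ring r = { ctx, &ctx[7], ctx };
	struct tx_ctx *start;
	int i;

	advance_put(&r);            /* header buffer consumed one context   */
	start = r.put_tx_ctx;       /* first fragment would start here      */
	for (i = 0; i < 3; i++) {   /* pretend three fragment mappings made */
		r.put_tx_ctx->dma = 1;
		advance_put(&r);
	}
	unwind_frags(&r, start);    /* fourth mapping "failed": roll back   */
	printf("put rewound to index %ld\n", (long)(r.put_tx_ctx - ctx));
	return 0;
}
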
2430 start_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2435 np->put_tx_ctx->dma = dma_map_single(&np->pci_dev->dev, in nv_start_xmit_optimized()
2439 np->put_tx_ctx->dma))) { in nv_start_xmit_optimized()
2450 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2451 np->put_tx_ctx->dma_single = 1; in nv_start_xmit_optimized()
2452 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2453 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2461 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2462 np->put_tx_ctx = np->tx_skb; in nv_start_xmit_optimized()
2474 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2475 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit_optimized()
2482 np->put_tx_ctx->dma))) { in nv_start_xmit_optimized()
2489 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit_optimized()
2491 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit_optimized()
2500 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2501 np->put_tx_ctx->dma_single = 0; in nv_start_xmit_optimized()
2502 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2503 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2510 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2511 np->put_tx_ctx = np->tx_skb; in nv_start_xmit_optimized()
2520 if (unlikely(np->put_tx_ctx == np->tx_skb)) in nv_start_xmit_optimized()
2523 prev_tx_ctx = np->put_tx_ctx - 1; in nv_start_xmit_optimized()
2559 start_tx_ctx->next_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2560 np->tx_end_flip = np->put_tx_ctx; in nv_start_xmit_optimized()
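
nv_start_xmit_optimized() fills the larger extended descriptors, so each hit that stores put_tx_ctx->dma splits the (possibly 64-bit) DMA address across two 32-bit words via dma_high()/dma_low() (2452/2453 and 2502/2503). The sketch below illustrates that split; the struct layout and helper definitions here are local stand-ins rather than the driver's, and the driver additionally byte-swaps both stores with cpu_to_le32().

#include <stdint.h>
#include <stdio.h>

/* Local stand-in for the extended descriptor filled in the hits above. */
struct desc_ex {
	uint32_t bufhigh;  /* upper 32 bits of the buffer's DMA address */
	uint32_t buflow;   /* lower 32 bits of the buffer's DMA address */
};

/* Stand-ins for the driver's dma_high()/dma_low() helpers. */
static uint32_t dma_high(uint64_t addr) { return (uint32_t)(addr >> 32); }
static uint32_t dma_low(uint64_t addr)  { return (uint32_t)addr; }

int main(void)
{
	struct desc_ex put_tx;
	uint64_t dma = 0x0000000123456000ULL;  /* example bus address */

	put_tx.bufhigh = dma_high(dma);
	put_tx.buflow  = dma_low(dma);
	printf("bufhigh=0x%08x buflow=0x%08x\n", put_tx.bufhigh, put_tx.buflow);
	return 0;
}
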