Lines matching refs: rdata

1114 struct xgbe_ring_data *rdata; in xgbe_free_tx_data() local
1125 rdata = XGBE_GET_DESC_DATA(ring, j); in xgbe_free_tx_data()
1126 desc_if->unmap_rdata(pdata, rdata); in xgbe_free_tx_data()
1137 struct xgbe_ring_data *rdata; in xgbe_free_rx_data() local
1148 rdata = XGBE_GET_DESC_DATA(ring, j); in xgbe_free_rx_data()
1149 desc_if->unmap_rdata(pdata, rdata); in xgbe_free_rx_data()
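
Taken together, the xgbe_free_tx_data() and xgbe_free_rx_data() matches show the same walk-and-unmap pattern over every ring entry: look up the per-descriptor xgbe_ring_data with XGBE_GET_DESC_DATA() and release its DMA resources through desc_if->unmap_rdata(). A minimal sketch of that inner loop follows; the loop bound ring->rdesc_count is an assumption for illustration, not taken from the matches above.

	for (j = 0; j < ring->rdesc_count; j++) {	/* rdesc_count: assumed ring-size field */
		rdata = XGBE_GET_DESC_DATA(ring, j);	/* per-descriptor bookkeeping entry */
		desc_if->unmap_rdata(pdata, rdata);	/* release the entry's DMA mappings */
	}
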
2308 struct xgbe_ring_data *rdata; in xgbe_rx_refresh() local
2311 rdata = XGBE_GET_DESC_DATA(ring, ring->dirty); in xgbe_rx_refresh()
2314 desc_if->unmap_rdata(pdata, rdata); in xgbe_rx_refresh()
2316 if (desc_if->map_rx_buffer(pdata, ring, rdata)) in xgbe_rx_refresh()
2319 hw_if->rx_desc_reset(pdata, rdata, ring->dirty); in xgbe_rx_refresh()
2329 rdata = XGBE_GET_DESC_DATA(ring, ring->dirty - 1); in xgbe_rx_refresh()
2331 lower_32_bits(rdata->rdesc_dma)); in xgbe_rx_refresh()
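
The xgbe_rx_refresh() matches outline the RX refill path: each dirty entry is unmapped, given a fresh receive buffer, and has its descriptor reset before the hardware tail pointer is advanced to the last refreshed descriptor. A hedged sketch of that flow, using only the calls visible above; the stop condition and the tail-register write are simplified placeholders.

	while (ring->dirty != ring->cur) {			/* assumed stop condition */
		rdata = XGBE_GET_DESC_DATA(ring, ring->dirty);

		desc_if->unmap_rdata(pdata, rdata);		/* drop the consumed buffer */

		if (desc_if->map_rx_buffer(pdata, ring, rdata))
			break;					/* allocation failed; retry on a later pass */

		hw_if->rx_desc_reset(pdata, rdata, ring->dirty);
		ring->dirty++;
	}

	/* Advance the hardware tail to the last refreshed descriptor;
	 * the actual register write is omitted here.
	 */
	rdata = XGBE_GET_DESC_DATA(ring, ring->dirty - 1);
	/* ... write lower_32_bits(rdata->rdesc_dma) to the channel's RX tail register ... */
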
2336 struct xgbe_ring_data *rdata, in xgbe_create_skb() argument
2342 skb = napi_alloc_skb(napi, rdata->rx.hdr.dma_len); in xgbe_create_skb()
2349 dma_sync_single_range_for_cpu(pdata->dev, rdata->rx.hdr.dma_base, in xgbe_create_skb()
2350 rdata->rx.hdr.dma_off, in xgbe_create_skb()
2351 rdata->rx.hdr.dma_len, DMA_FROM_DEVICE); in xgbe_create_skb()
2353 packet = page_address(rdata->rx.hdr.pa.pages) + in xgbe_create_skb()
2354 rdata->rx.hdr.pa.pages_offset; in xgbe_create_skb()
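
xgbe_create_skb() turns the DMA header buffer into an skb: allocate from the NAPI page cache, sync the header region for CPU access, then copy out of the backing page. A sketch under those assumptions; skb_copy_to_linear_data() and skb_put() are standard kernel helpers that do not appear in the matches above, and len stands for the buffer-1 length computed by the caller.

	struct sk_buff *skb;
	u8 *packet;

	skb = napi_alloc_skb(napi, rdata->rx.hdr.dma_len);
	if (!skb)
		return NULL;

	/* Make the header buffer visible to the CPU before copying. */
	dma_sync_single_range_for_cpu(pdata->dev, rdata->rx.hdr.dma_base,
				      rdata->rx.hdr.dma_off,
				      rdata->rx.hdr.dma_len, DMA_FROM_DEVICE);

	packet = page_address(rdata->rx.hdr.pa.pages) +
		 rdata->rx.hdr.pa.pages_offset;
	skb_copy_to_linear_data(skb, packet, len);
	skb_put(skb, len);

	return skb;
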
2361 static unsigned int xgbe_rx_buf1_len(struct xgbe_ring_data *rdata, in xgbe_rx_buf1_len() argument
2369 if (rdata->rx.hdr_len) in xgbe_rx_buf1_len()
2370 return rdata->rx.hdr_len; in xgbe_rx_buf1_len()
2376 return rdata->rx.hdr.dma_len; in xgbe_rx_buf1_len()
2381 return min_t(unsigned int, rdata->rx.hdr.dma_len, rdata->rx.len); in xgbe_rx_buf1_len()
2384 static unsigned int xgbe_rx_buf2_len(struct xgbe_ring_data *rdata, in xgbe_rx_buf2_len() argument
2390 return rdata->rx.buf.dma_len; in xgbe_rx_buf2_len()
2395 return rdata->rx.len - len; in xgbe_rx_buf2_len()
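
xgbe_rx_buf1_len() and xgbe_rx_buf2_len() decide how a received frame is split between the small header buffer and the larger data buffer. A rough sketch of that decision logic; the first_descriptor/last_descriptor flags stand in for packet-attribute checks that are not part of the matches above.

	/* Buffer 1 (header buffer):
	 *  - nothing unless this is the first descriptor of the frame,
	 *  - the reported split-header length if the hardware split the header,
	 *  - the whole header buffer if more descriptors follow,
	 *  - otherwise only the part of it the frame actually used.
	 */
	if (!first_descriptor)					/* assumed attribute check */
		buf1_len = 0;
	else if (rdata->rx.hdr_len)
		buf1_len = rdata->rx.hdr_len;
	else if (!last_descriptor)				/* assumed attribute check */
		buf1_len = rdata->rx.hdr.dma_len;
	else
		buf1_len = min_t(unsigned int, rdata->rx.hdr.dma_len,
				 rdata->rx.len);

	/* Buffer 2 (data buffer): the full buffer on intermediate
	 * descriptors, the remainder of the frame on the last one.
	 */
	buf2_len = last_descriptor ? rdata->rx.len - len
				   : rdata->rx.buf.dma_len;
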
2404 struct xgbe_ring_data *rdata; in xgbe_tx_poll() local
2427 rdata = XGBE_GET_DESC_DATA(ring, ring->dirty); in xgbe_tx_poll()
2428 rdesc = rdata->rdesc; in xgbe_tx_poll()
2441 tx_packets += rdata->tx.packets; in xgbe_tx_poll()
2442 tx_bytes += rdata->tx.bytes; in xgbe_tx_poll()
2446 desc_if->unmap_rdata(pdata, rdata); in xgbe_tx_poll()
2447 hw_if->tx_desc_reset(rdata); in xgbe_tx_poll()
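
In xgbe_tx_poll() the same per-entry lookup drives TX completion: walk forward from ring->dirty, stop at descriptors the hardware still owns, add up the per-entry packet and byte counters, then unmap and reset each finished entry. A condensed sketch; the ownership test and processing budget are simplified assumptions.

	while (ring->dirty != ring->cur) {			/* assumed stop condition */
		rdata = XGBE_GET_DESC_DATA(ring, ring->dirty);
		rdesc = rdata->rdesc;

		/* Ownership check on rdesc omitted: stop if the hardware
		 * has not finished with this descriptor yet.
		 */

		tx_packets += rdata->tx.packets;		/* completion statistics */
		tx_bytes += rdata->tx.bytes;

		desc_if->unmap_rdata(pdata, rdata);		/* free the TX buffer mapping */
		hw_if->tx_desc_reset(rdata);

		ring->dirty++;
	}
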
2474 struct xgbe_ring_data *rdata; in xgbe_rx_poll() local
2496 rdata = XGBE_GET_DESC_DATA(ring, ring->cur); in xgbe_rx_poll()
2502 if (!received && rdata->state_saved) { in xgbe_rx_poll()
2503 skb = rdata->state.skb; in xgbe_rx_poll()
2504 error = rdata->state.error; in xgbe_rx_poll()
2505 len = rdata->state.len; in xgbe_rx_poll()
2514 rdata = XGBE_GET_DESC_DATA(ring, ring->cur); in xgbe_rx_poll()
2548 buf1_len = xgbe_rx_buf1_len(rdata, packet); in xgbe_rx_poll()
2550 buf2_len = xgbe_rx_buf2_len(rdata, packet, len); in xgbe_rx_poll()
2553 if (buf2_len > rdata->rx.buf.dma_len) { in xgbe_rx_poll()
2562 skb = xgbe_create_skb(pdata, napi, rdata, in xgbe_rx_poll()
2572 rdata->rx.buf.dma_base, in xgbe_rx_poll()
2573 rdata->rx.buf.dma_off, in xgbe_rx_poll()
2574 rdata->rx.buf.dma_len, in xgbe_rx_poll()
2578 rdata->rx.buf.pa.pages, in xgbe_rx_poll()
2579 rdata->rx.buf.pa.pages_offset, in xgbe_rx_poll()
2581 rdata->rx.buf.dma_len); in xgbe_rx_poll()
2582 rdata->rx.buf.pa.pages = NULL; in xgbe_rx_poll()
2657 rdata = XGBE_GET_DESC_DATA(ring, ring->cur); in xgbe_rx_poll()
2658 rdata->state_saved = 1; in xgbe_rx_poll()
2659 rdata->state.skb = skb; in xgbe_rx_poll()
2660 rdata->state.len = len; in xgbe_rx_poll()
2661 rdata->state.error = error; in xgbe_rx_poll()
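
The xgbe_rx_poll() matches cover two related uses of rdata. The data buffer is handed to the stack without copying, as a page fragment, after a CPU sync; and a frame that runs out of NAPI budget mid-reception is parked in the ring entry itself via state_saved/state, then restored at the top of the next poll. A sketch of both, using only the fields shown above; skb_add_rx_frag() and skb_shinfo() are standard kernel helpers not shown in the listing, and buf2_len is the buffer-2 length computed earlier.

	/* On entry: resume a frame cut off by the previous poll. */
	rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
	if (!received && rdata->state_saved) {
		skb = rdata->state.skb;
		error = rdata->state.error;
		len = rdata->state.len;
	}

	/* Attach the data buffer page as a fragment (no copy). */
	dma_sync_single_range_for_cpu(pdata->dev,
				      rdata->rx.buf.dma_base,
				      rdata->rx.buf.dma_off,
				      rdata->rx.buf.dma_len,
				      DMA_FROM_DEVICE);
	skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags,
			rdata->rx.buf.pa.pages,
			rdata->rx.buf.pa.pages_offset,
			buf2_len, rdata->rx.buf.dma_len);
	rdata->rx.buf.pa.pages = NULL;		/* page reference now owned by the skb */

	/* On exit with a partial frame: stash progress for the next poll. */
	rdata = XGBE_GET_DESC_DATA(ring, ring->cur);
	rdata->state_saved = 1;
	rdata->state.skb = skb;
	rdata->state.len = len;
	rdata->state.error = error;
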
2741 struct xgbe_ring_data *rdata; in xgbe_dump_tx_desc() local
2745 rdata = XGBE_GET_DESC_DATA(ring, idx); in xgbe_dump_tx_desc()
2746 rdesc = rdata->rdesc; in xgbe_dump_tx_desc()
2761 struct xgbe_ring_data *rdata; in xgbe_dump_rx_desc() local
2764 rdata = XGBE_GET_DESC_DATA(ring, idx); in xgbe_dump_rx_desc()
2765 rdesc = rdata->rdesc; in xgbe_dump_rx_desc()
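
The two dump helpers show the read-only side of the same lookup: fetch the entry at idx, dereference its hardware descriptor, and print the raw descriptor words. A minimal sketch; the netdev handle, the desc0..desc3 field names, and the print format are assumptions for illustration.

	struct xgbe_ring_data *rdata;
	struct xgbe_ring_desc *rdesc;

	rdata = XGBE_GET_DESC_DATA(ring, idx);
	rdesc = rdata->rdesc;

	/* Dump the four 32-bit descriptor words for debugging. */
	netdev_dbg(netdev, "desc[%u]: %08x %08x %08x %08x\n", idx,
		   le32_to_cpu(rdesc->desc0), le32_to_cpu(rdesc->desc1),
		   le32_to_cpu(rdesc->desc2), le32_to_cpu(rdesc->desc3));
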