Lines Matching refs:buf_info
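Everything below touches the per-buffer bookkeeping structure shared by the rx and tx paths. A minimal sketch of struct ionic_buf_info, inferred only from the fields referenced in the lines that follow (the driver's real definition may carry additional members):

	/* Sketch: fields inferred from the references below, not the
	 * driver's authoritative definition.
	 */
	struct ionic_buf_info {
		struct page *page;	/* backing rx page; NULL means the slot is empty */
		dma_addr_t dma_addr;	/* DMA address of the rx page or tx fragment */
		u32 page_offset;	/* current offset into a recycled rx page */
		u32 len;		/* mapped length of a tx buffer */
	};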
36 struct ionic_buf_info *buf_info) in ionic_rx_page_alloc() argument
46 if (unlikely(!buf_info)) { in ionic_rx_page_alloc()
60 buf_info->dma_addr = dma_map_page(dev, page, 0, in ionic_rx_page_alloc()
62 if (unlikely(dma_mapping_error(dev, buf_info->dma_addr))) { in ionic_rx_page_alloc()
70 buf_info->page = page; in ionic_rx_page_alloc()
71 buf_info->page_offset = 0; in ionic_rx_page_alloc()
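Taken together, the ionic_rx_page_alloc() lines above are the allocate-and-map step: get a page, DMA-map the whole page, and record the mapping in buf_info with the offset reset to zero. A hedged reconstruction of its shape (the GFP mask, error codes, and the statistics/logging that the full function carries are assumptions or elisions):

	/* Sketch reconstructed from the reference lines; stats and logging elided. */
	static int ionic_rx_page_alloc(struct ionic_queue *q,
				       struct ionic_buf_info *buf_info)
	{
		struct device *dev = q->dev;		/* assumed: queue carries its DMA device */
		struct page *page;

		if (unlikely(!buf_info))
			return -EINVAL;

		page = alloc_pages(GFP_ATOMIC, 0);	/* GFP mask assumed */
		if (unlikely(!page))
			return -ENOMEM;

		buf_info->dma_addr = dma_map_page(dev, page, 0,
						  IONIC_PAGE_SIZE, DMA_FROM_DEVICE);
		if (unlikely(dma_mapping_error(dev, buf_info->dma_addr))) {
			__free_pages(page, 0);
			return -EIO;
		}

		buf_info->page = page;
		buf_info->page_offset = 0;

		return 0;
	}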
77 struct ionic_buf_info *buf_info) in ionic_rx_page_free() argument
82 if (unlikely(!buf_info)) { in ionic_rx_page_free()
88 if (!buf_info->page) in ionic_rx_page_free()
91 dma_unmap_page(dev, buf_info->dma_addr, IONIC_PAGE_SIZE, DMA_FROM_DEVICE); in ionic_rx_page_free()
92 __free_pages(buf_info->page, 0); in ionic_rx_page_free()
93 buf_info->page = NULL; in ionic_rx_page_free()
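The ionic_rx_page_free() references are the mirror image: unmap the whole page, free it, and clear the pointer so the slot reads as unallocated. A sketch under the same assumptions:

	/* Sketch: teardown path implied by the lines above. */
	static void ionic_rx_page_free(struct ionic_queue *q,
				       struct ionic_buf_info *buf_info)
	{
		struct device *dev = q->dev;	/* assumed */

		if (unlikely(!buf_info))
			return;

		if (!buf_info->page)
			return;

		dma_unmap_page(dev, buf_info->dma_addr, IONIC_PAGE_SIZE, DMA_FROM_DEVICE);
		__free_pages(buf_info->page, 0);
		buf_info->page = NULL;		/* slot now needs a fresh allocation */
	}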
97 struct ionic_buf_info *buf_info, u32 used) in ionic_rx_buf_recycle() argument
102 if (page_is_pfmemalloc(buf_info->page)) in ionic_rx_buf_recycle()
106 if (page_to_nid(buf_info->page) != numa_mem_id()) in ionic_rx_buf_recycle()
110 buf_info->page_offset += size; in ionic_rx_buf_recycle()
111 if (buf_info->page_offset >= IONIC_PAGE_SIZE) in ionic_rx_buf_recycle()
114 get_page(buf_info->page); in ionic_rx_buf_recycle()
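ionic_rx_buf_recycle() is the page-reuse policy: pages from emergency reserves or a remote NUMA node are refused, otherwise the offset is advanced past the bytes just consumed and an extra reference keeps the page alive for the next fill. A sketch; note that the driver may round 'used' up to a split granularity, which is simplified away here:

	/* Sketch: recycling policy implied by the lines above. */
	static bool ionic_rx_buf_recycle(struct ionic_queue *q,
					 struct ionic_buf_info *buf_info, u32 used)
	{
		/* don't reuse pages allocated from emergency reserves */
		if (page_is_pfmemalloc(buf_info->page))
			return false;

		/* don't reuse pages that live on a remote NUMA node */
		if (page_to_nid(buf_info->page) != numa_mem_id())
			return false;

		buf_info->page_offset += used;	/* rounding to a split size omitted */
		if (buf_info->page_offset >= IONIC_PAGE_SIZE)
			return false;		/* page exhausted; caller unmaps and drops it */

		get_page(buf_info->page);	/* hold the page for the next rx fill */

		return true;
	}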
124 struct ionic_buf_info *buf_info; in ionic_rx_frags() local
134 buf_info = &desc_info->bufs[0]; in ionic_rx_frags()
137 prefetchw(buf_info->page); in ionic_rx_frags()
149 if (unlikely(!buf_info->page)) { in ionic_rx_frags()
154 frag_len = min_t(u16, len, IONIC_PAGE_SIZE - buf_info->page_offset); in ionic_rx_frags()
158 buf_info->dma_addr + buf_info->page_offset, in ionic_rx_frags()
162 buf_info->page, buf_info->page_offset, frag_len, in ionic_rx_frags()
165 if (!ionic_rx_buf_recycle(q, buf_info, frag_len)) { in ionic_rx_frags()
166 dma_unmap_page(dev, buf_info->dma_addr, in ionic_rx_frags()
168 buf_info->page = NULL; in ionic_rx_frags()
171 buf_info++; in ionic_rx_frags()
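In ionic_rx_frags() the buf_info array is walked while the received length is carved into page fragments: each piece is synced for the CPU, attached to the skb, then either recycled in place or unmapped and dropped. A condensed sketch of just that loop (skb allocation, completion parsing, and statistics around it are elided; 'remain' is an assumed name for the unconsumed length):

	/* Sketch: fragment loop inside ionic_rx_frags(). */
	buf_info = &desc_info->bufs[0];
	prefetchw(buf_info->page);
	remain = len;				/* 'len' comes from the rx completion */

	do {
		if (unlikely(!buf_info->page))
			return NULL;		/* ring slot unexpectedly empty */

		frag_len = min_t(u16, remain, IONIC_PAGE_SIZE - buf_info->page_offset);
		remain -= frag_len;

		dma_sync_single_for_cpu(dev,
					buf_info->dma_addr + buf_info->page_offset,
					frag_len, DMA_FROM_DEVICE);

		skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags,
				buf_info->page, buf_info->page_offset, frag_len,
				IONIC_PAGE_SIZE);

		if (!ionic_rx_buf_recycle(q, buf_info, frag_len)) {
			/* page can't be reused: release the mapping and forget it */
			dma_unmap_page(dev, buf_info->dma_addr,
				       IONIC_PAGE_SIZE, DMA_FROM_DEVICE);
			buf_info->page = NULL;
		}

		buf_info++;
	} while (remain > 0);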
184 struct ionic_buf_info *buf_info; in ionic_rx_copybreak() local
192 buf_info = &desc_info->bufs[0]; in ionic_rx_copybreak()
203 if (unlikely(!buf_info->page)) { in ionic_rx_copybreak()
208 dma_sync_single_for_cpu(dev, buf_info->dma_addr + buf_info->page_offset, in ionic_rx_copybreak()
210 skb_copy_to_linear_data(skb, page_address(buf_info->page) + buf_info->page_offset, len); in ionic_rx_copybreak()
211 dma_sync_single_for_device(dev, buf_info->dma_addr + buf_info->page_offset, in ionic_rx_copybreak()
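The copybreak path sidesteps fragment bookkeeping for small packets: sync the buffer for the CPU, copy it into the skb's linear area, then hand the buffer straight back to the device, so the page and its DMA mapping stay with the ring. A sketch of the core of that path (skb allocation and the trailing protocol setup are elided; the skb_put() placement is an assumption):

	/* Sketch: small-packet copy path in ionic_rx_copybreak(). */
	dma_sync_single_for_cpu(dev, buf_info->dma_addr + buf_info->page_offset,
				len, DMA_FROM_DEVICE);
	skb_copy_to_linear_data(skb,
				page_address(buf_info->page) + buf_info->page_offset,
				len);
	dma_sync_single_for_device(dev, buf_info->dma_addr + buf_info->page_offset,
				   len, DMA_FROM_DEVICE);
	skb_put(skb, len);		/* assumed: set the linear length after the copy */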
355 struct ionic_buf_info *buf_info; in ionic_rx_fill() local
370 buf_info = &desc_info->bufs[0]; in ionic_rx_fill()
372 if (!buf_info->page) { /* alloc a new buffer? */ in ionic_rx_fill()
373 if (unlikely(ionic_rx_page_alloc(q, buf_info))) { in ionic_rx_fill()
381 desc->addr = cpu_to_le64(buf_info->dma_addr + buf_info->page_offset); in ionic_rx_fill()
382 frag_len = min_t(u16, len, IONIC_PAGE_SIZE - buf_info->page_offset); in ionic_rx_fill()
385 buf_info++; in ionic_rx_fill()
392 if (!buf_info->page) { /* alloc a new sg buffer? */ in ionic_rx_fill()
393 if (unlikely(ionic_rx_page_alloc(q, buf_info))) { in ionic_rx_fill()
400 sg_elem->addr = cpu_to_le64(buf_info->dma_addr + buf_info->page_offset); in ionic_rx_fill()
401 frag_len = min_t(u16, remain_len, IONIC_PAGE_SIZE - buf_info->page_offset); in ionic_rx_fill()
404 buf_info++; in ionic_rx_fill()
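ionic_rx_fill() posts receive buffers: the first buf_info backs the main descriptor and any remaining length spills into SG elements, each element pointing at dma_addr + page_offset and capped at whatever is left in that page. A condensed sketch of the per-descriptor logic (the outer ring loop, the doorbell, and the desc->len/sg_elem->len stores are assumptions or elisions consistent with the lines above):

	/* Sketch: fill one rx descriptor plus its SG elements. */
	buf_info = &desc_info->bufs[0];
	remain_len = len;

	if (!buf_info->page) {			/* alloc a new buffer? */
		if (unlikely(ionic_rx_page_alloc(q, buf_info)))
			return;
	}

	desc->addr = cpu_to_le64(buf_info->dma_addr + buf_info->page_offset);
	frag_len = min_t(u16, len, IONIC_PAGE_SIZE - buf_info->page_offset);
	desc->len = cpu_to_le16(frag_len);	/* field name assumed */
	remain_len -= frag_len;
	buf_info++;

	for (sg_elem = sg_desc->elems; remain_len > 0; sg_elem++) {
		if (!buf_info->page) {		/* alloc a new sg buffer? */
			if (unlikely(ionic_rx_page_alloc(q, buf_info)))
				return;
		}

		sg_elem->addr = cpu_to_le64(buf_info->dma_addr + buf_info->page_offset);
		frag_len = min_t(u16, remain_len, IONIC_PAGE_SIZE - buf_info->page_offset);
		sg_elem->len = cpu_to_le16(frag_len);	/* field name assumed */
		remain_len -= frag_len;
		buf_info++;
	}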
428 struct ionic_buf_info *buf_info; in ionic_rx_empty() local
434 buf_info = &desc_info->bufs[j]; in ionic_rx_empty()
435 if (buf_info->page) in ionic_rx_empty()
436 ionic_rx_page_free(q, buf_info); in ionic_rx_empty()
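Emptying the ring is a nested walk: every buf_info slot of every descriptor that still holds a page goes through ionic_rx_page_free(). A sketch (the descriptor array and count names are assumptions):

	/* Sketch: release every rx page still held by the ring. */
	for (i = 0; i < q->num_descs; i++) {		/* names assumed */
		desc_info = &q->info[i];
		for (j = 0; j < ARRAY_SIZE(desc_info->bufs); j++) {
			buf_info = &desc_info->bufs[j];
			if (buf_info->page)
				ionic_rx_page_free(q, buf_info);
		}
	}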
637 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_map_skb() local
650 buf_info->dma_addr = dma_addr; in ionic_tx_map_skb()
651 buf_info->len = skb_headlen(skb); in ionic_tx_map_skb()
652 buf_info++; in ionic_tx_map_skb()
662 buf_info->dma_addr = dma_addr; in ionic_tx_map_skb()
663 buf_info->len = skb_frag_size(frag); in ionic_tx_map_skb()
664 buf_info++; in ionic_tx_map_skb()
675 buf_info--; in ionic_tx_map_skb()
676 dma_unmap_page(dev, buf_info->dma_addr, in ionic_tx_map_skb()
677 buf_info->len, DMA_TO_DEVICE); in ionic_tx_map_skb()
679 dma_unmap_single(dev, buf_info->dma_addr, buf_info->len, DMA_TO_DEVICE); in ionic_tx_map_skb()
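On the tx side, ionic_tx_map_skb() fills the buf_info array in order: one dma_map_single() for the linear head, then one skb_frag_dma_map() per page fragment. If any fragment mapping fails, the loop unwinds backwards, unmapping the fragments as pages and finally the head as a single mapping. A hedged sketch (stats, skb bookkeeping, and exact error codes are elided; desc_info->nbufs matches the field used by ionic_tx_clean() below):

	/* Sketch: map the skb head and fragments, unwinding on failure. */
	static int ionic_tx_map_skb(struct ionic_queue *q, struct sk_buff *skb,
				    struct ionic_desc_info *desc_info)
	{
		struct ionic_buf_info *buf_info = desc_info->bufs;
		struct device *dev = q->dev;		/* assumed */
		dma_addr_t dma_addr;
		unsigned int nfrags;
		skb_frag_t *frag;
		int frag_idx;

		dma_addr = dma_map_single(dev, skb->data, skb_headlen(skb), DMA_TO_DEVICE);
		if (dma_mapping_error(dev, dma_addr))
			return -EIO;
		buf_info->dma_addr = dma_addr;
		buf_info->len = skb_headlen(skb);
		buf_info++;

		frag = skb_shinfo(skb)->frags;
		nfrags = skb_shinfo(skb)->nr_frags;
		for (frag_idx = 0; frag_idx < nfrags; frag_idx++, frag++) {
			dma_addr = skb_frag_dma_map(dev, frag, 0, skb_frag_size(frag),
						    DMA_TO_DEVICE);
			if (dma_mapping_error(dev, dma_addr))
				goto dma_fail;
			buf_info->dma_addr = dma_addr;
			buf_info->len = skb_frag_size(frag);
			buf_info++;
		}

		desc_info->nbufs = 1 + nfrags;
		return 0;

	dma_fail:
		/* unwind the fragment mappings already made, then the head */
		while (frag_idx > 0) {
			frag_idx--;
			buf_info--;
			dma_unmap_page(dev, buf_info->dma_addr,
				       buf_info->len, DMA_TO_DEVICE);
		}
		buf_info--;	/* back to the head buffer */
		dma_unmap_single(dev, buf_info->dma_addr, buf_info->len, DMA_TO_DEVICE);
		return -EIO;
	}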
688 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_clean() local
697 dma_unmap_single(dev, (dma_addr_t)buf_info->dma_addr, in ionic_tx_clean()
698 buf_info->len, DMA_TO_DEVICE); in ionic_tx_clean()
699 buf_info++; in ionic_tx_clean()
700 for (i = 1; i < desc_info->nbufs; i++, buf_info++) in ionic_tx_clean()
701 dma_unmap_page(dev, (dma_addr_t)buf_info->dma_addr, in ionic_tx_clean()
702 buf_info->len, DMA_TO_DEVICE); in ionic_tx_clean()
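ionic_tx_clean() undoes that mapping on completion in the same order: the first buf_info is unmapped as a single mapping (the linear head) and the remaining nbufs - 1 entries as pages. A sketch of just that portion (skb freeing and completion accounting are elided; the guard on nbufs is an assumption):

	/* Sketch: unmap a completed tx descriptor's buffers. */
	buf_info = desc_info->bufs;
	if (desc_info->nbufs) {
		dma_unmap_single(dev, (dma_addr_t)buf_info->dma_addr,
				 buf_info->len, DMA_TO_DEVICE);
		buf_info++;
		for (i = 1; i < desc_info->nbufs; i++, buf_info++)
			dma_unmap_page(dev, (dma_addr_t)buf_info->dma_addr,
				       buf_info->len, DMA_TO_DEVICE);
	}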
902 struct ionic_buf_info *buf_info; in ionic_tx_tso() local
924 buf_info = desc_info->bufs; in ionic_tx_tso()
974 frag_addr = buf_info->dma_addr; in ionic_tx_tso()
975 frag_rem = buf_info->len; in ionic_tx_tso()
976 buf_info++; in ionic_tx_tso()
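In ionic_tx_tso() the already-mapped buf_info array acts as a cursor: each entry's dma_addr/len is pulled into frag_addr/frag_rem and consumed piece by piece as the TSO segments are built. A very condensed sketch of only that consumption pattern (the header descriptors, segment math, and descriptor emission are elided; 'left' and 'chunk' are assumed names):

	/* Sketch: walk the mapped buffers while emitting TSO segments. */
	buf_info = desc_info->bufs;
	frag_addr = 0;
	frag_rem = 0;

	while (left > 0) {
		if (frag_rem == 0) {		/* advance to the next mapped buffer */
			frag_addr = buf_info->dma_addr;
			frag_rem = buf_info->len;
			buf_info++;
		}
		chunk = min_t(u32, frag_rem, left);
		/* ... emit a descriptor or SG element for [frag_addr, frag_addr + chunk) ... */
		frag_addr += chunk;
		frag_rem -= chunk;
		left -= chunk;
	}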
1022 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_calc_csum() local
1037 buf_info->dma_addr); in ionic_tx_calc_csum()
1039 desc->len = cpu_to_le16(buf_info->len); in ionic_tx_calc_csum()
1061 struct ionic_buf_info *buf_info = desc_info->bufs; in ionic_tx_calc_no_csum() local
1076 buf_info->dma_addr); in ionic_tx_calc_no_csum()
1078 desc->len = cpu_to_le16(buf_info->len); in ionic_tx_calc_no_csum()
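The two non-TSO descriptor builders, ionic_tx_calc_csum() and ionic_tx_calc_no_csum(), both use only the first buf_info: its dma_addr is folded into the descriptor command word and its len becomes the descriptor length; they differ only in opcode and checksum flags. A sketch of the shared part (the command-encoding helper and the opcode/flags values are assumptions):

	/* Sketch: non-TSO descriptor fill shared by the csum/no-csum paths. */
	cmd = encode_txq_desc_cmd(opcode, flags,	/* opcode/flags differ per variant */
				  skb_shinfo(skb)->nr_frags,
				  buf_info->dma_addr);
	desc->cmd = cpu_to_le64(cmd);			/* field name assumed */
	desc->len = cpu_to_le16(buf_info->len);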
1097 struct ionic_buf_info *buf_info = &desc_info->bufs[1]; in ionic_tx_skb_frags() local
1102 for (i = 0; i < skb_shinfo(skb)->nr_frags; i++, buf_info++, elem++) { in ionic_tx_skb_frags()
1103 elem->addr = cpu_to_le64(buf_info->dma_addr); in ionic_tx_skb_frags()
1104 elem->len = cpu_to_le16(buf_info->len); in ionic_tx_skb_frags()
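Finally, ionic_tx_skb_frags() copies the rest of the buf_info array (everything after the head at index 0) straight into the descriptor's SG list, one element per already-mapped fragment. A sketch (the SG element array name is an assumption):

	/* Sketch: one SG element per mapped skb fragment. */
	struct ionic_buf_info *buf_info = &desc_info->bufs[1];
	struct ionic_txq_sg_elem *elem = sg_desc->elems;	/* names assumed */
	unsigned int i;

	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++, buf_info++, elem++) {
		elem->addr = cpu_to_le64(buf_info->dma_addr);
		elem->len = cpu_to_le16(buf_info->len);
	}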