Lines matching refs: tx_buff (drivers/net/ethernet/ibm/ibmvnic.c; the first column is the source line, the trailing "in ...()" names the enclosing function)
625 memset(tx_pool->tx_buff, 0, in reset_one_tx_pool()
673 kfree(tx_pool->tx_buff); in release_one_tx_pool()
704 tx_pool->tx_buff = kcalloc(num_entries, in init_one_tx_pool()
707 if (!tx_pool->tx_buff) in init_one_tx_pool()
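
Together, the three pool-management hits above cover the tx_buff array's allocation lifecycle: init_one_tx_pool() allocates it with kcalloc(), reset_one_tx_pool() re-zeroes it in place across adapter resets, and release_one_tx_pool() frees it. A minimal sketch of that lifecycle follows; the num_buffers field, the -ENOMEM convention, and the struct layouts are assumptions, since only the tx_buff operations themselves appear in the hits.

/* Lifecycle sketch condensed from the hits above. Struct layouts and
 * the num_buffers bookkeeping are assumptions; the tx_buff operations
 * themselves mirror the listing.
 */
#include <linux/skbuff.h>
#include <linux/slab.h>
#include <linux/string.h>

struct ibmvnic_tx_buff {
        struct sk_buff *skb;    /* outstanding frame, NULL when free */
        /* DMA/index bookkeeping elided here; see the xmit hits below */
};

struct ibmvnic_tx_pool {
        struct ibmvnic_tx_buff *tx_buff;        /* one slot per descriptor */
        int num_buffers;                        /* assumed capacity field */
};

static int init_one_tx_pool(struct ibmvnic_tx_pool *tx_pool, int num_entries)
{
        tx_pool->tx_buff = kcalloc(num_entries,
                                   sizeof(struct ibmvnic_tx_buff),
                                   GFP_KERNEL);
        if (!tx_pool->tx_buff)
                return -ENOMEM;         /* assumed error convention */
        tx_pool->num_buffers = num_entries;
        return 0;
}

static void reset_one_tx_pool(struct ibmvnic_tx_pool *tx_pool)
{
        /* Reset re-zeroes in place rather than freeing and reallocating. */
        memset(tx_pool->tx_buff, 0,
               tx_pool->num_buffers * sizeof(struct ibmvnic_tx_buff));
}

static void release_one_tx_pool(struct ibmvnic_tx_pool *tx_pool)
{
        kfree(tx_pool->tx_buff);
        tx_pool->tx_buff = NULL;        /* lets later NULL checks short-circuit */
}

Zeroing on reset instead of reallocating keeps resets cheap and leaves every skb slot NULL, which the clean_one_tx_pool() hits below rely on.
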
1244 struct ibmvnic_tx_buff *tx_buff; in clean_one_tx_pool() local
1248 if (!tx_pool || !tx_pool->tx_buff) in clean_one_tx_pool()
1254 tx_buff = &tx_pool->tx_buff[i]; in clean_one_tx_pool()
1255 if (tx_buff && tx_buff->skb) { in clean_one_tx_pool()
1256 dev_kfree_skb_any(tx_buff->skb); in clean_one_tx_pool()
1257 tx_buff->skb = NULL; in clean_one_tx_pool()
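
The clean_one_tx_pool() hits show the teardown of in-flight frames: after NULL-checking the pool and its array, every slot holding an outstanding skb is freed via dev_kfree_skb_any() and cleared. Reconstructed below using the struct sketches from the lifecycle example; the loop bound is an assumption. Note that tx_buff itself is the address of an array element and can never be NULL, so the extra check in the original hit is defensive only.

/* Reconstruction of clean_one_tx_pool() from the hits above; the
 * num_buffers loop bound is an assumption.
 */
static void clean_one_tx_pool(struct ibmvnic_tx_pool *tx_pool)
{
        struct ibmvnic_tx_buff *tx_buff;
        int i;

        if (!tx_pool || !tx_pool->tx_buff)
                return;

        for (i = 0; i < tx_pool->num_buffers; i++) {
                tx_buff = &tx_pool->tx_buff[i];
                if (tx_buff->skb) {
                        /* The _any variant is safe in both irq and
                         * process context, which suits a teardown path.
                         */
                        dev_kfree_skb_any(tx_buff->skb);
                        tx_buff->skb = NULL;
                }
        }
}
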
1513 struct ibmvnic_tx_buff *tx_buff = NULL; in ibmvnic_xmit() local
1599 tx_buff = &tx_pool->tx_buff[index]; in ibmvnic_xmit()
1600 tx_buff->skb = skb; in ibmvnic_xmit()
1601 tx_buff->data_dma[0] = data_dma_addr; in ibmvnic_xmit()
1602 tx_buff->data_len[0] = skb->len; in ibmvnic_xmit()
1603 tx_buff->index = index; in ibmvnic_xmit()
1604 tx_buff->pool_index = queue_num; in ibmvnic_xmit()
1605 tx_buff->last_frag = true; in ibmvnic_xmit()
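
On the transmit path, ibmvnic_xmit() claims slot `index` in the pool and records everything needed to unmap and complete the frame later: the skb itself, its DMA mapping and length, its own index, the owning queue, and a last-fragment marker. A hedged sketch of that bookkeeping, expanding the minimal struct from the lifecycle sketch with the fields these hits reveal; the array sizes, field types, and the stand-in parameters are assumptions.

/* Field names are verbatim from the ibmvnic_xmit() hits; array sizes
 * and types in this struct sketch are assumptions.
 */
#include <linux/skbuff.h>
#include <linux/dma-mapping.h>

struct ibmvnic_tx_buff {
        struct sk_buff *skb;
        dma_addr_t data_dma[1];         /* per-fragment DMA addresses */
        int data_len[1];                /* per-fragment lengths */
        int index;                      /* slot in tx_pool->tx_buff[] */
        int pool_index;                 /* owning TX queue */
        bool last_frag;
        int num_entries;                /* sub-CRQ descriptors used */
};

static void claim_tx_slot(struct ibmvnic_tx_buff *tx_buff,
                          struct sk_buff *skb, dma_addr_t data_dma_addr,
                          int index, int queue_num)
{
        tx_buff->skb = skb;
        tx_buff->data_dma[0] = data_dma_addr;
        tx_buff->data_len[0] = skb->len;
        tx_buff->index = index;
        tx_buff->pool_index = queue_num;
        tx_buff->last_frag = true;      /* whole frame in one buffer */
}

Everything needed to complete the frame lives in the slot itself, so the completion handler only needs the index echoed back by the hypervisor (see the ibmvnic_complete_tx() hit at the end of the listing).
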
1652 build_hdr_descs_arr(tx_buff, &num_entries, *hdrs); in ibmvnic_xmit()
1654 tx_buff->num_entries = num_entries; in ibmvnic_xmit()
1655 tx_buff->indir_arr[0] = tx_crq; in ibmvnic_xmit()
1656 tx_buff->indir_dma = dma_map_single(dev, tx_buff->indir_arr, in ibmvnic_xmit()
1657 sizeof(tx_buff->indir_arr), in ibmvnic_xmit()
1659 if (dma_mapping_error(dev, tx_buff->indir_dma)) { in ibmvnic_xmit()
1661 tx_buff->skb = NULL; in ibmvnic_xmit()
1670 (u64)tx_buff->indir_dma, in ibmvnic_xmit()
1672 dma_unmap_single(dev, tx_buff->indir_dma, in ibmvnic_xmit()
1673 sizeof(tx_buff->indir_arr), DMA_TO_DEVICE); in ibmvnic_xmit()
1675 tx_buff->num_entries = num_entries; in ibmvnic_xmit()
1683 tx_buff->skb = NULL; in ibmvnic_xmit()
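
When the frame needs extra header descriptors, build_hdr_descs_arr() expands it into several sub-CRQ entries in tx_buff->indir_arr; the whole array is then DMA-mapped as one block whose address, (u64)tx_buff->indir_dma, is handed to the hypervisor and unmapped afterwards. On a mapping failure the slot's skb pointer is cleared so the caller can drop the frame. The num_entries assignment appears twice in the hits, apparently once per submission branch. A sketch of the map/submit/unmap sequence; union sub_crq, the array length, and the submission step are stand-ins.

/* Indirect-descriptor path sketched from the hits above. The sub_crq
 * layout, array length, and hypervisor submission are placeholders;
 * the map/unmap calls and error handling mirror the listing.
 */
#include <linux/types.h>
#include <linux/skbuff.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

union sub_crq {                         /* placeholder descriptor layout */
        u64 words[4];
};

struct ibmvnic_tx_buff {
        struct sk_buff *skb;
        union sub_crq indir_arr[6];     /* assumed length */
        dma_addr_t indir_dma;
        int num_entries;
};

static int submit_indirect(struct device *dev,
                           struct ibmvnic_tx_buff *tx_buff,
                           union sub_crq tx_crq, int num_entries)
{
        tx_buff->num_entries = num_entries;
        tx_buff->indir_arr[0] = tx_crq; /* main descriptor leads the array */

        tx_buff->indir_dma = dma_map_single(dev, tx_buff->indir_arr,
                                            sizeof(tx_buff->indir_arr),
                                            DMA_TO_DEVICE);
        if (dma_mapping_error(dev, tx_buff->indir_dma)) {
                tx_buff->skb = NULL;    /* caller drops the frame */
                return -ENOMEM;
        }

        /* ... pass (u64)tx_buff->indir_dma to the hypervisor here ... */

        dma_unmap_single(dev, tx_buff->indir_dma,
                         sizeof(tx_buff->indir_arr), DMA_TO_DEVICE);
        return 0;
}

Mapping the array as a single block means the hypervisor can fetch all descriptors for the frame with one DMA address, at the cost of the extra map/unmap per transmit.
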
3142 txbuff = &tx_pool->tx_buff[index]; in ibmvnic_complete_tx()
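
Finally, ibmvnic_complete_tx() closes the loop: the index recorded at transmit time comes back in the completion, and indexing tx_pool->tx_buff with it recovers the slot so the skb can be freed and the slot reused. A hedged sketch; how the index is parsed out of the completion CRQ is an assumption, and the types are those of the earlier sketches.

/* Completion-side lookup sketched from the final hit; the index is
 * assumed to arrive via the completion CRQ.
 */
static void complete_one_tx(struct ibmvnic_tx_pool *tx_pool, int index)
{
        struct ibmvnic_tx_buff *txbuff = &tx_pool->tx_buff[index];

        if (txbuff->skb) {
                dev_kfree_skb_any(txbuff->skb); /* frame fully sent */
                txbuff->skb = NULL;             /* slot is free again */
        }
}
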