Lines matching refs:tx_buff

580 	memset(tx_pool->tx_buff, 0, ... in reset_one_tx_pool()
625 kfree(tx_pool->tx_buff); in release_one_tx_pool()
656 tx_pool->tx_buff = kcalloc(num_entries, in init_one_tx_pool()
659 if (!tx_pool->tx_buff) in init_one_tx_pool()
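The three pool-management hits above trace the array's lifecycle: allocated in init, zeroed in reset, freed in release. A minimal sketch of that lifecycle; the struct layouts and the num_entries parameter are assumptions, since the listing shows only these calls:

/* Sketch only: struct ibmvnic_tx_pool/ibmvnic_tx_buff layouts and
 * num_entries are assumed, not shown in the listing.
 */
static int init_one_tx_pool_sketch(struct ibmvnic_tx_pool *tx_pool,
				   int num_entries)
{
	/* Lines 656-659: allocate zeroed entries, fail on -ENOMEM. */
	tx_pool->tx_buff = kcalloc(num_entries,
				   sizeof(struct ibmvnic_tx_buff),
				   GFP_KERNEL);
	if (!tx_pool->tx_buff)
		return -ENOMEM;
	return 0;
}

static void reset_one_tx_pool_sketch(struct ibmvnic_tx_pool *tx_pool,
				     int num_entries)
{
	/* Line 580: re-zero the array in place instead of reallocating. */
	memset(tx_pool->tx_buff, 0,
	       num_entries * sizeof(struct ibmvnic_tx_buff));
}

static void release_one_tx_pool_sketch(struct ibmvnic_tx_pool *tx_pool)
{
	/* Line 625: free the array on teardown. */
	kfree(tx_pool->tx_buff);
	tx_pool->tx_buff = NULL;	/* not in the listing; defensive */
}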
1166 struct ibmvnic_tx_buff *tx_buff; in clean_one_tx_pool() local
1170 if (!tx_pool || !tx_pool->tx_buff) in clean_one_tx_pool()
1176 tx_buff = &tx_pool->tx_buff[i]; in clean_one_tx_pool()
1177 if (tx_buff && tx_buff->skb) { in clean_one_tx_pool()
1178 dev_kfree_skb_any(tx_buff->skb); in clean_one_tx_pool()
1179 tx_buff->skb = NULL; in clean_one_tx_pool()
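The clean_one_tx_pool() hits reconstruct almost verbatim into the loop below, which drops any skbs still attached to pool entries; only the entry count is assumed, since the line that derives it is not in the listing:

/* Reconstruction of clean_one_tx_pool() from lines 1166-1179.
 * num_entries stands in for however the real code sizes the loop.
 */
static void clean_one_tx_pool_sketch(struct ibmvnic_tx_pool *tx_pool,
				     int num_entries)
{
	struct ibmvnic_tx_buff *tx_buff;
	int i;

	if (!tx_pool || !tx_pool->tx_buff)
		return;

	for (i = 0; i < num_entries; i++) {
		tx_buff = &tx_pool->tx_buff[i];
		/* The listing also tests tx_buff itself (line 1177);
		 * kept for fidelity, though &array[i] cannot be NULL
		 * once tx_pool->tx_buff has been checked.
		 */
		if (tx_buff && tx_buff->skb) {
			/* _any variant: safe from any context. */
			dev_kfree_skb_any(tx_buff->skb);
			tx_buff->skb = NULL;
		}
	}
}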
1437 struct ibmvnic_tx_buff *tx_buff = NULL; in ibmvnic_xmit() local
1524 tx_buff = &tx_pool->tx_buff[index]; in ibmvnic_xmit()
1525 tx_buff->skb = skb; in ibmvnic_xmit()
1526 tx_buff->data_dma[0] = data_dma_addr; in ibmvnic_xmit()
1527 tx_buff->data_len[0] = skb->len; in ibmvnic_xmit()
1528 tx_buff->index = index; in ibmvnic_xmit()
1529 tx_buff->pool_index = queue_num; in ibmvnic_xmit()
1530 tx_buff->last_frag = true; in ibmvnic_xmit()
1577 build_hdr_descs_arr(tx_buff, &num_entries, *hdrs); in ibmvnic_xmit()
1579 tx_buff->num_entries = num_entries; in ibmvnic_xmit()
1580 tx_buff->indir_arr[0] = tx_crq; in ibmvnic_xmit()
1581 tx_buff->indir_dma = dma_map_single(dev, tx_buff->indir_arr, in ibmvnic_xmit()
1582 sizeof(tx_buff->indir_arr), in ibmvnic_xmit()
1584 if (dma_mapping_error(dev, tx_buff->indir_dma)) { in ibmvnic_xmit()
1586 tx_buff->skb = NULL; in ibmvnic_xmit()
1595 (u64)tx_buff->indir_dma, in ibmvnic_xmit()
1598 tx_buff->num_entries = num_entries; in ibmvnic_xmit()
1606 tx_buff->skb = NULL; in ibmvnic_xmit()
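The ibmvnic_xmit() hits show the per-packet fill sequence: claim the entry at index, record the skb and its DMA mapping, stage the indirect descriptor array, and map it; on a mapping error (lines 1584-1586) the skb pointer is cleared so completion skips the entry, and on success the mapped address is handed on (line 1595). A condensed sketch of that sequence, with the unwind paths and the non-indirect branch (lines 1598, 1606) elided; build_hdr_descs_arr(), hdrs, and tx_crq come from the surrounding driver code and their definitions are not in the listing:

/* Condensed sketch of the ibmvnic_xmit() fill path, lines 1437-1606.
 * Parameter types and DMA_TO_DEVICE are assumptions; the direction
 * argument falls on a line the listing does not show.
 */
static int xmit_fill_sketch(struct device *dev,
			    struct ibmvnic_tx_pool *tx_pool,
			    struct sk_buff *skb, int index, int queue_num,
			    dma_addr_t data_dma_addr, union sub_crq tx_crq,
			    u8 *hdrs)
{
	struct ibmvnic_tx_buff *tx_buff;
	int num_entries = 1;	/* assumed starting count */

	/* Lines 1524-1530: claim the pool entry and record the packet. */
	tx_buff = &tx_pool->tx_buff[index];
	tx_buff->skb = skb;
	tx_buff->data_dma[0] = data_dma_addr;
	tx_buff->data_len[0] = skb->len;
	tx_buff->index = index;
	tx_buff->pool_index = queue_num;
	tx_buff->last_frag = true;

	/* Lines 1577-1584: stage and DMA-map the indirect descriptors. */
	build_hdr_descs_arr(tx_buff, &num_entries, *hdrs);
	tx_buff->num_entries = num_entries;
	tx_buff->indir_arr[0] = tx_crq;
	tx_buff->indir_dma = dma_map_single(dev, tx_buff->indir_arr,
					    sizeof(tx_buff->indir_arr),
					    DMA_TO_DEVICE);
	if (dma_mapping_error(dev, tx_buff->indir_dma)) {
		/* Line 1586: clear the skb so completion skips the entry. */
		tx_buff->skb = NULL;
		return -ENOMEM;
	}
	return 0;
}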
2773 txbuff = &tx_pool->tx_buff[index]; in ibmvnic_complete_tx()
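Read across all the hits, the listing implies roughly the shape below for struct ibmvnic_tx_buff. Array bounds and exact types are assumptions, since only the member names appear above; sizeof(tx_buff->indir_arr) at line 1582 does imply indir_arr is a fixed-size array member rather than a pointer, and its element type is whatever tx_crq is (line 1580), which the listing never names:

/* Approximate shape implied by the member accesses in this listing.
 * Bounds and types are assumptions.
 */
struct ibmvnic_tx_buff {
	struct sk_buff *skb;		/* lines 1177-1179, 1525 */
	dma_addr_t data_dma[1];		/* line 1526; real bound unknown */
	int data_len[1];		/* line 1527; real bound unknown */
	int index;			/* line 1528 */
	int pool_index;			/* line 1529 */
	bool last_frag;			/* line 1530 */
	int num_entries;		/* lines 1579, 1598 */
	union sub_crq indir_arr[1];	/* lines 1580-1582; assumed type */
	dma_addr_t indir_dma;		/* lines 1581, 1584, 1595 */
};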