Searched refs:tx_desc (Results 1 – 25 of 99) sorted by relevance

/Linux-v4.19/drivers/crypto/ccp/
ccp-dmaengine.c
109 if (!async_tx_test_ack(&desc->tx_desc)) in ccp_cleanup_desc_resources()
146 desc->tx_desc.cookie, cmd); in ccp_issue_next_cmd()
153 ret, desc->tx_desc.cookie, cmd); in ccp_issue_next_cmd()
168 __func__, desc->tx_desc.cookie, cmd); in ccp_free_active_cmd()
191 struct dma_async_tx_descriptor *tx_desc; in ccp_handle_active_desc() local
210 tx_desc = &desc->tx_desc; in ccp_handle_active_desc()
212 tx_desc = NULL; in ccp_handle_active_desc()
223 desc->tx_desc.cookie, desc->status); in ccp_handle_active_desc()
225 dma_cookie_complete(tx_desc); in ccp_handle_active_desc()
226 dma_descriptor_unmap(tx_desc); in ccp_handle_active_desc()
[all …]
/Linux-v4.19/drivers/infiniband/ulp/isert/
ib_isert.c
864 isert_login_post_send(struct isert_conn *isert_conn, struct iser_tx_desc *tx_desc) in isert_login_post_send() argument
870 ib_dma_sync_single_for_device(ib_dev, tx_desc->dma_addr, in isert_login_post_send()
873 tx_desc->tx_cqe.done = isert_login_send_done; in isert_login_post_send()
876 send_wr.wr_cqe = &tx_desc->tx_cqe; in isert_login_post_send()
877 send_wr.sg_list = tx_desc->tx_sg; in isert_login_post_send()
878 send_wr.num_sge = tx_desc->num_sge; in isert_login_post_send()
891 struct iser_tx_desc *tx_desc) in __isert_create_send_desc() argument
894 memset(&tx_desc->iser_header, 0, sizeof(struct iser_ctrl)); in __isert_create_send_desc()
895 tx_desc->iser_header.flags = ISCSI_CTRL; in __isert_create_send_desc()
897 tx_desc->num_sge = 1; in __isert_create_send_desc()
[all …]
/Linux-v4.19/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
238 struct mlx4_en_tx_desc *tx_desc = ring->buf + (index << LOG_TXBB_SIZE); in mlx4_en_stamp_wqe() local
241 __be32 *ptr = (__be32 *)tx_desc; in mlx4_en_stamp_wqe()
245 if (likely((void *)tx_desc + in mlx4_en_stamp_wqe()
274 struct mlx4_en_tx_desc *tx_desc = ring->buf + (index << LOG_TXBB_SIZE); in mlx4_en_free_tx_desc() local
275 struct mlx4_wqe_data_seg *data = (void *) tx_desc + tx_info->data_offset; in mlx4_en_free_tx_desc()
305 if (likely((void *)tx_desc + in mlx4_en_free_tx_desc()
641 static void build_inline_wqe(struct mlx4_en_tx_desc *tx_desc, in build_inline_wqe() argument
646 struct mlx4_wqe_inline_seg *inl = &tx_desc->inl; in build_inline_wqe()
727 struct mlx4_en_tx_desc *tx_desc, in mlx4_en_tx_write_desc() argument
733 tx_desc->ctrl.qpn_vlan = qpn_vlan; in mlx4_en_tx_write_desc()
[all …]
/Linux-v4.19/drivers/infiniband/ulp/iser/
iser_initiator.c
164 struct iser_tx_desc *tx_desc) in iser_create_send_desc() argument
169 tx_desc->dma_addr, ISER_HEADERS_LEN, DMA_TO_DEVICE); in iser_create_send_desc()
171 memset(&tx_desc->iser_header, 0, sizeof(struct iser_ctrl)); in iser_create_send_desc()
172 tx_desc->iser_header.flags = ISER_VER; in iser_create_send_desc()
173 tx_desc->num_sge = 1; in iser_create_send_desc()
372 struct iser_tx_desc *tx_desc = &iser_task->desc; in iser_send_command() local
378 tx_desc->type = ISCSI_TX_SCSI_COMMAND; in iser_send_command()
379 tx_desc->cqe.done = iser_cmd_comp; in iser_send_command()
380 iser_create_send_desc(iser_conn, tx_desc); in iser_send_command()
420 err = iser_post_send(&iser_conn->ib_conn, tx_desc, in iser_send_command()
[all …]
iscsi_iser.h
640 int iser_post_send(struct ib_conn *ib_conn, struct iser_tx_desc *tx_desc,
653 struct iser_tx_desc *tx_desc);
678 iser_tx_next_wr(struct iser_tx_desc *tx_desc) in iser_tx_next_wr() argument
680 struct ib_send_wr *cur_wr = &tx_desc->wrs[tx_desc->wr_idx].send; in iser_tx_next_wr()
683 if (tx_desc->wr_idx) { in iser_tx_next_wr()
684 last_wr = &tx_desc->wrs[tx_desc->wr_idx - 1].send; in iser_tx_next_wr()
687 tx_desc->wr_idx++; in iser_tx_next_wr()
iscsi_iser.c
184 struct iser_tx_desc *tx_desc) in iser_initialize_task_headers() argument
201 dma_addr = ib_dma_map_single(device->ib_device, (void *)tx_desc, in iser_initialize_task_headers()
208 tx_desc->wr_idx = 0; in iser_initialize_task_headers()
209 tx_desc->mapped = true; in iser_initialize_task_headers()
210 tx_desc->dma_addr = dma_addr; in iser_initialize_task_headers()
211 tx_desc->tx_sg[0].addr = tx_desc->dma_addr; in iser_initialize_task_headers()
212 tx_desc->tx_sg[0].length = ISER_HEADERS_LEN; in iser_initialize_task_headers()
213 tx_desc->tx_sg[0].lkey = device->pd->local_dma_lkey; in iser_initialize_task_headers()
369 struct iser_tx_desc *tx_desc = &iser_task->desc; in iscsi_iser_cleanup_task() local
377 if (likely(tx_desc->mapped)) { in iscsi_iser_cleanup_task()
[all …]
iser_memory.c
389 struct iser_tx_desc *tx_desc = &iser_task->desc; in iser_reg_sig_mr() local
390 struct ib_sig_attrs *sig_attrs = &tx_desc->sig_attrs; in iser_reg_sig_mr()
404 iser_inv_rkey(iser_tx_next_wr(tx_desc), mr, cqe); in iser_reg_sig_mr()
408 wr = container_of(iser_tx_next_wr(tx_desc), struct ib_sig_handover_wr, in iser_reg_sig_mr()
443 struct iser_tx_desc *tx_desc = &iser_task->desc; in iser_fast_reg_mr() local
450 iser_inv_rkey(iser_tx_next_wr(tx_desc), mr, cqe); in iser_fast_reg_mr()
461 wr = container_of(iser_tx_next_wr(tx_desc), struct ib_reg_wr, wr); in iser_fast_reg_mr()
/Linux-v4.19/drivers/net/ethernet/intel/fm10k/
fm10k_main.c
755 struct fm10k_tx_desc *tx_desc; in fm10k_tso() local
784 tx_desc = FM10K_TX_DESC(tx_ring, tx_ring->next_to_use); in fm10k_tso()
785 tx_desc->hdrlen = hdrlen; in fm10k_tso()
786 tx_desc->mss = cpu_to_le16(skb_shinfo(skb)->gso_size); in fm10k_tso()
802 struct fm10k_tx_desc *tx_desc; in fm10k_tx_csum() local
878 tx_desc = FM10K_TX_DESC(tx_ring, tx_ring->next_to_use); in fm10k_tx_csum()
879 tx_desc->hdrlen = 0; in fm10k_tx_csum()
880 tx_desc->mss = 0; in fm10k_tx_csum()
901 struct fm10k_tx_desc *tx_desc, u16 i, in fm10k_tx_desc_push() argument
909 tx_desc->buffer_addr = cpu_to_le64(dma); in fm10k_tx_desc_push()
[all …]
/Linux-v4.19/drivers/staging/rtl8712/
rtl8712_xmit.h
52 #define tx_cmd tx_desc
92 struct tx_desc { struct
106 struct tx_desc txdesc; argument
rtl8712_xmit.c
263 struct tx_desc *ptx_desc = (struct tx_desc *)pxmitbuf->pbuf; in r8712_construct_txaggr_cmd_desc()
301 struct tx_desc *ptx_desc = (struct tx_desc *)pxmitbuf->pbuf; in r8712_append_mpdu_unit()
365 (((struct tx_desc *)pxmitbuf->pbuf)->txdw0 & 0x0000ffff); in r8712_xmitframe_aggr_next()
373 (((struct tx_desc *)pxmitbuf->pbuf)->txdw0 & 0x0000ffff); in r8712_xmitframe_aggr_next()
381 struct tx_desc *ptxdesc = pxmitbuf->pbuf; in r8712_dump_aggr_xframe()
428 struct tx_desc *ptxdesc = (struct tx_desc *)pmem; in update_txdesc()
436 struct tx_desc txdesc_mp; in update_txdesc()
438 memcpy(&txdesc_mp, ptxdesc, sizeof(struct tx_desc)); in update_txdesc()
439 memset(ptxdesc, 0, sizeof(struct tx_desc)); in update_txdesc()
560 struct tx_desc *ptxdesc_mp; in update_txdesc()
/Linux-v4.19/drivers/staging/mt7621-dma/
mtk-hsdma.c
240 struct hsdma_desc *tx_desc; in hsdma_dump_desc() local
248 tx_desc = &chan->tx_ring[i]; in hsdma_dump_desc()
253 i, tx_desc->addr0, tx_desc->flags, \ in hsdma_dump_desc()
254 tx_desc->addr1, rx_desc->addr0, rx_desc->flags); in hsdma_dump_desc()
326 struct hsdma_desc *tx_desc, *rx_desc; in mtk_hsdma_start_transfer() local
344 tx_desc->addr1 = src; in mtk_hsdma_start_transfer()
345 tx_desc->flags |= HSDMA_DESC_PLEN1(tlen); in mtk_hsdma_start_transfer()
347 tx_desc = &chan->tx_ring[chan->tx_idx]; in mtk_hsdma_start_transfer()
348 tx_desc->addr0 = src; in mtk_hsdma_start_transfer()
349 tx_desc->flags = HSDMA_DESC_PLEN0(tlen); in mtk_hsdma_start_transfer()
[all …]
/Linux-v4.19/drivers/net/ethernet/intel/ice/
ice_txrx.c
111 struct ice_tx_desc *tx_desc; in ice_clean_tx_irq() local
115 tx_desc = ICE_TX_DESC(tx_ring, i); in ice_clean_tx_irq()
153 while (tx_desc != eop_desc) { in ice_clean_tx_irq()
155 tx_desc++; in ice_clean_tx_irq()
160 tx_desc = ICE_TX_DESC(tx_ring, 0); in ice_clean_tx_irq()
175 tx_desc++; in ice_clean_tx_irq()
180 tx_desc = ICE_TX_DESC(tx_ring, 0); in ice_clean_tx_irq()
183 prefetch(tx_desc); in ice_clean_tx_irq()
1178 struct ice_tx_desc *tx_desc; in ice_tx_map() local
1191 tx_desc = ICE_TX_DESC(tx_ring, i); in ice_tx_map()
[all …]
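
The ice results above (and the i40e, i40evf and ixgbevf results further down) all show the same TX-cleanup idiom: walk the descriptor ring entry by entry and wrap back to descriptor 0 when the end of the ring is reached. Below is a minimal, self-contained C sketch of that wrap-around walk; the struct layout and the names tx_ring, desc_ring, ring_count and next_to_clean are illustrative assumptions, not code taken from any of these drivers.

/* Simplified model of a hardware TX descriptor ring (illustrative only). */
#include <stdint.h>

struct tx_desc {
	uint64_t buffer_addr;
	uint64_t cmd_type_offset_bsz;
};

struct tx_ring {
	struct tx_desc *desc_ring;   /* base of the descriptor array */
	uint16_t ring_count;         /* number of descriptors in the ring */
	uint16_t next_to_clean;      /* first descriptor not yet reclaimed */
};

/* Reclaim descriptors from next_to_clean up to (but not including) the
 * end-of-packet descriptor, wrapping to index 0 at the end of the ring --
 * the same shape as the "tx_desc++; ... tx_desc = ICE_TX_DESC(tx_ring, 0);"
 * loops in the hits above. */
static unsigned int clean_until(struct tx_ring *ring, const struct tx_desc *eop_desc)
{
	struct tx_desc *tx_desc = &ring->desc_ring[ring->next_to_clean];
	uint16_t i = ring->next_to_clean;
	unsigned int cleaned = 0;

	while (tx_desc != eop_desc) {
		/* ...unmap and free the buffer attached to this descriptor... */
		cleaned++;
		tx_desc++;
		if (++i == ring->ring_count) {   /* walked past the last entry */
			i = 0;
			tx_desc = ring->desc_ring;
		}
	}
	ring->next_to_clean = i;
	return cleaned;
}

The real drivers also maintain a parallel array of software tx_buffer state and stop after a completion budget, but the wrap-around walk is the part all of these hits share.
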
/Linux-v4.19/drivers/net/ethernet/seeq/
sgiseeq.c
95 struct sgiseeq_tx_desc *tx_desc; member
191 sp->tx_desc[i].tdma.cntinfo = TCNTINFO_INIT; in seeq_init_ring()
192 dma_sync_desc_dev(dev, &sp->tx_desc[i]); in seeq_init_ring()
225 if (sp->tx_desc[i].skb) { in seeq_purge_ring()
226 dev_kfree_skb(sp->tx_desc[i].skb); in seeq_purge_ring()
227 sp->tx_desc[i].skb = NULL; in seeq_purge_ring()
248 struct sgiseeq_tx_desc *t = gpriv->tx_desc; in sgiseeq_dump_rings()
307 hregs->tx_ndptr = VIRT_TO_DMA(sp, sp->tx_desc); in init_seeq()
436 td = &sp->tx_desc[i]; in kick_tx()
441 td = &sp->tx_desc[i]; in kick_tx()
[all …]
/Linux-v4.19/drivers/net/ethernet/marvell/mvpp2/
mvpp2_main.c
150 struct mvpp2_tx_desc *tx_desc) in mvpp2_txdesc_dma_addr_get() argument
153 return le32_to_cpu(tx_desc->pp21.buf_dma_addr); in mvpp2_txdesc_dma_addr_get()
155 return le64_to_cpu(tx_desc->pp22.buf_dma_addr_ptp) & in mvpp2_txdesc_dma_addr_get()
160 struct mvpp2_tx_desc *tx_desc, in mvpp2_txdesc_dma_addr_set() argument
169 tx_desc->pp21.buf_dma_addr = cpu_to_le32(addr); in mvpp2_txdesc_dma_addr_set()
170 tx_desc->pp21.packet_offset = offset; in mvpp2_txdesc_dma_addr_set()
174 tx_desc->pp22.buf_dma_addr_ptp &= ~cpu_to_le64(MVPP2_DESC_DMA_MASK); in mvpp2_txdesc_dma_addr_set()
175 tx_desc->pp22.buf_dma_addr_ptp |= val; in mvpp2_txdesc_dma_addr_set()
176 tx_desc->pp22.packet_offset = offset; in mvpp2_txdesc_dma_addr_set()
181 struct mvpp2_tx_desc *tx_desc) in mvpp2_txdesc_size_get() argument
[all …]
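
The mvpp2 hits illustrate a different pattern: the driver never touches tx_desc fields directly but goes through small accessors that pick the PPv2.1 or PPv2.2 descriptor layout at run time (tx_desc->pp21.* versus tx_desc->pp22.*). Here is a rough, self-contained sketch of that versioned-accessor idea; the union, field names, has_v2_hw flag and mask value are invented for illustration, and the byte-order conversion the real code does with le32/le64 helpers is omitted.

#include <stdbool.h>
#include <stdint.h>

#define DESC_DMA_MASK 0xffffffffffULL   /* placeholder mask, not the real one */

/* Two hardware revisions lay out the TX descriptor differently. */
struct txd_v1 { uint32_t buf_dma_addr;     uint8_t packet_offset; };
struct txd_v2 { uint64_t buf_dma_addr_ptp; uint8_t packet_offset; };

struct hw_tx_desc {
	union {
		struct txd_v1 v1;
		struct txd_v2 v2;
	};
};

struct port { bool has_v2_hw; };

/* Read the DMA address regardless of which layout the hardware uses,
 * mirroring the role of mvpp2_txdesc_dma_addr_get() in the hits above. */
static uint64_t txdesc_dma_addr_get(const struct port *p, const struct hw_tx_desc *d)
{
	if (!p->has_v2_hw)
		return d->v1.buf_dma_addr;
	return d->v2.buf_dma_addr_ptp & DESC_DMA_MASK;
}

/* Write the DMA address and packet offset through the same switch. */
static void txdesc_dma_addr_set(const struct port *p, struct hw_tx_desc *d,
				uint64_t addr, uint8_t offset)
{
	if (!p->has_v2_hw) {
		d->v1.buf_dma_addr = (uint32_t)addr;
		d->v1.packet_offset = offset;
	} else {
		d->v2.buf_dma_addr_ptp &= ~DESC_DMA_MASK;
		d->v2.buf_dma_addr_ptp |= addr & DESC_DMA_MASK;
		d->v2.packet_offset = offset;
	}
}

Keeping the layout switch inside a pair of accessors lets every other TX path in the driver stay layout-agnostic.
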
/Linux-v4.19/drivers/spi/
spi-pxa2xx-dma.c
146 struct dma_async_tx_descriptor *tx_desc, *rx_desc; in pxa2xx_spi_dma_prepare() local
149 tx_desc = pxa2xx_spi_dma_prepare_one(drv_data, DMA_MEM_TO_DEV, xfer); in pxa2xx_spi_dma_prepare()
150 if (!tx_desc) { in pxa2xx_spi_dma_prepare()
170 dmaengine_submit(tx_desc); in pxa2xx_spi_dma_prepare()
/Linux-v4.19/drivers/net/ethernet/intel/i40evf/
i40e_txrx.c
196 struct i40e_tx_desc *tx_desc; in i40e_clean_tx_irq() local
201 tx_desc = I40E_TX_DESC(tx_ring, i); in i40e_clean_tx_irq()
214 i40e_trace(clean_tx_irq, tx_ring, tx_desc, tx_buf); in i40e_clean_tx_irq()
241 while (tx_desc != eop_desc) { in i40e_clean_tx_irq()
243 tx_ring, tx_desc, tx_buf); in i40e_clean_tx_irq()
246 tx_desc++; in i40e_clean_tx_irq()
251 tx_desc = I40E_TX_DESC(tx_ring, 0); in i40e_clean_tx_irq()
266 tx_desc++; in i40e_clean_tx_irq()
271 tx_desc = I40E_TX_DESC(tx_ring, 0); in i40e_clean_tx_irq()
274 prefetch(tx_desc); in i40e_clean_tx_irq()
[all …]
/Linux-v4.19/drivers/net/ethernet/oki-semi/pch_gbe/
pch_gbe_main.c
1111 struct pch_gbe_tx_desc *tx_desc; in pch_gbe_tx_queue() local
1196 tx_desc = PCH_GBE_TX_DESC(*tx_ring, ring_num); in pch_gbe_tx_queue()
1197 tx_desc->buffer_addr = (buffer_info->dma); in pch_gbe_tx_queue()
1198 tx_desc->length = (tmp_skb->len); in pch_gbe_tx_queue()
1199 tx_desc->tx_words_eob = ((tmp_skb->len + 3)); in pch_gbe_tx_queue()
1200 tx_desc->tx_frame_ctrl = (frame_ctrl); in pch_gbe_tx_queue()
1201 tx_desc->gbec_status = (DSC_INIT16); in pch_gbe_tx_queue()
1469 struct pch_gbe_tx_desc *tx_desc; in pch_gbe_alloc_tx_buffers() local
1479 tx_desc = PCH_GBE_TX_DESC(*tx_ring, i); in pch_gbe_alloc_tx_buffers()
1480 tx_desc->gbec_status = (DSC_INIT16); in pch_gbe_alloc_tx_buffers()
[all …]
/Linux-v4.19/drivers/net/ethernet/hisilicon/
hip04_eth.c
134 struct tx_desc { struct
162 struct tx_desc *tx_desc; argument
371 struct tx_desc *desc; in hip04_tx_reclaim()
381 desc = &priv->tx_desc[tx_tail]; in hip04_tx_reclaim()
430 struct tx_desc *desc = &priv->tx_desc[tx_head]; in hip04_mac_start_xmit()
451 phys = priv->tx_desc_dma + tx_head * sizeof(struct tx_desc); in hip04_mac_start_xmit()
773 priv->tx_desc = dma_alloc_coherent(d, in hip04_alloc_ring()
774 TX_DESC_NUM * sizeof(struct tx_desc), in hip04_alloc_ring()
776 if (!priv->tx_desc) in hip04_alloc_ring()
803 dma_free_coherent(d, TX_DESC_NUM * sizeof(struct tx_desc), in hip04_free_ring()
[all …]
/Linux-v4.19/drivers/net/ethernet/intel/i40e/
i40e_txrx.c
102 struct i40e_tx_desc *tx_desc; in i40e_program_fdir_filter() local
137 tx_desc = I40E_TX_DESC(tx_ring, i); in i40e_program_fdir_filter()
148 tx_desc->buffer_addr = cpu_to_le64(dma); in i40e_program_fdir_filter()
154 tx_desc->cmd_type_offset_bsz = in i40e_program_fdir_filter()
163 first->next_to_watch = tx_desc; in i40e_program_fdir_filter()
786 struct i40e_tx_desc *tx_desc; in i40e_clean_tx_irq() local
791 tx_desc = I40E_TX_DESC(tx_ring, i); in i40e_clean_tx_irq()
806 i40e_trace(clean_tx_irq, tx_ring, tx_desc, tx_buf); in i40e_clean_tx_irq()
808 if (tx_head == tx_desc) in i40e_clean_tx_irq()
835 while (tx_desc != eop_desc) { in i40e_clean_tx_irq()
[all …]
/Linux-v4.19/drivers/net/ethernet/
ec_bhf.c
112 struct tx_desc { struct
153 struct tx_desc *tx_descs;
179 static void ec_bhf_send_packet(struct ec_bhf_priv *priv, struct tx_desc *desc) in ec_bhf_send_packet()
187 static int ec_bhf_desc_sent(struct tx_desc *desc) in ec_bhf_desc_sent()
295 struct tx_desc *desc; in ec_bhf_start_xmit()
370 priv->tx_dcount = priv->tx_buf.len / sizeof(struct tx_desc); in ec_bhf_setup_tx_descs()
371 priv->tx_descs = (struct tx_desc *)priv->tx_buf.buf; in ec_bhf_setup_tx_descs()
418 FIFO_SIZE * sizeof(struct tx_desc)); in ec_bhf_open()
/Linux-v4.19/drivers/staging/rtl8188eu/hal/
rtl8188eu_xmit.c
31 static void rtl8188eu_cal_txdesc_chksum(struct tx_desc *ptxdesc) in rtl8188eu_cal_txdesc_chksum()
53 struct tx_desc *ptxdesc; in rtl8188e_fill_fake_txdesc()
56 ptxdesc = (struct tx_desc *)desc; in rtl8188e_fill_fake_txdesc()
88 static void fill_txdesc_sectype(struct pkt_attrib *pattrib, struct tx_desc *ptxdesc) in fill_txdesc_sectype()
169 struct tx_desc *ptxdesc = (struct tx_desc *)pmem; in update_txdesc()
175 ptxdesc = (struct tx_desc *)(pmem+PACKET_OFFSET_SZ); in update_txdesc()
180 memset(ptxdesc, 0, sizeof(struct tx_desc)); in update_txdesc()
/Linux-v4.19/drivers/net/wireless/ti/wl18xx/
tx.c
82 struct wl1271_tx_hw_descr *tx_desc; in wl18xx_tx_complete_packet() local
95 tx_desc = (struct wl1271_tx_hw_descr *)skb->data; in wl18xx_tx_complete_packet()
112 tx_desc->hlid); in wl18xx_tx_complete_packet()
/Linux-v4.19/drivers/net/ethernet/marvell/
mvneta.c
871 int tx_desc = txq->next_desc_to_proc; in mvneta_txq_next_desc_get() local
873 txq->next_desc_to_proc = MVNETA_QUEUE_NEXT_DESC(txq, tx_desc); in mvneta_txq_next_desc_get()
874 return txq->descs + tx_desc; in mvneta_txq_next_desc_get()
1760 struct mvneta_tx_desc *tx_desc = txq->descs + in mvneta_txq_bufs_free() local
1771 if (!IS_TSO_HEADER(txq, tx_desc->buf_phys_addr)) in mvneta_txq_bufs_free()
1773 tx_desc->buf_phys_addr, in mvneta_txq_bufs_free()
1774 tx_desc->data_size, DMA_TO_DEVICE); in mvneta_txq_bufs_free()
2227 struct mvneta_tx_desc *tx_desc; in mvneta_tso_put_hdr() local
2231 tx_desc = mvneta_txq_next_desc_get(txq); in mvneta_tso_put_hdr()
2232 tx_desc->data_size = hdr_len; in mvneta_tso_put_hdr()
[all …]
/Linux-v4.19/drivers/net/ethernet/dec/tulip/
uli526x.c
116 struct tx_desc { struct
119 struct tx_desc *next_tx_desc; argument
155 struct tx_desc *first_tx_desc;
156 struct tx_desc *tx_insert_ptr;
157 struct tx_desc *tx_remove_ptr;
337 …db->desc_pool_ptr = pci_alloc_consistent(pdev, sizeof(struct tx_desc) * DESC_ALL_CNT + 0x20, &db->… in uli526x_init_one()
345 db->first_tx_desc = (struct tx_desc *) db->desc_pool_ptr; in uli526x_init_one()
424 pci_free_consistent(pdev, sizeof(struct tx_desc) * DESC_ALL_CNT + 0x20, in uli526x_init_one()
444 pci_free_consistent(db->pdev, sizeof(struct tx_desc) * in uli526x_remove_one()
596 struct tx_desc *txptr; in uli526x_start_xmit()
[all …]
/Linux-v4.19/drivers/net/ethernet/intel/ixgbevf/
ixgbevf_main.c
270 union ixgbe_adv_tx_desc *tx_desc; in ixgbevf_clean_tx_irq() local
279 tx_desc = IXGBEVF_TX_DESC(tx_ring, i); in ixgbevf_clean_tx_irq()
319 while (tx_desc != eop_desc) { in ixgbevf_clean_tx_irq()
321 tx_desc++; in ixgbevf_clean_tx_irq()
326 tx_desc = IXGBEVF_TX_DESC(tx_ring, 0); in ixgbevf_clean_tx_irq()
341 tx_desc++; in ixgbevf_clean_tx_irq()
346 tx_desc = IXGBEVF_TX_DESC(tx_ring, 0); in ixgbevf_clean_tx_irq()
350 prefetch(tx_desc); in ixgbevf_clean_tx_irq()
979 union ixgbe_adv_tx_desc *tx_desc; in ixgbevf_xmit_xdp_ring() local
1030 tx_desc = IXGBEVF_TX_DESC(ring, i); in ixgbevf_xmit_xdp_ring()
[all …]
