| /Linux-v5.10/drivers/infiniband/sw/rdmavt/ |
| D | trace_tx.h |
|      108  __field(int, send_flags)
|      129  __entry->send_flags = wqe->wr.send_flags;
|      138  __entry->send_flags,
|      170  __field(int, send_flags)
|      182  __entry->send_flags = wqe->wr.send_flags;
|      195  __entry->send_flags
|
| /Linux-v5.10/net/rds/ |
| D | ib_send.c |
|      273  if (send->s_wr.send_flags & IB_SEND_SIGNALED)  in rds_ib_send_cqe_handler()
|      466  send->s_wr.send_flags |= IB_SEND_SIGNALED;  in rds_ib_set_wr_signal_state()
|      501  int send_flags = 0;  in rds_ib_xmit()  local
|      617  send_flags = IB_SEND_FENCE;  in rds_ib_xmit()
|      629  send->s_wr.send_flags = send_flags;  in rds_ib_xmit()
|      673  send->s_wr.send_flags |= IB_SEND_SOLICITED;  in rds_ib_xmit()
|      676  if (send->s_wr.send_flags & IB_SEND_SIGNALED)  in rds_ib_xmit()
|      711  prev->s_wr.send_flags |= IB_SEND_SOLICITED;  in rds_ib_xmit()
|      712  if (!(prev->s_wr.send_flags & IB_SEND_SIGNALED))  in rds_ib_xmit()
|      793  send->s_wr.send_flags = 0;  in rds_ib_xmit_atomic()
|      [all …]
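
The rds_ib_xmit() hits above combine the three generic flags most ULPs use: IB_SEND_FENCE to order a send behind an earlier RDMA operation, IB_SEND_SOLICITED on the last fragment of a message, and IB_SEND_SIGNALED only on work requests that should generate a completion. A minimal sketch of that shape (hypothetical helper, not the RDS code itself):

    #include <rdma/ib_verbs.h>

    /* Sketch only: fence after a prior RDMA op, solicit on the last fragment,
     * and signal selectively so most WRs complete silently. */
    static void fill_send_wr(struct ib_send_wr *wr, struct ib_sge *sge,
                             bool after_rdma, bool last_frag, bool signal)
    {
            wr->opcode     = IB_WR_SEND;
            wr->sg_list    = sge;
            wr->num_sge    = 1;
            wr->send_flags = 0;

            if (after_rdma)         /* order behind an outstanding RDMA read/atomic */
                    wr->send_flags |= IB_SEND_FENCE;
            if (last_frag)          /* wake a peer that armed its CQ with IB_CQ_SOLICITED */
                    wr->send_flags |= IB_SEND_SOLICITED;
            if (signal)             /* otherwise this WR generates no CQE */
                    wr->send_flags |= IB_SEND_SIGNALED;
    }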
|
| D | ib_frmr.c | 162 reg_wr.wr.send_flags = IB_SEND_SIGNALED; in rds_ib_post_reg_frmr() 291 s_wr->send_flags = IB_SEND_SIGNALED; in rds_ib_post_inv()
|
| /Linux-v5.10/drivers/infiniband/hw/mlx5/ |
| D | wr.c |
|      111  if (wr->send_flags & IB_SEND_IP_CSUM)  in set_eth_seg()
|      328  if (wr->send_flags & MLX5_IB_SEND_UMR_FAIL_IF_FREE)  in set_reg_umr_segment()
|      337  if (wr->send_flags & MLX5_IB_SEND_UMR_UPDATE_XLT) {  in set_reg_umr_segment()
|      344  if (wr->send_flags & MLX5_IB_SEND_UMR_UPDATE_TRANSLATION)  in set_reg_umr_segment()
|      346  if (wr->send_flags & MLX5_IB_SEND_UMR_UPDATE_PD_ACCESS) {  in set_reg_umr_segment()
|      353  if (wr->send_flags & MLX5_IB_SEND_UMR_ENABLE_MR)  in set_reg_umr_segment()
|      355  if (wr->send_flags & MLX5_IB_SEND_UMR_DISABLE_MR)  in set_reg_umr_segment()
|      408  if (wr->send_flags & MLX5_IB_SEND_UMR_DISABLE_MR)  in set_reg_mkey_segment()
|      427  if (wr->send_flags & MLX5_IB_SEND_UMR_UPDATE_TRANSLATION &&  in set_reg_mkey_segment()
|      877  if (unlikely(wr->wr.send_flags & IB_SEND_INLINE)) {  in set_reg_wr()
|      [all …]
|
| D | mr.c |
|      1094  wr.wr.send_flags = MLX5_IB_SEND_UMR_UPDATE_XLT;  in mlx5_ib_update_xlt()
|      1096  wr.wr.send_flags |= MLX5_IB_SEND_UMR_FAIL_IF_FREE;  in mlx5_ib_update_xlt()
|      1131  wr.wr.send_flags |=  in mlx5_ib_update_xlt()
|      1137  wr.wr.send_flags |=  in mlx5_ib_update_xlt()
|      1140  wr.wr.send_flags |=  in mlx5_ib_update_xlt()
|      1480  umrwr.wr.send_flags = MLX5_IB_SEND_UMR_DISABLE_MR |  in mlx5_mr_cache_invalidate()
|      1497  umrwr.wr.send_flags = MLX5_IB_SEND_UMR_FAIL_IF_FREE;  in rereg_umr()
|      1505  umrwr.wr.send_flags |= MLX5_IB_SEND_UMR_UPDATE_PD_ACCESS;  in rereg_umr()
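
Unlike the generic IB_SEND_* bits elsewhere in this list, the MLX5_IB_SEND_UMR_* values are driver-private: mlx5 reuses the send_flags word of a UMR work request posted on its internal QP to tell set_reg_umr_segment()/set_reg_mkey_segment() (wr.c above) which mkey attributes to update. A rough sketch, using the driver-internal mlx5_umr_wr wrapper visible in the mr.c hits (the condition name is an assumption):

    struct mlx5_umr_wr umrwr = {};

    umrwr.wr.opcode     = MLX5_IB_WR_UMR;              /* driver-private opcode */
    umrwr.wr.send_flags = MLX5_IB_SEND_UMR_UPDATE_XLT; /* rewrite the translation table */
    if (!creating_new_mkey)                            /* assumed condition */
            umrwr.wr.send_flags |= MLX5_IB_SEND_UMR_FAIL_IF_FREE;
    /* These bits are consumed inside mlx5's own post-send path; they never
     * describe on-the-wire behaviour the way the IB_SEND_* flags do. */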
|
| /Linux-v5.10/drivers/infiniband/hw/hfi1/ |
| D | uc.c |
|      138  if (!(wqe->wr.send_flags & RVT_SEND_COMPLETION_ONLY)) {  in hfi1_make_uc_req()
|      176  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in hfi1_make_uc_req()
|      204  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in hfi1_make_uc_req()
|      235  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in hfi1_make_uc_req()
|      260  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in hfi1_make_uc_req()
|
| D | trace_tx.h |
|      792  __field(int, send_flags)
|      804  __entry->send_flags = wqe->wr.send_flags;
|      817  __entry->send_flags
|
| /Linux-v5.10/drivers/infiniband/hw/qib/ |
| D | qib_uc.c |
|      121  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in qib_make_uc_req()
|      149  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in qib_make_uc_req()
|      179  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in qib_make_uc_req()
|      203  if (wqe->wr.send_flags & IB_SEND_SOLICITED)  in qib_make_uc_req()
|
| D | qib_ud.c | 214 rvt_recv_cq(qp, &wc, swqe->wr.send_flags & IB_SEND_SOLICITED); in qib_ud_loopback() 361 if (wqe->wr.send_flags & IB_SEND_SOLICITED) in qib_make_ud_req()
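
In these hfi1/qib request builders, IB_SEND_SOLICITED from the WQE becomes the solicited-event bit of the packet's BTH (and, in qib_ud_loopback(), the solicited argument passed to rvt_recv_cq()). The consumer-side counterpart, as a rough illustration ('recv_cq' and 'qp' are assumed to exist):

    struct ib_send_wr wr = {
            .opcode     = IB_WR_SEND,
            .send_flags = IB_SEND_SOLICITED | IB_SEND_SIGNALED,
    };
    const struct ib_send_wr *bad_wr;
    int ret;

    /* Receiver side: only wake for solicited or error completions. */
    ib_req_notify_cq(recv_cq, IB_CQ_SOLICITED);

    /* Sender side: this message raises a solicited event at the peer. */
    ret = ib_post_send(qp, &wr, &bad_wr);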
|
| /Linux-v5.10/drivers/infiniband/ulp/iser/ |
| D | iser_memory.c |
|      227  inv_wr->send_flags = 0;  in iser_inv_rkey()
|      271  wr->wr.send_flags = 0;  in iser_reg_sig_mr()
|      317  wr->wr.send_flags = 0;  in iser_fast_reg_mr()
|
| /Linux-v5.10/drivers/infiniband/sw/rxe/ |
| D | rxe_req.c |
|      30   if (wqe->wr.send_flags & IB_SEND_INLINE) {  in retry_first_write_send()
|      157  if (unlikely((wqe->wr.send_flags & IB_SEND_FENCE) &&  in req_next_wqe()
|      388  solicited = (ibwr->send_flags & IB_SEND_SOLICITED) &&  in init_req_packet()
|      457  if (wqe->wr.send_flags & IB_SEND_INLINE) {  in fill_packet()
|      628  if ((wqe->wr.send_flags & IB_SEND_SIGNALED) ||  in rxe_requester()
|
| D | rxe_verbs.c |
|      488  if (unlikely((ibwr->send_flags & IB_SEND_INLINE) &&  in validate_send_wr()
|      504  wr->send_flags = ibwr->send_flags;  in init_send_wr()
|      570  if (unlikely(ibwr->send_flags & IB_SEND_INLINE)) {  in init_send_wqe()
|      660  if (unlikely((wr->send_flags & IB_SEND_INLINE) &&  in rxe_post_send_kernel()
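
rxe handles IB_SEND_INLINE in two steps: validate_send_wr()/rxe_post_send_kernel() reject inline payloads larger than the QP's max_inline_data, and init_send_wqe() copies the data into the WQE so the caller's buffer can be reused as soon as ib_post_send() returns. From the caller's side, a hedged sketch ('buf', 'len', 'lkey' and 'qp' are assumptions):

    struct ib_sge sge = {
            .addr   = (u64)(uintptr_t)buf,
            .length = len,                  /* must fit the QP's max_inline_data */
            .lkey   = lkey,                 /* effectively ignored for inline sends */
    };
    struct ib_send_wr wr = {
            .opcode     = IB_WR_SEND,
            .sg_list    = &sge,
            .num_sge    = 1,
            .send_flags = IB_SEND_INLINE | IB_SEND_SIGNALED,
    };
    const struct ib_send_wr *bad_wr;
    int ret = ib_post_send(qp, &wr, &bad_wr);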
|
| /Linux-v5.10/drivers/media/rc/ |
| D | ati_remote.c |
|      260  int send_flags;  member
|      393  ati_remote->send_flags |= SEND_FLAG_COMPLETE;  in ati_remote_irq_out()
|      414  ati_remote->send_flags = SEND_FLAG_IN_PROGRESS;  in ati_remote_sendpacket()
|      425  (ati_remote->send_flags & SEND_FLAG_COMPLETE)),  in ati_remote_sendpacket()
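
This hit is not RDMA at all: in ati_remote the send_flags member is a small state word used to hand-shake between ati_remote_sendpacket() and the out-URB completion handler ati_remote_irq_out(). The shape of it, simplified (the wait-queue member name is an assumption):

    /* submit path */
    ati_remote->send_flags = SEND_FLAG_IN_PROGRESS;
    retval = usb_submit_urb(ati_remote->out_urb, GFP_ATOMIC);

    /* ...then sleep until the URB completion handler ORs in SEND_FLAG_COMPLETE
     * and wakes the submitter, or until the timeout expires. */
    wait_event_timeout(ati_remote->wait,
                       ati_remote->send_flags & SEND_FLAG_COMPLETE, HZ);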
|
| /Linux-v5.10/net/sunrpc/xprtrdma/ |
| D | frwr_ops.c |
|      402  frwr->fr_regwr.wr.send_flags = 0;  in frwr_send()
|      518  last->send_flags = IB_SEND_SIGNALED;  in frwr_unmap_sync()
|      623  last->send_flags = IB_SEND_SIGNALED;  in frwr_unmap_async()
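
frwr_unmap_sync() and frwr_unmap_async() chain one IB_WR_LOCAL_INV work request per MR and set IB_SEND_SIGNALED only on the last one, so a single completion confirms the whole chain (the registration WR in frwr_send() stays unsignaled for the same reason). A reduced sketch of that idiom ('inv_wr[]' zero-initialized, 'rkeys[]', 'nr_mrs' >= 1 and 'qp' are assumptions):

    const struct ib_send_wr *bad_wr;
    struct ib_send_wr *last = NULL;
    int i, ret;

    for (i = 0; i < nr_mrs; i++) {
            inv_wr[i].opcode             = IB_WR_LOCAL_INV;
            inv_wr[i].ex.invalidate_rkey = rkeys[i];
            inv_wr[i].send_flags         = 0;     /* unsignaled by default */
            inv_wr[i].next               = (i + 1 < nr_mrs) ? &inv_wr[i + 1] : NULL;
            last = &inv_wr[i];
    }
    last->send_flags = IB_SEND_SIGNALED;          /* one CQE covers the whole chain */
    ret = ib_post_send(qp, &inv_wr[0], &bad_wr);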
|
| /Linux-v5.10/include/uapi/rdma/ |
| D | rdma_user_rxe.h | 79 __u32 send_flags; member
|
| D | vmw_pvrdma-abi.h | 235 __u32 send_flags; /* wr flags */ member
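
These two uapi members are how the same flag word crosses the user/kernel boundary for the rxe and vmw_pvrdma providers. From userspace, the equivalent bits are set through libibverbs before the provider library packs them into these structs; illustratively ('sge', 'qp' and the SGE setup are assumed):

    struct ibv_send_wr wr = {
            .wr_id      = 1,
            .opcode     = IBV_WR_SEND,
            .sg_list    = &sge,
            .num_sge    = 1,
            .send_flags = IBV_SEND_SIGNALED | IBV_SEND_SOLICITED,
    };
    struct ibv_send_wr *bad_wr;
    int ret = ibv_post_send(qp, &wr, &bad_wr);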
|
| /Linux-v5.10/drivers/infiniband/ulp/rtrs/ |
| D | rtrs.c |
|      127  .send_flags = IB_SEND_SIGNALED,  in rtrs_iu_post_send()
|      161  .wr.send_flags = flags,  in rtrs_iu_post_rdma_write_imm()
|      194  .send_flags = flags,  in rtrs_post_rdma_write_imm_empty()
|
| D | rtrs-srv.c |
|      281   wr->wr.send_flags = 0;  in rdma_write_sg()
|      307   inv_wr.send_flags = 0;  in rdma_write_sg()
|      319   rwr.wr.send_flags = 0;  in rdma_write_sg()
|      342   imm_wr.send_flags = flags;  in rdma_write_sg()
|      395   inv_wr.send_flags = 0;  in send_io_resp_imm()
|      435   rwr.wr.send_flags = 0;  in send_io_resp_imm()
|      459   imm_wr.send_flags = flags;  in send_io_resp_imm()
|      832   rwr[mri].wr.send_flags = mri ? 0 : IB_SEND_SIGNALED;  in process_info_req()
|      1146  .send_flags = IB_SEND_SIGNALED,  in rtrs_srv_inv_rkey()
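
rdma_write_sg() and send_io_resp_imm() build a chain of unsignaled RDMA WRITEs (plus optional rkey invalidation and re-registration) and finish it with an RDMA_WRITE_WITH_IMM that carries the flags, so the client sees exactly one immediate-data completion per response; process_info_req() uses the same mri ? 0 : IB_SEND_SIGNALED trick to signal only one WR of a batch. A reduced sketch of the write-with-imm tail ('imm', 'peer_addr', 'peer_rkey' and 'qp' are assumptions):

    struct ib_rdma_wr imm_wr = {
            .wr = {
                    .opcode      = IB_WR_RDMA_WRITE_WITH_IMM,
                    .send_flags  = IB_SEND_SIGNALED,     /* only the tail is signaled */
                    .ex.imm_data = cpu_to_be32(imm),
            },
            .remote_addr = peer_addr,
            .rkey        = peer_rkey,
    };
    const struct ib_send_wr *bad_wr;
    /* the preceding write WRs in the chain keep send_flags == 0, as above */
    int ret = ib_post_send(qp, &imm_wr.wr, &bad_wr);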
|
| /Linux-v5.10/drivers/infiniband/hw/mthca/ |
| D | mthca_qp.c |
|      1539  sqp->ud_header.bth.solicited_event = !!(wr->wr.send_flags & IB_SEND_SOLICITED);  in build_mlx_header()
|      1670  ((wr->send_flags & IB_SEND_SIGNALED) ?  in mthca_tavor_post_send()
|      1672  ((wr->send_flags & IB_SEND_SOLICITED) ?  in mthca_tavor_post_send()
|      1787  ((wr->send_flags & IB_SEND_FENCE) ?  in mthca_tavor_post_send()
|      1793  f0 = wr->send_flags & IB_SEND_FENCE ?  in mthca_tavor_post_send()
|      1998  ((wr->send_flags & IB_SEND_SIGNALED) ?  in mthca_arbel_post_send()
|      2000  ((wr->send_flags & IB_SEND_SOLICITED) ?  in mthca_arbel_post_send()
|      2002  ((wr->send_flags & IB_SEND_IP_CSUM) ?  in mthca_arbel_post_send()
|      2117  ((wr->send_flags & IB_SEND_FENCE) ?  in mthca_arbel_post_send()
|      2123  f0 = wr->send_flags & IB_SEND_FENCE ?  in mthca_arbel_post_send()
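
In the hardware drivers (mthca here; the mlx5 steering code, bnxt_re, cxgb4 and i40iw below follow the same pattern) the generic flags are translated into device-specific WQE control bits at post time. Schematically (the HW_WQE_* names are hypothetical, not mthca's):

    u32 ctrl = 0;                           /* device-specific WQE control word */

    if (wr->send_flags & IB_SEND_SIGNALED)
            ctrl |= HW_WQE_GEN_CQE;         /* generate a CQE for this WQE */
    if (wr->send_flags & IB_SEND_SOLICITED)
            ctrl |= HW_WQE_SOLICITED_EVENT; /* set the BTH solicited-event bit */
    if (wr->send_flags & IB_SEND_FENCE)
            ctrl |= HW_WQE_FENCE;           /* wait for prior reads/atomics */
    if (wr->send_flags & IB_SEND_IP_CSUM)
            ctrl |= HW_WQE_CSUM_OFFLOAD;    /* offload IP/TCP checksums */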
|
| /Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/steering/ |
| D | dr_send.c |
|      17   unsigned int send_flags;  member
|      240  wq_ctrl->fm_ce_se = (data_seg->send_flags) ?  in dr_rdma_segments()
|      339  send_info->write.send_flags |= IB_SEND_SIGNALED;  in dr_fill_data_segs()
|      348  send_info->read.send_flags = IB_SEND_SIGNALED;  in dr_fill_data_segs()
|      350  send_info->read.send_flags = 0;  in dr_fill_data_segs()
|
| /Linux-v5.10/drivers/infiniband/hw/bnxt_re/ |
| D | ib_verbs.c |
|      2228  if (wr->send_flags & IB_SEND_SOLICITED)  in bnxt_re_build_qp1_send_v2()
|      2370  if (wr->send_flags & IB_SEND_SIGNALED)  in bnxt_re_build_send_wqe()
|      2372  if (wr->send_flags & IB_SEND_FENCE)  in bnxt_re_build_send_wqe()
|      2374  if (wr->send_flags & IB_SEND_SOLICITED)  in bnxt_re_build_send_wqe()
|      2376  if (wr->send_flags & IB_SEND_INLINE)  in bnxt_re_build_send_wqe()
|      2402  if (wr->send_flags & IB_SEND_SIGNALED)  in bnxt_re_build_rdma_wqe()
|      2404  if (wr->send_flags & IB_SEND_FENCE)  in bnxt_re_build_rdma_wqe()
|      2406  if (wr->send_flags & IB_SEND_SOLICITED)  in bnxt_re_build_rdma_wqe()
|      2408  if (wr->send_flags & IB_SEND_INLINE)  in bnxt_re_build_rdma_wqe()
|      2432  if (wr->send_flags & IB_SEND_SIGNALED)  in bnxt_re_build_atomic_wqe()
|      [all …]
|
| /Linux-v5.10/drivers/infiniband/hw/cxgb4/ |
| D | qp.c |
|      499   if (wr->send_flags & IB_SEND_SOLICITED)  in build_rdma_send()
|      508   if (wr->send_flags & IB_SEND_SOLICITED)  in build_rdma_send()
|      525   if (wr->send_flags & IB_SEND_INLINE) {  in build_rdma_send()
|      576   if (wr->send_flags & IB_SEND_INLINE) {  in build_rdma_write()
|      643   if (wr->next->send_flags & IB_SEND_INLINE)  in build_rdma_write_cmpl()
|      691   bool send_signaled = (wr->next->send_flags & IB_SEND_SIGNALED) ||  in post_write_cmpl()
|      693   bool write_signaled = (wr->send_flags & IB_SEND_SIGNALED) ||  in post_write_cmpl()
|      1145  if (wr->send_flags & IB_SEND_SOLICITED)  in c4iw_post_send()
|      1147  if (wr->send_flags & IB_SEND_SIGNALED || qhp->sq_sig_all)  in c4iw_post_send()
|      1153  if (wr->send_flags & IB_SEND_FENCE)  in c4iw_post_send()
|      [all …]
|
| /Linux-v5.10/drivers/infiniband/hw/i40iw/ |
| D | i40iw_verbs.c |
|      2120  if ((ib_wr->send_flags & IB_SEND_SIGNALED) || iwqp->sig_all)  in i40iw_post_send()
|      2122  if (ib_wr->send_flags & IB_SEND_FENCE)  in i40iw_post_send()
|      2129  if (ib_wr->send_flags & IB_SEND_SOLICITED)  in i40iw_post_send()
|      2134  if (ib_wr->send_flags & IB_SEND_SOLICITED)  in i40iw_post_send()
|      2140  if (ib_wr->send_flags & IB_SEND_INLINE) {  in i40iw_post_send()
|      2160  if (ib_wr->send_flags & IB_SEND_INLINE) {  in i40iw_post_send()
|      2231  info.local_fence = ib_wr->send_flags & IB_SEND_FENCE;  in i40iw_post_send()
|      2232  info.signaled = ib_wr->send_flags & IB_SEND_SIGNALED;  in i40iw_post_send()
|
| /Linux-v5.10/drivers/infiniband/sw/siw/ |
| D | siw_verbs.c |
|      792  if ((wr->send_flags & IB_SEND_SIGNALED) ||  in siw_post_send()
|      796  if (wr->send_flags & IB_SEND_FENCE)  in siw_post_send()
|      802  if (wr->send_flags & IB_SEND_SOLICITED)  in siw_post_send()
|      805  if (!(wr->send_flags & IB_SEND_INLINE)) {  in siw_post_send()
|      854  if (!(wr->send_flags & IB_SEND_INLINE)) {  in siw_post_send()
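
c4iw_post_send(), i40iw_post_send() and siw_post_send() above all gate completion generation on (wr->send_flags & IB_SEND_SIGNALED) || sq_sig_all: when the QP was created with sq_sig_type = IB_SIGNAL_ALL_WR, every send WR produces a CQE regardless of the per-WR flag. Where that comes from on the caller side, as a sketch ('pd' and 'cq' are assumed):

    struct ib_qp_init_attr attr = {
            .send_cq     = cq,
            .recv_cq     = cq,
            .cap         = { .max_send_wr = 64, .max_recv_wr = 64,
                             .max_send_sge = 1, .max_recv_sge = 1 },
            .qp_type     = IB_QPT_RC,
            /* IB_SIGNAL_REQ_WR: only WRs carrying IB_SEND_SIGNALED complete;
             * IB_SIGNAL_ALL_WR would make the drivers above ignore the flag. */
            .sq_sig_type = IB_SIGNAL_REQ_WR,
    };
    struct ib_qp *qp = ib_create_qp(pd, &attr);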
|
| /Linux-v5.10/drivers/net/wireless/ath/ath6kl/ |
| D | htc_pipe.c |
|      97   u8 send_flags;  in get_htc_packet_credit_based()  local
|      105  send_flags = 0;  in get_htc_packet_credit_based()
|      150  send_flags |= HTC_FLAGS_NEED_CREDIT_UPDATE;  in get_htc_packet_credit_based()
|      165  packet->info.tx.flags = send_flags;  in get_htc_packet_credit_based()
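
Another non-RDMA user: in the ath6kl HTC layer, get_htc_packet_credit_based() builds per-packet flags while dequeuing under credit-based flow control, setting HTC_FLAGS_NEED_CREDIT_UPDATE when the endpoint is running low so the target piggy-backs a credit report on its next message. Roughly (the credit fields and threshold are assumptions):

    u8 send_flags = 0;

    if (ep->cred_dist.credits < ep->cred_dist.cred_per_msg)   /* assumed fields */
            send_flags |= HTC_FLAGS_NEED_CREDIT_UPDATE;        /* ask target to report credits */

    packet->info.tx.flags = send_flags;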
|