Searched refs:cur_edge (Results 1 – 5 of 5) sorted by relevance

/Linux-v6.6/drivers/infiniband/hw/mlx5/
wr.c        55 void **seg, int *size, void **cur_edge) in set_eth_seg() argument
79 copysz = min_t(u64, *cur_edge - (void *)eseg->inline_hdr.start, in set_eth_seg()
88 handle_post_send_edge(&qp->sq, seg, *size, cur_edge); in set_eth_seg()
91 mlx5r_memcpy_send_wqe(&qp->sq, cur_edge, seg, size, in set_eth_seg()
261 void **wqe, int *wqe_sz, void **cur_edge) in set_data_inl_seg() argument
287 cur_edge); in set_data_inl_seg()
289 leftlen = *cur_edge - *wqe; in set_data_inl_seg()
420 void **cur_edge) in set_sig_data_segment() argument
514 handle_post_send_edge(&qp->sq, seg, *size, cur_edge); in set_sig_data_segment()
523 handle_post_send_edge(&qp->sq, seg, *size, cur_edge); in set_sig_data_segment()
[all …]
wr.h        52 u32 wqe_sz, void **cur_edge) in handle_post_send_edge() argument
56 if (likely(*seg != *cur_edge)) in handle_post_send_edge()
60 *cur_edge = get_sq_edge(sq, idx); in handle_post_send_edge()
74 static inline void mlx5r_memcpy_send_wqe(struct mlx5_ib_wq *sq, void **cur_edge, in mlx5r_memcpy_send_wqe() argument
79 size_t leftlen = *cur_edge - *seg; in mlx5r_memcpy_send_wqe()
90 handle_post_send_edge(sq, seg, *wqe_sz, cur_edge); in mlx5r_memcpy_send_wqe()
97 int *size, void **cur_edge, int nreq, __be32 general_id,
100 void *seg, u8 size, void *cur_edge, unsigned int idx,
umr.c       242 void *cur_edge, *seg; in mlx5r_umr_post_send() local
252 err = mlx5r_begin_wqe(qp, &seg, &ctrl, &idx, &size, &cur_edge, 0, in mlx5r_umr_post_send()
259 mlx5r_memcpy_send_wqe(&qp->sq, &cur_edge, &seg, &size, wqe, wqe_size); in mlx5r_umr_post_send()
262 mlx5r_finish_wqe(qp, ctrl, seg, size, cur_edge, idx, id.wr_id, 0, in mlx5r_umr_post_send()
mlx5_ib.h   376 void *cur_edge; member
qp.c        1151 qp->sq.cur_edge = get_sq_edge(&qp->sq, 0); in _create_kernel_qp()
4394 qp->sq.cur_edge = get_sq_edge(&qp->sq, 0); in __mlx5_ib_modify_qp()
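Taken together, the hits show the role of cur_edge in mlx5 send-WQE construction: qp.c initializes it to the end of the first send-queue fragment via get_sq_edge(&qp->sq, 0), handle_post_send_edge() in wr.h hops seg and cur_edge to the next fragment once the write pointer reaches the edge, and mlx5r_memcpy_send_wqe() relies on that to copy payloads that straddle fragment boundaries (as wr.c and umr.c do). Below is a minimal user-space sketch of that pattern, not the kernel API: the names FRAG_SZ, sq_buf, get_edge, handle_edge and memcpy_send_wqe are hypothetical stand-ins, and driver details such as WQE-size accounting and 16-byte alignment of the final chunk are omitted.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define FRAG_SZ 64                      /* bytes per send-queue fragment */
#define NFRAGS   4                      /* fragments in the toy queue */

static uint8_t sq_buf[NFRAGS][FRAG_SZ]; /* stand-in for the fragmented SQ buffer */

/* End (one past the last byte) of fragment 'idx', cf. get_sq_edge(). */
static void *get_edge(int idx)
{
        return sq_buf[idx % NFRAGS] + FRAG_SZ;
}

/* If *seg has reached *cur_edge, move both to the next fragment,
 * in the spirit of handle_post_send_edge(). */
static void handle_edge(int *frag, void **seg, void **cur_edge)
{
        if (*seg != *cur_edge)
                return;
        *frag = (*frag + 1) % NFRAGS;
        *seg = sq_buf[*frag];
        *cur_edge = get_edge(*frag);
}

/* Copy n bytes of payload, hopping fragments at each edge,
 * in the spirit of mlx5r_memcpy_send_wqe(). */
static void memcpy_send_wqe(int *frag, void **seg, void **cur_edge,
                            const void *src, size_t n)
{
        while (n) {
                size_t left = (size_t)((uint8_t *)*cur_edge - (uint8_t *)*seg);
                size_t copy = n < left ? n : left;

                memcpy(*seg, src, copy);
                *seg = (uint8_t *)*seg + copy;
                src = (const uint8_t *)src + copy;
                n -= copy;

                handle_edge(frag, seg, cur_edge);
        }
}

int main(void)
{
        int frag = 0;
        void *seg = sq_buf[0];
        void *cur_edge = get_edge(0);   /* cf. qp->sq.cur_edge = get_sq_edge(&qp->sq, 0) */
        uint8_t payload[100];

        memset(payload, 0xab, sizeof(payload));
        /* 100 bytes > one 64-byte fragment, so the copy crosses an edge. */
        memcpy_send_wqe(&frag, &seg, &cur_edge, payload, sizeof(payload));

        printf("write pointer ended %td bytes into fragment %d\n",
               (uint8_t *)seg - sq_buf[frag], frag);
        return 0;
}

In the driver, the same sequence is visible in the umr.c hits from mlx5r_umr_post_send(): mlx5r_begin_wqe() yields seg and cur_edge, mlx5r_memcpy_send_wqe() copies the UMR WQE across edges, and mlx5r_finish_wqe() completes the post.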