/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

rx.h
    12  bool mlx5e_xsk_pages_enough_umem(struct mlx5e_rq *rq, int count);
    13  int mlx5e_xsk_page_alloc_umem(struct mlx5e_rq *rq,
    15  void mlx5e_xsk_page_release(struct mlx5e_rq *rq,
    18  struct sk_buff *mlx5e_xsk_skb_from_cqe_mpwrq_linear(struct mlx5e_rq *rq,
    23  struct sk_buff *mlx5e_xsk_skb_from_cqe_linear(struct mlx5e_rq *rq,
    28  static inline bool mlx5e_xsk_update_rx_wakeup(struct mlx5e_rq *rq, bool alloc_err)  in mlx5e_xsk_update_rx_wakeup()

rx.c
    10  bool mlx5e_xsk_pages_enough_umem(struct mlx5e_rq *rq, int count)  in mlx5e_xsk_pages_enough_umem()
    18  int mlx5e_xsk_page_alloc_umem(struct mlx5e_rq *rq,  in mlx5e_xsk_page_alloc_umem()
    46  static inline void mlx5e_xsk_recycle_frame(struct mlx5e_rq *rq, u64 handle)  in mlx5e_xsk_recycle_frame()
    55  void mlx5e_xsk_page_release(struct mlx5e_rq *rq,  in mlx5e_xsk_page_release()
    66  struct mlx5e_rq *rq = container_of(zca, struct mlx5e_rq, zca);  in mlx5e_xsk_zca_free()
    71  static struct sk_buff *mlx5e_xsk_construct_skb(struct mlx5e_rq *rq, void *data,  in mlx5e_xsk_construct_skb()
    87  struct sk_buff *mlx5e_xsk_skb_from_cqe_mpwrq_linear(struct mlx5e_rq *rq,  in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
   151  struct sk_buff *mlx5e_xsk_skb_from_cqe_linear(struct mlx5e_rq *rq,  in mlx5e_xsk_skb_from_cqe_linear()
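The rx.c hit at line 66 recovers the owning receive queue from the embedded zero-copy allocator member with container_of(); the same walk-back-from-member trick shows up again at en_dim.c line 47 and en_main.c line 368 further down. A minimal userspace sketch of the pattern, assuming toy stand-in types (struct toy_rq and struct toy_zca are illustrative, not the driver's structs):

#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of() macro. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct toy_zca { int dummy; };          /* stands in for the zca member  */

struct toy_rq {                         /* stands in for struct mlx5e_rq */
        int ix;                         /* queue index                   */
        struct toy_zca zca;             /* embedded member               */
};

static void toy_zca_free(struct toy_zca *zca)
{
        /* Same idea as mlx5e_xsk_zca_free(): given only a pointer to the
         * embedded member, recover the receive queue that contains it. */
        struct toy_rq *rq = container_of(zca, struct toy_rq, zca);

        printf("recycling on rq %d\n", rq->ix);
}

int main(void)
{
        struct toy_rq rq = { .ix = 3 };

        toy_zca_free(&rq.zca);
        return 0;
}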
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/

en.h
   378  struct mlx5e_rq *rq;
   466  struct mlx5e_rq *rq;
   599  struct mlx5e_rq;
   600  typedef void (*mlx5e_fp_handle_rx_cqe)(struct mlx5e_rq*, struct mlx5_cqe64*);
   602  (*mlx5e_fp_skb_from_cqe_mpwrq)(struct mlx5e_rq *rq, struct mlx5e_mpw_info *wi,
   605  (*mlx5e_fp_skb_from_cqe)(struct mlx5e_rq *rq, struct mlx5_cqe64 *cqe,
   607  typedef bool (*mlx5e_fp_post_rx_wqes)(struct mlx5e_rq *rq);
   608  typedef void (*mlx5e_fp_dealloc_wqe)(struct mlx5e_rq*, u16);
   627  struct mlx5e_rq {  struct
   707  struct mlx5e_rq rq;  argument
   [all …]
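The typedefs at en.h lines 600-608 express the RX completion path as function pointers stored in the RQ, which is how one polling loop can drive the legacy, striding-RQ, representor, IPoIB and IPsec handlers declared in en_rep.h, ipoib.h and ipsec_rxtx.h below (all of them match mlx5e_fp_handle_rx_cqe). A self-contained sketch of that dispatch style, assuming toy stand-in types rather than the driver's:

#include <stdio.h>

/* Toy stand-ins for struct mlx5e_rq and struct mlx5_cqe64. */
struct toy_rq  { const char *name; };
struct toy_cqe { unsigned int byte_cnt; };

/* Shape mirrors mlx5e_fp_handle_rx_cqe: one typedef, several
 * interchangeable completion handlers. */
typedef void (*toy_fp_handle_rx_cqe)(struct toy_rq *rq, struct toy_cqe *cqe);

static void toy_handle_rx_cqe(struct toy_rq *rq, struct toy_cqe *cqe)
{
        printf("%s: legacy RQ completion, %u bytes\n", rq->name, cqe->byte_cnt);
}

static void toy_handle_rx_cqe_mpwrq(struct toy_rq *rq, struct toy_cqe *cqe)
{
        printf("%s: striding RQ completion, %u bytes\n", rq->name, cqe->byte_cnt);
}

int main(void)
{
        struct toy_rq rq = { .name = "rq0" };
        struct toy_cqe cqe = { .byte_cnt = 1500 };
        int striding_rq = 1;    /* pretend the RQ was created in striding mode */

        /* Pick the handler once at setup time; the hot path is then a
         * single indirect call per completion. */
        toy_fp_handle_rx_cqe handle_rx_cqe =
                striding_rq ? toy_handle_rx_cqe_mpwrq : toy_handle_rx_cqe;

        handle_rx_cqe(&rq, &cqe);
        return 0;
}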
en_rx.c
    66  static inline void mlx5e_read_title_slot(struct mlx5e_rq *rq,  in mlx5e_read_title_slot()
   111  static inline void mlx5e_decompress_cqe(struct mlx5e_rq *rq,  in mlx5e_decompress_cqe()
   132  static inline void mlx5e_decompress_cqe_no_hash(struct mlx5e_rq *rq,  in mlx5e_decompress_cqe_no_hash()
   143  static inline u32 mlx5e_decompress_cqes_cont(struct mlx5e_rq *rq,  in mlx5e_decompress_cqes_cont()
   171  static inline u32 mlx5e_decompress_cqes_start(struct mlx5e_rq *rq,  in mlx5e_decompress_cqes_start()
   192  static inline bool mlx5e_rx_cache_put(struct mlx5e_rq *rq,  in mlx5e_rx_cache_put()
   214  static inline bool mlx5e_rx_cache_get(struct mlx5e_rq *rq,  in mlx5e_rx_cache_get()
   240  static inline int mlx5e_page_alloc_pool(struct mlx5e_rq *rq,  in mlx5e_page_alloc_pool()
   261  static inline int mlx5e_page_alloc(struct mlx5e_rq *rq,  in mlx5e_page_alloc()
   270  void mlx5e_page_dma_unmap(struct mlx5e_rq *rq, struct mlx5e_dma_info *dma_info)  in mlx5e_page_dma_unmap()
   [all …]
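mlx5e_rx_cache_put() and mlx5e_rx_cache_get() at en_rx.c lines 192 and 214 maintain a small per-RQ cache of recently used pages that is consulted on the allocation path before going to the page allocator. A simplified sketch of a bounded FIFO with the same put/get contract; the size and types below are illustrative, not the driver's:

#include <stdbool.h>
#include <stdio.h>

#define TOY_CACHE_SIZE 8    /* illustrative; the driver sizes its cache differently */

/* Toy page cache: a bounded FIFO of page pointers, the same general
 * idea as the head/tail page cache kept inside the RQ. */
struct toy_page_cache {
        unsigned int head;
        unsigned int tail;
        void *pages[TOY_CACHE_SIZE];
};

static bool toy_cache_put(struct toy_page_cache *c, void *page)
{
        unsigned int tail_next = (c->tail + 1) % TOY_CACHE_SIZE;

        if (tail_next == c->head)
                return false;   /* cache full: caller releases the page instead */

        c->pages[c->tail] = page;
        c->tail = tail_next;
        return true;
}

static bool toy_cache_get(struct toy_page_cache *c, void **page)
{
        if (c->head == c->tail)
                return false;   /* cache empty: caller allocates a fresh page */

        *page = c->pages[c->head];
        c->head = (c->head + 1) % TOY_CACHE_SIZE;
        return true;
}

int main(void)
{
        struct toy_page_cache cache = { 0 };
        int fake_page = 42;
        void *p;

        toy_cache_put(&cache, &fake_page);
        if (toy_cache_get(&cache, &p))
                printf("reused cached page %d\n", *(int *)p);
        return 0;
}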
en_txrx.c
    62  static void mlx5e_handle_rx_dim(struct mlx5e_rq *rq)  in mlx5e_handle_rx_dim()
    85  static bool mlx5e_napi_xsk_post(struct mlx5e_xdpsq *xsksq, struct mlx5e_rq *xskrq)  in mlx5e_napi_xsk_post()
   114  struct mlx5e_rq *xskrq = &c->xskrq;  in mlx5e_napi_poll()
   115  struct mlx5e_rq *rq = &c->rq;  in mlx5e_napi_poll()
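mlx5e_napi_poll() at en_txrx.c lines 114-115 picks up both the channel's regular RQ and its XSK RQ, so one NAPI context services both when AF_XDP is active. A toy illustration of draining two receive queues under a shared budget; the split policy here is invented for illustration and is not the driver's:

#include <stdio.h>

/* Toy receive queue: 'pending' completions waiting to be polled. */
struct toy_rq { const char *name; int pending; };

/* Poll at most 'budget' completions from one queue and return how
 * many were consumed, in the spirit of a poll_rx_cq-style helper. */
static int toy_poll_rx(struct toy_rq *rq, int budget)
{
        int done = rq->pending < budget ? rq->pending : budget;

        rq->pending -= done;
        printf("%s: polled %d\n", rq->name, done);
        return done;
}

int main(void)
{
        struct toy_rq rq    = { .name = "rq",    .pending = 40 };
        struct toy_rq xskrq = { .name = "xskrq", .pending = 10 };
        int budget = 64, work = 0;

        /* Illustrative split only: drain the XSK RQ first, then the
         * regular RQ, and report the total back to the NAPI core. */
        work += toy_poll_rx(&xskrq, budget - work);
        work += toy_poll_rx(&rq, budget - work);

        printf("napi poll done, work = %d/%d\n", work, budget);
        return 0;
}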
en_dim.c
    47  struct mlx5e_rq *rq = container_of(dim, struct mlx5e_rq, dim);  in mlx5e_rx_dim_work()
en_main.c
   232  static inline void mlx5e_build_umr_wqe(struct mlx5e_rq *rq,  in mlx5e_build_umr_wqe()
   251  static int mlx5e_rq_alloc_mpwqe_info(struct mlx5e_rq *rq,  in mlx5e_rq_alloc_mpwqe_info()
   301  static int mlx5e_create_rq_umr_mkey(struct mlx5_core_dev *mdev, struct mlx5e_rq *rq)  in mlx5e_create_rq_umr_mkey()
   308  static inline u64 mlx5e_get_mpwqe_offset(struct mlx5e_rq *rq, u16 wqe_ix)  in mlx5e_get_mpwqe_offset()
   313  static void mlx5e_init_frags_partition(struct mlx5e_rq *rq)  in mlx5e_init_frags_partition()
   346  static int mlx5e_init_di_list(struct mlx5e_rq *rq,  in mlx5e_init_di_list()
   361  static void mlx5e_free_di_list(struct mlx5e_rq *rq)  in mlx5e_free_di_list()
   368  struct mlx5e_rq *rq = container_of(recover_work, struct mlx5e_rq, recover_work);  in mlx5e_rq_err_cqe_work()
   378  struct mlx5e_rq *rq)  in mlx5e_alloc_rq()
   641  static void mlx5e_free_rq(struct mlx5e_rq *rq)  in mlx5e_free_rq()
   [all …]
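mlx5e_get_mpwqe_offset() at en_main.c line 308 maps a multi-packet WQE index to a byte offset inside the RQ's UMR-mapped buffer, which in principle is a fixed-stride multiplication. The sketch below only illustrates that arithmetic; the stride constants are invented, the real values come from the device's MPWQE configuration:

#include <stdint.h>
#include <stdio.h>

/* Invented for illustration: pretend each multi-packet WQE owns
 * 64 pages of 4 KiB. */
#define TOY_PAGES_PER_MPWQE 64
#define TOY_PAGE_SHIFT      12

static uint64_t toy_get_mpwqe_offset(uint16_t wqe_ix)
{
        /* offset = wqe_ix * pages_per_wqe * page_size */
        return (uint64_t)wqe_ix * TOY_PAGES_PER_MPWQE << TOY_PAGE_SHIFT;
}

int main(void)
{
        printf("wqe 3 starts at byte offset %llu\n",
               (unsigned long long)toy_get_mpwqe_offset(3));
        return 0;
}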
en_rep.h
   192  void mlx5e_handle_rx_cqe_rep(struct mlx5e_rq *rq, struct mlx5_cqe64 *cqe);
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/en/

reporter_rx.c
    61  struct mlx5e_rq *rq;  in mlx5e_rx_reporter_err_icosq_cqe_recover()
   118  static int mlx5e_rq_to_ready(struct mlx5e_rq *rq, int curr_state)  in mlx5e_rq_to_ready()
   141  struct mlx5e_rq *rq;  in mlx5e_rx_reporter_err_rq_cqe_recover()
   174  void mlx5e_reporter_rq_cqe_err(struct mlx5e_rq *rq)  in mlx5e_reporter_rq_cqe_err()
   191  struct mlx5e_rq *rq;  in mlx5e_rx_reporter_timeout_recover()
   204  void mlx5e_reporter_rx_timeout(struct mlx5e_rq *rq)  in mlx5e_reporter_rx_timeout()
   234  static int mlx5e_rx_reporter_build_diagnose_output(struct mlx5e_rq *rq,  in mlx5e_rx_reporter_build_diagnose_output()
   308  struct mlx5e_rq *generic_rq;  in mlx5e_rx_reporter_diagnose()
   358  struct mlx5e_rq *rq = &priv->channels.c[i]->rq;  in mlx5e_rx_reporter_diagnose()
health.h
    32  void mlx5e_reporter_rq_cqe_err(struct mlx5e_rq *rq);
    33  void mlx5e_reporter_rx_timeout(struct mlx5e_rq *rq);
xdp.h
    65  bool mlx5e_xdp_handle(struct mlx5e_rq *rq, struct mlx5e_dma_info *di,
    71  void mlx5e_xdp_rx_poll_complete(struct mlx5e_rq *rq);
xdp.c
    59  mlx5e_xmit_xdp_buff(struct mlx5e_xdpsq *sq, struct mlx5e_rq *rq,  in mlx5e_xmit_xdp_buff()
   121  bool mlx5e_xdp_handle(struct mlx5e_rq *rq, struct mlx5e_dma_info *di,  in mlx5e_xdp_handle()
   531  void mlx5e_xdp_rx_poll_complete(struct mlx5e_rq *rq)  in mlx5e_xdp_rx_poll_complete()
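mlx5e_xdp_handle() at xdp.c line 121 runs the attached XDP program on a received buffer, and its bool result tells the RX path whether the frame was consumed by XDP or should continue toward the normal SKB path. A hedged, stand-alone sketch of that verdict split; the enum values mirror the uapi XDP actions, everything else is a toy:

#include <stdbool.h>
#include <stdio.h>

/* Same numeric values as enum xdp_action in <linux/bpf.h>. */
enum toy_xdp_action {
        TOY_XDP_ABORTED  = 0,
        TOY_XDP_DROP     = 1,
        TOY_XDP_PASS     = 2,
        TOY_XDP_TX       = 3,
        TOY_XDP_REDIRECT = 4,
};

/* Returns true when the frame was consumed by XDP (dropped, sent back
 * out, or redirected) and false when it should keep going down the
 * normal receive path, the same kind of contract a bool-returning
 * handler like mlx5e_xdp_handle() expresses. */
static bool toy_xdp_handle(enum toy_xdp_action act)
{
        switch (act) {
        case TOY_XDP_PASS:
                return false;           /* build an SKB as usual */
        case TOY_XDP_TX:
        case TOY_XDP_REDIRECT:
                return true;            /* queued elsewhere, buffer stays busy */
        case TOY_XDP_ABORTED:
        case TOY_XDP_DROP:
        default:
                return true;            /* frame dropped, buffer can be recycled */
        }
}

int main(void)
{
        printf("XDP_PASS consumed? %d\n", toy_xdp_handle(TOY_XDP_PASS));
        printf("XDP_TX   consumed? %d\n", toy_xdp_handle(TOY_XDP_TX));
        return 0;
}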
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

ipsec_rxtx.h
    46  void mlx5e_ipsec_handle_rx_cqe(struct mlx5e_rq *rq, struct mlx5_cqe64 *cqe);
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/ipoib/

ipoib.h
   126  void mlx5i_handle_rx_cqe(struct mlx5e_rq *rq, struct mlx5_cqe64 *cqe);