Lines matching refs: xsk_queue

40 struct xsk_queue {
114 static inline bool xskq_cons_read_addr_unchecked(struct xsk_queue *q, u64 *addr) in xskq_cons_read_addr_unchecked()
175 static inline bool xskq_cons_is_valid_desc(struct xsk_queue *q, in xskq_cons_is_valid_desc()
186 static inline bool xskq_cons_read_desc(struct xsk_queue *q, in xskq_cons_read_desc()
204 static inline u32 xskq_cons_read_desc_batch(struct xsk_queue *q, in xskq_cons_read_desc_batch()
230 static inline void __xskq_cons_release(struct xsk_queue *q) in __xskq_cons_release()
235 static inline void __xskq_cons_peek(struct xsk_queue *q) in __xskq_cons_peek()
241 static inline void xskq_cons_get_entries(struct xsk_queue *q) in xskq_cons_get_entries()
247 static inline u32 xskq_cons_nb_entries(struct xsk_queue *q, u32 max) in xskq_cons_nb_entries()
260 static inline bool xskq_cons_has_entries(struct xsk_queue *q, u32 cnt) in xskq_cons_has_entries()
265 static inline bool xskq_cons_peek_addr_unchecked(struct xsk_queue *q, u64 *addr) in xskq_cons_peek_addr_unchecked()
272 static inline bool xskq_cons_peek_desc(struct xsk_queue *q, in xskq_cons_peek_desc()
281 static inline u32 xskq_cons_peek_desc_batch(struct xsk_queue *q, struct xdp_desc *descs, in xskq_cons_peek_desc_batch()
293 static inline void xskq_cons_release(struct xsk_queue *q) in xskq_cons_release()
298 static inline void xskq_cons_release_n(struct xsk_queue *q, u32 cnt) in xskq_cons_release_n()
303 static inline bool xskq_cons_is_full(struct xsk_queue *q) in xskq_cons_is_full()
310 static inline u32 xskq_cons_present_entries(struct xsk_queue *q) in xskq_cons_present_entries()
318 static inline u32 xskq_prod_nb_free(struct xsk_queue *q, u32 max) in xskq_prod_nb_free()
332 static inline bool xskq_prod_is_full(struct xsk_queue *q) in xskq_prod_is_full()
337 static inline void xskq_prod_cancel(struct xsk_queue *q) in xskq_prod_cancel()
342 static inline int xskq_prod_reserve(struct xsk_queue *q) in xskq_prod_reserve()
352 static inline int xskq_prod_reserve_addr(struct xsk_queue *q, u64 addr) in xskq_prod_reserve_addr()
364 static inline u32 xskq_prod_reserve_addr_batch(struct xsk_queue *q, struct xdp_desc *descs, in xskq_prod_reserve_addr_batch()
381 static inline int xskq_prod_reserve_desc(struct xsk_queue *q, in xskq_prod_reserve_desc()
398 static inline void __xskq_prod_submit(struct xsk_queue *q, u32 idx) in __xskq_prod_submit()
403 static inline void xskq_prod_submit(struct xsk_queue *q) in xskq_prod_submit()
408 static inline void xskq_prod_submit_addr(struct xsk_queue *q, u64 addr) in xskq_prod_submit_addr()
418 static inline void xskq_prod_submit_n(struct xsk_queue *q, u32 nb_entries) in xskq_prod_submit_n()
423 static inline bool xskq_prod_is_empty(struct xsk_queue *q) in xskq_prod_is_empty()
431 static inline u64 xskq_nb_invalid_descs(struct xsk_queue *q) in xskq_nb_invalid_descs()
436 static inline u64 xskq_nb_queue_empty_descs(struct xsk_queue *q) in xskq_nb_queue_empty_descs()
441 struct xsk_queue *xskq_create(u32 nentries, bool umem_queue);
442 void xskq_destroy(struct xsk_queue *q_ops);
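The helpers listed above implement the single-producer/single-consumer rings used by AF_XDP sockets: the producer side reserves entries and later submits them (xskq_prod_reserve*, xskq_prod_submit*), while the consumer side peeks at entries and later releases them (xskq_cons_peek*, xskq_cons_release*), each side working against a cached index so it can batch its accesses to the shared ring. The sketch below is a simplified userspace model of that reserve/submit and peek/release flow, not the kernel code: the toy_* names and struct layout are invented for illustration, and the memory barriers, descriptor validation, and shared struct xdp_ring layout are deliberately omitted.

/*
 * Toy model of the cached-index ring pattern behind the xskq_* helpers.
 * Single producer, single consumer, nentries is a power of two.
 */
#include <inttypes.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct toy_queue {
	uint32_t nentries;
	uint32_t ring_mask;	/* nentries - 1 */
	uint32_t cached_prod;	/* producer's private index */
	uint32_t cached_cons;	/* consumer's private index */
	uint32_t prod;		/* shared with the peer in the real rings */
	uint32_t cons;		/* shared with the peer in the real rings */
	uint64_t *addrs;	/* ring payload: one u64 address per slot */
};

/* Roughly what xskq_prod_reserve_addr() does: claim a slot, no publish yet. */
static int toy_prod_reserve_addr(struct toy_queue *q, uint64_t addr)
{
	uint32_t free_entries = q->nentries - (q->cached_prod - q->cons);

	if (free_entries == 0)
		return -1;	/* ring full */

	q->addrs[q->cached_prod++ & q->ring_mask] = addr;
	return 0;
}

/* Roughly xskq_prod_submit(): publish the reserved entries to the consumer. */
static void toy_prod_submit(struct toy_queue *q)
{
	/* The kernel publishes with a release barrier here. */
	q->prod = q->cached_prod;
}

/* Roughly xskq_cons_peek_addr_unchecked(): read the next entry without consuming it. */
static bool toy_cons_peek_addr(struct toy_queue *q, uint64_t *addr)
{
	if (q->cached_cons == q->prod)	/* kernel refreshes a cached copy with an acquire load */
		return false;

	*addr = q->addrs[q->cached_cons & q->ring_mask];
	return true;
}

/* Roughly xskq_cons_release(): hand the consumed slot back to the producer.
 * The kernel splits this into bumping cached_cons and a later
 * __xskq_cons_release() that updates the shared consumer index. */
static void toy_cons_release(struct toy_queue *q)
{
	q->cached_cons++;
	q->cons = q->cached_cons;
}

int main(void)
{
	struct toy_queue q = {
		.nentries = 8,
		.ring_mask = 7,
		.addrs = calloc(8, sizeof(uint64_t)),
	};
	uint64_t addr;

	if (!q.addrs)
		return 1;

	toy_prod_reserve_addr(&q, 0x1000);
	toy_prod_reserve_addr(&q, 0x2000);
	toy_prod_submit(&q);

	while (toy_cons_peek_addr(&q, &addr)) {
		printf("consumed addr 0x%" PRIx64 "\n", addr);
		toy_cons_release(&q);
	}

	free(q.addrs);
	return 0;
}

In the actual helpers the producer and consumer indices live in a struct xdp_ring that is shared with userspace, and the cached_prod/cached_cons fields of struct xsk_queue let each side work ahead locally before publishing with release/acquire semantics; the *_batch variants (xskq_cons_read_desc_batch, xskq_prod_reserve_addr_batch) apply the same pattern to several descriptors per refresh of the shared indices.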