Excerpts from include/linux/ptr_ring.h: the core operations on a struct ptr_ring *r.
/*
 * Producer-side fullness test. Callers of the bare variant must hold
 * producer_lock; a slot is free again only once the consumer has NULLed it.
 */
static inline bool __ptr_ring_full(struct ptr_ring *r)
{
        return r->queue[r->producer];
}

static inline bool ptr_ring_full(struct ptr_ring *r)
{
        bool ret;

        spin_lock(&r->producer_lock);
        ret = __ptr_ring_full(r);
        spin_unlock(&r->producer_lock);
        return ret;
}

static inline bool ptr_ring_full_irq(struct ptr_ring *r)
{
        bool ret;

        spin_lock_irq(&r->producer_lock);
        ret = __ptr_ring_full(r);
        spin_unlock_irq(&r->producer_lock);
        return ret;
}

static inline bool ptr_ring_full_any(struct ptr_ring *r)
{
        unsigned long flags;
        bool ret;

        spin_lock_irqsave(&r->producer_lock, flags);
        ret = __ptr_ring_full(r);
        spin_unlock_irqrestore(&r->producer_lock, flags);
        return ret;
}

static inline bool ptr_ring_full_bh(struct ptr_ring *r)
{
        bool ret;

        spin_lock_bh(&r->producer_lock);
        ret = __ptr_ring_full(r);
        spin_unlock_bh(&r->producer_lock);
        return ret;
}
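The four wrappers differ only in how they protect producer_lock. As a minimal sketch of a caller, assuming a kernel build context with #include <linux/ptr_ring.h> (the helper name is hypothetical), a softirq producer might use the _bh variant as a back-pressure hint:

#include <linux/ptr_ring.h>

/* Hypothetical helper, called from BH context. The result is only a hint:
 * the ring can fill up again before the caller actually produces. */
static bool my_ring_has_room(struct ptr_ring *ring)
{
        return !ptr_ring_full_bh(ring);
}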
/*
 * Produce one pointer. Callers of the bare variant must hold producer_lock.
 * NULL cannot be queued, since a NULL slot is what marks an entry as free.
 */
static inline int __ptr_ring_produce(struct ptr_ring *r, void *ptr)
{
        if (unlikely(!r->size) || r->queue[r->producer])
                return -ENOSPC;

        /* Make sure the data behind the pointer is visible before the
         * pointer that publishes it; pairs with the consumer side.
         */
        smp_wmb();

        WRITE_ONCE(r->queue[r->producer++], ptr);
        if (unlikely(r->producer >= r->size))
                r->producer = 0;
        return 0;
}

static inline int ptr_ring_produce(struct ptr_ring *r, void *ptr)
{
        int ret;

        spin_lock(&r->producer_lock);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock(&r->producer_lock);
        return ret;
}

static inline int ptr_ring_produce_irq(struct ptr_ring *r, void *ptr)
{
        int ret;

        spin_lock_irq(&r->producer_lock);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock_irq(&r->producer_lock);
        return ret;
}

static inline int ptr_ring_produce_any(struct ptr_ring *r, void *ptr)
{
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&r->producer_lock, flags);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock_irqrestore(&r->producer_lock, flags);
        return ret;
}

static inline int ptr_ring_produce_bh(struct ptr_ring *r, void *ptr)
{
        int ret;

        spin_lock_bh(&r->producer_lock);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock_bh(&r->producer_lock);
        return ret;
}
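A sketch of the producer side under the same assumptions: ptr_ring_produce() returns 0 on success and -ENOSPC when the slot at the producer index is still occupied, so the caller keeps ownership of the object on failure. The object type, free helper, and function name below are hypothetical:

/* Hypothetical producer in process context: enqueue or drop. */
static int my_enqueue(struct ptr_ring *ring, struct my_obj *obj)
{
        int err = ptr_ring_produce(ring, obj);

        if (err) {
                /* -ENOSPC: ring full, we still own obj. */
                my_obj_free(obj);
                return err;
        }
        return 0;
}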
/*
 * Peek at the next entry without consuming it. Callers must take
 * consumer_lock if they go on to dereference the pointer.
 */
static inline void *__ptr_ring_peek(struct ptr_ring *r)
{
        if (likely(r->size))
                return READ_ONCE(r->queue[r->consumer_head]);
        return NULL;
}
/*
 * Lockless emptiness test. Only safe if the ring is never resized; with
 * concurrent consumers the result is a hint, not a guarantee.
 */
static inline bool __ptr_ring_empty(struct ptr_ring *r)
{
        if (likely(r->size))
                return !r->queue[READ_ONCE(r->consumer_head)];
        return true;
}

static inline bool ptr_ring_empty(struct ptr_ring *r)
{
        bool ret;

        spin_lock(&r->consumer_lock);
        ret = __ptr_ring_empty(r);
        spin_unlock(&r->consumer_lock);
        return ret;
}

static inline bool ptr_ring_empty_irq(struct ptr_ring *r)
{
        bool ret;

        spin_lock_irq(&r->consumer_lock);
        ret = __ptr_ring_empty(r);
        spin_unlock_irq(&r->consumer_lock);
        return ret;
}

static inline bool ptr_ring_empty_any(struct ptr_ring *r)
{
        unsigned long flags;
        bool ret;

        spin_lock_irqsave(&r->consumer_lock, flags);
        ret = __ptr_ring_empty(r);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
        return ret;
}

static inline bool ptr_ring_empty_bh(struct ptr_ring *r)
{
        bool ret;

        spin_lock_bh(&r->consumer_lock);
        ret = __ptr_ring_empty(r);
        spin_unlock_bh(&r->consumer_lock);
        return ret;
}
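Because __ptr_ring_empty() only reads consumer_head and one slot, a lone consumer on a ring that is never resized may poll it without taking the lock; otherwise the locked wrappers apply. A minimal sketch with a hypothetical helper name:

/* Hypothetical: single consumer decides whether to reschedule its work.
 * Assumes the ring is never resized; with other consumers running this
 * is only a hint. */
static bool my_more_work(struct ptr_ring *ring)
{
        return !__ptr_ring_empty(ring);
}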
/*
 * Advance the consumer. Entries are not NULLed out one by one; they are
 * invalidated in batches of r->batch (and at the end of the ring) to limit
 * cache-line bouncing with the producer. Callers must hold consumer_lock.
 */
static inline void __ptr_ring_discard_one(struct ptr_ring *r)
{
        int consumer_head = r->consumer_head;
        int head = consumer_head++;

        if (unlikely(consumer_head - r->consumer_tail >= r->batch ||
                     consumer_head >= r->size)) {
                /* Zero out entries in reverse order, so the producer cannot
                 * make progress and touch further cache lines until the
                 * whole batch has been written out.
                 */
                while (likely(head >= r->consumer_tail))
                        r->queue[head--] = NULL;
                r->consumer_tail = consumer_head;
        }
        if (unlikely(consumer_head >= r->size)) {
                consumer_head = 0;
                r->consumer_tail = 0;
        }
        /* Matches the READ_ONCE in __ptr_ring_empty. */
        WRITE_ONCE(r->consumer_head, consumer_head);
}
static inline void *__ptr_ring_consume(struct ptr_ring *r)
{
        void *ptr;

        /* The READ_ONCE in __ptr_ring_peek pairs with the producer's
         * smp_wmb, so the data behind the pointer is up to date.
         */
        ptr = __ptr_ring_peek(r);
        if (ptr)
                __ptr_ring_discard_one(r);

        return ptr;
}

static inline int __ptr_ring_consume_batched(struct ptr_ring *r,
                                             void **array, int n)
{
        void *ptr;
        int i;

        for (i = 0; i < n; i++) {
                ptr = __ptr_ring_consume(r);
                if (!ptr)
                        break;
                array[i] = ptr;
        }
        return i;
}
/*
 * Locked consumer wrappers: return the consumed pointer, or NULL if the
 * ring is empty.
 */
static inline void *ptr_ring_consume(struct ptr_ring *r)
{
        void *ptr;

        spin_lock(&r->consumer_lock);
        ptr = __ptr_ring_consume(r);
        spin_unlock(&r->consumer_lock);
        return ptr;
}

static inline void *ptr_ring_consume_irq(struct ptr_ring *r)
{
        void *ptr;

        spin_lock_irq(&r->consumer_lock);
        ptr = __ptr_ring_consume(r);
        spin_unlock_irq(&r->consumer_lock);
        return ptr;
}

static inline void *ptr_ring_consume_any(struct ptr_ring *r)
{
        unsigned long flags;
        void *ptr;

        spin_lock_irqsave(&r->consumer_lock, flags);
        ptr = __ptr_ring_consume(r);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
        return ptr;
}

static inline void *ptr_ring_consume_bh(struct ptr_ring *r)
{
        void *ptr;

        spin_lock_bh(&r->consumer_lock);
        ptr = __ptr_ring_consume(r);
        spin_unlock_bh(&r->consumer_lock);
        return ptr;
}
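A consumer-side sketch: ptr_ring_consume() returns NULL once the ring is empty, so draining is a simple loop. The helper and callback names are hypothetical; from hard-irq context the _irq or _any variants would be used instead:

/* Hypothetical: drain everything currently queued, process context. */
static void my_drain_ring(struct ptr_ring *ring, void (*handle)(void *))
{
        void *obj;

        while ((obj = ptr_ring_consume(ring)))
                handle(obj);
}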
static inline int ptr_ring_consume_batched(struct ptr_ring *r,
                                           void **array, int n)
{
        int ret;

        spin_lock(&r->consumer_lock);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock(&r->consumer_lock);
        return ret;
}

static inline int ptr_ring_consume_batched_irq(struct ptr_ring *r,
                                               void **array, int n)
{
        int ret;

        spin_lock_irq(&r->consumer_lock);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock_irq(&r->consumer_lock);
        return ret;
}

static inline int ptr_ring_consume_batched_any(struct ptr_ring *r,
                                               void **array, int n)
{
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&r->consumer_lock, flags);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
        return ret;
}

static inline int ptr_ring_consume_batched_bh(struct ptr_ring *r,
                                              void **array, int n)
{
        int ret;

        spin_lock_bh(&r->consumer_lock);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock_bh(&r->consumer_lock);
        return ret;
}
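Batched consumption amortizes one consumer_lock acquisition over up to n entries. A sketch with a hypothetical on-stack batch and handler:

#define MY_BATCH 16                /* hypothetical batch size */

/* Hypothetical: drain the ring in batches from process context. */
static void my_drain_batched(struct ptr_ring *ring, void (*handle)(void *))
{
        void *batch[MY_BATCH];
        int n, i;

        do {
                n = ptr_ring_consume_batched(ring, batch, MY_BATCH);
                for (i = 0; i < n; i++)
                        handle(batch[i]);
        } while (n == MY_BATCH);        /* a short batch means the ring drained */
}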
/*
 * Call a function on the head entry without consuming it. The function must
 * return a value and must tolerate a NULL argument (empty ring). Callers of
 * the bare variant must hold consumer_lock.
 */
#define __PTR_RING_PEEK_CALL(r, f) ((f)(__ptr_ring_peek(r)))

#define PTR_RING_PEEK_CALL(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        \
        spin_lock(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v; \
})

#define PTR_RING_PEEK_CALL_IRQ(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        \
        spin_lock_irq(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock_irq(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v; \
})

#define PTR_RING_PEEK_CALL_BH(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        \
        spin_lock_bh(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock_bh(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v; \
})

#define PTR_RING_PEEK_CALL_ANY(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        unsigned long __PTR_RING_PEEK_CALL_f; \
        \
        spin_lock_irqsave(&(r)->consumer_lock, __PTR_RING_PEEK_CALL_f); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock_irqrestore(&(r)->consumer_lock, __PTR_RING_PEEK_CALL_f); \
        __PTR_RING_PEEK_CALL_v; \
})
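The PEEK_CALL macros run a callback on the head entry while holding consumer_lock, without dequeuing it. A sketch with a hypothetical entry type and length callback; the callback must handle NULL because an empty ring peeks as NULL:

struct my_obj {                        /* hypothetical queued object */
        int len;
};

static int my_obj_len(void *obj)
{
        return obj ? ((struct my_obj *)obj)->len : 0;
}

/* Hypothetical: report the size of the next queued object, 0 if empty. */
static int my_peek_len(struct ptr_ring *ring)
{
        return PTR_RING_PEEK_CALL(ring, my_obj_len);
}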
static inline void __ptr_ring_set_size(struct ptr_ring *r, int size)
{
        r->size = size;
        r->batch = SMP_CACHE_BYTES * 2 / sizeof(*(r->queue));
        /* Batch must be at least 1 for __ptr_ring_discard_one to work, and
         * batching a large fraction of a small ring would be too bursty.
         */
        if (r->batch > r->size / 2 || !r->batch)
                r->batch = 1;
}

static inline int ptr_ring_init(struct ptr_ring *r, int size, gfp_t gfp)
{
        r->queue = __ptr_ring_init_queue_alloc(size, gfp);
        if (!r->queue)
                return -ENOMEM;

        __ptr_ring_set_size(r, size);
        r->producer = r->consumer_head = r->consumer_tail = 0;
        spin_lock_init(&r->producer_lock);
        spin_lock_init(&r->consumer_lock);

        return 0;
}
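Initialization allocates the queue array via __ptr_ring_init_queue_alloc() and zeroes the indices. A sketch of typical setup, with a hypothetical driver structure:

struct my_dev {                        /* hypothetical driver state */
        struct ptr_ring ring;
};

static int my_dev_setup(struct my_dev *dev)
{
        /* 256-entry ring; GFP_KERNEL is fine from process context.
         * Returns -ENOMEM if the queue array allocation fails. */
        return ptr_ring_init(&dev->ring, 256, GFP_KERNEL);
}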
/*
 * Return previously consumed entries to the ring, destroying entries that no
 * longer fit. Expected to be a rare slow path. The producer lock nests inside
 * the consumer lock here, so all users must nest the locks the same way (and
 * disable IRQs/BH if they consume from those contexts).
 */
static inline void ptr_ring_unconsume(struct ptr_ring *r, void **batch, int n,
                                      void (*destroy)(void *))
{
        unsigned long flags;
        int head;

        spin_lock_irqsave(&r->consumer_lock, flags);
        spin_lock(&r->producer_lock);

        if (!r->size)
                goto done;

        /* Clean out batched-but-not-yet-invalidated entries first, so the
         * code below can test slots for NULL.
         */
        head = r->consumer_head - 1;
        while (likely(head >= r->consumer_tail))
                r->queue[head--] = NULL;
        r->consumer_tail = r->consumer_head;

        /* Move the consumer head back one slot at a time, refilling it from
         * the end of the batch, until the ring is full.
         */
        while (n) {
                head = r->consumer_head - 1;
                if (head < 0)
                        head = r->size - 1;
                if (r->queue[head])
                        goto done;      /* ring full: rest of batch is destroyed */
                r->queue[head] = batch[--n];
                r->consumer_tail = head;
                /* Matches the READ_ONCE in __ptr_ring_empty. */
                WRITE_ONCE(r->consumer_head, head);
        }

done:
        /* Destroy all entries left in the batch. */
        while (n)
                destroy(batch[--n]);
        spin_unlock(&r->producer_lock);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
}
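ptr_ring_unconsume() pushes previously consumed entries back in front of the consumer, so a caller can hand back the unprocessed tail of a batch; whatever no longer fits is passed to destroy(). A thin sketch with hypothetical names:

/* Hypothetical: give back the unprocessed tail of a consumed batch. */
static void my_requeue_unprocessed(struct ptr_ring *ring,
                                   void **batch, int unprocessed,
                                   void (*destroy)(void *))
{
        /* Entries that no longer fit in the ring are destroyed. */
        ptr_ring_unconsume(ring, batch, unprocessed, destroy);
}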
/*
 * Move all queued entries from the old queue array into a freshly allocated
 * one, destroying entries that do not fit, and return the old array so the
 * caller can free it. Callers must hold both locks.
 */
static inline void **__ptr_ring_swap_queue(struct ptr_ring *r, void **queue,
                                           int size, gfp_t gfp,
                                           void (*destroy)(void *))
{
        int producer = 0;
        void **old;
        void *ptr;

        while ((ptr = __ptr_ring_consume(r)))
                if (producer < size)
                        queue[producer++] = ptr;
                else if (destroy)
                        destroy(ptr);

        /* Wrap the producer index if the new queue is exactly full. */
        if (producer >= size)
                producer = 0;
        __ptr_ring_set_size(r, size);
        r->producer = producer;
        r->consumer_head = 0;
        r->consumer_tail = 0;
        old = r->queue;
        r->queue = queue;

        return old;
}
/*
 * Resize the ring. The new queue array is allocated outside the locks, then
 * swapped in with both locks held; entries that do not fit are destroyed.
 */
static inline int ptr_ring_resize(struct ptr_ring *r, int size, gfp_t gfp,
                                  void (*destroy)(void *))
{
        unsigned long flags;
        void **queue = __ptr_ring_init_queue_alloc(size, gfp);
        void **old;

        if (!queue)
                return -ENOMEM;

        spin_lock_irqsave(&(r)->consumer_lock, flags);
        spin_lock(&(r)->producer_lock);

        old = __ptr_ring_swap_queue(r, queue, size, gfp, destroy);

        spin_unlock(&(r)->producer_lock);
        spin_unlock_irqrestore(&(r)->consumer_lock, flags);

        kvfree(old);

        return 0;
}
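A resize sketch under the same assumptions: the ring stays usable throughout, and queued entries that do not fit the new size are handed to the destroy callback (my_obj_free is the same hypothetical destructor used in the produce sketch):

/* Hypothetical: grow or shrink the ring at runtime, process context. */
static int my_resize_ring(struct ptr_ring *ring, int new_size)
{
        return ptr_ring_resize(ring, new_size, GFP_KERNEL, my_obj_free);
}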
static inline void ptr_ring_cleanup(struct ptr_ring *r, void (*destroy)(void *))
{
        void *ptr;

        if (destroy)
                while ((ptr = ptr_ring_consume(r)))
                        destroy(ptr);
        kvfree(r->queue);
}
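Teardown consumes and destroys anything still queued, then frees the queue array with kvfree(). A sketch reusing the hypothetical my_dev and my_obj_free from the earlier sketches; no producers or consumers may still be running at this point:

static void my_dev_teardown(struct my_dev *dev)
{
        /* Destroys all still-queued objects, then frees the ring storage. */
        ptr_ring_cleanup(&dev->ring, my_obj_free);
}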