Lines matching refs: r — references to the argument r in include/linux/ptr_ring.h (each line shows the source line number, the code, and the enclosing function)
48 static inline bool __ptr_ring_full(struct ptr_ring *r) in __ptr_ring_full() argument
50 return r->queue[r->producer]; in __ptr_ring_full()
53 static inline bool ptr_ring_full(struct ptr_ring *r) in ptr_ring_full() argument
57 spin_lock(&r->producer_lock); in ptr_ring_full()
58 ret = __ptr_ring_full(r); in ptr_ring_full()
59 spin_unlock(&r->producer_lock); in ptr_ring_full()
64 static inline bool ptr_ring_full_irq(struct ptr_ring *r) in ptr_ring_full_irq() argument
68 spin_lock_irq(&r->producer_lock); in ptr_ring_full_irq()
69 ret = __ptr_ring_full(r); in ptr_ring_full_irq()
70 spin_unlock_irq(&r->producer_lock); in ptr_ring_full_irq()
75 static inline bool ptr_ring_full_any(struct ptr_ring *r) in ptr_ring_full_any() argument
80 spin_lock_irqsave(&r->producer_lock, flags); in ptr_ring_full_any()
81 ret = __ptr_ring_full(r); in ptr_ring_full_any()
82 spin_unlock_irqrestore(&r->producer_lock, flags); in ptr_ring_full_any()
87 static inline bool ptr_ring_full_bh(struct ptr_ring *r) in ptr_ring_full_bh() argument
91 spin_lock_bh(&r->producer_lock); in ptr_ring_full_bh()
92 ret = __ptr_ring_full(r); in ptr_ring_full_bh()
93 spin_unlock_bh(&r->producer_lock); in ptr_ring_full_bh()
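
All four fullness helpers are thin wrappers around __ptr_ring_full(): the ring is full when the slot at the producer index is still occupied (non-NULL). They differ only in how producer_lock is taken (plain, irq-disabling, irqsave, or BH). A minimal usage sketch with illustrative function names that are not part of the header:

#include <linux/ptr_ring.h>

/* Process context with no irq/BH concerns: plain variant. */
static bool queue_is_full(struct ptr_ring *r)
{
        return ptr_ring_full(r);
}

/* Unknown calling context (possibly hardirq): irqsave variant. */
static bool queue_is_full_any(struct ptr_ring *r)
{
        return ptr_ring_full_any(r);
}
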
103 static inline int __ptr_ring_produce(struct ptr_ring *r, void *ptr) in __ptr_ring_produce() argument
105 if (unlikely(!r->size) || r->queue[r->producer]) in __ptr_ring_produce()
112 WRITE_ONCE(r->queue[r->producer++], ptr); in __ptr_ring_produce()
113 if (unlikely(r->producer >= r->size)) in __ptr_ring_produce()
114 r->producer = 0; in __ptr_ring_produce()
123 static inline int ptr_ring_produce(struct ptr_ring *r, void *ptr) in ptr_ring_produce() argument
127 spin_lock(&r->producer_lock); in ptr_ring_produce()
128 ret = __ptr_ring_produce(r, ptr); in ptr_ring_produce()
129 spin_unlock(&r->producer_lock); in ptr_ring_produce()
134 static inline int ptr_ring_produce_irq(struct ptr_ring *r, void *ptr) in ptr_ring_produce_irq() argument
138 spin_lock_irq(&r->producer_lock); in ptr_ring_produce_irq()
139 ret = __ptr_ring_produce(r, ptr); in ptr_ring_produce_irq()
140 spin_unlock_irq(&r->producer_lock); in ptr_ring_produce_irq()
145 static inline int ptr_ring_produce_any(struct ptr_ring *r, void *ptr) in ptr_ring_produce_any() argument
150 spin_lock_irqsave(&r->producer_lock, flags); in ptr_ring_produce_any()
151 ret = __ptr_ring_produce(r, ptr); in ptr_ring_produce_any()
152 spin_unlock_irqrestore(&r->producer_lock, flags); in ptr_ring_produce_any()
157 static inline int ptr_ring_produce_bh(struct ptr_ring *r, void *ptr) in ptr_ring_produce_bh() argument
161 spin_lock_bh(&r->producer_lock); in ptr_ring_produce_bh()
162 ret = __ptr_ring_produce(r, ptr); in ptr_ring_produce_bh()
163 spin_unlock_bh(&r->producer_lock); in ptr_ring_produce_bh()
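
__ptr_ring_produce() refuses to overwrite an occupied slot: it returns -ENOSPC when the ring is full (or has zero size), otherwise it stores the pointer with WRITE_ONCE() and advances the producer index, wrapping at r->size. Because a NULL slot is the ring's empty marker, NULL must never be produced. A hedged producer sketch using the BH-locking wrapper; the item type and drop policy are illustrative:

#include <linux/ptr_ring.h>
#include <linux/slab.h>

struct my_item;                 /* illustrative payload type */

static int enqueue_item(struct ptr_ring *r, struct my_item *item)
{
        int err;

        err = ptr_ring_produce_bh(r, item);    /* 0 on success, -ENOSPC if full */
        if (err)
                kfree(item);                   /* ring full: drop the item */
        return err;
}
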
168 static inline void *__ptr_ring_peek(struct ptr_ring *r) in __ptr_ring_peek() argument
170 if (likely(r->size)) in __ptr_ring_peek()
171 return READ_ONCE(r->queue[r->consumer_head]); in __ptr_ring_peek()
193 static inline bool __ptr_ring_empty(struct ptr_ring *r) in __ptr_ring_empty() argument
195 if (likely(r->size)) in __ptr_ring_empty()
196 return !r->queue[READ_ONCE(r->consumer_head)]; in __ptr_ring_empty()
200 static inline bool ptr_ring_empty(struct ptr_ring *r) in ptr_ring_empty() argument
204 spin_lock(&r->consumer_lock); in ptr_ring_empty()
205 ret = __ptr_ring_empty(r); in ptr_ring_empty()
206 spin_unlock(&r->consumer_lock); in ptr_ring_empty()
211 static inline bool ptr_ring_empty_irq(struct ptr_ring *r) in ptr_ring_empty_irq() argument
215 spin_lock_irq(&r->consumer_lock); in ptr_ring_empty_irq()
216 ret = __ptr_ring_empty(r); in ptr_ring_empty_irq()
217 spin_unlock_irq(&r->consumer_lock); in ptr_ring_empty_irq()
222 static inline bool ptr_ring_empty_any(struct ptr_ring *r) in ptr_ring_empty_any() argument
227 spin_lock_irqsave(&r->consumer_lock, flags); in ptr_ring_empty_any()
228 ret = __ptr_ring_empty(r); in ptr_ring_empty_any()
229 spin_unlock_irqrestore(&r->consumer_lock, flags); in ptr_ring_empty_any()
234 static inline bool ptr_ring_empty_bh(struct ptr_ring *r) in ptr_ring_empty_bh() argument
238 spin_lock_bh(&r->consumer_lock); in ptr_ring_empty_bh()
239 ret = __ptr_ring_empty(r); in ptr_ring_empty_bh()
240 spin_unlock_bh(&r->consumer_lock); in ptr_ring_empty_bh()
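
__ptr_ring_empty() only inspects the slot at consumer_head, so it is cheap, but (per the header's own caveats) it is only safe if the ring is never resized, and the answer may be stale if another CPU consumes concurrently; contexts other than the single consumer should prefer the locked wrappers. For illustration:

#include <linux/ptr_ring.h>

/* The sole consumer may poll locklessly... */
static bool consumer_has_work(struct ptr_ring *r)
{
        return !__ptr_ring_empty(r);
}

/* ...anyone else should take consumer_lock via a wrapper. */
static bool other_context_has_work(struct ptr_ring *r)
{
        return !ptr_ring_empty_any(r);
}
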
246 static inline void __ptr_ring_discard_one(struct ptr_ring *r) in __ptr_ring_discard_one() argument
263 int consumer_head = r->consumer_head; in __ptr_ring_discard_one()
271 if (unlikely(consumer_head - r->consumer_tail >= r->batch || in __ptr_ring_discard_one()
272 consumer_head >= r->size)) { in __ptr_ring_discard_one()
278 while (likely(head >= r->consumer_tail)) in __ptr_ring_discard_one()
279 r->queue[head--] = NULL; in __ptr_ring_discard_one()
280 r->consumer_tail = consumer_head; in __ptr_ring_discard_one()
282 if (unlikely(consumer_head >= r->size)) { in __ptr_ring_discard_one()
284 r->consumer_tail = 0; in __ptr_ring_discard_one()
287 WRITE_ONCE(r->consumer_head, consumer_head); in __ptr_ring_discard_one()
290 static inline void *__ptr_ring_consume(struct ptr_ring *r) in __ptr_ring_consume() argument
298 ptr = __ptr_ring_peek(r); in __ptr_ring_consume()
300 __ptr_ring_discard_one(r); in __ptr_ring_consume()
305 static inline int __ptr_ring_consume_batched(struct ptr_ring *r, in __ptr_ring_consume_batched() argument
312 ptr = __ptr_ring_consume(r); in __ptr_ring_consume_batched()
326 static inline void *ptr_ring_consume(struct ptr_ring *r) in ptr_ring_consume() argument
330 spin_lock(&r->consumer_lock); in ptr_ring_consume()
331 ptr = __ptr_ring_consume(r); in ptr_ring_consume()
332 spin_unlock(&r->consumer_lock); in ptr_ring_consume()
337 static inline void *ptr_ring_consume_irq(struct ptr_ring *r) in ptr_ring_consume_irq() argument
341 spin_lock_irq(&r->consumer_lock); in ptr_ring_consume_irq()
342 ptr = __ptr_ring_consume(r); in ptr_ring_consume_irq()
343 spin_unlock_irq(&r->consumer_lock); in ptr_ring_consume_irq()
348 static inline void *ptr_ring_consume_any(struct ptr_ring *r) in ptr_ring_consume_any() argument
353 spin_lock_irqsave(&r->consumer_lock, flags); in ptr_ring_consume_any()
354 ptr = __ptr_ring_consume(r); in ptr_ring_consume_any()
355 spin_unlock_irqrestore(&r->consumer_lock, flags); in ptr_ring_consume_any()
360 static inline void *ptr_ring_consume_bh(struct ptr_ring *r) in ptr_ring_consume_bh() argument
364 spin_lock_bh(&r->consumer_lock); in ptr_ring_consume_bh()
365 ptr = __ptr_ring_consume(r); in ptr_ring_consume_bh()
366 spin_unlock_bh(&r->consumer_lock); in ptr_ring_consume_bh()
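
ptr_ring_consume() and its _irq/_any/_bh variants return the dequeued pointer, or NULL when the ring is empty, so a drain loop simply runs until NULL. A sketch of a BH-context consumer; handle_item() is an illustrative callback, not part of the API:

#include <linux/ptr_ring.h>

static int drain_ring(struct ptr_ring *r, void (*handle_item)(void *))
{
        void *ptr;
        int count = 0;

        while ((ptr = ptr_ring_consume_bh(r)) != NULL) {
                handle_item(ptr);
                count++;
        }
        return count;           /* number of items handled */
}
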
371 static inline int ptr_ring_consume_batched(struct ptr_ring *r, in ptr_ring_consume_batched() argument
376 spin_lock(&r->consumer_lock); in ptr_ring_consume_batched()
377 ret = __ptr_ring_consume_batched(r, array, n); in ptr_ring_consume_batched()
378 spin_unlock(&r->consumer_lock); in ptr_ring_consume_batched()
383 static inline int ptr_ring_consume_batched_irq(struct ptr_ring *r, in ptr_ring_consume_batched_irq() argument
388 spin_lock_irq(&r->consumer_lock); in ptr_ring_consume_batched_irq()
389 ret = __ptr_ring_consume_batched(r, array, n); in ptr_ring_consume_batched_irq()
390 spin_unlock_irq(&r->consumer_lock); in ptr_ring_consume_batched_irq()
395 static inline int ptr_ring_consume_batched_any(struct ptr_ring *r, in ptr_ring_consume_batched_any() argument
401 spin_lock_irqsave(&r->consumer_lock, flags); in ptr_ring_consume_batched_any()
402 ret = __ptr_ring_consume_batched(r, array, n); in ptr_ring_consume_batched_any()
403 spin_unlock_irqrestore(&r->consumer_lock, flags); in ptr_ring_consume_batched_any()
408 static inline int ptr_ring_consume_batched_bh(struct ptr_ring *r, in ptr_ring_consume_batched_bh() argument
413 spin_lock_bh(&r->consumer_lock); in ptr_ring_consume_batched_bh()
414 ret = __ptr_ring_consume_batched(r, array, n); in ptr_ring_consume_batched_bh()
415 spin_unlock_bh(&r->consumer_lock); in ptr_ring_consume_batched_bh()
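
The batched variants pull up to n pointers into a caller-supplied array under a single lock/unlock pair and return how many were actually consumed, amortizing the locking cost. A sketch with illustrative names:

#include <linux/ptr_ring.h>

#define MY_BATCH 16             /* illustrative batch size */

static int process_batch(struct ptr_ring *r, void (*handle_item)(void *))
{
        void *batch[MY_BATCH];
        int n, i;

        n = ptr_ring_consume_batched_bh(r, batch, MY_BATCH);
        for (i = 0; i < n; i++)
                handle_item(batch[i]);
        return n;               /* 0 means the ring was empty */
}
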
424 #define __PTR_RING_PEEK_CALL(r, f) ((f)(__ptr_ring_peek(r))) argument
426 #define PTR_RING_PEEK_CALL(r, f) ({ \ argument
429 spin_lock(&(r)->consumer_lock); \
430 __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
431 spin_unlock(&(r)->consumer_lock); \
435 #define PTR_RING_PEEK_CALL_IRQ(r, f) ({ \ argument
438 spin_lock_irq(&(r)->consumer_lock); \
439 __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
440 spin_unlock_irq(&(r)->consumer_lock); \
444 #define PTR_RING_PEEK_CALL_BH(r, f) ({ \ argument
447 spin_lock_bh(&(r)->consumer_lock); \
448 __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
449 spin_unlock_bh(&(r)->consumer_lock); \
453 #define PTR_RING_PEEK_CALL_ANY(r, f) ({ \ argument
457 spin_lock_irqsave(&(r)->consumer_lock, __PTR_RING_PEEK_CALL_f); \
458 __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
459 spin_unlock_irqrestore(&(r)->consumer_lock, __PTR_RING_PEEK_CALL_f); \
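
The PTR_RING_PEEK_CALL* macros apply a function f to the entry at the consumer head, under the matching consumer_lock variant, without dequeuing it; when the ring is empty, f is invoked with NULL, so it must tolerate that. A hedged example using an illustrative item type:

#include <linux/ptr_ring.h>

struct my_item {
        u32 len;                /* illustrative payload */
};

/* Helper passed to the macro; must cope with NULL (empty ring). */
static u32 my_item_len(void *ptr)
{
        struct my_item *item = ptr;

        return item ? item->len : 0;
}

static u32 peek_next_len(struct ptr_ring *r)
{
        return PTR_RING_PEEK_CALL_BH(r, my_item_len);
}
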
473 static inline void __ptr_ring_set_size(struct ptr_ring *r, int size) in __ptr_ring_set_size() argument
475 r->size = size; in __ptr_ring_set_size()
476 r->batch = SMP_CACHE_BYTES * 2 / sizeof(*(r->queue)); in __ptr_ring_set_size()
482 if (r->batch > r->size / 2 || !r->batch) in __ptr_ring_set_size()
483 r->batch = 1; in __ptr_ring_set_size()
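
r->batch controls how lazily __ptr_ring_discard_one() releases slots: consumer_head advances on every consume, but the NULL stores that hand slots back to the producer (and the consumer_tail update) are deferred until a whole batch has been consumed or the head wraps, so producer and consumer stop bouncing the same cache lines on every operation. As an illustrative calculation (not from the source): with SMP_CACHE_BYTES = 64 and sizeof(void *) = 8, r->batch = 2 * 64 / 8 = 16, i.e. slots are recycled sixteen at a time; the clamp to 1 applies to rings smaller than roughly two batches, where lazy invalidation would only delay the producer.
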
486 static inline int ptr_ring_init(struct ptr_ring *r, int size, gfp_t gfp) in ptr_ring_init() argument
488 r->queue = __ptr_ring_init_queue_alloc(size, gfp); in ptr_ring_init()
489 if (!r->queue) in ptr_ring_init()
492 __ptr_ring_set_size(r, size); in ptr_ring_init()
493 r->producer = r->consumer_head = r->consumer_tail = 0; in ptr_ring_init()
494 spin_lock_init(&r->producer_lock); in ptr_ring_init()
495 spin_lock_init(&r->consumer_lock); in ptr_ring_init()
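
ptr_ring_init() allocates the queue array (returning -ENOMEM on failure), derives the batch size, zeroes the producer/consumer indices and initializes both locks, so the ring starts empty. A minimal setup sketch, assuming process context:

#include <linux/ptr_ring.h>

static struct ptr_ring my_ring;         /* illustrative ring instance */

static int my_setup(void)
{
        /* 512 slots, allocated with GFP_KERNEL; -ENOMEM on failure. */
        return ptr_ring_init(&my_ring, 512, GFP_KERNEL);
}
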
510 static inline void ptr_ring_unconsume(struct ptr_ring *r, void **batch, int n, in ptr_ring_unconsume() argument
516 spin_lock_irqsave(&r->consumer_lock, flags); in ptr_ring_unconsume()
517 spin_lock(&r->producer_lock); in ptr_ring_unconsume()
519 if (!r->size) in ptr_ring_unconsume()
526 head = r->consumer_head - 1; in ptr_ring_unconsume()
527 while (likely(head >= r->consumer_tail)) in ptr_ring_unconsume()
528 r->queue[head--] = NULL; in ptr_ring_unconsume()
529 r->consumer_tail = r->consumer_head; in ptr_ring_unconsume()
536 head = r->consumer_head - 1; in ptr_ring_unconsume()
538 head = r->size - 1; in ptr_ring_unconsume()
539 if (r->queue[head]) { in ptr_ring_unconsume()
543 r->queue[head] = batch[--n]; in ptr_ring_unconsume()
544 r->consumer_tail = head; in ptr_ring_unconsume()
546 WRITE_ONCE(r->consumer_head, head); in ptr_ring_unconsume()
553 spin_unlock(&r->producer_lock); in ptr_ring_unconsume()
554 spin_unlock_irqrestore(&r->consumer_lock, flags); in ptr_ring_unconsume()
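
ptr_ring_unconsume() pushes previously consumed entries back onto the consumer side of the ring, taking consumer_lock with interrupts disabled and producer_lock nested inside it; entries that no longer fit are handed to destroy(). A hedged sketch of returning a partially processed batch, with an illustrative destructor:

#include <linux/ptr_ring.h>
#include <linux/slab.h>

static void free_item(void *ptr)        /* illustrative destructor */
{
        kfree(ptr);
}

/* Give back items we consumed but could not process; any that no longer
 * fit in the ring are freed by the destructor. */
static void return_unprocessed(struct ptr_ring *r, void **items, int count)
{
        ptr_ring_unconsume(r, items, count, free_item);
}
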
557 static inline void **__ptr_ring_swap_queue(struct ptr_ring *r, void **queue, in __ptr_ring_swap_queue() argument
565 while ((ptr = __ptr_ring_consume(r))) in __ptr_ring_swap_queue()
573 __ptr_ring_set_size(r, size); in __ptr_ring_swap_queue()
574 r->producer = producer; in __ptr_ring_swap_queue()
575 r->consumer_head = 0; in __ptr_ring_swap_queue()
576 r->consumer_tail = 0; in __ptr_ring_swap_queue()
577 old = r->queue; in __ptr_ring_swap_queue()
578 r->queue = queue; in __ptr_ring_swap_queue()
589 static inline int ptr_ring_resize(struct ptr_ring *r, int size, gfp_t gfp, in ptr_ring_resize() argument
599 spin_lock_irqsave(&(r)->consumer_lock, flags); in ptr_ring_resize()
600 spin_lock(&(r)->producer_lock); in ptr_ring_resize()
602 old = __ptr_ring_swap_queue(r, queue, size, gfp, destroy); in ptr_ring_resize()
604 spin_unlock(&(r)->producer_lock); in ptr_ring_resize()
605 spin_unlock_irqrestore(&(r)->consumer_lock, flags); in ptr_ring_resize()
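
ptr_ring_resize() allocates a new queue array, then, with both locks held and interrupts disabled, __ptr_ring_swap_queue() re-produces everything it can consume from the old array into the new one; entries that do not fit (possible when shrinking) go to destroy(), and the old array is freed afterwards. A sketch, reusing the illustrative free_item() destructor from the previous example:

#include <linux/ptr_ring.h>

static int resize_ring(struct ptr_ring *r, int new_size)
{
        /* Returns -ENOMEM if the new array cannot be allocated. */
        return ptr_ring_resize(r, new_size, GFP_KERNEL, free_item);
}
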
663 static inline void ptr_ring_cleanup(struct ptr_ring *r, void (*destroy)(void *)) in ptr_ring_cleanup() argument
668 while ((ptr = ptr_ring_consume(r))) in ptr_ring_cleanup()
670 kvfree(r->queue); in ptr_ring_cleanup()
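
ptr_ring_cleanup() consumes and destroys anything still queued, then frees the queue array with kvfree(); it must not race with any other producer or consumer, so it is called exactly once at teardown. Completing the lifecycle, again with the illustrative free_item():

#include <linux/ptr_ring.h>

static void my_teardown(struct ptr_ring *r)
{
        /* No other users of the ring may be running at this point. */
        ptr_ring_cleanup(r, free_item);
}
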