Producer/consumer helpers from the Linux kernel's ptr_ring API (include/linux/ptr_ring.h).

/* Callers must hold producer_lock; a non-NULL slot at the producer index means full. */
static inline bool __ptr_ring_full(struct ptr_ring *r)
{
        return r->queue[r->producer];
}

static inline bool ptr_ring_full(struct ptr_ring *r)
{
        bool ret;

        spin_lock(&r->producer_lock);
        ret = __ptr_ring_full(r);
        spin_unlock(&r->producer_lock);
        return ret;
}

static inline bool ptr_ring_full_irq(struct ptr_ring *r)
{
        bool ret;

        spin_lock_irq(&r->producer_lock);
        ret = __ptr_ring_full(r);
        spin_unlock_irq(&r->producer_lock);
        return ret;
}

static inline bool ptr_ring_full_any(struct ptr_ring *r)
{
        unsigned long flags;
        bool ret;

        spin_lock_irqsave(&r->producer_lock, flags);
        ret = __ptr_ring_full(r);
        spin_unlock_irqrestore(&r->producer_lock, flags);
        return ret;
}

static inline bool ptr_ring_full_bh(struct ptr_ring *r)
{
        bool ret;

        spin_lock_bh(&r->producer_lock);
        ret = __ptr_ring_full(r);
        spin_unlock_bh(&r->producer_lock);
        return ret;
}
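
The locked wrappers differ only in which spin_lock flavour guards producer_lock. A minimal sketch of choosing a variant by calling context; struct my_dev and the my_tx_* helpers are hypothetical, not part of the ptr_ring API.

        #include <linux/ptr_ring.h>

        struct my_dev { struct ptr_ring tx_ring; };    /* hypothetical device */

        /* Process context, no interrupt/BH producers: the plain variant is enough. */
        static bool my_tx_full(struct my_dev *d)
        {
                return ptr_ring_full(&d->tx_ring);
        }

        /* Unknown calling context: the _any variant saves and restores IRQ state. */
        static bool my_tx_full_any_ctx(struct my_dev *d)
        {
                return ptr_ring_full_any(&d->tx_ring);
        }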

/* Callers must hold producer_lock and must never produce a NULL pointer. */
static inline int __ptr_ring_produce(struct ptr_ring *r, void *ptr)
{
        if (unlikely(!r->size) || r->queue[r->producer])
                return -ENOSPC;

        /* Order the payload writes before publishing the pointer. */
        smp_wmb();

        WRITE_ONCE(r->queue[r->producer++], ptr);
        if (unlikely(r->producer >= r->size))
                r->producer = 0;
        return 0;
}

static inline int ptr_ring_produce(struct ptr_ring *r, void *ptr)
{
        int ret;

        spin_lock(&r->producer_lock);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock(&r->producer_lock);
        return ret;
}

static inline int ptr_ring_produce_irq(struct ptr_ring *r, void *ptr)
{
        int ret;

        spin_lock_irq(&r->producer_lock);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock_irq(&r->producer_lock);
        return ret;
}

static inline int ptr_ring_produce_any(struct ptr_ring *r, void *ptr)
{
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&r->producer_lock, flags);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock_irqrestore(&r->producer_lock, flags);
        return ret;
}

static inline int ptr_ring_produce_bh(struct ptr_ring *r, void *ptr)
{
        int ret;

        spin_lock_bh(&r->producer_lock);
        ret = __ptr_ring_produce(r, ptr);
        spin_unlock_bh(&r->producer_lock);
        return ret;
}
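
__ptr_ring_produce returns -ENOSPC while the slot at the producer index is still occupied, and a NULL pointer must never be queued because NULL is what marks a free slot. A small sketch of a process-context producer; my_queue_buf and the kfree-on-failure policy are assumptions for illustration.

        #include <linux/ptr_ring.h>
        #include <linux/slab.h>

        /* Hand one buffer to the consumer side.  NULL must never be produced,
         * because a NULL slot is how the ring encodes "free".
         */
        static int my_queue_buf(struct ptr_ring *ring, void *buf)
        {
                int err = ptr_ring_produce(ring, buf);  /* 0, or -ENOSPC if full */

                if (err)
                        kfree(buf);     /* ring full: drop the buffer in this sketch */
                return err;
        }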

/* Callers must hold consumer_lock if they dereference the result. */
static inline void *__ptr_ring_peek(struct ptr_ring *r)
{
        if (likely(r->size))
                return READ_ONCE(r->queue[r->consumer_head]);
        return NULL;
}
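
__ptr_ring_peek returns the entry at consumer_head without removing it; if the result is dereferenced, consumer_lock must be held so a concurrent consumer cannot free the entry underneath us. A sketch of a locked peek; struct my_item and my_peek_next_len are hypothetical.

        #include <linux/ptr_ring.h>
        #include <linux/spinlock.h>

        struct my_item { size_t len; };         /* hypothetical payload */

        /* Look at the next item's length without dequeuing it; 0 if empty. */
        static size_t my_peek_next_len(struct ptr_ring *ring)
        {
                struct my_item *item;
                size_t len = 0;

                spin_lock(&ring->consumer_lock);
                item = __ptr_ring_peek(ring);
                if (item)
                        len = item->len;
                spin_unlock(&ring->consumer_lock);

                return len;
        }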

/* May be called without consumer_lock; the result is then only a hint. */
static inline bool __ptr_ring_empty(struct ptr_ring *r)
{
        if (likely(r->size))
                return !r->queue[READ_ONCE(r->consumer_head)];
        return true;
}

static inline bool ptr_ring_empty(struct ptr_ring *r)
{
        bool ret;

        spin_lock(&r->consumer_lock);
        ret = __ptr_ring_empty(r);
        spin_unlock(&r->consumer_lock);
        return ret;
}

static inline bool ptr_ring_empty_irq(struct ptr_ring *r)
{
        bool ret;

        spin_lock_irq(&r->consumer_lock);
        ret = __ptr_ring_empty(r);
        spin_unlock_irq(&r->consumer_lock);
        return ret;
}

static inline bool ptr_ring_empty_any(struct ptr_ring *r)
{
        unsigned long flags;
        bool ret;

        spin_lock_irqsave(&r->consumer_lock, flags);
        ret = __ptr_ring_empty(r);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
        return ret;
}

static inline bool ptr_ring_empty_bh(struct ptr_ring *r)
{
        bool ret;

        spin_lock_bh(&r->consumer_lock);
        ret = __ptr_ring_empty(r);
        spin_unlock_bh(&r->consumer_lock);
        return ret;
}
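
__ptr_ring_empty only reads consumer_head with READ_ONCE, so it can be used without consumer_lock as an opportunistic hint (assuming the ring is never resized); the locked wrappers give a stable answer. A one-line sketch; my_more_work is hypothetical.

        #include <linux/ptr_ring.h>

        /* Lockless emptiness hint: a stale "not empty" is harmless because the
         * consumer re-checks under its own lock before dequeuing.
         */
        static bool my_more_work(struct ptr_ring *ring)
        {
                return !__ptr_ring_empty(ring);
        }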

/* Must only be called after __ptr_ring_peek returned !NULL. */
static inline void __ptr_ring_discard_one(struct ptr_ring *r)
{
        /* The naive update would be:
         *      consumer = r->consumer;
         *      r->queue[consumer++] = NULL;
         *      if (unlikely(consumer >= r->size))
         *              consumer = 0;
         *      r->consumer = consumer;
         * but zeroing a slot on every consume bounces the cache line the
         * producer is writing to, so the NULLing is deferred until a whole
         * batch has been consumed.
         */
        int consumer_head = r->consumer_head;
        int head = consumer_head++;

        /* Invalidate a full batch (or the tail of the ring) in one go. */
        if (unlikely(consumer_head - r->consumer_tail >= r->batch ||
                     consumer_head >= r->size)) {
                /* Zero in reverse order: touch the producer's cache line last. */
                while (likely(head >= r->consumer_tail))
                        r->queue[head--] = NULL;
                r->consumer_tail = consumer_head;
        }
        if (unlikely(consumer_head >= r->size)) {
                consumer_head = 0;
                r->consumer_tail = 0;
        }
        /* Matching READ_ONCE in __ptr_ring_empty for lockless tests. */
        WRITE_ONCE(r->consumer_head, consumer_head);
}

static inline void *__ptr_ring_consume(struct ptr_ring *r)
{
        void *ptr = __ptr_ring_peek(r);

        if (ptr)
                __ptr_ring_discard_one(r);
        return ptr;
}

static inline int __ptr_ring_consume_batched(struct ptr_ring *r,
                                             void **array, int n)
{
        void *ptr;
        int i;

        for (i = 0; i < n; i++) {
                ptr = __ptr_ring_consume(r);
                if (!ptr)
                        break;
                array[i] = ptr;
        }
        return i;
}

static inline void *ptr_ring_consume(struct ptr_ring *r)
{
        void *ptr;

        spin_lock(&r->consumer_lock);
        ptr = __ptr_ring_consume(r);
        spin_unlock(&r->consumer_lock);
        return ptr;
}

static inline void *ptr_ring_consume_irq(struct ptr_ring *r)
{
        void *ptr;

        spin_lock_irq(&r->consumer_lock);
        ptr = __ptr_ring_consume(r);
        spin_unlock_irq(&r->consumer_lock);
        return ptr;
}

static inline void *ptr_ring_consume_any(struct ptr_ring *r)
{
        unsigned long flags;
        void *ptr;

        spin_lock_irqsave(&r->consumer_lock, flags);
        ptr = __ptr_ring_consume(r);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
        return ptr;
}

static inline void *ptr_ring_consume_bh(struct ptr_ring *r)
{
        void *ptr;

        spin_lock_bh(&r->consumer_lock);
        ptr = __ptr_ring_consume(r);
        spin_unlock_bh(&r->consumer_lock);
        return ptr;
}
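
ptr_ring_consume returns NULL once the ring is empty, so a drain loop simply runs until NULL. A sketch; my_drain is hypothetical and kfree stands in for whatever destructor the queued objects actually need.

        #include <linux/ptr_ring.h>
        #include <linux/slab.h>

        /* Drain everything currently queued, freeing each entry. */
        static void my_drain(struct ptr_ring *ring)
        {
                void *ptr;

                while ((ptr = ptr_ring_consume(ring)))
                        kfree(ptr);
        }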

static inline int ptr_ring_consume_batched(struct ptr_ring *r,
                                           void **array, int n)
{
        int ret;

        spin_lock(&r->consumer_lock);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock(&r->consumer_lock);
        return ret;
}

static inline int ptr_ring_consume_batched_irq(struct ptr_ring *r,
                                               void **array, int n)
{
        int ret;

        spin_lock_irq(&r->consumer_lock);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock_irq(&r->consumer_lock);
        return ret;
}

static inline int ptr_ring_consume_batched_any(struct ptr_ring *r,
                                               void **array, int n)
{
        unsigned long flags;
        int ret;

        spin_lock_irqsave(&r->consumer_lock, flags);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
        return ret;
}

static inline int ptr_ring_consume_batched_bh(struct ptr_ring *r,
                                              void **array, int n)
{
        int ret;

        spin_lock_bh(&r->consumer_lock);
        ret = __ptr_ring_consume_batched(r, array, n);
        spin_unlock_bh(&r->consumer_lock);
        return ret;
}
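
The batched variants copy up to n entries into a caller-supplied array and return how many were taken, amortizing one lock round-trip over the whole batch. A sketch of a process-context poller that shares the consumer side with BH context (hence the _bh variant); MY_BUDGET, my_poll and my_process are hypothetical.

        #include <linux/ptr_ring.h>

        #define MY_BUDGET 64

        static void my_process(void *entry)
        {
                /* hypothetical per-entry handler */
        }

        /* Grab up to MY_BUDGET entries at once, then process them with the
         * consumer lock already dropped.
         */
        static int my_poll(struct ptr_ring *ring)
        {
                void *batch[MY_BUDGET];
                int n, i;

                n = ptr_ring_consume_batched_bh(ring, batch, MY_BUDGET);
                for (i = 0; i < n; i++)
                        my_process(batch[i]);

                return n;
        }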

/* Cast to structure type and call a function without discarding from the ring.
 * The function must return a value; callers must take consumer_lock.
 */
#define __PTR_RING_PEEK_CALL(r, f) ((f)(__ptr_ring_peek(r)))

#define PTR_RING_PEEK_CALL(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        \
        spin_lock(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v; \
})

#define PTR_RING_PEEK_CALL_IRQ(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        \
        spin_lock_irq(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock_irq(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v; \
})

#define PTR_RING_PEEK_CALL_BH(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        \
        spin_lock_bh(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock_bh(&(r)->consumer_lock); \
        __PTR_RING_PEEK_CALL_v; \
})

#define PTR_RING_PEEK_CALL_ANY(r, f) ({ \
        typeof((f)(NULL)) __PTR_RING_PEEK_CALL_v; \
        unsigned long __PTR_RING_PEEK_CALL_f; \
        \
        spin_lock_irqsave(&(r)->consumer_lock, __PTR_RING_PEEK_CALL_f); \
        __PTR_RING_PEEK_CALL_v = __PTR_RING_PEEK_CALL(r, f); \
        spin_unlock_irqrestore(&(r)->consumer_lock, __PTR_RING_PEEK_CALL_f); \
        __PTR_RING_PEEK_CALL_v; \
})
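
The PEEK_CALL macros apply a callback to the head entry while the ring keeps ownership, taking consumer_lock in the flavour matching the suffix; the callback receives the raw pointer (NULL when the ring is empty) and must tolerate that. A sketch modelled loosely on how skb_array uses these macros; struct my_item, my_item_len and my_next_len are hypothetical.

        #include <linux/ptr_ring.h>

        struct my_item { unsigned int len; };   /* hypothetical queued object */

        /* Callback for PTR_RING_PEEK_CALL: must cope with NULL (empty ring). */
        static int my_item_len(struct my_item *item)
        {
                return item ? item->len : -1;
        }

        /* Length of the next item without dequeuing; takes consumer_lock. */
        static int my_next_len(struct ptr_ring *ring)
        {
                return PTR_RING_PEEK_CALL(ring, my_item_len);
        }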

static inline void __ptr_ring_set_size(struct ptr_ring *r, int size)
{
        r->size = size;
        r->batch = SMP_CACHE_BYTES * 2 / sizeof(*(r->queue));
        /* Batch must be at least 1 for __ptr_ring_discard_one to work;
         * batching too much on a small ring would cause burstiness.
         */
        if (r->batch > r->size / 2 || !r->batch)
                r->batch = 1;
}

static inline int ptr_ring_init(struct ptr_ring *r, int size, gfp_t gfp)
{
        r->queue = __ptr_ring_init_queue_alloc(size, gfp);
        if (!r->queue)
                return -ENOMEM;

        __ptr_ring_set_size(r, size);
        r->producer = r->consumer_head = r->consumer_tail = 0;
        spin_lock_init(&r->producer_lock);
        spin_lock_init(&r->consumer_lock);

        return 0;
}
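
ptr_ring_init allocates a zeroed array of size pointer slots (GFP_KERNEL is the usual choice) and returns -ENOMEM on failure; producer and consumer indices start at zero. A sketch of driver-side setup; struct my_dev and my_dev_setup are hypothetical.

        #include <linux/ptr_ring.h>

        struct my_dev { struct ptr_ring rx_ring; };     /* hypothetical device */

        static int my_dev_setup(struct my_dev *d, unsigned int qlen)
        {
                /* qlen pointer slots, zeroed; -ENOMEM if the allocation fails. */
                return ptr_ring_init(&d->rx_ring, qlen, GFP_KERNEL);
        }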

/* Return entries into the ring; destroy entries that no longer fit.
 * Expected to be a rare slow path.
 */
static inline void ptr_ring_unconsume(struct ptr_ring *r, void **batch, int n,
                                      void (*destroy)(void *))
{
        unsigned long flags;
        int head;

        spin_lock_irqsave(&r->consumer_lock, flags);
        spin_lock(&r->producer_lock);

        if (!r->size)
                goto done;

        /* Clean out buffered entries so a non-NULL slot below means "in use". */
        head = r->consumer_head - 1;
        while (likely(head >= r->consumer_tail))
                r->queue[head--] = NULL;
        r->consumer_tail = r->consumer_head;

        /* Move the head back one slot at a time and copy entries in,
         * stopping when we run out of space.
         */
        while (n) {
                head = r->consumer_head - 1;
                if (head < 0)
                        head = r->size - 1;
                if (r->queue[head])
                        goto done;      /* out of space: destroy the rest */
                r->queue[head] = batch[--n];
                r->consumer_tail = head;
                /* Matching READ_ONCE in __ptr_ring_empty for lockless tests. */
                WRITE_ONCE(r->consumer_head, head);
        }

done:
        /* Destroy all entries left in the batch. */
        while (n)
                destroy(batch[--n]);
        spin_unlock(&r->producer_lock);
        spin_unlock_irqrestore(&r->consumer_lock, flags);
}
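
ptr_ring_unconsume pushes already-consumed entries back in front of the consumer, destroying whatever no longer fits, and is meant as a rare slow path (for example when detaching a queue). A sketch of returning a half-processed batch; my_requeue and my_free_entry are hypothetical.

        #include <linux/ptr_ring.h>
        #include <linux/slab.h>

        static void my_free_entry(void *entry)
        {
                kfree(entry);   /* stand-in destructor for queued objects */
        }

        /* We pulled n entries with ptr_ring_consume_batched() but cannot
         * process them after all; give them back.  Entries that no longer
         * fit are handed to the destroy callback.
         */
        static void my_requeue(struct ptr_ring *ring, void **batch, int n)
        {
                ptr_ring_unconsume(ring, batch, n, my_free_entry);
        }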

static inline void **__ptr_ring_swap_queue(struct ptr_ring *r, void **queue,
                                           int size, gfp_t gfp,
                                           void (*destroy)(void *))
{
        int producer = 0;
        void **old;
        void *ptr;

        while ((ptr = __ptr_ring_consume(r)))
                if (producer < size)
                        queue[producer++] = ptr;
                else if (destroy)
                        destroy(ptr);

        if (producer >= size)
                producer = 0;
        __ptr_ring_set_size(r, size);
        r->producer = producer;
        r->consumer_head = 0;
        r->consumer_tail = 0;
        old = r->queue;
        r->queue = queue;
        return old;
}

/* Note: the producer lock nests inside the consumer lock here. */
static inline int ptr_ring_resize(struct ptr_ring *r, int size, gfp_t gfp,
                                  void (*destroy)(void *))
{
        unsigned long flags;
        void **queue = __ptr_ring_init_queue_alloc(size, gfp);
        void **old;

        if (!queue)
                return -ENOMEM;

        spin_lock_irqsave(&(r)->consumer_lock, flags);
        spin_lock(&(r)->producer_lock);

        old = __ptr_ring_swap_queue(r, queue, size, gfp, destroy);

        spin_unlock(&(r)->producer_lock);
        spin_unlock_irqrestore(&(r)->consumer_lock, flags);

        kvfree(old);
        return 0;
}
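
ptr_ring_resize allocates the new queue first, then migrates entries under both locks via __ptr_ring_swap_queue, handing entries that no longer fit to the destroy callback. A sketch of honouring a user-requested queue length, reusing the hypothetical my_free_entry() destructor from the unconsume sketch above; my_set_ring_size is likewise hypothetical.

        #include <linux/ptr_ring.h>

        static void my_free_entry(void *entry);         /* defined earlier */

        /* Grow or shrink the ring; overflow entries go to the destroy callback. */
        static int my_set_ring_size(struct ptr_ring *ring, int new_size)
        {
                return ptr_ring_resize(ring, new_size, GFP_KERNEL, my_free_entry);
        }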

static inline void ptr_ring_cleanup(struct ptr_ring *r, void (*destroy)(void *))
{
        void *ptr;

        if (destroy)
                while ((ptr = ptr_ring_consume(r)))
                        destroy(ptr);
        kvfree(r->queue);
}
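
ptr_ring_cleanup drains any remaining entries through the destroy callback and then kvfree()s the slot array; no concurrent producers or consumers may remain. A teardown sketch, again reusing the hypothetical my_free_entry() destructor; my_dev_teardown is hypothetical.

        #include <linux/ptr_ring.h>

        static void my_free_entry(void *entry);         /* defined earlier */

        static void my_dev_teardown(struct ptr_ring *ring)
        {
                /* Frees whatever is still queued, then the queue array itself. */
                ptr_ring_cleanup(ring, my_free_entry);
        }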