Lines Matching refs:krcp

3069 	struct kfree_rcu_cpu *krcp;  member
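
The member reference at line 3069 is the back-pointer from a deferred-free work channel to its owning per-CPU state. Pieced together only from the field names that appear in the matched lines below (everything else, including the exact types, the bulk-block type name, and the KFREE_N_BATCHES bound, is an assumption), the two structures relate roughly like this:

struct kfree_rcu_cpu_work {                              /* one drain channel */
        struct rcu_work rcu_work;                        /* runs kfree_rcu_work() after a grace period */
        struct kvfree_rcu_bulk_data *bkvhead_free[2];    /* detached bulk lists (lines 3278-3285) */
        struct rcu_head *head_free;                      /* detached rcu_head list (line 3292) */
        struct kfree_rcu_cpu *krcp;                      /* line 3069: back-pointer to the owner */
};

struct kfree_rcu_cpu {                                   /* per-CPU state, the &krc variable */
        struct rcu_head *head;                           /* fallback rcu_head channel (line 3467) */
        struct kvfree_rcu_bulk_data *bkvhead[2];         /* bulk blocks being filled (line 3399) */
        struct kfree_rcu_cpu_work krw_arr[KFREE_N_BATCHES]; /* drain channels (line 3269); bound assumed */
        raw_spinlock_t lock;                             /* line 3129 */
        struct delayed_work monitor_work;                /* line 3329 */
        bool monitor_todo;                               /* line 3320 */
        bool initialized;                                /* line 4470 */
        int count;                                       /* queued objects (line 3471) */
        struct llist_head bkvcache;                      /* cached spare blocks (line 3160) */
        int nr_bkv_objs;                                 /* line 3147 */
};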
3125 struct kfree_rcu_cpu *krcp; in krc_this_cpu_lock() local
3128 krcp = this_cpu_ptr(&krc); in krc_this_cpu_lock()
3129 raw_spin_lock(&krcp->lock); in krc_this_cpu_lock()
3131 return krcp; in krc_this_cpu_lock()
3135 krc_this_cpu_unlock(struct kfree_rcu_cpu *krcp, unsigned long flags) in krc_this_cpu_unlock() argument
3137 raw_spin_unlock(&krcp->lock); in krc_this_cpu_unlock()
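
Lines 3125-3137 are the per-CPU lock helpers used throughout this file. A minimal reconstruction from the matched lines; the local_irq_save()/local_irq_restore() pair is inferred (it is what makes this_cpu_ptr() and the later restore via *flags safe) rather than taken from the listing:

static inline struct kfree_rcu_cpu *
krc_this_cpu_lock(unsigned long *flags)
{
        struct kfree_rcu_cpu *krcp;

        local_irq_save(*flags);         /* inferred: keep us on this CPU */
        krcp = this_cpu_ptr(&krc);      /* line 3128 */
        raw_spin_lock(&krcp->lock);     /* line 3129 */

        return krcp;                    /* line 3131 */
}

static inline void
krc_this_cpu_unlock(struct kfree_rcu_cpu *krcp, unsigned long flags)
{
        raw_spin_unlock(&krcp->lock);   /* line 3137 */
        local_irq_restore(flags);       /* inferred counterpart of the save above */
}

Callers such as kvfree_call_rcu() (line 3442) and the bnode-recycle step in kfree_rcu_work() (line 3220) rely on the returned krcp staying valid because interrupts remain disabled until the matching unlock.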
3142 get_cached_bnode(struct kfree_rcu_cpu *krcp) in get_cached_bnode() argument
3144 if (!krcp->nr_bkv_objs) in get_cached_bnode()
3147 krcp->nr_bkv_objs--; in get_cached_bnode()
3149 llist_del_first(&krcp->bkvcache); in get_cached_bnode()
3153 put_cached_bnode(struct kfree_rcu_cpu *krcp, in put_cached_bnode() argument
3157 if (krcp->nr_bkv_objs >= rcu_min_cached_objs) in put_cached_bnode()
3160 llist_add((struct llist_node *) bnode, &krcp->bkvcache); in put_cached_bnode()
3161 krcp->nr_bkv_objs++; in put_cached_bnode()
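
Lines 3142-3161 manage a small per-CPU cache of spare bulk blocks so the hot path can avoid page allocation. A sketch assuming the cached objects are the same bulk-data blocks used for bkvhead[], kept on the bkvcache llist, counted by nr_bkv_objs, and capped at rcu_min_cached_objs:

static inline struct kvfree_rcu_bulk_data *
get_cached_bnode(struct kfree_rcu_cpu *krcp)
{
        if (!krcp->nr_bkv_objs)                         /* line 3144: cache empty */
                return NULL;

        krcp->nr_bkv_objs--;                            /* line 3147 */
        return (struct kvfree_rcu_bulk_data *)
                llist_del_first(&krcp->bkvcache);       /* line 3149 */
}

static inline bool
put_cached_bnode(struct kfree_rcu_cpu *krcp,
                 struct kvfree_rcu_bulk_data *bnode)
{
        if (krcp->nr_bkv_objs >= rcu_min_cached_objs)   /* line 3157: cache full */
                return false;

        llist_add((struct llist_node *)bnode, &krcp->bkvcache); /* line 3160 */
        krcp->nr_bkv_objs++;                            /* line 3161 */
        return true;
}

Both helpers expect krcp->lock to be held, which is why kfree_rcu_work() re-takes the lock (line 3220) before returning a drained block to the cache.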
3175 struct kfree_rcu_cpu *krcp; in kfree_rcu_work() local
3181 krcp = krwp->krcp; in kfree_rcu_work()
3183 raw_spin_lock_irqsave(&krcp->lock, flags); in kfree_rcu_work()
3193 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_work()
3220 krcp = krc_this_cpu_lock(&flags); in kfree_rcu_work()
3221 if (put_cached_bnode(krcp, bkvhead[i])) in kfree_rcu_work()
3223 krc_this_cpu_unlock(krcp, flags); in kfree_rcu_work()
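
Inside kfree_rcu_work() (lines 3175-3223), krcp appears twice: once under krcp->lock (lines 3183-3193) to detach the batch that krwp->krcp (line 3181) points back to, and once after a bulk block has been emptied, to recycle the block. A fragment-level sketch of that recycle step; bkvhead[i] and the free_page() fallback are assumptions drawn from the surrounding function rather than from the matched lines:

        /* After the block's records have been freed in bulk: */
        krcp = krc_this_cpu_lock(&flags);               /* line 3220 */
        if (put_cached_bnode(krcp, bkvhead[i]))         /* line 3221: recycled */
                bkvhead[i] = NULL;                      /* assumed: the cache owns it now */
        krc_this_cpu_unlock(krcp, flags);               /* line 3223 */

        if (bkvhead[i])                                 /* assumed fallback when the cache is full */
                free_page((unsigned long)bkvhead[i]);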
3260 static inline bool queue_kfree_rcu_work(struct kfree_rcu_cpu *krcp) in queue_kfree_rcu_work() argument
3266 lockdep_assert_held(&krcp->lock); in queue_kfree_rcu_work()
3269 krwp = &(krcp->krw_arr[i]); in queue_kfree_rcu_work()
3278 if ((krcp->bkvhead[0] && !krwp->bkvhead_free[0]) || in queue_kfree_rcu_work()
3279 (krcp->bkvhead[1] && !krwp->bkvhead_free[1]) || in queue_kfree_rcu_work()
3280 (krcp->head && !krwp->head_free)) { in queue_kfree_rcu_work()
3285 krwp->bkvhead_free[j] = krcp->bkvhead[j]; in queue_kfree_rcu_work()
3286 krcp->bkvhead[j] = NULL; in queue_kfree_rcu_work()
3292 krwp->head_free = krcp->head; in queue_kfree_rcu_work()
3293 krcp->head = NULL; in queue_kfree_rcu_work()
3296 WRITE_ONCE(krcp->count, 0); in queue_kfree_rcu_work()
3309 if (krcp->bkvhead[0] || krcp->bkvhead[1] || krcp->head) in queue_kfree_rcu_work()
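
Lines 3260-3309 show queue_kfree_rcu_work() moving the per-CPU pending lists into an idle work channel so a grace period can be started for them. A condensed sketch; the KFREE_N_BATCHES bound, the two-channel count, and the queue_rcu_work() call are assumptions based on context, while the rest follows the matched lines:

static inline bool queue_kfree_rcu_work(struct kfree_rcu_cpu *krcp)
{
        struct kfree_rcu_cpu_work *krwp;
        bool repeat = false;
        int i, j;

        lockdep_assert_held(&krcp->lock);               /* line 3266 */

        for (i = 0; i < KFREE_N_BATCHES; i++) {         /* assumed bound */
                krwp = &(krcp->krw_arr[i]);             /* line 3269 */

                /* lines 3278-3280: only hand work to an idle channel */
                if ((krcp->bkvhead[0] && !krwp->bkvhead_free[0]) ||
                    (krcp->bkvhead[1] && !krwp->bkvhead_free[1]) ||
                    (krcp->head && !krwp->head_free)) {
                        for (j = 0; j < 2; j++) {       /* kmalloc and vmalloc channels */
                                if (!krwp->bkvhead_free[j]) {
                                        krwp->bkvhead_free[j] = krcp->bkvhead[j]; /* line 3285 */
                                        krcp->bkvhead[j] = NULL;                  /* line 3286 */
                                }
                        }
                        if (!krwp->head_free) {
                                krwp->head_free = krcp->head;   /* line 3292 */
                                krcp->head = NULL;              /* line 3293 */
                        }

                        WRITE_ONCE(krcp->count, 0);     /* line 3296 */

                        /* assumed: wait for a grace period, then run kfree_rcu_work() */
                        queue_rcu_work(system_wq, &krwp->rcu_work);
                }
        }

        /* line 3309: something is still queued, so the caller must rearm the monitor */
        if (krcp->bkvhead[0] || krcp->bkvhead[1] || krcp->head)
                repeat = true;

        return !repeat;
}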
3316 static inline void kfree_rcu_drain_unlock(struct kfree_rcu_cpu *krcp, in kfree_rcu_drain_unlock() argument
3320 krcp->monitor_todo = false; in kfree_rcu_drain_unlock()
3321 if (queue_kfree_rcu_work(krcp)) { in kfree_rcu_drain_unlock()
3323 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_drain_unlock()
3328 krcp->monitor_todo = true; in kfree_rcu_drain_unlock()
3329 schedule_delayed_work(&krcp->monitor_work, KFREE_DRAIN_JIFFIES); in kfree_rcu_drain_unlock()
3330 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_drain_unlock()
3340 struct kfree_rcu_cpu *krcp = container_of(work, struct kfree_rcu_cpu, in kfree_rcu_monitor() local
3343 raw_spin_lock_irqsave(&krcp->lock, flags); in kfree_rcu_monitor()
3344 if (krcp->monitor_todo) in kfree_rcu_monitor()
3345 kfree_rcu_drain_unlock(krcp, flags); in kfree_rcu_monitor()
3347 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_monitor()
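
Lines 3316-3347 pair the drain helper with the delayed work that periodically invokes it. kfree_rcu_drain_unlock() is entered with krcp->lock held and always releases it. A sketch built from the matched lines; only the else-branch placement in the monitor and the monitor_work.work argument to container_of() are inferred:

static inline void kfree_rcu_drain_unlock(struct kfree_rcu_cpu *krcp,
                                          unsigned long flags)
{
        krcp->monitor_todo = false;                     /* line 3320 */
        if (queue_kfree_rcu_work(krcp)) {               /* line 3321: everything handed off */
                raw_spin_unlock_irqrestore(&krcp->lock, flags); /* line 3323 */
                return;
        }

        /* lines 3328-3330: a channel was busy, retry after KFREE_DRAIN_JIFFIES */
        krcp->monitor_todo = true;
        schedule_delayed_work(&krcp->monitor_work, KFREE_DRAIN_JIFFIES);
        raw_spin_unlock_irqrestore(&krcp->lock, flags);
}

static void kfree_rcu_monitor(struct work_struct *work)
{
        unsigned long flags;
        struct kfree_rcu_cpu *krcp = container_of(work, struct kfree_rcu_cpu,
                                                  monitor_work.work);   /* line 3340 */

        raw_spin_lock_irqsave(&krcp->lock, flags);      /* line 3343 */
        if (krcp->monitor_todo)                         /* line 3344 */
                kfree_rcu_drain_unlock(krcp, flags);    /* line 3345: drops the lock */
        else
                raw_spin_unlock_irqrestore(&krcp->lock, flags); /* line 3347 */
}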
3351 kvfree_call_rcu_add_ptr_to_bulk(struct kfree_rcu_cpu *krcp, void *ptr) in kvfree_call_rcu_add_ptr_to_bulk() argument
3356 if (unlikely(!krcp->initialized)) in kvfree_call_rcu_add_ptr_to_bulk()
3359 lockdep_assert_held(&krcp->lock); in kvfree_call_rcu_add_ptr_to_bulk()
3363 if (!krcp->bkvhead[idx] || in kvfree_call_rcu_add_ptr_to_bulk()
3364 krcp->bkvhead[idx]->nr_records == KVFREE_BULK_MAX_ENTR) { in kvfree_call_rcu_add_ptr_to_bulk()
3365 bnode = get_cached_bnode(krcp); in kvfree_call_rcu_add_ptr_to_bulk()
3396 bnode->next = krcp->bkvhead[idx]; in kvfree_call_rcu_add_ptr_to_bulk()
3399 krcp->bkvhead[idx] = bnode; in kvfree_call_rcu_add_ptr_to_bulk()
3403 krcp->bkvhead[idx]->records in kvfree_call_rcu_add_ptr_to_bulk()
3404 [krcp->bkvhead[idx]->nr_records++] = ptr; in kvfree_call_rcu_add_ptr_to_bulk()
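
Lines 3351-3404 are the bulk-append path taken under krcp->lock. A sketch; the page-allocation fallback that sits between lines 3365 and 3396 in the source is only summarized in a comment, and the is_vmalloc_addr() channel selection and the nr_records reset are assumptions from context:

static inline bool
kvfree_call_rcu_add_ptr_to_bulk(struct kfree_rcu_cpu *krcp, void *ptr)
{
        struct kvfree_rcu_bulk_data *bnode;
        int idx;

        if (unlikely(!krcp->initialized))       /* line 3356: too early in boot */
                return false;

        lockdep_assert_held(&krcp->lock);       /* line 3359 */
        idx = !!is_vmalloc_addr(ptr);           /* assumed: channel 0 kmalloc, 1 vmalloc */

        /* lines 3363-3365: need a fresh block if none exists or the current one is full */
        if (!krcp->bkvhead[idx] ||
            krcp->bkvhead[idx]->nr_records == KVFREE_BULK_MAX_ENTR) {
                bnode = get_cached_bnode(krcp);
                /*
                 * Elided (roughly lines 3366-3395): if the cache is empty,
                 * attempt an opportunistic page allocation; if that also
                 * fails, return false so the caller falls back to the
                 * rcu_head channel.
                 */
                if (!bnode)
                        return false;

                bnode->nr_records = 0;                  /* assumed initialization */
                bnode->next = krcp->bkvhead[idx];       /* line 3396 */
                krcp->bkvhead[idx] = bnode;             /* line 3399 */
        }

        /* lines 3403-3404: record the pointer in the current block */
        krcp->bkvhead[idx]->records
                [krcp->bkvhead[idx]->nr_records++] = ptr;

        return true;
}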
3424 struct kfree_rcu_cpu *krcp; in kvfree_call_rcu() local
3442 krcp = krc_this_cpu_lock(&flags); in kvfree_call_rcu()
3459 success = kvfree_call_rcu_add_ptr_to_bulk(krcp, ptr); in kvfree_call_rcu()
3466 head->next = krcp->head; in kvfree_call_rcu()
3467 krcp->head = head; in kvfree_call_rcu()
3471 WRITE_ONCE(krcp->count, krcp->count + 1); in kvfree_call_rcu()
3475 !krcp->monitor_todo) { in kvfree_call_rcu()
3476 krcp->monitor_todo = true; in kvfree_call_rcu()
3477 schedule_delayed_work(&krcp->monitor_work, KFREE_DRAIN_JIFFIES); in kvfree_call_rcu()
3481 krc_this_cpu_unlock(krcp, flags); in kvfree_call_rcu()
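
Lines 3424-3481 are the kvfree_call_rcu() entry point. A condensed sketch of how it threads through the helpers above; the pointer decoding, the double-free debug checks, the headless-pointer handling, and the rcu_scheduler_active test paired with the !krcp->monitor_todo check at line 3475 are assumptions not visible in the matched lines:

void kvfree_call_rcu(struct rcu_head *head, rcu_callback_t func)
{
        unsigned long flags;
        struct kfree_rcu_cpu *krcp;
        bool success;
        void *ptr = (void *)head - (unsigned long)func; /* assumed offset decoding */

        krcp = krc_this_cpu_lock(&flags);               /* line 3442 */

        /* line 3459: preferred path, append to a bulk block */
        success = kvfree_call_rcu_add_ptr_to_bulk(krcp, ptr);
        if (!success) {
                /* lines 3466-3467: fall back to chaining the rcu_head */
                head->func = func;                      /* assumed from context */
                head->next = krcp->head;
                krcp->head = head;
        }

        WRITE_ONCE(krcp->count, krcp->count + 1);       /* line 3471 */

        /* lines 3475-3477: arm the periodic monitor if it is not already pending */
        if (rcu_scheduler_active == RCU_SCHEDULER_RUNNING &&   /* assumed */
            !krcp->monitor_todo) {
                krcp->monitor_todo = true;
                schedule_delayed_work(&krcp->monitor_work, KFREE_DRAIN_JIFFIES);
        }

        krc_this_cpu_unlock(krcp, flags);               /* line 3481 */
}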
3504 struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu); in kfree_rcu_shrink_count() local
3506 count += READ_ONCE(krcp->count); in kfree_rcu_shrink_count()
3520 struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu); in kfree_rcu_shrink_scan() local
3522 count = krcp->count; in kfree_rcu_shrink_scan()
3523 raw_spin_lock_irqsave(&krcp->lock, flags); in kfree_rcu_shrink_scan()
3524 if (krcp->monitor_todo) in kfree_rcu_shrink_scan()
3525 kfree_rcu_drain_unlock(krcp, flags); in kfree_rcu_shrink_scan()
3527 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_shrink_scan()
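
Lines 3504-3527 are the two halves of the memory-pressure shrinker: the count callback sums the per-CPU counters locklessly, and the scan callback forces a drain on each CPU. A sketch using the standard shrinker signatures; the loop bookkeeping and the SHRINK_STOP return convention are assumptions beyond the matched lines:

static unsigned long
kfree_rcu_shrink_count(struct shrinker *shrink, struct shrink_control *sc)
{
        int cpu;
        unsigned long count = 0;

        for_each_possible_cpu(cpu) {
                struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu);    /* line 3504 */

                count += READ_ONCE(krcp->count);        /* line 3506: lockless snapshot */
        }

        return count;
}

static unsigned long
kfree_rcu_shrink_scan(struct shrinker *shrink, struct shrink_control *sc)
{
        int cpu, freed = 0;
        unsigned long flags;

        for_each_possible_cpu(cpu) {
                int count;
                struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu);    /* line 3520 */

                count = krcp->count;                            /* line 3522 */
                raw_spin_lock_irqsave(&krcp->lock, flags);      /* line 3523 */
                if (krcp->monitor_todo)                         /* line 3524 */
                        kfree_rcu_drain_unlock(krcp, flags);    /* drops the lock */
                else
                        raw_spin_unlock_irqrestore(&krcp->lock, flags); /* line 3527 */

                sc->nr_to_scan -= count;                /* assumed accounting */
                freed += count;
                if (sc->nr_to_scan <= 0)
                        break;
        }

        return freed == 0 ? SHRINK_STOP : freed;        /* assumed convention */
}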
3552 struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu); in kfree_rcu_scheduler_running() local
3554 raw_spin_lock_irqsave(&krcp->lock, flags); in kfree_rcu_scheduler_running()
3555 if (!krcp->head || krcp->monitor_todo) { in kfree_rcu_scheduler_running()
3556 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_scheduler_running()
3559 krcp->monitor_todo = true; in kfree_rcu_scheduler_running()
3560 schedule_delayed_work_on(cpu, &krcp->monitor_work, in kfree_rcu_scheduler_running()
3562 raw_spin_unlock_irqrestore(&krcp->lock, flags); in kfree_rcu_scheduler_running()
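
Lines 3552-3562 run once the scheduler is up: any CPU that queued callbacks during early boot (krcp->head is non-NULL but no monitor was armed) gets its delayed work scheduled now. A sketch; only the enclosing per-CPU loop, the continue, and the __init qualifier are inferred:

void __init kfree_rcu_scheduler_running(void)           /* __init assumed */
{
        int cpu;
        unsigned long flags;

        for_each_possible_cpu(cpu) {
                struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu);    /* line 3552 */

                raw_spin_lock_irqsave(&krcp->lock, flags);              /* line 3554 */
                if (!krcp->head || krcp->monitor_todo) {                /* line 3555 */
                        raw_spin_unlock_irqrestore(&krcp->lock, flags); /* line 3556 */
                        continue;
                }
                krcp->monitor_todo = true;                              /* line 3559 */
                schedule_delayed_work_on(cpu, &krcp->monitor_work,      /* line 3560 */
                                         KFREE_DRAIN_JIFFIES);
                raw_spin_unlock_irqrestore(&krcp->lock, flags);         /* line 3562 */
        }
}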
4451 struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu); in kfree_rcu_batch_init() local
4455 INIT_RCU_WORK(&krcp->krw_arr[i].rcu_work, kfree_rcu_work); in kfree_rcu_batch_init()
4456 krcp->krw_arr[i].krcp = krcp; in kfree_rcu_batch_init()
4464 put_cached_bnode(krcp, bnode); in kfree_rcu_batch_init()
4469 INIT_DELAYED_WORK(&krcp->monitor_work, kfree_rcu_monitor); in kfree_rcu_batch_init()
4470 krcp->initialized = true; in kfree_rcu_batch_init()
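
Lines 4451-4470 are boot-time setup. Per CPU, each drain channel is wired to kfree_rcu_work() with a back-pointer to its owner, the bnode cache is pre-filled (line 4464), and the monitor work is initialized before initialized is set. A sketch; the KFREE_N_BATCHES bound, the pre-fill allocation, and the enclosing loop are inferred rather than taken from the matched lines:

static void __init kfree_rcu_batch_init(void)
{
        int cpu;
        int i;

        for_each_possible_cpu(cpu) {
                struct kfree_rcu_cpu *krcp = per_cpu_ptr(&krc, cpu);    /* line 4451 */
                struct kvfree_rcu_bulk_data *bnode;

                for (i = 0; i < KFREE_N_BATCHES; i++) {                 /* assumed bound */
                        INIT_RCU_WORK(&krcp->krw_arr[i].rcu_work, kfree_rcu_work); /* line 4455 */
                        krcp->krw_arr[i].krcp = krcp;                   /* line 4456 */
                }

                /* pre-fill the bnode cache; allocation details are assumed */
                for (i = 0; i < rcu_min_cached_objs; i++) {
                        bnode = (struct kvfree_rcu_bulk_data *)
                                __get_free_page(GFP_NOWAIT | __GFP_NOWARN);
                        if (bnode)
                                put_cached_bnode(krcp, bnode);          /* line 4464 */
                }

                INIT_DELAYED_WORK(&krcp->monitor_work, kfree_rcu_monitor); /* line 4469 */
                krcp->initialized = true;                               /* line 4470 */
        }
}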