Lines matching +full:1 +full:ma (all hits below are from kernel/bpf/memalloc.c, the BPF-specific memory allocator)

48	1,	/* 72 */
49	1,	/* 80 */
50	1,	/* 88 */
51	1,	/* 96 */
69 return -1; in bpf_mem_cache_idx()
72 return size_index[(size - 1) / 8] - 1; in bpf_mem_cache_idx()
74 return fls(size - 1) - 1; in bpf_mem_cache_idx()
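
Lines 48-51 and 69-74 together sketch how a request size is mapped to a cache bucket: sizes covered by the size_index table share a bucket (the visible 72..96-byte entries all hold the value 1, giving bucket index 0 after the trailing "- 1"), while larger sizes fall through to fls(size - 1), i.e. a roughly power-of-two bucket. A standalone userspace illustration, assuming the table path covers sizes up to 192 bytes and that sizes above 4096 are rejected (neither bound appears in the matched lines, and only the four 72..96 table entries are visible):

#include <stdio.h>

/* Userspace stand-in for the kernel's fls(): 1-based index of the most
 * significant set bit, with fls(0) == 0. */
static int fls_demo(unsigned long x)
{
	int pos = 0;

	while (x) {
		pos++;
		x >>= 1;
	}
	return pos;
}

static int cache_idx_demo(size_t size)
{
	if (size >= 72 && size <= 96)		/* the only table entries shown (lines 48-51) */
		return 1 - 1;			/* size_index[(size - 1) / 8] - 1, line 72 */
	if (size > 192 && size <= 4096)		/* assumed range for the fls() path */
		return fls_demo(size - 1) - 1;	/* line 74 */
	return -1;				/* line 69: size not handled here */
}

int main(void)
{
	size_t sizes[] = { 72, 96, 200, 512, 4096, 8192 };
	int i;

	for (i = 0; i < (int)(sizeof(sizes) / sizeof(sizes[0])); i++)
		printf("size %5zu -> bucket index %d\n",
		       sizes[i], cache_idx_demo(sizes[i]));
	return 0;
}
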
142 obj[1] = pptr; in __alloc()
190 WARN_ON_ONCE(local_inc_return(&c->active) != 1); in alloc_bulk()
204 free_percpu(((void **)obj)[1]); in free_one()
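
Lines 142 and 204 hint at how per-cpu objects are handled: __alloc() stashes the per-cpu pointer in slot [1] of a small kmalloc'ed wrapper, and free_one() pulls it back out of the same slot before freeing. What lives in slot [0], and the wrapper's real size, are not in the match list, so the userspace model below only mirrors the slot [1] bookkeeping, with malloc() standing in for both allocators:

#include <stdio.h>
#include <stdlib.h>

/* Hand out a small pointer array and remember the "per-cpu" pointer in
 * slot [1], as on line 142. */
static void *demo_alloc_percpu_wrapper(size_t unit_size)
{
	void **obj = malloc(2 * sizeof(void *));	/* wrapper size is an assumption */
	void *pptr = malloc(unit_size);			/* stands in for the real per-cpu allocation */

	if (!obj || !pptr) {
		free(obj);
		free(pptr);
		return NULL;
	}
	obj[1] = pptr;
	return obj;
}

/* Recover the per-cpu pointer from slot [1] and free it before the
 * wrapper itself, mirroring line 204. */
static void demo_free_percpu_wrapper(void *obj)
{
	free(((void **)obj)[1]);
	free(obj);
}

int main(void)
{
	void *obj = demo_alloc_percpu_wrapper(64);

	if (obj)
		demo_free_percpu_wrapper(obj);
	return 0;
}
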
244 if (atomic_xchg(&c->call_rcu_in_progress, 1)) in do_call_rcu()
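
Line 244 shows a single-flight guard: atomic_xchg() sets call_rcu_in_progress to 1 and returns the previous value, so only the caller that observed 0 actually queues an RCU callback, and later callers back off until the callback clears the flag. A small C11 model of that pattern (the reset in main() stands in for the real RCU callback, which is not in the match list):

#include <stdatomic.h>
#include <stdio.h>

static atomic_int call_rcu_in_progress;

static void demo_do_call_rcu(const char *who)
{
	/* atomic_exchange() returns the previous value: only the caller that
	 * saw 0 proceeds, later callers skip until the flag is cleared. */
	if (atomic_exchange(&call_rcu_in_progress, 1)) {
		printf("%s: callback already in flight, skip\n", who);
		return;
	}
	printf("%s: issuing callback\n", who);
}

int main(void)
{
	demo_do_call_rcu("first");	/* issues */
	demo_do_call_rcu("second");	/* skips */

	/* Simulate the RCU callback completing and re-arming the flag. */
	atomic_store(&call_rcu_in_progress, 0);
	demo_do_call_rcu("third");	/* issues again */
	return 0;
}
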
271 WARN_ON_ONCE(local_inc_return(&c->active) != 1); in free_bulk()
336 * 8k allocs and above low == 1, high == 3, batch == 1. in prefill_mem_cache()
338 c->low_watermark = max(32 * 256 / c->unit_size, 1); in prefill_mem_cache()
341 c->batch = max((c->high_watermark - c->low_watermark) / 4 * 3, 1); in prefill_mem_cache()
343 /* To avoid consuming memory assume that 1st run of bpf in prefill_mem_cache()
347 alloc_bulk(c, c->unit_size <= 256 ? 4 : 1, cpu_to_node(cpu)); in prefill_mem_cache()
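
Lines 336-347 show the per-bucket tuning in prefill_mem_cache(): low_watermark and batch come straight from the matched lines, while the high_watermark formula is not in the match list and is inferred here from the "8k allocs and above low == 1, high == 3, batch == 1" comment, so treat it as an assumption. A quick userspace calculation for a few unit sizes:

#include <stdio.h>

#define MAX(a, b) ((a) > (b) ? (a) : (b))

int main(void)
{
	int unit_sizes[] = { 16, 64, 256, 1024, 4096, 8192 };
	int i;

	for (i = 0; i < (int)(sizeof(unit_sizes) / sizeof(unit_sizes[0])); i++) {
		int unit_size = unit_sizes[i];
		int low   = MAX(32 * 256 / unit_size, 1);	/* line 338 */
		int high  = MAX(96 * 256 / unit_size, 3);	/* assumed, see lead-in */
		int batch = MAX((high - low) / 4 * 3, 1);	/* line 341 */

		printf("unit_size %5d: low %4d high %4d batch %4d\n",
		       unit_size, low, high, batch);
	}
	return 0;
}

For an 8192-byte unit this prints low 1, high 3, batch 1, matching the comment at line 336; line 347 additionally shows the one-off prefill of 4 objects for units up to 256 bytes and 1 object otherwise.
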
357 int bpf_mem_alloc_init(struct bpf_mem_alloc *ma, int size, bool percpu) in bpf_mem_alloc_init() argument
387 ma->cache = pc; in bpf_mem_alloc_init()
410 ma->caches = pcc; in bpf_mem_alloc_init()
435 static void free_mem_alloc_no_barrier(struct bpf_mem_alloc *ma) in free_mem_alloc_no_barrier() argument
437 free_percpu(ma->cache); in free_mem_alloc_no_barrier()
438 free_percpu(ma->caches); in free_mem_alloc_no_barrier()
439 ma->cache = NULL; in free_mem_alloc_no_barrier()
440 ma->caches = NULL; in free_mem_alloc_no_barrier()
443 static void free_mem_alloc(struct bpf_mem_alloc *ma) in free_mem_alloc() argument
450 free_mem_alloc_no_barrier(ma); in free_mem_alloc()
455 struct bpf_mem_alloc *ma = container_of(work, struct bpf_mem_alloc, work); in free_mem_alloc_deferred() local
457 free_mem_alloc(ma); in free_mem_alloc_deferred()
458 kfree(ma); in free_mem_alloc_deferred()
461 static void destroy_mem_alloc(struct bpf_mem_alloc *ma, int rcu_in_progress) in destroy_mem_alloc() argument
469 free_mem_alloc_no_barrier(ma); in destroy_mem_alloc()
473 copy = kmalloc(sizeof(*ma), GFP_KERNEL); in destroy_mem_alloc()
476 free_mem_alloc(ma); in destroy_mem_alloc()
481 copy->cache = ma->cache; in destroy_mem_alloc()
482 ma->cache = NULL; in destroy_mem_alloc()
483 copy->caches = ma->caches; in destroy_mem_alloc()
484 ma->caches = NULL; in destroy_mem_alloc()
489 void bpf_mem_alloc_destroy(struct bpf_mem_alloc *ma) in bpf_mem_alloc_destroy() argument
495 if (ma->cache) { in bpf_mem_alloc_destroy()
498 c = per_cpu_ptr(ma->cache, cpu); in bpf_mem_alloc_destroy()
515 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
517 if (ma->caches) { in bpf_mem_alloc_destroy()
520 cc = per_cpu_ptr(ma->caches, cpu); in bpf_mem_alloc_destroy()
530 destroy_mem_alloc(ma, rcu_in_progress); in bpf_mem_alloc_destroy()
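
Lines 435-530 outline the teardown path: bpf_mem_alloc_destroy() tallies rcu_in_progress across CPUs and hands it to destroy_mem_alloc(), which frees the per-cpu areas immediately when nothing is in flight, and otherwise moves the pointers into a heap copy whose freeing is finished from a workqueue (falling back to an inline free when the copy allocation fails, lines 473-476). A kernel-style sketch of that pattern with stand-in names; the RCU barriers inside the real free_mem_alloc() are not in the match list and are only noted in comments:

#include <linux/slab.h>
#include <linux/workqueue.h>
#include <linux/percpu.h>

struct demo_alloc {			/* stand-in for struct bpf_mem_alloc */
	void __percpu *cache;
	void __percpu *caches;
	struct work_struct work;
};

static void demo_free_deferred(struct work_struct *work)
{
	struct demo_alloc *ma = container_of(work, struct demo_alloc, work);

	/* The real free_mem_alloc() presumably waits for outstanding RCU
	 * callbacks here before touching the per-cpu areas; those lines are
	 * not in the match list. */
	free_percpu(ma->cache);
	free_percpu(ma->caches);
	kfree(ma);			/* line 458 */
}

static void demo_destroy(struct demo_alloc *ma, int rcu_in_progress)
{
	struct demo_alloc *copy;

	if (!rcu_in_progress) {
		/* Nothing in flight: free immediately (lines 437-440, 469). */
		free_percpu(ma->cache);
		free_percpu(ma->caches);
		ma->cache = NULL;
		ma->caches = NULL;
		return;
	}

	copy = kzalloc(sizeof(*copy), GFP_KERNEL);
	if (!copy)
		return;		/* lines 473-476: the real code falls back to an inline free here */

	/* Hand the per-cpu pointers to the copy and finish from a workqueue
	 * so the caller does not have to wait (lines 481-484). */
	copy->cache = ma->cache;
	ma->cache = NULL;
	copy->caches = ma->caches;
	ma->caches = NULL;
	INIT_WORK(&copy->work, demo_free_deferred);
	queue_work(system_unbound_wq, &copy->work);
}
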
554 if (local_inc_return(&c->active) == 1) { in unit_alloc()
582 if (local_inc_return(&c->active) == 1) { in unit_free()
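
Lines 554 and 582 show unit_alloc()/unit_free() taking the fast path only when local_inc_return(&c->active) == 1, i.e. when they are the outermost entry on this CPU; lines 190 and 271 use the same counter purely as a sanity check (WARN_ON_ONCE(... != 1)) in paths where nesting is not expected. A minimal single-threaded model of that guard; the interrupt masking around it in the real functions is not in the match list, and local_t is replaced with a plain counter:

#include <stdio.h>

struct demo_cache {
	long active;		/* stands in for the local_t c->active */
	int free_cnt;		/* stands in for the per-cpu free list */
};

static void *demo_unit_alloc(struct demo_cache *c)
{
	void *obj = NULL;

	if (++c->active == 1) {		/* outermost entry owns the list */
		if (c->free_cnt > 0) {
			c->free_cnt--;
			obj = &c->free_cnt;	/* dummy non-NULL object */
		}
	}
	/* Nested entries leave the list alone and simply return NULL. */
	c->active--;
	return obj;
}

int main(void)
{
	struct demo_cache c = { .active = 0, .free_cnt = 1 };

	printf("outermost alloc -> %p\n", demo_unit_alloc(&c));

	/* Simulate a nested entry: pretend an NMI interrupted the allocator
	 * on this CPU after it had already bumped c->active. */
	c.active++;
	printf("nested alloc    -> %p\n", demo_unit_alloc(&c));
	c.active--;
	return 0;
}
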
605 void notrace *bpf_mem_alloc(struct bpf_mem_alloc *ma, size_t size) in bpf_mem_alloc() argument
617 ret = unit_alloc(this_cpu_ptr(ma->caches)->cache + idx); in bpf_mem_alloc()
621 void notrace bpf_mem_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_free() argument
632 unit_free(this_cpu_ptr(ma->caches)->cache + idx, ptr); in bpf_mem_free()
635 void notrace *bpf_mem_cache_alloc(struct bpf_mem_alloc *ma) in bpf_mem_cache_alloc() argument
639 ret = unit_alloc(this_cpu_ptr(ma->cache)); in bpf_mem_cache_alloc()
643 void notrace bpf_mem_cache_free(struct bpf_mem_alloc *ma, void *ptr) in bpf_mem_cache_free() argument
648 unit_free(this_cpu_ptr(ma->cache), ptr); in bpf_mem_cache_free()
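
Lines 357-648 list the public entry points: judging by lines 387 and 410, a non-zero size at init sets up a single fixed-size cache (ma->cache) while size == 0 sets up the size-indexed caches (ma->caches); bpf_mem_cache_alloc/free use the former (lines 639, 648), bpf_mem_alloc/bpf_mem_free the latter (lines 617, 632), and bpf_mem_alloc_destroy() tears everything down. A hedged usage sketch of the fixed-size flavour; struct my_elem and the my_* function names are hypothetical, and <linux/bpf_mem_alloc.h> is assumed to be the header declaring struct bpf_mem_alloc:

#include <linux/bpf_mem_alloc.h>	/* assumed header for struct bpf_mem_alloc */
#include <linux/types.h>

struct my_elem {			/* hypothetical fixed-size object */
	u64 key;
	u64 value;
};

static struct bpf_mem_alloc my_ma;

static int my_setup(void)
{
	/* Non-zero size: the fixed-size cache path (ma->cache, line 387).
	 * A size of 0 would instead select the size-indexed caches
	 * (ma->caches, line 410). */
	return bpf_mem_alloc_init(&my_ma, sizeof(struct my_elem), false);
}

static void my_use(void)
{
	struct my_elem *e;

	e = bpf_mem_cache_alloc(&my_ma);	/* grabs an object from the per-cpu cache */
	if (!e)
		return;				/* NULL if the cache is currently empty */
	e->key = 1;
	e->value = 2;
	bpf_mem_cache_free(&my_ma, e);		/* back onto the per-cpu free list */
}

static void my_teardown(void)
{
	bpf_mem_alloc_destroy(&my_ma);		/* drains caches; may defer freeing */
}

With size == 0 at init, the same bpf_mem_alloc instance would instead serve variable sizes through bpf_mem_alloc(ma, size) and bpf_mem_free(ma, ptr), as on lines 617 and 632.
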