Lines matching references to rcpu (cross-reference listing, kernel/bpf/cpumap.c)

142 static void get_cpu_map_entry(struct bpf_cpu_map_entry *rcpu)  in get_cpu_map_entry()  argument
144 atomic_inc(&rcpu->refcnt); in get_cpu_map_entry()
150 struct bpf_cpu_map_entry *rcpu; in cpu_map_kthread_stop() local
152 rcpu = container_of(work, struct bpf_cpu_map_entry, kthread_stop_wq); in cpu_map_kthread_stop()
160 kthread_stop(rcpu->kthread); in cpu_map_kthread_stop()
163 static struct sk_buff *cpu_map_build_skb(struct bpf_cpu_map_entry *rcpu, in cpu_map_build_skb() argument
236 static void put_cpu_map_entry(struct bpf_cpu_map_entry *rcpu) in put_cpu_map_entry() argument
238 if (atomic_dec_and_test(&rcpu->refcnt)) { in put_cpu_map_entry()
240 __cpu_map_ring_cleanup(rcpu->queue); in put_cpu_map_entry()
241 ptr_ring_cleanup(rcpu->queue, NULL); in put_cpu_map_entry()
242 kfree(rcpu->queue); in put_cpu_map_entry()
243 kfree(rcpu); in put_cpu_map_entry()
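
The two helpers above implement the entry's lifetime: get_cpu_map_entry() takes a reference with atomic_inc(), and the final put_cpu_map_entry() drains and frees the ptr_ring before freeing the entry itself. Below is a minimal userspace sketch of that take/release pattern using C11 atomics; the struct, field names and free path are simplified stand-ins, not the kernel's types.

#include <stdatomic.h>
#include <stdlib.h>

/* Illustrative entry: "queue" stands in for rcpu->queue (a ptr_ring). */
struct entry {
        atomic_int refcnt;
        void *queue;
};

static void entry_get(struct entry *e)
{
        atomic_fetch_add(&e->refcnt, 1);            /* kernel: atomic_inc(&rcpu->refcnt) */
}

static void entry_put(struct entry *e)
{
        /* last reference dropped: tear everything down */
        if (atomic_fetch_sub(&e->refcnt, 1) == 1) { /* kernel: atomic_dec_and_test() */
                free(e->queue);                     /* kernel: __cpu_map_ring_cleanup() +
                                                     * ptr_ring_cleanup() + kfree() */
                free(e);
        }
}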
251 struct bpf_cpu_map_entry *rcpu = data; in cpu_map_kthread_run() local
260 while (!kthread_should_stop() || !__ptr_ring_empty(rcpu->queue)) { in cpu_map_kthread_run()
268 if (__ptr_ring_empty(rcpu->queue)) { in cpu_map_kthread_run()
271 if (__ptr_ring_empty(rcpu->queue)) { in cpu_map_kthread_run()
286 n = ptr_ring_consume_batched(rcpu->queue, frames, CPUMAP_BATCH); in cpu_map_kthread_run()
312 skb = cpu_map_build_skb(rcpu, xdpf, skb); in cpu_map_kthread_run()
324 trace_xdp_cpumap_kthread(rcpu->map_id, n, drops, sched); in cpu_map_kthread_run()
330 put_cpu_map_entry(rcpu); in cpu_map_kthread_run()
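
cpu_map_kthread_run() is the per-CPU consumer: it keeps pulling batches of up to CPUMAP_BATCH frames from the ptr_ring until it is told to stop and the ring is empty, builds an skb for each frame, and emits a tracepoint per batch. The loop shape, with simplified stand-in helpers instead of the real ptr_ring and skb path, looks roughly like this:

#include <stdbool.h>
#include <stdio.h>

#define CAP   64   /* stand-in for the configured queue size */
#define BATCH 8    /* stand-in for CPUMAP_BATCH */

struct queue { void *slot[CAP]; unsigned int head, tail; };

static bool q_empty(const struct queue *q)
{
        return q->head == q->tail;
}

static int q_consume_batched(struct queue *q, void **out, int max)
{
        int n = 0;

        while (n < max && !q_empty(q)) {
                out[n++] = q->slot[q->tail % CAP];
                q->tail++;
        }
        return n;
}

/* Drain until stop is requested *and* the queue is empty, so no frames are
 * lost during shutdown - the same condition as the kernel loop. */
static void consumer_loop(struct queue *q, bool (*should_stop)(void))
{
        void *frames[BATCH];

        while (!should_stop() || !q_empty(q)) {
                int i, n;

                if (q_empty(q))
                        continue;   /* kernel version sleeps here via schedule() */

                n = q_consume_batched(q, frames, BATCH);
                for (i = 0; i < n; i++)
                        printf("frame %p\n", frames[i]); /* kernel: cpu_map_build_skb(),
                                                          * then hand to the network stack */
        }
}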
338 struct bpf_cpu_map_entry *rcpu; in __cpu_map_entry_alloc() local
345 rcpu = kzalloc_node(sizeof(*rcpu), gfp, numa); in __cpu_map_entry_alloc()
346 if (!rcpu) in __cpu_map_entry_alloc()
350 rcpu->bulkq = __alloc_percpu_gfp(sizeof(*rcpu->bulkq), in __cpu_map_entry_alloc()
352 if (!rcpu->bulkq) in __cpu_map_entry_alloc()
356 bq = per_cpu_ptr(rcpu->bulkq, i); in __cpu_map_entry_alloc()
357 bq->obj = rcpu; in __cpu_map_entry_alloc()
361 rcpu->queue = kzalloc_node(sizeof(*rcpu->queue), gfp, numa); in __cpu_map_entry_alloc()
362 if (!rcpu->queue) in __cpu_map_entry_alloc()
365 err = ptr_ring_init(rcpu->queue, qsize, gfp); in __cpu_map_entry_alloc()
369 rcpu->cpu = cpu; in __cpu_map_entry_alloc()
370 rcpu->map_id = map_id; in __cpu_map_entry_alloc()
371 rcpu->qsize = qsize; in __cpu_map_entry_alloc()
374 rcpu->kthread = kthread_create_on_node(cpu_map_kthread_run, rcpu, numa, in __cpu_map_entry_alloc()
376 if (IS_ERR(rcpu->kthread)) in __cpu_map_entry_alloc()
379 get_cpu_map_entry(rcpu); /* 1-refcnt for being in cmap->cpu_map[] */ in __cpu_map_entry_alloc()
380 get_cpu_map_entry(rcpu); /* 1-refcnt for kthread */ in __cpu_map_entry_alloc()
383 kthread_bind(rcpu->kthread, cpu); in __cpu_map_entry_alloc()
384 wake_up_process(rcpu->kthread); in __cpu_map_entry_alloc()
386 return rcpu; in __cpu_map_entry_alloc()
389 ptr_ring_cleanup(rcpu->queue, NULL); in __cpu_map_entry_alloc()
391 kfree(rcpu->queue); in __cpu_map_entry_alloc()
393 free_percpu(rcpu->bulkq); in __cpu_map_entry_alloc()
395 kfree(rcpu); in __cpu_map_entry_alloc()
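
__cpu_map_entry_alloc() sets the entry up in stages (entry, per-CPU bulk queues, ptr_ring, kthread) and, on failure, unwinds only what has already been allocated, which is what the cleanup calls listed just above are for. A stripped-down sketch of that goto-unwind order, with plain calloc()/free() standing in for the kernel allocators:

#include <stdlib.h>

/* Illustrative entry; bulkq/queue stand in for the per-CPU bulk queues
 * and the ptr_ring respectively. */
struct entry {
        void *bulkq;
        void *queue;
};

static struct entry *entry_alloc(size_t qsize)
{
        struct entry *e = calloc(1, sizeof(*e));   /* kernel: kzalloc_node() */

        if (!e)
                return NULL;

        e->bulkq = calloc(1, 64);                  /* kernel: __alloc_percpu_gfp() */
        if (!e->bulkq)
                goto free_entry;

        e->queue = calloc(qsize, sizeof(void *));  /* kernel: kzalloc_node() + ptr_ring_init() */
        if (!e->queue)
                goto free_bulkq;

        /* the kernel additionally creates, binds and wakes the kthread here,
         * taking two references on the entry before returning it */
        return e;

free_bulkq:
        free(e->bulkq);
free_entry:
        free(e);
        return NULL;
}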
401 struct bpf_cpu_map_entry *rcpu; in __cpu_map_entry_free() local
409 rcpu = container_of(rcu, struct bpf_cpu_map_entry, rcu); in __cpu_map_entry_free()
413 struct xdp_bulk_queue *bq = per_cpu_ptr(rcpu->bulkq, cpu); in __cpu_map_entry_free()
418 free_percpu(rcpu->bulkq); in __cpu_map_entry_free()
420 put_cpu_map_entry(rcpu); in __cpu_map_entry_free()
443 u32 key_cpu, struct bpf_cpu_map_entry *rcpu) in __cpu_map_entry_replace() argument
447 old_rcpu = xchg(&cmap->cpu_map[key_cpu], rcpu); in __cpu_map_entry_replace()
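
__cpu_map_entry_replace() publishes the new entry into cmap->cpu_map[] with xchg(), and the old pointer that comes back is released asynchronously (the kernel defers the actual free, which is why __cpu_map_entry_free() above runs from an RCU callback). A tiny C11 sketch of the exchange idea; the types are placeholders:

#include <stdatomic.h>

struct entry;   /* opaque placeholder for struct bpf_cpu_map_entry */

/* Atomically install the new entry and hand back the old one so the
 * caller can schedule its release. */
static struct entry *slot_replace(_Atomic(struct entry *) *slot,
                                  struct entry *new_entry)
{
        return atomic_exchange(slot, new_entry);   /* kernel: xchg(&cmap->cpu_map[key_cpu], rcpu) */
}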
472 struct bpf_cpu_map_entry *rcpu; in cpu_map_update_elem() local
493 rcpu = NULL; /* Same as deleting */ in cpu_map_update_elem()
496 rcpu = __cpu_map_entry_alloc(qsize, key_cpu, map->id); in cpu_map_update_elem()
497 if (!rcpu) in cpu_map_update_elem()
499 rcpu->cmap = cmap; in cpu_map_update_elem()
502 __cpu_map_entry_replace(cmap, key_cpu, rcpu); in cpu_map_update_elem()
541 struct bpf_cpu_map_entry *rcpu; in cpu_map_free() local
543 rcpu = READ_ONCE(cmap->cpu_map[i]); in cpu_map_free()
544 if (!rcpu) in cpu_map_free()
558 struct bpf_cpu_map_entry *rcpu; in __cpu_map_lookup_elem() local
563 rcpu = READ_ONCE(cmap->cpu_map[key]); in __cpu_map_lookup_elem()
564 return rcpu; in __cpu_map_lookup_elem()
569 struct bpf_cpu_map_entry *rcpu = in cpu_map_lookup_elem() local
572 return rcpu ? &rcpu->qsize : NULL; in cpu_map_lookup_elem()
604 struct bpf_cpu_map_entry *rcpu = bq->obj; in bq_flush_to_queue() local
606 const int to_cpu = rcpu->cpu; in bq_flush_to_queue()
613 q = rcpu->queue; in bq_flush_to_queue()
636 trace_xdp_cpumap_enqueue(rcpu->map_id, processed, drops, to_cpu); in bq_flush_to_queue()
643 static int bq_enqueue(struct bpf_cpu_map_entry *rcpu, struct xdp_frame *xdpf) in bq_enqueue() argument
645 struct list_head *flush_list = this_cpu_ptr(rcpu->cmap->flush_list); in bq_enqueue()
646 struct xdp_bulk_queue *bq = this_cpu_ptr(rcpu->bulkq); in bq_enqueue()
668 int cpu_map_enqueue(struct bpf_cpu_map_entry *rcpu, struct xdp_buff *xdp, in cpu_map_enqueue() argument
680 bq_enqueue(rcpu, xdpf); in cpu_map_enqueue()
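
bq_enqueue() does not touch the shared ptr_ring for every frame: each CPU stages frames in its own xdp_bulk_queue, and bq_flush_to_queue() later pushes a whole batch across at once, which is what the flush_list bookkeeping above supports. A stand-alone sketch of that staging/flush idea (the array size and helpers are illustrative, not the kernel's bulk-size constant or ptr_ring API):

#include <stddef.h>

#define BULK 8   /* illustrative batch size */

struct bulk_queue {
        void *frames[BULK];
        size_t count;
};

/* Push the staged batch to the shared queue in one go.
 * "produce" stands in for the ptr_ring producer side. */
static void bq_flush(struct bulk_queue *bq, void (*produce)(void *))
{
        size_t i;

        for (i = 0; i < bq->count; i++)
                produce(bq->frames[i]);
        bq->count = 0;
}

/* Stage one frame; flush first if the local batch is already full. */
static void bq_add(struct bulk_queue *bq, void *frame, void (*produce)(void *))
{
        if (bq->count == BULK)
                bq_flush(bq, produce);
        bq->frames[bq->count++] = frame;
}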