Lines matching refs: bpf_cpu_map_entry (all matches below are in kernel/bpf/cpumap.c)

41 struct bpf_cpu_map_entry;
47 struct bpf_cpu_map_entry *obj;
52 struct bpf_cpu_map_entry { struct
77 struct bpf_cpu_map_entry **cpu_map; argument
113 cost = (u64) cmap->map.max_entries * sizeof(struct bpf_cpu_map_entry *); in cpu_map_alloc()
124 sizeof(struct bpf_cpu_map_entry *), in cpu_map_alloc()
137 static void get_cpu_map_entry(struct bpf_cpu_map_entry *rcpu) in get_cpu_map_entry()
145 struct bpf_cpu_map_entry *rcpu; in cpu_map_kthread_stop()
147 rcpu = container_of(work, struct bpf_cpu_map_entry, kthread_stop_wq); in cpu_map_kthread_stop()
215 static void put_cpu_map_entry(struct bpf_cpu_map_entry *rcpu) in put_cpu_map_entry()
228 static int cpu_map_bpf_prog_run_xdp(struct bpf_cpu_map_entry *rcpu, in cpu_map_bpf_prog_run_xdp()
301 struct bpf_cpu_map_entry *rcpu = data; in cpu_map_kthread_run()
395 static int __cpu_map_load_bpf_program(struct bpf_cpu_map_entry *rcpu, int fd) in __cpu_map_load_bpf_program()
414 static struct bpf_cpu_map_entry *
419 struct bpf_cpu_map_entry *rcpu; in __cpu_map_entry_alloc()
487 struct bpf_cpu_map_entry *rcpu; in __cpu_map_entry_free()
494 rcpu = container_of(rcu, struct bpf_cpu_map_entry, rcu); in __cpu_map_entry_free()
521 u32 key_cpu, struct bpf_cpu_map_entry *rcpu) in __cpu_map_entry_replace()
523 struct bpf_cpu_map_entry *old_rcpu; in __cpu_map_entry_replace()
551 struct bpf_cpu_map_entry *rcpu; in cpu_map_update_elem()
606 struct bpf_cpu_map_entry *rcpu; in cpu_map_free()
619 struct bpf_cpu_map_entry *__cpu_map_lookup_elem(struct bpf_map *map, u32 key) in __cpu_map_lookup_elem()
622 struct bpf_cpu_map_entry *rcpu; in __cpu_map_lookup_elem()
633 struct bpf_cpu_map_entry *rcpu = in cpu_map_lookup_elem()
672 struct bpf_cpu_map_entry *rcpu = bq->obj; in bq_flush_to_queue()
707 static void bq_enqueue(struct bpf_cpu_map_entry *rcpu, struct xdp_frame *xdpf) in bq_enqueue()
730 int cpu_map_enqueue(struct bpf_cpu_map_entry *rcpu, struct xdp_buff *xdp, in cpu_map_enqueue()
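The update path in the listing (cpu_map_update_elem() at line 551, __cpu_map_entry_alloc() at 414, __cpu_map_load_bpf_program() at 395) is driven from user space by writing a struct bpf_cpumap_val into a BPF_MAP_TYPE_CPUMAP map. Below is a minimal user-space sketch, assuming map_fd already refers to such a map and prog_fd to an optional loaded program (both obtained elsewhere, e.g. via libbpf object loading); the helper name add_cpumap_entry is illustrative, not part of any API.

#include <linux/bpf.h>
#include <bpf/bpf.h>

/* Illustrative helper: program one CPUMAP slot. Writing the value triggers
 * cpu_map_update_elem() -> __cpu_map_entry_alloc(), which allocates the
 * per-CPU entry, sets up its ptr_ring of 'qsize' slots, spawns the
 * kthread, and (when bpf_prog.fd > 0) attaches the per-entry program via
 * __cpu_map_load_bpf_program().
 */
static int add_cpumap_entry(int map_fd, __u32 cpu, __u32 qsize, int prog_fd)
{
	struct bpf_cpumap_val val = {
		.qsize = qsize,         /* size of the remote CPU's ptr_ring */
		.bpf_prog.fd = prog_fd, /* 0 = no per-entry CPUMAP program */
	};

	return bpf_map_update_elem(map_fd, &cpu, &val, 0);
}

Replacing an existing slot follows the same call, which in the kernel ends up in __cpu_map_entry_replace(): the old entry is dropped via RCU and freed in __cpu_map_entry_free() once its kthread has been stopped.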
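At the other end, cpu_map_enqueue() (line 730) and the bq_enqueue()/bq_flush_to_queue() pair are reached when an XDP program redirects a frame into the map; the destination CPU's kthread (cpu_map_kthread_run(), line 301) then dequeues the frames, optionally runs the attached program (cpu_map_bpf_prog_run_xdp(), line 228), and hands the resulting SKBs to the stack. A minimal XDP-side sketch follows, assuming a kernel with CPUMAP program support (5.9+) and libbpf BTF-style map definitions; the map and section names are illustrative.

/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_CPUMAP);
	__uint(key_size, sizeof(__u32));
	__uint(value_size, sizeof(struct bpf_cpumap_val));
	__uint(max_entries, 64);
} cpu_map SEC(".maps");

SEC("xdp")
int xdp_redirect_to_cpu(struct xdp_md *ctx)
{
	__u32 target_cpu = 0;	/* real code would pick a CPU, e.g. by RX hash */

	/* The lower bits of the flags argument select the fallback action
	 * (XDP_PASS here) if the chosen CPUMAP slot is not populated.
	 */
	return bpf_redirect_map(&cpu_map, target_cpu, XDP_PASS);
}

char _license[] SEC("license") = "GPL";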