| /Linux-v6.1/tools/testing/selftests/bpf/prog_tests/ |
| D | btf_map_in_map.c |
|     17   err = bpf_obj_get_info_by_fd(bpf_map__fd(map), &info, &info_len);   in bpf_map_id()
|     38   map1_fd = bpf_map__fd(skel->maps.inner_map1);   in test_lookup_update()
|     39   map2_fd = bpf_map__fd(skel->maps.inner_map2);   in test_lookup_update()
|     40   map3_fd = bpf_map__fd(skel->maps.inner_map3);   in test_lookup_update()
|     41   map4_fd = bpf_map__fd(skel->maps.inner_map4);   in test_lookup_update()
|     42   map5_fd = bpf_map__fd(skel->maps.inner_map5);   in test_lookup_update()
|     43   outer_arr_dyn_fd = bpf_map__fd(skel->maps.outer_arr_dyn);   in test_lookup_update()
|     44   outer_arr_fd = bpf_map__fd(skel->maps.outer_arr);   in test_lookup_update()
|     45   outer_hash_fd = bpf_map__fd(skel->maps.outer_hash);   in test_lookup_update()
|     142  inner_map_fd = bpf_map__fd(skel->maps.sockarr_sz2);   in test_diff_size()
|     [all …]
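Most of the selftests listed here follow the same pattern: open a generated skeleton, then hand bpf_map__fd() of one of its maps to a bpf() syscall wrapper. A minimal sketch of the map-ID lookup done by bpf_map_id() above, with placeholder names rather than the selftest code itself:

#include <bpf/bpf.h>
#include <bpf/libbpf.h>

/* Resolve a loaded map's kernel ID from the FD libbpf holds for it. */
static __u32 map_id_of(const struct bpf_map *map)
{
	struct bpf_map_info info = {};
	__u32 info_len = sizeof(info);

	if (bpf_obj_get_info_by_fd(bpf_map__fd(map), &info, &info_len))
		return 0;	/* treat failure as "no ID" */
	return info.id;
}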
|
| D | recursion.c |
|     23   bpf_map_delete_elem(bpf_map__fd(skel->maps.hash1), &key);   in test_recursion()
|     25   bpf_map_delete_elem(bpf_map__fd(skel->maps.hash1), &key);   in test_recursion()
|     29   bpf_map_delete_elem(bpf_map__fd(skel->maps.hash2), &key);   in test_recursion()
|     31   bpf_map_delete_elem(bpf_map__fd(skel->maps.hash2), &key);   in test_recursion()
|
| D | unpriv_bpf_disabled.c |
|     77   perfbuf = perf_buffer__new(bpf_map__fd(skel->maps.perfbuf), 8, process_perfbuf, NULL, NULL,   in test_unpriv_bpf_disabled_positive()
|     82   ringbuf = ring_buffer__new(bpf_map__fd(skel->maps.ringbuf), process_ringbuf, NULL, NULL);   in test_unpriv_bpf_disabled_positive()
|     242  map_fds[0] = bpf_map__fd(skel->maps.array);   in test_unpriv_bpf_disabled()
|     243  map_fds[1] = bpf_map__fd(skel->maps.percpu_array);   in test_unpriv_bpf_disabled()
|     244  map_fds[2] = bpf_map__fd(skel->maps.hash);   in test_unpriv_bpf_disabled()
|     245  map_fds[3] = bpf_map__fd(skel->maps.percpu_hash);   in test_unpriv_bpf_disabled()
|     246  map_fds[4] = bpf_map__fd(skel->maps.perfbuf);   in test_unpriv_bpf_disabled()
|     247  map_fds[5] = bpf_map__fd(skel->maps.ringbuf);   in test_unpriv_bpf_disabled()
|     248  map_fds[6] = bpf_map__fd(skel->maps.prog_array);   in test_unpriv_bpf_disabled()
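unpriv_bpf_disabled.c also shows the consumer-side pattern: perf_buffer__new() and ring_buffer__new() both take the map FD rather than the struct bpf_map. A hedged sketch of the ring buffer half, with a hypothetical callback:

#include <stddef.h>
#include <bpf/libbpf.h>

/* Hypothetical record handler; real tests hook their own processing here. */
static int handle_event(void *ctx, void *data, size_t size)
{
	return 0;	/* 0 keeps consuming; a negative value aborts the poll */
}

static struct ring_buffer *open_ringbuf(struct bpf_map *ringbuf_map)
{
	/* ring_buffer__new() wants the FD of a BPF_MAP_TYPE_RINGBUF map. */
	return ring_buffer__new(bpf_map__fd(ringbuf_map), handle_event,
				NULL /* ctx */, NULL /* opts */);
}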
|
| D | verify_pkcs7_sig.c |
|     308  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data, BPF_ANY);   in test_verify_pkcs7_sig()
|     317  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data, BPF_ANY);   in test_verify_pkcs7_sig()
|     326  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data, BPF_ANY);   in test_verify_pkcs7_sig()
|     337  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data, BPF_ANY);   in test_verify_pkcs7_sig()
|     351  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data, BPF_ANY);   in test_verify_pkcs7_sig()
|     359  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data, BPF_ANY);   in test_verify_pkcs7_sig()
|     372  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data,   in test_verify_pkcs7_sig()
|     379  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data,   in test_verify_pkcs7_sig()
|     386  ret = bpf_map_update_elem(bpf_map__fd(map), &zero, &data,   in test_verify_pkcs7_sig()
|
| D | xdp_noinline.c |
|     44   bpf_map_update_elem(bpf_map__fd(skel->maps.vip_map), &key, &value, 0);   in test_xdp_noinline()
|     45   bpf_map_update_elem(bpf_map__fd(skel->maps.ch_rings), &ch_key, &real_num, 0);   in test_xdp_noinline()
|     46   bpf_map_update_elem(bpf_map__fd(skel->maps.reals), &real_num, &real_def, 0);   in test_xdp_noinline()
|     65   bpf_map_lookup_elem(bpf_map__fd(skel->maps.stats), &stats_key, stats);   in test_xdp_noinline()
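The xdp_noinline.c entries show direct map I/O through the FD: bpf_map_update_elem() to seed configuration and bpf_map_lookup_elem() to read results back. A minimal sketch of that round trip, with hypothetical key/value types:

#include <bpf/bpf.h>
#include <bpf/libbpf.h>

static int seed_and_read(struct bpf_map *map, __u32 key, __u64 value)
{
	int fd = bpf_map__fd(map);
	__u64 readback = 0;

	/* Write the entry, then confirm the kernel sees the same value. */
	if (bpf_map_update_elem(fd, &key, &value, BPF_ANY))
		return -1;
	if (bpf_map_lookup_elem(fd, &key, &readback))
		return -1;
	return readback == value ? 0 : -1;
}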
|
| D | stacktrace_build_id.c |
|     28   control_map_fd = bpf_map__fd(skel->maps.control_map);   in test_stacktrace_build_id()
|     29   stackid_hmap_fd = bpf_map__fd(skel->maps.stackid_hmap);   in test_stacktrace_build_id()
|     30   stackmap_fd = bpf_map__fd(skel->maps.stackmap);   in test_stacktrace_build_id()
|     31   stack_amap_fd = bpf_map__fd(skel->maps.stack_amap);   in test_stacktrace_build_id()
|
| D | metadata.c |
|     76   bpf_map__fd(obj->maps.rodata));   in test_metadata_unused()
|     93   bpf_map__fd(obj->maps.rodata), NULL);   in test_metadata_unused()
|     110  bpf_map__fd(obj->maps.rodata));   in test_metadata_used()
|     127  bpf_map__fd(obj->maps.rodata), NULL);   in test_metadata_used()
|
| D | tailcalls.c |
|     40   map_fd = bpf_map__fd(prog_array);   in test_tailcall_1()
|     176  map_fd = bpf_map__fd(prog_array);   in test_tailcall_2()
|     251  map_fd = bpf_map__fd(prog_array);   in test_tailcall_count()
|     276  data_fd = bpf_map__fd(data_map);   in test_tailcall_count()
|     349  map_fd = bpf_map__fd(prog_array);   in test_tailcall_4()
|     357  data_fd = bpf_map__fd(data_map);   in test_tailcall_4()
|     439  map_fd = bpf_map__fd(prog_array);   in test_tailcall_5()
|     447  data_fd = bpf_map__fd(data_map);   in test_tailcall_5()
|     527  map_fd = bpf_map__fd(prog_array);   in test_tailcall_bpf2bpf_1()
|     611  map_fd = bpf_map__fd(prog_array);   in test_tailcall_bpf2bpf_2()
|     [all …]
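In tailcalls.c the FD being fetched belongs to a BPF_MAP_TYPE_PROG_ARRAY; the tests then install program FDs into its slots so bpf_tail_call() can jump between programs. A sketch of that setup step under assumed names:

#include <bpf/bpf.h>
#include <bpf/libbpf.h>

static int install_tail_call(struct bpf_map *prog_array,
			     struct bpf_program *prog, __u32 slot)
{
	int map_fd = bpf_map__fd(prog_array);
	int prog_fd = bpf_program__fd(prog);

	if (map_fd < 0 || prog_fd < 0)
		return -1;
	/* A prog array maps a u32 index to a program FD. */
	return bpf_map_update_elem(map_fd, &slot, &prog_fd, BPF_ANY);
}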
|
| D | stacktrace_map_skip.c |
|     18   stackid_hmap_fd = bpf_map__fd(skel->maps.stackid_hmap);   in test_stacktrace_map_skip()
|     22   stackmap_fd = bpf_map__fd(skel->maps.stackmap);   in test_stacktrace_map_skip()
|     26   stack_amap_fd = bpf_map__fd(skel->maps.stack_amap);   in test_stacktrace_map_skip()
|
| D | map_lookup_percpu_elem.c |
|     37   ret = bpf_map_update_elem(bpf_map__fd(skel->maps.percpu_array_map), &key, buf, 0);   in test_map_lookup_percpu_elem()
|     40   ret = bpf_map_update_elem(bpf_map__fd(skel->maps.percpu_hash_map), &key, buf, 0);   in test_map_lookup_percpu_elem()
|     43   ret = bpf_map_update_elem(bpf_map__fd(skel->maps.percpu_lru_hash_map), &key, buf, 0);   in test_map_lookup_percpu_elem()
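For the per-CPU maps in map_lookup_percpu_elem.c, the value buffer passed to bpf_map_update_elem() holds one slot per possible CPU. A hedged sketch of building such a buffer (helper and variable names are placeholders):

#include <errno.h>
#include <stdlib.h>
#include <bpf/bpf.h>
#include <bpf/libbpf.h>

static int set_percpu_value(struct bpf_map *percpu_map, __u32 key, __u64 val)
{
	int nr_cpus = libbpf_num_possible_cpus();
	__u64 *buf;
	int err;

	if (nr_cpus < 0)
		return nr_cpus;
	/* One value per possible CPU, all initialized to the same number. */
	buf = calloc(nr_cpus, sizeof(*buf));
	if (!buf)
		return -ENOMEM;
	for (int i = 0; i < nr_cpus; i++)
		buf[i] = val;
	err = bpf_map_update_elem(bpf_map__fd(percpu_map), &key, buf, 0);
	free(buf);
	return err;
}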
|
| D | tcp_hdr_options.c |
|     300  hdr_stg_map_fd = bpf_map__fd(skel->maps.hdr_stg_map);   in fastopen_estab()
|     301  lport_linum_map_fd = bpf_map__fd(skel->maps.lport_linum_map);   in fastopen_estab()
|     337  hdr_stg_map_fd = bpf_map__fd(skel->maps.hdr_stg_map);   in syncookie_estab()
|     338  lport_linum_map_fd = bpf_map__fd(skel->maps.lport_linum_map);   in syncookie_estab()
|     381  hdr_stg_map_fd = bpf_map__fd(skel->maps.hdr_stg_map);   in fin()
|     382  lport_linum_map_fd = bpf_map__fd(skel->maps.lport_linum_map);   in fin()
|     413  hdr_stg_map_fd = bpf_map__fd(skel->maps.hdr_stg_map);   in __simple_estab()
|     414  lport_linum_map_fd = bpf_map__fd(skel->maps.lport_linum_map);   in __simple_estab()
|     466  lport_linum_map_fd = bpf_map__fd(misc_skel->maps.lport_linum_map);   in misc()
|
| D | sockmap_basic.c |
|     59   src_fd = bpf_map__fd(src);   in compare_cookies()
|     60   dst_fd = bpf_map__fd(dst);   in compare_cookies()
|     116  map = bpf_map__fd(skel->maps.sock_map);   in test_skmsg_helpers()
|     152  src = bpf_map__fd(skel->maps.src);   in test_sockmap_update()
|     218  src_fd = bpf_map__fd(src);   in test_sockmap_copy()
|     280  map = bpf_map__fd(skel->maps.sock_map);   in test_sockmap_skb_verdict_attach()
|     322  map_fd = bpf_map__fd(skel->maps.sock_map);   in test_sockmap_progs_query()
|
| D | netns_cookie.c |
|     36   map = bpf_map__fd(skel->maps.sock_map);   in test_netns_cookie()
|     53   err = bpf_map_lookup_elem(bpf_map__fd(skel->maps.sockops_netns_cookies),   in test_netns_cookie()
|     65   err = bpf_map_lookup_elem(bpf_map__fd(skel->maps.sk_msg_netns_cookies),   in test_netns_cookie()
|
| D | sk_storage_tracing.c |
|     30   err = bpf_map_lookup_elem(bpf_map__fd(skel->maps.sk_stg_map), &sk_fd,   in check_sk_stg()
|     63   err = bpf_map_update_elem(bpf_map__fd(skel->maps.del_sk_stg_map),   in do_test()
|     83   err = bpf_map_lookup_elem(bpf_map__fd(skel->maps.del_sk_stg_map),   in do_test()
|
| D | stacktrace_build_id_nmi.c |
|     71   control_map_fd = bpf_map__fd(skel->maps.control_map);   in test_stacktrace_build_id_nmi()
|     72   stackid_hmap_fd = bpf_map__fd(skel->maps.stackid_hmap);   in test_stacktrace_build_id_nmi()
|     73   stackmap_fd = bpf_map__fd(skel->maps.stackmap);   in test_stacktrace_build_id_nmi()
|
| D | test_local_storage.c |
|     119  if (!check_syscall_operations(bpf_map__fd(skel->maps.task_storage_map),   in test_test_local_storage()
|     138  if (!check_syscall_operations(bpf_map__fd(skel->maps.inode_storage_map),   in test_test_local_storage()
|     172  if (!check_syscall_operations(bpf_map__fd(skel->maps.sk_storage_map),   in test_test_local_storage()
|
| D | legacy_printk.c |
|     24   map_fd = bpf_map__fd(skel->maps.my_pid_map);   in execute_one_variant()
|     40   map_fd = bpf_map__fd(skel->maps.res_map);   in execute_one_variant()
|
| D | test_bprm_opts.c |
|     95   err = run_set_secureexec(bpf_map__fd(skel->maps.secure_exec_task_map),   in test_test_bprm_opts()
|     101  err = run_set_secureexec(bpf_map__fd(skel->maps.secure_exec_task_map),   in test_test_bprm_opts()
|
| D | netcnt.c |
|     45   map_fd = bpf_map__fd(skel->maps.netcnt);   in serial_test_netcnt()
|     52   percpu_map_fd = bpf_map__fd(skel->maps.percpu_netcnt);   in serial_test_netcnt()
|
| D | sockmap_listen.c |
|     1028  int verdict_map = bpf_map__fd(skel->maps.verdict_map);   in test_skb_redir_to_connected()
|     1029  int sock_map = bpf_map__fd(inner_map);   in test_skb_redir_to_connected()
|     1052  int verdict_map = bpf_map__fd(skel->maps.verdict_map);   in test_msg_redir_to_connected()
|     1053  int sock_map = bpf_map__fd(inner_map);   in test_msg_redir_to_connected()
|     1130  int verdict_map = bpf_map__fd(skel->maps.verdict_map);   in test_skb_redir_to_listening()
|     1131  int sock_map = bpf_map__fd(inner_map);   in test_skb_redir_to_listening()
|     1154  int verdict_map = bpf_map__fd(skel->maps.verdict_map);   in test_msg_redir_to_listening()
|     1155  int sock_map = bpf_map__fd(inner_map);   in test_msg_redir_to_listening()
|     1419  mapfd = bpf_map__fd(map);   in test_ops_cleanup()
|     1511  map_fd = bpf_map__fd(map);   in test_ops()
|     [all …]
|
| /Linux-v6.1/tools/perf/util/ |
| D | bpf_lock_contention.c |
|     61   fd = bpf_map__fd(skel->maps.cpu_filter);   in lock_contention_prepare()
|     74   fd = bpf_map__fd(skel->maps.task_filter);   in lock_contention_prepare()
|     87   fd = bpf_map__fd(skel->maps.task_filter);   in lock_contention_prepare()
|     118  fd = bpf_map__fd(skel->maps.lock_stat);   in lock_contention_read()
|     119  stack = bpf_map__fd(skel->maps.stacks);   in lock_contention_read()
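The perf tools in this directory share a filter-map idiom: lock_contention_prepare(), off_cpu_prepare(), and the ftrace/kwork setup code all mark the requested CPUs or PIDs in small maps that the BPF side checks for membership. A hypothetical sketch of that fill step, not taken from any of these files:

#include <bpf/bpf.h>
#include <bpf/libbpf.h>

/* Mark each id (a cpu number or a pid) in a filter map; the BPF program
 * only needs a successful lookup to know the id was requested. */
static void fill_filter_map(struct bpf_map *filter, const int *ids, int nr)
{
	int fd = bpf_map__fd(filter);
	__u8 val = 1;

	for (int i = 0; i < nr; i++)
		bpf_map_update_elem(fd, &ids[i], &val, BPF_ANY);
}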
|
| D | bpf_off_cpu.c |
|     84   fd = bpf_map__fd(skel->maps.task_filter);   in off_cpu_start()
|     209  fd = bpf_map__fd(skel->maps.cpu_filter);   in off_cpu_prepare()
|     222  fd = bpf_map__fd(skel->maps.task_filter);   in off_cpu_prepare()
|     241  fd = bpf_map__fd(skel->maps.task_filter);   in off_cpu_prepare()
|     254  fd = bpf_map__fd(skel->maps.cgroup_filter);   in off_cpu_prepare()
|     328  fd = bpf_map__fd(skel->maps.off_cpu);   in off_cpu_write()
|     329  stack = bpf_map__fd(skel->maps.stacks);   in off_cpu_write()
|
| D | bpf_ftrace.c |
|     63   fd = bpf_map__fd(skel->maps.cpu_filter);   in perf_ftrace__latency_prepare_bpf()
|     76   fd = bpf_map__fd(skel->maps.task_filter);   in perf_ftrace__latency_prepare_bpf()
|     129  fd = bpf_map__fd(skel->maps.latency);   in perf_ftrace__latency_read_bpf()
|
| D | bpf_kwork.c |
|     68   int fd = bpf_map__fd(skel->maps.perf_kwork_names);   in get_work_name_from_map()
|     152  fd = bpf_map__fd(skel->maps.perf_kwork_cpu_filter);   in setup_filters()
|     187  fd = bpf_map__fd(skel->maps.perf_kwork_name_filter);   in setup_filters()
|     322  int fd = bpf_map__fd(skel->maps.perf_kwork_report);   in perf_kwork__report_read_bpf()
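perf_kwork__report_read_bpf() and the other read/write functions above drain result maps after profiling. A common way to do that from user space is to walk the keys with bpf_map_get_next_key(); the sketch below assumes simple u64 keys and values, which is not necessarily what these tools actually store:

#include <stdio.h>
#include <stdbool.h>
#include <bpf/bpf.h>
#include <bpf/libbpf.h>

static void dump_report(struct bpf_map *report)
{
	int fd = bpf_map__fd(report);
	__u64 key = 0, next_key, value;
	bool first = true;

	/* Passing NULL as the current key returns the first key in the map. */
	while (!bpf_map_get_next_key(fd, first ? NULL : &key, &next_key)) {
		if (!bpf_map_lookup_elem(fd, &next_key, &value))
			printf("key=%llu value=%llu\n",
			       (unsigned long long)next_key,
			       (unsigned long long)value);
		key = next_key;
		first = false;
	}
}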
|
| /Linux-v6.1/tools/testing/selftests/bpf/ |
| D | flow_dissector_load.h |
|     39   prog_array_fd = bpf_map__fd(prog_array);   in bpf_flow_load()
|     48   *keys_fd = bpf_map__fd(keys);   in bpf_flow_load()
|