/Linux-v6.6/tools/lib/perf/include/perf/

cpumap.h
    35  LIBPERF_API int perf_cpu_map__nr(const struct perf_cpu_map *cpus);
    51  (idx) < perf_cpu_map__nr(cpus); \
    55  for ((idx) = 0; (idx) < perf_cpu_map__nr(cpus); (idx)++)
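A minimal sketch (not part of the tree) of how the cpumap.h declarations above fit together; it assumes a program linked against libperf with the v6.6 headers, and the "0-3" CPU list is only an illustrative value:

#include <stdio.h>
#include <perf/cpumap.h>

int main(void)
{
	/* Parse a CPU list string into a map; NULL would mean "all online CPUs". */
	struct perf_cpu_map *cpus = perf_cpu_map__new("0-3");
	struct perf_cpu cpu;
	int idx;

	if (!cpus)
		return 1;

	/* perf_cpu_map__nr() bounds every walk over the map. */
	printf("map holds %d CPUs\n", perf_cpu_map__nr(cpus));

	/* perf_cpu_map__for_each_cpu() expands to the bounded loop shown at
	 * header lines 51/55 above. */
	perf_cpu_map__for_each_cpu(cpu, idx, cpus)
		printf("idx %d -> cpu %d\n", idx, cpu.cpu);

	perf_cpu_map__put(cpus);
	return 0;
}

Note that the map index is not the CPU number; perf_cpu_map__cpu() does that translation, which is why the call sites listed below always loop on the index.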
/Linux-v6.6/tools/perf/tests/

topology.c
    126  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in check_cpu_topology()
    144  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in check_cpu_topology()
    160  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in check_cpu_topology()
    176  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in check_cpu_topology()
    190  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in check_cpu_topology()

cpumap.c
    40   TEST_ASSERT_VAL("wrong nr", perf_cpu_map__nr(map) == 20);    in process_event_mask()
    68   TEST_ASSERT_VAL("wrong nr", perf_cpu_map__nr(map) == 2);    in process_event_cpus()
    94   TEST_ASSERT_VAL("wrong nr", perf_cpu_map__nr(map) == 256);    in process_event_range_cpus()
    166  TEST_ASSERT_VAL("failed to merge map: bad nr", perf_cpu_map__nr(c) == 5);    in test__cpu_map_merge()
    181  TEST_ASSERT_EQUAL("failed to intersect map: bad nr", perf_cpu_map__nr(c), nr);    in __test__cpu_map_intersect()

bitmap.c
    20   for (i = 0; i < perf_cpu_map__nr(map); i++)    in get_bitmap()

event_update.c
    72   TEST_ASSERT_VAL("wrong cpus", perf_cpu_map__nr(map) == 3);    in process_event_cpus()
/Linux-v6.6/tools/perf/util/

top.c
    98   perf_cpu_map__nr(top->evlist->core.user_requested_cpus) > 1    in perf_top__header_snprintf()
    106  perf_cpu_map__nr(top->evlist->core.user_requested_cpus),    in perf_top__header_snprintf()
    107  perf_cpu_map__nr(top->evlist->core.user_requested_cpus) > 1    in perf_top__header_snprintf()

cpumap.c
    241  struct cpu_aggr_map *c = cpu_aggr_map__empty_new(perf_cpu_map__nr(cpus));    in cpu_aggr_map__new()
    265  if (c->nr != perf_cpu_map__nr(cpus)) {    in cpu_aggr_map__new()
    584  for (i = 0; i < perf_cpu_map__nr(map) + 1; i++) {    in cpu_map__snprint()
    586  bool last = i == perf_cpu_map__nr(map);    in cpu_map__snprint()
    635  struct perf_cpu last_cpu = perf_cpu_map__cpu(map, perf_cpu_map__nr(map) - 1);    in cpu_map__snprint_mask()
    646  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in cpu_map__snprint_mask()

counts.c
    63   evsel->counts = perf_counts__new(perf_cpu_map__nr(cpus), nthreads);    in evsel__alloc_counts()

cpumap.h
    71   return perf_cpu_map__nr(cpus) == 1 && perf_cpu_map__cpu(cpus, 0).cpu == -1;    in cpu_map__is_dummy()

bpf_ftrace.c
    41   ncpus = perf_cpu_map__nr(ftrace->evlist->core.user_requested_cpus);    in perf_ftrace__latency_prepare_bpf()

bpf_counter.c
    378  *filter_entry_cnt = perf_cpu_map__nr(evsel__cpus(evsel));    in bperf_check_target()
    590  num_cpu = perf_cpu_map__nr(all_cpu_map);    in bperf_sync_counters()

bpf_kwork.c
    168  for (i = 0; i < perf_cpu_map__nr(map); i++) {    in setup_filters()
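Most of the util/ call sites above use perf_cpu_map__nr() to size per-CPU storage and then index it by map position rather than by CPU number (counts.c, cpumap.c). A rough sketch of that pattern using only the public libperf calls; the struct and helper names here are invented for illustration and are not from the tree:

#include <stdlib.h>
#include <perf/cpumap.h>

/* Hypothetical per-CPU accumulator: one slot per map index. */
struct per_cpu_vals {
	int nr;
	unsigned long long *vals;
};

static struct per_cpu_vals *per_cpu_vals__new(struct perf_cpu_map *cpus)
{
	struct per_cpu_vals *pc = calloc(1, sizeof(*pc));

	if (!pc)
		return NULL;

	/* Size the array from the map, not from the highest CPU number:
	 * maps can be sparse (e.g. "0,2,5"). */
	pc->nr = perf_cpu_map__nr(cpus);
	pc->vals = calloc(pc->nr, sizeof(*pc->vals));
	if (!pc->vals) {
		free(pc);
		return NULL;
	}
	return pc;
}

Indexing by map position keeps the array dense; when the real CPU number is needed (for affinity calls or printing), perf_cpu_map__cpu(cpus, idx).cpu recovers it, which is exactly what the bench code below does.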
/Linux-v6.6/tools/perf/bench/

futex-wake.c
    103  int nrcpus = perf_cpu_map__nr(cpu);    in block_threads()
    116  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);    in block_threads()
    167  params.nthreads = perf_cpu_map__nr(cpu);    in bench_futex_wake()

futex-hash.c
    156  params.nthreads = perf_cpu_map__nr(cpu);    in bench_futex_hash()
    177  nrcpus = perf_cpu_map__nr(cpu);    in bench_futex_hash()
    190  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);    in bench_futex_hash()

futex-lock-pi.c
    125  int nrcpus = perf_cpu_map__nr(cpu);    in create_threads()
    148  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);    in create_threads()
    190  params.nthreads = perf_cpu_map__nr(cpu);    in bench_futex_lock_pi()

futex-requeue.c
    128  int nrcpus = perf_cpu_map__nr(cpu);    in block_threads()
    143  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);    in block_threads()
    192  params.nthreads = perf_cpu_map__nr(cpu);    in bench_futex_requeue()

futex-wake-parallel.c
    152  int nrcpus = perf_cpu_map__nr(cpu);    in block_threads()
    167  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu, size, cpuset);    in block_threads()
    272  params.nthreads = perf_cpu_map__nr(cpu);    in bench_futex_wake_parallel()

epoll-ctl.c
    235  nrcpus = perf_cpu_map__nr(cpu);    in do_threads()
    264  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu,    in do_threads()
    350  nthreads = perf_cpu_map__nr(cpu);    in bench_epoll_ctl()

evlist-open-close.c
    74   cnt += evsel->core.threads->nr * perf_cpu_map__nr(evsel->core.cpus);    in evlist__count_evsel_fds()
    154  printf(" Number of cpus:\t%d\n", perf_cpu_map__nr(evlist->core.user_requested_cpus));    in bench_evlist_open_close__run()

epoll-wait.c
    312  nrcpus = perf_cpu_map__nr(cpu);    in do_threads()
    353  CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu,    in do_threads()
    469  nthreads = perf_cpu_map__nr(cpu) - 1;    in bench_epoll_wait()
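The futex and epoll benches above all share one pattern: take the worker-thread count from perf_cpu_map__nr() and pin thread i to the CPU at map index i % nr. A condensed sketch of that pattern, with error handling trimmed and worker() standing in for the real bench body (spawn_pinned() is a made-up name, not a function from the tree):

#define _GNU_SOURCE
#include <pthread.h>
#include <sched.h>
#include <stdlib.h>
#include <perf/cpumap.h>

static void *worker(void *arg) { return arg; }	/* stand-in bench body */

static int spawn_pinned(struct perf_cpu_map *cpu, pthread_t *threads, int nthreads)
{
	int nrcpus = perf_cpu_map__nr(cpu);
	size_t size = CPU_ALLOC_SIZE(nrcpus);
	cpu_set_t *cpuset = CPU_ALLOC(nrcpus);
	pthread_attr_t attr;
	int i;

	if (!cpuset)
		return -1;
	pthread_attr_init(&attr);

	for (i = 0; i < nthreads; i++) {
		CPU_ZERO_S(size, cpuset);
		/* Round-robin over the map: i % nr wraps when there are
		 * more threads than CPUs in the map. */
		CPU_SET_S(perf_cpu_map__cpu(cpu, i % perf_cpu_map__nr(cpu)).cpu,
			  size, cpuset);
		if (pthread_attr_setaffinity_np(&attr, size, cpuset))
			break;
		if (pthread_create(&threads[i], &attr, worker, NULL))
			break;
	}

	pthread_attr_destroy(&attr);
	CPU_FREE(cpuset);
	return i == nthreads ? 0 : -1;
}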
/Linux-v6.6/tools/lib/perf/

libperf.map
    10   perf_cpu_map__nr;

evlist.c
    321  int nr_cpus = perf_cpu_map__nr(evlist->all_cpus);    in perf_evlist__alloc_pollfd()
    547  int nr_cpus = perf_cpu_map__nr(evlist->all_cpus);    in mmap_per_thread()
    589  int nr_cpus = perf_cpu_map__nr(evlist->all_cpus);    in mmap_per_cpu()
    621  nr_mmaps = perf_cpu_map__nr(evlist->all_cpus);    in perf_evlist__nr_mmaps()

evsel.c
    144  perf_evsel__alloc_fd(evsel, perf_cpu_map__nr(cpus), threads->nr) < 0)    in perf_evsel__open()
    491  for (i = 0; i < perf_cpu_map__nr(evsel->cpus) && !err; i++)    in perf_evsel__apply_filter()
/Linux-v6.6/tools/perf/arch/arm64/util/

header.c
    30   for (cpu = 0; cpu < perf_cpu_map__nr(cpus); cpu++) {    in _get_cpuid()
/Linux-v6.6/tools/lib/perf/tests/

test-evlist.c
    77   for (idx = 0; idx < perf_cpu_map__nr(cpus); idx++) {    in test_stat_cpu()
    414  __T("failed count", count >= perf_cpu_map__nr(cpus));    in test_mmap_cpus()
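The test_stat_cpu() loop above is the consumer-side pattern: open one event across a CPU map, then read one value per map index. A trimmed sketch of that flow, assuming a libperf-linked program and enough perf_event privilege to open a system-wide software event; this is a simplified illustration, not the test itself:

#include <string.h>
#include <stdio.h>
#include <linux/perf_event.h>
#include <perf/cpumap.h>
#include <perf/evsel.h>

int main(void)
{
	struct perf_event_attr attr = {
		.type = PERF_TYPE_SOFTWARE,
		.config = PERF_COUNT_SW_CPU_CLOCK,
	};
	struct perf_cpu_map *cpus = perf_cpu_map__new(NULL);	/* all online CPUs */
	struct perf_evsel *evsel = perf_evsel__new(&attr);
	struct perf_counts_values counts;
	int idx;

	if (!cpus || !evsel)
		return 1;

	/* A CPU map plus NULL threads opens one counter per CPU (system wide). */
	if (perf_evsel__open(evsel, cpus, NULL) < 0)
		return 1;

	/* One read per map index, as in the test loop above. */
	for (idx = 0; idx < perf_cpu_map__nr(cpus); idx++) {
		memset(&counts, 0, sizeof(counts));
		perf_evsel__read(evsel, idx, 0, &counts);
		printf("cpu %d: %llu\n", perf_cpu_map__cpu(cpus, idx).cpu,
		       (unsigned long long)counts.val);
	}

	perf_evsel__close(evsel);
	perf_evsel__delete(evsel);
	perf_cpu_map__put(cpus);
	return 0;
}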