Searched refs:all_cpus (Results 1 – 8 of 8) sorted by relevance
Result 1 — matches in print_bpf_output() and do_event_pipe():
  line 60:   bool all_cpus;                                  (member)
  line 75:   int idx = ctx->all_cpus ? cpu : ctx->idx;       in print_bpf_output()
  line 130:  .all_cpus = true,                               in do_event_pipe()
  line 181:  ctx.all_cpus = false;                           in do_event_pipe()
  line 184:  if (!ctx.all_cpus) {                            in do_event_pipe()
  line 197:  opts.cpu_cnt = ctx.all_cpus ? 0 : 1;            in do_event_pipe()
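Result 1 is a per-CPU event pipe: when all_cpus is set, each event keeps the CPU it arrived on as its output index, otherwise everything funnels into one fixed index. A minimal stand-alone sketch of that selection (the struct and names below are simplified stand-ins, not the original's):

    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified stand-in for the event-pipe context in result 1. */
    struct event_pipe_ctx {
        bool all_cpus;  /* read events from every CPU */
        int idx;        /* fixed index when only one CPU is requested */
    };

    /* Mirrors line 75 above: per-CPU index when all_cpus, else the fixed one. */
    static int output_index(const struct event_pipe_ctx *ctx, int cpu)
    {
        return ctx->all_cpus ? cpu : ctx->idx;
    }

    int main(void)
    {
        struct event_pipe_ctx ctx = { .all_cpus = true, .idx = 0 };

        for (int cpu = 0; cpu < 4; cpu++)
            printf("event on cpu %d -> index %d\n", cpu, output_index(&ctx, cpu));
        return 0;
    }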
Result 2 — matches in test_lockup_init():
  line 39:   static bool all_cpus;                           (variable)
  line 40:   module_param(all_cpus, bool, 0400);
  line 41:   MODULE_PARM_DESC(all_cpus, "trigger lockup at all cpus at once");
  line 538:  all_cpus ? "all_cpus " : "",                    in test_lockup_init()
  line 555:  if (all_cpus) {                                 in test_lockup_init()
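Result 2 declares a boolean module parameter. A minimal loadable-module sketch of the same module_param()/MODULE_PARM_DESC() pattern (the module name, init/exit bodies and license line are assumptions added for completeness, not part of the result):

    #include <linux/module.h>
    #include <linux/moduleparam.h>

    /* Read-only at runtime (0400), settable at load time: insmod demo.ko all_cpus=1 */
    static bool all_cpus;
    module_param(all_cpus, bool, 0400);
    MODULE_PARM_DESC(all_cpus, "trigger lockup at all cpus at once");

    static int __init all_cpus_demo_init(void)
    {
        pr_info("all_cpus=%d\n", all_cpus);
        return 0;
    }

    static void __exit all_cpus_demo_exit(void)
    {
    }

    module_init(all_cpus_demo_init);
    module_exit(all_cpus_demo_exit);
    MODULE_LICENSE("GPL");

With permission 0400 the parameter is exposed under /sys/module/<name>/parameters/all_cpus and is readable only by root.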
Result 3 — matches in __perf_evlist__propagate_maps(), perf_evlist__exit() and perf_evlist__set_maps():
  line 58:   evlist->all_cpus = perf_cpu_map__merge(evlist->all_cpus, evsel->cpus);   in __perf_evlist__propagate_maps()
  line 129:  perf_cpu_map__put(evlist->all_cpus);            in perf_evlist__exit()
  line 132:  evlist->all_cpus = NULL;                        in perf_evlist__exit()
  line 170:  if (!evlist->all_cpus && cpus)                  in perf_evlist__set_maps()
  line 171:  evlist->all_cpus = perf_cpu_map__get(cpus);     in perf_evlist__set_maps()
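Result 3 is the life cycle of the evlist-wide CPU map: propagation merges each event's map into all_cpus, set_maps takes a reference, and exit drops it and clears the pointer. A user-space sketch of that merge-then-put flow against libperf (assuming perf_cpu_map__new(), __merge(), __nr() and __put() are all available from perf/cpumap.h when linking with -lperf):

    #include <perf/cpumap.h>
    #include <stdio.h>

    int main(void)
    {
        /* Two per-event CPU lists, as two evsels might carry. */
        struct perf_cpu_map *a = perf_cpu_map__new("0-1");
        struct perf_cpu_map *b = perf_cpu_map__new("1-3");
        struct perf_cpu_map *all_cpus = NULL;

        /* Propagation step: fold each event's map into the list-wide one. */
        all_cpus = perf_cpu_map__merge(all_cpus, a);
        all_cpus = perf_cpu_map__merge(all_cpus, b);

        printf("merged map covers %d cpus\n", perf_cpu_map__nr(all_cpus));

        /* Teardown, as in perf_evlist__exit(): drop the references. */
        perf_cpu_map__put(all_cpus);
        perf_cpu_map__put(a);
        perf_cpu_map__put(b);
        return 0;
    }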
Result 4 — matches in kvm_hv_flush_tlb() and kvm_hv_send_ipi():
  line 1506: bool all_cpus;                                  (local) in kvm_hv_flush_tlb()
  line 1525: all_cpus = (flush.flags & HV_FLUSH_ALL_PROCESSORS) ||   in kvm_hv_flush_tlb()
  line 1538: all_cpus = flush_ex.hv_vp_set.format !=         in kvm_hv_flush_tlb()
  line 1545: if (!sparse_banks_len && !all_cpus)             in kvm_hv_flush_tlb()
  line 1548: if (!all_cpus &&                                in kvm_hv_flush_tlb()
  line 1559: vcpu_mask = all_cpus ? NULL :                   in kvm_hv_flush_tlb()
  line 1608: bool all_cpus;                                  (local) in kvm_hv_send_ipi()
  line 1624: all_cpus = false;                               in kvm_hv_send_ipi()
  line 1642: all_cpus = send_ipi_ex.vp_set.format == HV_GENERIC_SET_ALL;   in kvm_hv_send_ipi()
  line 1647: if (!all_cpus &&                                in kvm_hv_send_ipi()
  [all …]
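In result 4, all_cpus is the broad-side case of a Hyper-V flush or IPI hypercall: when the guest asks for every processor, the sparse vCPU set is dropped entirely and a NULL mask is taken to mean "all vCPUs". A self-contained sketch of that NULL-means-everyone convention (the names, mask width and flag handling here are illustrative, not the kernel's):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NR_VCPUS 8

    /* A NULL mask means "every vCPU", mirroring vcpu_mask = all_cpus ? NULL : ... above. */
    static void flush_tlb_on(const uint64_t *vcpu_mask)
    {
        for (int i = 0; i < NR_VCPUS; i++)
            if (!vcpu_mask || (*vcpu_mask & (1ULL << i)))
                printf("flush TLB on vcpu %d\n", i);
    }

    int main(void)
    {
        uint64_t sparse = (1ULL << 1) | (1ULL << 5);
        bool all_cpus = false;

        flush_tlb_on(all_cpus ? NULL : &sparse);    /* only vcpus 1 and 5 */

        all_cpus = true;                            /* e.g. HV_FLUSH_ALL_PROCESSORS was set */
        flush_tlb_on(all_cpus ? NULL : &sparse);    /* every vcpu */
        return 0;
    }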
Result 5 — matches in __wait_for_cpus():
  line 524:  int all_cpus = num_online_cpus();               (local) in __wait_for_cpus()
  line 528:  while (atomic_read(t) < all_cpus) {             in __wait_for_cpus()
  line 531:  all_cpus - atomic_read(t));                     in __wait_for_cpus()
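Result 5 is a rendezvous: each CPU bumps an atomic counter and spins until the count reaches num_online_cpus(). A user-space sketch of the same barrier idea with C11 atomics and pthreads (the original's timeout and error handling are omitted; a fixed thread count stands in for the online-CPU count):

    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    #define ALL_CPUS 4                              /* stand-in for num_online_cpus() */

    static atomic_int arrived;

    static void *cpu_thread(void *arg)
    {
        long cpu = (long)arg;

        atomic_fetch_add(&arrived, 1);              /* check in at the sync point */
        while (atomic_load(&arrived) < ALL_CPUS)    /* spin until everyone has arrived */
            ;
        printf("cpu %ld released\n", cpu);
        return NULL;
    }

    int main(void)
    {
        pthread_t t[ALL_CPUS];

        for (long i = 0; i < ALL_CPUS; i++)
            pthread_create(&t[i], NULL, cpu_thread, (void *)i);
        for (int i = 0; i < ALL_CPUS; i++)
            pthread_join(t[i], NULL);
        return 0;
    }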
Result 6:
  line 21:   struct perf_cpu_map *all_cpus;                  (member)
Result 7:
  line 340:  perf_cpu_map__for_each_cpu (cpu, index, (evlist)->core.all_cpus)
Result 8 — match in read_affinity_counters():
  line 391:  ncpus = perf_cpu_map__nr(evsel_list->core.all_cpus);   in read_affinity_counters()
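Results 6 through 8 only read the merged map: a header exposes it as a member, a macro walks its (index, cpu) pairs, and read_affinity_counters() sizes its per-CPU work with perf_cpu_map__nr(). A short sketch of that sizing step against libperf (same availability assumptions as the sketch after result 3):

    #include <perf/cpumap.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void)
    {
        struct perf_cpu_map *all_cpus = perf_cpu_map__new("0-3");

        /* Size per-CPU counter storage by the merged map, as in result 8. */
        int ncpus = perf_cpu_map__nr(all_cpus);
        long *counts = calloc(ncpus, sizeof(*counts));

        printf("reserved %d per-cpu counter slots\n", ncpus);

        free(counts);
        perf_cpu_map__put(all_cpus);
        return 0;
    }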