/Linux-v5.15/kernel/sched/ |
D | topology.c |
    284  static void perf_domain_debug(const struct cpumask *cpu_map,   in perf_domain_debug() argument
    290  printk(KERN_DEBUG "root_domain %*pbl:", cpumask_pr_args(cpu_map));   in perf_domain_debug()
    352  static bool build_perf_domains(const struct cpumask *cpu_map)   in build_perf_domains() argument
    354  int i, nr_pd = 0, nr_ps = 0, nr_cpus = cpumask_weight(cpu_map);   in build_perf_domains()
    356  int cpu = cpumask_first(cpu_map);   in build_perf_domains()
    368  cpumask_pr_args(cpu_map));   in build_perf_domains()
    376  cpumask_pr_args(cpu_map));   in build_perf_domains()
    383  cpumask_pr_args(cpu_map));   in build_perf_domains()
    388  for_each_cpu(i, cpu_map) {   in build_perf_domains()
    402  cpumask_pr_args(cpu_map));   in build_perf_domains()
    [all …]
|
/Linux-v5.15/arch/mips/kernel/ |
D | cacheinfo.c |
    58  static void fill_cpumask_siblings(int cpu, cpumask_t *cpu_map)   in fill_cpumask_siblings() argument
    64  cpumask_set_cpu(cpu1, cpu_map);   in fill_cpumask_siblings()
    67  static void fill_cpumask_cluster(int cpu, cpumask_t *cpu_map)   in fill_cpumask_cluster() argument
    74  cpumask_set_cpu(cpu1, cpu_map);   in fill_cpumask_cluster()
|
/Linux-v5.15/arch/ia64/mm/ |
D | discontig.c |
    184  unsigned int *cpu_map;   in setup_per_cpu_areas() local
    194  cpu_map = ai->groups[0].cpu_map;   in setup_per_cpu_areas()
    208  cpu_map[unit++] = cpu;   in setup_per_cpu_areas()
    233  cpu = cpu_map[unit];   in setup_per_cpu_areas()
    245  gi->cpu_map = &cpu_map[unit];   in setup_per_cpu_areas()
|
D | contig.c | 115 gi->cpu_map[gi->nr_units++] = cpu; in setup_per_cpu_areas()
|
/Linux-v5.15/tools/power/x86/intel-speed-select/ |
D | isst-config.c |
    66   struct _cpu_map *cpu_map;   variable
    590  if (cpu_map[i].pkg_id == pkg_id &&   in get_max_punit_core_id()
    591  cpu_map[i].die_id == die_id &&   in get_max_punit_core_id()
    592  cpu_map[i].punit_cpu_core > max_id)   in get_max_punit_core_id()
    593  max_id = cpu_map[i].punit_cpu_core;   in get_max_punit_core_id()
    629  cpu_map = malloc(sizeof(*cpu_map) * topo_max_cpus);   in create_cpu_map()
    630  if (!cpu_map)   in create_cpu_map()
    642  map.cpu_map[0].logical_cpu = i;   in create_cpu_map()
    645  map.cpu_map[0].logical_cpu);   in create_cpu_map()
    649  map.cpu_map[0].logical_cpu);   in create_cpu_map()
    [all …]
|
/Linux-v5.15/tools/testing/selftests/bpf/progs/ |
D | test_xdp_with_cpumap_helpers.c |
    13  } cpu_map SEC(".maps");
    18  return bpf_redirect_map(&cpu_map, 1, 0);   in xdp_redir_prog()
|
/Linux-v5.15/samples/bpf/ |
D | xdp_redirect_cpu.bpf.c |
    15   } cpu_map SEC(".maps");
    166  return bpf_redirect_map(&cpu_map, cpu_dest, 0);   in xdp_prognum0_no_touch()
    208  return bpf_redirect_map(&cpu_map, cpu_dest, 0);   in xdp_prognum1_touch_data()
    253  return bpf_redirect_map(&cpu_map, cpu_dest, 0);   in xdp_prognum2_round_robin()
    319  return bpf_redirect_map(&cpu_map, cpu_dest, 0);   in xdp_prognum3_proto_separate()
    392  return bpf_redirect_map(&cpu_map, cpu_dest, 0);   in xdp_prognum4_ddos_filter_pktgen()
    493  return bpf_redirect_map(&cpu_map, cpu_dest, 0);   in xdp_prognum5_lb_hash_ip_pairs()
|
D | xdp_redirect_cpu_user.c |
    367  if (bpf_map__set_max_entries(skel->maps.cpu_map, n_cpus) < 0) {   in main()
    501  ret = bpf_obj_get_info_by_fd(bpf_map__fd(skel->maps.cpu_map), &info, &infosz);   in main()
    510  map_fd = bpf_map__fd(skel->maps.cpu_map);   in main()
|
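Both XDP entries above (the selftest and the sample) follow the same pattern: a BPF_MAP_TYPE_CPUMAP map named cpu_map plus a bpf_redirect_map() call that steers the frame to a chosen CPU index. A minimal sketch of that pattern, assuming a clang/libbpf BPF build environment; the max_entries value and the target CPU index are illustrative, not taken from the files above:

    /* Minimal CPUMAP redirect sketch (assumed build: clang -target bpf, libbpf headers). */
    #include <linux/bpf.h>
    #include <bpf/bpf_helpers.h>

    struct {
            __uint(type, BPF_MAP_TYPE_CPUMAP);
            __uint(key_size, sizeof(__u32));
            __uint(value_size, sizeof(struct bpf_cpumap_val));
            __uint(max_entries, 64);                /* illustrative size */
    } cpu_map SEC(".maps");

    SEC("xdp")
    int xdp_redir_to_cpu(struct xdp_md *ctx)
    {
            __u32 cpu_dest = 1;                     /* illustrative CPU-map index */

            /* Returns XDP_REDIRECT when cpu_map[cpu_dest] is populated;
             * with flags == 0 a failed lookup yields XDP_ABORTED, as in the samples above. */
            return bpf_redirect_map(&cpu_map, cpu_dest, 0);
    }

    char _license[] SEC("license") = "GPL";

User space (as in xdp_redirect_cpu_user.c above) then sizes the map with bpf_map__set_max_entries() and fills each slot with a struct bpf_cpumap_val before traffic can be redirected to that CPU.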
/Linux-v5.15/drivers/platform/x86/intel/speed_select_if/ |
D | isst_if_common.c |
    441  struct isst_if_cpu_map *cpu_map;   in isst_if_proc_phyid_req() local
    443  cpu_map = (struct isst_if_cpu_map *)cmd_ptr;   in isst_if_proc_phyid_req()
    444  if (cpu_map->logical_cpu >= nr_cpu_ids ||   in isst_if_proc_phyid_req()
    445  cpu_map->logical_cpu >= num_possible_cpus())   in isst_if_proc_phyid_req()
    449  cpu_map->physical_cpu = isst_cpu_info[cpu_map->logical_cpu].punit_cpu_id;   in isst_if_proc_phyid_req()
    572  cmd_cb.offset = offsetof(struct isst_if_cpu_maps, cpu_map);   in isst_if_def_ioctl()
|
/Linux-v5.15/tools/perf/util/ |
D | mmap.c |
    244  const struct perf_cpu_map *cpu_map = NULL;   in build_node_mask() local
    246  cpu_map = cpu_map__online();   in build_node_mask()
    247  if (!cpu_map)   in build_node_mask()
    250  nr_cpus = perf_cpu_map__nr(cpu_map);   in build_node_mask()
    252  cpu = cpu_map->map[c]; /* map c index to online cpu index */   in build_node_mask()
|
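The mmap.c entry above walks a perf_cpu_map of online CPUs to translate map indices into logical CPU numbers. A small stand-alone sketch of the same libperf API, assuming the tools/lib/perf headers and library are installed (link with -lperf); in the v5.15 API these calls return plain int CPU numbers:

    /* Sketch of the libperf cpu-map usage that build_node_mask() relies on. */
    #include <stdio.h>
    #include <perf/cpumap.h>

    int main(void)
    {
            /* NULL asks libperf for the currently online CPUs, the same set
             * the perf tool gets from cpu_map__online() above. */
            struct perf_cpu_map *cpus = perf_cpu_map__new(NULL);
            int idx;

            if (!cpus)
                    return 1;

            /* Index -> logical CPU translation, as in "cpu = cpu_map->map[c]" above. */
            for (idx = 0; idx < perf_cpu_map__nr(cpus); idx++)
                    printf("index %d -> cpu %d\n", idx, perf_cpu_map__cpu(cpus, idx));

            perf_cpu_map__put(cpus);
            return 0;
    }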
D | tool.h | 73 cpu_map, member
|
D | session.c |
    542   if (tool->cpu_map == NULL)   in perf_tool__fill_defaults()
    543   tool->cpu_map = process_event_cpu_map_stub;   in perf_tool__fill_defaults()
    907   struct perf_record_cpu_map_data *data = &event->cpu_map.data;   in perf_event__cpu_map_swap()
    1651  return tool->cpu_map(session, event);   in perf_session__process_user_event()
|
D | event.c | 341 struct perf_cpu_map *cpus = cpu_map__new_data(&event->cpu_map.data); in perf_event__fprintf_cpu_map()
|
/Linux-v5.15/kernel/bpf/ |
D | cpumap.c |
    78   struct bpf_cpu_map_entry __rcu **cpu_map;   member
    112  cmap->cpu_map = bpf_map_area_alloc(cmap->map.max_entries *   in cpu_map_alloc()
    115  if (!cmap->cpu_map)   in cpu_map_alloc()
    533  old_rcpu = unrcu_pointer(xchg(&cmap->cpu_map[key_cpu], RCU_INITIALIZER(rcpu)));   in __cpu_map_entry_replace()
    615  rcpu = rcu_dereference_raw(cmap->cpu_map[i]);   in cpu_map_free()
    622  bpf_map_area_free(cmap->cpu_map);   in cpu_map_free()
    638  rcpu = rcu_dereference_check(cmap->cpu_map[key],   in __cpu_map_lookup_elem()
|
/Linux-v5.15/tools/perf/tests/ |
D | cpumap.c |
    19  struct perf_record_cpu_map *map_event = &event->cpu_map;   in process_event_mask()
    53  struct perf_record_cpu_map *map_event = &event->cpu_map;   in process_event_cpus()
|
/Linux-v5.15/mm/ |
D | percpu.c |
    2447  __alignof__(ai->groups[0].cpu_map[0]));   in pcpu_alloc_alloc_info()
    2448  ai_size = base_size + nr_units * sizeof(ai->groups[0].cpu_map[0]);   in pcpu_alloc_alloc_info()
    2456  ai->groups[0].cpu_map = ptr;   in pcpu_alloc_alloc_info()
    2459  ai->groups[0].cpu_map[unit] = NR_CPUS;   in pcpu_alloc_alloc_info()
    2525  if (gi->cpu_map[unit] != NR_CPUS)   in pcpu_dump_alloc_info()
    2527  cpu_width, gi->cpu_map[unit]);   in pcpu_dump_alloc_info()
    2672  cpu = gi->cpu_map[i];   in pcpu_setup_first_chunk()
    2869  unsigned int *cpu_map;   in pcpu_build_alloc_info() local
    2961  cpu_map = ai->groups[0].cpu_map;   in pcpu_build_alloc_info()
    2964  ai->groups[group].cpu_map = cpu_map;   in pcpu_build_alloc_info()
    [all …]
|
/Linux-v5.15/tools/perf/arch/nds32/util/ |
D | header.c | 15 struct cpu_map *cpus; in get_cpuid_str()
|
/Linux-v5.15/tools/perf/arch/arm/util/ |
D | cs-etm.c |
    725  struct perf_cpu_map *cpu_map;   in cs_etm_info_fill() local
    740  cpu_map = online_cpus;   in cs_etm_info_fill()
    749  cpu_map = event_cpus;   in cs_etm_info_fill()
    752  nr_cpu = perf_cpu_map__nr(cpu_map);   in cs_etm_info_fill()
    766  if (cpu_map__has(cpu_map, i))   in cs_etm_info_fill()
|
/Linux-v5.15/tools/perf/python/ |
D | twatch.py | 12 cpus = perf.cpu_map()
|
D | tracepoint.py | 19 cpus = perf.cpu_map()
|
/Linux-v5.15/include/uapi/linux/ |
D | isst_if.h | 63 struct isst_if_cpu_map cpu_map[1]; member
|
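The isst_if.h struct above is the payload of the driver's logical-to-physical CPU mapping request, which isst_if_proc_phyid_req() (see the isst_if_common.c entry) fills in and which the isst-config tool walks when building its own cpu_map table. A hedged user-space sketch, assuming the standard /dev/isst_interface character device and the ISST_IF_GET_PHY_ID ioctl from this header; the CPU number and error handling are illustrative only:

    /* Resolve a logical CPU to its punit (physical) CPU id via the ISST cpu_map ioctl. */
    #include <stdio.h>
    #include <fcntl.h>
    #include <unistd.h>
    #include <sys/ioctl.h>
    #include <linux/isst_if.h>

    int main(void)
    {
            struct isst_if_cpu_maps map = { 0 };
            int fd = open("/dev/isst_interface", O_RDWR);

            if (fd < 0)
                    return 1;

            map.cmd_count = 1;                   /* one logical->physical lookup */
            map.cpu_map[0].logical_cpu = 0;      /* illustrative logical CPU */

            if (ioctl(fd, ISST_IF_GET_PHY_ID, &map) == 0)
                    printf("logical cpu %u -> punit cpu %u\n",
                           map.cpu_map[0].logical_cpu,
                           map.cpu_map[0].physical_cpu);

            close(fd);
            return 0;
    }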
/Linux-v5.15/tools/testing/selftests/bpf/prog_tests/ |
D | xdp_cpumap_attach.c | 34 map_fd = bpf_map__fd(skel->maps.cpu_map); in test_xdp_cpumap_attach()
|
/Linux-v5.15/include/linux/ |
D | percpu.h | 71 unsigned int *cpu_map; /* unit->cpu map, empty member
|
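For the percpu allocator (see the mm/percpu.c and ia64 entries above), cpu_map is a per-group array indexed by unit: each slot names the CPU that unit backs, and empty slots hold NR_CPUS, which is what the truncated comment on line 71 refers to and why pcpu_dump_alloc_info() skips entries equal to NR_CPUS. A user-space model of that layout, not kernel code; the NR_CPUS value and the example contents are illustrative:

    /* Model of the unit->cpu map from struct pcpu_group_info. */
    #include <stdio.h>

    #define NR_CPUS 8   /* stand-in for the kernel's configured CPU limit */

    int main(void)
    {
            /* Group of four units; unit 2 is an unused hole. */
            unsigned int cpu_map[] = { 0, 1, NR_CPUS, 3 };
            unsigned int unit;

            for (unit = 0; unit < sizeof(cpu_map) / sizeof(cpu_map[0]); unit++) {
                    if (cpu_map[unit] == NR_CPUS)
                            continue;   /* empty entry, no CPU mapped to this unit */
                    printf("unit %u -> cpu %u\n", unit, cpu_map[unit]);
            }
            return 0;
    }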
/Linux-v5.15/tools/lib/perf/include/perf/ |
D | event.h | 419 struct perf_record_cpu_map cpu_map; member
|
/Linux-v5.15/drivers/scsi/lpfc/ |
D | lpfc_init.c |
    1252   hdwq = &phba->sli4_hba.hdwq[phba->sli4_hba.cpu_map[i].hdwq];   in lpfc_idle_stat_delay_work()
    8213   phba->sli4_hba.cpu_map = kcalloc(phba->sli4_hba.num_possible_cpu,   in lpfc_sli4_driver_resource_setup()
    8216   if (!phba->sli4_hba.cpu_map) {   in lpfc_sli4_driver_resource_setup()
    8288   kfree(phba->sli4_hba.cpu_map);   in lpfc_sli4_driver_resource_setup()
    8332   kfree(phba->sli4_hba.cpu_map);   in lpfc_sli4_driver_resource_unset()
    10314  cpup = &phba->sli4_hba.cpu_map[cpu];   in lpfc_sli4_queue_create()
    10347  cpup = &phba->sli4_hba.cpu_map[cpu];   in lpfc_sli4_queue_create()
    10360  eqcpup = &phba->sli4_hba.cpu_map[eqcpu];   in lpfc_sli4_queue_create()
    10921  cpup = &phba->sli4_hba.cpu_map[cpu];   in lpfc_sli4_queue_setup()
    10956  cpup = &phba->sli4_hba.cpu_map[cpu];   in lpfc_sli4_queue_setup()
    [all …]
|