Home
last modified time | relevance | path

Searched refs:next_cpu (Results 1 – 16 of 16) sorted by relevance

/Linux-v5.4/arch/x86/platform/uv/
uv_time.c:49 int next_cpu; member
169 head->next_cpu = -1; in uv_rtc_allocate_timers()
186 head->next_cpu = -1; in uv_rtc_find_next_timer()
195 head->next_cpu = bcpu; in uv_rtc_find_next_timer()
219 int next_cpu; in uv_rtc_set_timer() local
223 next_cpu = head->next_cpu; in uv_rtc_set_timer()
227 if (next_cpu < 0 || bcpu == next_cpu || in uv_rtc_set_timer()
228 expires < head->cpu[next_cpu].expires) { in uv_rtc_set_timer()
229 head->next_cpu = bcpu; in uv_rtc_set_timer()
259 if ((head->next_cpu == bcpu && uv_read_rtc(NULL) >= *t) || force) in uv_rtc_unset_timer()
[all …]
/Linux-v5.4/tools/testing/selftests/bpf/
test_lru_map.c:163 int next_cpu = 0; in test_lru_sanity0() local
168 assert(sched_next_online(0, &next_cpu) != -1); in test_lru_sanity0()
254 int next_cpu = 0; in test_lru_sanity1() local
263 assert(sched_next_online(0, &next_cpu) != -1); in test_lru_sanity1()
331 int next_cpu = 0; in test_lru_sanity2() local
340 assert(sched_next_online(0, &next_cpu) != -1); in test_lru_sanity2()
438 int next_cpu = 0; in test_lru_sanity3() local
447 assert(sched_next_online(0, &next_cpu) != -1); in test_lru_sanity3()
501 int next_cpu = 0; in test_lru_sanity4() local
506 assert(sched_next_online(0, &next_cpu) != -1); in test_lru_sanity4()
[all …]
/Linux-v5.4/kernel/trace/
trace_hwlat.c:273 int next_cpu; in move_to_next_cpu() local
287 next_cpu = cpumask_next(smp_processor_id(), current_mask); in move_to_next_cpu()
290 if (next_cpu >= nr_cpu_ids) in move_to_next_cpu()
291 next_cpu = cpumask_first(current_mask); in move_to_next_cpu()
293 if (next_cpu >= nr_cpu_ids) /* Shouldn't happen! */ in move_to_next_cpu()
297 cpumask_set_cpu(next_cpu, current_mask); in move_to_next_cpu()
355 int next_cpu; in start_kthread() local
365 next_cpu = cpumask_first(current_mask); in start_kthread()
374 cpumask_set_cpu(next_cpu, current_mask); in start_kthread()
trace_entries.h:123 __field( unsigned int, next_cpu ) \
140 __entry->next_cpu),
160 __entry->next_cpu),
trace_output.c:912 field->next_cpu, in trace_ctxwake_print()
946 field->next_cpu, in trace_ctxwake_raw()
982 SEQ_PUT_HEX_FIELD(s, field->next_cpu); in trace_ctxwake_hex()
1013 SEQ_PUT_FIELD(s, field->next_cpu); in trace_ctxwake_bin()
trace_sched_wakeup.c:396 entry->next_cpu = task_cpu(next); in tracing_sched_switch_trace()
424 entry->next_cpu = task_cpu(wakee); in tracing_sched_wakeup_trace()
trace.c:3297 int next_cpu = -1; in __find_next_entry() local
3327 next_cpu = cpu; in __find_next_entry()
3337 *ent_cpu = next_cpu; in __find_next_entry()
/Linux-v5.4/arch/parisc/kernel/
irq.c:347 static int next_cpu = -1; in txn_alloc_addr() local
349 next_cpu++; /* assign to "next" CPU we want this bugger on */ in txn_alloc_addr()
352 while ((next_cpu < nr_cpu_ids) && in txn_alloc_addr()
353 (!per_cpu(cpu_data, next_cpu).txn_addr || in txn_alloc_addr()
354 !cpu_online(next_cpu))) in txn_alloc_addr()
355 next_cpu++; in txn_alloc_addr()
357 if (next_cpu >= nr_cpu_ids) in txn_alloc_addr()
358 next_cpu = 0; /* nothing else, assign monarch */ in txn_alloc_addr()
360 return txn_affinity_addr(virt_irq, next_cpu); in txn_alloc_addr()
/Linux-v5.4/kernel/
smp.c:416 int cpu, next_cpu, this_cpu = smp_processor_id(); in smp_call_function_many() local
445 next_cpu = cpumask_next_and(cpu, mask, cpu_online_mask); in smp_call_function_many()
446 if (next_cpu == this_cpu) in smp_call_function_many()
447 next_cpu = cpumask_next_and(next_cpu, mask, cpu_online_mask); in smp_call_function_many()
450 if (next_cpu >= nr_cpu_ids) { in smp_call_function_many()
/Linux-v5.4/kernel/time/
clocksource.c:192 int next_cpu, reset_pending; in clocksource_watchdog() local
293 next_cpu = cpumask_next(raw_smp_processor_id(), cpu_online_mask); in clocksource_watchdog()
294 if (next_cpu >= nr_cpu_ids) in clocksource_watchdog()
295 next_cpu = cpumask_first(cpu_online_mask); in clocksource_watchdog()
297 add_timer_on(&watchdog_timer, next_cpu); in clocksource_watchdog()
tick-broadcast.c:608 int cpu, next_cpu = 0; in tick_handle_oneshot_broadcast() local
637 next_cpu = cpu; in tick_handle_oneshot_broadcast()
674 tick_broadcast_set_event(dev, next_cpu, next_event); in tick_handle_oneshot_broadcast()
/Linux-v5.4/block/
blk-mq.c:1387 cpu_online(hctx->next_cpu)) { in __blk_mq_run_hw_queue()
1425 int next_cpu = hctx->next_cpu; in blk_mq_hctx_next_cpu() local
1432 next_cpu = cpumask_next_and(next_cpu, hctx->cpumask, in blk_mq_hctx_next_cpu()
1434 if (next_cpu >= nr_cpu_ids) in blk_mq_hctx_next_cpu()
1435 next_cpu = blk_mq_first_mapped_cpu(hctx); in blk_mq_hctx_next_cpu()
1443 if (!cpu_online(next_cpu)) { in blk_mq_hctx_next_cpu()
1453 hctx->next_cpu = next_cpu; in blk_mq_hctx_next_cpu()
1458 hctx->next_cpu = next_cpu; in blk_mq_hctx_next_cpu()
1459 return next_cpu; in blk_mq_hctx_next_cpu()
2568 hctx->next_cpu = blk_mq_first_mapped_cpu(hctx); in blk_mq_map_swqueue()
/Linux-v5.4/include/linux/
blk-mq.h:24 int next_cpu; member
/Linux-v5.4/drivers/irqchip/
irq-gic-v3.c:1052 int next_cpu, cpu = *base_cpu; in gic_compute_target_list() local
1059 next_cpu = cpumask_next(cpu, mask); in gic_compute_target_list()
1060 if (next_cpu >= nr_cpu_ids) in gic_compute_target_list()
1062 cpu = next_cpu; in gic_compute_target_list()
/Linux-v5.4/net/core/
dev.c:3915 struct rps_dev_flow *rflow, u16 next_cpu) in set_rps_cpu() argument
3917 if (next_cpu < nr_cpu_ids) { in set_rps_cpu()
3930 rxq_index = cpu_rmap_lookup_index(dev->rx_cpu_rmap, next_cpu); in set_rps_cpu()
3951 per_cpu(softnet_data, next_cpu).input_queue_head; in set_rps_cpu()
3954 rflow->cpu = next_cpu; in set_rps_cpu()
4002 u32 next_cpu; in get_rps_cpu() local
4010 next_cpu = ident & rps_cpu_mask; in get_rps_cpu()
4029 if (unlikely(tcpu != next_cpu) && in get_rps_cpu()
4033 tcpu = next_cpu; in get_rps_cpu()
4034 rflow = set_rps_cpu(dev, skb, rflow, next_cpu); in get_rps_cpu()
/Linux-v5.4/drivers/net/ethernet/mediatek/
mtk_eth_soc.c:1334 u32 next_cpu = desc->txd2; in mtk_poll_tx_qdma() local
1359 cpu = next_cpu; in mtk_poll_tx_qdma()