/Linux-v4.19/net/netfilter/ |
D | nft_counter.c |
  35   struct nft_counter *this_cpu;            in nft_counter_do_eval() local
  39   this_cpu = this_cpu_ptr(priv->counter);  in nft_counter_do_eval()
  44   this_cpu->bytes += pkt->skb->len;        in nft_counter_do_eval()
  45   this_cpu->packets++;                     in nft_counter_do_eval()
  64   struct nft_counter *this_cpu;            in nft_counter_do_init() local
  71   this_cpu = this_cpu_ptr(cpu_stats);      in nft_counter_do_init()
  73   this_cpu->packets =                      in nft_counter_do_init()
  77   this_cpu->bytes =                        in nft_counter_do_init()
  110  struct nft_counter *this_cpu;            in nft_counter_reset() local
  113  this_cpu = this_cpu_ptr(priv->counter);  in nft_counter_reset()
  [all …]
|
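The nft_counter.c hits above are the classic per-CPU counter pattern: the packet path bumps only this CPU's slot through this_cpu_ptr(), and a reader folds all slots together later. A minimal sketch of that pattern, assuming a plain alloc_percpu() allocation (the struct and function names are illustrative, and the real nft_counter code adds u64_stats sequence protection that is omitted here):

    #include <linux/cpumask.h>
    #include <linux/percpu.h>
    #include <linux/preempt.h>
    #include <linux/types.h>

    struct pkt_counter {
            u64 packets;
            u64 bytes;
    };

    /* Created once with: counters = alloc_percpu(struct pkt_counter); */
    static struct pkt_counter __percpu *counters;

    /* Fast path: touch only this CPU's slot - no shared cacheline, no lock. */
    static void counter_account(unsigned int len)
    {
            struct pkt_counter *c;

            preempt_disable();              /* stay on one CPU while updating */
            c = this_cpu_ptr(counters);
            c->packets++;
            c->bytes += len;
            preempt_enable();
    }

    /* Slow path: fold every CPU's slot into one total. */
    static void counter_fold(struct pkt_counter *total)
    {
            int cpu;

            total->packets = 0;
            total->bytes = 0;
            for_each_possible_cpu(cpu) {
                    const struct pkt_counter *c = per_cpu_ptr(counters, cpu);

                    total->packets += c->packets;
                    total->bytes += c->bytes;
            }
    }
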
/Linux-v4.19/arch/parisc/kernel/ |
D | smp.c |
  123  int this_cpu = smp_processor_id();                               in ipi_interrupt() local
  124  struct cpuinfo_parisc *p = &per_cpu(cpu_data, this_cpu);         in ipi_interrupt()
  129  spinlock_t *lock = &per_cpu(ipi_lock, this_cpu);                 in ipi_interrupt()
  147  smp_debug(100, KERN_DEBUG "CPU%d IPI_NOP\n", this_cpu);          in ipi_interrupt()
  151  smp_debug(100, KERN_DEBUG "CPU%d IPI_RESCHEDULE\n", this_cpu);   in ipi_interrupt()
  157  smp_debug(100, KERN_DEBUG "CPU%d IPI_CALL_FUNC\n", this_cpu);    in ipi_interrupt()
  162  smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_START\n", this_cpu);    in ipi_interrupt()
  166  smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_STOP\n", this_cpu);     in ipi_interrupt()
  171  smp_debug(100, KERN_DEBUG "CPU%d is alive!\n", this_cpu);        in ipi_interrupt()
  176  this_cpu, which);                                                in ipi_interrupt()
|
/Linux-v4.19/lib/ |
D | nmi_backtrace.c |
  40  int i, this_cpu = get_cpu();                                  in nmi_trigger_cpumask_backtrace() local
  53  cpumask_clear_cpu(this_cpu, to_cpumask(backtrace_mask));     in nmi_trigger_cpumask_backtrace()
  61  if (cpumask_test_cpu(this_cpu, to_cpumask(backtrace_mask)))  in nmi_trigger_cpumask_backtrace()
  66  this_cpu, nr_cpumask_bits, to_cpumask(backtrace_mask));      in nmi_trigger_cpumask_backtrace()
|
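nmi_trigger_cpumask_backtrace() pins itself with get_cpu(), copies the request into its own mask, and drops the calling CPU from that mask when the sender's stack should not be dumped. A reduced sketch of that exclude-self idiom (only the cpumask/get_cpu calls are real kernel API; the function and mask names here are made up):

    #include <linux/cpumask.h>
    #include <linux/smp.h>
    #include <linux/types.h>

    static cpumask_t pending_mask;          /* stand-in for backtrace_mask */

    static void trigger_backtrace(const struct cpumask *mask, bool exclude_self)
    {
            int this_cpu = get_cpu();       /* disables preemption, returns our id */

            cpumask_copy(&pending_mask, mask);
            if (exclude_self)
                    cpumask_clear_cpu(this_cpu, &pending_mask);

            /* ...raise the backtrace NMI/IPI on every CPU left in pending_mask... */

            put_cpu();                      /* re-enable preemption */
    }
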
D | smp_processor_id.c |
  13  int this_cpu = raw_smp_processor_id();                            in check_preemption_disabled() local
  25  if (cpumask_equal(&current->cpus_allowed, cpumask_of(this_cpu)))  in check_preemption_disabled()
  51  return this_cpu;                                                  in check_preemption_disabled()
|
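lib/smp_processor_id.c is the debug backend behind smp_processor_id(): it reads the CPU number with raw_smp_processor_id() and only treats it as trustworthy when preemption or interrupts are disabled, or when the task is pinned to a single CPU; otherwise it warns. The caller-side rule, as a hedged sketch:

    #include <linux/preempt.h>
    #include <linux/printk.h>
    #include <linux/smp.h>

    static void cpu_id_example(void)
    {
            int cpu;

            /*
             * smp_processor_id() is only meaningful while the task cannot
             * migrate; with CONFIG_DEBUG_PREEMPT the check_preemption_disabled()
             * helper shown above fires a warning otherwise.
             */
            preempt_disable();
            cpu = smp_processor_id();
            pr_info("doing per-CPU work on CPU%d\n", cpu);
            preempt_enable();

            /*
             * raw_smp_processor_id() skips the check: the result may be stale
             * by the time it is used, which is acceptable for hints and logging.
             */
            cpu = raw_smp_processor_id();
            pr_info("last seen on CPU%d (may have migrated since)\n", cpu);
    }
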
/Linux-v4.19/arch/sparc/kernel/ |
D | process_64.c |
  204  int this_cpu)                                            in __global_reg_self() argument
  210  rp = &global_cpu_snapshot[this_cpu].reg;                 in __global_reg_self()
  256  int this_cpu, cpu;                                       in arch_trigger_cpumask_backtrace() local
  263  this_cpu = raw_smp_processor_id();                       in arch_trigger_cpumask_backtrace()
  267  if (cpumask_test_cpu(this_cpu, mask) && !exclude_self)   in arch_trigger_cpumask_backtrace()
  268  __global_reg_self(tp, regs, this_cpu);                   in arch_trigger_cpumask_backtrace()
  275  if (exclude_self && cpu == this_cpu)                     in arch_trigger_cpumask_backtrace()
  284  (cpu == this_cpu ? '*' : ' '), cpu,                      in arch_trigger_cpumask_backtrace()
  321  static void __global_pmu_self(int this_cpu)              in __global_pmu_self() argument
  329  pp = &global_cpu_snapshot[this_cpu].pmu;                 in __global_pmu_self()
  [all …]
|
D | smp_64.c |
  650  int this_cpu, tot_cpus, prev_sent, i, rem;                      in hypervisor_xcall_deliver() local
  660  this_cpu = smp_processor_id();                                  in hypervisor_xcall_deliver()
  767  this_cpu, ecpuerror_id - 1);                                    in hypervisor_xcall_deliver()
  770  this_cpu, enocpu_id - 1);                                       in hypervisor_xcall_deliver()
  777  this_cpu, tot_cpus, tb->cpu_list_pa, tb->cpu_mondo_block_pa);   in hypervisor_xcall_deliver()
  783  this_cpu, first_cpu, (tot_retries + retries), tot_cpus);        in hypervisor_xcall_deliver()
  792  int this_cpu, i, cnt;                                           in xcall_deliver() local
  809  this_cpu = smp_processor_id();                                  in xcall_deliver()
  810  tb = &trap_block[this_cpu];                                     in xcall_deliver()
  823  if (i == this_cpu || !cpu_online(i))                            in xcall_deliver()
  [all …]
|
/Linux-v4.19/arch/alpha/kernel/ |
D | smp.c |
  526  int this_cpu = smp_processor_id();                       in handle_ipi() local
  527  unsigned long *pending_ipis = &ipi_data[this_cpu].bits;  in handle_ipi()
  532  this_cpu, *pending_ipis, regs->pc));                     in handle_ipi()
  559  this_cpu, which);                                        in handle_ipi()
  567  cpu_data[this_cpu].ipi_count++;                          in handle_ipi()
  658  int cpu, this_cpu = smp_processor_id();                  in flush_tlb_mm() local
  660  if (!cpu_online(cpu) || cpu == this_cpu)                 in flush_tlb_mm()
  707  int cpu, this_cpu = smp_processor_id();                  in flush_tlb_page() local
  709  if (!cpu_online(cpu) || cpu == this_cpu)                 in flush_tlb_page()
  763  int cpu, this_cpu = smp_processor_id();                  in flush_icache_user_range() local
  [all …]
|
/Linux-v4.19/tools/power/cpupower/utils/idle_monitor/ |
D | cpuidle_sysfs.c |
  147  int this_cpu;                                                          in cpuidle_register() local
  149  this_cpu = sched_getcpu();                                             in cpuidle_register()
  152  cpuidle_sysfs_monitor.hw_states_num = cpuidle_state_count(this_cpu);   in cpuidle_register()
  158  tmp = cpuidle_state_name(this_cpu, num);                               in cpuidle_register()
  167  tmp = cpuidle_state_desc(this_cpu, num);                               in cpuidle_register()
|
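cpuidle_sysfs.c is userspace (the cpupower tool), so it obtains the current CPU with glibc's sched_getcpu() and then reads that CPU's idle states from sysfs. A small standalone illustration of the same call; the sysfs path in the comment is the standard cpuidle layout rather than cpupower's own helper API:

    #define _GNU_SOURCE
    #include <sched.h>
    #include <stdio.h>

    int main(void)
    {
            int cpu = sched_getcpu();       /* CPU this thread runs on, -1 on error */

            if (cpu < 0) {
                    perror("sched_getcpu");
                    return 1;
            }
            /* cpupower then walks /sys/devices/system/cpu/cpu<N>/cpuidle/stateX/ */
            printf("running on CPU %d\n", cpu);
            return 0;
    }
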
/Linux-v4.19/kernel/trace/ |
D | trace_clock.c |
  97   int this_cpu;                       in trace_clock_global() local
  102  this_cpu = raw_smp_processor_id();  in trace_clock_global()
  103  now = sched_clock_cpu(this_cpu);    in trace_clock_global()
|
/Linux-v4.19/arch/x86/lib/ |
D | msr-smp.c |
  12   int this_cpu = raw_smp_processor_id();  in __rdmsr_on_cpu() local
  15   reg = per_cpu_ptr(rv->msrs, this_cpu);  in __rdmsr_on_cpu()
  26   int this_cpu = raw_smp_processor_id();  in __wrmsr_on_cpu() local
  29   reg = per_cpu_ptr(rv->msrs, this_cpu);  in __wrmsr_on_cpu()
  104  int this_cpu;                           in __rwmsr_on_cpus() local
  111  this_cpu = get_cpu();                   in __rwmsr_on_cpus()
  113  if (cpumask_test_cpu(this_cpu, mask))   in __rwmsr_on_cpus()
|
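__rdmsr_on_cpu() and __wrmsr_on_cpu() are run on the target CPU via smp_call_function_single(), so per_cpu_ptr(rv->msrs, this_cpu) is simply an explicit way for that CPU to reach its own slot in a __percpu allocation. The relationship between the two accessors, roughly (struct and field names here are invented):

    #include <linux/percpu.h>
    #include <linux/smp.h>
    #include <linux/types.h>

    struct msr_slot {
            u32 lo, hi;
    };

    /* Runs on the CPU whose slot it fills, e.g. via smp_call_function_single(). */
    static void fill_my_slot(struct msr_slot __percpu *slots)
    {
            int this_cpu = raw_smp_processor_id();
            struct msr_slot *reg;

            /* Both lines reach the same per-CPU memory from this context: */
            reg = per_cpu_ptr(slots, this_cpu);     /* explicit CPU index */
            reg = this_cpu_ptr(slots);              /* implicit "current CPU" */

            reg->lo = 0;
            reg->hi = 0;
    }
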
/Linux-v4.19/arch/x86/kernel/apic/ |
D | ipi.c |
  112  unsigned int this_cpu = smp_processor_id();  in default_send_IPI_mask_allbutself_phys() local
  120  if (query_cpu == this_cpu)                   in default_send_IPI_mask_allbutself_phys()
  163  unsigned int this_cpu = smp_processor_id();  in default_send_IPI_mask_allbutself_logical() local
  169  if (query_cpu == this_cpu)                   in default_send_IPI_mask_allbutself_logical()
|
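The *_allbutself senders above all share one idiom: walk the destination mask under disabled interrupts and skip the sending CPU. Stripped of the actual APIC programming, the loop looks roughly like this (send_ipi_one() is a stub standing in for the real vector write):

    #include <linux/cpumask.h>
    #include <linux/irqflags.h>
    #include <linux/smp.h>

    /* Stub: the real code writes the vector into the APIC ICR for 'cpu'. */
    static void send_ipi_one(unsigned int cpu, int vector)
    {
    }

    static void send_ipi_mask_allbutself(const struct cpumask *mask, int vector)
    {
            unsigned int this_cpu = smp_processor_id();
            unsigned int query_cpu;
            unsigned long flags;

            local_irq_save(flags);
            for_each_cpu(query_cpu, mask) {
                    if (query_cpu == this_cpu)
                            continue;       /* never IPI ourselves here */
                    send_ipi_one(query_cpu, vector);
            }
            local_irq_restore(flags);
    }
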
D | x2apic_phys.c |
  53  unsigned long this_cpu;                                       in __x2apic_send_IPI_mask() local
  60  this_cpu = smp_processor_id();                                in __x2apic_send_IPI_mask()
  62  if (apic_dest == APIC_DEST_ALLBUT && this_cpu == query_cpu)   in __x2apic_send_IPI_mask()
|
D | apic_numachip.c |
  130  unsigned int this_cpu = smp_processor_id();  in numachip_send_IPI_mask_allbutself() local
  134  if (cpu != this_cpu)                         in numachip_send_IPI_mask_allbutself()
  141  unsigned int this_cpu = smp_processor_id();  in numachip_send_IPI_allbutself() local
  145  if (cpu != this_cpu)                         in numachip_send_IPI_allbutself()
|
/Linux-v4.19/Documentation/arm/ |
D | vlocks.txt |
  35   bool vlock_trylock(int this_cpu)
  38   currently_voting[this_cpu] = 1;
  41   currently_voting[this_cpu] = 0;
  46   last_vote = this_cpu;
  47   currently_voting[this_cpu] = 0;
  56   if (last_vote == this_cpu)
  99   my_town = towns[(this_cpu >> 4) & 0xf];
  100  I_won = vlock_trylock(my_town, this_cpu & 0xf);
  103  my_state = states[(this_cpu >> 8) & 0xf];
  104  I_won = vlock_lock(my_state, this_cpu & 0xf));
  [all …]
|
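vlocks.txt describes a boot-time voting lock: a contender marks itself as voting, proposes itself in last_vote, withdraws its voting flag, waits until nobody is still voting, and has won only if its proposal survived. A plain-C rendering of the algorithm as described (the real implementation is assembly with explicit cache maintenance and a per-lock byte layout, all omitted here):

    #include <stdbool.h>

    #define NR_VOTERS   16
    #define VOTE_NOBODY (-1)

    static volatile int currently_voting[NR_VOTERS];
    static volatile int last_vote = VOTE_NOBODY;

    static bool vlock_trylock(int this_cpu)
    {
            int i;

            currently_voting[this_cpu] = 1;
            if (last_vote != VOTE_NOBODY) {         /* lock already claimed */
                    currently_voting[this_cpu] = 0;
                    return false;
            }

            last_vote = this_cpu;                   /* propose ourselves */
            currently_voting[this_cpu] = 0;

            for (i = 0; i < NR_VOTERS; i++)         /* wait for voting to settle */
                    while (currently_voting[i])
                            ;

            return last_vote == this_cpu;           /* won only if still last */
    }
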
/Linux-v4.19/arch/x86/kernel/cpu/ |
D | common.c |
  111  static const struct cpu_dev *this_cpu = &default_cpu;  variable
  477  if (!this_cpu)                                    in table_lookup_model()
  480  info = this_cpu->legacy_models;                   in table_lookup_model()
  628  if (this_cpu->legacy_cache_size)                  in cpu_detect_cache_sizes()
  629  l2size = this_cpu->legacy_cache_size(c, l2size);  in cpu_detect_cache_sizes()
  652  if (this_cpu->c_detect_tlb)                       in cpu_detect_tlb()
  653  this_cpu->c_detect_tlb(c);                        in cpu_detect_tlb()
  722  this_cpu = cpu_devs[i];                           in get_cpu_vendor()
  723  c->x86_vendor = this_cpu->c_x86_vendor;           in get_cpu_vendor()
  732  this_cpu = &default_cpu;                          in get_cpu_vendor()
  [all …]
|
/Linux-v4.19/kernel/ |
D | smp.c |
  275  int this_cpu;                                         in smp_call_function_single() local
  282  this_cpu = get_cpu();                                 in smp_call_function_single()
  290  WARN_ON_ONCE(cpu_online(this_cpu) && irqs_disabled()  in smp_call_function_single()
  407  int cpu, next_cpu, this_cpu = smp_processor_id();     in smp_call_function_many() local
  415  WARN_ON_ONCE(cpu_online(this_cpu) && irqs_disabled()  in smp_call_function_many()
  420  if (cpu == this_cpu)                                  in smp_call_function_many()
  429  if (next_cpu == this_cpu)                             in smp_call_function_many()
  441  __cpumask_clear_cpu(this_cpu, cfd->cpumask);          in smp_call_function_many()
|
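kernel/smp.c implements the generic cross-call API itself; smp_call_function_single() and smp_call_function_many() use this_cpu to run the function locally when the calling CPU is among the targets instead of sending an IPI to itself. Typical caller-side usage (the callback is illustrative):

    #include <linux/printk.h>
    #include <linux/smp.h>

    static void report_cpu(void *info)
    {
            /* Executes on the target CPU, in IPI (or local) context. */
            pr_info("hello from CPU%d\n", smp_processor_id());
    }

    static void poke_cpu(int cpu)
    {
            /*
             * wait=1: return only once report_cpu() has finished on 'cpu'.
             * If 'cpu' is the calling CPU, smp.c invokes the function
             * directly rather than sending an IPI to itself.
             */
            smp_call_function_single(cpu, report_cpu, NULL, 1);
    }
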
/Linux-v4.19/arch/arm/common/ |
D | bL_switcher.c |
  152  unsigned int mpidr, this_cpu, that_cpu;                          in bL_switch_to() local
  158  this_cpu = smp_processor_id();                                   in bL_switch_to()
  162  BUG_ON(cpu_logical_map(this_cpu) != ob_mpidr);                   in bL_switch_to()
  167  that_cpu = bL_switcher_cpu_pairing[this_cpu];                    in bL_switch_to()
  173  this_cpu, ob_mpidr, ib_mpidr);                                   in bL_switch_to()
  175  this_cpu = smp_processor_id();                                   in bL_switch_to()
  183  ipi_nr = register_ipi_completion(&inbound_alive, this_cpu);      in bL_switch_to()
  230  cpu_logical_map(this_cpu) = ib_mpidr;                            in bL_switch_to()
  240  pr_debug("after switch: CPU %d MPIDR %#x\n", this_cpu, mpidr);   in bL_switch_to()
|
/Linux-v4.19/Documentation/ |
D | this_cpu_ops.txt |
  2    this_cpu operations
  8    this_cpu operations are a way of optimizing access to per cpu
  14   this_cpu operations add a per cpu variable offset to the processor
  41   The main use of the this_cpu operations has been to optimize counter
  44   The following this_cpu() operations with implied preemption protection
  65   Inner working of this_cpu operations
  91   Consider the following this_cpu operation::
  115  after the this_cpu instruction is executed. In general this means that
  167  cpu variable. Most this_cpu operations take a cpu variable.
  207  if we do not make use of this_cpu ops later to manipulate fields::
  [all …]
|
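this_cpu_ops.txt documents the this_cpu_*() accessors themselves: read-modify-write operations on the current CPU's instance of a per-CPU variable that are safe against preemption and interrupts without an explicit preempt_disable() (a single instruction on x86, a disable/enable pair in the generic fallback). A minimal sketch in the spirit of that document's examples:

    #include <linux/cpumask.h>
    #include <linux/percpu.h>

    static DEFINE_PER_CPU(unsigned long, my_events);

    /* Safe from any context: the RMW hits this CPU's copy and cannot be
     * torn by preemption or a local interrupt. */
    static void note_event(void)
    {
            this_cpu_inc(my_events);        /* this_cpu_add(my_events, n) likewise */
    }

    /* Reading a total still means visiting every CPU's copy explicitly. */
    static unsigned long total_events(void)
    {
            unsigned long sum = 0;
            int cpu;

            for_each_possible_cpu(cpu)
                    sum += per_cpu(my_events, cpu);
            return sum;
    }
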
/Linux-v4.19/init/ |
D | calibrate.c |
  279  int this_cpu = smp_processor_id();             in calibrate_delay() local
  281  if (per_cpu(cpu_loops_per_jiffy, this_cpu)) {  in calibrate_delay()
  282  lpj = per_cpu(cpu_loops_per_jiffy, this_cpu);  in calibrate_delay()
  306  per_cpu(cpu_loops_per_jiffy, this_cpu) = lpj;  in calibrate_delay()
|
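calibrate.c caches the measured loops-per-jiffy value in a per-CPU variable so a CPU that was calibrated once (for example before being hotplugged off) can skip the measurement next time. The cache-or-compute pattern around per_cpu(), roughly (the variable and function names are illustrative):

    #include <linux/percpu.h>
    #include <linux/smp.h>

    static DEFINE_PER_CPU(unsigned long, cached_lpj);

    static unsigned long calibrate_this_cpu(void)
    {
            int this_cpu = smp_processor_id();      /* caller has preemption off */
            unsigned long lpj = per_cpu(cached_lpj, this_cpu);

            if (lpj)                                /* calibrated earlier */
                    return lpj;

            lpj = 4096;                             /* placeholder measurement */
            per_cpu(cached_lpj, this_cpu) = lpj;    /* reuse on next onlining */
            return lpj;
    }
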
/Linux-v4.19/arch/powerpc/kernel/ |
D | dbell.c |
  66  int this_cpu = get_cpu();                                 in doorbell_try_core_ipi() local
  69  if (cpumask_test_cpu(cpu, cpu_sibling_mask(this_cpu))) {  in doorbell_try_core_ipi()
|
/Linux-v4.19/arch/x86/kernel/cpu/mcheck/ |
D | therm_throt.c |
  148  unsigned int this_cpu = smp_processor_id();                         in therm_throt_process() local
  151  struct thermal_state *pstate = &per_cpu(thermal_state, this_cpu);   in therm_throt_process()
  188  this_cpu,                                                           in therm_throt_process()
  195  pr_info("CPU%d: %s temperature/speed normal\n", this_cpu,           in therm_throt_process()
  204  unsigned int this_cpu = smp_processor_id();                         in thresh_event_valid() local
  205  struct thermal_state *pstate = &per_cpu(thermal_state, this_cpu);   in thresh_event_valid()
|
/Linux-v4.19/drivers/cpuidle/ |
D | cpuidle-ux500.c |
  29  int this_cpu = smp_processor_id();           in ux500_enter_idle() local
  52  if (!prcmu_is_cpu_in_wfi(this_cpu ? 0 : 1))  in ux500_enter_idle()
|
/Linux-v4.19/arch/arm/kernel/ |
D | smp_tlb.c |
  169  int this_cpu;                                  in broadcast_tlb_mm_a15_erratum() local
  175  this_cpu = get_cpu();                          in broadcast_tlb_mm_a15_erratum()
  176  a15_erratum_get_cpumask(this_cpu, mm, &mask);  in broadcast_tlb_mm_a15_erratum()
|
/Linux-v4.19/arch/s390/kernel/ |
D | machine_kexec.c |
  109  int this_cpu, cpu;                         in __machine_kdump() local
  113  this_cpu = smp_find_processor_id(stap());  in __machine_kdump()
  115  if (cpu == this_cpu)                       in __machine_kdump()
|
/Linux-v4.19/arch/arm/include/asm/ |
D | mmu_context.h |
  40  void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
  43  static inline void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,  in a15_erratum_get_cpumask() argument
|