Lines matching full:pl (identifier search hits; all lines shown are from lib/flex_proportions.c)
90 int fprop_local_init_single(struct fprop_local_single *pl) in fprop_local_init_single() argument
92 pl->events = 0; in fprop_local_init_single()
93 pl->period = 0; in fprop_local_init_single()
94 raw_spin_lock_init(&pl->lock); in fprop_local_init_single()
98 void fprop_local_destroy_single(struct fprop_local_single *pl) in fprop_local_destroy_single() argument
103 struct fprop_local_single *pl) in fprop_reflect_period_single() argument
109 if (pl->period == period) in fprop_reflect_period_single()
111 raw_spin_lock_irqsave(&pl->lock, flags); in fprop_reflect_period_single()
112 /* Someone updated pl->period while we were spinning? */ in fprop_reflect_period_single()
113 if (pl->period >= period) { in fprop_reflect_period_single()
114 raw_spin_unlock_irqrestore(&pl->lock, flags); in fprop_reflect_period_single()
118 if (period - pl->period < BITS_PER_LONG) in fprop_reflect_period_single()
119 pl->events >>= period - pl->period; in fprop_reflect_period_single()
121 pl->events = 0; in fprop_reflect_period_single()
122 pl->period = period; in fprop_reflect_period_single()
123 raw_spin_unlock_irqrestore(&pl->lock, flags); in fprop_reflect_period_single()
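The hits in fprop_reflect_period_single() above show the aging rule: when the local period lags the global one, pl->events is right-shifted once per missed period (halved each time), and zeroed outright once the lag reaches BITS_PER_LONG. Below is a minimal userspace model of just that rule; age_events() and MODEL_BITS_PER_LONG are made-up names, and the locking/fast-path checks are left out.

#include <stdio.h>

#define MODEL_BITS_PER_LONG 64          /* stand-in for the kernel's BITS_PER_LONG */

/* Toy model: age a local event count from old_period up to new_period. */
static unsigned long age_events(unsigned long events,
                                unsigned int old_period,
                                unsigned int new_period)
{
        unsigned int missed = new_period - old_period;

        if (missed < MODEL_BITS_PER_LONG)
                return events >> missed;        /* halve once per missed period */
        return 0;                               /* lag so large the count fully decays */
}

int main(void)
{
        /* 1024 events, 3 periods behind the global counter: 1024 / 2^3 */
        printf("%lu\n", age_events(1024, 7, 10));       /* prints 128 */
        /* 70 missed periods >= 64: fully decayed */
        printf("%lu\n", age_events(1024, 0, 70));       /* prints 0 */
        return 0;
}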
126 /* Event of type pl happened */
127 void __fprop_inc_single(struct fprop_global *p, struct fprop_local_single *pl) in __fprop_inc_single() argument
129 fprop_reflect_period_single(p, pl); in __fprop_inc_single()
130 pl->events++; in __fprop_inc_single()
134 /* Return fraction of events of type pl */
136 struct fprop_local_single *pl, in fprop_fraction_single() argument
144 fprop_reflect_period_single(p, pl); in fprop_fraction_single()
145 num = pl->events; in fprop_fraction_single()
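fprop_fraction_single() reports the aged local count as a numerator over the global event count. The listing only shows the numerator read, so the following is a hedged userspace sketch of the final clamping step, assuming the denominator comes from the (approximate) global counter and the reported fraction must stay positive and no greater than 1; model_fraction() is a made-up name.

#include <stdio.h>

/*
 * Toy model of the clamp applied when reporting a proportion:
 * keep the denominator positive and the fraction <= 1 even if the
 * approximate counters are momentarily inconsistent.
 */
static void model_fraction(long long num, long long den,
                           unsigned long *numerator, unsigned long *denominator)
{
        if (den <= 0)
                den = 1;        /* never hand out a zero denominator */
        if (den < num)
                num = den;      /* never report more than 100% */
        *numerator = (unsigned long)num;
        *denominator = (unsigned long)den;
}

int main(void)
{
        unsigned long n, d;

        model_fraction(300, 1000, &n, &d);
        printf("%lu/%lu\n", n, d);      /* 300/1000, i.e. 30% */

        model_fraction(50, 0, &n, &d);  /* degenerate denominator */
        printf("%lu/%lu\n", n, d);      /* clamped to 1/1 */
        return 0;
}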
168 int fprop_local_init_percpu(struct fprop_local_percpu *pl, gfp_t gfp) in fprop_local_init_percpu() argument
172 err = percpu_counter_init(&pl->events, 0, gfp); in fprop_local_init_percpu()
175 pl->period = 0; in fprop_local_init_percpu()
176 raw_spin_lock_init(&pl->lock); in fprop_local_init_percpu()
180 void fprop_local_destroy_percpu(struct fprop_local_percpu *pl) in fprop_local_destroy_percpu() argument
182 percpu_counter_destroy(&pl->events); in fprop_local_destroy_percpu()
186 struct fprop_local_percpu *pl) in fprop_reflect_period_percpu() argument
192 if (pl->period == period) in fprop_reflect_period_percpu()
194 raw_spin_lock_irqsave(&pl->lock, flags); in fprop_reflect_period_percpu()
195 /* Someone updated pl->period while we were spinning? */ in fprop_reflect_period_percpu()
196 if (pl->period >= period) { in fprop_reflect_period_percpu()
197 raw_spin_unlock_irqrestore(&pl->lock, flags); in fprop_reflect_period_percpu()
201 if (period - pl->period < BITS_PER_LONG) { in fprop_reflect_period_percpu()
202 s64 val = percpu_counter_read(&pl->events); in fprop_reflect_period_percpu()
205 val = percpu_counter_sum(&pl->events); in fprop_reflect_period_percpu()
207 percpu_counter_add_batch(&pl->events, in fprop_reflect_period_percpu()
208 -val + (val >> (period-pl->period)), PROP_BATCH); in fprop_reflect_period_percpu()
210 percpu_counter_set(&pl->events, 0); in fprop_reflect_period_percpu()
211 pl->period = period; in fprop_reflect_period_percpu()
212 raw_spin_unlock_irqrestore(&pl->lock, flags); in fprop_reflect_period_percpu()
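The percpu variant cannot shift a percpu counter in place, so fprop_reflect_period_percpu() reads the current value val and adds back -val + (val >> missed_periods), leaving the counter holding the shifted value. A quick userspace check of that identity, ignoring PROP_BATCH batching and the fallback to an exact percpu_counter_sum():

#include <stdio.h>
#include <assert.h>

int main(void)
{
        long long val = 9000;           /* made-up current counter value     */
        unsigned int missed = 4;        /* periods the local counter lags by */
        long long counter = val;

        /* "counter += -val + (val >> missed)" behaves like "counter >>= missed" */
        counter += -val + (val >> missed);
        assert(counter == val >> missed);
        printf("%lld\n", counter);      /* 9000 / 16 = 562 */
        return 0;
}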
215 /* Event of type pl happened */
216 void __fprop_add_percpu(struct fprop_global *p, struct fprop_local_percpu *pl, in __fprop_add_percpu() argument
219 fprop_reflect_period_percpu(p, pl); in __fprop_add_percpu()
220 percpu_counter_add_batch(&pl->events, nr, PROP_BATCH); in __fprop_add_percpu()
225 struct fprop_local_percpu *pl, in fprop_fraction_percpu() argument
233 fprop_reflect_period_percpu(p, pl); in fprop_fraction_percpu()
234 num = percpu_counter_read_positive(&pl->events); in fprop_fraction_percpu()
257 struct fprop_local_percpu *pl, int max_frac, long nr) in __fprop_add_percpu_max() argument
263 fprop_fraction_percpu(p, pl, &numerator, &denominator); in __fprop_add_percpu_max()
277 __fprop_add_percpu(p, pl, nr); in __fprop_add_percpu_max()
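__fprop_add_percpu_max() consults the current fraction before adding so that the event type's share never exceeds max_frac/FPROP_FRAC_BASE. The limiting arithmetic itself is not in the hits above, so this is only a userspace sketch of the idea, assuming each local event also counts toward the global denominator and using floor division rather than the kernel's saturating rounding; clamp_add() and MODEL_FRAC_BASE are made-up names.

#include <stdio.h>

#define MODEL_FRAC_BASE 1024ULL         /* stand-in for FPROP_FRAC_BASE */

/*
 * Toy model: how many of 'nr' new events may be added before
 * (num + x) / (den + x) would exceed max_frac / MODEL_FRAC_BASE?
 */
static long long clamp_add(unsigned long long num, unsigned long long den,
                           unsigned long long max_frac, long long nr)
{
        /* headroom: den*max_frac - num*BASE, in BASE-scaled units */
        long long tmp = (long long)(den * max_frac) -
                        (long long)(num * MODEL_FRAC_BASE);

        if (tmp < 0)
                return 0;                               /* already over the limit */
        if (tmp < nr * (long long)(MODEL_FRAC_BASE - max_frac))
                return tmp / (long long)(MODEL_FRAC_BASE - max_frac);  /* partial add */
        return nr;                                      /* limit not reached */
}

int main(void)
{
        /* current share 100/1000 (~10%), limit 512/1024 (50%), want to add 5000 */
        printf("%lld\n", clamp_add(100, 1000, 512, 5000));      /* capped at 800 */
        return 0;
}

Adding 800 events in that example brings the share to 900/1800, exactly the 50% limit, which is why the cap lands there.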