Lines matching "cache" (full-word search) in mm/swap_slots.c
3 * Manage cache of swap slots to be used for and returned from
25 * The swap slots cache is protected by a mutex instead of
42 /* Serialize swap slots cache enable/disable operations */
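Every match below touches the per-CPU struct swap_slots_cache. For orientation, a sketch of that structure following its declaration in include/linux/swap_slots.h; the field comments are assumptions restating the documentation at lines 25 and 152-153:

	struct swap_slots_cache {
		bool		lock_initialized;
		struct mutex	alloc_lock;	/* protects slots, cur, nr */
		swp_entry_t	*slots;		/* slots handed out by get_swap_page() */
		int		nr;		/* unallocated slots remaining */
		int		cur;		/* index of next slot to hand out */
		spinlock_t	free_lock;	/* protects slots_ret, n_ret */
		swp_entry_t	*slots_ret;	/* slots returned by free_swap_slot() */
		int		n_ret;		/* returned slots batched so far */
	};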
108 /* if global pool of slot caches too low, deactivate cache */ in check_cache_active()
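Line 108 is the only match inside check_cache_active(). A minimal sketch of the surrounding logic, assuming the upstream threshold macros and helper names, none of which matched this search:

	static bool check_cache_active(void)
	{
		long pages;

		if (!swap_slot_cache_enabled)
			return false;

		pages = get_nr_swap_pages();
		if (!swap_slot_cache_active) {
			if (pages > num_online_cpus() *
			    THRESHOLD_ACTIVATE_SWAP_SLOTS_CACHE * SWAP_SLOTS_CACHE_SIZE)
				reactivate_swap_slots_cache();
			goto out;
		}

		/* if global pool of slot caches too low, deactivate cache */
		if (pages < num_online_cpus() * THRESHOLD_DEACTIVATE_SWAP_SLOTS_CACHE *
		    SWAP_SLOTS_CACHE_SIZE)
			deactivate_swap_slots_cache();
	out:
		return swap_slot_cache_active;
	}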
117 struct swap_slots_cache *cache; in alloc_swap_slot_cache() local
138 cache = &per_cpu(swp_slots, cpu); in alloc_swap_slot_cache()
139 if (cache->slots || cache->slots_ret) in alloc_swap_slot_cache()
140 /* cache already allocated */ in alloc_swap_slot_cache()
142 if (!cache->lock_initialized) { in alloc_swap_slot_cache()
143 mutex_init(&cache->alloc_lock); in alloc_swap_slot_cache()
144 spin_lock_init(&cache->free_lock); in alloc_swap_slot_cache()
145 cache->lock_initialized = true; in alloc_swap_slot_cache()
147 cache->nr = 0; in alloc_swap_slot_cache()
148 cache->cur = 0; in alloc_swap_slot_cache()
149 cache->n_ret = 0; in alloc_swap_slot_cache()
152 * !cache->slots or !cache->slots_ret to know if it is safe to acquire in alloc_swap_slot_cache()
153 * the corresponding lock and use the cache. Memory barrier below in alloc_swap_slot_cache()
157 cache->slots = slots; in alloc_swap_slot_cache()
159 cache->slots_ret = slots_ret; in alloc_swap_slot_cache()
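Lines 138-159 trace the initialize-then-publish sequence of alloc_swap_slot_cache(). Stitched together as a sketch; the earlier kvcalloc() allocations of slots and slots_ret, the swap_slots_cache_mutex serialization, and the out label did not match and are assumed from context:

	cache = &per_cpu(swp_slots, cpu);
	if (cache->slots || cache->slots_ret)
		goto out;		/* cache already allocated */

	if (!cache->lock_initialized) {
		mutex_init(&cache->alloc_lock);
		spin_lock_init(&cache->free_lock);
		cache->lock_initialized = true;
	}
	cache->nr = 0;
	cache->cur = 0;
	cache->n_ret = 0;
	/*
	 * Readers test !cache->slots / !cache->slots_ret before taking the
	 * corresponding lock; the barrier orders the initialization above
	 * against the publishing stores below.
	 */
	mb();
	cache->slots = slots;
	cache->slots_ret = slots_ret;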
173 struct swap_slots_cache *cache; in drain_slots_cache_cpu() local
176 cache = &per_cpu(swp_slots, cpu); in drain_slots_cache_cpu()
177 if ((type & SLOTS_CACHE) && cache->slots) { in drain_slots_cache_cpu()
178 mutex_lock(&cache->alloc_lock); in drain_slots_cache_cpu()
179 swapcache_free_entries(cache->slots + cache->cur, cache->nr); in drain_slots_cache_cpu()
180 cache->cur = 0; in drain_slots_cache_cpu()
181 cache->nr = 0; in drain_slots_cache_cpu()
182 if (free_slots && cache->slots) { in drain_slots_cache_cpu()
183 kvfree(cache->slots); in drain_slots_cache_cpu()
184 cache->slots = NULL; in drain_slots_cache_cpu()
186 mutex_unlock(&cache->alloc_lock); in drain_slots_cache_cpu()
188 if ((type & SLOTS_CACHE_RET) && cache->slots_ret) { in drain_slots_cache_cpu()
189 spin_lock_irq(&cache->free_lock); in drain_slots_cache_cpu()
190 swapcache_free_entries(cache->slots_ret, cache->n_ret); in drain_slots_cache_cpu()
191 cache->n_ret = 0; in drain_slots_cache_cpu()
192 if (free_slots && cache->slots_ret) { in drain_slots_cache_cpu()
193 slots = cache->slots_ret; in drain_slots_cache_cpu()
194 cache->slots_ret = NULL; in drain_slots_cache_cpu()
196 spin_unlock_irq(&cache->free_lock); in drain_slots_cache_cpu()
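Lines 193-194 only capture the pointer handoff out of the cache; the actual kvfree() happens after the IRQ-disabled spinlock is dropped, since kvfree() may sleep when the buffer was vmalloc'ed. A sketch of the branch tail, assuming the local slots pointer starts out NULL:

		spin_unlock_irq(&cache->free_lock);
		kvfree(slots);		/* kvfree(NULL) is a no-op */
	}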
209 * left over slots are in cache when we remove in __drain_swap_slots_cache()
211 * 2) disabling of swap slot cache, when we run low in __drain_swap_slots_cache()
226 * fill any swap slots in slots cache of such cpu. in __drain_swap_slots_cache()
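The comment fragments above enumerate when __drain_swap_slots_cache() runs (CPU removal with slots left in the cache, and disabling when swap runs low) and the CPU-online race it tolerates. The loop itself is short; a sketch, assuming callers hold the enable/disable mutex from line 42 so that a CPU coming online mid-drain sees the cache disabled and does not refill it:

	static void __drain_swap_slots_cache(unsigned int type)
	{
		unsigned int cpu;

		for_each_online_cpu(cpu)
			drain_slots_cache_cpu(cpu, type, false);
	}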
253 if (WARN_ONCE(ret < 0, "Cache allocation failed (%s), operating " in enable_swap_slots_cache()
254 "without swap slots cache.\n", __func__)) in enable_swap_slots_cache()
264 /* called with swap slot cache's alloc lock held */
265 static int refill_swap_slots_cache(struct swap_slots_cache *cache) in refill_swap_slots_cache() argument
267 if (!use_swap_slot_cache || cache->nr) in refill_swap_slots_cache()
270 cache->cur = 0; in refill_swap_slots_cache()
272 cache->nr = get_swap_pages(SWAP_SLOTS_CACHE_SIZE, in refill_swap_slots_cache()
273 cache->slots, 1); in refill_swap_slots_cache()
275 return cache->nr; in refill_swap_slots_cache()
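Lines 265-275 are nearly the whole refill helper. Stitched together as a sketch, with the swap_slot_cache_active guard between lines 270 and 272 assumed from context; the final argument to get_swap_pages() is the entry size, 1 for order-0 pages:

	/* called with swap slot cache's alloc lock held */
	static int refill_swap_slots_cache(struct swap_slots_cache *cache)
	{
		if (!use_swap_slot_cache || cache->nr)
			return 0;

		cache->cur = 0;
		if (swap_slot_cache_active)
			cache->nr = get_swap_pages(SWAP_SLOTS_CACHE_SIZE,
						   cache->slots, 1);

		return cache->nr;
	}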
280 struct swap_slots_cache *cache; in free_swap_slot() local
282 cache = raw_cpu_ptr(&swp_slots); in free_swap_slot()
283 if (likely(use_swap_slot_cache && cache->slots_ret)) { in free_swap_slot()
284 spin_lock_irq(&cache->free_lock); in free_swap_slot()
285 /* Swap slots cache may be deactivated before acquiring lock */ in free_swap_slot()
286 if (!use_swap_slot_cache || !cache->slots_ret) { in free_swap_slot()
287 spin_unlock_irq(&cache->free_lock); in free_swap_slot()
290 if (cache->n_ret >= SWAP_SLOTS_CACHE_SIZE) { in free_swap_slot()
297 swapcache_free_entries(cache->slots_ret, cache->n_ret); in free_swap_slot()
298 cache->n_ret = 0; in free_swap_slot()
300 cache->slots_ret[cache->n_ret++] = entry; in free_swap_slot()
301 spin_unlock_irq(&cache->free_lock); in free_swap_slot()
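The matches cover only the batched path of free_swap_slot(): returned entries accumulate in slots_ret and are flushed to the global pool once SWAP_SLOTS_CACHE_SIZE of them pile up. When the cache is disabled or not yet allocated, the entry goes straight to the global pool instead; a sketch of that fallback, assuming upstream's direct_free label:

	} else {
	direct_free:
		swapcache_free_entries(&entry, 1);
	}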
313 struct swap_slots_cache *cache; in get_swap_page() local
327 * mutex cache->alloc_lock. in get_swap_page()
329 * The alloc path here does not touch cache->slots_ret in get_swap_page()
330 * so cache->free_lock is not taken. in get_swap_page()
332 cache = raw_cpu_ptr(&swp_slots); in get_swap_page()
334 if (likely(check_cache_active() && cache->slots)) { in get_swap_page()
335 mutex_lock(&cache->alloc_lock); in get_swap_page()
336 if (cache->slots) { in get_swap_page()
338 if (cache->nr) { in get_swap_page()
339 pentry = &cache->slots[cache->cur++]; in get_swap_page()
342 cache->nr--; in get_swap_page()
344 if (refill_swap_slots_cache(cache)) in get_swap_page()
348 mutex_unlock(&cache->alloc_lock); in get_swap_page()
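After the mutex is released, a nonzero entry.val means the per-CPU cache satisfied the request; otherwise get_swap_page() falls through to the global allocator. A sketch of that tail, assuming the upstream out label and the single-entry, order-0 call (some kernel versions add memcg charging at out before returning):

		mutex_unlock(&cache->alloc_lock);
		if (entry.val)
			goto out;
	}

	get_swap_pages(1, &entry, 1);
	out:
		return entry;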