Lines matching "cache" in mm/swap_slots.c

 * Manage cache of swap slots to be used for and returned from
 * swap.

 * The swap slots cache is protected by a mutex instead of
 * a spin lock as when we search for slots with scan_swap_map,
 * we can possibly sleep.

/* Serialize swap slots cache enable/disable operations */
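For orientation, the per-CPU cache object these lines manipulate pairs a mutex-protected allocation side with a spinlock-protected return side. A sketch of its layout, following the declaration in include/linux/swap_slots.h (field comments added here; exact layout can differ across kernel versions):

struct swap_slots_cache {
        bool            lock_initialized;
        struct mutex    alloc_lock;     /* protects slots, cur, nr */
        swp_entry_t     *slots;         /* entries ready to hand out */
        int             nr;             /* cached entries remaining */
        int             cur;            /* index of next entry to hand out */
        spinlock_t      free_lock;      /* protects slots_ret, n_ret */
        swp_entry_t     *slots_ret;     /* entries waiting to go back to the pool */
        int             n_ret;          /* count of entries in slots_ret */
};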
In check_cache_active():

        /* if global pool of slot caches too low, deactivate cache */
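In context, a sketch of the whole helper this comment sits in, assuming the THRESHOLD_* constants and helper names of the mainline file: the cache is reactivated when free swap is plentiful and deactivated when it runs low.

static bool check_cache_active(void)
{
        long pages;

        if (!swap_slot_cache_enabled)
                return false;

        pages = get_nr_swap_pages();
        if (!swap_slot_cache_active) {
                if (pages > num_online_cpus() *
                    THRESHOLD_ACTIVATE_SWAP_SLOTS_CACHE * SWAP_SLOTS_CACHE_SIZE)
                        reactivate_swap_slots_cache();
                goto out;
        }

        /* if global pool of slot caches too low, deactivate cache */
        if (pages < num_online_cpus() * THRESHOLD_DEACTIVATE_SWAP_SLOTS_CACHE *
            SWAP_SLOTS_CACHE_SIZE)
                deactivate_swap_slots_cache();
out:
        return swap_slot_cache_active;
}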
In alloc_swap_slot_cache():

        struct swap_slots_cache *cache;
        ...
        cache = &per_cpu(swp_slots, cpu);
        if (cache->slots || cache->slots_ret) {
                /* cache already allocated */
                ...
        }
        if (!cache->lock_initialized) {
                mutex_init(&cache->alloc_lock);
                spin_lock_init(&cache->free_lock);
                cache->lock_initialized = true;
        }
        cache->nr = 0;
        cache->cur = 0;
        cache->n_ret = 0;
        /*
         * We initialized alloc_lock and free_lock earlier.  We use
         * !cache->slots or !cache->slots_ret to know if it is safe to acquire
         * the corresponding lock and use the cache.  Memory barrier below
         * ensures the assumption.
         */
        mb();
        cache->slots = slots;
        cache->slots_ret = slots_ret;
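The barrier pairs with lock-free pointer checks on the reader side; a minimal sketch of that consumer pattern (mirroring what free_swap_slot() below does), where seeing a non-NULL pointer implies the lock behind it has been initialized:

        /* reader: no lock held yet */
        if (cache->slots_ret) {         /* published => free_lock is valid */
                spin_lock_irq(&cache->free_lock);
                /* re-check under the lock: the cache may have been drained */
                if (cache->slots_ret) {
                        /* ... safe to use the return cache ... */
                }
                spin_unlock_irq(&cache->free_lock);
        }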
In drain_slots_cache_cpu():

        struct swap_slots_cache *cache;
        swp_entry_t *slots = NULL;

        cache = &per_cpu(swp_slots, cpu);
        if ((type & SLOTS_CACHE) && cache->slots) {
                mutex_lock(&cache->alloc_lock);
                swapcache_free_entries(cache->slots + cache->cur, cache->nr);
                cache->cur = 0;
                cache->nr = 0;
                if (free_slots && cache->slots) {
                        kvfree(cache->slots);
                        cache->slots = NULL;
                }
                mutex_unlock(&cache->alloc_lock);
        }
        if ((type & SLOTS_CACHE_RET) && cache->slots_ret) {
                spin_lock_irq(&cache->free_lock);
                swapcache_free_entries(cache->slots_ret, cache->n_ret);
                cache->n_ret = 0;
                if (free_slots && cache->slots_ret) {
                        slots = cache->slots_ret;
                        cache->slots_ret = NULL;
                }
                spin_unlock_irq(&cache->free_lock);
                if (slots)
                        kvfree(slots);
        }
In __drain_swap_slots_cache():

        /*
         * This function is called during
         *      1) swapoff, when we have to make sure no
         *         left over slots are in cache when we remove
         *         a swap device;
         *      2) disabling of swap slot cache, when we run low
         *         on swap slots when allocating memory and need
         *         to return swap slots to global pool.
         * ...
         * fill any swap slots in slots cache of such cpu.
         */
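A simplified sketch of the loop these comments describe, assuming the standard for_each_online_cpu() iterator (the real function also handles the re-enable bookkeeping mentioned above):

static void __drain_swap_slots_cache(unsigned int type)
{
        unsigned int cpu;

        /*
         * Cannot take the cpu hotplug lock here (see the comment above),
         * so a cpu coming online during this loop may be missed; it is
         * dealt with after the drain.
         */
        for_each_online_cpu(cpu)
                drain_slots_cache_cpu(cpu, type, false);
}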
In enable_swap_slots_cache():

        if (WARN_ONCE(ret < 0, "Cache allocation failed (%s), operating "
                      "without swap slots cache.\n", __func__))
In refill_swap_slots_cache():

/* called with swap slot cache's alloc lock held */
static int refill_swap_slots_cache(struct swap_slots_cache *cache)
{
        if (!use_swap_slot_cache || cache->nr)
                return 0;

        cache->cur = 0;
        if (swap_slot_cache_active)
                cache->nr = get_swap_pages(SWAP_SLOTS_CACHE_SIZE,
                                           cache->slots, 1);

        return cache->nr;
}
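The "alloc lock held" contract in the comment is enforced only by convention; a one-line lockdep annotation (not in the original, shown as a hypothetical hardening) would make it machine-checked on debug kernels:

static int refill_swap_slots_cache(struct swap_slots_cache *cache)
{
        lockdep_assert_held(&cache->alloc_lock);   /* hypothetical: verify the contract */
        ...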
In free_swap_slot():

        struct swap_slots_cache *cache;

        cache = raw_cpu_ptr(&swp_slots);
        if (likely(use_swap_slot_cache && cache->slots_ret)) {
                spin_lock_irq(&cache->free_lock);
                /* Swap slots cache may be deactivated before acquiring lock */
                if (!use_swap_slot_cache || !cache->slots_ret) {
                        spin_unlock_irq(&cache->free_lock);
                        goto direct_free;
                }
                if (cache->n_ret >= SWAP_SLOTS_CACHE_SIZE) {
                        /* return cache full: flush the whole batch to the global pool */
                        swapcache_free_entries(cache->slots_ret, cache->n_ret);
                        cache->n_ret = 0;
                }
                cache->slots_ret[cache->n_ret++] = entry;
                spin_unlock_irq(&cache->free_lock);
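The match listing ends at the unlock; a sketch of the remainder, consistent with the goto target reconstructed above: when the cache is disabled or not yet allocated, the single entry goes straight back to the global pool.

        } else {
direct_free:
                swapcache_free_entries(&entry, 1);
        }

        return 0;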
In get_swap_page():

        struct swap_slots_cache *cache;
        ...
        /*
         * Preemption is allowed here, because we may sleep
         * in refill_swap_slots_cache().  But it is safe, because
         * accesses to the per-CPU data structure are protected by the
         * mutex cache->alloc_lock.
         *
         * The alloc path here does not touch cache->slots_ret
         * so cache->free_lock is not taken.
         */
        cache = raw_cpu_ptr(&swp_slots);

        if (likely(check_cache_active() && cache->slots)) {
                mutex_lock(&cache->alloc_lock);
                if (cache->slots) {
repeat:
                        if (cache->nr) {
                                entry = cache->slots[cache->cur];
                                cache->slots[cache->cur++].val = 0;
                                cache->nr--;
                        } else if (refill_swap_slots_cache(cache)) {
                                goto repeat;
                        }
                }
                mutex_unlock(&cache->alloc_lock);
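And a sketch of the function's tail after that unlock: take the fast-path entry if there is one, otherwise fall back to a single-slot allocation from the global pool (simplified; the mainline function also handles memcg charging before returning the swp_entry_t).

                if (entry.val)
                        goto out;
        }

        /* slow path: cache inactive, or empty even after a refill attempt */
        get_swap_pages(1, &entry, 1);
out:
        return entry;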