Lines matching full:entry in fs/mbcache.c
21 * identifies a cache entry.
24 * and a special "delete entry with given key-value pair" operation. Fixed
63 * mb_cache_entry_create - create entry in cache
64 * @cache: cache where the entry should be created
65 * @mask: gfp mask with which the entry should be allocated
66 * @key: key of the entry
67 * @value: value of the entry
68 * @reusable: is the entry reusable by others?
70 * Creates an entry in @cache with key @key and value @value. The function
71 * returns -EBUSY if an entry with the same key and value already exists in the cache.
77 struct mb_cache_entry *entry, *dup; in mb_cache_entry_create() local
88 entry = kmem_cache_alloc(mb_entry_cache, mask); in mb_cache_entry_create()
89 if (!entry) in mb_cache_entry_create()
92 INIT_LIST_HEAD(&entry->e_list); in mb_cache_entry_create()
94 atomic_set(&entry->e_refcnt, 1); in mb_cache_entry_create()
95 entry->e_key = key; in mb_cache_entry_create()
96 entry->e_value = value; in mb_cache_entry_create()
97 entry->e_reusable = reusable; in mb_cache_entry_create()
98 entry->e_referenced = 0; in mb_cache_entry_create()
104 kmem_cache_free(mb_entry_cache, entry); in mb_cache_entry_create()
108 hlist_bl_add_head(&entry->e_hash_list, head); in mb_cache_entry_create()
112 list_add_tail(&entry->e_list, &cache->c_list); in mb_cache_entry_create()
114 atomic_inc(&entry->e_refcnt); in mb_cache_entry_create()
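The matches above trace mb_cache_entry_create() from allocation through hash
and LRU insertion. A minimal usage sketch, loosely modeled on how filesystem
xattr code drives this API; the cache pointer, hash, and block number are
illustrative assumptions, not taken from the listing:

#include <linux/mbcache.h>

/* Hedged sketch: cache a (hash -> block number) mapping. The cache is
 * best effort, so allocation failures are simply ignored. */
static void example_cache_block(struct mb_cache *cache, u32 hash,
				u64 blocknr, bool reusable)
{
	int error = mb_cache_entry_create(cache, GFP_NOFS, hash,
					  blocknr, reusable);

	if (error == -EBUSY) {
		/* The same key/value pair is already cached; fine. */
		return;
	}
	/* Other errors (e.g. -ENOMEM) are non-fatal for a cache. */
}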
122 void __mb_cache_entry_free(struct mb_cache_entry *entry) in __mb_cache_entry_free() argument
124 kmem_cache_free(mb_entry_cache, entry); in __mb_cache_entry_free()
129 struct mb_cache_entry *entry, in __entry_find() argument
132 struct mb_cache_entry *old_entry = entry; in __entry_find()
138 if (entry && !hlist_bl_unhashed(&entry->e_hash_list)) in __entry_find()
139 node = entry->e_hash_list.next; in __entry_find()
143 entry = hlist_bl_entry(node, struct mb_cache_entry, in __entry_find()
145 if (entry->e_key == key && entry->e_reusable) { in __entry_find()
146 atomic_inc(&entry->e_refcnt); in __entry_find()
151 entry = NULL; in __entry_find()
157 return entry; in __entry_find()
161 * mb_cache_entry_find_first - find the first reusable entry with the given key
165 * Searches @cache for a reusable entry with key @key. Grabs a reference to
166 * the first reusable entry found and returns the entry.
176 * mb_cache_entry_find_next - find next reusable entry with the same key
178 * @entry: entry to start search from
180 * Finds the next reusable entry in the hash chain with the same key as @entry.
181 * If @entry is unhashed (which can happen when deletion of the entry races
182 * with the search), finds the first reusable entry in the hash chain. The
183 * function drops its reference to @entry and returns with a reference to the found entry.
186 struct mb_cache_entry *entry) in mb_cache_entry_find_next() argument
188 return __entry_find(cache, entry, entry->e_key); in mb_cache_entry_find_next()
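Together, mb_cache_entry_find_first() and mb_cache_entry_find_next() support
the following iteration pattern (a hedged sketch; example_candidate_ok() is a
hypothetical stand-in for whatever verification the caller performs, such as
comparing block contents):

/* Hypothetical predicate standing in for caller-specific checks. */
static bool example_candidate_ok(u64 value)
{
	return true;	/* placeholder */
}

/* Walk every reusable entry with key @hash. The find functions return a
 * referenced entry; mb_cache_entry_find_next() drops the previous
 * reference, so we only put an entry ourselves when stopping early. */
static u64 example_find_reusable_block(struct mb_cache *cache, u32 hash)
{
	struct mb_cache_entry *ce;
	u64 blocknr = 0;	/* 0 doubles as "not found" in this sketch */

	for (ce = mb_cache_entry_find_first(cache, hash); ce;
	     ce = mb_cache_entry_find_next(cache, ce)) {
		if (example_candidate_ok(ce->e_value)) {
			blocknr = ce->e_value;
			mb_cache_entry_put(cache, ce);
			break;
		}
	}
	return blocknr;
}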
193 * mb_cache_entry_get - get a cache entry by key and value
203 struct mb_cache_entry *entry; in mb_cache_entry_get() local
207 hlist_bl_for_each_entry(entry, node, head, e_hash_list) { in mb_cache_entry_get()
208 if (entry->e_key == key && entry->e_value == value) { in mb_cache_entry_get()
209 atomic_inc(&entry->e_refcnt); in mb_cache_entry_get()
213 entry = NULL; in mb_cache_entry_get()
216 return entry; in mb_cache_entry_get()
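A hedged sketch of the lookup-by-pair path, for example to test whether a
specific block is still cached (all names are assumptions):

/* Return true if the exact (hash, blocknr) pair is present in @cache.
 * mb_cache_entry_get() grabbed a reference, so drop it before returning. */
static bool example_block_is_cached(struct mb_cache *cache, u32 hash,
				    u64 blocknr)
{
	struct mb_cache_entry *ce = mb_cache_entry_get(cache, hash, blocknr);

	if (!ce)
		return false;
	mb_cache_entry_put(cache, ce);
	return true;
}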
220 /* mb_cache_entry_delete - remove a cache entry
225 * Removes the entry with key @key and value @value from @cache.
231 struct mb_cache_entry *entry; in mb_cache_entry_delete() local
235 hlist_bl_for_each_entry(entry, node, head, e_hash_list) { in mb_cache_entry_delete()
236 if (entry->e_key == key && entry->e_value == value) { in mb_cache_entry_delete()
237 /* We keep hash list reference to keep entry alive */ in mb_cache_entry_delete()
238 hlist_bl_del_init(&entry->e_hash_list); in mb_cache_entry_delete()
241 if (!list_empty(&entry->e_list)) { in mb_cache_entry_delete()
242 list_del_init(&entry->e_list); in mb_cache_entry_delete()
246 atomic_dec(&entry->e_refcnt); in mb_cache_entry_delete()
249 mb_cache_entry_put(cache, entry); in mb_cache_entry_delete()
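When the object backing an entry goes away (say, the cached block is freed),
the owner drops the pair from the cache. A hedged one-call sketch:

/* Forget a freed block. Safe even if the pair was never cached or was
 * already reclaimed: the hash-chain walk above simply finds no match. */
static void example_forget_block(struct mb_cache *cache, u32 hash,
				 u64 blocknr)
{
	mb_cache_entry_delete(cache, hash, blocknr);
}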
257 /* mb_cache_entry_touch - cache entry got used
258 * @cache: cache the entry belongs to
259 * @entry: entry that got used
261 * Marks the entry as used to give it a higher chance of surviving in the cache.
264 struct mb_cache_entry *entry) in mb_cache_entry_touch() argument
266 entry->e_referenced = 1; in mb_cache_entry_touch()
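Callers typically touch an entry on a verified cache hit, setting the
referenced bit so the shrinker gives the entry a second pass (see
mb_cache_shrink() below). A hedged sketch combining lookup and touch:

/* On a hit, mark the entry referenced before dropping our reference. */
static bool example_lookup_and_touch(struct mb_cache *cache, u32 hash,
				     u64 blocknr)
{
	struct mb_cache_entry *ce = mb_cache_entry_get(cache, hash, blocknr);

	if (!ce)
		return false;
	mb_cache_entry_touch(cache, ce);	/* sets e_referenced */
	mb_cache_entry_put(cache, ce);
	return true;
}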
283 struct mb_cache_entry *entry; in mb_cache_shrink() local
289 entry = list_first_entry(&cache->c_list, in mb_cache_shrink()
291 if (entry->e_referenced) { in mb_cache_shrink()
292 entry->e_referenced = 0; in mb_cache_shrink()
293 list_move_tail(&entry->e_list, &cache->c_list); in mb_cache_shrink()
296 list_del_init(&entry->e_list); in mb_cache_shrink()
299 * We keep LRU list reference so that entry doesn't go away in mb_cache_shrink()
303 head = mb_cache_entry_head(cache, entry->e_key); in mb_cache_shrink()
305 if (!hlist_bl_unhashed(&entry->e_hash_list)) { in mb_cache_shrink()
306 hlist_bl_del_init(&entry->e_hash_list); in mb_cache_shrink()
307 atomic_dec(&entry->e_refcnt); in mb_cache_shrink()
310 if (mb_cache_entry_put(cache, entry)) in mb_cache_shrink()
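The scan above implements second-chance (clock) reclaim: a referenced entry
has its bit cleared and is moved to the LRU tail instead of being freed, so
it survives one more pass. A miniature of the same policy over a plain array,
independent of mbcache and purely illustrative:

#include <stdbool.h>
#include <stddef.h>

struct sc_slot {
	bool in_use;
	bool referenced;
};

/* Evict one slot: referenced slots get their bit cleared and are skipped
 * once; the first unreferenced in-use slot is evicted. Returns the evicted
 * index, or -1 if nothing is in use. */
static int second_chance_evict(struct sc_slot *slots, size_t n)
{
	for (size_t scanned = 0, i = 0; n && scanned < 2 * n;
	     scanned++, i = (i + 1) % n) {
		if (!slots[i].in_use)
			continue;
		if (slots[i].referenced) {
			slots[i].referenced = false;	/* second chance */
			continue;
		}
		slots[i].in_use = false;		/* evict */
		return (int)i;
	}
	return -1;
}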
394 struct mb_cache_entry *entry, *next; in mb_cache_destroy() local
402 list_for_each_entry_safe(entry, next, &cache->c_list, e_list) { in mb_cache_destroy()
403 if (!hlist_bl_unhashed(&entry->e_hash_list)) { in mb_cache_destroy()
404 hlist_bl_del_init(&entry->e_hash_list); in mb_cache_destroy()
405 atomic_dec(&entry->e_refcnt); in mb_cache_destroy()
408 list_del(&entry->e_list); in mb_cache_destroy()
409 WARN_ON(atomic_read(&entry->e_refcnt) != 1); in mb_cache_destroy()
410 mb_cache_entry_put(cache, entry); in mb_cache_destroy()
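Finally, a hedged lifecycle sketch tying the pieces together. mb_cache_create()
does not appear among these matches but belongs to the same API; the bucket
order of 6 is an arbitrary illustrative choice. mb_cache_destroy() expects
that nobody can take new entry references anymore, which is why the code above
only WARNs when an extra refcount is still held:

static struct mb_cache *example_cache;

static int example_init(void)
{
	example_cache = mb_cache_create(6);	/* 2^6 hash buckets */
	return example_cache ? 0 : -ENOMEM;
}

static void example_exit(void)
{
	mb_cache_destroy(example_cache);
}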