Lines matching refs: cache_alloc (drivers/md/dm-cache-policy-smq.c)
814 struct entry_alloc cache_alloc; member
997 return to_cblock(get_index(&mq->cache_alloc, e)); in infer_cblock()
1054 unsigned int threshold_level = allocator_empty(&mq->cache_alloc) ? in update_promote_levels()
1164 nr_free = from_cblock(mq->cache_size) - mq->cache_alloc.nr_allocated; in free_target_met()
1247 if (allocator_empty(&mq->cache_alloc)) { in queue_promotion()
1264 e = alloc_entry(&mq->cache_alloc); in queue_promotion()
1272 free_entry(&mq->cache_alloc, e); in queue_promotion()
1295 if (!allocator_empty(&mq->cache_alloc) && fast_promote) in should_promote()
1473 struct entry *e = get_entry(&mq->cache_alloc, in __complete_background_work()
1486 free_entry(&mq->cache_alloc, e); in __complete_background_work()
1495 free_entry(&mq->cache_alloc, e); in __complete_background_work()
1530 struct entry *e = get_entry(&mq->cache_alloc, from_cblock(cblock)); in __smq_set_clear_dirty()
1573 e = alloc_particular_entry(&mq->cache_alloc, from_cblock(cblock)); in smq_load_mapping()
1591 struct entry *e = get_entry(&mq->cache_alloc, from_cblock(cblock)); in smq_invalidate_mapping()
1599 free_entry(&mq->cache_alloc, e); in smq_invalidate_mapping()
1606 struct entry *e = get_entry(&mq->cache_alloc, from_cblock(cblock)); in smq_get_hint()
1621 r = to_cblock(mq->cache_alloc.nr_allocated); in smq_residency()
1768 init_allocator(&mq->cache_alloc, &mq->es, in __smq_create()
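Taken together, these call sites trace the lifetime of a cache entry in the SMQ policy: __smq_create() sets the allocator up with init_allocator(), queue_promotion() and smq_load_mapping() take entries with alloc_entry()/alloc_particular_entry(), __complete_background_work(), smq_invalidate_mapping() and the queue_promotion() failure path hand them back with free_entry(), get_entry()/get_index() translate between entries and cblock indexes (infer_cblock(), __smq_set_clear_dirty(), smq_get_hint()), and nr_allocated feeds the occupancy checks in smq_residency() and free_target_met(). The sketch below is a minimal userspace model of that allocator pattern, assuming a singly linked free list over a fixed pool; the helper names mirror dm-cache-policy-smq.c, but the bodies, signatures, and the allocated flag are simplified assumptions, not the kernel implementation (the kernel keeps entries in indexed lists inside a shared entry_space and also provides alloc_particular_entry() for claiming a specific slot, which the sketch omits).

/*
 * Minimal userspace sketch of the allocator pattern the call sites above
 * exercise: a fixed pool of entries, a free list, nr_allocated accounting,
 * and O(1) entry<->index translation.  Names mirror dm-cache-policy-smq.c;
 * bodies are simplified assumptions, not the kernel implementation.
 */
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct entry {
        struct entry *next;     /* free-list link (simplified) */
        bool allocated;
};

struct entry_alloc {
        struct entry *pool;     /* backing array, one slot per cblock */
        unsigned int nr_allocated;
        struct entry *free;     /* head of the free list */
};

static void init_allocator(struct entry_alloc *ea, unsigned int nr_entries)
{
        unsigned int i;

        ea->pool = calloc(nr_entries, sizeof(*ea->pool));
        assert(ea->pool);
        ea->nr_allocated = 0;
        ea->free = NULL;

        /* push every slot onto the free list */
        for (i = 0; i < nr_entries; i++) {
                ea->pool[i].next = ea->free;
                ea->free = &ea->pool[i];
        }
}

static bool allocator_empty(struct entry_alloc *ea)
{
        return !ea->free;
}

static struct entry *alloc_entry(struct entry_alloc *ea)
{
        struct entry *e = ea->free;

        if (!e)
                return NULL;
        ea->free = e->next;
        e->allocated = true;
        ea->nr_allocated++;
        return e;
}

static void free_entry(struct entry_alloc *ea, struct entry *e)
{
        assert(e->allocated);
        e->allocated = false;
        e->next = ea->free;
        ea->free = e;
        ea->nr_allocated--;
}

/* entry <-> index translation, cf. infer_cblock() and get_entry() above */
static unsigned int get_index(struct entry_alloc *ea, struct entry *e)
{
        return (unsigned int)(e - ea->pool);
}

static struct entry *get_entry(struct entry_alloc *ea, unsigned int index)
{
        return &ea->pool[index];
}

int main(void)
{
        struct entry_alloc cache_alloc;
        struct entry *e;

        init_allocator(&cache_alloc, 8);

        e = alloc_entry(&cache_alloc);
        printf("allocated index %u, nr_allocated=%u\n",
               get_index(&cache_alloc, e), cache_alloc.nr_allocated);
        assert(get_entry(&cache_alloc, get_index(&cache_alloc, e)) == e);

        free_entry(&cache_alloc, e);
        printf("empty=%d, nr_allocated=%u\n",
               allocator_empty(&cache_alloc), cache_alloc.nr_allocated);

        free(cache_alloc.pool);
        return 0;
}

The point the sketch tries to preserve is that an entry's identity is its position in the pool, so get_index()/get_entry() are plain pointer arithmetic and the cblock can be inferred from the entry itself, which is what the infer_cblock() hit at line 997 relies on, while nr_allocated alone is enough for the residency and free-target accounting seen at lines 1164 and 1621.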