Lines matching refs: objects
(Cross-reference hits for the "objects" symbol in mm/slub.c; each entry shows the source line number, the matching code, and the enclosing function.)
595 bitmap_zero(obj_map, slab->objects); in __fill_map()
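The __fill_map() hit above zeroes a bitmap with one bit per object and then sets the bit for every object found on the freelist, so debug code can tell free slots from allocated ones. A minimal userspace sketch of that idea (struct slab_model and its index-based freelist are invented for brevity; the kernel derives indexes from object addresses instead):

    #include <stdio.h>
    #include <string.h>

    #define NR_OBJECTS 16

    /* Invented model: the freelist is an array of free object indexes
     * here, rather than a chain of pointers, to keep the sketch short. */
    struct slab_model {
        unsigned int objects;
        unsigned int nr_free;
        unsigned int free_idx[NR_OBJECTS];
    };

    /* Analogue of __fill_map(): clear one bit per object (bitmap_zero),
     * then set the bit for every object on the freelist. Afterwards a
     * clear bit means "allocated". */
    static void fill_map(unsigned char *map, const struct slab_model *slab)
    {
        memset(map, 0, (slab->objects + 7) / 8);
        for (unsigned int i = 0; i < slab->nr_free; i++) {
            unsigned int obj = slab->free_idx[i];
            map[obj / 8] |= 1u << (obj % 8);
        }
    }

    int main(void)
    {
        struct slab_model s = { .objects = NR_OBJECTS, .nr_free = 3,
                                .free_idx = { 2, 5, 11 } };
        unsigned char map[(NR_OBJECTS + 7) / 8];

        fill_map(map, &s);
        for (unsigned int i = 0; i < s.objects; i++)
            printf("obj %2u: %s\n", i,
                   (map[i / 8] >> (i % 8)) & 1 ? "free" : "allocated");
        return 0;
    }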
681 if (object < base || object >= base + slab->objects * s->size || in check_valid_pointer()
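check_valid_pointer() rejects any pointer that is outside the slab's object area or not on an object boundary; the hit above is exactly that range-and-stride test. A minimal sketch of the same arithmetic (struct slab_model is a hypothetical stand-in, not the kernel's struct slab / struct kmem_cache):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    struct slab_model {
        uintptr_t base;          /* address of the first object         */
        unsigned int objects;    /* number of objects in this slab      */
        unsigned int size;       /* bytes per object, metadata included */
    };

    /* Mirrors the test at check_valid_pointer(): the pointer must lie in
     * [base, base + objects * size) and sit on an exact object boundary. */
    static bool ptr_is_valid_object(const struct slab_model *slab,
                                    uintptr_t object)
    {
        if (object < slab->base ||
            object >= slab->base + (uintptr_t)slab->objects * slab->size)
            return false;
        return (object - slab->base) % slab->size == 0;
    }

    int main(void)
    {
        struct slab_model s = { .base = 0x1000, .objects = 8, .size = 64 };

        printf("%d\n", ptr_is_valid_object(&s, 0x1000 + 3 * 64));     /* 1 */
        printf("%d\n", ptr_is_valid_object(&s, 0x1000 + 3 * 64 + 4)); /* 0 */
        printf("%d\n", ptr_is_valid_object(&s, 0x1000 + 8 * 64));     /* 0 */
        return 0;
    }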
814 slab, slab->objects, slab->inuse, slab->freelist, in print_slab_info()
1184 if (slab->objects > maxobj) { in check_slab()
1186 slab->objects, maxobj); in check_slab()
1189 if (slab->inuse > slab->objects) { in check_slab()
1191 slab->inuse, slab->objects); in check_slab()
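The check_slab() hits enforce two header invariants: objects can never exceed the maximum that fits in the slab's pages, and inuse can never exceed objects. A hedged sketch of those checks (maxobj is simply passed in here; SLUB computes it from the page order and object size):

    #include <stdbool.h>
    #include <stdio.h>

    struct slab_hdr {               /* simplified model of the header */
        unsigned int objects;       /* capacity of the slab           */
        unsigned int inuse;         /* objects currently allocated    */
    };

    /* Mirrors the two sanity checks referenced at check_slab(). */
    static bool slab_header_ok(const struct slab_hdr *s, unsigned int maxobj)
    {
        if (s->objects > maxobj) {
            fprintf(stderr, "objects %u > max %u\n", s->objects, maxobj);
            return false;
        }
        if (s->inuse > s->objects) {
            fprintf(stderr, "inuse %u > objects %u\n", s->inuse, s->objects);
            return false;
        }
        return true;
    }

    int main(void)
    {
        struct slab_hdr good = { .objects = 32, .inuse = 10 };
        struct slab_hdr bad  = { .objects = 32, .inuse = 40 };

        printf("good: %d\n", slab_header_ok(&good, 64)); /* 1 */
        printf("bad:  %d\n", slab_header_ok(&bad,  64)); /* 0 */
        return 0;
    }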
1211 while (fp && nr <= slab->objects) { in on_freelist()
1222 slab->inuse = slab->objects; in on_freelist()
1237 if (slab->objects != max_objects) { in on_freelist()
1239 slab->objects, max_objects); in on_freelist()
1240 slab->objects = max_objects; in on_freelist()
1243 if (slab->inuse != slab->objects - nr) { in on_freelist()
1245 slab->inuse, slab->objects - nr); in on_freelist()
1246 slab->inuse = slab->objects - nr; in on_freelist()
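The on_freelist() hits walk the freelist while counting free objects, never following more than slab->objects links so a corrupted, cyclic list cannot hang the check, and then repair inuse (and, on corruption, the freelist itself). A simplified userspace model of that walk-and-fix-up logic (struct free_obj and slab_model are invented; SLUB likewise stores the next pointer inside the free object):

    #include <stddef.h>
    #include <stdio.h>

    struct free_obj { struct free_obj *next; };

    struct slab_model {
        unsigned int objects;        /* capacity          */
        unsigned int inuse;          /* allocated objects */
        struct free_obj *freelist;
    };

    /* Count freelist entries, bounded by the object capacity, then bring
     * inuse back in line with objects - nr as on_freelist() does. */
    static void fixup_inuse(struct slab_model *slab)
    {
        unsigned int nr = 0;
        struct free_obj *fp = slab->freelist;

        while (fp && nr <= slab->objects) {
            fp = fp->next;
            nr++;
        }
        if (nr > slab->objects) {
            /* Cyclic/corrupted list: the kernel clears the freelist and
             * marks every object in use (slab->inuse = slab->objects). */
            slab->freelist = NULL;
            slab->inuse = slab->objects;
            return;
        }
        if (slab->inuse != slab->objects - nr) {
            fprintf(stderr, "inuse %u should be %u, fixing\n",
                    slab->inuse, slab->objects - nr);
            slab->inuse = slab->objects - nr;
        }
    }

    int main(void)
    {
        struct free_obj c = { NULL }, b = { &c }, a = { &b };
        struct slab_model s = { .objects = 8, .inuse = 7, .freelist = &a };

        fixup_inuse(&s);             /* 3 free objects -> inuse becomes 5 */
        printf("inuse = %u\n", s.inuse);
        return 0;
    }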
1305 static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects) in inc_slabs_node() argument
1317 atomic_long_add(objects, &n->total_objects); in inc_slabs_node()
1320 static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects) in dec_slabs_node() argument
1325 atomic_long_sub(objects, &n->total_objects); in dec_slabs_node()
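inc_slabs_node()/dec_slabs_node() maintain per-node slab and object totals in atomic counters so the statistics can be updated without holding the node's list lock. A rough userspace analogue using C11 atomics (struct node_stats is invented; the kernel keeps atomic_long_t fields in struct kmem_cache_node):

    #include <stdatomic.h>
    #include <stdio.h>

    struct node_stats {
        atomic_long nr_slabs;
        atomic_long total_objects;
    };

    /* Analogue of inc_slabs_node(): one more slab, `objects` more objects. */
    static void stats_inc(struct node_stats *n, long objects)
    {
        atomic_fetch_add_explicit(&n->nr_slabs, 1, memory_order_relaxed);
        atomic_fetch_add_explicit(&n->total_objects, objects,
                                  memory_order_relaxed);
    }

    /* Analogue of dec_slabs_node(). */
    static void stats_dec(struct node_stats *n, long objects)
    {
        atomic_fetch_sub_explicit(&n->nr_slabs, 1, memory_order_relaxed);
        atomic_fetch_sub_explicit(&n->total_objects, objects,
                                  memory_order_relaxed);
    }

    int main(void)
    {
        struct node_stats n = { 0, 0 };

        stats_inc(&n, 32);
        stats_inc(&n, 32);
        stats_dec(&n, 32);
        printf("slabs=%ld objects=%ld\n",
               atomic_load(&n.nr_slabs), atomic_load(&n.total_objects));
        return 0;
    }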
1388 slab->inuse = slab->objects; in alloc_debug_processing()
1675 int objects) {} in inc_slabs_node() argument
1677 int objects) {} in dec_slabs_node() argument
1880 if (slab->objects < 2 || !s->random_seq) in shuffle_freelist()
1886 page_limit = slab->objects * s->size; in shuffle_freelist()
1895 for (idx = 1; idx < slab->objects; idx++) { in shuffle_freelist()
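The shuffle_freelist() hits thread a new slab's freelist through its objects in a precomputed random order (s->random_seq), which is what CONFIG_SLAB_FREELIST_RANDOM provides: consecutive allocations do not return adjacent addresses. A simplified sketch that builds a permutation and links a freelist through a buffer in that order (rand() stands in for the kernel's per-cache precomputed sequence):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define NR_OBJECTS 8
    #define OBJ_SIZE   32   /* bytes per object; must hold a pointer */

    /* Fisher-Yates permutation of 0..n-1; the kernel precomputes a
     * similar sequence once per cache in s->random_seq. */
    static void random_sequence(unsigned int *seq, unsigned int n)
    {
        for (unsigned int i = 0; i < n; i++)
            seq[i] = i;
        for (unsigned int i = n - 1; i > 0; i--) {
            unsigned int j = (unsigned int)rand() % (i + 1);
            unsigned int tmp = seq[i]; seq[i] = seq[j]; seq[j] = tmp;
        }
    }

    /* Thread a singly linked freelist through `base` in shuffled order:
     * each free object's first word stores the next object's address. */
    static void *shuffle_freelist(char *base, const unsigned int *seq,
                                  unsigned int nr)
    {
        void *head = base + (size_t)seq[0] * OBJ_SIZE;
        void *cur = head;

        for (unsigned int idx = 1; idx < nr; idx++) {
            void *next = base + (size_t)seq[idx] * OBJ_SIZE;
            memcpy(cur, &next, sizeof(next));   /* link cur -> next */
            cur = next;
        }
        void *nil = NULL;
        memcpy(cur, &nil, sizeof(nil));         /* terminate the list */
        return head;
    }

    int main(void)
    {
        static char slab[NR_OBJECTS * OBJ_SIZE];
        unsigned int seq[NR_OBJECTS];

        random_sequence(seq, NR_OBJECTS);
        void *fp = shuffle_freelist(slab, seq, NR_OBJECTS);

        while (fp) {        /* walk the list, printing object indexes */
            printf("%zu ", (size_t)(((char *)fp - slab) / OBJ_SIZE));
            memcpy(&fp, fp, sizeof(fp));
        }
        printf("\n");
        return 0;
    }

The (slab->objects < 2 || !s->random_seq) guard shown above is the same assumption the sketch makes implicitly: with fewer than two objects there is nothing to shuffle.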
1953 slab->objects = oo_objects(oo); in allocate_slab()
1973 for (idx = 0, p = start; idx < slab->objects - 1; idx++) { in allocate_slab()
2006 for_each_object(p, s, slab_address(slab), slab->objects) in __free_slab()
2036 dec_slabs_node(s, slab_nid(slab), slab->objects); in discard_slab()
2090 if (slab->inuse == slab->objects) { in alloc_single_from_partial()
2126 if (slab->inuse == slab->objects) in alloc_single_from_new_slab()
2131 inc_slabs_node(s, nid, slab->objects); in alloc_single_from_new_slab()
2162 new.inuse = slab->objects; in acquire_slab()
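alloc_single_from_partial(), alloc_single_from_new_slab() and acquire_slab() all use inuse == objects as the "slab is now full" condition: once every object is handed out, the slab leaves the node's partial list (acquire_slab() takes the whole freelist at once, hence new.inuse = slab->objects). A toy model of that state transition (list handling is reduced to an enum for brevity):

    #include <stdio.h>

    enum slab_state { SLAB_PARTIAL, SLAB_FULL };

    struct slab_model {
        unsigned int objects;
        unsigned int inuse;
        enum slab_state state;
    };

    /* Hand out one object; when inuse reaches objects the slab is full
     * and (in the kernel) would be moved off the partial list. */
    static int alloc_from_slab(struct slab_model *slab)
    {
        if (slab->inuse == slab->objects)
            return -1;                      /* nothing left */
        slab->inuse++;
        if (slab->inuse == slab->objects)
            slab->state = SLAB_FULL;
        return 0;
    }

    int main(void)
    {
        struct slab_model s = { .objects = 3, .inuse = 1,
                                .state = SLAB_PARTIAL };

        while (alloc_from_slab(&s) == 0)
            printf("inuse=%u state=%s\n", s.inuse,
                   s.state == SLAB_FULL ? "full" : "partial");
        return 0;
    }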
2822 return slab->objects - slab->inuse; in count_free()
2923 dec_slabs_node(s, slab_nid(slab_free), slab_free->objects); in free_debug_processing()
3022 new.inuse = slab->objects; in get_freelist()
3208 slab->inuse = slab->objects; in ___slab_alloc()
3211 inc_slabs_node(s, slab_nid(slab), slab->objects); in ___slab_alloc()
4058 inc_slabs_node(kmem_cache_node, slab_nid(slab), slab->objects); in early_kmem_cache_node_alloc()
4075 inc_slabs_node(kmem_cache_node, node, slab->objects); in early_kmem_cache_node_alloc()
4371 for_each_object(p, s, addr, slab->objects) { in list_slab_objects()
4463 if (WARN_ON_ONCE(objp < base || objp >= base + slab->objects * s->size in __kmem_obj_info()
4616 int free = slab->objects - slab->inuse; in __kmem_cache_do_shrink()
4624 if (free == slab->objects) { in __kmem_cache_do_shrink()
4627 dec_slabs_node(s, node, slab->objects); in __kmem_cache_do_shrink()
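__kmem_cache_do_shrink() computes free = slab->objects - slab->inuse for each slab on a node's partial list; a slab where free == slab->objects holds no live objects, so it can be discarded and the node's counters dropped via dec_slabs_node(). A compact model of that sweep (the counters and slab array are invented; the kernel also re-sorts the surviving partial slabs by fullness):

    #include <stdio.h>

    #define NR_SLABS 4

    struct slab_model {
        unsigned int objects;
        unsigned int inuse;
    };

    int main(void)
    {
        struct slab_model slabs[NR_SLABS] = {
            { 16, 16 }, { 16, 0 }, { 16, 7 }, { 16, 0 },
        };
        long nr_slabs = NR_SLABS, total_objects = 16L * NR_SLABS;

        for (int i = 0; i < NR_SLABS; i++) {
            unsigned int free = slabs[i].objects - slabs[i].inuse;

            if (free == slabs[i].objects) {  /* completely empty slab */
                nr_slabs--;
                total_objects -= slabs[i].objects;
                printf("discard slab %d\n", i);
            }
        }
        printf("remaining: %ld slabs, %ld objects\n",
               nr_slabs, total_objects);
        return 0;
    }

The count_free() hit earlier in the listing is the same objects - inuse expression, reused there for allocator statistics.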
4935 return slab->objects; in count_total()
4951 for_each_object(p, s, addr, slab->objects) { in validate_slab()
5176 for_each_object(p, s, addr, slab->objects) in process_slab()
5228 x = slab->objects; in show_slab_objects()
5391 unsigned int objects; in cpu_partial_store() local
5394 err = kstrtouint(buf, 10, &objects); in cpu_partial_store()
5397 if (objects && !kmem_cache_has_cpu_partial(s)) in cpu_partial_store()
5400 slub_set_cpu_partial(s, objects); in cpu_partial_store()
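cpu_partial_store() is the sysfs write handler behind /sys/kernel/slab/<cache>/cpu_partial: it parses the decimal string with kstrtouint() and rejects a nonzero value when the cache cannot keep per-CPU partial slabs. A userspace analogue of the parse-then-validate pattern (parse_uint() is a rough stand-in for kstrtouint(), and has_cpu_partial models kmem_cache_has_cpu_partial()):

    #include <errno.h>
    #include <limits.h>
    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Base-10, whole-string, range-checked conversion to unsigned int,
     * loosely mimicking kstrtouint()'s contract. */
    static int parse_uint(const char *buf, unsigned int *out)
    {
        char *end;
        errno = 0;
        unsigned long v = strtoul(buf, &end, 10);

        if (errno || end == buf || *end != '\0' || v > UINT_MAX)
            return -EINVAL;
        *out = (unsigned int)v;
        return 0;
    }

    int main(void)
    {
        bool has_cpu_partial = false;  /* pretend the cache lacks support */
        unsigned int objects;

        if (parse_uint("13", &objects))
            return 1;

        /* Mirrors the check at cpu_partial_store(): a nonzero setting
         * is invalid when per-CPU partial slabs are unavailable. */
        if (objects && !has_cpu_partial) {
            fprintf(stderr, "cpu_partial unsupported for this cache\n");
            return 1;                  /* the kernel returns -EINVAL */
        }
        printf("cpu_partial set to %u\n", objects);
        return 0;
    }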
5436 SLAB_ATTR_RO(objects);
5446 int objects = 0; in slabs_cpu_partial_show() local
5463 objects = (slabs * oo_objects(s->oo)) / 2; in slabs_cpu_partial_show()
5464 len += sysfs_emit_at(buf, len, "%d(%d)", objects, slabs); in slabs_cpu_partial_show()
5473 objects = (slabs * oo_objects(s->oo)) / 2; in slabs_cpu_partial_show()
5475 cpu, objects, slabs); in slabs_cpu_partial_show()
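slabs_cpu_partial_show() cannot cheaply count live objects sitting on per-CPU partial slabs, so it reports an estimate of half the theoretical capacity, objects = (slabs * oo_objects(s->oo)) / 2, formatted as "objects(slabs)". A one-off demonstration of the arithmetic (the objects-per-slab value of 32 is made up):

    #include <stdio.h>

    int main(void)
    {
        int slabs = 6;             /* partial slabs queued on one CPU */
        int objs_per_slab = 32;    /* hypothetical oo_objects(s->oo)  */

        /* Estimate used by slabs_cpu_partial_show(): assume the
         * partial slabs are on average half full. */
        int objects = (slabs * objs_per_slab) / 2;

        printf("%d(%d)\n", objects, slabs);   /* prints "96(6)" */
        return 0;
    }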