Searched refs:cache_set (Results 1 – 23 of 23) sorted by relevance

/Linux-v5.15/drivers/md/bcache/

bcache.h
254 struct cache_set *c;
411 struct cache_set *set;
510 struct cache_set { struct
791 static inline size_t sector_to_bucket(struct cache_set *c, sector_t s) in sector_to_bucket()
796 static inline sector_t bucket_to_sector(struct cache_set *c, size_t b) in bucket_to_sector()
801 static inline sector_t bucket_remainder(struct cache_set *c, sector_t s) in bucket_remainder()
806 static inline size_t PTR_BUCKET_NR(struct cache_set *c, in PTR_BUCKET_NR()
813 static inline struct bucket *PTR_BUCKET(struct cache_set *c, in PTR_BUCKET()
827 static inline uint8_t ptr_stale(struct cache_set *c, const struct bkey *k, in ptr_stale()
833 static inline bool ptr_available(struct cache_set *c, const struct bkey *k, in ptr_available()
[all …]
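
The inline helpers listed above for bcache.h (sector_to_bucket(), bucket_to_sector(), bucket_remainder(), PTR_BUCKET_NR()) convert between 512-byte sector offsets and bucket indexes on a cache_set. The listing only shows their signatures, so the following is a minimal standalone sketch of that arithmetic, assuming the bucket size is a power of two so a shift/mask suffices; toy_cache_set and its bucket_bits field are illustrative stand-ins, not the real struct cache_set layout.

/* toy_bucket.c - sector <-> bucket arithmetic sketch (hypothetical names) */
#include <stdio.h>
#include <stdint.h>

struct toy_cache_set {
	unsigned bucket_bits;	/* log2(bucket size in 512-byte sectors) */
};

/* Which bucket a sector falls in. */
static inline size_t toy_sector_to_bucket(const struct toy_cache_set *c, uint64_t s)
{
	return s >> c->bucket_bits;
}

/* First sector of a given bucket. */
static inline uint64_t toy_bucket_to_sector(const struct toy_cache_set *c, size_t b)
{
	return (uint64_t)b << c->bucket_bits;
}

/* Offset of a sector within its bucket. */
static inline uint64_t toy_bucket_remainder(const struct toy_cache_set *c, uint64_t s)
{
	return s & (((uint64_t)1 << c->bucket_bits) - 1);
}

int main(void)
{
	struct toy_cache_set c = { .bucket_bits = 10 };	/* 1024-sector (512 KiB) buckets */
	uint64_t s = 123456;
	size_t b = toy_sector_to_bucket(&c, s);

	printf("sector %llu -> bucket %zu (starts at sector %llu), offset %llu\n",
	       (unsigned long long)s, b,
	       (unsigned long long)toy_bucket_to_sector(&c, b),
	       (unsigned long long)toy_bucket_remainder(&c, s));
	return 0;
}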

btree.h
126 struct cache_set *c;
195 static inline void set_gc_sectors(struct cache_set *c) in set_gc_sectors()
200 void bkey_put(struct cache_set *c, struct bkey *k);
231 struct cache_set *c;
268 struct btree *__bch_btree_node_alloc(struct cache_set *c, struct btree_op *op,
271 struct btree *bch_btree_node_get(struct cache_set *c, struct btree_op *op,
277 int bch_btree_insert(struct cache_set *c, struct keylist *keys,
280 int bch_gc_thread_start(struct cache_set *c);
281 void bch_initial_gc_finish(struct cache_set *c);
282 void bch_moving_gc(struct cache_set *c);
[all …]

journal.h
97 struct cache_set *c;
170 struct cache_set;
174 atomic_t *bch_journal(struct cache_set *c,
178 void bch_journal_mark(struct cache_set *c, struct list_head *list);
179 void bch_journal_meta(struct cache_set *c, struct closure *cl);
180 int bch_journal_read(struct cache_set *c, struct list_head *list);
181 int bch_journal_replay(struct cache_set *c, struct list_head *list);
183 void bch_journal_free(struct cache_set *c);
184 int bch_journal_alloc(struct cache_set *c);

stats.h
41 struct cache_set;
55 void bch_mark_cache_accounting(struct cache_set *c, struct bcache_device *d,
57 void bch_mark_cache_readahead(struct cache_set *c, struct bcache_device *d);
58 void bch_mark_cache_miss_collision(struct cache_set *c,
60 void bch_mark_sectors_bypassed(struct cache_set *c,

io.c
17 void bch_bbio_free(struct bio *bio, struct cache_set *c) in bch_bbio_free()
24 struct bio *bch_bbio_alloc(struct cache_set *c) in bch_bbio_alloc()
34 void __bch_submit_bbio(struct bio *bio, struct cache_set *c) in __bch_submit_bbio()
45 void bch_submit_bbio(struct bio *bio, struct cache_set *c, in bch_submit_bbio()
136 void bch_bbio_count_io_errors(struct cache_set *c, struct bio *bio, in bch_bbio_count_io_errors()
166 void bch_bbio_endio(struct cache_set *c, struct bio *bio, in bch_bbio_endio()

extents.h
9 struct cache_set;
12 bool __bch_btree_ptr_invalid(struct cache_set *c, const struct bkey *k);
13 bool __bch_extent_invalid(struct cache_set *c, const struct bkey *k);

debug.h
7 struct cache_set;
30 void bch_debug_init_cache_set(struct cache_set *c);
32 static inline void bch_debug_init_cache_set(struct cache_set *c) {} in bch_debug_init_cache_set()

journal.c
171 int bch_journal_read(struct cache_set *c, struct list_head *list) in bch_journal_read()
294 void bch_journal_mark(struct cache_set *c, struct list_head *list) in bch_journal_mark()
340 static bool is_discard_enabled(struct cache_set *s) in is_discard_enabled()
350 int bch_journal_replay(struct cache_set *s, struct list_head *list) in bch_journal_replay()
412 static void btree_flush_write(struct cache_set *c) in btree_flush_write()
628 static void journal_reclaim(struct cache_set *c) in journal_reclaim()
724 struct cache_set *c = container_of(cl, struct cache_set, journal.io); in journal_write_unlock()
733 struct cache_set *c = container_of(cl, struct cache_set, journal.io); in journal_write_unlocked()
812 struct cache_set *c = container_of(cl, struct cache_set, journal.io); in journal_write()
818 static void journal_try_write(struct cache_set *c) in journal_try_write()
[all …]
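
Several of the results above recover the enclosing struct cache_set from a pointer to one of its embedded members via container_of(), e.g. container_of(cl, struct cache_set, journal.io) in the journal write path and container_of(kobj, struct cache_set, kobj) in sysfs.c. The standalone sketch below shows how that pattern works; the simplified macro and the toy_* structs are illustrative stand-ins, not the kernel definitions.

/* toy_container_of.c - recover the enclosing object from an embedded member */
#include <stdio.h>
#include <stddef.h>

/* Simplified container_of(): step back from the member to the start of the struct. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct closure { int dummy; };

struct toy_journal {
	struct closure io;	/* embedded member, as in cache_set journal.io */
};

struct toy_cache_set {
	int id;
	struct toy_journal journal;
};

/* Callback that only receives the embedded closure, like journal_write() above. */
static void journal_write(struct closure *cl)
{
	struct toy_cache_set *c = container_of(cl, struct toy_cache_set, journal.io);

	printf("journal_write for cache_set %d\n", c->id);
}

int main(void)
{
	struct toy_cache_set c = { .id = 42 };

	journal_write(&c.journal.io);
	return 0;
}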

features.c
56 int bch_print_cache_set_feature_compat(struct cache_set *c, char *buf, int size) in bch_print_cache_set_feature_compat()
63 int bch_print_cache_set_feature_ro_compat(struct cache_set *c, char *buf, int size) in bch_print_cache_set_feature_ro_compat()
70 int bch_print_cache_set_feature_incompat(struct cache_set *c, char *buf, int size) in bch_print_cache_set_feature_incompat()

super.c
370 struct cache_set *c = container_of(cl, struct cache_set, sb_write); in bcache_write_super_unlock()
375 void bcache_write_super(struct cache_set *c) in bcache_write_super()
406 struct cache_set *c = container_of(cl, struct cache_set, uuid_write); in uuid_endio()
415 struct cache_set *c = container_of(cl, struct cache_set, uuid_write); in uuid_io_unlock()
420 static void uuid_io(struct cache_set *c, int op, unsigned long op_flags, in uuid_io()
461 static char *uuid_read(struct cache_set *c, struct jset *j, struct closure *cl) in uuid_read()
502 static int __uuid_write(struct cache_set *c) in __uuid_write()
528 int bch_uuid_write(struct cache_set *c) in bch_uuid_write()
538 static struct uuid_entry *uuid_find(struct cache_set *c, const char *uuid) in uuid_find()
550 static struct uuid_entry *uuid_find_empty(struct cache_set *c) in uuid_find_empty()
[all …]

features.h
108 int bch_print_cache_set_feature_compat(struct cache_set *c, char *buf, int size);
109 int bch_print_cache_set_feature_ro_compat(struct cache_set *c, char *buf, int size);
110 int bch_print_cache_set_feature_incompat(struct cache_set *c, char *buf, int size);

btree.c
128 void bkey_put(struct cache_set *c, struct bkey *k) in bkey_put()
562 static struct btree *mca_bucket_alloc(struct cache_set *c, in mca_bucket_alloc()
649 struct cache_set *c = container_of(shrink, struct cache_set, shrink); in bch_mca_scan()
716 struct cache_set *c = container_of(shrink, struct cache_set, shrink); in bch_mca_count()
727 void bch_btree_cache_free(struct cache_set *c) in bch_btree_cache_free()
775 int bch_btree_cache_alloc(struct cache_set *c) in bch_btree_cache_alloc()
824 static struct hlist_head *mca_hash(struct cache_set *c, struct bkey *k) in mca_hash()
829 static struct btree *mca_find(struct cache_set *c, struct bkey *k) in mca_find()
843 static int mca_cannibalize_lock(struct cache_set *c, struct btree_op *op) in mca_cannibalize_lock()
860 static struct btree *mca_cannibalize(struct cache_set *c, struct btree_op *op, in mca_cannibalize()
[all …]

request.h
7 struct cache_set *c;
36 unsigned int bch_get_congested(const struct cache_set *c);

movinggc.c
24 struct cache_set *c = container_of(buf, struct cache_set, in moving_pred()
126 static void read_moving(struct cache_set *c) in read_moving()
197 void bch_moving_gc(struct cache_set *c) in bch_moving_gc()
248 void bch_moving_init_cache_set(struct cache_set *c) in bch_moving_init_cache_set()

sysfs.c
296 struct cache_set *c; in STORE()
630 static int bch_bset_print_stats(struct cache_set *c, char *buf) in bch_bset_print_stats()
656 static unsigned int bch_root_usage(struct cache_set *c) in bch_root_usage()
680 static size_t bch_cache_size(struct cache_set *c) in bch_cache_size()
693 static unsigned int bch_cache_max_chain(struct cache_set *c) in bch_cache_max_chain()
716 static unsigned int bch_btree_used(struct cache_set *c) in bch_btree_used()
722 static unsigned int bch_average_key_size(struct cache_set *c) in bch_average_key_size()
731 struct cache_set *c = container_of(kobj, struct cache_set, kobj); in SHOW()
818 struct cache_set *c = container_of(kobj, struct cache_set, kobj); in STORE()
938 struct cache_set *c = container_of(kobj, struct cache_set, internal); in SHOW()
[all …]

alloc.c
86 void bch_rescale_priorities(struct cache_set *c, int sectors) in bch_rescale_priorities()
480 void bch_bucket_free(struct cache_set *c, struct bkey *k) in bch_bucket_free()
488 int __bch_bucket_alloc_set(struct cache_set *c, unsigned int reserve, in __bch_bucket_alloc_set()
520 int bch_bucket_alloc_set(struct cache_set *c, unsigned int reserve, in bch_bucket_alloc_set()
565 static struct open_bucket *pick_data_bucket(struct cache_set *c, in pick_data_bucket()
606 bool bch_alloc_sectors(struct cache_set *c, in bch_alloc_sectors()
697 void bch_open_buckets_free(struct cache_set *c) in bch_open_buckets_free()
709 int bch_open_buckets_alloc(struct cache_set *c) in bch_open_buckets_alloc()

writeback.c
20 static void update_gc_after_writeback(struct cache_set *c) in update_gc_after_writeback()
32 struct cache_set *c = dc->disk.c; in __calc_target_rate()
101 struct cache_set *c = dc->disk.c; in __update_writeback_rate()
160 static bool set_at_max_writeback_rate(struct cache_set *c, in set_at_max_writeback_rate()
216 struct cache_set *c = dc->disk.c; in update_writeback_rate()
557 void bcache_dev_sectors_dirty_add(struct cache_set *c, unsigned int inode, in bcache_dev_sectors_dirty_add()
700 struct cache_set *c = dc->disk.c; in bch_writeback_thread()
836 static int bch_root_node_dirty_init(struct cache_set *c, in bch_root_node_dirty_init()
872 struct cache_set *c = state->c; in bch_dirty_init_thread()
947 struct cache_set *c = d->c; in bch_sectors_dirty_init()

stats.c
197 void bch_mark_cache_accounting(struct cache_set *c, struct bcache_device *d, in bch_mark_cache_accounting()
206 void bch_mark_cache_miss_collision(struct cache_set *c, struct bcache_device *d) in bch_mark_cache_miss_collision()
214 void bch_mark_sectors_bypassed(struct cache_set *c, struct cached_dev *dc, in bch_mark_sectors_bypassed()

writeback.h
38 struct cache_set *c;
148 void bcache_dev_sectors_dirty_add(struct cache_set *c, unsigned int inode,

extents.c
47 static bool __ptr_invalid(struct cache_set *c, const struct bkey *k) in __ptr_invalid()
68 static const char *bch_ptr_status(struct cache_set *c, const struct bkey *k) in bch_ptr_status()
149 bool __bch_btree_ptr_invalid(struct cache_set *c, const struct bkey *k) in __bch_btree_ptr_invalid()
314 struct cache_set *c, in bch_subtract_dirty()
328 struct cache_set *c = container_of(b, struct btree, keys)->c; in bch_extent_insert_fixup()
480 bool __bch_extent_invalid(struct cache_set *c, const struct bkey *k) in __bch_extent_invalid()

debug.c
162 struct cache_set *c;
208 struct cache_set *c = inode->i_private; in bch_dump_open()
236 void bch_debug_init_cache_set(struct cache_set *c) in bch_debug_init_cache_set()

request.c
91 struct cache_set *c) in bch_keylist_realloc()
324 unsigned int bch_get_congested(const struct cache_set *c) in bch_get_congested()
365 struct cache_set *c = dc->disk.c; in check_should_bypass()
1126 static void quit_max_writeback_rate(struct cache_set *c, in quit_max_writeback_rate()

/Linux-v5.15/include/trace/events/

bcache.h
152 TP_PROTO(struct cache_set *c, u64 inode, struct bio *bio,
194 DECLARE_EVENT_CLASS(cache_set,
195 TP_PROTO(struct cache_set *c),
214 DEFINE_EVENT(cache_set, bcache_journal_full,
215 TP_PROTO(struct cache_set *c),
219 DEFINE_EVENT(cache_set, bcache_journal_entry_full,
220 TP_PROTO(struct cache_set *c),
252 DEFINE_EVENT(cache_set, bcache_btree_cache_cannibalize,
253 TP_PROTO(struct cache_set *c),
287 DEFINE_EVENT(cache_set, bcache_btree_node_alloc_fail,
[all …]
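
In the trace header above, DECLARE_EVENT_CLASS(cache_set, ...) defines a template whose prototype is just a struct cache_set pointer, and the DEFINE_EVENT() lines (bcache_journal_full, bcache_journal_entry_full, bcache_btree_cache_cannibalize, bcache_btree_node_alloc_fail) stamp out named tracepoints from that class. A condensed sketch of the pattern follows; it only compiles as part of a kernel trace header, and the recorded uuid payload and the c->set_uuid accessor are assumptions for illustration, not taken from the listing.

#undef TRACE_SYSTEM
#define TRACE_SYSTEM bcache

#if !defined(_TRACE_BCACHE_H) || defined(TRACE_HEADER_MULTI_READ)
#define _TRACE_BCACHE_H

#include <linux/tracepoint.h>

/* One event class carries the shared prototype and payload layout. */
DECLARE_EVENT_CLASS(cache_set,
	TP_PROTO(struct cache_set *c),
	TP_ARGS(c),

	TP_STRUCT__entry(
		__array(char, uuid, 16)			/* assumed payload: cache set UUID */
	),

	TP_fast_assign(
		memcpy(__entry->uuid, c->set_uuid, 16);	/* field name assumed */
	),

	TP_printk("%pU", __entry->uuid)
);

/* Each DEFINE_EVENT() creates a named tracepoint reusing that class. */
DEFINE_EVENT(cache_set, bcache_journal_full,
	TP_PROTO(struct cache_set *c),
	TP_ARGS(c)
);

#endif /* _TRACE_BCACHE_H */

/* This must stay outside the include guard. */
#include <trace/define_trace.h>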