Lines Matching +refs:region +refs:id +refs:attrs
35 static bool __damon_is_registered_ops(enum damon_ops_id id) in __damon_is_registered_ops() argument
39 if (!memcmp(&empty_ops, &damon_registered_ops[id], sizeof(empty_ops))) in __damon_is_registered_ops()
50 bool damon_is_registered_ops(enum damon_ops_id id) in damon_is_registered_ops() argument
54 if (id >= NR_DAMON_OPS) in damon_is_registered_ops()
57 registered = __damon_is_registered_ops(id); in damon_is_registered_ops()
75 if (ops->id >= NR_DAMON_OPS) in damon_register_ops()
79 if (__damon_is_registered_ops(ops->id)) { in damon_register_ops()
83 damon_registered_ops[ops->id] = *ops; in damon_register_ops()
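The registration path above amounts to a bounds check on the ops id, an emptiness check against a zeroed struct, and a copy into the damon_registered_ops table. A minimal caller-side sketch, assuming a hypothetical module init and an ops struct whose callbacks are filled in elsewhere (in practice each DAMON_OPS_* id is claimed by one built-in ops set):

    #include <linux/damon.h>
    #include <linux/module.h>

    /* Hypothetical ops set; only damon_register_ops() and the id range
     * check come from the listing above. */
    static struct damon_operations my_ops = {
            .id = DAMON_OPS_VADDR,          /* must be < NR_DAMON_OPS */
            /* .init, .update, .prepare_access_checks, .check_accesses, ... */
    };

    static int __init my_damon_init(void)
    {
            /* Returns -EINVAL in the current code if the id is out of
             * range or already registered. */
            return damon_register_ops(&my_ops);
    }
    module_init(my_damon_init);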
99 int damon_select_ops(struct damon_ctx *ctx, enum damon_ops_id id) in damon_select_ops() argument
103 if (id >= NR_DAMON_OPS) in damon_select_ops()
107 if (!__damon_is_registered_ops(id)) in damon_select_ops()
110 ctx->ops = damon_registered_ops[id]; in damon_select_ops()
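damon_select_ops() is the consumer side: it rejects out-of-range or unregistered ids and otherwise copies the registered ops into the context. A short usage sketch, assuming the vaddr ops set is registered (the helper name is hypothetical):

    #include <linux/damon.h>

    static struct damon_ctx *make_vaddr_ctx(void)
    {
            struct damon_ctx *ctx = damon_new_ctx();

            if (!ctx)
                    return NULL;
            /* Fails if DAMON_OPS_VADDR is out of range or nothing is
             * registered under it; on success ctx->ops is a copy. */
            if (damon_select_ops(ctx, DAMON_OPS_VADDR)) {
                    damon_destroy_ctx(ctx);
                    return NULL;
            }
            return ctx;
    }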
122 struct damon_region *region; in damon_new_region() local
124 region = kmem_cache_alloc(damon_region_cache, GFP_KERNEL); in damon_new_region()
125 if (!region) in damon_new_region()
128 region->ar.start = start; in damon_new_region()
129 region->ar.end = end; in damon_new_region()
130 region->nr_accesses = 0; in damon_new_region()
131 INIT_LIST_HEAD(&region->list); in damon_new_region()
133 region->age = 0; in damon_new_region()
134 region->last_nr_accesses = 0; in damon_new_region()
136 return region; in damon_new_region()
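damon_new_region() allocates from damon_region_cache and returns a region covering [start, end) with all counters zeroed and its list head initialized. A sketch of handing such a region to a target, assuming an existing damon_target; the helper name is made up, damon_add_region() is the core helper:

    #include <linux/damon.h>

    /* Hypothetical helper: seed target 't' with one region over [start, end). */
    static int add_initial_region(struct damon_target *t,
                                  unsigned long start, unsigned long end)
    {
            struct damon_region *r = damon_new_region(start, end);

            if (!r)
                    return -ENOMEM;
            /* nr_accesses, age and last_nr_accesses all start at zero. */
            damon_add_region(r, t);
            return 0;
    }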
426 ctx->attrs.sample_interval = 5 * 1000; in damon_new_ctx()
427 ctx->attrs.aggr_interval = 100 * 1000; in damon_new_ctx()
428 ctx->attrs.ops_update_interval = 60 * 1000 * 1000; in damon_new_ctx()
435 ctx->attrs.min_nr_regions = 10; in damon_new_ctx()
436 ctx->attrs.max_nr_regions = 1000; in damon_new_ctx()
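The damon_new_ctx() lines spell out the monitoring defaults: sample every 5 ms, aggregate every 100 ms, refresh the operations every 60 s (intervals are in microseconds), and keep between 10 and 1000 regions. Written out as a damon_attrs initializer:

    /* The defaults from damon_new_ctx(); all intervals are in microseconds. */
    struct damon_attrs default_attrs = {
            .sample_interval        = 5 * 1000,             /* 5 ms */
            .aggr_interval          = 100 * 1000,           /* 100 ms */
            .ops_update_interval    = 60 * 1000 * 1000,     /* 60 s */
            .min_nr_regions         = 10,
            .max_nr_regions         = 1000,
    };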
477 unsigned int accesses_bp, struct damon_attrs *attrs) in damon_accesses_bp_to_nr_accesses() argument
480 attrs->aggr_interval / attrs->sample_interval; in damon_accesses_bp_to_nr_accesses()
487 unsigned int nr_accesses, struct damon_attrs *attrs) in damon_nr_accesses_to_accesses_bp() argument
490 attrs->aggr_interval / attrs->sample_interval; in damon_nr_accesses_to_accesses_bp()
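Both conversion helpers share the ratio shown in the matched lines: aggr_interval / sample_interval is the maximum number of samples, and therefore observed accesses, per aggregation window. A hypothetical round-trip demo, assuming accesses_bp is that count expressed in basis points (the 10000 scale factor is not in the matched lines):

    #include <linux/damon.h>

    static unsigned int demo_bp_round_trip(struct damon_attrs *attrs,
                                           unsigned int accesses_bp)
    {
            unsigned int max_nr_accesses =
                    attrs->aggr_interval / attrs->sample_interval;
            unsigned int nr_accesses = accesses_bp * max_nr_accesses / 10000;

            /* With the 5 ms / 100 ms defaults max_nr_accesses is 20, so
             * 10000 bp -> 20 accesses -> 10000 bp again. */
            return nr_accesses * 10000 / max_nr_accesses;
    }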
523 struct damon_attrs *old_attrs = &ctx->attrs; in damon_update_monitoring_results()
549 int damon_set_attrs(struct damon_ctx *ctx, struct damon_attrs *attrs) in damon_set_attrs() argument
551 if (attrs->min_nr_regions < 3) in damon_set_attrs()
553 if (attrs->min_nr_regions > attrs->max_nr_regions) in damon_set_attrs()
555 if (attrs->sample_interval > attrs->aggr_interval) in damon_set_attrs()
558 damon_update_monitoring_results(ctx, attrs); in damon_set_attrs()
559 ctx->attrs = *attrs; in damon_set_attrs()
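damon_set_attrs() applies three sanity checks before anything else: at least 3 minimum regions, min_nr_regions <= max_nr_regions, and sample_interval <= aggr_interval; it then rescales existing results via damon_update_monitoring_results() and copies the new attrs in. A sketch of a valid update (values arbitrary, helper name hypothetical):

    #include <linux/damon.h>

    static int tune_ctx(struct damon_ctx *ctx)
    {
            struct damon_attrs attrs = {
                    .sample_interval        = 10 * 1000,            /* 10 ms */
                    .aggr_interval          = 200 * 1000,           /* 200 ms */
                    .ops_update_interval    = 60 * 1000 * 1000,     /* 60 s */
                    .min_nr_regions         = 10,
                    .max_nr_regions         = 500,
            };

            /* Violating any of the three checks yields -EINVAL in the
             * current code; this combination passes them all. */
            return damon_set_attrs(ctx, &attrs);
    }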
610 if (ctx->attrs.min_nr_regions) in damon_region_sz_limit()
611 sz /= ctx->attrs.min_nr_regions; in damon_region_sz_limit()
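damon_region_sz_limit() divides the total monitored size by min_nr_regions (guarding against a zero divisor), so each region is expected to stay around total/min_nr_regions bytes. A condensed sketch of that arithmetic; the final clamp to DAMON_MIN_REGION is from the surrounding core.c code, not the matched lines:

    #include <linux/damon.h>

    static unsigned long region_sz_limit_sketch(unsigned long total_sz,
                                                struct damon_attrs *attrs)
    {
            unsigned long sz = total_sz;

            if (attrs->min_nr_regions)
                    sz /= attrs->min_nr_regions;
            if (sz < DAMON_MIN_REGION)
                    sz = DAMON_MIN_REGION;
            return sz;
    }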
760 ctx->attrs.aggr_interval); in kdamond_aggregate_interval_passed()
1229 if (nr_regions > ctx->attrs.max_nr_regions / 2) in kdamond_split_regions()
1234 nr_regions < ctx->attrs.max_nr_regions / 3) in kdamond_split_regions()
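kdamond_split_regions() treats max_nr_regions as a budget: it stops splitting once the current region count exceeds half the cap, and splits each region into three pieces instead of two while the count is still below a third of the cap (in the current code this also requires the count to be unchanged since the previous pass, which the matched lines do not show). A hypothetical condensation of that decision:

    #include <linux/damon.h>

    static int split_factor(unsigned int nr_regions,
                            unsigned int last_nr_regions,
                            const struct damon_attrs *attrs)
    {
            if (nr_regions > attrs->max_nr_regions / 2)
                    return 1;       /* do not split further */
            if (nr_regions == last_nr_regions &&
                nr_regions < attrs->max_nr_regions / 3)
                    return 3;       /* probe the middle of stagnant regions */
            return 2;
    }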
1252 ctx->attrs.ops_update_interval); in kdamond_need_update_operations()
1396 kdamond_usleep(ctx->attrs.sample_interval); in kdamond_fn()
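The remaining matches are the kdamond main loop's pacing: one kdamond_usleep(sample_interval) per iteration, with aggregation and operations updates gated on aggr_interval and ops_update_interval respectively. A heavily condensed, hypothetical skeleton; only kdamond_usleep() and the attrs fields are taken from the listing, the rest is paraphrased in comments:

    static void kdamond_loop_sketch(struct damon_ctx *ctx)
    {
            while (1) {     /* until the kdamond is asked to stop */
                    kdamond_usleep(ctx->attrs.sample_interval);
                    /* ops->prepare_access_checks / ops->check_accesses here */
                    /* once attrs.aggr_interval has passed: aggregate results,
                     * then merge and (budget permitting) split regions */
                    /* once attrs.ops_update_interval has passed: let the ops
                     * refresh their view of the monitoring targets */
            }
    }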