Lines Matching refs:cache_map
36 struct cache_map {
79 static struct cache_map init_cache_map[CACHE_MAP_MAX] __initdata;
80 static struct cache_map *cache_map __refdata = init_cache_map;
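
Taken together, the declarations above set up the map's backing store: entries
live in a fixed-size __initdata bootstrap array, and cache_map is a pointer so
it can later be re-pointed at a heap copy (see the mtrr_copy_map() fragments
further down). A minimal user-space sketch of that layout with only the fields
the later fragments touch; CACHE_MAP_MAX's value and the field widths are
assumptions, and the kernel-only __initdata/__refdata annotations are dropped:

    #include <stdint.h>

    #define CACHE_MAP_MAX 256               /* assumed bootstrap capacity */

    struct cache_map {
        uint64_t start;                     /* first byte of the range */
        uint64_t end;                       /* first byte after the range */
        uint8_t  type;                      /* cache type of the range */
        uint8_t  fixed;                     /* entry must not be merged/rewritten */
    };

    static struct cache_map init_cache_map[CACHE_MAP_MAX];
    static struct cache_map *cache_map = init_cache_map;
    static unsigned int cache_map_n;        /* number of entries in use */

Keeping end exclusive ("first byte after the range") turns the adjacency tests
in the helpers below into plain equalities. The later sketches in this listing
reuse these declarations.
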
165 memmove(cache_map + idx, cache_map + idx + 1, in rm_map_entry_at()
166 sizeof(*cache_map) * (cache_map_n - idx)); in rm_map_entry_at()
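
The memmove at lines 165-166 is the usual delete-and-compact step for a sorted
array. A sketch, continuing the declarations above and assuming cache_map_n has
already been decremented so that (cache_map_n - idx) counts the entries that
trail idx:

    #include <string.h>

    /* Drop entry idx and slide the entries behind it down one slot. */
    static void rm_map_entry_at(int idx)
    {
        cache_map_n--;
        if (cache_map_n > (unsigned int)idx)
            memmove(cache_map + idx, cache_map + idx + 1,
                    sizeof(*cache_map) * (cache_map_n - idx));
    }
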
188 struct cache_map *prev = cache_map + idx - 1; in add_map_entry_at()
195 struct cache_map *next = cache_map + idx; in add_map_entry_at()
202 cache_map[idx - 1].end = cache_map[idx].end; in add_map_entry_at()
207 cache_map[idx - 1].end = end; in add_map_entry_at()
211 cache_map[idx].start = start; in add_map_entry_at()
223 memmove(cache_map + idx + 1, cache_map + idx, in add_map_entry_at()
224 sizeof(*cache_map) * (cache_map_n - idx)); in add_map_entry_at()
227 cache_map[idx].start = start; in add_map_entry_at()
228 cache_map[idx].end = end; in add_map_entry_at()
229 cache_map[idx].type = type; in add_map_entry_at()
230 cache_map[idx].fixed = 0; in add_map_entry_at()
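
Read together, the add_map_entry_at() fragments show three outcomes for a new
range: merge it into the previous entry (lines 202 and 207), merge it into the
next entry (line 211), or open a slot with a memmove and fill in a fresh entry
(lines 223-230). A sketch of that shape, continuing the declarations above.
The merge conditions (touching boundary, same type, neighbour not fixed) are
inferred from the prev/next fragments at lines 188 and 195, the overflow check
is a placeholder, and the return value (how far the new range's type now
extends) is a device of this sketch, since the fragments do not show the real
return convention:

    /* Insert [start, end) with the given type at slot idx, merging with a
     * touching neighbour of the same type where possible.  Returns the end
     * of the map coverage that now carries this type, so the caller can
     * skip anything a merge already absorbed. */
    static uint64_t add_map_entry_at(uint64_t start, uint64_t end,
                                     uint8_t type, unsigned int idx)
    {
        int merge_prev = 0, merge_next = 0;
        uint64_t covered;

        if (start >= end)                       /* ignore empty ranges */
            return end;

        if (idx > 0) {
            struct cache_map *prev = cache_map + idx - 1;

            merge_prev = !prev->fixed && prev->end == start &&
                         prev->type == type;
        }
        if (idx < cache_map_n) {
            struct cache_map *next = cache_map + idx;

            merge_next = !next->fixed && next->start == end &&
                         next->type == type;
        }

        if (merge_prev && merge_next) {
            /* New range bridges its neighbours: extend the previous entry
             * over the next one and drop the next one. */
            covered = cache_map[idx].end;
            cache_map[idx - 1].end = covered;
            rm_map_entry_at(idx);
            return covered;
        }
        if (merge_prev) {
            cache_map[idx - 1].end = end;       /* grow the previous entry */
            return end;
        }
        if (merge_next) {
            cache_map[idx].start = start;       /* grow the next entry down */
            return cache_map[idx].end;
        }

        if (cache_map_n == CACHE_MAP_MAX)       /* sketch-only bound check */
            return end;

        /* Open a slot at idx and fill in the new entry. */
        if (cache_map_n > idx)
            memmove(cache_map + idx + 1, cache_map + idx,
                    sizeof(*cache_map) * (cache_map_n - idx));
        cache_map[idx].start = start;
        cache_map[idx].end   = end;
        cache_map[idx].type  = type;
        cache_map[idx].fixed = 0;
        cache_map_n++;

        return end;
    }
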
239 int ret = start != cache_map[idx].start; in clr_map_range_at()
242 if (start == cache_map[idx].start && end == cache_map[idx].end) { in clr_map_range_at()
244 } else if (start == cache_map[idx].start) { in clr_map_range_at()
245 cache_map[idx].start = end; in clr_map_range_at()
246 } else if (end == cache_map[idx].end) { in clr_map_range_at()
247 cache_map[idx].end = start; in clr_map_range_at()
249 tmp = cache_map[idx].end; in clr_map_range_at()
250 cache_map[idx].end = start; in clr_map_range_at()
251 add_map_entry_at(end, tmp, cache_map[idx].type, idx + 1); in clr_map_range_at()
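
The clr_map_range_at() fragments at lines 239-251 are essentially the whole
routine: clearing [start, end) out of entry idx either removes the entry, trims
its head, trims its tail, or splits it in two, and the value computed at line
239 records whether the front of the entry survived (so a replacement range
belongs one slot later). Reassembled with comments, continuing the sketches
above:

    /* Clear [start, end) out of entry idx; returns 1 when the front part of
     * the entry is still in place, 0 when the entry's old start went away. */
    static int clr_map_range_at(uint64_t start, uint64_t end, unsigned int idx)
    {
        int ret = start != cache_map[idx].start;
        uint64_t tmp;

        if (start == cache_map[idx].start && end == cache_map[idx].end) {
            rm_map_entry_at(idx);               /* range covers the whole entry */
        } else if (start == cache_map[idx].start) {
            cache_map[idx].start = end;         /* trim the head */
        } else if (end == cache_map[idx].end) {
            cache_map[idx].end = start;         /* trim the tail */
        } else {
            tmp = cache_map[idx].end;           /* split: shrink the front part, */
            cache_map[idx].end = start;         /* then re-add the back part     */
            add_map_entry_at(end, tmp, cache_map[idx].type, idx + 1);
        }

        return ret;
    }
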
269 if (start >= cache_map[i].end) in add_map_entry()
272 if (start < cache_map[i].start) { in add_map_entry()
274 tmp = min(end, cache_map[i].start); in add_map_entry()
280 new_type = get_effective_type(type, cache_map[i].type); in add_map_entry()
281 old_type = cache_map[i].type; in add_map_entry()
283 if (cache_map[i].fixed || new_type == old_type) { in add_map_entry()
285 start = cache_map[i].end; in add_map_entry()
290 tmp = min(end, cache_map[i].end); in add_map_entry()
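
The add_map_entry() fragments outline the merge loop that folds one more region
into the sorted map: entries entirely below the region are skipped (line 269),
a gap in front of an entry is filled first (lines 272-274), fixed entries and
entries whose effective type would not change are stepped over (lines 280-285),
and only the overlapping part of a differing entry is cleared and re-added
(line 290). A simplified, continuing sketch in which the new type simply wins;
the real code combines old and new types through get_effective_type() (line
280), and the index bookkeeping around the helpers is this sketch's own:

    /* Fold [start, end) with the given type into the sorted map. */
    static void add_map_entry(uint64_t start, uint64_t end, uint8_t type)
    {
        uint64_t tmp;
        int i;

        for (i = 0; i < (int)cache_map_n && start < end; i++) {
            if (start >= cache_map[i].end)
                continue;                   /* entry lies below the range */

            if (start < cache_map[i].start) {
                /* Gap in front of this entry: add the piece up to the
                 * entry, then look at the same slot again (the insert may
                 * have shifted or merged entries). */
                tmp = end < cache_map[i].start ? end : cache_map[i].start;
                start = add_map_entry_at(start, tmp, type, i);
                i--;
                continue;
            }

            if (cache_map[i].fixed || cache_map[i].type == type) {
                /* Entry must not change or already matches: step over it. */
                start = cache_map[i].end;
                continue;
            }

            /* Clear and re-add only the part overlapping this entry. */
            tmp = end < cache_map[i].end ? end : cache_map[i].end;
            i += clr_map_range_at(start, tmp, i);
            start = add_map_entry_at(start, tmp, type, i);
            i--;                            /* re-inspect this slot as well */
        }

        /* Whatever is left lies above every existing entry. */
        add_map_entry_at(start, end, type, i);
    }

Under this sketch, adding [0x0, 0x100000) as write-back and then
[0xA0000, 0xC0000) as uncachable leaves three entries (write-back up to
0xA0000, uncachable up to 0xC0000, write-back again up to 0x100000), which is
the split-and-reinsert behaviour the clr_map_range_at() fragments describe.
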
314 cache_map[cache_map_n - 1].fixed = 1; in map_add_var()
379 cache_map[i].fixed = 1; in mtrr_build_map()
391 cache_map[i].start, cache_map[i].end - 1, in mtrr_build_map()
392 mtrr_attrib_to_str(cache_map[i].type)); in mtrr_build_map()
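
The mtrr_build_map() fragments mark the entries built so far as fixed (line
379) and then log each entry with an inclusive end address and a printable type
name via mtrr_attrib_to_str() (lines 391-392). A small continuing sketch of
such a dump; the name table here is only a stand-in for mtrr_attrib_to_str()
and simply follows the standard MTRR type encoding:

    #include <stdio.h>

    static const char *type_str(uint8_t type)
    {
        static const char *const names[] = {
            "uncachable", "write-combining", "?", "?",
            "write-through", "write-protect", "write-back",
        };
        return type < 7 ? names[type] : "?";
    }

    /* Dump the map, printing end - 1 so each range reads inclusively. */
    static void dump_map(void)
    {
        unsigned int i;

        for (i = 0; i < cache_map_n; i++)
            printf("%#018llx-%#018llx %s\n",
                   (unsigned long long)cache_map[i].start,
                   (unsigned long long)cache_map[i].end - 1,
                   type_str(cache_map[i].type));
    }
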
403 cache_map = NULL; in mtrr_copy_map()
409 cache_map = kcalloc(new_size, sizeof(*cache_map), GFP_KERNEL); in mtrr_copy_map()
410 if (cache_map) { in mtrr_copy_map()
411 memmove(cache_map, init_cache_map, in mtrr_copy_map()
412 cache_map_n * sizeof(*cache_map)); in mtrr_copy_map()
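
The mtrr_copy_map() fragments show the hand-off from the __initdata bootstrap
array to a long-lived buffer: either the map pointer is dropped entirely (line
403; the condition for that path is not visible here) or new_size entries are
allocated with kcalloc and the entries built so far are copied over from
init_cache_map (lines 409-412). A user-space sketch with calloc standing in for
kcalloc; taking new_size as a parameter and returning an int success flag are
choices of the sketch:

    #include <stdlib.h>

    /* Re-point cache_map at a heap copy of new_size entries (or drop it). */
    static int mtrr_copy_map(unsigned int new_size)
    {
        struct cache_map *new_map;

        if (!new_size) {
            cache_map = NULL;               /* no runtime map wanted */
            return 1;
        }

        new_map = calloc(new_size, sizeof(*new_map));
        if (!new_map)
            return 0;

        /* Carry over the entries already built in the bootstrap array. */
        memmove(new_map, init_cache_map, cache_map_n * sizeof(*new_map));
        cache_map = new_map;
        return 1;
    }
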
521 if (start >= cache_map[i].end) in mtrr_type_lookup()
525 if (start < cache_map[i].start) { in mtrr_type_lookup()
529 if (end <= cache_map[i].start) in mtrr_type_lookup()
534 type = type_merge(type, cache_map[i].type, uniform); in mtrr_type_lookup()
536 start = cache_map[i].end; in mtrr_type_lookup()
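
The mtrr_type_lookup() fragments walk the same sorted map to answer queries:
entries below the queried range are skipped (line 521), a range that starts or
ends in a hole of the map is noticed (lines 525 and 529), and the types of all
overlapping entries are folded together with type_merge() while start is
advanced entry by entry (lines 534 and 536). A continuing sketch with a
simplified type_merge(); the hole handling (the real code falls back to a
default memory type there) and the MTRR_TYPE_INVALID sentinel are assumptions
of the sketch:

    #define MTRR_TYPE_INVALID 0xff          /* "no type seen yet" sentinel */

    /* Fold the type of the next overlapping piece into the running result;
     * clears *uniform when two pieces disagree. */
    static uint8_t type_merge(uint8_t type, uint8_t new_type, uint8_t *uniform)
    {
        if (type == MTRR_TYPE_INVALID)
            return new_type;
        if (type != new_type)
            *uniform = 0;
        return type;
    }

    /* Report the cache type of [start, end) and whether it is uniform. */
    static uint8_t mtrr_type_lookup(uint64_t start, uint64_t end,
                                    uint8_t *uniform)
    {
        uint8_t type = MTRR_TYPE_INVALID;
        unsigned int i;

        *uniform = 1;

        for (i = 0; i < cache_map_n && start < end; i++) {
            if (start >= cache_map[i].end)
                continue;                   /* entry lies below the range */

            if (start < cache_map[i].start) {
                /* The range starts in a hole of the map; this sketch only
                 * records the mismatch instead of merging a default type. */
                *uniform = 0;
                if (end <= cache_map[i].start)
                    break;                  /* it also ends inside the hole */
            }

            /* Fold in this entry's type and continue above it. */
            type = type_merge(type, cache_map[i].type, uniform);
            start = cache_map[i].end;
        }

        if (start < end)
            *uniform = 0;                   /* tail of the range is uncovered */

        return type;
    }
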