Lines matching the identifier "mi" (full matches), each shown with the function it occurs in:
126 struct numa_meminfo *mi) in numa_add_memblk_to() argument
139 if (mi->nr_blks >= NR_NODE_MEMBLKS) { in numa_add_memblk_to()
144 mi->blk[mi->nr_blks].start = start; in numa_add_memblk_to()
145 mi->blk[mi->nr_blks].end = end; in numa_add_memblk_to()
146 mi->blk[mi->nr_blks].nid = nid; in numa_add_memblk_to()
147 mi->nr_blks++; in numa_add_memblk_to()
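
The matches above show the append path: after a capacity check against NR_NODE_MEMBLKS, the new range is written at index nr_blks and the counter is bumped. Below is a minimal standalone C sketch of that bookkeeping; the struct layouts, the capacity value, and the error handling are assumptions inferred from the matched lines, not the exact upstream definitions.

#include <stdio.h>

#define NR_NODE_MEMBLKS	128		/* capacity value assumed for this sketch */
#define NUMA_NO_NODE	(-1)

struct numa_memblk {
	unsigned long long start;	/* physical range is [start, end) */
	unsigned long long end;
	int nid;			/* owning NUMA node */
};

struct numa_meminfo {
	int nr_blks;			/* number of valid entries in blk[] */
	struct numa_memblk blk[NR_NODE_MEMBLKS];
};

/* Append one block, mirroring the mi->blk[mi->nr_blks] pattern above. */
static int add_memblk_to(int nid, unsigned long long start,
			 unsigned long long end, struct numa_meminfo *mi)
{
	if (mi->nr_blks >= NR_NODE_MEMBLKS) {
		fprintf(stderr, "too many memblk ranges\n");
		return -1;
	}

	mi->blk[mi->nr_blks].start = start;
	mi->blk[mi->nr_blks].end = end;
	mi->blk[mi->nr_blks].nid = nid;
	mi->nr_blks++;
	return 0;
}

int main(void)
{
	struct numa_meminfo mi = { 0 };

	add_memblk_to(0, 0x0ULL, 0x80000000ULL, &mi);
	add_memblk_to(1, 0x80000000ULL, 0x100000000ULL, &mi);
	printf("nr_blks = %d\n", mi.nr_blks);
	return 0;
}
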
154 * @mi: numa_meminfo to remove memblk from
156 * Remove @idx'th numa_memblk from @mi by shifting @mi->blk[] and
157 * decrementing @mi->nr_blks.
159 void __init numa_remove_memblk_from(int idx, struct numa_meminfo *mi) in numa_remove_memblk_from() argument
161 mi->nr_blks--; in numa_remove_memblk_from()
162 memmove(&mi->blk[idx], &mi->blk[idx + 1], in numa_remove_memblk_from()
163 (mi->nr_blks - idx) * sizeof(mi->blk[0])); in numa_remove_memblk_from()
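
Removal works by sliding the tail of blk[] down over the vacated slot. The sketch below mirrors the memmove() in the matched lines; the struct layout and capacity are the same assumed ones as in the previous sketch.

#include <string.h>

struct numa_memblk {
	unsigned long long start, end;
	int nid;
};

struct numa_meminfo {
	int nr_blks;
	struct numa_memblk blk[128];	/* capacity assumed */
};

/*
 * Drop entry @idx: nr_blks is decremented first, so (nr_blks - idx) is
 * exactly the number of entries after @idx that must move down by one.
 */
void remove_memblk_from(int idx, struct numa_meminfo *mi)
{
	mi->nr_blks--;
	memmove(&mi->blk[idx], &mi->blk[idx + 1],
		(mi->nr_blks - idx) * sizeof(mi->blk[0]));
}

Keeping blk[] dense this way lets every caller simply iterate over indices 0..nr_blks-1 without checking for holes.
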
230 * @mi: numa_meminfo to clean up
232 * Sanitize @mi by merging and removing unnecessary memblks. Also check for
238 int __init numa_cleanup_meminfo(struct numa_meminfo *mi) in numa_cleanup_meminfo() argument
245 for (i = 0; i < mi->nr_blks; i++) { in numa_cleanup_meminfo()
246 struct numa_memblk *bi = &mi->blk[i]; in numa_cleanup_meminfo()
251 numa_move_tail_memblk(&numa_reserved_meminfo, i--, mi); in numa_cleanup_meminfo()
267 numa_remove_memblk_from(i--, mi); in numa_cleanup_meminfo()
271 for (i = 0; i < mi->nr_blks; i++) { in numa_cleanup_meminfo()
272 struct numa_memblk *bi = &mi->blk[i]; in numa_cleanup_meminfo()
274 for (j = i + 1; j < mi->nr_blks; j++) { in numa_cleanup_meminfo()
275 struct numa_memblk *bj = &mi->blk[j]; in numa_cleanup_meminfo()
304 for (k = 0; k < mi->nr_blks; k++) { in numa_cleanup_meminfo()
305 struct numa_memblk *bk = &mi->blk[k]; in numa_cleanup_meminfo()
312 if (k < mi->nr_blks) in numa_cleanup_meminfo()
319 numa_remove_memblk_from(j--, mi); in numa_cleanup_meminfo()
324 for (i = mi->nr_blks; i < ARRAY_SIZE(mi->blk); i++) { in numa_cleanup_meminfo()
325 mi->blk[i].start = mi->blk[i].end = 0; in numa_cleanup_meminfo()
326 mi->blk[i].nid = NUMA_NO_NODE; in numa_cleanup_meminfo()
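
The matched lines show the cleanup skeleton (the i/j/k loops and the final reset loop) but not the merge condition itself. The sketch below fills that gap with an assumed rule: join blocks belonging to the same node as long as no other node's block intersects the joined span, then clear the unused tail entries as the last loop does. Treat the merge condition as an illustration, not the exact upstream logic.

#include <string.h>

#define NR_NODE_MEMBLKS	128		/* capacity assumed */
#define NUMA_NO_NODE	(-1)

struct numa_memblk {
	unsigned long long start, end;
	int nid;
};

struct numa_meminfo {
	int nr_blks;
	struct numa_memblk blk[NR_NODE_MEMBLKS];
};

void remove_memblk_from(int idx, struct numa_meminfo *mi)
{
	mi->nr_blks--;
	memmove(&mi->blk[idx], &mi->blk[idx + 1],
		(mi->nr_blks - idx) * sizeof(mi->blk[0]));
}

/* Merge same-node blocks, then reset the unused tail of blk[]. */
void cleanup_meminfo(struct numa_meminfo *mi)
{
	int i, j, k;

	for (i = 0; i < mi->nr_blks; i++) {
		struct numa_memblk *bi = &mi->blk[i];

		for (j = i + 1; j < mi->nr_blks; j++) {
			struct numa_memblk *bj = &mi->blk[j];
			unsigned long long start, end;

			/* only consider blocks belonging to the same node */
			if (bi->nid != bj->nid)
				continue;

			start = bi->start < bj->start ? bi->start : bj->start;
			end = bi->end > bj->end ? bi->end : bj->end;

			/* skip if another node's block falls inside the joined span */
			for (k = 0; k < mi->nr_blks; k++) {
				struct numa_memblk *bk = &mi->blk[k];

				if (bi->nid == bk->nid)
					continue;
				if (start < bk->end && end > bk->start)
					break;
			}
			if (k < mi->nr_blks)
				continue;

			/* grow bi to cover both, drop bj, and revisit index j */
			bi->start = start;
			bi->end = end;
			remove_memblk_from(j--, mi);
		}
	}

	/* reset whatever is left past nr_blks, as the final loop above does */
	for (i = mi->nr_blks; i < NR_NODE_MEMBLKS; i++) {
		mi->blk[i].start = mi->blk[i].end = 0;
		mi->blk[i].nid = NUMA_NO_NODE;
	}
}
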
333 * Set nodes, which have memory in @mi, in *@nodemask.
336 const struct numa_meminfo *mi) in numa_nodemask_from_meminfo() argument
340 for (i = 0; i < ARRAY_SIZE(mi->blk); i++) in numa_nodemask_from_meminfo()
341 if (mi->blk[i].start != mi->blk[i].end && in numa_nodemask_from_meminfo()
342 mi->blk[i].nid != NUMA_NO_NODE) in numa_nodemask_from_meminfo()
343 node_set(mi->blk[i].nid, *nodemask); in numa_nodemask_from_meminfo()
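
The helper above walks the whole blk[] array (not just the first nr_blks entries) and marks every node that owns a non-empty block. A standalone sketch follows, with a plain 64-bit mask standing in for nodemask_t; that substitution, the array size, and the nid < 64 limit are assumptions of the sketch.

#include <stddef.h>
#include <stdint.h>

#define NUMA_NO_NODE	(-1)

struct numa_memblk {
	unsigned long long start, end;
	int nid;
};

struct numa_meminfo {
	int nr_blks;
	struct numa_memblk blk[128];	/* capacity assumed */
};

/* Return a mask with one bit set per node that has memory in @mi. */
uint64_t nodemask_from_meminfo(const struct numa_meminfo *mi)
{
	uint64_t mask = 0;
	size_t i;

	for (i = 0; i < sizeof(mi->blk) / sizeof(mi->blk[0]); i++)
		if (mi->blk[i].start != mi->blk[i].end &&
		    mi->blk[i].nid != NUMA_NO_NODE)
			mask |= 1ULL << mi->blk[i].nid;	/* assumes nid < 64 */

	return mask;
}
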
454 static bool __init numa_meminfo_cover_memory(const struct numa_meminfo *mi) in numa_meminfo_cover_memory() argument
460 for (i = 0; i < mi->nr_blks; i++) { in numa_meminfo_cover_memory()
461 u64 s = mi->blk[i].start >> PAGE_SHIFT; in numa_meminfo_cover_memory()
462 u64 e = mi->blk[i].end >> PAGE_SHIFT; in numa_meminfo_cover_memory()
464 numaram -= __absent_pages_in_range(mi->blk[i].nid, s, e); in numa_meminfo_cover_memory()
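
The coverage check converts each block to page-frame numbers and subtracts the pages that __absent_pages_in_range() reports as holes, to verify the meminfo accounts for (almost) all RAM. The sketch below keeps only the arithmetic visible in the matched lines; the hole callback stand-in and the tolerance of roughly 1 MiB worth of pages are assumptions.

#include <stdbool.h>

#define PAGE_SHIFT	12

struct numa_memblk {
	unsigned long long start, end;
	int nid;
};

struct numa_meminfo {
	int nr_blks;
	struct numa_memblk blk[128];	/* capacity assumed */
};

/* Callback type standing in for __absent_pages_in_range(). */
typedef unsigned long long (*absent_pages_fn)(int nid,
					       unsigned long long start_pfn,
					       unsigned long long end_pfn);

/*
 * Return true when the blocks in @mi cover @system_pages page frames,
 * give or take a small tolerance (value assumed here).
 */
bool meminfo_cover_memory(const struct numa_meminfo *mi,
			  unsigned long long system_pages,
			  absent_pages_fn absent)
{
	unsigned long long numaram = 0;
	int i;

	for (i = 0; i < mi->nr_blks; i++) {
		unsigned long long s = mi->blk[i].start >> PAGE_SHIFT;
		unsigned long long e = mi->blk[i].end >> PAGE_SHIFT;

		numaram += e - s;
		numaram -= absent(mi->blk[i].nid, s, e);	/* drop holes */
		if ((long long)numaram < 0)			/* clamp on underflow */
			numaram = 0;
	}

	/* allow a small amount of memory to be unaccounted for */
	return numaram + (1ULL << (20 - PAGE_SHIFT)) >= system_pages;
}
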
547 static int __init numa_register_memblks(struct numa_meminfo *mi) in numa_register_memblks() argument
553 numa_nodemask_from_meminfo(&node_possible_map, mi); in numa_register_memblks()
557 for (i = 0; i < mi->nr_blks; i++) { in numa_register_memblks()
558 struct numa_memblk *mb = &mi->blk[i]; in numa_register_memblks()
586 if (!numa_meminfo_cover_memory(mi)) in numa_register_memblks()
594 for (i = 0; i < mi->nr_blks; i++) { in numa_register_memblks()
595 if (nid != mi->blk[i].nid) in numa_register_memblks()
597 start = min(mi->blk[i].start, start); in numa_register_memblks()
598 end = max(mi->blk[i].end, end); in numa_register_memblks()
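
During registration each node's overall extent is derived by accumulating min(start)/max(end) over that node's blocks, as the min()/max() lines above show. A standalone sketch of that accumulation follows; the helper name, the ULLONG_MAX seed, and the return convention are assumptions.

#include <limits.h>
#include <stdbool.h>

struct numa_memblk {
	unsigned long long start, end;
	int nid;
};

struct numa_meminfo {
	int nr_blks;
	struct numa_memblk blk[128];	/* capacity assumed */
};

/* Compute the overall [start, end) span of node @nid's blocks in @mi. */
bool node_span(const struct numa_meminfo *mi, int nid,
	       unsigned long long *startp, unsigned long long *endp)
{
	unsigned long long start = ULLONG_MAX, end = 0;
	int i;

	for (i = 0; i < mi->nr_blks; i++) {
		if (nid != mi->blk[i].nid)
			continue;
		if (mi->blk[i].start < start)
			start = mi->blk[i].start;
		if (mi->blk[i].end > end)
			end = mi->blk[i].end;
	}

	if (start >= end)
		return false;	/* node has no memory in @mi */

	*startp = start;
	*endp = end;
	return true;
}
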
930 static int meminfo_to_nid(struct numa_meminfo *mi, u64 start) in meminfo_to_nid() argument
934 for (i = 0; i < mi->nr_blks; i++) in meminfo_to_nid()
935 if (mi->blk[i].start <= start && mi->blk[i].end > start) in meminfo_to_nid()
936 return mi->blk[i].nid; in meminfo_to_nid()
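
The lookup treats every block as a half-open [start, end) range and returns the node of the first block containing the address. A standalone sketch, with the fallback return value assumed:

#define NUMA_NO_NODE	(-1)

struct numa_memblk {
	unsigned long long start, end;
	int nid;
};

struct numa_meminfo {
	int nr_blks;
	struct numa_memblk blk[128];	/* capacity assumed */
};

/* Map a physical address to its node, or NUMA_NO_NODE if uncovered. */
int meminfo_to_nid(const struct numa_meminfo *mi, unsigned long long start)
{
	int i;

	for (i = 0; i < mi->nr_blks; i++)
		if (mi->blk[i].start <= start && mi->blk[i].end > start)
			return mi->blk[i].nid;

	return NUMA_NO_NODE;
}
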