Lines Matching +full:memory +full:- +full:region

1 /* SPDX-License-Identifier: GPL-2.0-or-later */
6 * Logical memory blocks.
28 * enum memblock_flags - definition of memory region attributes
30 * @MEMBLOCK_HOTPLUG: memory region indicated in the firmware-provided memory
31 * map during early boot as hot(un)pluggable system RAM (e.g., memory range
33 * commandline, try keeping this memory region hotunpluggable. Does not apply
35 * @MEMBLOCK_MIRROR: mirrored region
37 * reserved in the memory map; refer to memblock_mark_nomap() description
39 * @MEMBLOCK_DRIVER_MANAGED: memory region that is always detected and added
40 * via a driver, and never indicated in the firmware-provided memory map as
46 MEMBLOCK_HOTPLUG = 0x1, /* hotpluggable region */
47 MEMBLOCK_MIRROR = 0x2, /* mirrored region */
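These flags are normally applied early in boot through the marking helpers (memblock_mark_hotplug(), memblock_mark_mirror(), memblock_mark_nomap()) declared further down in this header. A minimal sketch, with made-up addresses and an illustrative function name:

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/sizes.h>

/* Illustrative only: the addresses do not refer to any real platform. */
static int __init mark_example_ranges(void)
{
        /* keep this 1G range hot(un)pluggable, as reported by firmware */
        memblock_mark_hotplug(0x100000000ULL, SZ_1G);

        /* treat this 256M range as mirrored, i.e. more reliable, RAM */
        memblock_mark_mirror(0x80000000ULL, SZ_256M);

        return 0;
}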
53 * struct memblock_region - represents a memory region
54 * @base: base address of the region
55 * @size: size of the region
56 * @flags: memory region attributes
69 * struct memblock_type - collection of memory regions of certain type
74 * @name: the memory type symbolic name
85 * struct memblock - memblock allocator metadata
88 * @memory: usable memory regions
89 * @reserved: reserved memory regions
94 struct memblock_type memory; member
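Both region arrays hang off the single global memblock instance declared in this header. A small sketch (the function name is illustrative) that reports the totals the allocator keeps for them:

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/printk.h>

static void __init report_memblock_totals(void)
{
        phys_addr_t mem = memblock_phys_mem_size();
        phys_addr_t rsv = memblock_reserved_size();

        /* cnt counts the entries in each regions[] array */
        pr_info("memblock: %pa usable, %pa reserved, %lu/%lu regions\n",
                &mem, &rsv, memblock.memory.cnt, memblock.reserved.cnt);
}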
159 * for_each_physmem_range - iterate through physmem areas not included in type.
172 * __for_each_mem_range - iterate through memblock areas from type_a and not
178 * @flags: pick from blocks based on memory attributes
192 * __for_each_mem_range_rev - reverse iterate through memblock areas from
198 * @flags: pick from blocks based on memory attributes
213 * for_each_mem_range - iterate through memory areas.
219 __for_each_mem_range(i, &memblock.memory, NULL, NUMA_NO_NODE, \
224 * for_each_mem_range_rev - reverse iterate through memblock areas from
231 __for_each_mem_range_rev(i, &memblock.memory, NULL, NUMA_NO_NODE, \
236 * for_each_reserved_mem_range - iterate over all reserved memblock areas
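In practice most callers use these wrappers rather than the underscored iterators. A sketch (function name illustrative) that walks both tables:

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/printk.h>

static void __init dump_memblock_ranges(void)
{
        phys_addr_t start, end;
        u64 i;

        /* usable [start, end) ranges registered in memblock.memory */
        for_each_mem_range(i, &start, &end)
                pr_info("memory:   %pa..%pa\n", &start, &end);

        /* [start, end) ranges registered in memblock.reserved */
        for_each_reserved_mem_range(i, &start, &end)
                pr_info("reserved: %pa..%pa\n", &start, &end);
}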
250 return m->flags & MEMBLOCK_HOTPLUG; in memblock_is_hotpluggable()
255 return m->flags & MEMBLOCK_MIRROR; in memblock_is_mirror()
260 return m->flags & MEMBLOCK_NOMAP; in memblock_is_nomap()
265 return m->flags & MEMBLOCK_DRIVER_MANAGED; in memblock_is_driver_managed()
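These predicates take a pointer to a single memblock_region, so they pair naturally with the per-region iterators defined near the end of this header. For example, a sketch (function name illustrative) that adds up mirrored memory:

#include <linux/init.h>
#include <linux/memblock.h>

static phys_addr_t __init mirrored_mem_size(void)
{
        struct memblock_region *r;
        phys_addr_t total = 0;

        /* for_each_mem_region() is defined later in this header */
        for_each_mem_region(r) {
                if (memblock_is_mirror(r))
                        total += r->size;
        }

        return total;
}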
274 * for_each_mem_pfn_range - early memory pfn range iterator
281 * Walks over configured memory ranges.
284 for (i = -1, __next_mem_pfn_range(&i, nid, p_start, p_end, p_nid); \
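A common pattern is walking the PFN span of every configured range together with its node id, as in this sketch (function name illustrative; passing MAX_NUMNODES selects all nodes):

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/numa.h>
#include <linux/printk.h>

static void __init dump_node_pfn_ranges(void)
{
        unsigned long start_pfn, end_pfn;
        int i, nid;

        for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid)
                pr_info("node %d: pfns %lu..%lu\n", nid, start_pfn, end_pfn);
}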
292 * for_each_free_mem_pfn_range_in_zone - iterate through zone specific free
295 * @zone: zone in which all of the memory blocks reside
299 * Walks over free (memory && !reserved) areas of memblock in a specific
312 * for_each_free_mem_pfn_range_in_zone_from - iterate through zone specific
315 * @zone: zone in which all of the memory blocks reside
319 * Walks over free (memory && !reserved) areas of memblock in a specific
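These zone iterators exist only when CONFIG_DEFERRED_STRUCT_PAGE_INIT is enabled. A rough sketch of the pattern (the function name is illustrative and the zone pointer is assumed to come from the caller):

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/mmzone.h>

static unsigned long __init count_free_pfns_in_zone(struct zone *zone)
{
        unsigned long spfn, epfn, nr = 0;
        u64 i;

        /* free (memory && !reserved) PFN ranges clipped to this zone */
        for_each_free_mem_pfn_range_in_zone(i, zone, &spfn, &epfn)
                nr += epfn - spfn;

        return nr;
}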
332 * for_each_free_mem_range - iterate through free memblock areas
335 * @flags: pick from blocks based on memory attributes
340 * Walks over free (memory && !reserved) areas of memblock. Available as
344 __for_each_mem_range(i, &memblock.memory, &memblock.reserved, \
348 * for_each_free_mem_range_reverse - rev-iterate through free memblock areas
351 * @flags: pick from blocks based on memory attributes
356 * Walks over free (memory && !reserved) areas of memblock in reverse
361 __for_each_mem_range_rev(i, &memblock.memory, &memblock.reserved, \
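For instance, early code can total the memory that is still free (present in memblock.memory but absent from memblock.reserved) with a sketch like this (function name illustrative):

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/numa.h>

static phys_addr_t __init free_mem_size(void)
{
        phys_addr_t start, end, total = 0;
        u64 i;

        /* any node, no special flag requirements */
        for_each_free_mem_range(i, NUMA_NO_NODE, MEMBLOCK_NONE,
                                &start, &end, NULL)
                total += end - start;

        return total;
}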
370 r->nid = nid; in memblock_set_region_node()
375 return r->nid; in memblock_get_region_node()
461 * Set the allocation direction to bottom-up or top-down.
469 * Check if the allocation direction is bottom-up or not.
470 * If this returns true, memblock will allocate memory
471 * in the bottom-up direction.
494 * memblock_set_current_limit - Set the current allocation limit to allow
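Both the allocation direction and the current limit are usually changed only temporarily during early boot. A sketch of that pattern (the function name, sizes and the 4G limit are illustrative; MEMBLOCK_ALLOC_ANYWHERE is the default limit being restored):

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/printk.h>
#include <linux/sizes.h>

static void __init constrained_early_alloc(void)
{
        void *buf;

        /* prefer low addresses and refuse anything above 4G ... */
        memblock_set_bottom_up(true);
        memblock_set_current_limit(SZ_4G);

        /* ... for this one allocation */
        buf = memblock_alloc(SZ_64K, SZ_4K);
        if (!buf)
                pr_warn("early allocation below 4G failed\n");

        /* restore the default top-down, unrestricted behaviour */
        memblock_set_bottom_up(false);
        memblock_set_current_limit(MEMBLOCK_ALLOC_ANYWHERE);
}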
507 * While the memory MEMBLOCKs should always be page aligned, the reserved
513 * memblock_region_memory_base_pfn - get the lowest pfn of the memory region
516 * Return: the lowest pfn intersecting with the memory region
520 return PFN_UP(reg->base); in memblock_region_memory_base_pfn()
524 * memblock_region_memory_end_pfn - get the end pfn of the memory region
527 * Return: the end_pfn of the memory region
531 return PFN_DOWN(reg->base + reg->size); in memblock_region_memory_end_pfn()
535 * memblock_region_reserved_base_pfn - get the lowest pfn of the reserved region
538 * Return: the lowest pfn intersecting with the reserved region
542 return PFN_DOWN(reg->base); in memblock_region_reserved_base_pfn()
546 * memblock_region_reserved_end_pfn - get the end pfn of the reserved region
549 * Return: the end_pfn of the reserved region
553 return PFN_UP(reg->base + reg->size); in memblock_region_reserved_end_pfn()
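The opposite rounding directions matter: the memory helpers shrink a range to the pages fully contained in it (PFN_UP of the base, PFN_DOWN of the end), while the reserved helpers grow it to cover every page the reservation touches. A sketch (function name illustrative) that prints both views using the region iterators defined just below:

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/printk.h>

static void __init dump_region_pfn_spans(void)
{
        struct memblock_region *r;

        for_each_mem_region(r) {
                /* only the pages fully inside the region */
                pr_info("memory:   pfns %lu..%lu\n",
                        memblock_region_memory_base_pfn(r),
                        memblock_region_memory_end_pfn(r));
        }

        for_each_reserved_mem_region(r) {
                /* every page the reservation touches */
                pr_info("reserved: pfns %lu..%lu\n",
                        memblock_region_reserved_base_pfn(r),
                        memblock_region_reserved_end_pfn(r));
        }
}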
557 * for_each_mem_region - iterate over memory regions
558 * @region: loop variable
560 #define for_each_mem_region(region) \ argument
561 for (region = memblock.memory.regions; \
562 region < (memblock.memory.regions + memblock.memory.cnt); \
563 region++)
566 * for_each_reserved_mem_region - iterate over reserved memory regions
567 * @region: loop variable
569 #define for_each_reserved_mem_region(region) \ argument
570 for (region = memblock.reserved.regions; \
571 region < (memblock.reserved.regions + memblock.reserved.cnt); \
572 region++)
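Tying the pieces together, a sketch (function name illustrative) that walks every memory region and reports its node id, extent and hotplug status:

#include <linux/init.h>
#include <linux/memblock.h>
#include <linux/printk.h>

static void __init dump_regions_by_node(void)
{
        struct memblock_region *r;

        for_each_mem_region(r) {
                pr_info("node %d: %pa bytes at %pa%s\n",
                        memblock_get_region_node(r), &r->size, &r->base,
                        memblock_is_hotpluggable(r) ? " [hotplug]" : "");
        }
}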
585 #define HASH_SMALL 0x00000002 /* sub-page allocation allowed, min