Lines Matching +full:memory +full:- +full:region

1 /* SPDX-License-Identifier: GPL-2.0-or-later */
7 * Logical memory blocks.
33 * enum memblock_flags - definition of memory region attributes
35 * @MEMBLOCK_HOTPLUG: hotpluggable region
36 * @MEMBLOCK_MIRROR: mirrored region
41 MEMBLOCK_HOTPLUG = 0x1, /* hotpluggable region */
42 MEMBLOCK_MIRROR = 0x2, /* mirrored region */
47 * struct memblock_region - represents a memory region
48 * @base: base address of the region
49 * @size: size of the region
50 * @flags: memory region attributes
63 * struct memblock_type - collection of memory regions of certain type
68 * @name: the memory type symbolic name
79 * struct memblock - memblock allocator metadata
82 * @memory: usable memory regions
83 * @reserved: reserved memory regions
88 struct memblock_type memory; member
153 * for_each_physmem_range - iterate through physmem areas not included in type.
166 * __for_each_mem_range - iterate through memblock areas from type_a and not
172 * @flags: pick from blocks based on memory attributes
186 * __for_each_mem_range_rev - reverse iterate through memblock areas from
192 * @flags: pick from blocks based on memory attributes
207 * for_each_mem_range - iterate through memory areas.
213 __for_each_mem_range(i, &memblock.memory, NULL, NUMA_NO_NODE, \
217 * for_each_mem_range_rev - reverse iterate through memblock areas from
224 __for_each_mem_range_rev(i, &memblock.memory, NULL, NUMA_NO_NODE, \
228 * for_each_reserved_mem_range - iterate over all reserved memblock areas
242 return m->flags & MEMBLOCK_HOTPLUG; in memblock_is_hotpluggable()
247 return m->flags & MEMBLOCK_MIRROR; in memblock_is_mirror()
252 return m->flags & MEMBLOCK_NOMAP; in memblock_is_nomap()
261 * for_each_mem_pfn_range - early memory pfn range iterator
268 * Walks over configured memory ranges.
271 for (i = -1, __next_mem_pfn_range(&i, nid, p_start, p_end, p_nid); \
279 * for_each_free_mem_range_in_zone - iterate through zone specific free
282 * @zone: zone in which all of the memory blocks reside
286 * Walks over free (memory && !reserved) areas of memblock in a specific
299 * for_each_free_mem_range_in_zone_from - iterate through zone specific
302 * @zone: zone in which all of the memory blocks reside
306 * Walks over free (memory && !reserved) areas of memblock in a specific
319 * for_each_free_mem_range - iterate through free memblock areas
322 * @flags: pick from blocks based on memory attributes
327 * Walks over free (memory && !reserved) areas of memblock. Available as
331 __for_each_mem_range(i, &memblock.memory, &memblock.reserved, \
335 * for_each_free_mem_range_reverse - rev-iterate through free memblock areas
338 * @flags: pick from blocks based on memory attributes
343 * Walks over free (memory && !reserved) areas of memblock in reverse
348 __for_each_mem_range_rev(i, &memblock.memory, &memblock.reserved, \
357 r->nid = nid; in memblock_set_region_node()
362 return r->nid; in memblock_get_region_node()
465 * Set the allocation direction to bottom-up or top-down.
473 * Check if the allocation direction is bottom-up or not.
474 * if this is true, that said, memblock will allocate memory
475 * in bottom-up direction.
499 * memblock_set_current_limit - Set the current allocation limit to allow
512 * While the memory MEMBLOCKs should always be page aligned, the reserved
518 * memblock_region_memory_base_pfn - get the lowest pfn of the memory region
521 * Return: the lowest pfn intersecting with the memory region
525 return PFN_UP(reg->base); in memblock_region_memory_base_pfn()
529 * memblock_region_memory_end_pfn - get the end pfn of the memory region
 532 * Return: the end_pfn of the memory region
536 return PFN_DOWN(reg->base + reg->size); in memblock_region_memory_end_pfn()
540 * memblock_region_reserved_base_pfn - get the lowest pfn of the reserved region
543 * Return: the lowest pfn intersecting with the reserved region
547 return PFN_DOWN(reg->base); in memblock_region_reserved_base_pfn()
551 * memblock_region_reserved_end_pfn - get the end pfn of the reserved region
554 * Return: the end_pfn of the reserved region
558 return PFN_UP(reg->base + reg->size); in memblock_region_reserved_end_pfn()
 562 * for_each_mem_region - iterate over memory regions
563 * @region: loop variable
565 #define for_each_mem_region(region) \ argument
566 for (region = memblock.memory.regions; \
567 region < (memblock.memory.regions + memblock.memory.cnt); \
568 region++)
 571 * for_each_reserved_mem_region - iterate over reserved memory regions
572 * @region: loop variable
574 #define for_each_reserved_mem_region(region) \ argument
575 for (region = memblock.reserved.regions; \
576 region < (memblock.reserved.regions + memblock.reserved.cnt); \
577 region++)
590 #define HASH_SMALL 0x00000002 /* sub-page allocation allowed, min