Lines matching refs: size
23 (unsigned long long)lmb->memory.size); in lmb_dump_all()
28 (long long unsigned)lmb->memory.region[i].size); in lmb_dump_all()
34 (long long unsigned)lmb->reserved.size); in lmb_dump_all()
39 (long long unsigned)lmb->reserved.region[i].size); in lmb_dump_all()
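
These matches appear to come from the LMB (logical memory block) allocator, lmb.c in U-Boot: lmb_dump_all() (lines 23-39) walks two region lists, "memory" and "reserved", printing each list's total size and every entry's size. A layout consistent with the fields touched throughout the listing is sketched below; the struct names, the array bound MAX_LMB_REGIONS, and the typedef widths are assumptions made only so the sketch is self-contained, not facts taken from these matches.

/* Sketch of the data structures implied by the field accesses in this listing
 * (memory.size, reserved.size, region[i].base, region[i].size, rgn->cnt).
 * phys_addr_t/phys_size_t widths, the struct names and MAX_LMB_REGIONS are
 * assumptions used only to make the sketch compile. */
typedef unsigned long long phys_addr_t;
typedef unsigned long long phys_size_t;

#define MAX_LMB_REGIONS 8

struct lmb_property {
        phys_addr_t base;       /* start of the range */
        phys_size_t size;       /* length of the range */
};

struct lmb_region {
        unsigned long cnt;      /* number of valid entries in region[] */
        phys_size_t size;       /* running total tracked for the list */
        struct lmb_property region[MAX_LMB_REGIONS + 1];
};

struct lmb {
        struct lmb_region memory;       /* RAM that may be handed out */
        struct lmb_region reserved;     /* ranges that must not be allocated */
};
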
65 phys_size_t size1 = rgn->region[r1].size; in lmb_regions_adjacent()
67 phys_size_t size2 = rgn->region[r2].size; in lmb_regions_adjacent()
78 rgn->region[i].size = rgn->region[i + 1].size; in lmb_remove_region()
87 rgn->region[r1].size += rgn->region[r2].size; in lmb_coalesce_regions()
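
The helpers matched at lines 65-87 do the list bookkeeping: lmb_regions_adjacent() compares two entries' bounds, lmb_remove_region() closes a gap by shifting the tail of the array down (line 78), and lmb_coalesce_regions() folds entry r2 into r1 (line 87). Building on the layout sketched above, the two maintenance helpers plausibly look like the following; the cnt decrement and the base copy next to line 78 are assumptions, not matched lines.

/* Sketch only: shift every entry after r down by one slot and shrink the list. */
static void lmb_remove_region(struct lmb_region *rgn, unsigned long r)
{
        unsigned long i;

        for (i = r; i < rgn->cnt - 1; i++) {
                rgn->region[i].base = rgn->region[i + 1].base;
                rgn->region[i].size = rgn->region[i + 1].size;  /* line 78 */
        }
        rgn->cnt--;
}

/* Sketch only: merge r2 into r1 (line 87), then drop the now-redundant r2. */
static void lmb_coalesce_regions(struct lmb_region *rgn,
                                 unsigned long r1, unsigned long r2)
{
        rgn->region[r1].size += rgn->region[r2].size;
        lmb_remove_region(rgn, r2);
}
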
97 lmb->memory.region[0].size = 0; in lmb_init()
99 lmb->memory.size = 0; in lmb_init()
103 lmb->reserved.region[0].size = 0; in lmb_init()
105 lmb->reserved.size = 0; in lmb_init()
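
lmb_init() (lines 97-105) resets both lists. The matches show each list's running total and first entry being zeroed; leaving cnt at 1 with a zero-sized region[0] matches the empty-list test lmb_add_region() makes at line 114, so a plausible reset is:

/* Sketch of the reset implied by lines 97-105; the cnt = 1 assignments are
 * assumptions consistent with the empty-list test at line 114. */
void lmb_init(struct lmb *lmb)
{
        lmb->memory.region[0].base = 0;
        lmb->memory.region[0].size = 0;         /* line 97 */
        lmb->memory.cnt = 1;
        lmb->memory.size = 0;                   /* line 99 */

        lmb->reserved.region[0].base = 0;
        lmb->reserved.region[0].size = 0;       /* line 103 */
        lmb->reserved.cnt = 1;
        lmb->reserved.size = 0;                 /* line 105 */
}
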
109 static long lmb_add_region(struct lmb_region *rgn, phys_addr_t base, phys_size_t size) in lmb_add_region() argument
114 if ((rgn->cnt == 1) && (rgn->region[0].size == 0)) { in lmb_add_region()
116 rgn->region[0].size = size; in lmb_add_region()
123 phys_size_t rgnsize = rgn->region[i].size; in lmb_add_region()
125 if ((rgnbase == base) && (rgnsize == size)) in lmb_add_region()
129 adjacent = lmb_addrs_adjacent(base,size,rgnbase,rgnsize); in lmb_add_region()
131 rgn->region[i].base -= size; in lmb_add_region()
132 rgn->region[i].size += size; in lmb_add_region()
137 rgn->region[i].size += size; in lmb_add_region()
157 rgn->region[i+1].size = rgn->region[i].size; in lmb_add_region()
160 rgn->region[i+1].size = size; in lmb_add_region()
167 rgn->region[0].size = size; in lmb_add_region()
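
lmb_add_region() (lines 109-167) covers four cases visible in the matches: an empty list is filled in place (lines 114-116); an exact duplicate is detected at line 125; a range adjacent to an existing entry is absorbed, either by growing that entry downward (lines 131-132) or upward (line 137); otherwise higher entries are shifted up to make room (line 157) and the new range is stored in the freed slot (line 160) or at the front of the list (line 167). The adjacency test called at line 129 is not part of these matches; a minimal sketch of it, assuming "adjacent" means the two ranges touch with no gap, is:

/* Sketch only: a positive result presumably selects the grow-downward case at
 * lines 131-132 (the new range ends exactly where the existing one begins),
 * and a negative result the grow-upward case at line 137. */
static long lmb_addrs_adjacent(phys_addr_t base1, phys_size_t size1,
                               phys_addr_t base2, phys_size_t size2)
{
        if (base2 == base1 + size1)
                return 1;       /* range 1 immediately precedes range 2 */
        if (base1 == base2 + size2)
                return -1;      /* range 1 immediately follows range 2 */
        return 0;
}
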
176 long lmb_add(struct lmb *lmb, phys_addr_t base, phys_size_t size) in lmb_add() argument
180 return lmb_add_region(_rgn, base, size); in lmb_add()
183 long lmb_free(struct lmb *lmb, phys_addr_t base, phys_size_t size) in lmb_free() argument
187 phys_addr_t end = base + size; in lmb_free()
195 rgnend = rgnbegin + rgn->region[i].size; in lmb_free()
214 rgn->region[i].size -= size; in lmb_free()
220 rgn->region[i].size -= size; in lmb_free()
228 rgn->region[i].size = base - rgn->region[i].base; in lmb_free()
232 long lmb_reserve(struct lmb *lmb, phys_addr_t base, phys_size_t size) in lmb_reserve() argument
236 return lmb_add_region(_rgn, base, size); in lmb_reserve()
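
The three public mutators share one shape: lmb_add() (line 176) and lmb_reserve() (line 232) both delegate to lmb_add_region(), growing the memory and reserved lists respectively, while lmb_free() (line 183) removes a range from the reserved list, trimming the matching entry at either end (lines 214, 220) or splitting it when the hole falls in the middle (line 228). A minimal caller, assuming a U-Boot build where <lmb.h> declares these functions; the addresses and sizes are purely illustrative:

#include <lmb.h>        /* assumption: U-Boot header declaring struct lmb and the calls below */

/* Sketch: register a 256 MiB bank, carve 1 MiB out of it for firmware,
 * then drop that reservation again later. */
static void lmb_usage_sketch(struct lmb *lmb)
{
        lmb_init(lmb);                                  /* both lists empty */
        lmb_add(lmb, 0x40000000, 0x10000000);           /* usable RAM */
        lmb_reserve(lmb, 0x40000000, 0x00100000);       /* keep allocations off the firmware */
        /* ... */
        lmb_free(lmb, 0x40000000, 0x00100000);          /* reservation no longer needed */
}
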
240 phys_size_t size) in lmb_overlaps_region() argument
246 phys_size_t rgnsize = rgn->region[i].size; in lmb_overlaps_region()
247 if ( lmb_addrs_overlap(base,size,rgnbase,rgnsize) ) { in lmb_overlaps_region()
255 phys_addr_t lmb_alloc(struct lmb *lmb, phys_size_t size, ulong align) in lmb_alloc() argument
257 return lmb_alloc_base(lmb, size, align, LMB_ALLOC_ANYWHERE); in lmb_alloc()
260 phys_addr_t lmb_alloc_base(struct lmb *lmb, phys_size_t size, ulong align, phys_addr_t max_addr) in lmb_alloc_base() argument
264 alloc = __lmb_alloc_base(lmb, size, align, max_addr); in lmb_alloc_base()
268 (ulong)size, (ulong)max_addr); in lmb_alloc_base()
273 static phys_addr_t lmb_align_down(phys_addr_t addr, phys_size_t size) in lmb_align_down() argument
275 return addr & ~(size - 1); in lmb_align_down()
278 static phys_addr_t lmb_align_up(phys_addr_t addr, ulong size) in lmb_align_up() argument
280 return (addr + (size - 1)) & ~(size - 1); in lmb_align_up()
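
Both alignment helpers (lines 273-280) only work for power-of-two alignments: lmb_align_down() masks off the low bits, and lmb_align_up() first adds size - 1 so the mask rounds upward. A self-contained version of the same arithmetic, with unsigned long long standing in for phys_addr_t:

#include <stdio.h>

/* Same masking as lines 275 and 280; only valid when align is a power of two. */
static unsigned long long align_down(unsigned long long addr, unsigned long long align)
{
        return addr & ~(align - 1);
}

static unsigned long long align_up(unsigned long long addr, unsigned long long align)
{
        return (addr + (align - 1)) & ~(align - 1);
}

int main(void)
{
        /* With 4 KiB alignment, 0x1234 rounds down to 0x1000 and up to 0x2000. */
        printf("down: 0x%llx\n", align_down(0x1234, 0x1000));
        printf("up:   0x%llx\n", align_up(0x1234, 0x1000));
        return 0;
}
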
283 phys_addr_t __lmb_alloc_base(struct lmb *lmb, phys_size_t size, ulong align, phys_addr_t max_addr) in __lmb_alloc_base() argument
291 phys_size_t lmbsize = lmb->memory.region[i].size; in __lmb_alloc_base()
293 if (lmbsize < size) in __lmb_alloc_base()
296 base = lmb_align_down(lmbbase + lmbsize - size, align); in __lmb_alloc_base()
302 base = lmb_align_down(base - size, align); in __lmb_alloc_base()
307 j = lmb_overlaps_region(&lmb->reserved, base, size); in __lmb_alloc_base()
311 lmb_align_up(size, in __lmb_alloc_base()
317 if (res_base < size) in __lmb_alloc_base()
319 base = lmb_align_down(res_base - size, align); in __lmb_alloc_base()
331 lmb->reserved.region[i].size - 1; in lmb_is_reserved()
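
__lmb_alloc_base() (lines 283-319) is the top-down allocator: it skips memory banks smaller than the request (line 293), starts from the aligned top of each candidate bank (line 296) or from the aligned max_addr cap (line 302), and whenever the candidate overlaps a reserved region (line 307) it retries just below that reservation (lines 317-319); a successful candidate is apparently entered into the reserved list with its size rounded up by lmb_align_up() (line 311). lmb_alloc() (line 255) is the wrapper that passes LMB_ALLOC_ANYWHERE, and lmb_is_reserved() (line 331) checks an address against the inclusive end of each reserved entry. A minimal caller, under the same U-Boot-environment assumption as above; the error print at line 268 suggests a return of 0 signals failure:

#include <lmb.h>        /* assumption: U-Boot build environment */

/* Sketch: ask for a 64 KiB, 4 KiB-aligned buffer below 0x48000000 and fall
 * back to an unconstrained allocation if that fails.  Sizes are illustrative. */
static phys_addr_t alloc_scratch(struct lmb *lmb)
{
        phys_addr_t buf;

        buf = lmb_alloc_base(lmb, 0x10000, 0x1000, 0x48000000);
        if (!buf)
                buf = lmb_alloc(lmb, 0x10000, 0x1000);  /* LMB_ALLOC_ANYWHERE */

        return buf;     /* 0 means no fit was found */
}
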