Lines matching refs:n (cross-reference hits for the symbol n; the function names suggest OP-TEE's core_mmu.c)

202 size_t n; in _pbuf_intersects() local
204 for (n = 0; n < alen; n++) in _pbuf_intersects()
205 if (core_is_buffer_intersect(pa, size, a[n].paddr, a[n].size)) in _pbuf_intersects()
216 size_t n; in _pbuf_is_inside() local
218 for (n = 0; n < alen; n++) in _pbuf_is_inside()
219 if (core_is_buffer_inside(pa, size, a[n].paddr, a[n].size)) in _pbuf_is_inside()
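
The two helpers above share one shape: walk an array of physical ranges and test the candidate buffer against each element, one asking "does it intersect anything?", the other "is it fully inside some entry?". A minimal self-contained sketch of the intersect variant; the type and helper names are illustrative, not the originals, and the overlap test ignores address wraparound:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t;

    struct phys_range {
        paddr_t paddr;
        size_t size;
    };

    /* Simplified overlap test; empty buffers never intersect. */
    static bool ranges_intersect(paddr_t b, size_t bl, paddr_t a, size_t al)
    {
        if (!bl || !al)
            return false;
        return b <= a + al - 1 && a <= b + bl - 1;
    }

    /* Same loop shape as _pbuf_intersects(): does pa/size overlap any entry? */
    static bool pbuf_intersects(const struct phys_range *a, size_t alen,
                                paddr_t pa, size_t size)
    {
        size_t n;

        for (n = 0; n < alen; n++)
            if (ranges_intersect(pa, size, a[n].paddr, a[n].size))
                return true;
        return false;
    }
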
260 size_t n = 0; in core_mmu_for_each_map() local
262 for (n = 0; n < mem_map->count; n++) { in core_mmu_for_each_map()
263 res = fn(mem_map->map + n, ptr); in core_mmu_for_each_map()
274 size_t n = 0; in find_map_by_type() local
276 for (n = 0; n < mem_map->count; n++) { in find_map_by_type()
277 if (mem_map->map[n].type == type) in find_map_by_type()
278 return mem_map->map + n; in find_map_by_type()
287 size_t n = 0; in find_map_by_type_and_pa() local
289 for (n = 0; n < mem_map->count; n++) { in find_map_by_type_and_pa()
290 if (mem_map->map[n].type != type) in find_map_by_type_and_pa()
292 if (pa_is_in_map(mem_map->map + n, pa, len)) in find_map_by_type_and_pa()
293 return mem_map->map + n; in find_map_by_type_and_pa()
302 size_t n = 0; in find_map_by_va() local
304 for (n = 0; n < mem_map->count; n++) { in find_map_by_va()
305 if (a >= mem_map->map[n].va && in find_map_by_va()
306 a <= (mem_map->map[n].va - 1 + mem_map->map[n].size)) in find_map_by_va()
307 return mem_map->map + n; in find_map_by_va()
316 size_t n = 0; in find_map_by_pa() local
318 for (n = 0; n < mem_map->count; n++) { in find_map_by_pa()
320 if ((mem_map->map[n].attr & TEE_MATTR_VALID_BLOCK) && in find_map_by_pa()
321 pa >= mem_map->map[n].pa && in find_map_by_pa()
322 pa <= (mem_map->map[n].pa - 1 + mem_map->map[n].size)) in find_map_by_pa()
323 return mem_map->map + n; in find_map_by_pa()
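
The find_map_by_*() helpers above are all linear scans over mem_map->map; the detail worth noting is that the end-of-range comparison is written as "va - 1 + size" rather than "va + size", so a region that ends exactly at the top of the address space does not overflow and exclude itself. A compilable sketch of the by-VA lookup under simplified, assumed types:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t vaddr_t;

    struct mmap_region {
        vaddr_t va;
        size_t size;
    };

    /*
     * Linear lookup in the style of find_map_by_va(): "a <= va - 1 + size"
     * compares against the address of the last byte, which stays
     * representable even when va + size would wrap to zero.
     */
    static struct mmap_region *find_region_by_va(struct mmap_region *map,
                                                 size_t count, vaddr_t a)
    {
        size_t n;

        for (n = 0; n < count; n++)
            if (a >= map[n].va && a <= map[n].va - 1 + map[n].size)
                return map + n;
        return NULL;
    }
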
393 size_t n = 0; in carve_out_phys_mem() local
395 while (n < *nelems) { in carve_out_phys_mem()
396 if (!core_is_buffer_intersect(pa, size, m[n].addr, m[n].size)) { in carve_out_phys_mem()
397 n++; in carve_out_phys_mem()
401 if (core_is_buffer_inside(m[n].addr, m[n].size, pa, size)) { in carve_out_phys_mem()
403 rem_array_elem(m, *nelems, sizeof(*m), n); in carve_out_phys_mem()
412 if (pa > m[n].addr && in carve_out_phys_mem()
413 pa + size - 1 < m[n].addr + m[n].size - 1) { in carve_out_phys_mem()
423 ins_array_elem(m, *nelems, sizeof(*m), n + 1, NULL); in carve_out_phys_mem()
424 m[n + 1].addr = pa + size; in carve_out_phys_mem()
425 m[n + 1].size = m[n].addr + m[n].size - pa - size; in carve_out_phys_mem()
426 m[n].size = pa - m[n].addr; in carve_out_phys_mem()
427 n++; in carve_out_phys_mem()
428 } else if (pa <= m[n].addr) { in carve_out_phys_mem()
433 m[n].size = m[n].addr + m[n].size - pa - size; in carve_out_phys_mem()
434 m[n].addr = pa + size; in carve_out_phys_mem()
440 m[n].size = pa - m[n].addr; in carve_out_phys_mem()
442 n++; in carve_out_phys_mem()
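
Taken together, the carve_out_phys_mem() fragments handle four cases when removing [pa, pa + size) from an array of ranges: no overlap (skip), an entry fully covered by the carved area (drop it), the carved area strictly inside an entry (split it in two), and partial overlap at the front or tail (trim it). A condensed sketch assuming simplified types, a spare array slot for the split case, and hypothetical remove_at()/insert_at() helpers standing in for rem_array_elem()/ins_array_elem():

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    typedef uintptr_t paddr_t;

    struct phys_range {
        paddr_t addr;
        size_t size;
    };

    /* Hypothetical stand-ins for rem_array_elem()/ins_array_elem(). */
    static void remove_at(struct phys_range *m, size_t count, size_t idx)
    {
        memmove(m + idx, m + idx + 1, (count - idx - 1) * sizeof(*m));
    }

    static void insert_at(struct phys_range *m, size_t count, size_t idx)
    {
        memmove(m + idx + 1, m + idx, (count - idx) * sizeof(*m));
    }

    /* Remove [pa, pa + size) from every range; wraparound is ignored. */
    static void carve_out(struct phys_range *m, size_t *nelems,
                          paddr_t pa, size_t size)
    {
        size_t n = 0;

        while (n < *nelems) {
            paddr_t end = m[n].addr + m[n].size;

            if (pa + size <= m[n].addr || pa >= end) {
                n++;                        /* no overlap with this entry */
            } else if (pa <= m[n].addr && pa + size >= end) {
                remove_at(m, *nelems, n);   /* entry fully covered: drop it */
                (*nelems)--;
            } else if (pa > m[n].addr && pa + size < end) {
                /* Carved area strictly inside: split the entry in two. */
                insert_at(m, *nelems, n + 1);
                (*nelems)++;
                m[n + 1].addr = pa + size;
                m[n + 1].size = end - (pa + size);
                m[n].size = pa - m[n].addr;
                n += 2;
            } else if (pa <= m[n].addr) {
                /* Overlap at the front: trim the start. */
                m[n].size = end - (pa + size);
                m[n].addr = pa + size;
                n++;
            } else {
                /* Overlap at the tail: trim the end. */
                m[n].size = pa - m[n].addr;
                n++;
            }
        }
    }
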
450 size_t n; in check_phys_mem_is_outside() local
452 for (n = 0; n < nelems; n++) { in check_phys_mem_is_outside()
453 if (!core_is_buffer_outside(start[n].addr, start[n].size, in check_phys_mem_is_outside()
457 start[n].addr, start[n].size, in check_phys_mem_is_outside()
482 size_t n = 0; in core_mmu_set_discovered_nsec_ddr() local
509 for (n = 0; n < ARRAY_SIZE(secure_only); n++) in core_mmu_set_discovered_nsec_ddr()
510 carve_out_phys_mem(&m, &num_elems, secure_only[n].paddr, in core_mmu_set_discovered_nsec_ddr()
511 secure_only[n].size); in core_mmu_set_discovered_nsec_ddr()
513 for (n = 0; n < mem_map->count; n++) { in core_mmu_set_discovered_nsec_ddr()
514 switch (mem_map->map[n].type) { in core_mmu_set_discovered_nsec_ddr()
516 carve_out_phys_mem(&m, &num_elems, mem_map->map[n].pa, in core_mmu_set_discovered_nsec_ddr()
517 mem_map->map[n].size); in core_mmu_set_discovered_nsec_ddr()
531 mem_map->map + n); in core_mmu_set_discovered_nsec_ddr()
539 for (n = 0; n < num_elems; n++) in core_mmu_set_discovered_nsec_ddr()
541 n, m[n].addr, m[n].addr + m[n].size - 1, m[n].size); in core_mmu_set_discovered_nsec_ddr()
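
Reading the core_mmu_set_discovered_nsec_ddr() fragments together: the list of discovered non-secure DDR ranges is first reduced by carving out every secure_only[] range, then by carving out the physical span of selected existing map entries (the switch on map[n].type decides which), and whatever survives is checked against the remaining map entries before each range is logged with its start, end and size.
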
672 size_t n = 0; in verify_special_mem_areas() local
700 for (n = 0; n < mem_map->count; n++) { in verify_special_mem_areas()
708 mem_map->map[n].type == MEM_AREA_SEC_RAM_OVERALL) in verify_special_mem_areas()
712 mem_map->map[n].pa, in verify_special_mem_areas()
713 mem_map->map[n].size)) { in verify_special_mem_areas()
715 mem_map->map[n].pa, in verify_special_mem_areas()
716 mem_map->map[n].size); in verify_special_mem_areas()
756 size_t n = 0; in add_phys_mem() local
778 for (n = 0; n < mem_map->count; n++) { in add_phys_mem()
779 if (mmaps_are_mergeable(mem_map->map + n, &m0)) { in add_phys_mem()
780 merge_mmaps(mem_map->map + n, &m0); in add_phys_mem()
785 if (n + 1 < mem_map->count && in add_phys_mem()
786 mmaps_are_mergeable(mem_map->map + n, in add_phys_mem()
787 mem_map->map + n + 1)) { in add_phys_mem()
788 merge_mmaps(mem_map->map + n, in add_phys_mem()
789 mem_map->map + n + 1); in add_phys_mem()
791 sizeof(*mem_map->map), n + 1); in add_phys_mem()
794 if (n > 0 && mmaps_are_mergeable(mem_map->map + n - 1, in add_phys_mem()
795 mem_map->map + n)) { in add_phys_mem()
796 merge_mmaps(mem_map->map + n - 1, in add_phys_mem()
797 mem_map->map + n); in add_phys_mem()
799 sizeof(*mem_map->map), n); in add_phys_mem()
804 if (mem_type < mem_map->map[n].type || in add_phys_mem()
805 (mem_type == mem_map->map[n].type && in add_phys_mem()
806 mem_addr < mem_map->map[n].pa)) in add_phys_mem()
812 n, &m0); in add_phys_mem()
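
add_phys_mem() keeps the map sorted by (type, physical address) and folds a new range into an existing entry when the two are mergeable; after a merge it also re-checks the neighbours at n + 1 and n - 1 and drops whichever entry became redundant. A reduced sketch of the sort-and-merge policy, with simplified types, no neighbour re-merge, and the array assumed to have spare capacity:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    typedef uintptr_t paddr_t;

    struct region {
        int type;
        paddr_t pa;
        size_t size;
    };

    /* Mergeable here means same type and contiguous or overlapping
     * (wraparound ignored). */
    static bool mergeable(const struct region *a, const struct region *b)
    {
        return a->type == b->type && a->pa + a->size >= b->pa &&
               b->pa + b->size >= a->pa;
    }

    static void merge(struct region *dst, const struct region *src)
    {
        paddr_t start = dst->pa < src->pa ? dst->pa : src->pa;
        paddr_t end_a = dst->pa + dst->size;
        paddr_t end_b = src->pa + src->size;

        dst->pa = start;
        dst->size = (end_a > end_b ? end_a : end_b) - start;
    }

    static void add_region(struct region *map, size_t *count, struct region m0)
    {
        size_t n;

        for (n = 0; n < *count; n++) {
            if (mergeable(map + n, &m0)) {
                merge(map + n, &m0);
                return;
            }
            /* Keep the array sorted by (type, pa): stop at the right slot. */
            if (m0.type < map[n].type ||
                (m0.type == map[n].type && m0.pa < map[n].pa))
                break;
        }
        memmove(map + n + 1, map + n, (*count - n) * sizeof(*map));
        map[n] = m0;
        (*count)++;
    }
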
818 size_t n = 0; in add_va_space() local
821 for (n = 0; n < mem_map->count; n++) { in add_va_space()
822 if (type < mem_map->map[n].type) in add_va_space()
828 n, NULL); in add_va_space()
829 mem_map->map[n] = (struct tee_mmap_region){ in add_va_space()
941 size_t n = 0; in dump_mmap_table() local
943 for (n = 0; n < mem_map->count; n++) { in dump_mmap_table()
944 struct tee_mmap_region *map __maybe_unused = mem_map->map + n; in dump_mmap_table()
1025 size_t n = 0; in add_pager_vaspace() local
1028 for (n = 0; n < mem_map->count; n++) { in add_pager_vaspace()
1029 if (map_is_tee_ram(mem_map->map + n)) { in add_pager_vaspace()
1031 begin = mem_map->map[n].pa; in add_pager_vaspace()
1032 pos = n + 1; in add_pager_vaspace()
1042 n, NULL); in add_pager_vaspace()
1043 mem_map->map[n] = (struct tee_mmap_region){ in add_pager_vaspace()
1053 size_t n = 0; in check_sec_nsec_mem_config() local
1055 for (n = 0; n < ARRAY_SIZE(secure_only); n++) { in check_sec_nsec_mem_config()
1056 if (pbuf_intersects(nsec_shared, secure_only[n].paddr, in check_sec_nsec_mem_config()
1057 secure_only[n].size)) in check_sec_nsec_mem_config()
1109 size_t n = 0; in collect_mem_ranges() local
1185 ADD_PHYS_MEM(MEM_AREA_SEC_RAM_OVERALL, secure_only[n].paddr, in collect_mem_ranges()
1189 for (n = 1; n < ARRAY_SIZE(secure_only); n++) in collect_mem_ranges()
1190 ADD_PHYS_MEM(MEM_AREA_SEC_RAM_OVERALL, secure_only[n].paddr, in collect_mem_ranges()
1191 secure_only[n].size); in collect_mem_ranges()
1228 size_t n = 0; in assign_mem_granularity() local
1234 for (n = 0; n < mem_map->count; n++) { in assign_mem_granularity()
1235 paddr_t mask = mem_map->map[n].pa | mem_map->map[n].size; in assign_mem_granularity()
1240 if (map_is_tee_ram(mem_map->map + n)) in assign_mem_granularity()
1241 mem_map->map[n].region_size = SMALL_PAGE_SIZE; in assign_mem_granularity()
1243 mem_map->map[n].region_size = CORE_MMU_PGDIR_SIZE; in assign_mem_granularity()
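
The "pa | size" expression in assign_mem_granularity() is a compact alignment check: OR-ing the start address and the size exposes any low bit set in either value, so one mask test confirms both are page-aligned before a small-page or pgdir granularity is assigned. A tiny standalone demonstration with assumed constants and example values:

    #include <assert.h>
    #include <stdint.h>

    #define SMALL_PAGE_SIZE 0x1000
    #define SMALL_PAGE_MASK (SMALL_PAGE_SIZE - 1)

    int main(void)
    {
        uintptr_t pa = 0x40100000;      /* page-aligned start */
        uintptr_t size = 0x00200000;    /* page-aligned length */

        /* Both values aligned: no low bits survive the OR. */
        assert(!((pa | size) & SMALL_PAGE_MASK));

        /* A misaligned start (or size) is caught by the same single test. */
        assert(((pa + 0x800) | size) & SMALL_PAGE_MASK);
        return 0;
    }
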
1268 size_t n = 0; in assign_mem_va_dir() local
1278 for (n = 0; n < mem_map->count; n++) in assign_mem_va_dir()
1279 mem_map->map[n].va = 0; in assign_mem_va_dir()
1289 for (n = 0; n < mem_map->count; n++) { in assign_mem_va_dir()
1290 map = mem_map->map + n; in assign_mem_va_dir()
1309 for (n = 0; n < mem_map->count; n++) { in assign_mem_va_dir()
1310 map = mem_map->map + n; in assign_mem_va_dir()
1345 for (n = 0; n < mem_map->count; n++) { in assign_mem_va_dir()
1346 map = mem_map->map + n; in assign_mem_va_dir()
1461 size_t n = 0; in mem_map_add_id_map() local
1464 for (n = 0; n < mem_map->count; n++) in mem_map_add_id_map()
1465 if (core_is_buffer_intersect(mem_map->map[n].va, in mem_map_add_id_map()
1466 mem_map->map[n].size, start, len)) in mem_map_add_id_map()
1516 size_t n = 0; in init_mem_map() local
1518 for (n = 0; n < 3; n++) { in init_mem_map()
1519 ba = arch_aslr_base_addr(start_addr, seed, n); in init_mem_map()
1550 size_t n = 0; in check_mem_map() local
1552 for (n = 0; n < mem_map->count; n++) { in check_mem_map()
1553 m = mem_map->map + n; in check_mem_map()
2361 size_t n = 0; in core_mmu_find_mapping_exclusive() local
2366 for (n = 0; n < mem_map->count; n++) { in core_mmu_find_mapping_exclusive()
2367 if (mem_map->map[n].type != type) in core_mmu_find_mapping_exclusive()
2373 map_found = mem_map->map + n; in core_mmu_find_mapping_exclusive()
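
The final group is an "exclusive" lookup: scan for entries of the requested type and give up as soon as a second match appears, so the caller only gets a mapping when it is unambiguous. A sketch of the pattern under simplified, assumed types:

    #include <stddef.h>
    #include <stdint.h>

    typedef uintptr_t paddr_t;

    struct region {
        int type;
        paddr_t pa;
        size_t size;
    };

    /*
     * "Exclusive" lookup in the style of core_mmu_find_mapping_exclusive():
     * return the single entry of the requested type, or NULL when there is
     * none or more than one.
     */
    static struct region *find_only_region_of_type(struct region *map,
                                                   size_t count, int type)
    {
        struct region *found = NULL;
        size_t n;

        for (n = 0; n < count; n++) {
            if (map[n].type != type)
                continue;
            if (found)
                return NULL;    /* a second match: not exclusive */
            found = map + n;
        }
        return found;
    }
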