Lines matching refs: pa

200 			     paddr_t pa, size_t size)  in _pbuf_intersects()  argument
205 if (core_is_buffer_intersect(pa, size, a[n].paddr, a[n].size)) in _pbuf_intersects()
210 #define pbuf_intersects(a, pa, size) \ argument
211 _pbuf_intersects((a), ARRAY_SIZE(a), (pa), (size))
214 paddr_t pa, size_t size) in _pbuf_is_inside() argument
219 if (core_is_buffer_inside(pa, size, a[n].paddr, a[n].size)) in _pbuf_is_inside()
224 #define pbuf_is_inside(a, pa, size) \ argument
225 _pbuf_is_inside((a), ARRAY_SIZE(a), (pa), (size))
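
The _pbuf_intersects()/_pbuf_is_inside() helpers above walk an array of { paddr, size } regions and test each entry against a physical buffer. A minimal standalone sketch of the underlying range checks, assuming paddr_t is an unsigned address type and leaving out any overflow hardening the real core_is_buffer_*() helpers may add:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uintptr_t paddr_t;	/* assumption: OP-TEE defines paddr_t elsewhere */

struct memaccess_area {
	paddr_t paddr;
	size_t size;
};

/* True if [pa, pa + size) overlaps [b, b + bl) at all */
static bool buf_intersects(paddr_t pa, size_t size, paddr_t b, size_t bl)
{
	return pa + size > b && b + bl > pa;
}

/* True if [pa, pa + size) lies entirely within [b, b + bl) */
static bool buf_is_inside(paddr_t pa, size_t size, paddr_t b, size_t bl)
{
	return pa >= b && pa + size <= b + bl;
}

/* In the spirit of pbuf_is_inside(): accept the buffer if any region covers it */
static bool pbuf_covered(const struct memaccess_area *a, size_t count,
			 paddr_t pa, size_t size)
{
	size_t n = 0;

	for (n = 0; n < count; n++)
		if (buf_is_inside(pa, size, a[n].paddr, a[n].size))
			return true;
	return false;
}

pbuf_intersects() follows the same loop shape with the intersection test and accepts on the first overlapping entry.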
227 static bool pa_is_in_map(struct tee_mmap_region *map, paddr_t pa, size_t len) in pa_is_in_map() argument
234 if (SUB_OVERFLOW(len, 1, &end_pa) || ADD_OVERFLOW(pa, end_pa, &end_pa)) in pa_is_in_map()
237 return (pa >= map->pa && end_pa <= map->pa + map->size - 1); in pa_is_in_map()
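
pa_is_in_map() first computes the inclusive end address with explicit overflow checks before comparing against the region bounds. A sketch of the same pattern using the GCC/Clang overflow builtins in place of OP-TEE's SUB_OVERFLOW()/ADD_OVERFLOW() wrappers, with the struct reduced to the two fields the check needs and paddr_t as in the previous sketch:

struct mmap_region {
	paddr_t pa;
	size_t size;
};

static bool pa_in_region(const struct mmap_region *map, paddr_t pa, size_t len)
{
	paddr_t end_pa = 0;

	if (!map)
		return false;
	/* end_pa = pa + len - 1, rejecting len == 0 and any wrap-around */
	if (__builtin_sub_overflow(len, 1, &end_pa) ||
	    __builtin_add_overflow(pa, end_pa, &end_pa))
		return false;

	return pa >= map->pa && end_pa <= map->pa + map->size - 1;
}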
251 return core_is_buffer_inside(p, l, map->pa, map->size); in pbuf_inside_map_area()
284 find_map_by_type_and_pa(enum teecore_memtypes type, paddr_t pa, size_t len) in find_map_by_type_and_pa() argument
292 if (pa_is_in_map(mem_map->map + n, pa, len)) in find_map_by_type_and_pa()
313 static struct tee_mmap_region *find_map_by_pa(unsigned long pa) in find_map_by_pa() argument
321 pa >= mem_map->map[n].pa && in find_map_by_pa()
322 pa <= (mem_map->map[n].pa - 1 + mem_map->map[n].size)) in find_map_by_pa()
390 paddr_t pa, size_t size) in carve_out_phys_mem() argument
396 if (!core_is_buffer_intersect(pa, size, m[n].addr, m[n].size)) { in carve_out_phys_mem()
401 if (core_is_buffer_inside(m[n].addr, m[n].size, pa, size)) { in carve_out_phys_mem()
412 if (pa > m[n].addr && in carve_out_phys_mem()
413 pa + size - 1 < m[n].addr + m[n].size - 1) { in carve_out_phys_mem()
424 m[n + 1].addr = pa + size; in carve_out_phys_mem()
425 m[n + 1].size = m[n].addr + m[n].size - pa - size; in carve_out_phys_mem()
426 m[n].size = pa - m[n].addr; in carve_out_phys_mem()
428 } else if (pa <= m[n].addr) { in carve_out_phys_mem()
433 m[n].size = m[n].addr + m[n].size - pa - size; in carve_out_phys_mem()
434 m[n].addr = pa + size; in carve_out_phys_mem()
440 m[n].size = pa - m[n].addr; in carve_out_phys_mem()
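
carve_out_phys_mem() above removes a [pa, pa + size) hole from an array of discovered physical memory chunks, with four cases per chunk: no overlap, chunk fully covered (dropped), hole strictly inside (chunk split in two), and hole overlapping one end (chunk trimmed). A simplified sketch of one carve step, assuming the caller guarantees room for one extra array element; the real function also grows the array and panics on layouts it cannot handle:

#include <string.h>

struct phys_chunk {
	paddr_t addr;
	size_t size;
};

static void carve_out(struct phys_chunk *m, size_t *num, size_t n,
		      paddr_t pa, size_t size)
{
	paddr_t m_end = m[n].addr + m[n].size;	/* exclusive end of chunk */
	paddr_t c_end = pa + size;		/* exclusive end of hole */

	if (c_end <= m[n].addr || pa >= m_end) {
		/* No overlap: nothing to do */
	} else if (pa <= m[n].addr && c_end >= m_end) {
		/* Chunk fully covered by the hole: drop it */
		memmove(m + n, m + n + 1, (*num - n - 1) * sizeof(*m));
		(*num)--;
	} else if (pa > m[n].addr && c_end < m_end) {
		/* Hole strictly inside: split into head and tail chunks */
		memmove(m + n + 2, m + n + 1, (*num - n - 1) * sizeof(*m));
		(*num)++;
		m[n + 1].addr = c_end;
		m[n + 1].size = m_end - c_end;
		m[n].size = pa - m[n].addr;
	} else if (pa <= m[n].addr) {
		/* Hole overlaps the start: trim the head */
		m[n].addr = c_end;
		m[n].size = m_end - c_end;
	} else {
		/* Hole overlaps the end: trim the tail */
		m[n].size = pa - m[n].addr;
	}
}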
454 map->pa, map->size)) { in check_phys_mem_is_outside()
458 map->type, map->pa, map->size); in check_phys_mem_is_outside()
516 carve_out_phys_mem(&m, &num_elems, mem_map->map[n].pa, in core_mmu_set_discovered_nsec_ddr()
619 static struct mobj *core_sdp_mem_alloc_mobj(paddr_t pa, size_t size) in core_sdp_mem_alloc_mobj() argument
621 struct mobj *mobj = mobj_phys_alloc(pa, size, TEE_MATTR_MEM_TYPE_CACHED, in core_sdp_mem_alloc_mobj()
712 mem_map->map[n].pa, in verify_special_mem_areas()
715 mem_map->map[n].pa, in verify_special_mem_areas()
726 paddr_t end_pa = MAX(dst->pa + dst->size - 1, src->pa + src->size - 1); in merge_mmaps()
727 paddr_t pa = MIN(dst->pa, src->pa); in merge_mmaps() local
730 dst->pa, dst->pa + dst->size - 1, src->pa, in merge_mmaps()
731 src->pa + src->size - 1); in merge_mmaps()
732 dst->pa = pa; in merge_mmaps()
733 dst->size = end_pa - pa + 1; in merge_mmaps()
742 if (r1->pa == r2->pa) in mmaps_are_mergeable()
745 if (r1->pa < r2->pa) in mmaps_are_mergeable()
746 return r1->pa + r1->size >= r2->pa; in mmaps_are_mergeable()
748 return r2->pa + r2->size >= r1->pa; in mmaps_are_mergeable()
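
merge_mmaps() and mmaps_are_mergeable() above combine two regions when they overlap or are exactly adjacent; the merged region runs from the lower start to the higher inclusive end. A sketch on bare ranges, with paddr_t as before:

static bool ranges_mergeable(paddr_t a_pa, size_t a_sz,
			     paddr_t b_pa, size_t b_sz)
{
	if (a_pa == b_pa)
		return true;
	if (a_pa < b_pa)
		return a_pa + a_sz >= b_pa;	/* a reaches (or passes) b's start */
	return b_pa + b_sz >= a_pa;
}

static void merge_ranges(paddr_t *dst_pa, size_t *dst_sz,
			 paddr_t src_pa, size_t src_sz)
{
	paddr_t dst_end = *dst_pa + *dst_sz - 1;
	paddr_t src_end = src_pa + src_sz - 1;
	paddr_t end_pa = dst_end > src_end ? dst_end : src_end;
	paddr_t pa = *dst_pa < src_pa ? *dst_pa : src_pa;

	*dst_pa = pa;
	*dst_sz = end_pa - pa + 1;
}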
759 .pa = mem_addr, in add_phys_mem()
806 mem_addr < mem_map->map[n].pa)) in add_phys_mem()
949 map->va + map->size - 1, map->pa, in dump_mmap_table()
950 (paddr_t)(map->pa + map->size - 1), map->size, in dump_mmap_table()
961 paddr_t pa; in dump_xlat_table() local
967 core_mmu_get_entry(&tbl_info, idx, &pa, &attr); in dump_xlat_table()
981 level * 2, "", level, va, pa, in dump_xlat_table()
987 level * 2, "", level, va, pa, in dump_xlat_table()
1031 begin = mem_map->map[n].pa; in add_pager_vaspace()
1036 end = mem_map->map[pos - 1].pa + mem_map->map[pos - 1].size; in add_pager_vaspace()
1162 paddr_t pa = 0; in collect_mem_ranges() local
1175 pa = VCORE_INIT_RO_PA + VCORE_INIT_RO_SZ; in collect_mem_ranges()
1176 sz = TEE_RAM_START + TEE_RAM_PH_SIZE - pa; in collect_mem_ranges()
1177 ADD_PHYS_MEM(MEM_AREA_TEE_RAM, pa, sz); in collect_mem_ranges()
1235 paddr_t mask = mem_map->map[n].pa | mem_map->map[n].size; in assign_mem_granularity()
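
The mask computed in assign_mem_granularity() (pa | size) exposes the coarsest alignment that the start address and the size share: if any bit below a candidate granule size is set, that granule cannot be used. A sketch with assumed 4 KiB / 2 MiB granule constants; the real values come from the core MMU headers and depend on the translation table format:

#define SMALL_PAGE_SZ	0x1000UL	/* 4 KiB, assumed */
#define PGDIR_SZ	0x200000UL	/* 2 MiB, assumed */

static size_t pick_granularity(paddr_t pa, size_t size)
{
	paddr_t mask = pa | size;

	if (!(mask & (PGDIR_SZ - 1)))
		return PGDIR_SZ;	/* start and size both 2 MiB aligned */
	if (!(mask & (SMALL_PAGE_SZ - 1)))
		return SMALL_PAGE_SZ;	/* both 4 KiB aligned */
	return 0;			/* unaligned region: reject */
}

For example, pa = 0x40200000 with size = 0x400000 gives mask = 0x40600000, whose low 21 bits are clear, so the region qualifies for 2 MiB block mappings.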
1336 va += (map->pa - va) & CORE_MMU_PGDIR_MASK; in assign_mem_va_dir()
1373 vaddr_t offs = (map->pa - va) & in assign_mem_va_dir()
1434 rc = CMP_TRILEAN(mm_a->pa, mm_b->pa); in cmp_init_mem_map()
1478 .pa = start, in mem_map_add_id_map()
1564 if (!pbuf_is_inside(secure_only, m->pa, m->size)) in check_mem_map()
1568 if (!pbuf_is_inside(secure_only, m->pa, m->size)) in check_mem_map()
1572 if (!pbuf_is_inside(nsec_shared, m->pa, m->size)) in check_mem_map()
1651 .pa = start, in core_init_mmu_map()
1758 static int __maybe_unused core_va2pa_helper(void *va, paddr_t *pa) in core_va2pa_helper() argument
1771 if (map->pa) in core_va2pa_helper()
1772 *pa = map->pa + (vaddr_t)va - map->va; in core_va2pa_helper()
1774 *pa = 0; in core_va2pa_helper()
1779 static void *map_pa2va(struct tee_mmap_region *map, paddr_t pa, size_t len) in map_pa2va() argument
1781 if (!pa_is_in_map(map, pa, len)) in map_pa2va()
1784 return (void *)(vaddr_t)(map->va + pa - map->pa); in map_pa2va()
1804 enum teecore_memtypes core_mmu_get_type_by_pa(paddr_t pa) in core_mmu_get_type_by_pa() argument
1806 struct tee_mmap_region *map = find_map_by_pa(pa); in core_mmu_get_type_by_pa()
1816 paddr_t pa, uint32_t attr) in core_mmu_set_entry() argument
1820 idx, pa, attr); in core_mmu_set_entry()
1824 paddr_t *pa, uint32_t *attr) in core_mmu_get_entry() argument
1828 idx, pa, attr); in core_mmu_get_entry()
1840 assert(!core_mmu_get_block_offset(tbl_info, region->pa)); in clear_region()
1856 paddr_t pa; in set_region() local
1861 assert(!core_mmu_get_block_offset(tbl_info, region->pa)); in set_region()
1865 pa = region->pa; in set_region()
1868 core_mmu_set_entry(tbl_info, idx, pa, region->attr); in set_region()
1870 pa += BIT64(tbl_info->shift); in set_region()
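
set_region() above programs one translation-table level: each entry maps a fixed amount of physical memory, so the physical address advances by BIT64(tbl_info->shift) per index. A reduced sketch with the table access abstracted behind a callback standing in for core_mmu_set_entry():

#include <stdint.h>

static void fill_table_entries(unsigned int first_idx, unsigned int nentries,
			       paddr_t pa, unsigned int shift, uint32_t attr,
			       void (*set_entry)(unsigned int idx, paddr_t pa,
						 uint32_t attr))
{
	uint64_t entry_size = (uint64_t)1 << shift;	/* BIT64(shift) */
	unsigned int n = 0;

	for (n = 0; n < nentries; n++) {
		set_entry(first_idx + n, pa, attr);
		pa += entry_size;
	}
}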
1930 &r.pa) != TEE_SUCCESS) in set_pg_region()
1975 paddr_t paddr = mm->pa; in core_mmu_map_region()
2227 mm->pa += num_pages * SMALL_PAGE_SIZE; in maybe_remove_from_mem_map()
2240 m.pa += mm->size + num_pages * SMALL_PAGE_SIZE; in maybe_remove_from_mem_map()
2311 paddr_t pa = virt_to_phys(addr); in core_mmu_remove_mapping() local
2317 map = find_map_by_type_and_pa(type, pa, len); in core_mmu_remove_mapping()
2334 p = ROUNDDOWN2(pa, granule); in core_mmu_remove_mapping()
2335 l = ROUNDUP2(len + pa - p, granule); in core_mmu_remove_mapping()
2336 if (map->pa != p || map->size != l) in core_mmu_remove_mapping()
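
The unmap path above widens the requested physical range to the mapping granule with ROUNDDOWN2()/ROUNDUP2() and then insists that the widened range matches the existing map entry exactly. A sketch of the rounding step for a power-of-two granule:

static void widen_to_granule(paddr_t pa, size_t len, size_t granule,
			     paddr_t *start, size_t *rounded_len)
{
	*start = pa & ~((paddr_t)granule - 1);			/* ROUNDDOWN2 */
	*rounded_len = (len + (pa - *start) + granule - 1) &
		       ~((size_t)granule - 1);			/* ROUNDUP2 */
}

If the widened range does not coincide with the map entry (start and length both equal), the function above refuses to remove the mapping.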
2400 return (void *)(vaddr_t)(map->va + addr - map->pa); in core_mmu_add_mapping()
2435 .pa = p, in core_mmu_add_mapping()
2447 return (void *)(vaddr_t)(map->va + addr - map->pa); in core_mmu_add_mapping()
2464 static void check_pa_matches_va(void *va, paddr_t pa) in check_pa_matches_va() argument
2479 if (pa) in check_pa_matches_va()
2488 if (res == TEE_SUCCESS && pa != p) in check_pa_matches_va()
2490 if (res != TEE_SUCCESS && pa) in check_pa_matches_va()
2497 if (v - boot_mmu_config.map_offset != pa) in check_pa_matches_va()
2516 if (pa != p) in check_pa_matches_va()
2519 if (pa) in check_pa_matches_va()
2528 if (p && pa != p) { in check_pa_matches_va()
2530 va, p, pa); in check_pa_matches_va()
2534 if (pa) { in check_pa_matches_va()
2535 DMSG("va %p unmapped, expect 0x%" PRIxPA, va, pa); in check_pa_matches_va()
2541 static void check_pa_matches_va(void *va __unused, paddr_t pa __unused) in check_pa_matches_va()
2548 paddr_t pa = 0; in virt_to_phys() local
2550 if (!arch_va2pa_helper(va, &pa)) in virt_to_phys()
2551 pa = 0; in virt_to_phys()
2552 check_pa_matches_va(memtag_strip_tag(va), pa); in virt_to_phys()
2553 return pa; in virt_to_phys()
2564 static void check_va_matches_pa(paddr_t pa, void *va) in check_va_matches_pa() argument
2572 if (p != pa) { in check_va_matches_pa()
2573 DMSG("va %p maps 0x%" PRIxPA " expect 0x%" PRIxPA, va, p, pa); in check_va_matches_pa()
2578 static void check_va_matches_pa(paddr_t pa __unused, void *va __unused) in check_va_matches_pa()
2583 static void *phys_to_virt_ts_vaspace(paddr_t pa, size_t len) in phys_to_virt_ts_vaspace() argument
2588 return vm_pa2va(to_user_mode_ctx(thread_get_tsd()->ctx), pa, len); in phys_to_virt_ts_vaspace()
2592 static void *phys_to_virt_tee_ram(paddr_t pa, size_t len) in phys_to_virt_tee_ram() argument
2596 if (SUB_OVERFLOW(len, 1, &end_pa) || ADD_OVERFLOW(pa, end_pa, &end_pa)) in phys_to_virt_tee_ram()
2599 if (pa >= TEE_LOAD_ADDR && pa < get_linear_map_end_pa()) { in phys_to_virt_tee_ram()
2602 return (void *)(vaddr_t)(pa + boot_mmu_config.map_offset); in phys_to_virt_tee_ram()
2605 return tee_pager_phys_to_virt(pa, len); in phys_to_virt_tee_ram()
2608 static void *phys_to_virt_tee_ram(paddr_t pa, size_t len) in phys_to_virt_tee_ram() argument
2612 mmap = find_map_by_type_and_pa(MEM_AREA_TEE_RAM, pa, len); in phys_to_virt_tee_ram()
2614 mmap = find_map_by_type_and_pa(MEM_AREA_NEX_RAM_RW, pa, len); in phys_to_virt_tee_ram()
2616 mmap = find_map_by_type_and_pa(MEM_AREA_NEX_RAM_RO, pa, len); in phys_to_virt_tee_ram()
2618 mmap = find_map_by_type_and_pa(MEM_AREA_TEE_RAM_RW, pa, len); in phys_to_virt_tee_ram()
2620 mmap = find_map_by_type_and_pa(MEM_AREA_TEE_RAM_RO, pa, len); in phys_to_virt_tee_ram()
2622 mmap = find_map_by_type_and_pa(MEM_AREA_TEE_RAM_RX, pa, len); in phys_to_virt_tee_ram()
2628 return map_pa2va(mmap, pa, len); in phys_to_virt_tee_ram()
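
The pager-less phys_to_virt_tee_ram() above tries a fixed sequence of core RAM area types until one map covers the physical address, then converts through map_pa2va(). The same lookup can be read as a loop over the candidate types; this sketch reuses the enum values and helpers exactly as named in the listing, so it only makes sense inside that source file:

static void *lookup_core_ram(paddr_t pa, size_t len)
{
	static const enum teecore_memtypes types[] = {
		MEM_AREA_TEE_RAM, MEM_AREA_NEX_RAM_RW, MEM_AREA_NEX_RAM_RO,
		MEM_AREA_TEE_RAM_RW, MEM_AREA_TEE_RAM_RO, MEM_AREA_TEE_RAM_RX,
	};
	struct tee_mmap_region *mmap = NULL;
	size_t n = 0;

	for (n = 0; n < ARRAY_SIZE(types) && !mmap; n++)
		mmap = find_map_by_type_and_pa(types[n], pa, len);

	if (!mmap)
		return NULL;
	return map_pa2va(mmap, pa, len);
}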
2632 void *phys_to_virt(paddr_t pa, enum teecore_memtypes m, size_t len) in phys_to_virt() argument
2638 va = phys_to_virt_ts_vaspace(pa, len); in phys_to_virt()
2646 va = phys_to_virt_tee_ram(pa, len); in phys_to_virt()
2655 va = map_pa2va(find_map_by_type_and_pa(m, pa, len), pa, len); in phys_to_virt()
2658 check_va_matches_pa(pa, va); in phys_to_virt()
2662 void *phys_to_virt_io(paddr_t pa, size_t len) in phys_to_virt_io() argument
2667 map = find_map_by_type_and_pa(MEM_AREA_IO_SEC, pa, len); in phys_to_virt_io()
2669 map = find_map_by_type_and_pa(MEM_AREA_IO_NSEC, pa, len); in phys_to_virt_io()
2672 va = map_pa2va(map, pa, len); in phys_to_virt_io()
2673 check_va_matches_pa(pa, va); in phys_to_virt_io()
2677 vaddr_t core_mmu_get_va(paddr_t pa, enum teecore_memtypes type, size_t len) in core_mmu_get_va() argument
2680 return (vaddr_t)phys_to_virt(pa, type, len); in core_mmu_get_va()
2682 return (vaddr_t)pa; in core_mmu_get_va()
2705 assert(p->pa); in io_pa_or_va()
2708 p->va = (vaddr_t)phys_to_virt_io(p->pa, len); in io_pa_or_va()
2712 return p->pa; in io_pa_or_va()
2717 assert(p->pa); in io_pa_or_va_secure()
2720 p->va = (vaddr_t)phys_to_virt(p->pa, MEM_AREA_IO_SEC, in io_pa_or_va_secure()
2725 return p->pa; in io_pa_or_va_secure()
2730 assert(p->pa); in io_pa_or_va_nsec()
2733 p->va = (vaddr_t)phys_to_virt(p->pa, MEM_AREA_IO_NSEC, in io_pa_or_va_nsec()
2738 return p->pa; in io_pa_or_va_nsec()
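
io_pa_or_va() and its _secure/_nsec variants above resolve a register base lazily: once the MMU is on, the virtual address is looked up on first use and cached; before that, the physical address is returned directly. A sketch assuming struct io_pa_va holds exactly a pa and a cached va, vaddr_t is the virtual-address counterpart of paddr_t, and cpu_mmu_enabled()/phys_to_virt_io() behave as named in the listing:

#include <assert.h>

struct io_pa_va {
	paddr_t pa;	/* fixed physical base */
	vaddr_t va;	/* cached virtual address, 0 until first use */
};

static vaddr_t pa_or_va(struct io_pa_va *p, size_t len)
{
	assert(p->pa);

	if (cpu_mmu_enabled()) {
		if (!p->va)
			p->va = (vaddr_t)phys_to_virt_io(p->pa, len);
		return p->va;
	}

	return p->pa;
}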
2772 static void __maybe_unused carve_out_core_mem(paddr_t pa, paddr_t end_pa) in carve_out_core_mem() argument
2776 DMSG("%#"PRIxPA" .. %#"PRIxPA, pa, end_pa); in carve_out_core_mem()
2777 mm = phys_mem_alloc2(pa, end_pa - pa); in carve_out_core_mem()
2808 paddr_t pa = 0; in core_mmu_init_phys_mem()
2827 pa = vaddr_to_phys(ROUNDUP2(ASAN_MAP_PA, align)); in core_mmu_init_phys_mem()
2828 carve_out_core_mem(pa, pa + ASAN_MAP_SZ); in core_mmu_init_phys_mem()
2834 pa = TEE_SDP_TEST_MEM_BASE; in core_mmu_init_phys_mem()
2835 carve_out_core_mem(pa, pa + TEE_SDP_TEST_MEM_SIZE); in core_mmu_init_phys_mem()
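
Finally, carve_out_core_mem(), used at the end of the listing for the ASAN shadow and the SDP test region, reserves a fixed physical window by allocating exactly that range from the physical memory pool so it can never be handed out later. A sketch assuming phys_mem_alloc2() returns a tee_mm_entry_t pointer and that panicking on failure is acceptable during early init (both assumptions):

static void reserve_phys_window(paddr_t pa, paddr_t end_pa)
{
	tee_mm_entry_t *mm = NULL;

	DMSG("%#"PRIxPA" .. %#"PRIxPA, pa, end_pa);
	mm = phys_mem_alloc2(pa, end_pa - pa);
	if (!mm)
		panic("Cannot reserve core memory");
}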