Lines matching references to va (the leading number on each line is the line number in the source file):

240 static bool va_is_in_map(struct tee_mmap_region *map, vaddr_t va)  in va_is_in_map()  argument
244 return (va >= map->va && va <= (map->va + map->size - 1)); in va_is_in_map()
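
From the two matched lines, va_is_in_map() is an inclusive range check on a single map entry; a minimal sketch (the NULL guard is an assumption):

static bool va_is_in_map(struct tee_mmap_region *map, vaddr_t va)
{
        if (!map)
                return false;
        /* inclusive upper bound: map->va + map->size - 1 is the last mapped byte */
        return va >= map->va && va <= (map->va + map->size - 1);
}
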
298 static struct tee_mmap_region *find_map_by_va(void *va) in find_map_by_va() argument
301 vaddr_t a = (vaddr_t)va; in find_map_by_va()
305 if (a >= mem_map->map[n].va && in find_map_by_va()
306 a <= (mem_map->map[n].va - 1 + mem_map->map[n].size)) in find_map_by_va()
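
find_map_by_va() does a linear scan of the static memory map; a sketch of the surrounding loop (the get_memory_map() accessor and the count field are assumed names):

static struct tee_mmap_region *find_map_by_va(void *va)
{
        struct memory_map *mem_map = get_memory_map();  /* accessor name assumed */
        vaddr_t a = (vaddr_t)va;
        size_t n = 0;

        for (n = 0; n < mem_map->count; n++) {
                if (a >= mem_map->map[n].va &&
                    a <= (mem_map->map[n].va - 1 + mem_map->map[n].size))
                        return mem_map->map + n;
        }
        return NULL;
}
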
936 return CMP_TRILEAN(mm_a->va, mm_b->va); in cmp_mmap_by_lower_va()
948 teecore_memtype_name(map->type), map->va, in dump_mmap_table()
949 map->va + map->size - 1, map->pa, in dump_mmap_table()
957 static void dump_xlat_table(vaddr_t va, unsigned int level) in dump_xlat_table() argument
964 core_mmu_find_table(NULL, va, level, &tbl_info); in dump_xlat_table()
965 va = tbl_info.va_base; in dump_xlat_table()
981 level * 2, "", level, va, pa, in dump_xlat_table()
983 dump_xlat_table(va, level + 1); in dump_xlat_table()
987 level * 2, "", level, va, pa, in dump_xlat_table()
996 level * 2, "", level, va); in dump_xlat_table()
999 va += BIT64(tbl_info.shift); in dump_xlat_table()
1005 static void dump_xlat_table(vaddr_t va __unused, int level __unused) in dump_xlat_table()
1267 vaddr_t va = 0; in assign_mem_va_dir() local
1279 mem_map->map[n].va = 0; in assign_mem_va_dir()
1288 va = tee_ram_va + tee_ram_initial_offs; in assign_mem_va_dir()
1293 assert(!(va & (map->region_size - 1))); in assign_mem_va_dir()
1295 map->va = va; in assign_mem_va_dir()
1296 if (ADD_OVERFLOW(va, map->size, &va)) in assign_mem_va_dir()
1298 if (!core_mmu_va_is_valid(va)) in assign_mem_va_dir()
1308 va = tee_ram_va; in assign_mem_va_dir()
1312 if (map->va) in assign_mem_va_dir()
1318 va = ROUNDDOWN(va, CORE_MMU_PGDIR_SIZE); in assign_mem_va_dir()
1322 va = ROUNDDOWN(va, CORE_MMU_PGDIR_SIZE); in assign_mem_va_dir()
1325 if (SUB_OVERFLOW(va, map->size, &va)) in assign_mem_va_dir()
1327 va = ROUNDDOWN2(va, map->region_size); in assign_mem_va_dir()
1334 if (SUB_OVERFLOW(va, CORE_MMU_PGDIR_SIZE, &va)) in assign_mem_va_dir()
1336 va += (map->pa - va) & CORE_MMU_PGDIR_MASK; in assign_mem_va_dir()
1338 map->va = va; in assign_mem_va_dir()
1348 if (map->va) in assign_mem_va_dir()
1354 if (ROUNDUP_OVERFLOW(va, CORE_MMU_PGDIR_SIZE, in assign_mem_va_dir()
1355 &va)) in assign_mem_va_dir()
1360 if (ROUNDUP_OVERFLOW(va, CORE_MMU_PGDIR_SIZE, in assign_mem_va_dir()
1361 &va)) in assign_mem_va_dir()
1365 if (ROUNDUP2_OVERFLOW(va, map->region_size, &va)) in assign_mem_va_dir()
1373 vaddr_t offs = (map->pa - va) & in assign_mem_va_dir()
1376 if (ADD_OVERFLOW(va, offs, &va)) in assign_mem_va_dir()
1380 map->va = va; in assign_mem_va_dir()
1381 if (ADD_OVERFLOW(va, map->size, &va)) in assign_mem_va_dir()
1383 if (!core_mmu_va_is_valid(va)) in assign_mem_va_dir()
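
assign_mem_va_dir() first pins TEE RAM at tee_ram_va and then hands out virtual addresses to the remaining entries in one direction; a much-simplified sketch of the ascending pass only, with the pgdir-boundary special cases and the descending variant omitted (the loop shape and return type are assumptions, the macros are the ones in the matches):

        vaddr_t va = tee_ram_va + tee_ram_initial_offs;
        size_t n = 0;

        for (n = 0; n < mem_map->count; n++) {
                struct tee_mmap_region *map = mem_map->map + n;

                if (map->va)
                        continue;       /* TEE RAM was assigned in an earlier pass */

                if (ROUNDUP2_OVERFLOW(va, map->region_size, &va))
                        return false;
                if (map->region_size < CORE_MMU_PGDIR_SIZE) {
                        /* keep va and pa at the same offset within a pgdir */
                        vaddr_t offs = (map->pa - va) & CORE_MMU_PGDIR_MASK;

                        if (ADD_OVERFLOW(va, offs, &va))
                                return false;
                }
                map->va = va;
                if (ADD_OVERFLOW(va, map->size, &va))
                        return false;
                if (!core_mmu_va_is_valid(va))
                        return false;
        }
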
1465 if (core_is_buffer_intersect(mem_map->map[n].va, in mem_map_add_id_map()
1479 .va = start, in mem_map_add_id_map()
1652 .va = start, in core_init_mmu_map()
1758 static int __maybe_unused core_va2pa_helper(void *va, paddr_t *pa) in core_va2pa_helper() argument
1762 map = find_map_by_va(va); in core_va2pa_helper()
1763 if (!va_is_in_map(map, (vaddr_t)va)) in core_va2pa_helper()
1772 *pa = map->pa + (vaddr_t)va - map->va; in core_va2pa_helper()
1784 return (void *)(vaddr_t)(map->va + pa - map->pa); in map_pa2va()
1796 *s = map->va; in core_mmu_get_mem_by_type()
1797 *e = map->va + map->size; in core_mmu_get_mem_by_type()
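
Once the entry is found, VA-to-PA and PA-to-VA are plain offset arithmetic; a sketch of both helpers (the failure value and the pa_is_in_map() check are assumptions):

static int __maybe_unused core_va2pa_helper(void *va, paddr_t *pa)
{
        struct tee_mmap_region *map = find_map_by_va(va);

        if (!va_is_in_map(map, (vaddr_t)va))
                return -1;      /* non-zero on failure; exact value assumed */
        *pa = map->pa + (vaddr_t)va - map->va;
        return 0;
}

static void *map_pa2va(struct tee_mmap_region *map, paddr_t pa, size_t len)
{
        if (!map || !pa_is_in_map(map, pa, len))        /* pa_is_in_map() assumed */
                return NULL;
        return (void *)(vaddr_t)(map->va + pa - map->pa);
}
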
1838 assert(!core_mmu_get_block_offset(tbl_info, region->va)); in clear_region()
1842 idx = core_mmu_va2idx(tbl_info, region->va); in clear_region()
1843 end = core_mmu_va2idx(tbl_info, region->va + region->size); in clear_region()
1859 assert(!core_mmu_get_block_offset(tbl_info, region->va)); in set_region()
1863 idx = core_mmu_va2idx(tbl_info, region->va); in set_region()
1864 end = core_mmu_va2idx(tbl_info, region->va + region->size); in set_region()
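
set_region() (and, symmetrically, clear_region()) walks translation-table indices derived from the region's VA range; a sketch assuming core_mmu_set_entry() programs a single entry:

static void set_region(struct core_mmu_table_info *tbl_info,
                       struct tee_mmap_region *region)
{
        unsigned int idx = 0;
        unsigned int end = 0;
        paddr_t pa = 0;

        /* va, size and pa should all be block aligned at this table level */
        assert(!core_mmu_get_block_offset(tbl_info, region->va));
        assert(!core_mmu_get_block_offset(tbl_info, region->size));
        assert(!core_mmu_get_block_offset(tbl_info, region->pa));

        idx = core_mmu_va2idx(tbl_info, region->va);
        end = core_mmu_va2idx(tbl_info, region->va + region->size);
        pa = region->pa;

        while (idx < end) {
                core_mmu_set_entry(tbl_info, idx, pa, region->attr);
                idx++;
                pa += BIT64(tbl_info->shift);   /* advance by one block */
        }
}
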
1879 .va = region->va, in set_pg_region()
1883 vaddr_t end = r.va + r.size; in set_pg_region()
1886 while (r.va < end) { in set_pg_region()
1888 r.va >= (pg_info->va_base + CORE_MMU_PGDIR_SIZE)) { in set_pg_region()
1895 assert(r.va > pg_info->va_base); in set_pg_region()
1897 idx = core_mmu_va2idx(dir_info, r.va); in set_pg_region()
1918 r.size = MIN(CORE_MMU_PGDIR_SIZE - (r.va - pg_info->va_base), in set_pg_region()
1919 end - r.va); in set_pg_region()
1923 size_t offset = r.va - region->va + region->offset; in set_pg_region()
1934 r.va += r.size; in set_pg_region()
1974 vaddr_t vaddr = mm->va; in core_mmu_map_region()
2195 vaddr_t va = 0; in maybe_remove_from_mem_map() local
2214 va = ROUNDDOWN(vstart, SMALL_PAGE_SIZE); in maybe_remove_from_mem_map()
2217 if (va == mm->va && mm->size == num_pages * SMALL_PAGE_SIZE) { in maybe_remove_from_mem_map()
2225 } else if (va == mm->va) { in maybe_remove_from_mem_map()
2226 mm->va += num_pages * SMALL_PAGE_SIZE; in maybe_remove_from_mem_map()
2229 } else if (va + num_pages * SMALL_PAGE_SIZE == mm->va + mm->size) { in maybe_remove_from_mem_map()
2238 mm->size = va - mm->va; in maybe_remove_from_mem_map()
2239 m.va += mm->size + num_pages * SMALL_PAGE_SIZE; in maybe_remove_from_mem_map()
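
maybe_remove_from_mem_map() distinguishes four ways the page range being unmapped can overlap an existing entry; condensed from the matches (the pa/size adjustments mirror the va arithmetic and are assumptions where not shown above):

        if (va == mm->va && mm->size == num_pages * SMALL_PAGE_SIZE) {
                /* whole entry unmapped: drop it from the memory map */
        } else if (va == mm->va) {
                /* range at the start: shrink the entry from below */
                mm->va += num_pages * SMALL_PAGE_SIZE;
                mm->pa += num_pages * SMALL_PAGE_SIZE;
                mm->size -= num_pages * SMALL_PAGE_SIZE;
        } else if (va + num_pages * SMALL_PAGE_SIZE == mm->va + mm->size) {
                /* range at the end: shrink the entry from above */
                mm->size -= num_pages * SMALL_PAGE_SIZE;
        } else {
                /* range in the middle: split the entry in two */
                struct tee_mmap_region m = *mm; /* copy becomes the upper half */

                mm->size = va - mm->va;
                m.va += mm->size + num_pages * SMALL_PAGE_SIZE;
                m.pa += mm->size + num_pages * SMALL_PAGE_SIZE;
                m.size -= mm->size + num_pages * SMALL_PAGE_SIZE;
                /* ...insert 'm' into the memory map after 'mm'... */
        }
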
2324 if (!core_mmu_find_table(NULL, res_map->va, UINT_MAX, &tbl_info)) in core_mmu_remove_mapping()
2343 if (res_map->va - map->size == map->va) { in core_mmu_remove_mapping()
2344 res_map->va -= map->size; in core_mmu_remove_mapping()
2400 return (void *)(vaddr_t)(map->va + addr - map->pa); in core_mmu_add_mapping()
2407 if (!core_mmu_find_table(NULL, map->va, UINT_MAX, &tbl_info)) in core_mmu_add_mapping()
2423 if (core_mmu_va2idx(&tbl_info, map->va + len) >= tbl_info.num_entries) in core_mmu_add_mapping()
2430 .va = map->va, in core_mmu_add_mapping()
2437 map->va += l; in core_mmu_add_mapping()
2447 return (void *)(vaddr_t)(map->va + addr - map->pa); in core_mmu_add_mapping()
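
Dynamic mappings (core_mmu_add_mapping()) borrow virtual address space from the MEM_AREA_RES_VASPACE entry, and core_mmu_remove_mapping() hands it back when the freed range is adjacent to it; the address bookkeeping reduced to its essentials (variable names simplified, 'l' stands for the granule-rounded length, the size adjustments mirror the va arithmetic and are assumptions, the table programming is elided):

        /* core_mmu_add_mapping(): carve 'l' bytes of VA off the reserved entry */
        vaddr_t new_va = res_map->va;

        /* ...program the translation table entries for [new_va, new_va + l)... */
        res_map->va += l;
        res_map->size -= l;

        /* core_mmu_remove_mapping(): merge the range back if it is adjacent */
        if (res_map->va - map->size == map->va) {
                res_map->va -= map->size;
                res_map->size += map->size;
        }
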
2464 static void check_pa_matches_va(void *va, paddr_t pa) in check_pa_matches_va() argument
2467 vaddr_t v = (vaddr_t)va; in check_pa_matches_va()
2485 va, &p); in check_pa_matches_va()
2496 if (is_unpaged(va)) { in check_pa_matches_va()
2526 if (!core_va2pa_helper(va, &p)) { in check_pa_matches_va()
2530 va, p, pa); in check_pa_matches_va()
2535 DMSG("va %p unmapped, expect 0x%" PRIxPA, va, pa); in check_pa_matches_va()
2541 static void check_pa_matches_va(void *va __unused, paddr_t pa __unused) in check_pa_matches_va()
2546 paddr_t virt_to_phys(void *va) in virt_to_phys() argument
2550 if (!arch_va2pa_helper(va, &pa)) in virt_to_phys()
2552 check_pa_matches_va(memtag_strip_tag(va), pa); in virt_to_phys()
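
virt_to_phys() delegates to arch_va2pa_helper() and, when the paranoid checks are compiled in, cross-checks the result against the memory map; a sketch matching the fragments (the value returned for an unmapped VA is an assumption):

paddr_t virt_to_phys(void *va)
{
        paddr_t pa = 0;

        if (!arch_va2pa_helper(va, &pa))
                pa = 0; /* unmapped VA: assumed to yield an invalid PA */
        check_pa_matches_va(memtag_strip_tag(va), pa);
        return pa;
}
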
2564 static void check_va_matches_pa(paddr_t pa, void *va) in check_va_matches_pa() argument
2568 if (!va) in check_va_matches_pa()
2571 p = virt_to_phys(va); in check_va_matches_pa()
2573 DMSG("va %p maps 0x%" PRIxPA " expect 0x%" PRIxPA, va, p, pa); in check_va_matches_pa()
2578 static void check_va_matches_pa(paddr_t pa __unused, void *va __unused) in check_va_matches_pa()
2634 void *va = NULL; in phys_to_virt() local
2638 va = phys_to_virt_ts_vaspace(pa, len); in phys_to_virt()
2646 va = phys_to_virt_tee_ram(pa, len); in phys_to_virt()
2652 va = NULL; in phys_to_virt()
2655 va = map_pa2va(find_map_by_type_and_pa(m, pa, len), pa, len); in phys_to_virt()
2658 check_va_matches_pa(pa, va); in phys_to_virt()
2659 return va; in phys_to_virt()
2665 void *va = NULL; in phys_to_virt_io() local
2672 va = map_pa2va(map, pa, len); in phys_to_virt_io()
2673 check_va_matches_pa(pa, va); in phys_to_virt_io()
2674 return va; in phys_to_virt_io()
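
phys_to_virt() dispatches on the memory type before falling back to a lookup in the static memory map, and the result is cross-checked against virt_to_phys() in debug builds; a sketch of the dispatch (case labels beyond the two helpers shown in the matches are abbreviated):

void *phys_to_virt(paddr_t pa, enum teecore_memtypes m, size_t len)
{
        void *va = NULL;

        switch (m) {
        case MEM_AREA_TS_VASPACE:
                va = phys_to_virt_ts_vaspace(pa, len);
                break;
        case MEM_AREA_TEE_RAM:  /* and the related TEE/nexus RAM types (assumed) */
                va = phys_to_virt_tee_ram(pa, len);
                break;
        /* ...types with no stable PA-to-VA relation yield NULL (labels omitted)... */
        default:
                va = map_pa2va(find_map_by_type_and_pa(m, pa, len), pa, len);
                break;
        }
        check_va_matches_pa(pa, va);
        return va;
}
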
2686 bool is_unpaged(const void *va) in is_unpaged() argument
2688 vaddr_t v = (vaddr_t)va; in is_unpaged()
2695 bool is_nexus(const void *va) in is_nexus() argument
2697 vaddr_t v = (vaddr_t)va; in is_nexus()
2707 if (!p->va) in io_pa_or_va()
2708 p->va = (vaddr_t)phys_to_virt_io(p->pa, len); in io_pa_or_va()
2709 assert(p->va); in io_pa_or_va()
2710 return p->va; in io_pa_or_va()
2719 if (!p->va) in io_pa_or_va_secure()
2720 p->va = (vaddr_t)phys_to_virt(p->pa, MEM_AREA_IO_SEC, in io_pa_or_va_secure()
2722 assert(p->va); in io_pa_or_va_secure()
2723 return p->va; in io_pa_or_va_secure()
2732 if (!p->va) in io_pa_or_va_nsec()
2733 p->va = (vaddr_t)phys_to_virt(p->pa, MEM_AREA_IO_NSEC, in io_pa_or_va_nsec()
2735 assert(p->va); in io_pa_or_va_nsec()
2736 return p->va; in io_pa_or_va_nsec()
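
The io_pa_or_va*() helpers implement one lazy-resolution idiom over struct io_pa_va: keep the physical address, look the virtual address up on first use, and cache it for later calls. A hypothetical driver-side usage sketch (device base, length, register offset and header paths are placeholders/assumptions):

#include <io.h>
#include <mm/core_memprot.h>

/* placeholder device base; a real driver would take this from platform data */
static struct io_pa_va base = { .pa = 0x12340000 };

static uint32_t read_device_status(void)
{
        /* first call resolves and caches the VA, later calls reuse it */
        vaddr_t va = io_pa_or_va_secure(&base, 0x1000 /* placeholder length */);

        return io_read32(va + 0x4 /* placeholder status register offset */);
}
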