
Searched refs:tbl_info (Results 1 – 6 of 6) sorted by relevance

/optee_os/core/include/mm/
core_mmu.h
430 struct core_mmu_table_info *tbl_info);
440 bool core_mmu_entry_to_finer_grained(struct core_mmu_table_info *tbl_info,
455 void core_mmu_set_entry(struct core_mmu_table_info *tbl_info, unsigned idx,
468 void core_mmu_get_entry(struct core_mmu_table_info *tbl_info, unsigned idx,
477 static inline unsigned core_mmu_va2idx(struct core_mmu_table_info *tbl_info, in core_mmu_va2idx() argument
481 if (tbl_info->level == CORE_MMU_BASE_TABLE_LEVEL) in core_mmu_va2idx()
484 return (va - tbl_info->va_base) >> tbl_info->shift; in core_mmu_va2idx()
493 static inline vaddr_t core_mmu_idx2va(struct core_mmu_table_info *tbl_info, in core_mmu_idx2va() argument
496 return (idx << tbl_info->shift) + tbl_info->va_base; in core_mmu_idx2va()
506 struct core_mmu_table_info *tbl_info, paddr_t pa) in core_mmu_get_block_offset() argument
[all …]
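
The two inline helpers above are plain shift arithmetic over the table's VA window: an entry index is the VA offset from va_base scaled down by shift, and the inverse maps an index back to the base VA of its entry. Below is a minimal standalone sketch of that math; struct tbl_info_model and the sample values are hypothetical, mirror only the fields visible in the excerpt (va_base, shift, num_entries), and the CORE_MMU_BASE_TABLE_LEVEL special case is omitted.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Pared-down stand-in for struct core_mmu_table_info: only the fields
 * used by core_mmu_va2idx()/core_mmu_idx2va() in the excerpt above. */
struct tbl_info_model {
	uintptr_t va_base;	/* first VA covered by the table */
	unsigned int shift;	/* log2 of the VA range covered per entry */
	unsigned int num_entries;
};

/* Same arithmetic as core_mmu_va2idx(): offset into the VA window,
 * scaled down by the per-entry granularity. */
static unsigned int model_va2idx(struct tbl_info_model *ti, uintptr_t va)
{
	return (va - ti->va_base) >> ti->shift;
}

/* Inverse mapping, as in core_mmu_idx2va(): base VA of entry idx. */
static uintptr_t model_idx2va(struct tbl_info_model *ti, unsigned int idx)
{
	return ((uintptr_t)idx << ti->shift) + ti->va_base;
}

int main(void)
{
	/* Hypothetical table: 512 entries of 4 KiB starting at 0x40000000. */
	struct tbl_info_model ti = {
		.va_base = 0x40000000, .shift = 12, .num_entries = 512,
	};
	uintptr_t va = 0x40003abc;
	unsigned int idx = model_va2idx(&ti, va);

	assert(idx < ti.num_entries);
	/* Prints idx 3 and entry base 0x40003000: idx2va() drops the
	 * in-entry offset. */
	printf("idx %u, entry base 0x%lx\n", idx,
	       (unsigned long)model_idx2va(&ti, idx));
	return 0;
}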
/optee_os/core/arch/riscv/mm/
core_mmu_arch.c
423 static bool core_mmu_entry_copy(struct core_mmu_table_info *tbl_info, in core_mmu_entry_copy() argument
435 if (idx >= tbl_info->num_entries) in core_mmu_entry_copy()
438 orig_pgt = tbl_info->table; in core_mmu_entry_copy()
442 if (core_mmu_entry_is_leaf(pte) || tbl_info->level >= RISCV_PGLEVELS) in core_mmu_entry_copy()
479 struct core_mmu_table_info tbl_info = { }; in core_init_mmu_prtn_ta_core() local
494 core_mmu_set_info_table(&tbl_info, level, 0, pgt); in core_init_mmu_prtn_ta_core()
503 if (!core_mmu_entry_copy(&tbl_info, 0)) in core_init_mmu_prtn_ta_core()
507 if (!core_mmu_entry_to_finer_grained(&tbl_info, 0, true)) in core_init_mmu_prtn_ta_core()
752 struct core_mmu_table_info *tbl_info) in core_mmu_find_table() argument
777 core_mmu_set_info_table(tbl_info, level, va_base, pgt); in core_mmu_find_table()
[all …]
/optee_os/core/arch/arm/mm/
core_mmu_lpae.c
753 static bool core_mmu_entry_copy(struct core_mmu_table_info *tbl_info, in core_mmu_entry_copy() argument
762 prtn = tbl_info->prtn; in core_mmu_entry_copy()
768 if (idx >= tbl_info->num_entries) in core_mmu_entry_copy()
771 entry = (uint64_t *)tbl_info->table + idx; in core_mmu_entry_copy()
775 tbl_info->level >= XLAT_TABLE_LEVEL_MAX) in core_mmu_entry_copy()
782 orig_table = core_mmu_xlat_table_entry_pa2va(prtn, tbl_info->level, in core_mmu_entry_copy()
971 struct core_mmu_table_info tbl_info = { }; in core_init_mmu_prtn_ta_core() local
983 core_mmu_set_info_table(&tbl_info, 0, 0, tbl); in core_init_mmu_prtn_ta_core()
985 tbl_info.prtn = prtn; in core_init_mmu_prtn_ta_core()
995 if (!core_mmu_entry_copy(&tbl_info, 0)) in core_init_mmu_prtn_ta_core()
[all …]
core_mmu_v7.c
539 void core_mmu_set_info_table(struct core_mmu_table_info *tbl_info, in core_mmu_set_info_table() argument
542 tbl_info->level = level; in core_mmu_set_info_table()
543 tbl_info->next_level = level + 1; in core_mmu_set_info_table()
544 tbl_info->table = table; in core_mmu_set_info_table()
545 tbl_info->va_base = va_base; in core_mmu_set_info_table()
548 tbl_info->shift = SECTION_SHIFT; in core_mmu_set_info_table()
549 tbl_info->num_entries = NUM_L1_ENTRIES; in core_mmu_set_info_table()
551 tbl_info->shift = SMALL_PAGE_SHIFT; in core_mmu_set_info_table()
552 tbl_info->num_entries = NUM_L2_ENTRIES; in core_mmu_set_info_table()
581 struct core_mmu_table_info *tbl_info) in core_mmu_find_table() argument
[all …]
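
core_mmu_set_info_table() is how such a descriptor is normally populated before the core_mmu.h helpers are used; the v7 version above fills shift/num_entries from the level (sections versus small pages on the short-descriptor format). A hedged sketch of that pairing follows, assuming the OP-TEE core headers and the argument order visible in the riscv/lpae excerpts, (tbl_info, level, va_base, table); the pa/attr parameters of core_mmu_set_entry() are elided in the header excerpt and assumed here, and map_one_small_page() with its arguments is a hypothetical caller, not an OP-TEE function.

#include <stdint.h>
#include <mm/core_mmu.h>

/* Sketch: wrap an existing level-2 table in a core_mmu_table_info and
 * update one small-page entry. l2_table/va_base/va/pa/attr come from a
 * hypothetical caller. */
static void map_one_small_page(void *l2_table, vaddr_t va_base,
			       vaddr_t va, paddr_t pa, uint32_t attr)
{
	struct core_mmu_table_info tbl_info = { };
	unsigned int idx = 0;

	/* Assumes level 2 selects the small-page branch shown above, so
	 * shift/num_entries end up as SMALL_PAGE_SHIFT/NUM_L2_ENTRIES. */
	core_mmu_set_info_table(&tbl_info, 2, va_base, l2_table);

	idx = core_mmu_va2idx(&tbl_info, va);
	/* core_mmu_set_entry() asserts idx < num_entries before writing
	 * the descriptor (see the core_mmu.c excerpt further down). */
	core_mmu_set_entry(&tbl_info, idx, pa, attr);
}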
tee_pager.c
152 struct core_mmu_table_info tbl_info; member
250 idx = core_mmu_va2idx(&pager_tables[n].tbl_info, TEE_RAM_START); in tee_pager_phys_to_virt()
253 v = core_mmu_idx2va(&pager_tables[n].tbl_info, idx); in tee_pager_phys_to_virt()
257 core_mmu_get_entry(&pager_tables[n].tbl_info, in tee_pager_phys_to_virt()
339 n = ((va & ~mask) - pager_tables[0].tbl_info.va_base) >> in find_pager_table_may_fail()
344 assert(va >= pager_tables[n].tbl_info.va_base && in find_pager_table_may_fail()
345 va <= (pager_tables[n].tbl_info.va_base | mask)); in find_pager_table_may_fail()
365 *ti = pt->tbl_info; in tee_pager_get_table_info()
371 return &find_pager_table(va)->tbl_info; in find_table_info()
396 idx = core_mmu_va2idx(&pt->tbl_info, smem); in tee_pager_set_alias_area()
[all …]
/optee_os/core/mm/
core_mmu.c
959 struct core_mmu_table_info tbl_info; in dump_xlat_table() local
964 core_mmu_find_table(NULL, va, level, &tbl_info); in dump_xlat_table()
965 va = tbl_info.va_base; in dump_xlat_table()
966 for (idx = 0; idx < tbl_info.num_entries; idx++) { in dump_xlat_table()
967 core_mmu_get_entry(&tbl_info, idx, &pa, &attr); in dump_xlat_table()
999 va += BIT64(tbl_info.shift); in dump_xlat_table()
1815 void core_mmu_set_entry(struct core_mmu_table_info *tbl_info, unsigned int idx, in core_mmu_set_entry() argument
1818 assert(idx < tbl_info->num_entries); in core_mmu_set_entry()
1819 core_mmu_set_entry_primitive(tbl_info->table, tbl_info->level, in core_mmu_set_entry()
1823 void core_mmu_get_entry(struct core_mmu_table_info *tbl_info, unsigned int idx, in core_mmu_get_entry() argument
[all …]
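
dump_xlat_table() above shows the read-side pattern for these wrappers: find the table that covers a VA, realign to its va_base, then read every entry while stepping the VA by the per-entry size. A sketch of that loop under the same calls, assuming OP-TEE's <mm/core_mmu.h> and <util.h> (for BIT64()); walk_one_table() and walk_cb() are hypothetical names, and the NULL first argument and unchecked return of core_mmu_find_table() simply mirror the excerpt.

#include <stdint.h>
#include <mm/core_mmu.h>
#include <util.h>

/* Sketch of the dump_xlat_table() walk pattern with a hypothetical
 * callback invoked for every entry of the table covering va. */
static void walk_one_table(vaddr_t va, unsigned int level,
			   void (*walk_cb)(vaddr_t va, paddr_t pa,
					   uint32_t attr))
{
	struct core_mmu_table_info tbl_info = { };
	unsigned int idx = 0;
	paddr_t pa = 0;
	uint32_t attr = 0;

	/* NULL first argument and ignored return value as in the excerpt. */
	core_mmu_find_table(NULL, va, level, &tbl_info);

	/* Realign to the start of the VA window the table covers. */
	va = tbl_info.va_base;
	for (idx = 0; idx < tbl_info.num_entries; idx++) {
		core_mmu_get_entry(&tbl_info, idx, &pa, &attr);
		walk_cb(va, pa, attr);
		/* Each entry spans 2^shift bytes of VA. */
		va += BIT64(tbl_info.shift);
	}
}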