Searched refs:mm (Results 1 – 25 of 43) sorted by relevance

/optee_os/core/mm/
tee_mm.c
238 tee_mm_entry_t *mm; in tee_mm_alloc2() local
249 mm = malloc_flags(pool->flags, NULL, MALLOC_DEFAULT_ALIGNMENT, in tee_mm_alloc2()
251 if (!mm) in tee_mm_alloc2()
274 tee_mm_add(entry, mm); in tee_mm_alloc2()
276 mm->offset = offslo; in tee_mm_alloc2()
277 mm->size = offshi - offslo; in tee_mm_alloc2()
278 mm->pool = pool; in tee_mm_alloc2()
282 return mm; in tee_mm_alloc2()
285 free_flags(pool->flags, mm); in tee_mm_alloc2()
313 size_t tee_mm_get_bytes(const tee_mm_entry_t *mm) in tee_mm_get_bytes() argument
[all …]
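
Taken together, these hits show the shape of the tee_mm allocator: entries are carved out of a pool, queried with tee_mm_get_smem()/tee_mm_get_bytes(), and returned with tee_mm_free(). A minimal usage sketch inside OP-TEE core, assuming a pool initialized elsewhere and SMALL_PAGE_SIZE visible via the mm headers:

    #include <stdbool.h>
    #include <stdint.h>
    #include <mm/core_mmu.h>
    #include <mm/tee_mm.h>

    static bool carve_one_page(tee_mm_pool_t *pool)
    {
        tee_mm_entry_t *mm = tee_mm_alloc(pool, SMALL_PAGE_SIZE);

        if (!mm)
            return false; /* pool exhausted */

        /* Start address and rounded-up size of the carved-out range */
        uintptr_t start = tee_mm_get_smem(mm);
        size_t bytes = tee_mm_get_bytes(mm);

        /* ... use [start, start + bytes) ... */

        tee_mm_free(mm); /* return the range to the pool */
        return true;
    }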
boot_mem.c
283 tee_mm_entry_t *mm = NULL; in boot_mem_release_unused() local
307 mm = nex_phys_mem_mm_find(pa); in boot_mem_release_unused()
308 if (!mm) in boot_mem_release_unused()
317 pa = tee_mm_get_smem(mm); in boot_mem_release_unused()
319 tee_mm_free(mm); in boot_mem_release_unused()
321 mm = nex_phys_mem_alloc2(pa, n); in boot_mem_release_unused()
322 if (!mm) in boot_mem_release_unused()
324 mm = nex_phys_mem_alloc2(tmp_pa, tmp_n); in boot_mem_release_unused()
325 if (!mm) in boot_mem_release_unused()
342 tee_mm_entry_t *mm = NULL; in boot_mem_release_tmp_alloc() local
[all …]
fobj.c
145 tee_mm_entry_t *mm = NULL; in rwp_paged_iv_alloc() local
156 mm = nex_phys_mem_ta_alloc(size); in rwp_paged_iv_alloc()
157 if (!mm) in rwp_paged_iv_alloc()
159 rwp->idx = (tee_mm_get_smem(mm) - nex_phys_mem_get_ta_base()) / in rwp_paged_iv_alloc()
169 tee_mm_free(mm); in rwp_paged_iv_alloc()
222 tee_mm_entry_t *mm = nex_phys_mem_mm_find(pa); in rwp_paged_iv_free() local
224 assert(mm); in rwp_paged_iv_free()
227 tee_mm_free(mm); in rwp_paged_iv_free()
257 tee_mm_entry_t *mm = NULL; in rwp_unpaged_iv_alloc() local
270 mm = nex_phys_mem_ta_alloc(size); in rwp_unpaged_iv_alloc()
[all …]
phys_mem.c
91 tee_mm_entry_t *mm = NULL; in mm_alloc() local
94 mm = tee_mm_alloc_flags(p0, size, flags); in mm_alloc()
95 if (!mm && p1) in mm_alloc()
96 mm = tee_mm_alloc_flags(p1, size, flags); in mm_alloc()
98 return mm; in mm_alloc()
130 tee_mm_entry_t *mm __maybe_unused = NULL; in partial_carve_out()
139 mm = tee_mm_alloc2(pool, pa, sz); in partial_carve_out()
140 assert(mm); in partial_carve_out()
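
The mm_alloc() hits show a simple two-pool fallback: try the preferred pool first and only touch the second pool if the first is exhausted. The same pattern in isolation (pool names and the flags type are placeholders; tee_mm_alloc_flags() is used exactly as in the hits):

    static tee_mm_entry_t *alloc_with_fallback(tee_mm_pool_t *primary,
                                               tee_mm_pool_t *fallback,
                                               size_t size, uint32_t flags)
    {
        tee_mm_entry_t *mm = tee_mm_alloc_flags(primary, size, flags);

        /* The second pool is optional, mirroring the `p1` check above */
        if (!mm && fallback)
            mm = tee_mm_alloc_flags(fallback, size, flags);

        return mm;
    }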
mobj_dyn_shm.c
36 tee_mm_entry_t *mm; member
123 if (!mrs->mm || !mobj_check_offset_and_len(mobj, offst, len)) in mobj_reg_shm_get_va()
126 return (void *)(vaddr_t)(tee_mm_get_smem(mrs->mm) + offst + in mobj_reg_shm_get_va()
132 assert(r->mm); in reg_shm_unmap_helper()
133 assert(r->mm->pool->shift == SMALL_PAGE_SHIFT); in reg_shm_unmap_helper()
134 core_mmu_unmap_pages(tee_mm_get_smem(r->mm), r->mm->size); in reg_shm_unmap_helper()
135 tee_mm_free(r->mm); in reg_shm_unmap_helper()
136 r->mm = NULL; in reg_shm_unmap_helper()
143 if (mobj_reg_shm->mm) in reg_shm_free_helper()
221 if (!r->mm) { in mobj_reg_shm_inc_map()
[all …]
core_mmu.c
903 static bool __maybe_unused map_is_tee_ram(const struct tee_mmap_region *mm) in map_is_tee_ram() argument
905 switch (mm->type) { in map_is_tee_ram()
921 static bool __maybe_unused map_is_secure(const struct tee_mmap_region *mm) in map_is_secure() argument
923 return !!(core_mmu_type_to_attr(mm->type) & TEE_MATTR_SECURE); in map_is_secure()
926 static bool __maybe_unused map_is_pgdir(const struct tee_mmap_region *mm) in map_is_pgdir() argument
928 return mm->region_size == CORE_MMU_PGDIR_SIZE; in map_is_pgdir()
1940 struct tee_mmap_region *mm) in can_map_at_level() argument
1954 if (mm->region_size < block_size) in can_map_at_level()
1962 if ((map_is_tee_ram(mm) || mm->type == MEM_AREA_PAGER_VASPACE) && in can_map_at_level()
1970 void core_mmu_map_region(struct mmu_partition *prtn, struct tee_mmap_region *mm) in core_mmu_map_region() argument
[all …]
/optee_os/core/pta/tests/
transfer_list.c
151 tee_mm_entry_t *mm = NULL; in transfer_list_tests() local
153 mm = phys_mem_core_alloc(SMALL_PAGE_SIZE); in transfer_list_tests()
154 if (!mm) in transfer_list_tests()
157 tl = transfer_list_init(tee_mm_get_smem(mm), TEST_TL_MAX_SIZE); in transfer_list_tests()
233 tee_mm_free(mm); in transfer_list_tests()
/optee_os/core/arch/arm/kernel/
boot.c
459 static struct fobj *ro_paged_alloc(tee_mm_entry_t *mm, void *hashes, in ro_paged_alloc() argument
462 const unsigned int num_pages = tee_mm_get_bytes(mm) / SMALL_PAGE_SIZE; in ro_paged_alloc()
488 tee_mm_entry_t *mm = NULL; in init_pager_runtime() local
520 mm = nex_phys_mem_ta_alloc(pageable_size); in init_pager_runtime()
521 assert(mm); in init_pager_runtime()
522 paged_store = phys_to_virt(tee_mm_get_smem(mm), in init_pager_runtime()
576 mm = tee_mm_alloc2(&core_virt_mem_pool, in init_pager_runtime()
580 assert(mm); in init_pager_runtime()
581 tee_pager_set_alias_area(mm); in init_pager_runtime()
587 mm = tee_mm_alloc2(&core_virt_mem_pool, VCORE_UNPG_RX_PA, in init_pager_runtime()
[all …]
thread_spmc.c
49 tee_mm_entry_t *mm; member
330 tee_mm_entry_t *mm = NULL; in map_buf() local
335 mm = tee_mm_alloc(&core_virt_shm_pool, sz); in map_buf()
336 if (!mm) in map_buf()
339 if (core_mmu_map_contiguous_pages(tee_mm_get_smem(mm), pa, in map_buf()
342 tee_mm_free(mm); in map_buf()
346 *va_ret = (void *)tee_mm_get_smem(mm); in map_buf()
358 tee_mm_entry_t *mm = tee_mm_find(&core_virt_shm_pool, (vaddr_t)va); in unmap_buf() local
360 assert(mm); in unmap_buf()
361 core_mmu_unmap_pages(tee_mm_get_smem(mm), sz / SMALL_PAGE_SIZE); in unmap_buf()
[all …]
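
map_buf()/unmap_buf() pair a VA carve-out with the MMU calls: the tee_mm entry reserves the virtual range, core_mmu_map_contiguous_pages() populates it, and the entry is freed again if mapping fails. A condensed sketch of that pairing (the memory type argument is an assumption for illustration):

    static void *map_phys_buf(paddr_t pa, size_t sz)
    {
        tee_mm_entry_t *mm = tee_mm_alloc(&core_virt_shm_pool, sz);

        if (!mm)
            return NULL;

        /* Map sz bytes of physically contiguous memory at the reserved VA */
        if (core_mmu_map_contiguous_pages(tee_mm_get_smem(mm), pa,
                                          sz / SMALL_PAGE_SIZE,
                                          MEM_AREA_NSEC_SHM)) {
            tee_mm_free(mm); /* undo the VA reservation on failure */
            return NULL;
        }

        return (void *)tee_mm_get_smem(mm);
    }

Teardown reverses the order, as unmap_buf() does above: look the entry up with tee_mm_find(), unmap with core_mmu_unmap_pages(), then tee_mm_free().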
secure_partition.c
502 tee_mm_entry_t *mm = NULL; in load_binary_sp() local
552 mm = phys_mem_ta_alloc(bin_size_rounded); in load_binary_sp()
553 if (!mm) { in load_binary_sp()
558 base_addr = tee_mm_get_smem(mm); in load_binary_sp()
607 tee_mm_free(mm); in load_binary_sp()
846 tee_mm_entry_t *mm = NULL; in handle_fdt_load_relative_mem_regions() local
868 mm = NULL; in handle_fdt_load_relative_mem_regions()
945 mm = phys_mem_ta_alloc(size); in handle_fdt_load_relative_mem_regions()
946 if (!mm) in handle_fdt_load_relative_mem_regions()
949 base_addr = tee_mm_get_smem(mm); in handle_fdt_load_relative_mem_regions()
[all …]
/optee_os/core/arch/arm/mm/
core_mmu_lpae.c
652 tee_mm_entry_t *mm = NULL; in core_mmu_xlat_table_alloc() local
665 mm = nex_phys_mem_core_alloc(XLAT_TABLE_SIZE); in core_mmu_xlat_table_alloc()
666 if (!mm) in core_mmu_xlat_table_alloc()
669 mm = phys_mem_core_alloc(XLAT_TABLE_SIZE); in core_mmu_xlat_table_alloc()
670 if (!mm) in core_mmu_xlat_table_alloc()
673 if (!mm) in core_mmu_xlat_table_alloc()
675 pa = tee_mm_get_smem(mm); in core_mmu_xlat_table_alloc()
798 struct tee_mmap_region *mm) in share_region() argument
803 struct tee_mmap_region dummy_mm = *mm; in share_region()
810 assert(!(mm->size % CORE_MMU_PGDIR_SIZE)); in share_region()
[all …]
mobj_ffa.c
90 tee_mm_entry_t *mm; member
398 assert(!m->mm); in mobj_ffa_sel1_spmc_delete()
514 if (m->mm) { in unmap_helper()
515 core_mmu_unmap_pages(tee_mm_get_smem(m->mm), in unmap_helper()
517 tee_mm_free(m->mm); in unmap_helper()
518 m->mm = NULL; in unmap_helper()
783 if (!m->mm || !mobj_check_offset_and_len(mobj, offset, len)) in ffa_shm_get_va()
786 return (void *)(tee_mm_get_smem(m->mm) + offset + m->page_offset); in ffa_shm_get_va()
881 if (!m->mm) { in ffa_shm_inc_map()
883 m->mm = tee_mm_alloc(&core_virt_shm_pool, sz); in ffa_shm_inc_map()
[all …]
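
Both mobj flavors (mobj_dyn_shm.c above and mobj_ffa.c here) compute a caller-visible VA the same way: no mapping means no VA, the requested window must fit, and the result is the mapped base plus the offset and any sub-page offset. A sketch with a hypothetical struct modeled on the `member` hits:

    /* Hypothetical shared-memory object, modeled on the fields in the hits */
    struct shm_obj {
        tee_mm_entry_t *mm; /* VA range while mapped, NULL otherwise */
        size_t size;        /* usable length of the buffer */
        size_t page_offset; /* offset of the buffer within the first page */
    };

    static void *shm_get_va(struct shm_obj *m, size_t offset, size_t len)
    {
        /* Reject unmapped objects and windows that overflow or overrun */
        if (!m->mm || offset + len < offset || offset + len > m->size)
            return NULL;

        return (void *)(tee_mm_get_smem(m->mm) + offset + m->page_offset);
    }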
tee_pager.c
379 void tee_pager_set_alias_area(tee_mm_entry_t *mm) in tee_pager_set_alias_area() argument
383 vaddr_t smem = tee_mm_get_smem(mm); in tee_pager_set_alias_area()
384 size_t nbytes = tee_mm_get_bytes(mm); in tee_pager_set_alias_area()
391 pager_alias_area = mm; in tee_pager_set_alias_area()
1999 tee_mm_entry_t *mm = NULL; in tee_pager_alloc() local
2007 mm = tee_mm_alloc(&core_virt_mem_pool, ROUNDUP(size, SMALL_PAGE_SIZE)); in tee_pager_alloc()
2008 if (!mm) in tee_pager_alloc()
2011 smem = (uint8_t *)tee_mm_get_smem(mm); in tee_pager_alloc()
2012 num_pages = tee_mm_get_bytes(mm) / SMALL_PAGE_SIZE; in tee_pager_alloc()
2015 tee_mm_free(mm); in tee_pager_alloc()
[all …]
/optee_os/core/arch/riscv/mm/
core_mmu_arch.c
306 tee_mm_entry_t *mm = NULL; in core_mmu_pgt_alloc() local
311 mm = phys_mem_core_alloc(size); in core_mmu_pgt_alloc()
312 if (!mm) in core_mmu_pgt_alloc()
315 mm = nex_phys_mem_core_alloc(size); in core_mmu_pgt_alloc()
316 if (!mm) in core_mmu_pgt_alloc()
319 if (!mm) in core_mmu_pgt_alloc()
321 pa = tee_mm_get_smem(mm); in core_mmu_pgt_alloc()
542 struct tee_mmap_region *mm = mem_map->map + n; in core_init_mmu_prtn_tee() local
545 mm->va, mm->pa, mm->size, mm->attr); in core_init_mmu_prtn_tee()
547 if (!IS_PAGE_ALIGNED(mm->pa) || !IS_PAGE_ALIGNED(mm->size)) in core_init_mmu_prtn_tee()
/optee_os/lib/libmbedtls/mbedtls/library/
bignum_core.c
532 mbedtls_mpi_uint mm, in mbedtls_mpi_core_montmul() argument
540 mbedtls_mpi_uint u1 = (T[0] + u0 * B[0]) * mm; in mbedtls_mpi_core_montmul()
725 mbedtls_mpi_uint mm, in exp_mod_precompute_window() argument
734 mbedtls_mpi_core_montmul(Wtable, Wtable, RR, AN_limbs, N, AN_limbs, mm, temp); in exp_mod_precompute_window()
744 mbedtls_mpi_core_montmul(Wcur, Wprev, W1, AN_limbs, N, AN_limbs, mm, temp); in exp_mod_precompute_window()
884 const mbedtls_mpi_uint mm = mbedtls_mpi_core_montmul_init(N); in mbedtls_mpi_core_exp_mod_optionally_safe() local
888 mm, RR, in mbedtls_mpi_core_exp_mod_optionally_safe()
905 mbedtls_mpi_core_montmul(X, X, X, AN_limbs, N, AN_limbs, mm, temp); in mbedtls_mpi_core_exp_mod_optionally_safe()
927 mbedtls_mpi_core_montmul(X, X, Wselect, AN_limbs, N, AN_limbs, mm, in mbedtls_mpi_core_exp_mod_optionally_safe()
1003 mbedtls_mpi_uint mm, in mbedtls_mpi_core_to_mont_rep() argument
[all …]
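
In the mbedtls hits, mm is not a memory-management entry: it is the Montgomery constant mm = -N^-1 mod 2^biL, precomputed once per modulus by mbedtls_mpi_core_montmul_init() (legacy API: mbedtls_mpi_montg_init()) and passed to every montmul() call. The constant depends only on the low limb of an odd N and can be computed with a short Newton iteration; a self-contained sketch for 64-bit limbs:

    #include <stdint.h>

    /* Compute mm = -n0^-1 mod 2^64 for an odd low limb n0.
     * For odd n0, x = n0 is already correct to 3 low bits (n0*n0 == 1 mod 8),
     * and each step x = x * (2 - n0*x) doubles the number of correct bits:
     * 3 -> 6 -> 12 -> 24 -> 48 -> 96 >= 64. */
    static uint64_t montmul_init64(uint64_t n0)
    {
        uint64_t x = n0;

        for (int i = 0; i < 5; i++)
            x *= 2 - n0 * x;

        return -x; /* unsigned negation: -n0^-1 mod 2^64 */
    }

mbedtls computes this internally per modulus; the sketch just makes the one-limb arithmetic explicit.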
bignum_core.h
513 mbedtls_mpi_uint mm, mbedtls_mpi_uint *T);
785 mbedtls_mpi_uint mm,
822 mbedtls_mpi_uint mm,
bignum_mod_raw.c
130 N->rep.mont.mm, T); in mbedtls_mpi_mod_raw_mul()
242 N->rep.mont.mm, N->rep.mont.rr, T); in mbedtls_mpi_mod_raw_to_mont_rep()
258 mbedtls_mpi_core_from_mont_rep(X, X, N->p, N->limbs, N->rep.mont.mm, T); in mbedtls_mpi_mod_raw_from_mont_rep()
bignum_mod.c
75 N->rep.mont.mm = 0; in mbedtls_mpi_mod_modulus_free()
143 N->rep.mont.mm = mbedtls_mpi_core_montmul_init(N->p); in mbedtls_mpi_mod_modulus_setup()
231 Nmont.rep.mont.mm, Nmont.rep.mont.rr, in mbedtls_mpi_mod_inv_non_mont()
242 Nmont.rep.mont.mm, working_memory); in mbedtls_mpi_mod_inv_non_mont()
/optee_os/core/include/mm/
tee_mm.h
62 uintptr_t tee_mm_get_smem(const tee_mm_entry_t *mm);
106 size_t tee_mm_get_bytes(const tee_mm_entry_t *mm);
core_mmu.h
517 static inline bool core_mmu_is_dynamic_vaspace(struct tee_mmap_region *mm) in core_mmu_is_dynamic_vaspace() argument
519 switch (mm->type) { in core_mmu_is_dynamic_vaspace()
685 struct tee_mmap_region *mm);
/optee_os/core/kernel/
thread.c
536 tee_mm_entry_t *mm = NULL; in init_thread_stacks() local
542 mm = tee_mm_alloc(&core_virt_mem_pool, in init_thread_stacks()
544 assert(mm); in init_thread_stacks()
547 tee_pager_add_pages(tee_mm_get_smem(mm), tee_mm_get_size(mm), in init_thread_stacks()
550 num_pages = tee_mm_get_bytes(mm) / SMALL_PAGE_SIZE - 1; in init_thread_stacks()
554 tee_pager_add_core_region(tee_mm_get_smem(mm) + SMALL_PAGE_SIZE, in init_thread_stacks()
559 sp = tee_mm_get_smem(mm) + tee_mm_get_bytes(mm); in init_thread_stacks()
560 asan_tag_access((void *)tee_mm_get_smem(mm), (void *)sp); in init_thread_stacks()
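
The init_thread_stacks() hits read as a guard-page layout: the pager region starts one page above the carve-out and covers one page less than it (note the `+ SMALL_PAGE_SIZE` and `- 1`), so the lowest page stays unmapped and a stack overflow faults instead of silently corrupting the neighbor. The resulting layout, with hypothetical helper names:

    /* Hypothetical helper mirroring the arithmetic in init_thread_stacks() */
    struct stack_layout {
        vaddr_t guard;    /* lowest page, deliberately left unmapped */
        vaddr_t stack_lo; /* first usable stack byte */
        vaddr_t sp;       /* initial stack pointer (stack grows down) */
    };

    static struct stack_layout layout_stack(tee_mm_entry_t *mm)
    {
        vaddr_t base = tee_mm_get_smem(mm);

        return (struct stack_layout){
            .guard = base,
            .stack_lo = base + SMALL_PAGE_SIZE,
            .sp = base + tee_mm_get_bytes(mm),
        };
    }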
ree_fs_ta.c
685 tee_mm_entry_t *mm; member
729 handle->mm = phys_mem_ta_alloc(handle->ta_size); in buf_ta_open()
730 if (!handle->mm) { in buf_ta_open()
734 handle->buf = phys_to_virt(tee_mm_get_smem(handle->mm), in buf_ta_open()
756 tee_mm_free(handle->mm); in buf_ta_open()
815 tee_mm_free(handle->mm); in buf_ta_close()
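
ree_fs_ta.c shows the other common pairing: carve physical memory from the TA pool, then reach it through the already-present static mapping with phys_to_virt() instead of creating a new one. A sketch (the memory type is an assumption; phys_mem_ta_alloc() and phys_to_virt() are used as in the hits):

    static void *alloc_ta_buf(size_t size, tee_mm_entry_t **mm_ret)
    {
        tee_mm_entry_t *mm = phys_mem_ta_alloc(size);
        void *va = NULL;

        if (!mm)
            return NULL;

        /* Translate via the static kernel mapping; no new MMU entries */
        va = phys_to_virt(tee_mm_get_smem(mm), MEM_AREA_TA_RAM, size);
        if (!va) {
            tee_mm_free(mm);
            return NULL;
        }

        *mm_ret = mm;
        return va;
    }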
/optee_os/lib/libmbedtls/mbedtls/include/mbedtls/
bignum.h
1082 void mbedtls_mpi_montg_init( mbedtls_mpi_uint *mm, const mbedtls_mpi *N );
1093 const mbedtls_mpi *N, mbedtls_mpi_uint mm,
1104 mbedtls_mpi_uint mm, mbedtls_mpi *T);
/optee_os/core/lib/libtomcrypt/src/pk/asn1/der/utctime/
der_encode_utctime.c
52 STORE_V(utctime->mm); in der_encode_utctime()
der_decode_utctime.c
84 DECODE_V(out->mm, 60); in der_decode_utctime()
(In these two libtomcrypt hits, mm is the minutes field of a UTCTIME value, bounded below 60 by DECODE_V; it is unrelated to the memory-manager entries above.)
