
Searched for refs:uctx (results 1 – 25 of 41), sorted by relevance


/optee_os/core/include/mm/
vm.h
  13  TEE_Result vm_info_init(struct user_mode_ctx *uctx, struct ts_ctx *ts_ctx);
  16  void vm_info_final(struct user_mode_ctx *uctx);
  29  TEE_Result vm_map_pad(struct user_mode_ctx *uctx, vaddr_t *va, size_t len,
  39  static inline TEE_Result vm_map(struct user_mode_ctx *uctx, vaddr_t *va,
  43          return vm_map_pad(uctx, va, len, prot, flags, mobj, offs, 0, 0, 0); in vm_map()
  46  TEE_Result vm_remap(struct user_mode_ctx *uctx, vaddr_t *new_va, vaddr_t old_va,
  49  TEE_Result vm_get_flags(struct user_mode_ctx *uctx, vaddr_t va, size_t len,
  52  TEE_Result vm_get_prot(struct user_mode_ctx *uctx, vaddr_t va, size_t len,
  55  TEE_Result vm_set_prot(struct user_mode_ctx *uctx, vaddr_t va, size_t len,
  58  TEE_Result vm_unmap(struct user_mode_ctx *uctx, vaddr_t va, size_t len);
  [all …]
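The vm.h declarations above are OP-TEE's user-mode mapping API: vm_map()/vm_map_pad() attach a mobj to the address space, vm_get_prot()/vm_set_prot() query and change protections, and vm_unmap() tears a mapping down. A minimal usage sketch follows, assuming an OP-TEE kernel build; the helper name map_then_write_protect(), the caller-supplied mobj, and the exact TEE_MATTR_* bits are illustrative assumptions, not taken from the results above.

/*
 * Hypothetical sketch (not from the search results): map a mobj
 * read-write into a user context, populate it, then drop the mapping
 * to read-only. Assumes TEE_MATTR_PR is the privileged-read bit.
 */
static TEE_Result map_then_write_protect(struct user_mode_ctx *uctx,
                                         struct mobj *mobj, size_t len,
                                         vaddr_t *va)
{
        TEE_Result res = TEE_SUCCESS;

        *va = 0; /* assumption: 0 lets vm_map() pick a free address */
        res = vm_map(uctx, va, len, TEE_MATTR_PRW, 0, mobj, 0);
        if (res)
                return res;

        /* ... fill the new mapping here ... */

        res = vm_set_prot(uctx, *va, len, TEE_MATTR_PR);
        if (res)
                vm_unmap(uctx, *va, len);

        return res;
}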
tee_pager.h
  102  TEE_Result tee_pager_add_um_region(struct user_mode_ctx *uctx, vaddr_t base,
  106  tee_pager_add_um_region(struct user_mode_ctx *uctx __unused,
  125  bool tee_pager_set_um_region_attr(struct user_mode_ctx *uctx, vaddr_t base,
  129  tee_pager_set_um_region_attr(struct user_mode_ctx *uctx __unused,
  138  void tee_pager_rem_um_region(struct user_mode_ctx *uctx, vaddr_t base,
  141  static inline void tee_pager_rem_um_region(struct user_mode_ctx *uctx __unused,
  149  TEE_Result tee_pager_split_um_region(struct user_mode_ctx *uctx, vaddr_t va);
  152  tee_pager_split_um_region(struct user_mode_ctx *uctx __unused,
  160  void tee_pager_merge_um_region(struct user_mode_ctx *uctx, vaddr_t va,
  164  tee_pager_merge_um_region(struct user_mode_ctx *uctx __unused,
  [all …]
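Each pager entry point above appears twice: a real prototype and a static inline no-op whose parameters are marked __unused. That is the usual compile-time switch for pager support; a sketch of the pattern is below. The CFG_WITH_PAGER guard name is OP-TEE's pager config option, but its exact placement here is an assumption, as is the size parameter (inferred from the rem_um_region() call in vm.c further down).

/*
 * Sketch of the assumed conditional-compilation pattern: callers get a
 * real function with the pager enabled and an empty inline otherwise,
 * so no caller-side #ifdefs are needed.
 */
#ifdef CFG_WITH_PAGER
void tee_pager_rem_um_region(struct user_mode_ctx *uctx, vaddr_t base,
                             size_t size);
#else
static inline void tee_pager_rem_um_region(struct user_mode_ctx *uctx __unused,
                                           vaddr_t base __unused,
                                           size_t size __unused)
{
}
#endif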
pgt_cache.h
  44  bool pgt_check_avail(struct user_mode_ctx *uctx);
  54  static inline void pgt_get_all(struct user_mode_ctx *uctx __unused) { }
  56  void pgt_get_all(struct user_mode_ctx *uctx);
  65  static inline void pgt_put_all(struct user_mode_ctx *uctx __unused) { }
  67  void pgt_put_all(struct user_mode_ctx *uctx);
  70  void pgt_clear_range(struct user_mode_ctx *uctx, vaddr_t begin, vaddr_t end);
  71  void pgt_flush_range(struct user_mode_ctx *uctx, vaddr_t begin, vaddr_t last);
  89  void pgt_flush(struct user_mode_ctx *uctx);
/optee_os/core/mm/
vm.c
  111  static TEE_Result alloc_pgt(struct user_mode_ctx *uctx)
  115          if (!pgt_check_avail(uctx)) { in alloc_pgt()
  122          if (uctx->ts_ctx == tsd->ctx) { in alloc_pgt()
  127                  pgt_get_all(uctx); in alloc_pgt()
  134  static void rem_um_region(struct user_mode_ctx *uctx, struct vm_region *r)
  141          tee_pager_rem_um_region(uctx, r->va, r->size); in rem_um_region()
  143          pgt_clear_range(uctx, r->va, r->va + r->size); in rem_um_region()
  145                          uctx->vm_info.asid); in rem_um_region()
  166          pgt_flush_range(uctx, begin, last); in rem_um_region()
  201  static void set_um_region(struct user_mode_ctx *uctx, struct vm_region *r)
  [all …]
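The alloc_pgt() fragments above show how the pgt_cache.h calls pair up: fail early when too few translation tables are free, and only pull the tables in when the context being mapped is the one active on the current thread. A hedged reconstruction follows; the elided lines, the error code, and the thread_get_tsd() lookup are assumptions in OP-TEE style, only the quoted conditions are verbatim.

/*
 * Hedged reconstruction of alloc_pgt(); elided lines are assumptions.
 */
static TEE_Result alloc_pgt(struct user_mode_ctx *uctx)
{
        struct thread_specific_data *tsd = NULL;

        if (!pgt_check_avail(uctx)) {
                EMSG("Page tables are not available");
                return TEE_ERROR_OUT_OF_MEMORY;
        }

        tsd = thread_get_tsd();
        if (uctx->ts_ctx == tsd->ctx) {
                /* The context is active on this thread: grab its tables */
                pgt_get_all(uctx);
        }

        return TEE_SUCCESS;
}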
pgt_cache.c
  171  void pgt_flush_range(struct user_mode_ctx *uctx, vaddr_t begin, vaddr_t last)
  173          struct pgt_cache *pgt_cache = &uctx->pgt_cache; in pgt_flush_range()
  213  void pgt_flush(struct user_mode_ctx *uctx)
  215          struct pgt_cache *pgt_cache = &uctx->pgt_cache; in pgt_flush()
  227  void pgt_clear_range(struct user_mode_ctx *uctx, vaddr_t begin, vaddr_t end)
  229          struct pgt_cache *pgt_cache = &uctx->pgt_cache; in pgt_clear_range()
  273  bool pgt_check_avail(struct user_mode_ctx *uctx)
  275          struct pgt_cache *pgt_cache = &uctx->pgt_cache; in pgt_check_avail()
  276          struct vm_info *vm_info = &uctx->vm_info; in pgt_check_avail()
  610  void pgt_flush(struct user_mode_ctx *uctx)
  [all …]
/optee_os/core/kernel/
ldelf_loader.c
  31   static TEE_Result alloc_and_map_fobj(struct user_mode_ctx *uctx, size_t sz,
  43           res = vm_map(uctx, va, num_pgs * SMALL_PAGE_SIZE, prot, flags, mobj, 0); in alloc_and_map_fobj()
  54   TEE_Result ldelf_load_ldelf(struct user_mode_ctx *uctx)
  63           uctx->is_32bit = is_32bit; in ldelf_load_ldelf()
  65           res = alloc_and_map_fobj(uctx, BOUNCE_BUFFER_SIZE, TEE_MATTR_PRW, 0, in ldelf_load_ldelf()
  69           uctx->bbuf = (void *)bb_addr; in ldelf_load_ldelf()
  70           uctx->bbuf_size = BOUNCE_BUFFER_SIZE; in ldelf_load_ldelf()
  72           res = alloc_and_map_fobj(uctx, LDELF_STACK_SIZE, in ldelf_load_ldelf()
  77           uctx->ldelf_stack_ptr = stack_addr + LDELF_STACK_SIZE; in ldelf_load_ldelf()
  79           res = alloc_and_map_fobj(uctx, ldelf_code_siz… in ldelf_load_ldelf()
  116  ldelf_init_with_ldelf(struct ts_session *sess, struct user_mode_ctx *uctx)
  187  ldelf_dump_state(struct user_mode_ctx *uctx)
  315  ldelf_dump_ftrace(struct user_mode_ctx *uctx, void *buf, size_t *blen)
  370  ldelf_dlopen(struct user_mode_ctx *uctx, TEE_UUID *uuid, uint32_t flags)
  425  ldelf_dlsym(struct user_mode_ctx *uctx, TEE_UUID *uuid, const char *sym, size_t symlen, vaddr_t *val)
  [all …]
user_access.c
  39           struct user_mode_ctx *uctx = get_current_uctx(); in check_user_access()
  41           if (!uctx) in check_user_access()
  44           return vm_check_access_rights(uctx, flags, (vaddr_t)uaddr, len); in check_user_access()
  132          struct user_mode_ctx *uctx = get_current_uctx(); in bb_alloc()
  136          if (uctx && !ADD_OVERFLOW(uctx->bbuf_offs, len, &offs) && in bb_alloc()
  137              offs <= uctx->bbuf_size) { in bb_alloc()
  138                  bb = maybe_tag_bb(uctx->bbuf + uctx->bbuf_offs, len); in bb_alloc()
  139                  uctx->bbuf_offs = ROUNDUP(offs, BB_ALIGNMENT); in bb_alloc()
  144  static void bb_free_helper(struct user_mode_ctx *uctx, vaddr_t bb, size_t len)
  146          vaddr_t bbuf = (vaddr_t)uctx->bbuf; in bb_free_helper()
  [all …]
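The bb_alloc() fragments above implement a bump allocator over the per-context bounce buffer that ldelf_load_ldelf() maps (uctx->bbuf / uctx->bbuf_size). A hedged reconstruction follows; the signature and the NULL fallback are assumptions, while the overflow-checked bump logic is verbatim from the fragments.

/*
 * Hedged reconstruction of bb_alloc(); signature assumed.
 */
static void *bb_alloc(size_t len)
{
        struct user_mode_ctx *uctx = get_current_uctx();
        size_t offs = 0;
        void *bb = NULL;

        if (uctx && !ADD_OVERFLOW(uctx->bbuf_offs, len, &offs) &&
            offs <= uctx->bbuf_size) {
                bb = maybe_tag_bb(uctx->bbuf + uctx->bbuf_offs, len);
                /* keep the next allocation aligned */
                uctx->bbuf_offs = ROUNDUP(offs, BB_ALIGNMENT);
        }

        return bb;
}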
user_ta.c
  165          res = vm_map_param(&utc->uctx, ta_sess->param, param_va); in user_ta_enter()
  174          usr_stack = utc->uctx.stack_ptr; in user_ta_enter()
  187                                 utc->uctx.entry_func, utc->uctx.is_32bit, in user_ta_enter()
  191          thread_user_clear_vfp(&utc->uctx); in user_ta_enter()
  217          vm_clean_param(&utc->uctx); in user_ta_enter()
  262          user_mode_ctx_print_mappings(&utc->uctx); in dump_state_no_ldelf_dbg()
  269          if (utc->uctx.dump_entry_func) { in user_ta_dump_state()
  270                  TEE_Result res = ldelf_dump_state(&utc->uctx); in user_ta_dump_state()
  303          res = ldelf_dump_ftrace(&utc->uctx, NUL… in user_ta_dump_ftrace()
  [all …]
ldelf_syscalls.c
  30   static void unmap_or_panic(struct user_mode_ctx *uctx, vaddr_t va,
  33           TEE_Result res = vm_unmap(uctx, va, byte_count); in unmap_or_panic()
  47           struct user_mode_ctx *uctx = to_user_mode_ctx(sess->ctx); in ldelf_syscall_map_zi()
  71           res = vm_map_pad(uctx, &va_copy, num_bytes, prot, vm_flags, in ldelf_syscall_map_zi()
  77           unmap_or_panic(uctx, va_copy, num_bytes); in ldelf_syscall_map_zi()
  87           struct user_mode_ctx *uctx = to_user_mode_ctx(sess->ctx); in ldelf_syscall_unmap()
  101          res = vm_get_flags(uctx, va, sz, &vm_flags); in ldelf_syscall_unmap()
  107          return vm_unmap(uctx, va, sz); in ldelf_syscall_unmap()
  127          struct user_mode_ctx *uctx = to_user_mode_ctx(sess->ctx); in ldelf_syscall_open_bin()
  139          res = vm_check_access_rights(uctx, in ldelf_syscall_open_bin()
  [all …]
user_mode_ctx.c
  10   void user_mode_ctx_print_mappings(struct user_mode_ctx *uctx)
  16           TAILQ_FOREACH(r, &uctx->vm_info.regions, link) { in user_mode_ctx_print_mappings()
/optee_os/core/include/kernel/
ldelf_loader.h
  13  TEE_Result ldelf_load_ldelf(struct user_mode_ctx *uctx);
  15                              struct user_mode_ctx *uctx);
  16  TEE_Result ldelf_dump_state(struct user_mode_ctx *uctx);
  17  TEE_Result ldelf_dump_ftrace(struct user_mode_ctx *uctx,
  19  TEE_Result ldelf_dlopen(struct user_mode_ctx *uctx, TEE_UUID *uuid,
  21  TEE_Result ldelf_dlsym(struct user_mode_ctx *uctx, TEE_UUID *uuid,
user_mode_ctx.h
  25          return &to_user_ta_ctx(ctx)->uctx; in to_user_mode_ctx()
  27          return &to_sp_ctx(ctx)->uctx; in to_user_mode_ctx()
  29          return &to_stmm_ctx(ctx)->uctx; in to_user_mode_ctx()
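The three return statements above are the body of to_user_mode_ctx(), which maps a generic ts_ctx to the user_mode_ctx embedded in each user-mode context flavor (user TA, secure partition, StMM). A hedged sketch of the full dispatcher follows; the is_user_ta_ctx()/is_sp_ctx() predicates follow OP-TEE naming conventions but are assumptions here.

/*
 * Hedged sketch of to_user_mode_ctx(); predicate names assumed.
 */
static inline struct user_mode_ctx *to_user_mode_ctx(struct ts_ctx *ctx)
{
        if (is_user_ta_ctx(ctx))
                return &to_user_ta_ctx(ctx)->uctx;
        if (is_sp_ctx(ctx))
                return &to_sp_ctx(ctx)->uctx;
        return &to_stmm_ctx(ctx)->uctx;
}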
user_ta.h
  39          struct user_mode_ctx uctx;
/optee_os/core/pta/
system.c
  62   static TEE_Result system_derive_ta_unique_key(struct user_mode_ctx *uctx,
  94           res = vm_check_access_rights(uctx, access_flags, in system_derive_ta_unique_key()
  108          memcpy(data, &uctx->ts_ctx->uuid, sizeof(TEE_UUID)); in system_derive_ta_unique_key()
  136  static TEE_Result system_map_zi(struct user_mode_ctx *uctx,
  174          res = vm_map_pad(uctx, &va, num_bytes, prot, vm_flags, in system_map_zi()
  183  static TEE_Result system_unmap(struct user_mode_ctx *uctx, uint32_t param_types,
  214          res = vm_get_flags(uctx, va, sz, &vm_flags); in system_unmap()
  220          return vm_unmap(uctx, va, sz); in system_unmap()
  223  static TEE_Result system_dlopen(struct user_mode_ctx *uctx,
  249          res = ldelf_dlopen(uctx, &uuid, flags); in system_dlopen()
  [all …]
/optee_os/core/arch/arm/kernel/
secure_partition.c
  173                                  struct user_mode_ctx *uctx) in sp_has_exclusive_access()
  179          if (uctx) { in sp_has_exclusive_access()
  183                  TAILQ_FOREACH(region, &uctx->vm_info.regions, link) { in sp_has_exclusive_access()
  235          res = vm_info_init(&spc->uctx, &spc->ts_ctx); in sp_create_ctx()
  242          crypto_rng_read(&spc->uctx.keys, sizeof(spc->uctx.keys)); in sp_create_ctx()
  314          sp_regs->sp = ctx->uctx.stack_ptr; in sp_init_set_registers()
  315          sp_regs->pc = ctx->uctx.entry_func; in sp_init_set_registers()
  353          res = vm_map(&ctx->uctx, va, reg->page_count * SMALL_PAGE_SIZE, in sp_map_shared()
  373          vaddr = (vaddr_t)sp_mem_get_va(&ctx->uctx, reg->page_offset, in sp_unmap_ffa_regions()
  377          res = vm_unmap(&ctx->uctx, vaddr, len); in sp_unmap_ffa_regions()
  [all …]
arch_scall.c
  55           if (vm_check_access_rights(&utc->uctx, in scall_save_panic_stack()
  123          if (vm_check_access_rights(&utc->uctx, in scall_save_panic_stack()
  127                  utc->uctx.is_32bit ? in scall_save_panic_stack()
  140          if (utc->uctx.is_32bit) in scall_save_panic_stack()
stmm_sp.c
  105          res = vm_info_init(&spc->uctx, &spc->ta_ctx.ts_ctx); in stmm_alloc_ctx()
  142          thread_user_clear_vfp(&spc->uctx); in stmm_enter_user_mode()
  204          res = vm_map(&spc->uctx, va, num_pgs * SMALL_PAGE_SIZE, in alloc_and_map_sp_fobj()
  392          res = vm_set_prot(&spc->uctx, stmm_image_addr, in load_stmm()
  398          res = vm_set_prot(&spc->uctx, stmm_heap_addr, stmm_heap_size, in load_stmm()
  403          res = vm_set_prot(&spc->uctx, stmm_sec_buf_addr, stmm_sec_buf_size, in load_stmm()
  585          return to_stmm_ctx(ctx)->uctx.vm_info.asid; in stmm_get_instance_id()
  592          vm_info_final(&spc->uctx); in stmm_ctx_destroy()
  862          res = vm_get_prot(&spc->uctx, va, SMALL_PAGE_SIZE, &attrs); in spm_handle_get_mem_attr()
  917          res = vm_set_prot(&spc->uctx, va, sz, prot); in spm_handle_set_mem_attr()
/optee_os/core/pta/veraison_attestation/
hash.c
  105          struct user_mode_ctx *uctx = NULL; in get_hash_ta_memory()
  113          uctx = to_user_mode_ctx(s->ctx); in get_hash_ta_memory()
  114          if (!uctx) in get_hash_ta_memory()
  118          res = hash_regions(&uctx->vm_info, out); in get_hash_ta_memory()
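get_hash_ta_memory() above resolves the current session's context to a user_mode_ctx and hashes its mapped regions. A hedged reconstruction follows; the ts_get_current_session() lookup, the out parameter type, and the error code are assumptions, only the uctx resolution and the hash_regions() call come from the fragments.

/*
 * Hedged reconstruction of get_hash_ta_memory(); session lookup and
 * error handling are assumptions.
 */
static TEE_Result get_hash_ta_memory(uint8_t *out)
{
        struct ts_session *s = ts_get_current_session();
        struct user_mode_ctx *uctx = NULL;

        uctx = to_user_mode_ctx(s->ctx);
        if (!uctx)
                return TEE_ERROR_BAD_STATE;

        return hash_regions(&uctx->vm_info, out);
}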
/optee_os/core/arch/arm/tee/
svc_cache.c
  27           if (vm_buf_intersects_um_private(&utc->uctx, va, len)) in syscall_cache_operation()
  30           res = vm_check_access_rights(&utc->uctx, in syscall_cache_operation()
/optee_os/core/arch/arm/mm/
tee_pager.c
  735  static TEE_Result pager_add_um_region(struct user_mode_ctx *uctx, vaddr_t base,
  744          if (!uctx->regions) { in pager_add_um_region()
  745                  uctx->regions = malloc(sizeof(*uctx->regions)); in pager_add_um_region()
  746                  if (!uctx->regions) in pager_add_um_region()
  748                  TAILQ_INIT(uctx->regions); in pager_add_um_region()
  751          reg = TAILQ_FIRST(uctx->regions); in pager_add_um_region()
  771          region_insert(uctx->regions, reg, r_prev); in pager_add_um_region()
  813  TEE_Result tee_pager_add_um_region(struct user_mode_ctx *uctx, vaddr_t base,
  820          res = pager_add_um_region(uctx, base, fobj, prot); in tee_pager_add_um_region()
  824          if (uctx->ts_ctx == tsd->ctx) { in tee_pager_add_um_region()
  [all …]
/optee_os/core/arch/arm/include/kernel/
secure_partition.h
  44          struct user_mode_ctx uctx;
  81                                  struct user_mode_ctx *uctx);
/optee_os/core/arch/riscv/include/kernel/
secure_partition.h
  15          struct user_mode_ctx uctx;
thread_arch.h
  193  void thread_user_clear_vfp(struct user_mode_ctx *uctx);
  195  static inline void thread_user_clear_vfp(struct user_mode_ctx *uctx __unused)
stmm_sp.h
  16          struct user_mode_ctx uctx;
/optee_os/core/arch/riscv/kernel/
arch_scall.c
  59           if (vm_check_access_rights(&utc->uctx, in scall_save_panic_stack()
