
// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2015-2020, 2022 Linaro Limited
 * Copyright (c) 2020-2023, Arm Limited
 */
/*
 * ldelf_load_ldelf():
 * This function may leave a few mappings behind on error, but that's taken
 */
        uctx->is_32bit = is_32bit;
        uctx->bbuf = (void *)bb_addr;
        uctx->bbuf_size = BOUNCE_BUFFER_SIZE;
        /* The ldelf stack pointer starts at the top of the stack mapping */
        uctx->ldelf_stack_ptr = stack_addr + LDELF_STACK_SIZE;
        /* Entry point: ldelf load address plus the ELF entry offset */
        uctx->entry_func = code_addr + ldelf_entry;
        vm_set_ctx(uctx->ts_ctx);
/* ldelf_init_with_ldelf() */
        usr_stack = uctx->ldelf_stack_ptr;
        usr_stack -= ROUNDUP(sizeof(*arg), STACK_ALIGNMENT);

        sess->handle_scall = scall_handle_ldelf;
        res = PUT_USER_SCALAR(uctx->ts_ctx->uuid, &arg->uuid);
        /* Enter ldelf in user mode at its entry point (call arguments excerpted): */
                        usr_stack, uctx->entry_func,
        sess->handle_scall = sess->ctx->ops->handle_scall;

        if (is_user_ta_ctx(uctx->ts_ctx)) {
                /*
                 * This is already checked by the elf loader, but since it runs
                 */
                if (arg_bbuf->flags & ~TA_FLAGS_MASK)
                        /* ... error return excerpted ... */
                to_user_ta_ctx(uctx->ts_ctx)->ta_ctx.flags = arg_bbuf->flags;
        }

        uctx->is_32bit = arg_bbuf->is_32bit;
        uctx->entry_func = arg_bbuf->entry_func;
        uctx->load_addr = arg_bbuf->load_addr;
        uctx->stack_ptr = arg_bbuf->stack_ptr;
        uctx->dump_entry_func = arg_bbuf->dump_entry;
        uctx->ftrace_entry_func = arg_bbuf->ftrace_entry;
        sess->fbuf = arg_bbuf->fbuf;
        uctx->dl_entry_func = arg_bbuf->dl_entry;
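/*
 * The same call pattern repeats in ldelf_init_with_ldelf(), ldelf_dump_state(),
 * ldelf_dump_ftrace(), ldelf_dlopen() and ldelf_dlsym(): route system calls to
 * the ldelf handler, run an ldelf entry point in user mode on the ldelf stack,
 * then restore the TA's own handler.  A minimal sketch of that pattern follows;
 * it leans on the kernel-internal types used above, and enter_ldelf_user_mode()
 * is a hypothetical stand-in for the real user-mode entry call, not an actual
 * OP-TEE function.
 */
TEE_Result enter_ldelf_user_mode(uaddr_t usr_stack, uaddr_t entry_func,
                                 bool is_32bit);        /* hypothetical */

static TEE_Result call_into_ldelf(struct ts_session *sess,
                                  struct user_mode_ctx *uctx,
                                  uaddr_t usr_stack, uaddr_t entry_func)
{
        TEE_Result res = TEE_SUCCESS;

        /* System calls made by ldelf get the ldelf-specific handler */
        sess->handle_scall = scall_handle_ldelf;

        /* Run ldelf at entry_func on its own stack */
        res = enter_ldelf_user_mode(usr_stack, entry_func, uctx->is_32bit);

        /* Back in the kernel: restore the TA's regular syscall handler */
        sess->handle_scall = sess->ctx->ops->handle_scall;

        return res;
}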
/* ldelf_dump_state() */
        uaddr_t usr_stack = uctx->ldelf_stack_ptr;

        /* Count user-accessible regions to size the dump argument */
        TAILQ_FOREACH(r, &uctx->vm_info.regions, link)
                if (r->attr & TEE_MATTR_URWX)
                        n++;

        usr_stack = uctx->ldelf_stack_ptr;
        usr_stack -= arg_size;
        arg->num_maps = n;
        /* Describe each user-accessible region */
        TAILQ_FOREACH(r, &uctx->vm_info.regions, link) {
                if (r->attr & TEE_MATTR_URWX) {
                        if (r->mobj)
                                mobj_get_pa(r->mobj, r->offset, 0,
                                            &arg->maps[n].pa);
                        arg->maps[n].va = r->va;
                        arg->maps[n].sz = r->size;
                        if (r->attr & TEE_MATTR_UR)
                                arg->maps[n].flags |= DUMP_MAP_READ;
                        if (r->attr & TEE_MATTR_UW)
                                arg->maps[n].flags |= DUMP_MAP_WRITE;
                        if (r->attr & TEE_MATTR_UX)
                                arg->maps[n].flags |= DUMP_MAP_EXEC;
                        if (r->attr & TEE_MATTR_SECURE)
                                arg->maps[n].flags |= DUMP_MAP_SECURE;
                        if (r->flags & VM_FLAG_EPHEMERAL)
                                arg->maps[n].flags |= DUMP_MAP_EPHEM;
                        if (r->flags & VM_FLAG_LDELF)
                                arg->maps[n].flags |= DUMP_MAP_LDELF;
                        n++;
                }
        }
        arg->is_32bit = uctx->is_32bit;

        /* ARM32 build: registers come from the AArch32 abort frame */
        arg->arm32.regs[0] = tsd->abort_regs.r0;
        arg->arm32.regs[1] = tsd->abort_regs.r1;
        arg->arm32.regs[2] = tsd->abort_regs.r2;
        arg->arm32.regs[3] = tsd->abort_regs.r3;
        arg->arm32.regs[4] = tsd->abort_regs.r4;
        arg->arm32.regs[5] = tsd->abort_regs.r5;
        arg->arm32.regs[6] = tsd->abort_regs.r6;
        arg->arm32.regs[7] = tsd->abort_regs.r7;
        arg->arm32.regs[8] = tsd->abort_regs.r8;
        arg->arm32.regs[9] = tsd->abort_regs.r9;
        arg->arm32.regs[10] = tsd->abort_regs.r10;
        arg->arm32.regs[11] = tsd->abort_regs.r11;
        arg->arm32.regs[12] = tsd->abort_regs.ip;
        arg->arm32.regs[13] = tsd->abort_regs.usr_sp;   /* SP */
        arg->arm32.regs[14] = tsd->abort_regs.usr_lr;   /* LR */
        arg->arm32.regs[15] = tsd->abort_regs.elr;      /* PC */

        /* ARM64 build: a 32-bit TA's AArch32 registers are held in x0-x14 */
        if (uctx->is_32bit) {
                arg->arm32.regs[0] = tsd->abort_regs.x0;
                arg->arm32.regs[1] = tsd->abort_regs.x1;
                arg->arm32.regs[2] = tsd->abort_regs.x2;
                arg->arm32.regs[3] = tsd->abort_regs.x3;
                arg->arm32.regs[4] = tsd->abort_regs.x4;
                arg->arm32.regs[5] = tsd->abort_regs.x5;
                arg->arm32.regs[6] = tsd->abort_regs.x6;
                arg->arm32.regs[7] = tsd->abort_regs.x7;
                arg->arm32.regs[8] = tsd->abort_regs.x8;
                arg->arm32.regs[9] = tsd->abort_regs.x9;
                arg->arm32.regs[10] = tsd->abort_regs.x10;
                arg->arm32.regs[11] = tsd->abort_regs.x11;
                arg->arm32.regs[12] = tsd->abort_regs.x12;
                arg->arm32.regs[13] = tsd->abort_regs.x13;      /* SP */
                arg->arm32.regs[14] = tsd->abort_regs.x14;      /* LR */
                arg->arm32.regs[15] = tsd->abort_regs.elr;      /* PC */
        } else {
                arg->arm64.fp = tsd->abort_regs.x29;
                arg->arm64.pc = tsd->abort_regs.elr;
                arg->arm64.sp = tsd->abort_regs.sp_el0;
        }

        /* RISC-V build */
        arg->rv.fp = tsd->abort_regs.s0;
        arg->rv.pc = tsd->abort_regs.epc;
        arg->rv.sp = tsd->abort_regs.sp;
        sess->handle_scall = scall_handle_ldelf;
        /* Enter ldelf's dump entry point in user mode (call arguments excerpted): */
                        usr_stack, uctx->dump_entry_func,
        sess->handle_scall = sess->ctx->ops->handle_scall;

        /* On failure, clear the entry so no further dump is attempted */
        uctx->dump_entry_func = 0;
/* ldelf_dump_ftrace() */
        uaddr_t usr_stack = uctx->ldelf_stack_ptr;

        if (!uctx->ftrace_entry_func)
                /* ... early return excerpted ... */
        usr_stack -= ROUNDUP(sizeof(*arg), STACK_ALIGNMENT);

        sess->handle_scall = scall_handle_ldelf;
        /* Enter ldelf's ftrace entry point in user mode (call arguments excerpted): */
                        usr_stack, uctx->ftrace_entry_func,
        sess->handle_scall = sess->ctx->ops->handle_scall;

        uctx->ftrace_entry_func = 0;
/* ldelf_dlopen() */
        uaddr_t usr_stack = uctx->ldelf_stack_ptr;

        arg->cmd = LDELF_DL_ENTRY_DLOPEN;
        arg->dlopen.uuid = *uuid;
        arg->dlopen.flags = flags;

        usr_stack -= ROUNDUP(sizeof(*arg), STACK_ALIGNMENT);

        sess->handle_scall = scall_handle_ldelf;
        /* Enter ldelf's dl entry point in user mode (call arguments excerpted): */
                        usr_stack, uctx->dl_entry_func,
        sess->handle_scall = sess->ctx->ops->handle_scall;

        /* Read back ldelf's status from the user-space argument */
        res2 = GET_USER_SCALAR(res, &usr_arg->ret);
/* ldelf_dlsym() */
        uaddr_t usr_stack = uctx->ldelf_stack_ptr;

        /* Reserve room for the argument struct plus the NUL-terminated name */
        usr_stack -= ROUNDUP(sizeof(*arg) + symlen + 1, STACK_ALIGNMENT);

        arg->cmd = LDELF_DL_ENTRY_DLSYM;
        arg->dlsym.uuid = *uuid;
        res = copy_to_user(usr_arg->dlsym.symbol, sym, symlen + 1);

        sess->handle_scall = scall_handle_ldelf;
        /* Enter ldelf's dl entry point in user mode (call arguments excerpted): */
                        usr_stack, uctx->dl_entry_func,
        sess->handle_scall = sess->ctx->ops->handle_scall;

        res2 = GET_USER_SCALAR(res, &usr_arg->ret);
        /* On success, read back the resolved symbol value */
        res = GET_USER_SCALAR(*val, &usr_arg->dlsym.val);
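/*
 * Each of the calls above passes its argument block on the ldelf user stack,
 * carved out with the usr_stack -= ROUNDUP(...) step seen in every function.
 * A small sketch of that carve-out, assuming a downward-growing stack and the
 * ROUNDUP()/STACK_ALIGNMENT macros used above; reserve_ldelf_arg() is an
 * illustrative helper, not an OP-TEE function.
 */
static uaddr_t reserve_ldelf_arg(uaddr_t stack_top, size_t arg_size)
{
        /*
         * Rounding the reservation up keeps the new stack pointer aligned,
         * provided stack_top itself is STACK_ALIGNMENT aligned.
         */
        return stack_top - ROUNDUP(arg_size, STACK_ALIGNMENT);
}

/*
 * Usage matching ldelf_dlsym(), where the NUL-terminated symbol name is
 * appended to the argument struct:
 *
 *      usr_stack = reserve_ldelf_arg(uctx->ldelf_stack_ptr,
 *                                    sizeof(*arg) + symlen + 1);
 */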