Lines Matching refs:mobj
(Cross-reference hits on the mobj identifier; the function names below suggest OP-TEE's core/mm/vm.c.)
140 if (mobj_is_paged(r->mobj)) { in rem_um_region()
187 size_t sz = MIN(end - va, mobj_get_phys_granule(r->mobj)); in set_reg_in_table()
194 if (mobj_get_pa(r->mobj, offset, granule, &pa)) in set_reg_in_table()
206 assert(!mobj_is_paged(r->mobj)); in set_um_region()
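
The matches at lines 187-206 are the physical-address walk: a region is processed in chunks no larger than the mobj's physical granule, and set_um_region() asserts the mobj is not paged because a paged mobj has no stable physical address to install. A minimal sketch of that chunking loop, with hypothetical mock types standing in for the real mobj API (mobj_get_pa() here is a stand-in, not the real definition):

    #include <stdint.h>
    #include <stdio.h>

    typedef uintptr_t paddr_t;
    typedef uintptr_t vaddr_t;
    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    /* Mock mobj: the real one hides these fields behind accessors like
     * mobj_get_phys_granule(), and lookups can fail for paged objects. */
    struct mobj { size_t phys_granule; paddr_t base_pa; };

    static int mobj_get_pa(struct mobj *m, size_t offs, size_t granule,
                           paddr_t *pa)
    {
        (void)granule;
        *pa = m->base_pa + offs;
        return 0; /* 0 plays the role of TEE_SUCCESS in this sketch */
    }

    /* Walk [va, end) in granule-sized steps, resolving one PA per step. */
    static void walk_region(struct mobj *m, vaddr_t va, vaddr_t end,
                            size_t offset)
    {
        while (va < end) {
            size_t sz = MIN(end - va, m->phys_granule);
            paddr_t pa = 0;

            if (mobj_get_pa(m, offset, m->phys_granule, &pa))
                break; /* no stable PA: leave the rest unmapped */
            printf("va 0x%jx -> pa 0x%jx (%zu bytes)\n",
                   (uintmax_t)va, (uintmax_t)pa, sz);
            va += sz;
            offset += sz;
        }
    }
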
261 if (offs_plus_size > ROUNDUP(reg->mobj->size, SMALL_PAGE_SIZE)) in umap_add_region()
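
Line 261 is the bounds check that keeps a new region inside its backing object: the requested offset plus size may not pass the mobj's size rounded up to a whole small page. A sketch, assuming offs_plus_size was already checked for overflow (as the precomputed name implies) and a power-of-two ROUNDUP in the usual OP-TEE style:

    #include <stdbool.h>
    #include <stddef.h>

    #define SMALL_PAGE_SIZE 0x1000UL
    /* Power-of-two round-up, as commonly defined in OP-TEE-style code. */
    #define ROUNDUP(v, size) (((v) + (size) - 1) & ~((size) - 1))

    /* True if [offs, offs + size) fits inside the page-rounded mobj.
     * Callers are assumed to have ruled out overflow of offs + size. */
    static bool fits_in_mobj(size_t offs_plus_size, size_t mobj_size)
    {
        return offs_plus_size <= ROUNDUP(mobj_size, SMALL_PAGE_SIZE);
    }
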
295 uint32_t prot, uint32_t flags, struct mobj *mobj, in vm_map_pad() argument
310 if (!mobj_is_paged(mobj)) { in vm_map_pad()
313 res = mobj_get_mem_type(mobj, &mem_type); in vm_map_pad()
319 if (mobj_is_secure(mobj)) in vm_map_pad()
322 reg->mobj = mobj_get(mobj); in vm_map_pad()
337 if (mobj_is_paged(mobj)) { in vm_map_pad()
338 struct fobj *fobj = mobj_get_fobj(mobj); in vm_map_pad()
367 mobj_put(reg->mobj); in vm_map_pad()
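
Lines 295-367 contain the whole ownership story of vm_map_pad(): non-paged mobjs supply the memory type (313), secure mobjs force a secure mapping (319), the region takes its own reference with mobj_get() (322), paged mobjs are instead registered through their fobj (337-338), and the error path releases the reference with mobj_put() (367). A reduced sketch of the get/put discipline, with hypothetical mocks (insert_region() is an invented placeholder for the real bookkeeping):

    #include <stdlib.h>
    #include <stddef.h>

    /* Mock refcounted mobj. */
    struct mobj { unsigned int refc; };
    struct vm_region { struct mobj *mobj; size_t offset; size_t size; };

    static struct mobj *mobj_get(struct mobj *m) { m->refc++; return m; }
    static void mobj_put(struct mobj *m)
    {
        if (!--m->refc)
            free(m);
    }

    static int insert_region(struct vm_region *reg)
    {
        (void)reg;
        return 0; /* mock: pretend the bookkeeping always succeeds */
    }

    /* The region owns exactly one reference for its whole lifetime. */
    static int map_region(struct vm_region *reg, struct mobj *m)
    {
        reg->mobj = mobj_get(m);
        if (insert_region(reg)) {
            mobj_put(reg->mobj); /* error path: drop our reference */
            reg->mobj = NULL;
            return -1;
        }
        return 0;
    }
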
424 if (mobj_is_paged(r->mobj)) { in split_vm_region()
433 r2->mobj = mobj_get(r->mobj); in split_vm_region()
526 if (r->mobj != r_next->mobj || in merge_vm_range()
535 mobj_put(r_next->mobj); in merge_vm_range()
553 r0->mobj == r->mobj && rn->offset == r->offset + r->size; in cmp_region_for_remap()
578 if (mobj_is_paged(r0->mobj)) { in vm_remap()
579 fobj = mobj_get_fobj(r0->mobj); in vm_remap()
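
Lines 424-433, 526-535 and 553-579 are variations on one refcounting invariant: splitting a region duplicates the reference (a second mobj_get() on the same mobj with the offset advanced), merging drops the now-redundant reference with mobj_put(), and the remap test at line 553 accepts neighbouring regions only when they map the same mobj at contiguous offsets (paged mobjs additionally hand their fobj to the pager). Continuing the mock types from the previous sketch:

    /* Split r after 'len' bytes; r2 gets its own reference. */
    static void split_region(struct vm_region *r, struct vm_region *r2,
                             size_t len)
    {
        r2->mobj = mobj_get(r->mobj);
        r2->offset = r->offset + len;
        r2->size = r->size - len;
        r->size = len;
    }

    /* Merge r_next into r; only legal when both map the same mobj and
     * r_next picks up exactly where r stops (the same-mobj, contiguous-
     * offset test seen at lines 526 and 553). */
    static void merge_regions(struct vm_region *r, struct vm_region *r_next)
    {
        if (r->mobj != r_next->mobj ||
            r_next->offset != r->offset + r->size)
            return;
        r->size += r_next->size;
        mobj_put(r_next->mobj); /* one of the two references goes away */
        r_next->mobj = NULL;
    }
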
769 if (!mobj_is_paged(r->mobj)) { in vm_set_prot()
788 if (mobj_is_paged(r->mobj)) { in vm_set_prot()
809 mobj_put(reg->mobj); in umap_remove_region()
853 struct mobj *mobj = NULL; in map_kinit() local
859 thread_get_user_kcode(&mobj, &offs, &va, &sz); in map_kinit()
865 mobj, offs); in map_kinit()
870 thread_get_user_kdata(&mobj, &offs, &va, &sz); in map_kinit()
873 mobj, offs); in map_kinit()
932 if (mem->mobj != region->mobj) in param_mem_to_user_va()
935 phys_offs = mobj_get_phys_offs(mem->mobj, in param_mem_to_user_va()
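
Lines 932-935 translate a parameter's (mobj, offset) pair into a user VA: the entry must match the region's mobj, and the mobj's physical page offset, as reported by mobj_get_phys_offs(), is folded in so the VA lands on the right byte of the first mapped page. A plausible reading of the arithmetic (a sketch, not the verbatim function; types as in the first sketch):

    /* Hypothetical helper: user VA for mem_offs within a region that
     * maps the same mobj. phys_offs models mobj_get_phys_offs(). */
    static vaddr_t param_mem_va(vaddr_t region_va, size_t region_offset,
                                size_t mem_offs, size_t phys_offs)
    {
        return region_va + mem_offs + phys_offs - region_offset;
    }
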
956 if (!m0->mobj && !m1->mobj) in cmp_param_mem()
958 if (!m0->mobj) in cmp_param_mem()
960 if (!m1->mobj) in cmp_param_mem()
963 ret = CMP_TRILEAN(mobj_is_secure(m0->mobj), mobj_is_secure(m1->mobj)); in cmp_param_mem()
967 ret = CMP_TRILEAN((vaddr_t)m0->mobj, (vaddr_t)m1->mobj); in cmp_param_mem()
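
Lines 956-967 sort parameter memory references into a canonical order: entries without a backing mobj go last, then entries group by security attribute, then by mobj identity so references into the same buffer end up adjacent (which is what lets the later loop coalesce them). A standalone sketch with a simplified stand-in for CMP_TRILEAN (the real macro may differ in detail) and a mock mobj_is_secure():

    #include <stdint.h>
    #include <stddef.h>

    struct mobj; /* opaque in this sketch */
    struct param_mem { struct mobj *mobj; size_t offs; size_t size; };

    /* Simplified stand-in for CMP_TRILEAN: returns -1, 0 or 1. */
    #define CMP_TRILEAN(a, b) (((a) < (b)) ? -1 : ((a) > (b)) ? 1 : 0)

    /* Mock: the real mobj_is_secure() inspects the mobj's attributes. */
    static int mobj_is_secure(struct mobj *m) { (void)m; return 0; }

    static int cmp_param_mem(const struct param_mem *m0,
                             const struct param_mem *m1)
    {
        int ret;

        /* Entries without a mobj sort last. */
        if (!m0->mobj && !m1->mobj)
            return 0;
        if (!m0->mobj)
            return 1;
        if (!m1->mobj)
            return -1;
        /* Group by security attribute... */
        ret = CMP_TRILEAN(mobj_is_secure(m0->mobj),
                          mobj_is_secure(m1->mobj));
        if (ret)
            return ret;
        /* ...then by mobj identity, making shared buffers adjacent. */
        ret = CMP_TRILEAN((uintptr_t)m0->mobj, (uintptr_t)m1->mobj);
        if (ret)
            return ret;
        return CMP_TRILEAN(m0->offs, m1->offs);
    }
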
995 phys_offs = mobj_get_phys_offs(param->u[n].mem.mobj, in vm_map_param()
997 mem[n].mobj = param->u[n].mem.mobj; in vm_map_param()
1021 for (n = 1, m = 0; n < TEE_NUM_PARAMS && mem[n].mobj; n++) { in vm_map_param()
1022 if (mem[n].mobj == mem[m].mobj && in vm_map_param()
1038 if (mem[0].mobj) in vm_map_param()
1049 mem[n].mobj, mem[n].offs); in vm_map_param()
1061 if (!param->u[n].mem.mobj) in vm_map_param()
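
Lines 995-1061 map the sorted parameter list: physical page offsets are folded into each entry (995-997), the loop at 1021-1022 coalesces consecutive entries that reference the same mobj with touching or overlapping ranges, and each surviving entry is mapped from its mobj and offset (1038-1049). A sketch of the coalescing step, reusing the param_mem mock above (coalesce() is an invented name; the real loop is inline in vm_map_param()):

    /* Coalesce sorted entries in place; returns the new entry count. */
    static size_t coalesce(struct param_mem *mem, size_t count)
    {
        size_t n, m;

        if (!count || !mem[0].mobj)
            return 0;
        for (n = 1, m = 0; n < count && mem[n].mobj; n++) {
            if (mem[n].mobj == mem[m].mobj &&
                mem[n].offs <= mem[m].offs + mem[m].size) {
                /* Same buffer, touching ranges: grow entry m. */
                size_t end = mem[n].offs + mem[n].size;

                if (end > mem[m].offs + mem[m].size)
                    mem[m].size = end - mem[m].offs;
            } else {
                m++;
                mem[m] = mem[n];
            }
        }
        return m + 1;
    }
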
1131 struct mobj **mobj, size_t *offs) in vm_buf_to_mboj_offs() argument
1136 if (!r->mobj) in vm_buf_to_mboj_offs()
1141 poffs = mobj_get_phys_offs(r->mobj, in vm_buf_to_mboj_offs()
1143 *mobj = r->mobj; in vm_buf_to_mboj_offs()
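
Lines 1131-1143 run the mapping machinery backwards: given a user VA, find its region and return the owning mobj plus the offset into it, compensating for the mobj's physical page offset. A sketch of the translation for a single region (mock types as in the earlier sketches; poffs models mobj_get_phys_offs()):

    /* Translate va inside a region starting at r_va into (mobj, offs). */
    static int buf_to_mobj_offs(struct vm_region *r, vaddr_t r_va,
                                vaddr_t va, size_t poffs,
                                struct mobj **mobj, size_t *offs)
    {
        if (!r->mobj)
            return -1; /* hole in the address space */
        *offs = va - r_va + r->offset - poffs;
        *mobj = r->mobj;
        return 0;
    }
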
1176 granule = MAX(region->mobj->phys_granule, in tee_mmu_user_va2pa_attr()
1183 res = mobj_get_pa(region->mobj, offset, granule, &p); in tee_mmu_user_va2pa_attr()
1214 if (!region->mobj) in vm_pa2va()
1218 granule = region->mobj->phys_granule; in vm_pa2va()
1233 if (mobj_get_pa(region->mobj, ofs, granule, &p)) in vm_pa2va()
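
Lines 1176-1233 are the two physical-address lookups: va2pa (1176-1183) resolves one VA by asking mobj_get_pa() at the right granule, while pa2va (1214-1233) has no reverse index and instead probes every granule-sized chunk of each region until it finds the one covering the target PA. A sketch of that probing loop for a single region, reusing the earlier mock mobj_get_pa() and vm_region:

    /* Find the VA backing 'pa' within one region, or 0 if not present. */
    static vaddr_t region_pa2va(struct mobj *m, struct vm_region *r,
                                vaddr_t r_va, size_t granule, paddr_t pa)
    {
        size_t ofs;

        for (ofs = r->offset; ofs < r->offset + r->size; ofs += granule) {
            paddr_t p = 0;

            if (mobj_get_pa(m, ofs, granule, &p))
                continue; /* chunk has no PA (e.g. paged out) */
            if (pa >= p && pa - p < granule)
                return r_va + ofs - r->offset + (pa - p);
        }
        return 0;
    }
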
1328 struct mobj *vm_get_mobj(struct user_mode_ctx *uctx, vaddr_t va, size_t *len, in vm_get_mobj()
1346 return mobj_get(r->mobj); in vm_get_mobj()
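
Lines 1328-1346 close the loop: vm_get_mobj() looks up the region covering a VA and hands the caller its own reference via mobj_get(), so the caller must balance it with mobj_put(). A sketch of that lookup-and-reference pattern for one region, with the refcounting mocks from earlier:

    /* Returns a referenced mobj for 'va', or NULL; the caller must
     * mobj_put() the result. 'len' reports the bytes left in the
     * region and 'offs' the matching offset into the mobj. */
    static struct mobj *get_mobj_for_va(struct vm_region *r, vaddr_t r_va,
                                        vaddr_t va, size_t *len,
                                        size_t *offs)
    {
        if (!r->mobj || va < r_va || va - r_va >= r->size)
            return NULL;
        *offs = va - r_va + r->offset;
        *len = r->size - (va - r_va);
        return mobj_get(r->mobj);
    }
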