Lines matching refs: r
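
All of the fragments below dereference a region descriptor r. Here is a minimal sketch of the descriptor they imply, assuming only the fields that actually appear in the listing (va, size, offset, attr, flags, mobj and the TAILQ link); the real struct vm_region in the source may carry more:

	/* Hypothetical reconstruction of the region descriptor used below;
	 * only the fields referenced in the listing are shown. */
	#include <sys/queue.h>
	#include <stddef.h>
	#include <stdint.h>

	typedef uintptr_t vaddr_t;            /* assumed: virtual address type */

	struct mobj;                          /* backing memory object, opaque here */

	struct vm_region {
		struct mobj *mobj;            /* backing object of the mapping */
		size_t offset;                /* offset into mobj where the mapping starts */
		vaddr_t va;                   /* user virtual start address */
		size_t size;                  /* length of the mapping in bytes */
		uint32_t attr;                /* TEE_MATTR_* mapping attributes */
		uint32_t flags;               /* VM_FLAG_* bookkeeping flags */
		TAILQ_ENTRY(vm_region) link;  /* entry in the per-context, VA-sorted list */
	};

	TAILQ_HEAD(vm_region_head, vm_region);
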
134 static void rem_um_region(struct user_mode_ctx *uctx, struct vm_region *r) in rem_um_region() argument
136 vaddr_t begin = ROUNDDOWN(r->va, CORE_MMU_PGDIR_SIZE); in rem_um_region()
137 vaddr_t last = ROUNDUP(r->va + r->size, CORE_MMU_PGDIR_SIZE); in rem_um_region()
140 if (mobj_is_paged(r->mobj)) { in rem_um_region()
141 tee_pager_rem_um_region(uctx, r->va, r->size); in rem_um_region()
143 pgt_clear_range(uctx, r->va, r->va + r->size); in rem_um_region()
144 tlbi_va_range_asid(r->va, r->size, SMALL_PAGE_SIZE, in rem_um_region()
156 r2 = TAILQ_NEXT(r, link); in rem_um_region()
160 r2 = TAILQ_PREV(r, vm_region_head, link); in rem_um_region()
183 struct vm_region *r) in set_reg_in_table() argument
185 vaddr_t va = MAX(r->va, ti->va_base); in set_reg_in_table()
186 vaddr_t end = MIN(r->va + r->size, ti->va_base + CORE_MMU_PGDIR_SIZE); in set_reg_in_table()
187 size_t sz = MIN(end - va, mobj_get_phys_granule(r->mobj)); in set_reg_in_table()
193 offset = va - r->va + r->offset; in set_reg_in_table()
194 if (mobj_get_pa(r->mobj, offset, granule, &pa)) in set_reg_in_table()
196 set_pa_range(ti, va, pa, sz, r->attr); in set_reg_in_table()
201 static void set_um_region(struct user_mode_ctx *uctx, struct vm_region *r) in set_um_region() argument
206 assert(!mobj_is_paged(r->mobj)); in set_um_region()
215 set_reg_in_table(&ti, r); in set_um_region()
223 for (ti.va_base = ROUNDDOWN(r->va, CORE_MMU_PGDIR_SIZE); in set_um_region()
224 ti.va_base < r->va + r->size; in set_um_region()
230 set_reg_in_table(&ti, r); in set_um_region()
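
Lines 185-187 clamp a region to one page-directory-sized translation-table window, and lines 223-230 step the region one such window at a time. A small sketch of that pattern; CORE_MMU_PGDIR_SIZE is assumed to be 2 MiB here and set_window() is a made-up stand-in for set_reg_in_table():

	#include <stddef.h>
	#include <stdint.h>

	#define PGDIR_SIZE		0x200000UL		/* assumed 2 MiB window */
	#define ROUND_DOWN(x, s)	((x) & ~((s) - 1))

	/* Stand-in for set_reg_in_table(): program [va, va + len) in one table. */
	static void set_window(uintptr_t va, size_t len)
	{
		(void)va;
		(void)len;
	}

	static void map_region(uintptr_t r_va, size_t r_size)
	{
		uintptr_t base;

		for (base = ROUND_DOWN(r_va, PGDIR_SIZE); base < r_va + r_size;
		     base += PGDIR_SIZE) {
			/* Clamp the region to this window, like the MAX()/MIN()
			 * lines 185-186 above. */
			uintptr_t va = r_va > base ? r_va : base;
			uintptr_t end = r_va + r_size;
			uintptr_t win_end = base + PGDIR_SIZE;

			if (end > win_end)
				end = win_end;
			set_window(va, end - va);
		}
	}

For example, a region at 0x1ff000 of size 0x3000 would be programmed as 0x1000 bytes in the first window and 0x2000 bytes in the next.
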
242 struct vm_region *r = NULL; in umap_add_region() local
269 TAILQ_FOREACH(r, &vmi->regions, link) { in umap_add_region()
270 va = select_va_in_range(prev_r, r, reg, pad_begin, pad_end, in umap_add_region()
274 TAILQ_INSERT_BEFORE(r, reg, link); in umap_add_region()
277 prev_r = r; in umap_add_region()
280 r = TAILQ_LAST(&vmi->regions, vm_region_head); in umap_add_region()
281 if (!r) in umap_add_region()
282 r = &dummy_first_reg; in umap_add_region()
283 va = select_va_in_range(r, &dummy_last_reg, reg, pad_begin, pad_end, in umap_add_region()
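
Lines 269-283 pick a virtual address for the new region by probing the gap between each pair of neighbouring regions, with dummy first/last sentinels covering the ends of the usable range. A sketch of that search shape; pick_va_in_gap() is a stand-in for the real select_va_in_range(), which also handles alignment, padding and the context's VA limits:

	#include <sys/queue.h>
	#include <stddef.h>
	#include <stdint.h>

	typedef uintptr_t vaddr_t;

	struct vm_region {			/* trimmed copy of the sketch above */
		vaddr_t va;
		size_t size;
		TAILQ_ENTRY(vm_region) link;
	};
	TAILQ_HEAD(vm_region_head, vm_region);

	/* Stand-in for select_va_in_range(): lowest address after 'prev' where
	 * 'size' bytes still fit before 'next', or 0 if the gap is too small. */
	static vaddr_t pick_va_in_gap(const struct vm_region *prev,
				      const struct vm_region *next, size_t size)
	{
		vaddr_t begin = prev->va + prev->size;

		if (next->va - begin >= size)
			return begin;
		return 0;
	}

	/* Insert 'reg' into the VA-sorted list at the first gap that can hold it. */
	static int add_region(struct vm_region_head *regions, struct vm_region *reg,
			      struct vm_region *dummy_first, struct vm_region *dummy_last)
	{
		struct vm_region *prev = dummy_first;
		struct vm_region *r = NULL;
		vaddr_t va = 0;

		TAILQ_FOREACH(r, regions, link) {
			va = pick_va_in_gap(prev, r, reg->size);
			if (va) {
				reg->va = va;
				TAILQ_INSERT_BEFORE(r, reg, link);
				return 0;
			}
			prev = r;
		}

		/* Past the last region (or the list was empty): try the tail gap. */
		r = TAILQ_LAST(regions, vm_region_head);
		if (!r)
			r = dummy_first;
		va = pick_va_in_gap(r, dummy_last, reg->size);
		if (!va)
			return -1;
		reg->va = va;
		TAILQ_INSERT_TAIL(regions, reg, link);
		return 0;
	}
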
375 struct vm_region *r = NULL; in find_vm_region() local
377 TAILQ_FOREACH(r, &vm_info->regions, link) in find_vm_region()
378 if (va >= r->va && va < r->va + r->size) in find_vm_region()
379 return r; in find_vm_region()
387 const struct vm_region *r, in va_range_is_contiguous() argument
390 struct vm_region *r = r0; in va_range_is_contiguous() local
397 struct vm_region *r_next = TAILQ_NEXT(r, link); in va_range_is_contiguous()
398 vaddr_t r_end_va = r->va + r->size; in va_range_is_contiguous()
406 if (cmp_regs && !cmp_regs(r0, r, r_next)) in va_range_is_contiguous()
408 r = r_next; in va_range_is_contiguous()
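
Lines 390-408 walk successive regions and require each one to end exactly where the next begins, with an optional per-pair predicate supplied by the caller. A minimal sketch of that check, with names made up here:

	#include <stdbool.h>
	#include <stddef.h>
	#include <stdint.h>
	#include <sys/queue.h>

	typedef uintptr_t vaddr_t;

	struct vm_region {			/* trimmed copy of the sketch above */
		vaddr_t va;
		size_t size;
		TAILQ_ENTRY(vm_region) link;
	};

	typedef bool (*cmp_regs_fn)(const struct vm_region *r0,
				    const struct vm_region *r,
				    const struct vm_region *rn);

	/* True if [va, va + len) is covered by back-to-back regions starting at
	 * 'r0' (which must contain 'va') and every adjacent pair passes 'cmp'. */
	static bool range_is_contiguous(struct vm_region *r0, vaddr_t va, size_t len,
					cmp_regs_fn cmp)
	{
		struct vm_region *r = r0;
		vaddr_t end = va + len;

		for (;;) {
			vaddr_t r_end = r->va + r->size;
			struct vm_region *next = TAILQ_NEXT(r, link);

			if (r_end >= end)
				return true;		/* range fully covered */
			if (!next || next->va != r_end)
				return false;		/* hole in the VA range */
			if (cmp && !cmp(r0, r, next))
				return false;		/* adjacent pair rejected */
			r = next;
		}
	}
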
413 struct vm_region *r, vaddr_t va) in split_vm_region() argument
416 size_t diff = va - r->va; in split_vm_region()
418 assert(diff && diff < r->size); in split_vm_region()
424 if (mobj_is_paged(r->mobj)) { in split_vm_region()
433 r2->mobj = mobj_get(r->mobj); in split_vm_region()
434 r2->offset = r->offset + diff; in split_vm_region()
436 r2->size = r->size - diff; in split_vm_region()
437 r2->attr = r->attr; in split_vm_region()
438 r2->flags = r->flags; in split_vm_region()
440 r->size = diff; in split_vm_region()
442 TAILQ_INSERT_AFTER(&uctx->vm_info.regions, r, r2, link); in split_vm_region()
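
Lines 413-442 split one region at va into a front part of diff bytes and a back part r2 that maps the same mobj starting at offset + diff. A worked example with hypothetical numbers:

	/*
	 * Worked split example (hypothetical numbers):
	 *
	 *   before: r   va = 0x40000000  size = 0x6000  offset = 0x0
	 *   split at va = 0x40002000  ->  diff = 0x2000
	 *
	 *   after:  r   va = 0x40000000  size = 0x2000  offset = 0x0
	 *           r2  va = 0x40002000  size = 0x4000  offset = 0x2000
	 *
	 * r2 takes its own reference on the same mobj (mobj_get()) and copies
	 * attr and flags, so nothing changes in the hardware mapping; the range
	 * is merely described by two list entries, which lets a caller later
	 * unmap or re-protect just one half.
	 */
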
450 const struct vm_region *r, in split_vm_range() argument
455 struct vm_region *r = NULL; in split_vm_range() local
468 r = find_vm_region(&uctx->vm_info, va); in split_vm_range()
469 if (!r || !va_range_is_contiguous(r, va, len, cmp_regs)) in split_vm_range()
476 if (va != r->va) { in split_vm_range()
477 res = split_vm_region(uctx, r, va); in split_vm_range()
480 r = TAILQ_NEXT(r, link); in split_vm_range()
483 *r0_ret = r; in split_vm_range()
484 r = find_vm_region(&uctx->vm_info, va + len - 1); in split_vm_range()
485 if (!r) in split_vm_range()
487 if (end_va != r->va + r->size) { in split_vm_range()
488 res = split_vm_region(uctx, r, end_va); in split_vm_range()
499 struct vm_region *r = NULL; in merge_vm_range() local
507 for (r = TAILQ_FIRST(&uctx->vm_info.regions);; r = r_next) { in merge_vm_range()
508 r_next = TAILQ_NEXT(r, link); in merge_vm_range()
513 if (r->va + r->size < va) in merge_vm_range()
521 if (r->va > end_va) in merge_vm_range()
524 if (r->va + r->size != r_next->va) in merge_vm_range()
526 if (r->mobj != r_next->mobj || in merge_vm_range()
527 r->flags != r_next->flags || in merge_vm_range()
528 r->attr != r_next->attr) in merge_vm_range()
530 if (r->offset + r->size != r_next->offset) in merge_vm_range()
534 r->size += r_next->size; in merge_vm_range()
537 r_next = r; in merge_vm_range()
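
Lines 507-537 coalesce a region with its successor only when the VAs are back-to-back and the backing mobj, flags, attributes and mobj offsets all line up. A small predicate sketch of that test (names are illustrative):

	#include <stdbool.h>
	#include <stddef.h>
	#include <stdint.h>

	typedef uintptr_t vaddr_t;

	struct mobj;

	struct vm_region {			/* trimmed copy of the sketch above */
		struct mobj *mobj;
		size_t offset;
		vaddr_t va;
		size_t size;
		uint32_t attr;
		uint32_t flags;
	};

	/* Two neighbouring regions may be folded into one entry only if they
	 * describe a single contiguous mapping of a single object. */
	static bool can_merge(const struct vm_region *r, const struct vm_region *next)
	{
		return r->va + r->size == next->va &&		/* no VA hole */
		       r->mobj == next->mobj &&			/* same backing object */
		       r->flags == next->flags &&
		       r->attr == next->attr &&
		       r->offset + r->size == next->offset;	/* contiguous in mobj */
	}
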
542 const struct vm_region *r, in cmp_region_for_remap()
552 return r0->flags == r->flags && r0->attr == r->attr && in cmp_region_for_remap()
553 r0->mobj == r->mobj && rn->offset == r->offset + r->size; in cmp_region_for_remap()
562 struct vm_region *r = NULL; in vm_remap() local
584 for (r = r0; r; r = r_next) { in vm_remap()
585 if (r->va + r->size > old_va + len) in vm_remap()
587 r_next = TAILQ_NEXT(r, link); in vm_remap()
588 rem_um_region(uctx, r); in vm_remap()
589 TAILQ_REMOVE(&uctx->vm_info.regions, r, link); in vm_remap()
590 TAILQ_INSERT_TAIL(&regs, r, link); in vm_remap()
601 r = TAILQ_FIRST(&regs); in vm_remap()
602 TAILQ_REMOVE(&regs, r, link); in vm_remap()
604 r->va = r_last->va + r_last->size; in vm_remap()
605 res = umap_add_region(&uctx->vm_info, r, 0, 0, 0); in vm_remap()
607 r->va = *new_va; in vm_remap()
608 res = umap_add_region(&uctx->vm_info, r, pad_begin, in vm_remap()
609 pad_end + len - r->size, 0); in vm_remap()
612 r_last = r; in vm_remap()
617 set_um_region(uctx, r); in vm_remap()
619 res = tee_pager_add_um_region(uctx, r->va, fobj, in vm_remap()
620 r->attr); in vm_remap()
632 if (r != r_last) { in vm_remap()
637 TAILQ_INSERT_HEAD(&regs, r, link); in vm_remap()
641 for (r = r_first; r != r_stop; r = r_next) { in vm_remap()
642 r_next = TAILQ_NEXT(r, link); in vm_remap()
643 TAILQ_REMOVE(&uctx->vm_info.regions, r, link); in vm_remap()
645 TAILQ_INSERT_AFTER(&regs, r_tmp, r, in vm_remap()
648 TAILQ_INSERT_HEAD(&regs, r, link); in vm_remap()
649 r_tmp = r; in vm_remap()
666 r = TAILQ_FIRST(&regs); in vm_remap()
667 TAILQ_REMOVE(&regs, r, link); in vm_remap()
668 r->va = next_va; in vm_remap()
669 next_va += r->size; in vm_remap()
670 if (umap_add_region(&uctx->vm_info, r, 0, 0, 0)) in vm_remap()
675 if (tee_pager_add_um_region(uctx, r->va, fobj, r->attr)) in vm_remap()
678 set_um_region(uctx, r); in vm_remap()
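
Lines 584-678 move the regions covering [old_va, old_va + len) by unlinking them onto a private queue and re-adding them back to back at the new address, with a rollback path (not shown here) that restores them at old_va if anything fails. A stripped-down sketch of that detach/re-attach pattern; the real code also tears down and rebuilds the page tables and pager state:

	#include <sys/queue.h>
	#include <stddef.h>
	#include <stdint.h>

	typedef uintptr_t vaddr_t;

	struct vm_region {			/* trimmed copy of the sketch above */
		vaddr_t va;
		size_t size;
		TAILQ_ENTRY(vm_region) link;
	};
	TAILQ_HEAD(vm_region_head, vm_region);

	/* Detach every region inside [old_va, old_va + len) and re-insert the
	 * chain back to back starting at new_va. The caller is assumed to have
	 * already split regions at the range boundaries; rollback is omitted. */
	static void remap_regions(struct vm_region_head *regions, vaddr_t old_va,
				  size_t len, vaddr_t new_va)
	{
		struct vm_region_head tmp;
		struct vm_region *r = TAILQ_FIRST(regions);
		vaddr_t next_va = new_va;

		TAILQ_INIT(&tmp);

		while (r) {
			struct vm_region *next = TAILQ_NEXT(r, link);

			if (r->va >= old_va && r->va + r->size <= old_va + len) {
				TAILQ_REMOVE(regions, r, link);
				TAILQ_INSERT_TAIL(&tmp, r, link);
			}
			r = next;
		}

		while ((r = TAILQ_FIRST(&tmp))) {
			TAILQ_REMOVE(&tmp, r, link);
			r->va = next_va;
			next_va += r->size;
			/* The real code goes through umap_add_region() so the
			 * main list stays sorted by VA. */
			TAILQ_INSERT_TAIL(regions, r, link);
		}
	}
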
688 const struct vm_region *r, in cmp_region_for_get_flags()
691 return r0->flags == r->flags; in cmp_region_for_get_flags()
697 struct vm_region *r = NULL; in vm_get_flags() local
702 r = find_vm_region(&uctx->vm_info, va); in vm_get_flags()
703 if (!r) in vm_get_flags()
706 if (!va_range_is_contiguous(r, va, len, cmp_region_for_get_flags)) in vm_get_flags()
709 *flags = r->flags; in vm_get_flags()
715 const struct vm_region *r, in cmp_region_for_get_prot()
719 (r->attr & TEE_MATTR_PROT_MASK); in cmp_region_for_get_prot()
725 struct vm_region *r = NULL; in vm_get_prot() local
730 r = find_vm_region(&uctx->vm_info, va); in vm_get_prot()
731 if (!r) in vm_get_prot()
734 if (!va_range_is_contiguous(r, va, len, cmp_region_for_get_prot)) in vm_get_prot()
737 *prot = r->attr & TEE_MATTR_PROT_MASK; in vm_get_prot()
747 struct vm_region *r = NULL; in vm_set_prot() local
760 for (r = r0; r; r = TAILQ_NEXT(r, link)) { in vm_set_prot()
761 if (r->va + r->size > va + len) in vm_set_prot()
763 if (r->attr & (TEE_MATTR_UW | TEE_MATTR_PW)) in vm_set_prot()
766 r->attr &= ~TEE_MATTR_PROT_MASK; in vm_set_prot()
767 r->attr |= prot; in vm_set_prot()
769 if (!mobj_is_paged(r->mobj)) { in vm_set_prot()
771 set_um_region(uctx, r); in vm_set_prot()
780 tlbi_va_range_asid(r->va, r->size, SMALL_PAGE_SIZE, in vm_set_prot()
785 for (r = r0; r; r = TAILQ_NEXT(r, link)) { in vm_set_prot()
786 if (r->va + r->size > va + len) in vm_set_prot()
788 if (mobj_is_paged(r->mobj)) { in vm_set_prot()
789 if (!tee_pager_set_um_region_attr(uctx, r->va, r->size, in vm_set_prot()
793 cache_op_inner(DCACHE_AREA_CLEAN, (void *)r->va, in vm_set_prot()
794 r->size); in vm_set_prot()
816 struct vm_region *r = NULL; in vm_unmap() local
833 res = split_vm_range(uctx, va, l, NULL, &r); in vm_unmap()
838 r_next = TAILQ_NEXT(r, link); in vm_unmap()
839 unmap_end_va = r->va + r->size; in vm_unmap()
840 rem_um_region(uctx, r); in vm_unmap()
841 umap_remove_region(&uctx->vm_info, r); in vm_unmap()
844 r = r_next; in vm_unmap()
903 struct vm_region *r; in vm_clean_param() local
905 TAILQ_FOREACH_SAFE(r, &uctx->vm_info.regions, link, next_r) { in vm_clean_param()
906 if (r->flags & VM_FLAG_EPHEMERAL) { in vm_clean_param()
907 rem_um_region(uctx, r); in vm_clean_param()
908 umap_remove_region(&uctx->vm_info, r); in vm_clean_param()
915 struct vm_region *r = NULL; in check_param_map_empty() local
917 TAILQ_FOREACH(r, &uctx->vm_info.regions, link) in check_param_map_empty()
918 assert(!(r->flags & VM_FLAG_EPHEMERAL)); in check_param_map_empty()
1101 struct vm_region *r = NULL; in vm_buf_is_inside_um_private() local
1103 TAILQ_FOREACH(r, &uctx->vm_info.regions, link) { in vm_buf_is_inside_um_private()
1104 if (r->flags & VM_FLAGS_NONPRIV) in vm_buf_is_inside_um_private()
1106 if (core_is_buffer_inside((vaddr_t)va, size, r->va, r->size)) in vm_buf_is_inside_um_private()
1117 struct vm_region *r = NULL; in vm_buf_intersects_um_private() local
1119 TAILQ_FOREACH(r, &uctx->vm_info.regions, link) { in vm_buf_intersects_um_private()
1120 if (r->flags & VM_FLAGS_NONPRIV) in vm_buf_intersects_um_private()
1122 if (core_is_buffer_intersect((vaddr_t)va, size, r->va, r->size)) in vm_buf_intersects_um_private()
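
Lines 1104-1122 skip non-private regions and then test whether the user buffer lies fully inside, or overlaps, a region. Illustrative forms of those two range tests (the real helpers are core_is_buffer_inside() and core_is_buffer_intersect(), which also guard against arithmetic overflow):

	#include <stdbool.h>
	#include <stddef.h>
	#include <stdint.h>

	typedef uintptr_t vaddr_t;

	/* Buffer [b, b + blen) lies completely inside area [a, a + alen). */
	static bool buf_is_inside(vaddr_t b, size_t blen, vaddr_t a, size_t alen)
	{
		return b >= a && b + blen <= a + alen;
	}

	/* Buffer [b, b + blen) overlaps area [a, a + alen) in at least one byte. */
	static bool buf_intersects(vaddr_t b, size_t blen, vaddr_t a, size_t alen)
	{
		return b < a + alen && a < b + blen;
	}
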
1133 struct vm_region *r = NULL; in vm_buf_to_mboj_offs() local
1135 TAILQ_FOREACH(r, &uctx->vm_info.regions, link) { in vm_buf_to_mboj_offs()
1136 if (!r->mobj) in vm_buf_to_mboj_offs()
1138 if (core_is_buffer_inside((vaddr_t)va, size, r->va, r->size)) { in vm_buf_to_mboj_offs()
1141 poffs = mobj_get_phys_offs(r->mobj, in vm_buf_to_mboj_offs()
1143 *mobj = r->mobj; in vm_buf_to_mboj_offs()
1144 *offs = (vaddr_t)va - r->va + r->offset - poffs; in vm_buf_to_mboj_offs()
1331 struct vm_region *r = NULL; in vm_get_mobj() local
1337 r = find_vm_region(&uctx->vm_info, va); in vm_get_mobj()
1338 if (!r) in vm_get_mobj()
1341 r_offs = va - r->va; in vm_get_mobj()
1343 *len = MIN(r->size - r_offs, *len); in vm_get_mobj()
1344 *offs = r->offset + r_offs; in vm_get_mobj()
1345 *prot = r->attr & TEE_MATTR_PROT_MASK; in vm_get_mobj()
1346 return mobj_get(r->mobj); in vm_get_mobj()
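
Lines 1341-1346 clip the request to the region containing va and translate it into an (mobj, offset, length, prot) tuple, taking a reference on the mobj. A worked example with hypothetical numbers:

	/*
	 * Worked example (hypothetical numbers):
	 *
	 *   region r:  va = 0x40000000  size = 0x5000  offset = 0x1000
	 *   request:   va = 0x40003000  *len = 0x4000 (on entry)
	 *
	 *   r_offs = 0x40003000 - 0x40000000        = 0x3000
	 *   *len   = MIN(0x5000 - 0x3000, 0x4000)   = 0x2000  (clipped to region end)
	 *   *offs  = 0x1000 + 0x3000                = 0x4000  (offset into the mobj)
	 *   *prot  = r->attr & TEE_MATTR_PROT_MASK
	 *
	 * The returned mobj carries an extra reference from mobj_get(), so the
	 * caller is expected to release it (mobj_put()) when done.
	 */
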