/optee_os/lib/libutils/ext/arch/arm/
memtag.c
    30  static void dc_gzva(uint64_t va)
    32  asm volatile ("dc gzva, %0" : : "r" (va));
    35  static void dc_gva(uint64_t va)
    37  asm volatile ("dc gva, %0" : : "r" (va));
    40  static vaddr_t stg_and_advance(vaddr_t va)
    42  asm volatile("stg %0, [%0], #16" : "+r"(va) : : "memory");
    43  return va;
    58  static void set_tags_dc_gva(vaddr_t va, size_t size, size_t dcsz)
    61  dc_gva(va);
    62  va += dcsz;
    [all …]
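The three helpers above are AArch64 MTE tag setters: STG tags one 16-byte granule and post-increments its address operand, while DC GVA and DC GZVA tag (and, for GZVA, also zero) a whole cache line per instruction. Below is a minimal sketch of the STG loop, assuming an AArch64 toolchain with MTE enabled (e.g. -march=armv8.5-a+memtag); tag_region() is an illustrative name, not OP-TEE API.

    #include <stddef.h>
    #include <stdint.h>

    /* Tag [va, va + size) with the tag carried in va's top bits. */
    static void tag_region(uintptr_t va, size_t size)
    {
            /* caller guarantees 16-byte alignment and no wrap-around */
            uintptr_t end = va + size;

            while (va < end)
                    /* STG stores the allocation tag and advances va by 16 */
                    asm volatile("stg %0, [%0], #16" : "+r"(va) : : "memory");
    }

set_tags_dc_gva() above has the same shape but steps by the cache-line size (dcsz) instead of the 16-byte granule.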
/optee_os/core/mm/
vm.c
    75  if (ADD_OVERFLOW(prev_reg->va, prev_reg->size, &begin_va) ||
    81  if (reg->va) {
    82  if (reg->va < begin_va)
    84  begin_va = reg->va;
   103  if (end_va <= next_reg->va) {
   104  assert(!reg->va || reg->va == begin_va);
   136  vaddr_t begin = ROUNDDOWN(r->va, CORE_MMU_PGDIR_SIZE);
   137  vaddr_t last = ROUNDUP(r->va + r->size, CORE_MMU_PGDIR_SIZE);
   141  tee_pager_rem_um_region(uctx, r->va, r->size);
   143  pgt_clear_range(uctx, r->va, r->va + r->size);
    [all …]
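select_va_in_range() computes each candidate base with ADD_OVERFLOW so that a region which would wrap the address space is rejected instead of silently truncated. A runnable sketch of that test, modelling ADD_OVERFLOW with the GCC/Clang builtin; the function and parameter names are illustrative.

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* First candidate base after an existing region, or false on wrap. */
    static bool next_candidate(uintptr_t prev_va, size_t prev_size, size_t len,
                               uintptr_t *begin, uintptr_t *end)
    {
            if (__builtin_add_overflow(prev_va, prev_size, begin))
                    return false;   /* previous region ends at the top */
            if (__builtin_add_overflow(*begin, len, end))
                    return false;   /* new region would wrap */
            return true;
    }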
boot_mem.c
    63  vaddr_t va = 0;
    70  if (SUB_OVERFLOW(desc->mem_end, len, &va))
    72  va = ROUNDDOWN2(va, align);
    73  if (va < desc->mem_start)
    75  desc->mem_end = va;
    77  asan_tag_access((void *)va, (void *)(va + len));
    79  return (void *)va;
    82  static void add_padding(struct boot_mem_desc *desc, vaddr_t va)
    92  if (rounded < va && va - rounded > sizeof(*pad)) {
    95  pad->len = va - start;
    [all …]
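mem_alloc_tmp() is a top-down bump allocator: subtract len from the end of the region, round down to the requested alignment, and fail cleanly on underflow. A runnable sketch under those assumptions, with SUB_OVERFLOW and ROUNDDOWN2 expanded into the builtin and a power-of-two mask; struct and function names are illustrative.

    #include <stddef.h>
    #include <stdint.h>

    struct region {
            uintptr_t start;
            uintptr_t end;  /* moves downward as allocations are made */
    };

    static void *alloc_from_top(struct region *r, size_t len, size_t align)
    {
            uintptr_t va;

            if (__builtin_sub_overflow(r->end, len, &va))
                    return NULL;
            va &= ~((uintptr_t)align - 1);  /* align must be a power of two */
            if (va < r->start)
                    return NULL;
            r->end = va;
            return (void *)va;
    }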
core_mmu.c
   240  static bool va_is_in_map(struct tee_mmap_region *map, vaddr_t va)
   244  return (va >= map->va && va <= (map->va + map->size - 1));
   298  static struct tee_mmap_region *find_map_by_va(void *va)
   301  vaddr_t a = (vaddr_t)va;
   305  if (a >= mem_map->map[n].va &&
   306  a <= (mem_map->map[n].va - 1 + mem_map->map[n].size))
   936  return CMP_TRILEAN(mm_a->va, mm_b->va);
   948  teecore_memtype_name(map->type), map->va,
   949  map->va + map->size - 1, map->pa,
   957  static void dump_xlat_table(vaddr_t va, unsigned int level)
    [all …]
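va_is_in_map() compares against map->va + map->size - 1 rather than map->va + map->size, keeping the test correct for a region that ends exactly at the top of the address space, where base + size would wrap to zero. A sketch of the same inclusive-end check:

    #include <stdbool.h>
    #include <stdint.h>

    /* size must be non-zero, as it is for every mapped region */
    static bool va_in_range(uintptr_t va, uintptr_t base, uintptr_t size)
    {
            return va >= base && va <= base + size - 1;
    }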
pgt_cache.c
   254  struct pgt *pp, vaddr_t va)
   256  while (p && p->vabase < va) {
   280  vaddr_t va = 0;
   291  for (va = ROUNDDOWN(r->va, CORE_MMU_PGDIR_SIZE);
   292  va < r->va + r->size; va += CORE_MMU_PGDIR_SIZE) {
   294  p = prune_before_va(pgt_cache, p, pp, va);
   298  if (p->vabase < va) {
   306  if (p->vabase == va)
   315  for (va = ROUNDDOWN(r->va, CORE_MMU_PGDIR_SIZE);
   316  va < r->va + r->size; va += CORE_MMU_PGDIR_SIZE) {
    [all …]
/optee_os/core/drivers/pm/imx/
src.c
    34  vaddr_t va = core_mmu_get_va(SRC_BASE, MEM_AREA_IO_SEC, SRC_SIZE);
    36  return io_read32(va + SRC_GPR1 + ARG_OFFSET(cpu));
    41  vaddr_t va = core_mmu_get_va(SRC_BASE, MEM_AREA_IO_SEC, SRC_SIZE);
    43  io_write32(va + SRC_GPR1 + ARG_OFFSET(cpu), val);
    48  vaddr_t va = core_mmu_get_va(SRC_BASE, MEM_AREA_IO_SEC, SRC_SIZE);
    50  return io_read32(va + SRC_GPR1 + ENTRY_OFFSET(cpu));
    55  vaddr_t va = core_mmu_get_va(SRC_BASE, MEM_AREA_IO_SEC, SRC_SIZE);
    57  io_write32(va + SRC_GPR1 + ENTRY_OFFSET(cpu), val);
    62  vaddr_t va = core_mmu_get_va(SRC_BASE, MEM_AREA_IO_SEC, SRC_SIZE);
    65  io_setbits32(va + SRC_A7RCR1,
    [all …]
gpcv2.c
    25  vaddr_t va = core_mmu_get_va(GPC_BASE, MEM_AREA_IO_SEC, GPC_SIZE);
    28  io_setbits32(va + offset, GPC_PGC_PCG_MASK);
    30  io_clrbits32(va + offset, GPC_PGC_PCG_MASK);
    35  vaddr_t va = core_mmu_get_va(GPC_BASE, MEM_AREA_IO_SEC, GPC_SIZE);
    40  io_setbits32(va + GPC_CPU_PGC_SW_PUP_REQ,
    43  while ((io_read32(va + GPC_CPU_PGC_SW_PUP_REQ) &
/optee_os/core/arch/arm/include/kernel/
tlb_helpers.h
    19  static inline void tlbi_va_allasid_nosync(vaddr_t va)
    22  tlbi_vaae1is(va >> TLBI_VA_SHIFT);
    24  write_tlbimvaais(va);
    28  static inline void tlbi_va_asid_nosync(vaddr_t va, uint32_t asid)
    33  tlbi_vale1is((va >> TLBI_VA_SHIFT) | SHIFT_U64(a, TLBI_ASID_SHIFT));
    34  tlbi_vale1is((va >> TLBI_VA_SHIFT) |
    37  write_tlbimvais((va & ~(BIT32(TLBI_MVA_SHIFT) - 1)) | a);
    38  write_tlbimvais((va & ~(BIT32(TLBI_MVA_SHIFT) - 1)) | a | 1);
    42  static inline void tlbi_va_asid(vaddr_t va, uint32_t asid)
    45  tlbi_va_asid_nosync(va, asid);
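On AArch64 these helpers assemble the TLBI-by-VA operand themselves: the page number goes in the low bits (va >> TLBI_VA_SHIFT) and the ASID in bits [63:48], which is what SHIFT_U64(a, TLBI_ASID_SHIFT) expresses above. A sketch of that encoding, with shift values following the Arm architecture; the helper name is illustrative.

    #include <stdint.h>

    #define TLBI_VA_SHIFT   12  /* operand carries VA[55:12] */
    #define TLBI_ASID_SHIFT 48  /* ASID lives in bits [63:48] */

    static uint64_t tlbi_operand(uint64_t va, uint32_t asid)
    {
            return (va >> TLBI_VA_SHIFT) | ((uint64_t)asid << TLBI_ASID_SHIFT);
    }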
/optee_os/core/kernel/
thread.c
   143  vaddr_t va = 0;
   149  va = thread_core_local[n].tmp_stack_va_end +
   151  init_canaries(STACK_TMP_SIZE, va);
   153  va = thread_core_local[n].abt_stack_va_end;
   154  if (va)
   155  init_canaries(STACK_ABT_SIZE, va);
   164  va = threads[n].stack_va_end;
   165  if (va)
   166  init_canaries(STACK_THREAD_SIZE, va);
   215  vaddr_t va = 0;
    [all …]
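thread_init_canaries() and thread_check_canaries() implement the usual stack-canary scheme: plant known values at the ends of each stack at boot and verify them later to detect overflow or underflow. A runnable sketch of the idea; the magic value and helper names are illustrative, not OP-TEE's.

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    #define CANARY_MAGIC 0xab1234cdU

    static void init_canaries(uint32_t *stack, size_t words)
    {
            stack[0] = CANARY_MAGIC;          /* low end */
            stack[words - 1] = CANARY_MAGIC;  /* high end */
    }

    static void check_canaries(const uint32_t *stack, size_t words)
    {
            assert(stack[0] == CANARY_MAGIC);
            assert(stack[words - 1] == CANARY_MAGIC);
    }

    int main(void)
    {
            uint32_t stack[256];

            init_canaries(stack, 256);
            /* ... stack in use ... */
            check_canaries(stack, 256);
            return 0;
    }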
msg_param.c
    68  uint64_t *va;
    82  va = mobj_get_va(mobj, 0, SMALL_PAGE_SIZE);
    83  assert(va);
    85  for (cnt = 0; cnt < num_pages; cnt++, va++) {
    91  if (!((vaddr_t)(va + 1) & SMALL_PAGE_MASK)) {
    92  page = *va;
   101  va = mobj_get_va(mobj, 0, SMALL_PAGE_SIZE);
   102  assert(va);
   104  pages[cnt] = *va;
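msg_param_extract_pages() walks OP-TEE's non-contiguous shared-memory page list: each 4 KiB list page holds page addresses, and its final 64-bit slot links to the next list page; the alignment test on va + 1 above is how the code detects that final slot. A runnable simulation of the walk using heap buffers, which are not page-aligned, so the link slot is found by index instead of by alignment; all names are illustrative.

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define PAGE_SIZE 4096
    #define SLOTS     (PAGE_SIZE / sizeof(uint64_t)) /* 512; last one links */
    #define TOTAL     600                            /* entries to walk */

    int main(void)
    {
            uint64_t *first = calloc(SLOTS, sizeof(uint64_t));
            uint64_t *second = calloc(SLOTS, sizeof(uint64_t));
            uint64_t *page = first;
            size_t slot = 0;
            size_t cnt;

            assert(first && second);
            /* entries 0..510 in the first page, the rest in the second */
            for (cnt = 0; cnt < SLOTS - 1; cnt++)
                    first[cnt] = cnt;
            first[SLOTS - 1] = (uint64_t)(uintptr_t)second; /* link slot */
            for (cnt = SLOTS - 1; cnt < TOTAL; cnt++)
                    second[cnt - (SLOTS - 1)] = cnt;

            for (cnt = 0; cnt < TOTAL; cnt++) {
                    if (slot == SLOTS - 1) { /* last slot holds the link */
                            page = (uint64_t *)(uintptr_t)page[slot];
                            slot = 0;
                    }
                    assert(page[slot] == cnt);
                    slot++;
            }
            puts("walked 600 entries across two list pages");
            free(first);
            free(second);
            return 0;
    }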
/optee_os/core/arch/arm/mm/
core_mmu.c
    34  void tlbi_va_range(vaddr_t va, size_t len, size_t granule)
    37  assert(!(va & (granule - 1)) && !(len & (granule - 1)));
    41  tlbi_va_allasid_nosync(va);
    43  va += granule;
    49  void tlbi_va_range_asid(vaddr_t va, size_t len, size_t granule, uint32_t asid)
    52  assert(!(va & (granule - 1)) && !(len & (granule - 1)));
    56  tlbi_va_asid_nosync(va, asid);
    58  va += granule;
    64  TEE_Result cache_op_inner(enum cache_op op, void *va, size_t len)
    71  dcache_clean_range(va, len);
    [all …]
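tlbi_va_range() asserts granule alignment, then issues one per-VA invalidation per granule; the real code brackets the batch with barriers. A runnable sketch of the loop with the invalidation stubbed out:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    static void invalidate_one(uintptr_t va)
    {
            printf("tlbi va %#lx\n", (unsigned long)va);  /* stand-in */
    }

    static void tlbi_va_range(uintptr_t va, size_t len, size_t granule)
    {
            assert(!(va & (granule - 1)) && !(len & (granule - 1)));

            while (len) {
                    invalidate_one(va);
                    va += granule;
                    len -= granule;
            }
            /* real code: dsb ish + isb here to make the batch visible */
    }

    int main(void)
    {
            tlbi_va_range(0x40000000, 0x4000, 0x1000);  /* 4 pages */
            return 0;
    }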
tee_pager.c
   331  static struct pager_table *find_pager_table_may_fail(vaddr_t va)
   339  n = ((va & ~mask) - pager_tables[0].tbl_info.va_base) >>
   344  assert(va >= pager_tables[n].tbl_info.va_base &&
   345  va <= (pager_tables[n].tbl_info.va_base | mask));
   350  static struct pager_table *find_pager_table(vaddr_t va)
   352  struct pager_table *pt = find_pager_table_may_fail(va);
   358  bool tee_pager_get_table_info(vaddr_t va, struct core_mmu_table_info *ti)
   360  struct pager_table *pt = find_pager_table_may_fail(va);
   369  static struct core_mmu_table_info *find_table_info(vaddr_t va)
   371  return &find_pager_table(va)->tbl_info;
    [all …]
/optee_os/core/include/mm/
core_memprot.h
    90  paddr_t virt_to_phys(void *va);
    92  static inline paddr_t vaddr_to_phys(vaddr_t va)
    94  return virt_to_phys((void *)va);
   112  bool is_unpaged(const void *va);
   114  static inline bool is_unpaged(const void *va) { return va; }
   125  bool is_nexus(const void *va);
   127  static inline bool is_nexus(const void *va) { return va; }
   132  vaddr_t va;
vm.h
    29  TEE_Result vm_map_pad(struct user_mode_ctx *uctx, vaddr_t *va, size_t len,
    39  static inline TEE_Result vm_map(struct user_mode_ctx *uctx, vaddr_t *va,
    43  return vm_map_pad(uctx, va, len, prot, flags, mobj, offs, 0, 0, 0);
    49  TEE_Result vm_get_flags(struct user_mode_ctx *uctx, vaddr_t va, size_t len,
    52  TEE_Result vm_get_prot(struct user_mode_ctx *uctx, vaddr_t va, size_t len,
    55  TEE_Result vm_set_prot(struct user_mode_ctx *uctx, vaddr_t va, size_t len,
    58  TEE_Result vm_unmap(struct user_mode_ctx *uctx, vaddr_t va, size_t len);
    73  const void *va, size_t size);
    76  const void *va, size_t size);
    79  const void *va, size_t size,
    [all …]
/optee_os/core/arch/arm/kernel/
tee_l2cc_mutex.c
    38  void *va;
    45  va = phys_to_virt(l2cc_mutex_pa, MEM_AREA_NSEC_SHM, MUTEX_SZ);
    46  if (!va)
    49  *(uint32_t *)va = 0;
    50  l2cc_mutex_va = va;
   113  void *va;
   118  va = phys_to_virt(addr, MEM_AREA_NSEC_SHM, MUTEX_SZ);
   119  if (!va)
   122  l2cc_mutex_va = va;
/optee_os/core/include/drivers/
stm32_shared_io.h
    16  void io_clrsetbits32_stm32shregs(vaddr_t va, uint32_t clr, uint32_t set);
    17  void io_mask32_stm32shregs(vaddr_t va, uint32_t value, uint32_t mask);
    19  static inline void io_setbits32_stm32shregs(vaddr_t va, uint32_t value)
    21  io_mask32_stm32shregs(va, value, value);
    24  static inline void io_clrbits32_stm32shregs(vaddr_t va, uint32_t value)
    26  io_mask32_stm32shregs(va, 0, value);
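Both inline helpers reduce to a single read-modify-write primitive: io_mask32_stm32shregs() replaces only the bits selected by mask, so set-bits is mask32(value, value) and clear-bits is mask32(0, value). A runnable sketch of that primitive on a plain variable instead of an MMIO register:

    #include <assert.h>
    #include <stdint.h>

    static void mask32(uint32_t *reg, uint32_t value, uint32_t mask)
    {
            *reg = (*reg & ~mask) | (value & mask);
    }

    int main(void)
    {
            uint32_t reg = 0xf0f0f0f0;

            mask32(&reg, 0x0f, 0x0f);  /* setbits: reg |= 0x0f */
            assert(reg == 0xf0f0f0ff);
            mask32(&reg, 0x00, 0xf0);  /* clrbits: reg &= ~0xf0 */
            assert(reg == 0xf0f0f00f);
            return 0;
    }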
/optee_os/core/tee/
tee_fs_rpc.c
    55  void *va = NULL;
    57  va = thread_rpc_shm_cache_alloc(THREAD_SHM_CACHE_USER_FS,
    60  if (!va)
    63  res = create_filename(va, TEE_FS_NAME_MAX, dfh);
   112  uint8_t *va;
   117  va = thread_rpc_shm_cache_alloc(THREAD_SHM_CACHE_USER_FS,
   120  if (!va)
   131  *out_data = va;
   151  uint8_t *va;
   156  va = thread_rpc_shm_cache_alloc(THREAD_SHM_CACHE_USER_FS,
    [all …]
tee_supp_plugin_rpc.c
    27  void *va = NULL;
    51  va = mobj_get_va(mobj, 0, len);
    52  if (!va) {
    58  memcpy(va, buf_core, len);
    60  res = copy_from_user(va, buf_user, len);
    81  memcpy(buf_core, va, *outlen <= len ? *outlen : len);
    83  res = copy_to_user(buf_user, va, len);
/optee_os/core/arch/arm/plat-zynq7k/
main.c
   100  static void *va;
   103  if (!va)
   104  va = phys_to_virt(PL310_BASE, MEM_AREA_IO_SEC, 1);
   105  return (vaddr_t)va;
   168  static vaddr_t va;
   170  if (!va)
   171  va = (vaddr_t)phys_to_virt(SLCR_BASE,
   175  io_write32(va + addr, val);
   189  static vaddr_t va;
   191  if (!va)
    [all …]
/optee_os/core/drivers/
imx_rngb.c
    79  rng->error = io_read32(rng->base.va + RNG_ESR);
    80  status = io_read32(rng->base.va + RNG_SR);
    90  io_setbits32(rng->base.va + RNG_CR,
    92  io_setbits32(rng->base.va + RNG_CMD,
    98  io_clrbits32(rng->base.va + RNG_CR,
   110  io_setbits32(rng->base.va + RNG_CR, RNG_CR_AR);
   121  rngb.base.va = (vaddr_t)core_mmu_add_mapping(MEM_AREA_IO_SEC,
   123  if (!rngb.base.va)
   160  if (dt_map_dev(fdt, off, &rngb.base.va, &rngb.size, DT_MAP_AUTO) < 0)
   163  rngb.base.pa = virt_to_phys((void *)rngb.base.va);
    [all …]
/optee_os/core/arch/arm/tee/
cache.c
    18  TEE_Result cache_operation(enum utee_cache_operation op, void *va, size_t len)
    23  pa = virt_to_phys(va);
    31  res = cache_op_inner(DCACHE_AREA_CLEAN, va, len);
    38  return cache_op_inner(DCACHE_AREA_CLEAN_INV, va, len);
    42  res = cache_op_inner(DCACHE_AREA_CLEAN, va, len);
    52  return cache_op_inner(DCACHE_AREA_INVALIDATE, va, len);
/optee_os/core/drivers/qcom/prng/
prng.c
    20  vaddr_t va;
    30  if (!prng.va)
    37  if (!(io_read32(prng.va + SEC_PRNG_STATUS) &
    41  while ((val = io_read32(prng.va + SEC_PRNG_DATA_OUT)) == 0)
    58  prng.va = (vaddr_t)phys_to_virt_io(prng.pa, SEC_PRNG_REG_SIZE);
    59  if (!prng.va)
/optee_os/ldelf/
sys.c
    39  TEE_Result sys_map_zi(size_t num_bytes, uint32_t flags, vaddr_t *va,
    42  return _ldelf_map_zi(va, num_bytes, pad_begin, pad_end, flags);
    45  TEE_Result sys_unmap(vaddr_t va, size_t num_bytes)
    47  return _ldelf_unmap(va, num_bytes);
    60  TEE_Result sys_map_ta_bin(vaddr_t *va, size_t num_bytes, uint32_t flags,
    64  return _ldelf_map_bin(va, num_bytes, handle, offs,
    75  TEE_Result sys_set_prot(vaddr_t va, size_t num_bytes, uint32_t flags)
    77  return _ldelf_set_prot(va, num_bytes, flags);
/optee_os/core/arch/arm/plat-ti/
ti_pl310.c
    19  static void *va;
    22  if (!va)
    23  va = phys_to_virt(PL310_BASE, MEM_AREA_IO_SEC,
    25  return (vaddr_t)va;
/optee_os/lib/libutils/ext/include/
memtag.h
   124  vaddr_t va = (vaddr_t)addr;
   127  va &= ~SHIFT_U64(MEMTAG_TAG_MASK, MEMTAG_TAG_SHIFT);
   130  return va;
   165  vaddr_t va = memtag_strip_tag_vaddr((void *)addr);
   168  va |= SHIFT_U64(tag, MEMTAG_TAG_SHIFT);
   171  return va;
   195  uint64_t va = (vaddr_t)addr;
   197  return (va >> MEMTAG_TAG_SHIFT) & MEMTAG_TAG_MASK;
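These accessors are plain shift-and-mask arithmetic on the pointer value: MTE keeps a 4-bit tag in address bits [59:56], so stripping, inserting, and reading a tag never touch memory. A runnable sketch with the Armv8.5 tag layout spelled out; the constants are assumed to match what MEMTAG_TAG_SHIFT and MEMTAG_TAG_MASK expand to.

    #include <assert.h>
    #include <stdint.h>

    #define TAG_SHIFT 56
    #define TAG_MASK  0xfULL

    static uint64_t strip_tag(uint64_t va)
    {
            return va & ~(TAG_MASK << TAG_SHIFT);
    }

    static uint64_t insert_tag(uint64_t va, uint64_t tag)
    {
            return strip_tag(va) | ((tag & TAG_MASK) << TAG_SHIFT);
    }

    static uint64_t get_tag(uint64_t va)
    {
            return (va >> TAG_SHIFT) & TAG_MASK;
    }

    int main(void)
    {
            uint64_t va = insert_tag(0x0000ffff12345678ULL, 0xa);

            assert(get_tag(va) == 0xa);
            assert(strip_tag(va) == 0x0000ffff12345678ULL);
            return 0;
    }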