| /optee_os/core/arch/arm/plat-nuvoton/ |
| main.c |
|   71  void *vaddr = NULL;  in tee_otp_get_hw_unique_key() local |
|   79  vaddr = phys_to_virt(NPCM_MEASURE_BASE + NPCM_MEASURE_UUID,  in tee_otp_get_hw_unique_key() |
|   81  if (!vaddr) {  in tee_otp_get_hw_unique_key() |
|   86  res = tee_hash_createdigest(TEE_ALG_SHA256, (uint8_t *)vaddr,  in tee_otp_get_hw_unique_key() |
|
| /optee_os/ldelf/ |
| ta_elf.c |
|  540  static void add_segment(struct ta_elf *elf, size_t offset, size_t vaddr,  in add_segment() argument |
|  552  seg->vaddr = vaddr;  in add_segment() |
|  604  uint8_t *dst = (void *)(seg->vaddr + elf->load_addr);  in copy_remapped_to() |
|  640  assert(!ADD_OVERFLOW(seg->vaddr, seg->memsz, &dummy));  in adjust_segments() |
|  643  (seg->vaddr & SMALL_PAGE_MASK));  in adjust_segments() |
|  647  assert(seg->vaddr >= prev_seg->vaddr + prev_seg->memsz);  in adjust_segments() |
|  663  prev_end_addr = prev_seg->vaddr + prev_seg->memsz;  in adjust_segments() |
|  680  if (rounddown(seg->vaddr) < prev_end_addr) {  in adjust_segments() |
|  681  assert((seg->vaddr & mask) == (seg->offset & mask));  in adjust_segments() |
|  689  prev_seg->filesz = seg->vaddr + seg->filesz -  in adjust_segments() |
|  [all …] |
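Note: the adjust_segments() hits above are consistency checks on the ELF load segments: segments are kept sorted by vaddr, each one must start at or after the end of the previous one, and a segment's vaddr and file offset must share the same sub-page offset so it can be mapped straight from the file. A small standalone sketch of those invariants, assuming 4 KiB pages (struct and function names are illustrative; the real ldelf code additionally merges segments that overlap within a page, which this sketch does not attempt):

```c
#include <stdbool.h>
#include <stddef.h>

#define SMALL_PAGE_MASK 0xfffUL

struct segment {
	size_t offset;	/* file offset of the segment */
	size_t vaddr;	/* link-time virtual address */
	size_t memsz;	/* size of the segment in memory */
};

/*
 * Segments must be sorted by vaddr, must not overlap the previous
 * segment, and vaddr/offset must share the same sub-page offset so the
 * segment can be mapped directly from the file.
 */
static bool segments_consistent(const struct segment *segs, size_t count)
{
	for (size_t n = 0; n < count; n++) {
		if ((segs[n].vaddr & SMALL_PAGE_MASK) !=
		    (segs[n].offset & SMALL_PAGE_MASK))
			return false;
		if (n && segs[n].vaddr < segs[n - 1].vaddr + segs[n - 1].memsz)
			return false;
	}
	return true;
}
```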
|
| ta_elf.h |
|   18  size_t vaddr;  member |
|
| /optee_os/ta/remoteproc/src/ |
| elf_parser.c |
|   15  uint8_t *vaddr = va;  in va_in_fwm_image_range() local |
|   17  return vaddr >= fw && vaddr < fw + fw_size;  in va_in_fwm_image_range() |
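Note: the two hits above are a plain pointer range check on the loaded firmware image. A minimal standalone equivalent of the same pattern (names are illustrative):

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/* True when va lies inside the [fw, fw + fw_size) firmware image window. */
static bool va_in_image_range(const void *va, const uint8_t *fw, size_t fw_size)
{
	const uint8_t *vaddr = va;

	return vaddr >= fw && vaddr < fw + fw_size;
}
```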
|
| /optee_os/core/arch/arm/plat-synquacer/ |
| rng_pta.c |
|  228  void *vaddr = 0;  in rng_collect_entropy() local |
|  235  vaddr = phys_to_virt_io(THERMAL_SENSOR_BASE0 +  in rng_collect_entropy() |
|  240  (uint8_t)io_read32((vaddr_t)vaddr);  in rng_collect_entropy() |
|
| /optee_os/core/drivers/crypto/caam/include/ |
| caam_sm.h |
|   16  vaddr_t vaddr; /* Secure memory virtual base address */  member |
|
| /optee_os/core/drivers/crypto/caam/ |
| caam_sm.c |
|   68  page_addr->vaddr = sm_privdata.baseaddr +  in caam_sm_alloc() |
|   75  page_desc->page + page_desc->page_count - 1, page_addr->vaddr,  in caam_sm_alloc() |
|
| /optee_os/core/mm/ |
| core_mmu.c |
| 1938  static bool can_map_at_level(paddr_t paddr, vaddr_t vaddr,  in can_map_at_level() argument |
| 1943  if ((vaddr | paddr) & (block_size - 1))  in can_map_at_level() |
| 1974  vaddr_t vaddr = mm->va;  in core_mmu_map_region() local |
| 1982  assert(!((vaddr | paddr) & SMALL_PAGE_MASK));  in core_mmu_map_region() |
| 1994  table_found = core_mmu_find_table(prtn, vaddr, level,  in core_mmu_map_region() |
| 2001  idx = core_mmu_va2idx(&tbl_info, vaddr);  in core_mmu_map_region() |
| 2002  if (!can_map_at_level(paddr, vaddr, size_left,  in core_mmu_map_region() |
| 2032  vaddr += block_size;  in core_mmu_map_region() |
| 2049  vaddr_t vaddr = vstart;  in core_mmu_map_pages() local |
| 2057  if (vaddr & SMALL_PAGE_MASK)  in core_mmu_map_pages() |
| [all …] |
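Note: can_map_at_level() (lines 1938/1943 above) uses the common single-expression alignment test: OR the virtual and physical address and mask with block_size - 1, which is zero only when both are aligned to that power-of-two block size; the mapping loops then step vaddr forward one block or page at a time (lines 2032/2057). A hedged standalone sketch of both idioms (constants and names are illustrative, not the OP-TEE API):

```c
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uintptr_t vaddr_t;
typedef uintptr_t paddr_t;

#define SMALL_PAGE_SIZE 0x1000UL

/* True when both addresses are aligned to block_size (a power of two). */
static bool can_map_at_level(paddr_t paddr, vaddr_t vaddr, size_t block_size)
{
	return ((vaddr | paddr) & (block_size - 1)) == 0;
}

/* Walk a region one page at a time; size is assumed to be page aligned. */
static void for_each_page(vaddr_t va, paddr_t pa, size_t size,
			  void (*map_one)(vaddr_t va, paddr_t pa))
{
	while (size) {
		map_one(va, pa);
		va += SMALL_PAGE_SIZE;
		pa += SMALL_PAGE_SIZE;
		size -= SMALL_PAGE_SIZE;
	}
}
```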
|
| /optee_os/core/drivers/crypto/caam/blob/ |
| caam_dek.c |
|   59  memcpy((void *)dek_sm_addr.vaddr, payload, payload_size);  in caam_dek_generate() |
|
| /optee_os/core/include/mm/ |
| tee_pager.h |
|  207  void tee_pager_add_pages(vaddr_t vaddr, size_t npages, bool unmap); |
|
| /optee_os/core/arch/riscv/mm/ |
| core_mmu_arch.c |
|  687  vaddr_t vaddr = (vaddr_t)va;  in arch_va2pa_helper() local |
|  700  idx = core_mmu_pgt_idx(vaddr, level);  in arch_va2pa_helper() |
|  708  *pa = pte_to_pa(pte) | (vaddr & offset_mask);  in arch_va2pa_helper() |
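Note: arch_va2pa_helper() walks the page tables and then rebuilds the physical address by combining the frame number from the leaf PTE with the in-page offset of the virtual address (line 708 above). A small standalone sketch of that last step, assuming the RISC-V Sv39/Sv48 PTE layout (PPN in bits 53:10) and a 4 KiB leaf page:

```c
#include <stdint.h>

typedef uintptr_t vaddr_t;
typedef uint64_t paddr_t;

#define PAGE_OFFSET_MASK	0xfffULL	/* offset within a 4 KiB page */
#define PTE_PPN_SHIFT		10		/* PPN starts at PTE bit 10 */
#define PTE_PPN_MASK		((1ULL << 44) - 1)	/* 44-bit PPN (Sv39/Sv48) */

/* Physical frame address encoded in a leaf PTE. */
static paddr_t pte_to_pa(uint64_t pte)
{
	return ((pte >> PTE_PPN_SHIFT) & PTE_PPN_MASK) << 12;
}

/* Frame address comes from the PTE, in-page offset from the virtual address. */
static paddr_t leaf_pte_to_pa(uint64_t pte, vaddr_t vaddr)
{
	return pte_to_pa(pte) | (vaddr & PAGE_OFFSET_MASK);
}
```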
|
| /optee_os/core/arch/arm/mm/ |
| tee_pager.c |
| 1826  void tee_pager_add_pages(vaddr_t vaddr, size_t npages, bool unmap)  in tee_pager_add_pages() argument |
| 1831  vaddr, vaddr + npages * SMALL_PAGE_SIZE, (int)unmap);  in tee_pager_add_pages() |
| 1837  vaddr_t va = vaddr + n * SMALL_PAGE_SIZE;  in tee_pager_add_pages() |
|
| /optee_os/core/arch/arm/kernel/ |
| secure_partition.c |
|  367  vaddr_t vaddr = 0;  in sp_unmap_ffa_regions() local |
|  373  vaddr = (vaddr_t)sp_mem_get_va(&ctx->uctx, reg->page_offset,  in sp_unmap_ffa_regions() |
|  377  res = vm_unmap(&ctx->uctx, vaddr, len);  in sp_unmap_ffa_regions() |
|