Searched refs:SMALL_PAGE_MASK (Results 1 – 19 of 19) sorted by relevance
71 if (buffer & SMALL_PAGE_MASK) in msg_param_extract_pages()
91 if (!((vaddr_t)(va + 1) & SMALL_PAGE_MASK)) { in msg_param_extract_pages()
93 if (page & SMALL_PAGE_MASK) in msg_param_extract_pages()
105 if (pages[cnt] & SMALL_PAGE_MASK) in msg_param_extract_pages()
125 page_offset = buf_ptr & SMALL_PAGE_MASK; in msg_param_mobj_from_noncontig()
136 if (!msg_param_extract_pages(buf_ptr & ~SMALL_PAGE_MASK, in msg_param_mobj_from_noncontig()
340 if (offs_bytes & SMALL_PAGE_MASK) in ldelf_syscall_map_bin()
25 #define SMALL_PAGE_MASK 0x00000fff macro
642 assert((seg->offset & SMALL_PAGE_MASK) == in adjust_segments()
643 (seg->vaddr & SMALL_PAGE_MASK)); in adjust_segments()
255 if ((reg->va | reg->size | pad_begin | pad_end) & SMALL_PAGE_MASK) in umap_add_region()
458 if ((va | len) & SMALL_PAGE_MASK) in split_vm_range()
571 if (!len || ((len | old_va) & SMALL_PAGE_MASK)) in vm_remap()
699 if (!len || ((len | va) & SMALL_PAGE_MASK)) in vm_get_flags()
727 if (!len || ((len | va) & SMALL_PAGE_MASK)) in vm_get_prot()
827 if (!l || (va & SMALL_PAGE_MASK)) in vm_unmap()
1334 if (!len || ((*len | va) & SMALL_PAGE_MASK)) in vm_get_mobj()
98 (full_offset & SMALL_PAGE_MASK); in mobj_reg_shm_get_pa()
375 if (mobj_reg_shm->pages[i] & SMALL_PAGE_MASK) in mobj_reg_shm_alloc()
566 if ((m->pa | m->mobj.size) & SMALL_PAGE_MASK) in protect_mem()
1089 if (base & SMALL_PAGE_MASK) { in collect_device_mem_ranges()
1237 if (mask & SMALL_PAGE_MASK) in assign_mem_granularity()
1628 (core_mmu_tee_load_pa & SMALL_PAGE_MASK)) in core_init_mmu_map()
1982 assert(!((vaddr | paddr) & SMALL_PAGE_MASK)); in core_mmu_map_region()
2057 if (vaddr & SMALL_PAGE_MASK) in core_mmu_map_pages()
2070 if (pages[i] & SMALL_PAGE_MASK) { in core_mmu_map_pages()
2135 if ((vaddr | paddr) & SMALL_PAGE_MASK) in core_mmu_map_contiguous_pages()
2750 if (s >= e || s & SMALL_PAGE_MASK || e & SMALL_PAGE_MASK) in teecore_init_pub_ram()
238 return (vaddr_t)&st->state & ~SMALL_PAGE_MASK; in rwp_paged_iv_get_iv_vaddr()
480 vaddr_t va = (vaddr_t)p->tbl & ~SMALL_PAGE_MASK; in push_to_free_list()
236 if (pa & SMALL_PAGE_MASK || len > SMALL_PAGE_SIZE) in tee_pager_phys_to_virt()
605 if ((base & SMALL_PAGE_MASK) || !size) { in alloc_region()
875 if (va & SMALL_PAGE_MASK) in tee_pager_split_um_region()
961 if ((va | len) & SMALL_PAGE_MASK) in tee_pager_merge_um_region()
1510 page_va = fobj_get_iv_vaddr(fobj, fobj_pgidx) & ~SMALL_PAGE_MASK; in make_iv_available()
1551 vaddr_t page_va = ai->va & ~SMALL_PAGE_MASK; in pager_get_page()
1758 vaddr_t page_va = ai->va & ~SMALL_PAGE_MASK; in tee_pager_handle_fault()
139 (offset & SMALL_PAGE_MASK); in get_pa()
719 assert(!(mf->mobj.size & SMALL_PAGE_MASK)); in mobj_ffa_get_by_cookie()
758 (full_offset & SMALL_PAGE_MASK); in ffa_shm_get_pa()
216 if (parg & SMALL_PAGE_MASK) in std_entry_with_parg()
376 !(pa & SMALL_PAGE_MASK) && sz <= SMALL_PAGE_SIZE) in rpc_shm_mobj_alloc()
214 if (parg & SMALL_PAGE_MASK) in std_entry_with_parg()
375 !(pa & SMALL_PAGE_MASK) && sz <= SMALL_PAGE_SIZE) in rpc_shm_mobj_alloc()
453 if ((tx_pa & SMALL_PAGE_MASK) || (rx_pa & SMALL_PAGE_MASK)) { in spmc_handle_rxtx_map()
1253 vaddr_t offs = pbuf & SMALL_PAGE_MASK; in handle_mem_op_tmem()
562 assert(!(init_size & SMALL_PAGE_MASK)); in init_pager_runtime()
321 MIN(SMALL_PAGE_SIZE - (va & SMALL_PAGE_MASK), len); in caam_mem_get_pa_area()
24 #define SMALL_PAGE_MASK ((paddr_t)SMALL_PAGE_SIZE - 1) macro
27 #define IS_PAGE_ALIGNED(addr) (((addr) & SMALL_PAGE_MASK) == 0)