
Searched refs:dst_vma (Results 1 – 10 of 10) sorted by relevance

/OK3568_Linux_fs/kernel/mm/
userfaultfd.c
30 struct vm_area_struct *dst_vma; in find_dst_vma() local
32 dst_vma = find_vma(dst_mm, dst_start); in find_dst_vma()
33 if (!dst_vma) in find_dst_vma()
36 if (dst_start < dst_vma->vm_start || in find_dst_vma()
37 dst_start + len > dst_vma->vm_end) in find_dst_vma()
45 if (!dst_vma->vm_userfaultfd_ctx.ctx) in find_dst_vma()
48 return dst_vma; in find_dst_vma()
58 struct vm_area_struct *dst_vma, in mfill_atomic_install_pte() argument
64 bool writable = dst_vma->vm_flags & VM_WRITE; in mfill_atomic_install_pte()
65 bool vm_shared = dst_vma->vm_flags & VM_SHARED; in mfill_atomic_install_pte()
[all …]
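
Read together, the userfaultfd.c hits outline find_dst_vma(): look up the VMA covering dst_start, require that the whole [dst_start, dst_start + len) range fits inside that single VMA, and reject VMAs that were never registered with a userfaultfd context. A minimal sketch of that validation, reconstructed from the matched lines above (the early-return values and the elided lines are assumptions, not part of the search output):

static struct vm_area_struct *find_dst_vma(struct mm_struct *dst_mm,
					   unsigned long dst_start,
					   unsigned long len)
{
	struct vm_area_struct *dst_vma;

	dst_vma = find_vma(dst_mm, dst_start);
	if (!dst_vma)
		return NULL;	/* no VMA at or above dst_start (assumed return) */

	/* The destination range must lie entirely within this one VMA. */
	if (dst_start < dst_vma->vm_start ||
	    dst_start + len > dst_vma->vm_end)
		return NULL;

	/* Only VMAs registered with userfaultfd are valid copy targets. */
	if (!dst_vma->vm_userfaultfd_ctx.ctx)
		return NULL;

	return dst_vma;
}

The remaining userfaultfd.c matches (line 58 onward) are the start of mfill_atomic_install_pte(), which derives the writability and shared/private nature of the destination mapping from dst_vma->vm_flags before installing the new PTE.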
memory.c
738 pte_t *dst_pte, pte_t *src_pte, struct vm_area_struct *dst_vma, in copy_nonpresent_pte() argument
741 unsigned long vm_flags = dst_vma->vm_flags; in copy_nonpresent_pte()
810 if (!userfaultfd_wp(dst_vma)) in copy_nonpresent_pte()
837 copy_present_page(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_page() argument
887 page_add_new_anon_rmap(new_page, dst_vma, addr, false); in copy_present_page()
888 lru_cache_add_inactive_or_unevictable(new_page, dst_vma); in copy_present_page()
892 pte = mk_pte(new_page, dst_vma->vm_page_prot); in copy_present_page()
893 pte = maybe_mkwrite(pte_mkdirty(pte), dst_vma->vm_flags); in copy_present_page()
894 if (userfaultfd_pte_wp(dst_vma, *src_pte)) in copy_present_page()
897 set_pte_at(dst_vma->vm_mm, addr, dst_pte, pte); in copy_present_page()
[all …]
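
The memory.c hits come from the fork-time page table copy path: copy_nonpresent_pte() and copy_present_page() both consult dst_vma for the child's vm_flags, page protection, and userfaultfd write-protect state. The tail of copy_present_page() visible above builds the child's PTE for the freshly copied page; a sketch of that step follows (locking, rss accounting and error handling are omitted, the wrprotect/uffd-wp marking between matched lines 894 and 897 is assumed from the mainline pattern, and maybe_mkwrite() taking vm_flags follows this tree's variant):

	/* Map the private copy into the child (destination) address space. */
	page_add_new_anon_rmap(new_page, dst_vma, addr, false);
	lru_cache_add_inactive_or_unevictable(new_page, dst_vma);

	pte = mk_pte(new_page, dst_vma->vm_page_prot);
	pte = maybe_mkwrite(pte_mkdirty(pte), dst_vma->vm_flags);
	if (userfaultfd_pte_wp(dst_vma, *src_pte))
		/* Deliver the uffd write-protect marker to the child as well
		 * (body assumed; only the condition appears in the match). */
		pte = pte_wrprotect(pte_mkuffd_wp(pte));
	set_pte_at(dst_vma->vm_mm, addr, dst_pte, pte);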
hugetlb.c
4722 struct vm_area_struct *dst_vma, in hugetlb_mcopy_atomic_pte() argument
4732 int vm_shared = dst_vma->vm_flags & VM_SHARED; in hugetlb_mcopy_atomic_pte()
4733 struct hstate *h = hstate_vma(dst_vma); in hugetlb_mcopy_atomic_pte()
4740 mapping = dst_vma->vm_file->f_mapping; in hugetlb_mcopy_atomic_pte()
4741 idx = vma_hugecache_offset(h, dst_vma, dst_addr); in hugetlb_mcopy_atomic_pte()
4753 hugetlbfs_pagecache_present(h, dst_vma, dst_addr)) { in hugetlb_mcopy_atomic_pte()
4758 page = alloc_huge_page(dst_vma, dst_addr, 0); in hugetlb_mcopy_atomic_pte()
4830 hugepage_add_new_anon_rmap(page, dst_vma, dst_addr); in hugetlb_mcopy_atomic_pte()
4837 writable = dst_vma->vm_flags & VM_WRITE; in hugetlb_mcopy_atomic_pte()
4839 _dst_pte = make_huge_pte(dst_vma, page, writable); in hugetlb_mcopy_atomic_pte()
[all …]
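
hugetlb_mcopy_atomic_pte() is the UFFDIO_COPY implementation for hugetlb mappings; here dst_vma supplies the huge page size (hstate), the backing file and offset, and the protection bits for the new huge PTE. A condensed outline of the flow visible in the matches (allocation failure handling, the copy of the user buffer, and the shared-mapping page cache insertion are omitted; the branch structure around the rmap call is assumed):

	struct hstate *h = hstate_vma(dst_vma);		/* huge page size for this VMA */
	int vm_shared = dst_vma->vm_flags & VM_SHARED;
	struct address_space *mapping = dst_vma->vm_file->f_mapping;
	pgoff_t idx = vma_hugecache_offset(h, dst_vma, dst_addr);

	/* For shared mappings, an already-present page cache page at this
	 * offset means the copy is refused (hugetlbfs_pagecache_present()
	 * above); otherwise a fresh huge page is allocated for dst_vma. */
	page = alloc_huge_page(dst_vma, dst_addr, 0);

	/* ... copy the user buffer into the huge page, lock the page ... */

	if (!vm_shared)
		hugepage_add_new_anon_rmap(page, dst_vma, dst_addr);
	writable = dst_vma->vm_flags & VM_WRITE;
	_dst_pte = make_huge_pte(dst_vma, page, writable);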
huge_memory.c
1015 struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma) in copy_huge_pmd() argument
1024 if (!vma_is_anonymous(dst_vma)) in copy_huge_pmd()
1055 if (!userfaultfd_wp(dst_vma)) in copy_huge_pmd()
1109 if (!userfaultfd_wp(dst_vma)) in copy_huge_pmd()
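
copy_huge_pmd() copies a transparent huge page mapping at fork. The line-1024 check skips VMAs that are not anonymous, since file-backed huge mappings can simply be re-filled on fault in the child, and the two userfaultfd_wp(dst_vma) checks decide whether the uffd write-protect bit is carried into the child's PMD. Only the conditions appear in the matches; in the upstream version of this pattern the bodies clear the bit, roughly (assumed, not shown in the results):

	/* Swap/migration-entry form of the PMD (around line 1055): */
	if (!userfaultfd_wp(dst_vma))
		pmd = pmd_swp_clear_uffd_wp(pmd);

	/* Present huge PMD (around line 1109): */
	if (!userfaultfd_wp(dst_vma))
		pmd = pmd_clear_uffd_wp(pmd);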
shmem.c
2376 struct vm_area_struct *dst_vma, in shmem_mfill_atomic_pte() argument
2382 struct inode *inode = file_inode(dst_vma->vm_file); in shmem_mfill_atomic_pte()
2386 pgoff_t pgoff = linear_page_index(dst_vma, dst_addr); in shmem_mfill_atomic_pte()
2449 ret = mfill_atomic_install_pte(dst_mm, dst_pmd, dst_vma, dst_addr, in shmem_mfill_atomic_pte()
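
The shmem.c hit ties the earlier results together: shmem_mfill_atomic_pte() uses dst_vma to reach the backing shmem inode and to compute the page offset of dst_addr, fills a page in that mapping, and then defers PTE installation to mfill_atomic_install_pte() from userfaultfd.c (line 2449 above). A sketch of the dst_vma-derived pieces (allocation, copy-in and error paths omitted; the arguments after dst_addr in the final call are truncated in the match and assumed from the mainline signature):

	struct inode *inode = file_inode(dst_vma->vm_file);
	pgoff_t pgoff = linear_page_index(dst_vma, dst_addr);	/* file offset of dst_addr */

	/* ... allocate a shmem page at pgoff and copy the user buffer in ... */

	/* Hand the filled page to the common userfaultfd helper for PTE setup. */
	ret = mfill_atomic_install_pte(dst_mm, dst_pmd, dst_vma, dst_addr,
				       page, true, false);	/* trailing args assumed */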
/OK3568_Linux_fs/kernel/include/linux/
shmem_fs.h
125 struct vm_area_struct *dst_vma,
131 #define shmem_mfill_atomic_pte(dst_mm, dst_pmd, dst_vma, dst_addr, \ argument
userfaultfd_k.h
57 struct vm_area_struct *dst_vma,
hugetlb.h
140 struct vm_area_struct *dst_vma,
320 struct vm_area_struct *dst_vma, in hugetlb_mcopy_atomic_pte() argument
huge_mm.h
13 struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma);
mm.h
1747 copy_page_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma);