Lines matching refs: anon_vma

87 static inline struct anon_vma *anon_vma_alloc(void)  in anon_vma_alloc()
89 struct anon_vma *anon_vma; in anon_vma_alloc() local
91 anon_vma = kmem_cache_alloc(anon_vma_cachep, GFP_KERNEL); in anon_vma_alloc()
92 if (anon_vma) { in anon_vma_alloc()
93 atomic_set(&anon_vma->refcount, 1); in anon_vma_alloc()
94 anon_vma->degree = 1; /* Reference for first vma */ in anon_vma_alloc()
95 anon_vma->parent = anon_vma; in anon_vma_alloc()
100 anon_vma->root = anon_vma; in anon_vma_alloc()
103 return anon_vma; in anon_vma_alloc()
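
The anon_vma_alloc() lines above show the allocation pattern: a fresh anon_vma starts with refcount 1 and degree 1, and both parent and root point back at itself until a later fork rewires them. A minimal user-space sketch of that initialization follows; the demo_anon_vma type and plain malloc() are illustrative stand-ins for the kernel's slab cache, not its API:

#include <stdatomic.h>
#include <stdlib.h>

/* Illustrative stand-in for struct anon_vma: a refcount plus
 * self-referential root/parent pointers, as anon_vma_alloc() sets up. */
struct demo_anon_vma {
        atomic_int refcount;
        int degree;
        struct demo_anon_vma *root;
        struct demo_anon_vma *parent;
};

static struct demo_anon_vma *demo_anon_vma_alloc(void)
{
        struct demo_anon_vma *av = malloc(sizeof(*av));

        if (av) {
                atomic_init(&av->refcount, 1);  /* reference for the first vma */
                av->degree = 1;
                av->parent = av;                /* no parent until fork */
                av->root = av;                  /* root of its own hierarchy */
        }
        return av;
}

int main(void)
{
        struct demo_anon_vma *av = demo_anon_vma_alloc();

        free(av);
        return 0;
}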
106 static inline void anon_vma_free(struct anon_vma *anon_vma) in anon_vma_free() argument
108 VM_BUG_ON(atomic_read(&anon_vma->refcount)); in anon_vma_free()
128 if (rwsem_is_locked(&anon_vma->root->rwsem)) { in anon_vma_free()
129 anon_vma_lock_write(anon_vma); in anon_vma_free()
130 anon_vma_unlock_write(anon_vma); in anon_vma_free()
133 kmem_cache_free(anon_vma_cachep, anon_vma); in anon_vma_free()
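
anon_vma_free() runs only after the refcount has already dropped to zero, yet a reader on the page_lock_anon_vma_read() fast path may still be inside the root's rwsem for an instant after the last reference went away; the rwsem_is_locked() check plus the immediate lock/unlock pair lets that reader drain before the memory is returned. A hedged user-space analogue of the drain-before-free idea, with a pthread rwlock standing in for the kernel rwsem and all demo_* names being mine:

#include <errno.h>
#include <pthread.h>
#include <stdlib.h>

struct demo_obj {
        pthread_rwlock_t lock;
};

/* Before freeing, make sure no late lock holder is still inside the
 * critical section: if the trylock fails, take and release the write
 * lock so the holder has fully drained by the time we free. */
static void demo_free_after_drain(struct demo_obj *obj)
{
        if (pthread_rwlock_trywrlock(&obj->lock) == EBUSY) {
                pthread_rwlock_wrlock(&obj->lock);
                pthread_rwlock_unlock(&obj->lock);
        } else {
                pthread_rwlock_unlock(&obj->lock);
        }
        pthread_rwlock_destroy(&obj->lock);
        free(obj);
}

int main(void)
{
        struct demo_obj *obj = malloc(sizeof(*obj));

        if (!obj)
                return 1;
        pthread_rwlock_init(&obj->lock, NULL);
        demo_free_after_drain(obj);
        return 0;
}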
148 struct anon_vma *anon_vma) in anon_vma_chain_link() argument
151 avc->anon_vma = anon_vma; in anon_vma_chain_link()
153 anon_vma_interval_tree_insert(avc, &anon_vma->rb_root); in anon_vma_chain_link()
187 struct anon_vma *anon_vma, *allocated; in __anon_vma_prepare() local
196 anon_vma = find_mergeable_anon_vma(vma); in __anon_vma_prepare()
198 if (!anon_vma) { in __anon_vma_prepare()
199 anon_vma = anon_vma_alloc(); in __anon_vma_prepare()
200 if (unlikely(!anon_vma)) in __anon_vma_prepare()
202 allocated = anon_vma; in __anon_vma_prepare()
205 anon_vma_lock_write(anon_vma); in __anon_vma_prepare()
208 if (likely(!vma->anon_vma)) { in __anon_vma_prepare()
209 vma->anon_vma = anon_vma; in __anon_vma_prepare()
210 anon_vma_chain_link(vma, avc, anon_vma); in __anon_vma_prepare()
212 anon_vma->degree++; in __anon_vma_prepare()
217 anon_vma_unlock_write(anon_vma); in __anon_vma_prepare()
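
__anon_vma_prepare() first tries find_mergeable_anon_vma() so neighbouring vmas can share one anon_vma, allocates a new one only when that fails, and then re-checks vma->anon_vma under the write lock before installing it; a racing thread that got there first keeps its anon_vma and the speculative allocation is dropped afterwards. A compressed sketch of that allocate-outside, recheck-under-lock pattern; the demo_* names and the plain mutex are illustrative, not the kernel's locking:

#include <pthread.h>
#include <stdlib.h>

struct demo_vma {
        pthread_mutex_t lock;
        void *anon;             /* lazily installed shared object */
};

/* Install an anon object on the vma exactly once, even under races:
 * allocate speculatively outside the lock, then re-check under it. */
static int demo_prepare(struct demo_vma *vma)
{
        void *allocated = NULL;
        void *anon = NULL;      /* a find_mergeable_anon_vma() analogue would go here */

        if (!anon) {
                anon = malloc(64);
                if (!anon)
                        return -1;
                allocated = anon;
        }

        pthread_mutex_lock(&vma->lock);
        if (!vma->anon) {
                vma->anon = anon;       /* we won the race: install it */
                allocated = NULL;
        }
        pthread_mutex_unlock(&vma->lock);

        free(allocated);                /* lost the race: drop the spare */
        return 0;
}

int main(void)
{
        struct demo_vma vma = { PTHREAD_MUTEX_INITIALIZER, NULL };

        demo_prepare(&vma);
        free(vma.anon);
        return 0;
}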
240 static inline struct anon_vma *lock_anon_vma_root(struct anon_vma *root, struct anon_vma *anon_vma) in lock_anon_vma_root() argument
242 struct anon_vma *new_root = anon_vma->root; in lock_anon_vma_root()
252 static inline void unlock_anon_vma_root(struct anon_vma *root) in unlock_anon_vma_root()
279 struct anon_vma *root = NULL; in anon_vma_clone()
282 struct anon_vma *anon_vma; in anon_vma_clone() local
292 anon_vma = pavc->anon_vma; in anon_vma_clone()
293 root = lock_anon_vma_root(root, anon_vma); in anon_vma_clone()
294 anon_vma_chain_link(dst, avc, anon_vma); in anon_vma_clone()
304 if (!dst->anon_vma && src->anon_vma && in anon_vma_clone()
305 anon_vma != src->anon_vma && anon_vma->degree < 2) in anon_vma_clone()
306 dst->anon_vma = anon_vma; in anon_vma_clone()
308 if (dst->anon_vma) in anon_vma_clone()
309 dst->anon_vma->degree++; in anon_vma_clone()
320 dst->anon_vma = NULL; in anon_vma_clone()
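
anon_vma_clone() walks every anon_vma_chain attached to the source vma and links a fresh chain entry for the destination to the same anon_vma, with lock_anon_vma_root() re-taking the root lock only when the root actually changes between entries; when called from fork with dst->anon_vma still NULL it may also reuse an existing anon_vma whose degree is below two for the child. A bare-bones sketch of just the chain-duplication part, with an ordinary linked list standing in for the interval tree and no locking shown (demo_* names are illustrative):

#include <stdlib.h>

struct demo_anon { int id; };

/* One chain entry linking a vma to an anon it can be found through,
 * in the spirit of struct anon_vma_chain. */
struct demo_chain {
        struct demo_anon *anon;
        struct demo_chain *next;
};

/* Give dst its own chain entry for every anon already reachable from
 * src, so both lists point at the same shared anon objects. */
static int demo_clone_chain(struct demo_chain **dst, struct demo_chain *src)
{
        for (; src; src = src->next) {
                struct demo_chain *avc = malloc(sizeof(*avc));

                if (!avc)
                        return -1;
                avc->anon = src->anon;  /* share, don't copy, the anon itself */
                avc->next = *dst;
                *dst = avc;
        }
        return 0;
}

int main(void)
{
        struct demo_anon a = { 1 };
        struct demo_chain src = { &a, NULL };
        struct demo_chain *dst = NULL;

        demo_clone_chain(&dst, &src);
        while (dst) {
                struct demo_chain *next = dst->next;
                free(dst);
                dst = next;
        }
        return 0;
}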
333 struct anon_vma *anon_vma; in anon_vma_fork() local
337 if (!pvma->anon_vma) in anon_vma_fork()
341 vma->anon_vma = NULL; in anon_vma_fork()
352 if (vma->anon_vma) in anon_vma_fork()
356 anon_vma = anon_vma_alloc(); in anon_vma_fork()
357 if (!anon_vma) in anon_vma_fork()
367 anon_vma->root = pvma->anon_vma->root; in anon_vma_fork()
368 anon_vma->parent = pvma->anon_vma; in anon_vma_fork()
374 get_anon_vma(anon_vma->root); in anon_vma_fork()
376 vma->anon_vma = anon_vma; in anon_vma_fork()
377 anon_vma_lock_write(anon_vma); in anon_vma_fork()
378 anon_vma_chain_link(vma, avc, anon_vma); in anon_vma_fork()
379 anon_vma->parent->degree++; in anon_vma_fork()
380 anon_vma_unlock_write(anon_vma); in anon_vma_fork()
385 put_anon_vma(anon_vma); in anon_vma_fork()
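
anon_vma_fork() first inherits the parent's chains through anon_vma_clone(), then gives the child vma its own anon_vma whose root is the parent's root and whose parent is the parent's anon_vma; get_anon_vma(anon_vma->root) pins the root because the lock the child will use lives there. A small sketch of the child wiring and the extra root reference, using C11 atomics in place of the kernel's atomic_t (demo_* names are mine):

#include <stdatomic.h>
#include <stdlib.h>

struct demo_node {
        atomic_int refcount;
        struct demo_node *root;
        struct demo_node *parent;
};

static struct demo_node *demo_node_new(void)
{
        struct demo_node *n = malloc(sizeof(*n));

        if (n) {
                atomic_init(&n->refcount, 1);
                n->root = n;
                n->parent = n;
        }
        return n;
}

/* Hang a child off an existing parent: share the parent's root and
 * take a reference on that root, mirroring get_anon_vma(anon_vma->root),
 * because the lock the child will use lives in the root. */
static struct demo_node *demo_node_fork(struct demo_node *parent)
{
        struct demo_node *child = demo_node_new();

        if (child) {
                child->root = parent->root;
                child->parent = parent;
                atomic_fetch_add(&parent->root->refcount, 1);
        }
        return child;
}

int main(void)
{
        struct demo_node *parent = demo_node_new();
        struct demo_node *child;

        if (!parent)
                return 1;
        child = demo_node_fork(parent);

        /* teardown shortened: a real caller would drop the references
         * it took instead of freeing both nodes directly */
        free(child);
        free(parent);
        return 0;
}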
394 struct anon_vma *root = NULL; in unlink_anon_vmas()
401 struct anon_vma *anon_vma = avc->anon_vma; in unlink_anon_vmas() local
403 root = lock_anon_vma_root(root, anon_vma); in unlink_anon_vmas()
404 anon_vma_interval_tree_remove(avc, &anon_vma->rb_root); in unlink_anon_vmas()
410 if (RB_EMPTY_ROOT(&anon_vma->rb_root.rb_root)) { in unlink_anon_vmas()
411 anon_vma->parent->degree--; in unlink_anon_vmas()
418 if (vma->anon_vma) in unlink_anon_vmas()
419 vma->anon_vma->degree--; in unlink_anon_vmas()
428 struct anon_vma *anon_vma = avc->anon_vma; in unlink_anon_vmas() local
430 VM_WARN_ON(anon_vma->degree); in unlink_anon_vmas()
431 put_anon_vma(anon_vma); in unlink_anon_vmas()
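
unlink_anon_vmas() is a two-pass teardown: the first pass removes each chain entry from its anon_vma's interval tree under the shared root lock and adjusts the degree counters, and only a second pass, after the lock is dropped, calls put_anon_vma(), which is what may actually free the structures. A sketch of that unlink-under-lock, release-outside-lock split; the mutex, the nr_links counter and the demo_* names are simplified stand-ins:

#include <pthread.h>
#include <stdatomic.h>
#include <stdlib.h>

struct demo_anon {
        atomic_int refcount;
        int nr_links;           /* how many chain entries still point here */
};

struct demo_chain {
        struct demo_anon *anon;
        struct demo_chain *next;
};

static pthread_mutex_t demo_root_lock = PTHREAD_MUTEX_INITIALIZER;

static void demo_put(struct demo_anon *anon)
{
        if (atomic_fetch_sub(&anon->refcount, 1) == 1)
                free(anon);
}

/* Pass 1: unlink every entry under the lock.  Pass 2: drop references
 * (and possibly free) only after the lock has been released. */
static void demo_unlink_all(struct demo_chain **list)
{
        struct demo_chain *avc;

        pthread_mutex_lock(&demo_root_lock);
        for (avc = *list; avc; avc = avc->next)
                avc->anon->nr_links--;
        pthread_mutex_unlock(&demo_root_lock);

        while ((avc = *list)) {
                *list = avc->next;
                demo_put(avc->anon);
                free(avc);
        }
}

int main(void)
{
        struct demo_anon *anon = malloc(sizeof(*anon));
        struct demo_chain *list = malloc(sizeof(*list));

        if (!anon || !list)
                return 1;
        atomic_init(&anon->refcount, 1);
        anon->nr_links = 1;
        list->anon = anon;
        list->next = NULL;
        demo_unlink_all(&list);
        return 0;
}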
440 struct anon_vma *anon_vma = data; in anon_vma_ctor() local
442 init_rwsem(&anon_vma->rwsem); in anon_vma_ctor()
443 atomic_set(&anon_vma->refcount, 0); in anon_vma_ctor()
444 anon_vma->rb_root = RB_ROOT_CACHED; in anon_vma_ctor()
449 anon_vma_cachep = kmem_cache_create("anon_vma", sizeof(struct anon_vma), in anon_vma_init()
480 struct anon_vma *page_get_anon_vma(struct page *page) in page_get_anon_vma()
482 struct anon_vma *anon_vma = NULL; in page_get_anon_vma() local
492 anon_vma = (struct anon_vma *) (anon_mapping - PAGE_MAPPING_ANON); in page_get_anon_vma()
493 if (!atomic_inc_not_zero(&anon_vma->refcount)) { in page_get_anon_vma()
494 anon_vma = NULL; in page_get_anon_vma()
507 put_anon_vma(anon_vma); in page_get_anon_vma()
513 return anon_vma; in page_get_anon_vma()
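
page_get_anon_vma() recovers the anon_vma pointer by stripping the PAGE_MAPPING_ANON bit from page->mapping and then pins it only via atomic_inc_not_zero(), so an anon_vma whose refcount has already reached zero is never resurrected. A user-space sketch of an inc-not-zero primitive built from a compare-and-swap loop; only the pattern comes from the listing, the demo_inc_not_zero() helper itself is illustrative:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Take a reference only if the count is still non-zero, mirroring
 * atomic_inc_not_zero(): a zero count means the object is dying and
 * must not be pinned again. */
static bool demo_inc_not_zero(atomic_int *refcount)
{
        int old = atomic_load(refcount);

        while (old != 0) {
                if (atomic_compare_exchange_weak(refcount, &old, old + 1))
                        return true;
                /* old was reloaded by the failed CAS; retry */
        }
        return false;
}

int main(void)
{
        atomic_int live = 1, dead = 0;

        printf("live: %d\n", demo_inc_not_zero(&live));  /* 1: ref taken */
        printf("dead: %d\n", demo_inc_not_zero(&dead));  /* 0: refused */
        return 0;
}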
524 struct anon_vma *page_lock_anon_vma_read(struct page *page, in page_lock_anon_vma_read()
527 struct anon_vma *anon_vma = NULL; in page_lock_anon_vma_read() local
528 struct anon_vma *root_anon_vma; in page_lock_anon_vma_read()
539 anon_vma = (struct anon_vma *) (anon_mapping - PAGE_MAPPING_ANON); in page_lock_anon_vma_read()
540 root_anon_vma = READ_ONCE(anon_vma->root); in page_lock_anon_vma_read()
549 anon_vma = NULL; in page_lock_anon_vma_read()
555 anon_vma = NULL; in page_lock_anon_vma_read()
560 anon_vma = NULL; in page_lock_anon_vma_read()
566 if (!atomic_inc_not_zero(&anon_vma->refcount)) { in page_lock_anon_vma_read()
567 anon_vma = NULL; in page_lock_anon_vma_read()
573 put_anon_vma(anon_vma); in page_lock_anon_vma_read()
579 anon_vma_lock_read(anon_vma); in page_lock_anon_vma_read()
581 if (atomic_dec_and_test(&anon_vma->refcount)) { in page_lock_anon_vma_read()
587 anon_vma_unlock_read(anon_vma); in page_lock_anon_vma_read()
588 __put_anon_vma(anon_vma); in page_lock_anon_vma_read()
589 anon_vma = NULL; in page_lock_anon_vma_read()
592 return anon_vma; in page_lock_anon_vma_read()
596 return anon_vma; in page_lock_anon_vma_read()
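
page_lock_anon_vma_read() has a fast path that merely read-trylocks the root rwsem, with no refcount traffic at all when it succeeds and the page is still mapped, and a slow path that pins the anon_vma with a reference, sleeps on the lock, and then drops the pin, unlocking and freeing the anon_vma itself if that pin turns out to have been the last reference. The fast path is only safe because anon_vma_free() drains the rwsem as shown earlier; the sketch below reproduces the control flow only, with pthread primitives and demo_* names as stand-ins:

#include <pthread.h>
#include <stdatomic.h>
#include <stdlib.h>

struct demo_anon {
        pthread_rwlock_t lock;
        atomic_int refcount;
};

static void demo_put(struct demo_anon *anon)
{
        if (atomic_fetch_sub(&anon->refcount, 1) == 1) {
                pthread_rwlock_destroy(&anon->lock);
                free(anon);
        }
}

/* Return anon read-locked, or NULL if it died while we slept on the
 * lock.  Fast path: trylock with no reference taken.  Slow path: pin
 * with a reference so the object cannot vanish while we sleep, then
 * drop the pin; if ours was the last reference the object is already
 * dead, so unlock and free it instead of returning it. */
static struct demo_anon *demo_lock_read(struct demo_anon *anon)
{
        if (pthread_rwlock_tryrdlock(&anon->lock) == 0)
                return anon;                            /* fast path */

        atomic_fetch_add(&anon->refcount, 1);           /* pin across the sleep */
        pthread_rwlock_rdlock(&anon->lock);
        if (atomic_fetch_sub(&anon->refcount, 1) == 1) {
                pthread_rwlock_unlock(&anon->lock);
                pthread_rwlock_destroy(&anon->lock);
                free(anon);
                return NULL;
        }
        return anon;
}

int main(void)
{
        struct demo_anon *anon = malloc(sizeof(*anon));

        if (!anon)
                return 1;
        pthread_rwlock_init(&anon->lock, NULL);
        atomic_init(&anon->refcount, 1);
        if (demo_lock_read(anon)) {
                pthread_rwlock_unlock(&anon->lock);
                demo_put(anon);
        }
        return 0;
}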
599 void page_unlock_anon_vma_read(struct anon_vma *anon_vma) in page_unlock_anon_vma_read() argument
601 anon_vma_unlock_read(anon_vma); in page_unlock_anon_vma_read()
720 struct anon_vma *page__anon_vma = page_anon_vma(page); in page_address_in_vma()
725 if (!vma->anon_vma || !page__anon_vma || in page_address_in_vma()
726 vma->anon_vma->root != page__anon_vma->root) in page_address_in_vma()
1034 struct anon_vma *anon_vma = vma->anon_vma; in page_move_anon_rmap() local
1039 VM_BUG_ON_VMA(!anon_vma, vma); in page_move_anon_rmap()
1041 anon_vma = (void *) anon_vma + PAGE_MAPPING_ANON; in page_move_anon_rmap()
1047 WRITE_ONCE(page->mapping, (struct address_space *) anon_vma); in page_move_anon_rmap()
1060 struct anon_vma *anon_vma = vma->anon_vma; in __page_set_anon_rmap() local
1062 BUG_ON(!anon_vma); in __page_set_anon_rmap()
1073 anon_vma = anon_vma->root; in __page_set_anon_rmap()
1075 anon_vma = (void *) anon_vma + PAGE_MAPPING_ANON; in __page_set_anon_rmap()
1076 page->mapping = (struct address_space *) anon_vma; in __page_set_anon_rmap()
1101 VM_BUG_ON_PAGE(page_anon_vma(page)->root != vma->anon_vma->root, page); in __page_check_anon_rmap()
1898 void __put_anon_vma(struct anon_vma *anon_vma) in __put_anon_vma() argument
1900 struct anon_vma *root = anon_vma->root; in __put_anon_vma()
1902 anon_vma_free(anon_vma); in __put_anon_vma()
1903 if (root != anon_vma && atomic_dec_and_test(&root->refcount)) in __put_anon_vma()
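
__put_anon_vma() is the slow half of put_anon_vma(): it frees the anon_vma itself and, when that anon_vma was not its own root, also drops the reference it held on the root (taken back in anon_vma_fork()), which may free the root as well. A tiny sketch of that cascading release (demo_* names are illustrative):

#include <stdatomic.h>
#include <stdlib.h>

struct demo_node {
        atomic_int refcount;
        struct demo_node *root;
};

/* Release a node whose own refcount has already reached zero; if it
 * was hanging off a separate root, drop the reference it held on that
 * root as well, freeing the root if that was the last one. */
static void demo_put_node(struct demo_node *node)
{
        struct demo_node *root = node->root;

        free(node);
        if (root != node && atomic_fetch_sub(&root->refcount, 1) == 1)
                free(root);
}

int main(void)
{
        struct demo_node *root = malloc(sizeof(*root));
        struct demo_node *child = malloc(sizeof(*child));

        if (!root || !child)
                return 1;
        root->root = root;
        atomic_init(&root->refcount, 1);        /* held by the child only */
        child->root = root;
        atomic_init(&child->refcount, 0);       /* last ref just dropped by caller */

        demo_put_node(child);   /* frees child, then drops and frees root too */
        return 0;
}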
1907 static struct anon_vma *rmap_walk_anon_lock(struct page *page, in rmap_walk_anon_lock()
1910 struct anon_vma *anon_vma; in rmap_walk_anon_lock() local
1921 anon_vma = page_anon_vma(page); in rmap_walk_anon_lock()
1922 if (!anon_vma) in rmap_walk_anon_lock()
1925 if (anon_vma_trylock_read(anon_vma)) in rmap_walk_anon_lock()
1929 anon_vma = NULL; in rmap_walk_anon_lock()
1934 anon_vma_lock_read(anon_vma); in rmap_walk_anon_lock()
1936 return anon_vma; in rmap_walk_anon_lock()
1956 struct anon_vma *anon_vma; in rmap_walk_anon() local
1961 anon_vma = page_anon_vma(page); in rmap_walk_anon()
1963 VM_BUG_ON_PAGE(!anon_vma, page); in rmap_walk_anon()
1965 anon_vma = rmap_walk_anon_lock(page, rwc); in rmap_walk_anon()
1967 if (!anon_vma) in rmap_walk_anon()
1972 anon_vma_interval_tree_foreach(avc, &anon_vma->rb_root, in rmap_walk_anon()
1990 anon_vma_unlock_read(anon_vma); in rmap_walk_anon()
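
rmap_walk_anon() takes the anon_vma read lock through rmap_walk_anon_lock() when the caller does not already hold it, then uses anon_vma_interval_tree_foreach() to visit every vma whose range covers the page and hands each one to the caller-supplied rmap_one() callback, stopping early when the walk is done. A flat-array sketch of that overlap-walk-with-callback shape, without the interval tree, the locking, or the rmap_walk_control plumbing (demo_* names are illustrative):

#include <stdbool.h>
#include <stdio.h>

struct demo_vma {
        unsigned long pgoff_start, pgoff_end;   /* inclusive range of page offsets */
};

/* Visit every vma whose range covers @pgoff and hand it to @rmap_one,
 * stopping early when the callback asks to, in the spirit of
 * rmap_walk_anon() over the anon_vma interval tree. */
static void demo_rmap_walk(struct demo_vma *vmas, int nr, unsigned long pgoff,
                           bool (*rmap_one)(struct demo_vma *, unsigned long))
{
        for (int i = 0; i < nr; i++) {
                struct demo_vma *vma = &vmas[i];

                if (pgoff < vma->pgoff_start || pgoff > vma->pgoff_end)
                        continue;
                if (!rmap_one(vma, pgoff))
                        break;                  /* callback is done with the walk */
        }
}

static bool demo_print_one(struct demo_vma *vma, unsigned long pgoff)
{
        printf("page offset %lu mapped by vma [%lu, %lu]\n",
               pgoff, vma->pgoff_start, vma->pgoff_end);
        return true;
}

int main(void)
{
        struct demo_vma vmas[] = { { 0, 15 }, { 8, 31 }, { 100, 120 } };

        demo_rmap_walk(vmas, 3, 10, demo_print_one);
        return 0;
}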
2097 struct anon_vma *anon_vma = vma->anon_vma; in hugepage_add_anon_rmap() local
2101 BUG_ON(!anon_vma); in hugepage_add_anon_rmap()