Lines matching refs: anon_vma
(Identifier cross-reference; all hits are in the Linux kernel's mm/rmap.c. The leading number on each hit is the source line in that file, the trailing "in func()" names the enclosing function, and "local"/"argument" mark how the identifier is used on that line.)
89 static inline struct anon_vma *anon_vma_alloc(void) in anon_vma_alloc()
91 struct anon_vma *anon_vma; in anon_vma_alloc() local
93 anon_vma = kmem_cache_alloc(anon_vma_cachep, GFP_KERNEL); in anon_vma_alloc()
94 if (anon_vma) { in anon_vma_alloc()
95 atomic_set(&anon_vma->refcount, 1); in anon_vma_alloc()
96 anon_vma->num_children = 0; in anon_vma_alloc()
97 anon_vma->num_active_vmas = 0; in anon_vma_alloc()
98 anon_vma->parent = anon_vma; in anon_vma_alloc()
103 anon_vma->root = anon_vma; in anon_vma_alloc()
106 return anon_vma; in anon_vma_alloc()
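The hits above show anon_vma_alloc()'s invariants: a fresh anon_vma is born with refcount 1 and is its own parent and root until anon_vma_fork() re-points those fields. A minimal userspace sketch of the same pattern, with illustrative (non-kernel) names:

```c
/* Userspace model of the anon_vma_alloc() invariants above: a new
 * object starts with refcount 1 and is self-parented and self-rooted.
 * All names here are illustrative, not the kernel's. */
#include <stdatomic.h>
#include <stdlib.h>

struct avma {
	atomic_int refcount;
	unsigned long num_children;
	unsigned long num_active_vmas;
	struct avma *parent;
	struct avma *root;
};

static struct avma *avma_alloc(void)
{
	struct avma *av = malloc(sizeof(*av));

	if (av) {
		atomic_store(&av->refcount, 1);  /* caller owns one reference */
		av->num_children = 0;
		av->num_active_vmas = 0;
		av->parent = av;                 /* self-parent until fork */
		av->root = av;                   /* self-root; fork shares the parent's */
	}
	return av;
}
```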
109 static inline void anon_vma_free(struct anon_vma *anon_vma) in anon_vma_free() argument
111 VM_BUG_ON(atomic_read(&anon_vma->refcount)); in anon_vma_free()
131 if (rwsem_is_locked(&anon_vma->root->rwsem)) { in anon_vma_free()
132 anon_vma_lock_write(anon_vma); in anon_vma_free()
133 anon_vma_unlock_write(anon_vma); in anon_vma_free()
136 kmem_cache_free(anon_vma_cachep, anon_vma); in anon_vma_free()
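anon_vma_free() runs once the refcount has already dropped to zero (hence the VM_BUG_ON at line 111). The lock/unlock pair at lines 131-133 waits out racing readers: a lookup may have observed the object just before the count hit zero and still be inside the root rwsem, so the freeing side briefly acquires and releases the lock before handing the memory back. In the kernel this is safe only together with SLAB_TYPESAFE_BY_RCU (see anon_vma_init() below); the pthread analogue here just shows the drain idiom:

```c
/* Sketch of the drain-before-free idiom in anon_vma_free(): take and
 * release the object's lock once so any straggling lock holders finish
 * before the memory is reused. Illustrative userspace analogue. */
#include <pthread.h>
#include <stdlib.h>

struct obj {
	pthread_rwlock_t lock;
};

static void obj_free(struct obj *o)
{
	/* Returns only after concurrent holders of o->lock have drained;
	 * the kernel skips this when rwsem_is_locked() says it's idle. */
	pthread_rwlock_wrlock(&o->lock);
	pthread_rwlock_unlock(&o->lock);

	pthread_rwlock_destroy(&o->lock);
	free(o);
}
```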
151 struct anon_vma *anon_vma) in anon_vma_chain_link() argument
154 avc->anon_vma = anon_vma; in anon_vma_chain_link()
156 anon_vma_interval_tree_insert(avc, &anon_vma->rb_root); in anon_vma_chain_link()
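anon_vma_chain (the avc above) is the join node of a many-to-many relationship: each chain sits on its VMA's list and, via lines 154/156, in its anon_vma's interval tree. A types-only sketch of that shape (userspace stand-ins, not the kernel structs):

```c
/* Shape of the linkage that anon_vma_chain_link() maintains: one chain
 * node ties a VMA (list membership) to an anon_vma (interval-tree
 * membership). Illustrative stand-in types. */
struct list_node { struct list_node *prev, *next; };

struct avma_stub {
	void *rb_root;                   /* interval tree of chains, keyed by pgoff */
};

struct vma_stub {
	struct list_node anon_vma_chain; /* all chains attached to this VMA */
	struct avma_stub *anon_vma;
};

struct avma_chain {
	struct vma_stub *vma;
	struct avma_stub *anon_vma;      /* avc->anon_vma = anon_vma (line 154) */
	struct list_node same_vma;       /* node on vma->anon_vma_chain */
	/* plus an rb node linking into anon_vma->rb_root (line 156) */
};
```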
190 struct anon_vma *anon_vma, *allocated; in __anon_vma_prepare() local
199 anon_vma = find_mergeable_anon_vma(vma); in __anon_vma_prepare()
201 if (!anon_vma) { in __anon_vma_prepare()
202 anon_vma = anon_vma_alloc(); in __anon_vma_prepare()
203 if (unlikely(!anon_vma)) in __anon_vma_prepare()
205 anon_vma->num_children++; /* self-parent link for new root */ in __anon_vma_prepare()
206 allocated = anon_vma; in __anon_vma_prepare()
209 anon_vma_lock_write(anon_vma); in __anon_vma_prepare()
212 if (likely(!vma->anon_vma)) { in __anon_vma_prepare()
213 vma->anon_vma = anon_vma; in __anon_vma_prepare()
214 anon_vma_chain_link(vma, avc, anon_vma); in __anon_vma_prepare()
215 anon_vma->num_active_vmas++; in __anon_vma_prepare()
220 anon_vma_unlock_write(anon_vma); in __anon_vma_prepare()
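__anon_vma_prepare() is the classic optimistic-allocation dance: reuse a mergeable neighbour's anon_vma if possible, otherwise allocate outside the lock, then re-check vma->anon_vma under the write lock and install only if no racing fault got there first; `allocated` remembers whether the loser must free its spare copy. A hedged userspace sketch:

```c
/* Allocate-outside-lock / recheck-under-lock pattern from
 * __anon_vma_prepare(). Names are illustrative. */
#include <pthread.h>
#include <stdlib.h>

static pthread_mutex_t install_lock = PTHREAD_MUTEX_INITIALIZER;

struct ctx { void *resource; };

static int ctx_prepare(struct ctx *c)
{
	void *allocated = NULL;
	void *res = NULL;         /* stand-in for find_mergeable_anon_vma() */

	if (!res) {
		res = malloc(64); /* allocate before taking the lock */
		if (!res)
			return -1;
		allocated = res;
	}

	pthread_mutex_lock(&install_lock);
	if (!c->resource) {       /* re-check: did a racer beat us here? */
		c->resource = res;
		allocated = NULL; /* installed, so nothing to free */
	}
	pthread_mutex_unlock(&install_lock);

	free(allocated);          /* NULL if we won; our spare copy if we lost */
	return 0;
}
```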
243 static inline struct anon_vma *lock_anon_vma_root(struct anon_vma *root, struct anon_vma *anon_vma) in lock_anon_vma_root() argument
245 struct anon_vma *new_root = anon_vma->root; in lock_anon_vma_root()
255 static inline void unlock_anon_vma_root(struct anon_vma *root) in unlock_anon_vma_root()
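lock_anon_vma_root()/unlock_anon_vma_root() implement lock coarsening for the loops in anon_vma_clone() and unlink_anon_vmas(): the caller keeps one root rwsem held across iterations and only switches locks when a chain's root differs from the one currently held. The idiom, sketched with pthreads:

```c
/* Lock-coarsening idiom behind lock_anon_vma_root(): keep one lock held
 * while iterating, switching only when the next item needs a different
 * one. Illustrative pthread version. */
#include <pthread.h>
#include <stddef.h>

static pthread_mutex_t *lock_root(pthread_mutex_t *held, pthread_mutex_t *want)
{
	if (want != held) {
		if (held)
			pthread_mutex_unlock(held);
		pthread_mutex_lock(want);
	}
	return want;              /* the lock the caller now holds */
}

static void unlock_root(pthread_mutex_t *held)
{
	if (held)
		pthread_mutex_unlock(held);
}
```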
282 struct anon_vma *root = NULL; in anon_vma_clone()
285 struct anon_vma *anon_vma; in anon_vma_clone() local
295 anon_vma = pavc->anon_vma; in anon_vma_clone()
296 root = lock_anon_vma_root(root, anon_vma); in anon_vma_clone()
297 anon_vma_chain_link(dst, avc, anon_vma); in anon_vma_clone()
306 if (!dst->anon_vma && src->anon_vma && in anon_vma_clone()
307 anon_vma->num_children < 2 && in anon_vma_clone()
308 anon_vma->num_active_vmas == 0) in anon_vma_clone()
309 dst->anon_vma = anon_vma; in anon_vma_clone()
311 if (dst->anon_vma) in anon_vma_clone()
312 dst->anon_vma->num_active_vmas++; in anon_vma_clone()
323 dst->anon_vma = NULL; in anon_vma_clone()
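In anon_vma_clone(), lines 306-309 are an optimization for the fork path: if the source tree still contains an anon_vma with no active VMAs and fewer than two children, the new VMA adopts it directly instead of forcing anon_vma_fork() to allocate another one; the `< 2` allows for the self-parent link counted at line 205. On the error path, line 323 resets dst->anon_vma so unlink_anon_vmas() can clean up. The reuse predicate, in isolation:

```c
/* The reuse check from anon_vma_clone() (lines 306-309), extracted.
 * Illustrative stand-in type. */
struct avma_counts {
	unsigned long num_children;
	unsigned long num_active_vmas;
};

static int avma_reusable(const struct avma_counts *av)
{
	/* "< 2" because a root anon_vma's self-parent link already
	 * counts as one child (see line 205). */
	return av->num_children < 2 && av->num_active_vmas == 0;
}
```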
336 struct anon_vma *anon_vma; in anon_vma_fork() local
340 if (!pvma->anon_vma) in anon_vma_fork()
344 vma->anon_vma = NULL; in anon_vma_fork()
355 if (vma->anon_vma) in anon_vma_fork()
359 anon_vma = anon_vma_alloc(); in anon_vma_fork()
360 if (!anon_vma) in anon_vma_fork()
362 anon_vma->num_active_vmas++; in anon_vma_fork()
371 anon_vma->root = pvma->anon_vma->root; in anon_vma_fork()
372 anon_vma->parent = pvma->anon_vma; in anon_vma_fork()
378 get_anon_vma(anon_vma->root); in anon_vma_fork()
380 vma->anon_vma = anon_vma; in anon_vma_fork()
381 anon_vma_lock_write(anon_vma); in anon_vma_fork()
382 anon_vma_chain_link(vma, avc, anon_vma); in anon_vma_fork()
383 anon_vma->parent->num_children++; in anon_vma_fork()
384 anon_vma_unlock_write(anon_vma); in anon_vma_fork()
389 put_anon_vma(anon_vma); in anon_vma_fork()
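anon_vma_fork() first clones the parent's chains, then gives the child its own anon_vma that shares the parent's root (and therefore the root's rwsem). get_anon_vma(anon_vma->root) at line 378 pins that shared root so the lock the child relies on cannot be freed first. The root-sharing step, sketched:

```c
/* Root-sharing step of anon_vma_fork(): the child keeps its own
 * identity but joins the parent's lock domain, pinning the root with a
 * reference. Illustrative names. */
#include <stdatomic.h>

struct node {
	atomic_int refcount;
	struct node *parent;
	struct node *root;
	unsigned long num_children;
};

static void node_adopt(struct node *child, struct node *parent)
{
	child->root = parent->root;      /* one rwsem covers the whole tree */
	child->parent = parent;          /* record the fork hierarchy */
	atomic_fetch_add(&child->root->refcount, 1); /* get_anon_vma(root) */
	parent->num_children++;          /* line 383, under the write lock */
}
```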
398 struct anon_vma *root = NULL; in unlink_anon_vmas()
405 struct anon_vma *anon_vma = avc->anon_vma; in unlink_anon_vmas() local
407 root = lock_anon_vma_root(root, anon_vma); in unlink_anon_vmas()
408 anon_vma_interval_tree_remove(avc, &anon_vma->rb_root); in unlink_anon_vmas()
414 if (RB_EMPTY_ROOT(&anon_vma->rb_root.rb_root)) { in unlink_anon_vmas()
415 anon_vma->parent->num_children--; in unlink_anon_vmas()
422 if (vma->anon_vma) { in unlink_anon_vmas()
423 vma->anon_vma->num_active_vmas--; in unlink_anon_vmas()
429 vma->anon_vma = NULL; in unlink_anon_vmas()
439 struct anon_vma *anon_vma = avc->anon_vma; in unlink_anon_vmas() local
441 VM_WARN_ON(anon_vma->num_children); in unlink_anon_vmas()
442 VM_WARN_ON(anon_vma->num_active_vmas); in unlink_anon_vmas()
443 put_anon_vma(anon_vma); in unlink_anon_vmas()
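unlink_anon_vmas() tears down in two passes: the first detaches every chain from its interval tree under the coarsened root lock, decrementing the parent's num_children when a tree empties; only after the lock is dropped does the second pass put the references, since the final put may free. The two-pass shape:

```c
/* Two-pass teardown shape of unlink_anon_vmas(): detach under the lock,
 * drop references outside it so freeing never runs with the root lock
 * held. Illustrative userspace version. */
#include <pthread.h>

struct chain { struct chain *next; void *target; };

static void unlink_all(struct chain *head, pthread_mutex_t *root_lock,
		       void (*put)(void *))
{
	struct chain *c;

	pthread_mutex_lock(root_lock);
	for (c = head; c; c = c->next) {
		/* pass 1: remove c->target from its tree/list here */
	}
	pthread_mutex_unlock(root_lock);

	for (c = head; c; c = c->next)
		put(c->target);   /* pass 2: may free the target */
}
```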
452 struct anon_vma *anon_vma = data; in anon_vma_ctor() local
454 init_rwsem(&anon_vma->rwsem); in anon_vma_ctor()
455 atomic_set(&anon_vma->refcount, 0); in anon_vma_ctor()
456 anon_vma->rb_root = RB_ROOT_CACHED; in anon_vma_ctor()
461 anon_vma_cachep = kmem_cache_create("anon_vma", sizeof(struct anon_vma), in anon_vma_init()
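anon_vma_ctor() initializes only the fields a concurrent reader might touch: the rwsem, a zero refcount, an empty tree. That matters because anon_vma_cachep is created with SLAB_TYPESAFE_BY_RCU: an RCU-protected lookup may race with free and reallocation, so these fields must be valid on any object the cache has ever handed out. A userspace sketch of such a constructor (hypothetical pool API; the kernel's is kmem_cache_create()):

```c
/* Constructor for a ctor-backed cache, modelled on anon_vma_ctor():
 * fields that racing readers may touch are made sane once, when the
 * backing memory first enters the pool. Illustrative types. */
#include <pthread.h>
#include <stdatomic.h>

struct av {
	pthread_rwlock_t rwsem;
	atomic_int refcount;
	void *rb_root;
};

static void av_ctor(void *data)
{
	struct av *a = data;

	pthread_rwlock_init(&a->rwsem, NULL);
	atomic_store(&a->refcount, 0); /* allocation, not the ctor, sets 1 */
	a->rb_root = NULL;             /* empty interval tree */
}
```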
492 struct anon_vma *folio_get_anon_vma(struct folio *folio) in folio_get_anon_vma()
494 struct anon_vma *anon_vma = NULL; in folio_get_anon_vma() local
504 anon_vma = (struct anon_vma *) (anon_mapping - PAGE_MAPPING_ANON); in folio_get_anon_vma()
505 if (!atomic_inc_not_zero(&anon_vma->refcount)) { in folio_get_anon_vma()
506 anon_vma = NULL; in folio_get_anon_vma()
519 put_anon_vma(anon_vma); in folio_get_anon_vma()
525 return anon_vma; in folio_get_anon_vma()
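folio_get_anon_vma() untags folio->mapping (line 504 subtracts PAGE_MAPPING_ANON, the flag bit stored in the pointer) and then takes a reference only if the count is still nonzero. atomic_inc_not_zero() is the standard "grab a ref on an object that may be dying" primitive for RCU-protected lookups; in C11 terms it is a compare-and-swap loop:

```c
/* C11 sketch of the atomic_inc_not_zero() step in folio_get_anon_vma():
 * acquire a reference only if the object still has one, never
 * resurrecting an object whose count already hit zero. */
#include <stdatomic.h>
#include <stdbool.h>

static bool inc_not_zero(atomic_int *ref)
{
	int old = atomic_load(ref);

	while (old != 0) {
		if (atomic_compare_exchange_weak(ref, &old, old + 1))
			return true;  /* got a reference */
		/* CAS failure reloaded 'old'; retry */
	}
	return false;                 /* object is on its way out */
}
```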
536 struct anon_vma *folio_lock_anon_vma_read(struct folio *folio, in folio_lock_anon_vma_read()
539 struct anon_vma *anon_vma = NULL; in folio_lock_anon_vma_read() local
540 struct anon_vma *root_anon_vma; in folio_lock_anon_vma_read()
550 anon_vma = (struct anon_vma *) (anon_mapping - PAGE_MAPPING_ANON); in folio_lock_anon_vma_read()
551 root_anon_vma = READ_ONCE(anon_vma->root); in folio_lock_anon_vma_read()
560 anon_vma = NULL; in folio_lock_anon_vma_read()
566 anon_vma = NULL; in folio_lock_anon_vma_read()
572 if (!atomic_inc_not_zero(&anon_vma->refcount)) { in folio_lock_anon_vma_read()
573 anon_vma = NULL; in folio_lock_anon_vma_read()
579 put_anon_vma(anon_vma); in folio_lock_anon_vma_read()
585 anon_vma_lock_read(anon_vma); in folio_lock_anon_vma_read()
587 if (atomic_dec_and_test(&anon_vma->refcount)) { in folio_lock_anon_vma_read()
593 anon_vma_unlock_read(anon_vma); in folio_lock_anon_vma_read()
594 __put_anon_vma(anon_vma); in folio_lock_anon_vma_read()
595 anon_vma = NULL; in folio_lock_anon_vma_read()
598 return anon_vma; in folio_lock_anon_vma_read()
602 return anon_vma; in folio_lock_anon_vma_read()
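folio_lock_anon_vma_read() layers a fast path over a slow one: under RCU it tries the root rwsem with a trylock and re-checks that the folio is still mapped (a parallel unmap can clear that at any time); if the trylock fails it pins the anon_vma with inc_not_zero, leaves RCU, takes the sleeping read lock, and, if its pin turned out to be the last reference (line 587), backs out and frees via __put_anon_vma(). A hedged pthread sketch of the ladder, reusing inc_not_zero() from the previous block:

```c
/* Fast/slow locking ladder modelled on folio_lock_anon_vma_read():
 * cheap trylock first, else pin with a reference and block. The
 * kernel's fast path also runs under RCU and re-checks the folio;
 * this userspace analogue shows only the lock/refcount interplay. */
#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>

struct locked_obj {
	pthread_rwlock_t rwsem;
	atomic_int refcount;
};

bool inc_not_zero(atomic_int *ref);       /* see the sketch above */

static struct locked_obj *lock_for_read(struct locked_obj *o)
{
	if (pthread_rwlock_tryrdlock(&o->rwsem) == 0)
		return o;                 /* fast path: uncontended */

	if (!inc_not_zero(&o->refcount))
		return NULL;              /* already dying; caller retries lookup */

	pthread_rwlock_rdlock(&o->rwsem); /* slow path: may sleep */
	if (atomic_fetch_sub(&o->refcount, 1) == 1) {
		/* our pin was the last reference: unlock and report failure,
		 * as lines 587-595 do before __put_anon_vma() */
		pthread_rwlock_unlock(&o->rwsem);
		return NULL;
	}
	return o;                         /* locked for read, pin released */
}
```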
751 struct anon_vma *page__anon_vma = folio_anon_vma(folio); in page_address_in_vma()
756 if (!vma->anon_vma || !page__anon_vma || in page_address_in_vma()
757 vma->anon_vma->root != page__anon_vma->root) in page_address_in_vma()
1100 void *anon_vma = vma->anon_vma; in page_move_anon_rmap() local
1104 VM_BUG_ON_VMA(!anon_vma, vma); in page_move_anon_rmap()
1106 anon_vma += PAGE_MAPPING_ANON; in page_move_anon_rmap()
1112 WRITE_ONCE(folio->mapping, anon_vma); in page_move_anon_rmap()
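Lines 1100-1113 show the pointer-tagging trick behind the anon rmap: folio->mapping stores the anon_vma pointer with PAGE_MAPPING_ANON (a low bit) added, so one field can hold either a file's address_space or a tagged anon_vma, and line 504 recovers the pointer by subtracting the flag. A self-contained demo of the round trip:

```c
/* Demo of low-bit pointer tagging as used for folio->mapping: struct
 * alignment leaves bit 0 of a valid pointer free to mean "this is an
 * anon_vma, not an address_space". */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define MAPPING_ANON 0x1UL        /* stand-in for PAGE_MAPPING_ANON */

struct tagged { long dummy; };    /* aligned, so bit 0 is always clear */

int main(void)
{
	struct tagged t;
	/* tag: "anon_vma += PAGE_MAPPING_ANON" (line 1106) */
	uintptr_t mapping = (uintptr_t)&t + MAPPING_ANON;

	assert(mapping & MAPPING_ANON);                 /* detectable as anon */
	/* untag: "anon_mapping - PAGE_MAPPING_ANON" (line 504) */
	struct tagged *back = (struct tagged *)(mapping - MAPPING_ANON);
	assert(back == &t);
	puts("tag/untag round-trip ok");
	return 0;
}
```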
1126 struct anon_vma *anon_vma = vma->anon_vma; in __page_set_anon_rmap() local
1128 BUG_ON(!anon_vma); in __page_set_anon_rmap()
1139 anon_vma = anon_vma->root; in __page_set_anon_rmap()
1147 anon_vma = (void *) anon_vma + PAGE_MAPPING_ANON; in __page_set_anon_rmap()
1148 WRITE_ONCE(page->mapping, (struct address_space *) anon_vma); in __page_set_anon_rmap()
1176 VM_BUG_ON_FOLIO(folio_anon_vma(folio)->root != vma->anon_vma->root, in __page_check_anon_rmap()
2371 void __put_anon_vma(struct anon_vma *anon_vma) in __put_anon_vma() argument
2373 struct anon_vma *root = anon_vma->root; in __put_anon_vma()
2375 anon_vma_free(anon_vma); in __put_anon_vma()
2376 if (root != anon_vma && atomic_dec_and_test(&root->refcount)) in __put_anon_vma()
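__put_anon_vma() is the cascaded release matching the pin taken at line 378: freeing a non-root anon_vma also drops the reference it held on its root, so the root outlives every descendant. In sketch form:

```c
/* Cascaded put modelled on __put_anon_vma(): releasing a child also
 * drops the reference it pinned on the shared root. Illustrative. */
#include <stdatomic.h>
#include <stdlib.h>

struct rnode {
	atomic_int refcount;
	struct rnode *root;
};

static void rnode_put_final(struct rnode *n)
{
	struct rnode *root = n->root;
	int self_rooted = (root == n);   /* compare before n is freed */

	free(n);                          /* anon_vma_free(anon_vma) */
	if (!self_rooted && atomic_fetch_sub(&root->refcount, 1) == 1)
		free(root);               /* last pin on the root is gone */
}
```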
2380 static struct anon_vma *rmap_walk_anon_lock(struct folio *folio, in rmap_walk_anon_lock()
2383 struct anon_vma *anon_vma; in rmap_walk_anon_lock() local
2394 anon_vma = folio_anon_vma(folio); in rmap_walk_anon_lock()
2395 if (!anon_vma) in rmap_walk_anon_lock()
2398 if (anon_vma_trylock_read(anon_vma)) in rmap_walk_anon_lock()
2402 anon_vma = NULL; in rmap_walk_anon_lock()
2407 anon_vma_lock_read(anon_vma); in rmap_walk_anon_lock()
2409 return anon_vma; in rmap_walk_anon_lock()
2424 struct anon_vma *anon_vma; in rmap_walk_anon() local
2429 anon_vma = folio_anon_vma(folio); in rmap_walk_anon()
2431 VM_BUG_ON_FOLIO(!anon_vma, folio); in rmap_walk_anon()
2433 anon_vma = rmap_walk_anon_lock(folio, rwc); in rmap_walk_anon()
2435 if (!anon_vma) in rmap_walk_anon()
2440 anon_vma_interval_tree_foreach(avc, &anon_vma->rb_root, in rmap_walk_anon()
2458 anon_vma_unlock_read(anon_vma); in rmap_walk_anon()
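rmap_walk_anon() iterates the anon_vma's interval tree for every VMA whose page-offset range overlaps the folio, invoking the walk control's callback on each; the tree is what lets one anon_vma serve many forked address spaces while skipping VMAs that cannot map the page. The overlap filter, shown with a plain list where the kernel uses an augmented rb-tree:

```c
/* Overlap filter behind anon_vma_interval_tree_foreach(): visit only
 * VMAs whose pgoff range intersects the folio's. The kernel's interval
 * tree makes this O(log n + matches); a list shows the predicate. */
struct vma_range {
	unsigned long pgoff_start, pgoff_end;   /* inclusive page offsets */
	struct vma_range *next;
};

static void walk(struct vma_range *head, unsigned long first,
		 unsigned long last, void (*rmap_one)(struct vma_range *))
{
	for (struct vma_range *v = head; v; v = v->next)
		if (v->pgoff_start <= last && first <= v->pgoff_end)
			rmap_one(v);      /* this VMA can map the folio */
}
```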
2554 struct anon_vma *anon_vma = vma->anon_vma; in hugepage_add_anon_rmap() local
2558 BUG_ON(!anon_vma); in hugepage_add_anon_rmap()