Searched refs:tree_entry (Results 1 – 4 of 4) sorted by relevance
/linux-5.19.10/kernel/locking/
rtmutex_common.h
     34  struct rb_node tree_entry;                                                member
    108  return rb_entry(leftmost, struct rt_mutex_waiter, tree_entry) == waiter;  in rt_mutex_waiter_is_top_waiter()
    117  w = rb_entry(leftmost, struct rt_mutex_waiter, tree_entry);               in rt_mutex_top_waiter()
    194  RB_CLEAR_NODE(&waiter->tree_entry);                                       in rt_mutex_init_waiter()
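The rtmutex hits show the basic embedding pattern: struct rt_mutex_waiter carries a struct rb_node named tree_entry, the lock keeps its waiters in a cached rbtree, rb_entry() (a container_of() wrapper) turns the leftmost node back into the enclosing waiter, and RB_CLEAR_NODE() marks a node as not enqueued. A minimal sketch of that pattern, with an illustrative struct waiter standing in for rt_mutex_waiter and all locking omitted:

    #include <linux/rbtree.h>

    struct waiter {                        /* illustrative stand-in for struct rt_mutex_waiter */
        struct rb_node tree_entry;         /* node embedded in the lock's waiter tree */
        int prio;
    };

    /* The leftmost node of the cached rbtree is the top (highest-priority) waiter. */
    static struct waiter *top_waiter(struct rb_root_cached *waiters)
    {
        struct rb_node *leftmost = rb_first_cached(waiters);

        return leftmost ? rb_entry(leftmost, struct waiter, tree_entry) : NULL;
    }

    static void init_waiter(struct waiter *w)
    {
        RB_CLEAR_NODE(&w->tree_entry);     /* not on any tree yet */
    }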
ww_mutex.h
     99  return rb_entry(n, struct rt_mutex_waiter, tree_entry);  in __ww_waiter_first()
    105  struct rb_node *n = rb_next(&w->tree_entry);             in __ww_waiter_next()
    108  return rb_entry(n, struct rt_mutex_waiter, tree_entry);  in __ww_waiter_next()
    114  struct rb_node *n = rb_prev(&w->tree_entry);             in __ww_waiter_prev()
    117  return rb_entry(n, struct rt_mutex_waiter, tree_entry);  in __ww_waiter_prev()
    126  return rb_entry(n, struct rt_mutex_waiter, tree_entry);  in __ww_waiter_last()
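ww_mutex.h walks the same waiter tree in sorted order with rb_next()/rb_prev(), converting each node back with rb_entry(). A hedged sketch of that forward iteration, reusing the illustrative struct waiter from the previous sketch:

    #include <linux/rbtree.h>

    struct waiter {                        /* same illustrative stand-in as above */
        struct rb_node tree_entry;
        int prio;
    };

    /* First waiter in tree order, or NULL if the tree is empty. */
    static struct waiter *waiter_first(struct rb_root_cached *waiters)
    {
        struct rb_node *n = rb_first_cached(waiters);

        return n ? rb_entry(n, struct waiter, tree_entry) : NULL;
    }

    /* In-order successor of @w, or NULL at the end of the tree. */
    static struct waiter *waiter_next(struct waiter *w)
    {
        struct rb_node *n = rb_next(&w->tree_entry);

        return n ? rb_entry(n, struct waiter, tree_entry) : NULL;
    }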
rtmutex.c
    369  rb_entry((node), struct rt_mutex_waiter, tree_entry)
    400  rb_add_cached(&waiter->tree_entry, &lock->waiters, __waiter_less);  in rt_mutex_enqueue()
    406  if (RB_EMPTY_NODE(&waiter->tree_entry))                             in rt_mutex_dequeue()
    409  rb_erase_cached(&waiter->tree_entry, &lock->waiters);               in rt_mutex_dequeue()
    410  RB_CLEAR_NODE(&waiter->tree_entry);                                 in rt_mutex_dequeue()
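rtmutex.c inserts and removes waiters through the rb_add_cached()/rb_erase_cached() helpers, passing a less() comparator, and uses RB_EMPTY_NODE()/RB_CLEAR_NODE() so that dequeueing a waiter that is not on the tree is a no-op. A sketch of that enqueue/dequeue pair under the same illustrative names (ordering by a plain integer prio, whereas the kernel compares full waiter priorities):

    #include <linux/rbtree.h>
    #include <linux/types.h>

    struct waiter {                        /* same illustrative stand-in as above */
        struct rb_node tree_entry;
        int prio;
    };

    #define node_to_waiter(node) \
        rb_entry((node), struct waiter, tree_entry)

    /* Comparator for rb_add_cached(): smaller prio sorts toward the left. */
    static bool waiter_less(struct rb_node *a, const struct rb_node *b)
    {
        return node_to_waiter(a)->prio < node_to_waiter(b)->prio;
    }

    static void waiter_enqueue(struct rb_root_cached *waiters, struct waiter *w)
    {
        rb_add_cached(&w->tree_entry, waiters, waiter_less);
    }

    static void waiter_dequeue(struct rb_root_cached *waiters, struct waiter *w)
    {
        if (RB_EMPTY_NODE(&w->tree_entry)) /* never enqueued, or already removed */
            return;

        rb_erase_cached(&w->tree_entry, waiters);
        RB_CLEAR_NODE(&w->tree_entry);     /* keep a second dequeue harmless */
    }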
/linux-5.19.10/fs/btrfs/
extent_io.c
    135  struct tree_entry {                                       struct
    388  struct tree_entry *entry;                                 in tree_insert()
    399  entry = rb_entry(parent, struct tree_entry, rb_node);     in tree_insert()
    443  struct tree_entry *entry;                                 in __etree_search()
    444  struct tree_entry *prev_entry = NULL;                     in __etree_search()
    448  entry = rb_entry(prev, struct tree_entry, rb_node);       in __etree_search()
    468  prev_entry = rb_entry(prev, struct tree_entry, rb_node);  in __etree_search()
    475  prev_entry = rb_entry(prev, struct tree_entry, rb_node);  in __etree_search()
    478  prev_entry = rb_entry(prev, struct tree_entry, rb_node);  in __etree_search()
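The btrfs hit is a different user of the same data structure: extent_io.c declares its own local struct tree_entry (an extent range with an embedded rb_node) and open-codes the descent through the rb_left/rb_right links for insertion and search instead of using the rb_add*() helpers. A rough sketch of such an open-coded insert keyed by start offset; the field names and the exact comparison are illustrative, not a copy of the btrfs code:

    #include <linux/rbtree.h>
    #include <linux/types.h>

    struct tree_entry {                    /* illustrative, like the local struct in extent_io.c */
        u64 start;
        u64 end;
        struct rb_node rb_node;
    };

    /* Walk down from the root to the link where @ins belongs, then link and rebalance. */
    static struct rb_node *tree_insert(struct rb_root *root, struct tree_entry *ins)
    {
        struct rb_node **p = &root->rb_node;
        struct rb_node *parent = NULL;
        struct tree_entry *entry;

        while (*p) {
            parent = *p;
            entry = rb_entry(parent, struct tree_entry, rb_node);

            if (ins->start < entry->start)
                p = &(*p)->rb_left;
            else if (ins->start > entry->start)
                p = &(*p)->rb_right;
            else
                return parent;             /* an entry with this start already exists */
        }

        rb_link_node(&ins->rb_node, parent, p);
        rb_insert_color(&ins->rb_node, root);
        return NULL;
    }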