/linux-6.6.21/tools/perf/util/ |
D | rblist.c |
    17 bool leftmost = true; in rblist__add_node() local
    29 leftmost = false; in rblist__add_node()
    40 rb_insert_color_cached(new_node, &rblist->entries, leftmost); in rblist__add_node()
    59 bool leftmost = true; in __rblist__findnew() local
    71 leftmost = false; in __rblist__findnew()
    82 &rblist->entries, leftmost); in __rblist__findnew()
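These perf hits all follow the same open-coded pattern: descend from the root, clear a leftmost flag as soon as the walk goes right, and hand that flag to rb_insert_color_cached() so the tree's cached leftmost pointer stays correct. A minimal sketch of the pattern, using a hypothetical struct my_node keyed by an integer rather than the actual rblist code:

#include <linux/rbtree.h>
#include <linux/types.h>

/* Hypothetical keyed node, for illustration only. */
struct my_node {
        struct rb_node rb;
        u64 key;
};

static void my_insert(struct rb_root_cached *root, struct my_node *new)
{
        struct rb_node **p = &root->rb_root.rb_node;
        struct rb_node *parent = NULL;
        bool leftmost = true;

        while (*p) {
                struct my_node *cur = rb_entry(*p, struct my_node, rb);

                parent = *p;
                if (new->key < cur->key) {
                        p = &(*p)->rb_left;
                } else {
                        /* Going right even once means @new cannot be
                         * the smallest element in the tree. */
                        p = &(*p)->rb_right;
                        leftmost = false;
                }
        }

        rb_link_node(&new->rb, parent, p);
        rb_insert_color_cached(&new->rb, root, leftmost);
}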
|
D | hist.c |
    601 bool leftmost = true; in hists__findnew_entry() local
    651 leftmost = false; in hists__findnew_entry()
    664 rb_insert_color_cached(&he->rb_node_in, hists->entries_in, leftmost); in hists__findnew_entry()
    1485 bool leftmost = true; in hierarchy_insert_entry() local
    1507 leftmost = false; in hierarchy_insert_entry()
    1542 rb_insert_color_cached(&new->rb_node_in, root, leftmost); in hierarchy_insert_entry()
    1606 bool leftmost = true; in hists__collapse_insert_entry() local
    1643 leftmost = false; in hists__collapse_insert_entry()
    1649 rb_insert_color_cached(&he->rb_node_in, root, leftmost); in hists__collapse_insert_entry()
    1796 bool leftmost = true; in hierarchy_insert_output_entry() local
    [all …]
|
D | srcline.c |
    920 bool leftmost = true; in srcline__tree_insert() local
    938 leftmost = false; in srcline__tree_insert()
    942 rb_insert_color_cached(&node->rb_node, tree, leftmost); in srcline__tree_insert()
    1013 bool leftmost = true; in inlines__tree_insert() local
    1022 leftmost = false; in inlines__tree_insert()
    1026 rb_insert_color_cached(&inlines->rb_node, tree, leftmost); in inlines__tree_insert()
|
/linux-6.6.21/include/linux/ |
D | rbtree.h |
    110 bool leftmost) in rb_insert_color_cached() argument
    112 if (leftmost) in rb_insert_color_cached()
    121 struct rb_node *leftmost = NULL; in rb_erase_cached() local
    124 leftmost = root->rb_leftmost = rb_next(node); in rb_erase_cached()
    128 return leftmost; in rb_erase_cached()
    170 bool leftmost = true; in rb_add_cached() local
    178 leftmost = false; in rb_add_cached()
    183 rb_insert_color_cached(node, tree, leftmost); in rb_add_cached()
    185 return leftmost ? node : NULL; in rb_add_cached()
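The helpers shown above wrap the whole idiom: rb_add_cached() performs the descent and leftmost bookkeeping itself and returns the node only if it became the new cached leftmost, rb_first_cached() reads the cached pointer in O(1), and rb_erase_cached() reports the new leftmost when the erased node was the old one. A hedged sketch of the helper-based version, reusing the hypothetical struct my_node from the previous sketch:

#include <linux/rbtree.h>
#include <linux/printk.h>

static struct rb_root_cached my_tree = RB_ROOT_CACHED;

static bool my_less(struct rb_node *a, const struct rb_node *b)
{
        return rb_entry(a, struct my_node, rb)->key <
               rb_entry(b, struct my_node, rb)->key;
}

static void my_add(struct my_node *new)
{
        /* Non-NULL return means @new is now the cached leftmost. */
        if (rb_add_cached(&new->rb, &my_tree, my_less))
                pr_debug("new minimum: %llu\n", new->key);
}

static struct my_node *my_min(void)
{
        /* O(1): rb_first_cached() just reads root->rb_leftmost. */
        return rb_entry_safe(rb_first_cached(&my_tree), struct my_node, rb);
}

The design choice visible in the hits: callers never maintain root->rb_leftmost by hand; they either pass the leftmost flag to rb_insert_color_cached() or let rb_add_cached() compute it.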
|
D | interval_tree_generic.h |
    44 bool leftmost = true; \
    55 leftmost = false; \
    62 leftmost, &ITPREFIX ## _augment); \
    122 ITSTRUCT *node, *leftmost; \
    144 leftmost = rb_entry(root->rb_leftmost, ITSTRUCT, ITRB); \
    145 if (ITSTART(leftmost) > last) \
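These are the template internals: the generated insert tracks leftmost exactly like the open-coded perf version, and the generated iter_first uses root->rb_leftmost to bail out early when even the smallest stored interval starts beyond the queried range. Users never touch that flag; they only instantiate the template. A sketch of an instantiation with hypothetical names (the macro arguments follow interval_tree_generic.h: struct type, rb_node field, key type, subtree field, start/last accessors, storage class, prefix):

#include <linux/interval_tree_generic.h>
#include <linux/rbtree.h>
#include <linux/types.h>

/* Hypothetical interval record; only the names are made up. */
struct my_range {
        struct rb_node rb;
        u64 start;              /* first covered value */
        u64 last;               /* last covered value, inclusive */
        u64 __subtree_last;     /* maintained by the generated augment code */
};

static inline u64 my_range_start(struct my_range *r) { return r->start; }
static inline u64 my_range_last(struct my_range *r)  { return r->last; }

/* Generates my_itree_insert(), my_itree_remove(),
 * my_itree_iter_first() and my_itree_iter_next(). */
INTERVAL_TREE_DEFINE(struct my_range, rb, u64, __subtree_last,
                     my_range_start, my_range_last, static inline, my_itree)

With a root declared as struct rb_root_cached and initialized to RB_ROOT_CACHED, my_itree_iter_first(&root, a, b) then returns the lowest-start interval overlapping [a, b], taking the leftmost-based early exit shown at lines 144-145 above.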
|
D | timerqueue.h |
    36 struct rb_node *leftmost = rb_first_cached(&head->rb_root); in timerqueue_getnext() local
    38 return rb_entry_safe(leftmost, struct timerqueue_node, node); in timerqueue_getnext()
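timerqueue is the thinnest consumer in this list: the head embeds an rb_root_cached ordered by expiry time, so timerqueue_getnext() is nothing more than a read of the cached leftmost node. A hedged usage sketch with a hypothetical wrapper struct (only the timerqueue_* calls are real API):

#include <linux/timerqueue.h>
#include <linux/ktime.h>

/* Hypothetical per-driver timer record. */
struct my_timer {
        struct timerqueue_node tq;      /* tq.expires is the sort key */
};

static struct timerqueue_head my_timers;        /* timerqueue_init_head() once */

static void my_timer_arm(struct my_timer *t, ktime_t expires)
{
        timerqueue_init(&t->tq);
        t->tq.expires = expires;

        /* Returns true when @t became the earliest-expiring entry,
         * i.e. the new cached leftmost of the underlying rbtree. */
        if (timerqueue_add(&my_timers, &t->tq)) {
                /* a driver would typically reprogram its hardware timer here */
        }
}

static ktime_t my_next_expiry(void)
{
        /* O(1): timerqueue_getnext() only reads the cached leftmost. */
        struct timerqueue_node *next = timerqueue_getnext(&my_timers);

        return next ? next->expires : KTIME_MAX;
}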
|
D | rbtree_augmented.h |
    70 bool leftmost = true; in rb_add_augmented_cached() local
    78 leftmost = false; in rb_add_augmented_cached()
    84 rb_insert_augmented_cached(node, tree, leftmost, augment); in rb_add_augmented_cached()
    86 return leftmost ? node : NULL; in rb_add_augmented_cached()
|
/linux-6.6.21/tools/include/linux/ |
D | interval_tree_generic.h |
    44 bool leftmost = true; \
    55 leftmost = false; \
    62 leftmost, &ITPREFIX ## _augment); \
    122 ITSTRUCT *node, *leftmost; \
    144 leftmost = rb_entry(root->rb_leftmost, ITSTRUCT, ITRB); \
    145 if (ITSTART(leftmost) > last) \
|
D | rbtree.h |
    131 bool leftmost) in rb_insert_color_cached() argument
    133 if (leftmost) in rb_insert_color_cached()
    183 bool leftmost = true; in rb_add_cached() local
    191 leftmost = false; in rb_add_cached()
    196 rb_insert_color_cached(node, tree, leftmost); in rb_add_cached()
|
/linux-6.6.21/kernel/locking/ |
D | rtmutex_common.h |
    123 struct rb_node *leftmost = rb_first_cached(&lock->waiters); in rt_mutex_waiter_is_top_waiter() local
    125 return rb_entry(leftmost, struct rt_mutex_waiter, tree.entry) == waiter; in rt_mutex_waiter_is_top_waiter()
    130 struct rb_node *leftmost = rb_first_cached(&lock->waiters); in rt_mutex_top_waiter() local
    135 if (leftmost) { in rt_mutex_top_waiter()
    136 w = rb_entry(leftmost, struct rt_mutex_waiter, tree.entry); in rt_mutex_top_waiter()
|
/linux-6.6.21/fs/f2fs/ |
D | extent_cache.c |
    201 bool *leftmost) in __lookup_extent_node_ret() argument
    218 *leftmost = true; in __lookup_extent_node_ret()
    228 *leftmost = false; in __lookup_extent_node_ret()
    271 bool leftmost) in __attach_extent_node() argument
    285 rb_insert_color_cached(&en->rb_node, &et->root, leftmost); in __attach_extent_node()
    548 bool leftmost) in __insert_extent_tree() argument
    561 leftmost = true; in __insert_extent_tree()
    572 leftmost = false; in __insert_extent_tree()
    579 en = __attach_extent_node(sbi, et, ei, parent, p, leftmost); in __insert_extent_tree()
    605 bool leftmost = false; in __update_extent_tree_range() local
    [all …]
|
/linux-6.6.21/lib/ |
D | rbtree_test.c |
    53 bool leftmost = true; in insert_cached() local
    61 leftmost = false; in insert_cached()
    66 rb_insert_color_cached(&node->rb, root, leftmost); in insert_cached()
    116 bool leftmost = true; in insert_augmented_cached() local
    127 leftmost = false; in insert_augmented_cached()
    134 leftmost, &augment_callbacks); in insert_augmented_cached()
|
/linux-6.6.21/fs/btrfs/ |
D | delayed-ref.c |
    296 bool leftmost = true; in htree_insert() local
    309 leftmost = false; in htree_insert()
    316 rb_insert_color_cached(node, root, leftmost); in htree_insert()
    327 bool leftmost = true; in tree_insert() local
    340 leftmost = false; in tree_insert()
    347 rb_insert_color_cached(node, root, leftmost); in tree_insert()
|
D | extent_map.c |
    91 bool leftmost = true; in tree_insert() local
    101 leftmost = false; in tree_insert()
    127 rb_insert_color_cached(&em->rb_node, root, leftmost); in tree_insert()
|
/linux-6.6.21/Documentation/scheduler/ |
D | sched-design-CFS.rst |
    72 p->se.vruntime key. CFS picks the "leftmost" task from this tree and sticks to it.
    75 to become the "leftmost task" and thus get on the CPU within a deterministic
    82 becomes the "leftmost task" of the time-ordered rbtree it maintains (plus a
    83 small amount of "granularity" distance relative to the leftmost task so that we
    84 do not over-schedule tasks and trash the cache), then the new leftmost task is
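The documentation's "leftmost task" is literally the cached leftmost node of the runqueue's rb_root_cached, so picking the next entity is an O(1) pointer read rather than a tree walk. A simplified sketch of that pick, paraphrasing the idea rather than copying kernel/sched/fair.c (cfs_rq->tasks_timeline and sched_entity::run_node are the real field names from the scheduler-internal headers; the function name is made up):

/* Simplified: return the runnable entity with the smallest ordering key. */
static struct sched_entity *pick_leftmost_entity(struct cfs_rq *cfs_rq)
{
        struct rb_node *left = rb_first_cached(&cfs_rq->tasks_timeline);

        return left ? rb_entry(left, struct sched_entity, run_node) : NULL;
}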
|
/linux-6.6.21/drivers/gpu/drm/ |
D | drm_mm.c |
    170 bool leftmost; in drm_mm_interval_tree_add_node() local
    187 leftmost = false; in drm_mm_interval_tree_add_node()
    191 leftmost = true; in drm_mm_interval_tree_add_node()
    203 leftmost = false; in drm_mm_interval_tree_add_node()
    208 rb_insert_augmented_cached(&node->rb, &mm->interval_tree, leftmost, in drm_mm_interval_tree_add_node()
|
/linux-6.6.21/Documentation/userspace-api/media/v4l/ |
D | pixfmt-intro.rst |
    29 leftmost pixel of the topmost row. Following that is the pixel
    34 leftmost pixel of the second row from the top, and so on. The last row
|
D | pixfmt-v4l2-mplane.rst | 43 - Distance in bytes between the leftmost pixels in two adjacent
|
/linux-6.6.21/net/sched/ |
D | sch_etf.c |
    168 bool leftmost = true; in etf_enqueue_timesortedlist() local
    183 leftmost = false; in etf_enqueue_timesortedlist()
    189 rb_insert_color_cached(&nskb->rbnode, &q->head, leftmost); in etf_enqueue_timesortedlist()
|
/linux-6.6.21/kernel/sched/ |
D | deadline.c |
    603 struct rb_node *leftmost; in enqueue_pushable_dl_task() local
    607 leftmost = rb_add_cached(&p->pushable_dl_tasks, in enqueue_pushable_dl_task()
    610 if (leftmost) in enqueue_pushable_dl_task()
    618 struct rb_node *leftmost; in dequeue_pushable_dl_task() local
    623 leftmost = rb_erase_cached(&p->pushable_dl_tasks, root); in dequeue_pushable_dl_task()
    624 if (leftmost) in dequeue_pushable_dl_task()
    625 dl_rq->earliest_dl.next = __node_2_pdl(leftmost)->dl.deadline; in dequeue_pushable_dl_task()
    1481 struct rb_node *leftmost = rb_first_cached(&dl_rq->root); in dec_dl_deadline() local
    1482 struct sched_dl_entity *entry = __node_2_dle(leftmost); in dec_dl_deadline()
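The deadline-scheduler hits show why both helpers return the leftmost node: it is exactly what is needed to keep a cached "earliest" value in sync without re-walking the tree. A hedged sketch of the same bookkeeping, reusing the hypothetical my_tree/my_less/struct my_node from the rbtree.h sketch above (my_earliest_key stands in for something like dl_rq->earliest_dl.next):

/* Cached copy of the smallest key currently in my_tree. */
static u64 my_earliest_key;

static void my_add_tracked(struct my_node *new)
{
        /* rb_add_cached() returns @new only if it became the leftmost. */
        if (rb_add_cached(&new->rb, &my_tree, my_less))
                my_earliest_key = new->key;
}

static void my_del_tracked(struct my_node *old)
{
        /* rb_erase_cached() returns the *new* leftmost when the erased node
         * was the old one (NULL if nothing changed or the tree is now empty). */
        struct rb_node *leftmost = rb_erase_cached(&old->rb, &my_tree);

        if (leftmost)
                my_earliest_key = rb_entry(leftmost, struct my_node, rb)->key;
}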
|
/linux-6.6.21/Documentation/translations/zh_CN/core-api/ |
D | rbtree.rst |
    281 * Iterate to find the leftmost such node N.
    293 return node; /* node is leftmost match */
|
/linux-6.6.21/Documentation/core-api/ |
D | rbtree.rst |
    199 Computing the leftmost (smallest) node is quite a common task for binary
    212 leftmost node. This allows rb_root_cached to exist wherever rb_root does,
    319 * Iterate to find the leftmost such node N.
    331 return node; /* node is leftmost match */
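The hits at lines 319 and 331 of rbtree.rst come from the documentation's augmented-tree example, where the search deliberately keeps descending left after a match so that the leftmost matching node is the one returned. The same idea works for a plain lower-bound search; a sketch under the assumption of the hypothetical struct my_node used earlier (a generic walk in the spirit of the documented example, not the documented interval-tree code itself):

/* Return the leftmost node whose key is >= @key, or NULL if none. */
static struct my_node *my_lower_bound(struct rb_root_cached *root, u64 key)
{
        struct rb_node *n = root->rb_root.rb_node;
        struct my_node *match = NULL;

        while (n) {
                struct my_node *cur = rb_entry(n, struct my_node, rb);

                if (cur->key >= key) {
                        /* Remember the candidate and keep going left;
                         * anything further left that also qualifies is
                         * a more leftmost answer. */
                        match = cur;
                        n = n->rb_left;
                } else {
                        n = n->rb_right;
                }
        }

        return match;   /* leftmost match, if any */
}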
|
/linux-6.6.21/Documentation/bpf/ |
D | map_lpm_trie.rst | 105 from leftmost leaf first. This means that iteration will return more
|
/linux-6.6.21/fs/ |
D | eventpoll.c |
    1304 bool leftmost = true; in ep_rbtree_insert() local
    1312 leftmost = false; in ep_rbtree_insert()
    1317 rb_insert_color_cached(&epi->rbn, &ep->rbr, leftmost); in ep_rbtree_insert()
|
/linux-6.6.21/tools/perf/ |
D | builtin-sched.c |
    1016 bool leftmost = true; in __thread_latency_insert() local
    1031 leftmost = false; in __thread_latency_insert()
    1036 rb_insert_color_cached(&data->node, root, leftmost); in __thread_latency_insert()
    3156 bool leftmost = true; in __merge_work_atoms() local
    3170 leftmost = false; in __merge_work_atoms()
    3189 rb_insert_color_cached(&data->node, root, leftmost); in __merge_work_atoms()
|