/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>


#ifdef __KERNEL__
#define RADIX_TREE_MAP_SHIFT    (CONFIG_BASE_SMALL ? 4 : 6)
#else
#define RADIX_TREE_MAP_SHIFT    3       /* For more stressful testing */
#endif

#define RADIX_TREE_MAP_SIZE     (1UL << RADIX_TREE_MAP_SHIFT)
#define RADIX_TREE_MAP_MASK     (RADIX_TREE_MAP_SIZE-1)

#define RADIX_TREE_TAG_LONGS    \
        ((RADIX_TREE_MAP_SIZE + BITS_PER_LONG - 1) / BITS_PER_LONG)
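
/*
 * Illustrative sketch (not part of the implementation): with the default
 * RADIX_TREE_MAP_SHIFT of 6, each tree level consumes six index bits, so
 * a height-2 tree resolves index 0x9c (binary 10011100) like this:
 *
 *      unsigned int shift = (2 - 1) * RADIX_TREE_MAP_SHIFT;    (= 6)
 *      int top  = (0x9c >> shift) & RADIX_TREE_MAP_MASK;       (= 2)
 *      int leaf = 0x9c & RADIX_TREE_MAP_MASK;                  (= 28)
 *
 * i.e. slot 2 of the root node points to the leaf node whose slot 28
 * holds the item. This is exactly the walk the functions below perform.
 */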

struct radix_tree_node {
        unsigned int    height;         /* Height from the bottom */
        unsigned int    count;
        union {
                struct radix_tree_node *parent; /* Used when ascending tree */
                struct rcu_head rcu_head;       /* Used when freeing node */
        };
        void __rcu      *slots[RADIX_TREE_MAP_SIZE];
        unsigned long   tags[RADIX_TREE_MAX_TAGS][RADIX_TREE_TAG_LONGS];
};

#define RADIX_TREE_INDEX_BITS  (8 /* CHAR_BIT */ * sizeof(unsigned long))
#define RADIX_TREE_MAX_PATH (DIV_ROUND_UP(RADIX_TREE_INDEX_BITS, \
                                          RADIX_TREE_MAP_SHIFT))

/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
        int nr;
        struct radix_tree_node *nodes[RADIX_TREE_MAX_PATH];
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *ptr_to_indirect(void *ptr)
{
        return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
        return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}
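
/*
 * A sketch of the round trip (illustrative only): RADIX_TREE_INDIRECT_PTR
 * is bit 0, which is always clear in a pointer returned by the node slab,
 * so the tagging is lossless:
 *
 *      struct radix_tree_node *node = ...;
 *      void *tagged = ptr_to_indirect(node);
 *      BUG_ON(!radix_tree_is_indirect_ptr(tagged));
 *      BUG_ON(indirect_to_ptr(tagged) != node);
 */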

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
        return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
                int offset)
{
        __set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
                int offset)
{
        __clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
                int offset)
{
        return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
        root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
        root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
        root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
        return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}
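
/*
 * A sketch of how the root's tag state is packed (illustrative only):
 * the low bits of root->gfp_mask hold the allocation mask passed to
 * INIT_RADIX_TREE(), and the bits at and above __GFP_BITS_SHIFT hold
 * one bit per tag, RADIX_TREE_MAX_TAGS in total:
 *
 *      bits [0, __GFP_BITS_SHIFT)      gfp allocation flags
 *      bit  (__GFP_BITS_SHIFT + tag)   root tag @tag is set
 */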

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
        int idx;
        for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
                if (node->tags[tag][idx])
                        return 1;
        }
        return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
                         unsigned long size, unsigned long offset)
{
        if (!__builtin_constant_p(size))
                return find_next_bit(addr, size, offset);

        if (offset < size) {
                unsigned long tmp;

                addr += offset / BITS_PER_LONG;
                tmp = *addr >> (offset % BITS_PER_LONG);
                if (tmp)
                        return __ffs(tmp) + offset;
                offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
                while (offset < size) {
                        tmp = *++addr;
                        if (tmp)
                                return __ffs(tmp) + offset;
                        offset += BITS_PER_LONG;
                }
        }
        return size;
}

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
        struct radix_tree_node *ret = NULL;
        gfp_t gfp_mask = root_gfp_mask(root);

        if (!(gfp_mask & __GFP_WAIT)) {
                struct radix_tree_preload *rtp;

                /*
                 * Provided the caller has preloaded here, we will always
                 * succeed in getting a node here (and never reach
                 * kmem_cache_alloc)
                 */
                rtp = &__get_cpu_var(radix_tree_preloads);
                if (rtp->nr) {
                        ret = rtp->nodes[rtp->nr - 1];
                        rtp->nodes[rtp->nr - 1] = NULL;
                        rtp->nr--;
                }
        }
        if (ret == NULL)
                ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);

        BUG_ON(radix_tree_is_indirect_ptr(ret));
        return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
        struct radix_tree_node *node =
                        container_of(head, struct radix_tree_node, rcu_head);
        int i;

        /*
         * must only free zeroed nodes into the slab. radix_tree_shrink
         * can leave us with a non-NULL entry in the first slot, so clear
         * that here to make sure.
         */
        for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
                tag_clear(node, i, 0);

        node->slots[0] = NULL;
        node->count = 0;

        kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
        call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail. On
 * success, return zero, with preemption disabled. On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_WAIT being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
        struct radix_tree_preload *rtp;
        struct radix_tree_node *node;
        int ret = -ENOMEM;

        preempt_disable();
        rtp = &__get_cpu_var(radix_tree_preloads);
        while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
                preempt_enable();
                node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
                if (node == NULL)
                        goto out;
                preempt_disable();
                rtp = &__get_cpu_var(radix_tree_preloads);
                if (rtp->nr < ARRAY_SIZE(rtp->nodes))
                        rtp->nodes[rtp->nr++] = node;
                else
                        kmem_cache_free(radix_tree_node_cachep, node);
        }
        ret = 0;
out:
        return ret;
}
EXPORT_SYMBOL(radix_tree_preload);
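
/*
 * Typical usage sketch (the canonical pattern, not code from this file):
 * radix_tree_preload() is paired with radix_tree_preload_end() from
 * <linux/radix-tree.h>, which simply re-enables preemption, around the
 * locked insertion. "mytree", "mylock", "index" and "item" are
 * placeholders:
 *
 *      if (radix_tree_preload(GFP_KERNEL))
 *              return -ENOMEM;
 *      spin_lock(&mylock);
 *      error = radix_tree_insert(&mytree, index, item);
 *      spin_unlock(&mylock);
 *      radix_tree_preload_end();
 */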

/*
 * Return the maximum key which can be stored in a
 * radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
        return height_to_maxindex[height];
}

/*
 * Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
{
        struct radix_tree_node *node;
        struct radix_tree_node *slot;
        unsigned int height;
        int tag;

        /* Figure out what the height should be. */
        height = root->height + 1;
        while (index > radix_tree_maxindex(height))
                height++;

        if (root->rnode == NULL) {
                root->height = height;
                goto out;
        }

        do {
                unsigned int newheight;
                if (!(node = radix_tree_node_alloc(root)))
                        return -ENOMEM;

                /* Propagate the aggregated tag info into the new root */
                for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
                        if (root_tag_get(root, tag))
                                tag_set(node, tag, 0);
                }

                /* Increase the height. */
                newheight = root->height+1;
                node->height = newheight;
                node->count = 1;
                node->parent = NULL;
                slot = root->rnode;
                if (newheight > 1) {
                        slot = indirect_to_ptr(slot);
                        slot->parent = node;
                }
                node->slots[0] = slot;
                node = ptr_to_indirect(node);
                rcu_assign_pointer(root->rnode, node);
                root->height = newheight;
        } while (height > root->height);
out:
        return 0;
}

/**
 * radix_tree_insert - insert into a radix tree
 * @root: radix tree root
 * @index: index key
 * @item: item to insert
 *
 * Insert an item into the radix tree at position @index.
 */
int radix_tree_insert(struct radix_tree_root *root,
                        unsigned long index, void *item)
{
        struct radix_tree_node *node = NULL, *slot;
        unsigned int height, shift;
        int offset;
        int error;

        BUG_ON(radix_tree_is_indirect_ptr(item));

        /* Make sure the tree is high enough. */
        if (index > radix_tree_maxindex(root->height)) {
                error = radix_tree_extend(root, index);
                if (error)
                        return error;
        }

        slot = indirect_to_ptr(root->rnode);

        height = root->height;
        shift = (height-1) * RADIX_TREE_MAP_SHIFT;

        offset = 0;                     /* uninitialised var warning */
        while (height > 0) {
                if (slot == NULL) {
                        /* Have to add a child node. */
                        if (!(slot = radix_tree_node_alloc(root)))
                                return -ENOMEM;
                        slot->height = height;
                        slot->parent = node;
                        if (node) {
                                rcu_assign_pointer(node->slots[offset], slot);
                                node->count++;
                        } else
                                rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
                }

                /* Go a level down */
                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                node = slot;
                slot = node->slots[offset];
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }

        if (slot != NULL)
                return -EEXIST;

        if (node) {
                node->count++;
                rcu_assign_pointer(node->slots[offset], item);
                BUG_ON(tag_get(node, 0, offset));
                BUG_ON(tag_get(node, 1, offset));
        } else {
                rcu_assign_pointer(root->rnode, item);
                BUG_ON(root_tag_get(root, 0));
                BUG_ON(root_tag_get(root, 1));
        }

        return 0;
}
EXPORT_SYMBOL(radix_tree_insert);

/*
 * is_slot == 1 : search for the slot.
 * is_slot == 0 : search for the node.
 */
static void *radix_tree_lookup_element(struct radix_tree_root *root,
                                unsigned long index, int is_slot)
{
        unsigned int height, shift;
        struct radix_tree_node *node, **slot;

        node = rcu_dereference_raw(root->rnode);
        if (node == NULL)
                return NULL;

        if (!radix_tree_is_indirect_ptr(node)) {
                if (index > 0)
                        return NULL;
                return is_slot ? (void *)&root->rnode : node;
        }
        node = indirect_to_ptr(node);

        height = node->height;
        if (index > radix_tree_maxindex(height))
                return NULL;

        shift = (height-1) * RADIX_TREE_MAP_SHIFT;

        do {
                slot = (struct radix_tree_node **)
                        (node->slots + ((index>>shift) & RADIX_TREE_MAP_MASK));
                node = rcu_dereference_raw(*slot);
                if (node == NULL)
                        return NULL;

                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        } while (height > 0);

        return is_slot ? (void *)slot : indirect_to_ptr(node);
}

/**
 * radix_tree_lookup_slot - lookup a slot in a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Returns: the slot corresponding to the position @index in the
 * radix tree @root. This is useful for update-if-exists operations.
 *
 * This function can be called under rcu_read_lock iff the slot is not
 * modified by radix_tree_replace_slot, otherwise it must be called
 * exclusive from other writers. Any dereference of the slot must be done
 * using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
        return (void **)radix_tree_lookup_element(root, index, 1);
}
EXPORT_SYMBOL(radix_tree_lookup_slot);
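
/*
 * Update-if-exists sketch (illustrative; the caller must hold whatever
 * lock excludes other modifiers, and "mytree"/"newitem" are placeholders):
 *
 *      void **slot = radix_tree_lookup_slot(&mytree, index);
 *      if (slot)
 *              radix_tree_replace_slot(slot, newitem);
 */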

/**
 * radix_tree_lookup - perform lookup operation on a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Lookup the item at the position @index in the radix tree @root.
 *
 * This function can be called under rcu_read_lock, however the caller
 * must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 * them safely). No RCU barriers are required to access or modify the
 * returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
        return radix_tree_lookup_element(root, index, 0);
}
EXPORT_SYMBOL(radix_tree_lookup);
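
/*
 * Lockless lookup sketch (illustrative; "mytree" is a placeholder and
 * try_get_ref() stands in for whatever refcounting scheme keeps the
 * item alive once the RCU read section ends):
 *
 *      rcu_read_lock();
 *      item = radix_tree_lookup(&mytree, index);
 *      if (item && !try_get_ref(item))
 *              item = NULL;
 *      rcu_read_unlock();
 */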

/**
 * radix_tree_tag_set - set a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index
 *
 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree, from
 * the root all the way down to the leaf node.
 *
 * Returns the address of the tagged item. Setting a tag on a not-present
 * item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
                        unsigned long index, unsigned int tag)
{
        unsigned int height, shift;
        struct radix_tree_node *slot;

        height = root->height;
        BUG_ON(index > radix_tree_maxindex(height));

        slot = indirect_to_ptr(root->rnode);
        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

        while (height > 0) {
                int offset;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                if (!tag_get(slot, tag, offset))
                        tag_set(slot, tag, offset);
                slot = slot->slots[offset];
                BUG_ON(slot == NULL);
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }

        /* set the root's tag bit */
        if (slot && !root_tag_get(root, tag))
                root_tag_set(root, tag);

        return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);

/**
 * radix_tree_tag_clear - clear a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index
 *
 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree. If
 * this causes the leaf node to have no tags set then clear the tag in the
 * next-to-leaf node, etc.
 *
 * Returns the address of the tagged item on success, else NULL. i.e. it
 * has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
                        unsigned long index, unsigned int tag)
{
        struct radix_tree_node *node = NULL;
        struct radix_tree_node *slot = NULL;
        unsigned int height, shift;
        int uninitialized_var(offset);

        height = root->height;
        if (index > radix_tree_maxindex(height))
                goto out;

        shift = height * RADIX_TREE_MAP_SHIFT;
        slot = indirect_to_ptr(root->rnode);

        while (shift) {
                if (slot == NULL)
                        goto out;

                shift -= RADIX_TREE_MAP_SHIFT;
                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                node = slot;
                slot = slot->slots[offset];
        }

        if (slot == NULL)
                goto out;

        while (node) {
                if (!tag_get(node, tag, offset))
                        goto out;
                tag_clear(node, tag, offset);
                if (any_tag_set(node, tag))
                        goto out;

                index >>= RADIX_TREE_MAP_SHIFT;
                offset = index & RADIX_TREE_MAP_MASK;
                node = node->parent;
        }

        /* clear the root's tag bit */
        if (root_tag_get(root, tag))
                root_tag_clear(root, tag);

out:
        return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
                        unsigned long index, unsigned int tag)
{
        unsigned int height, shift;
        struct radix_tree_node *node;

        /* check the root's tag bit */
        if (!root_tag_get(root, tag))
                return 0;

        node = rcu_dereference_raw(root->rnode);
        if (node == NULL)
                return 0;

        if (!radix_tree_is_indirect_ptr(node))
                return (index == 0);
        node = indirect_to_ptr(node);

        height = node->height;
        if (index > radix_tree_maxindex(height))
                return 0;

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

        for ( ; ; ) {
                int offset;

                if (node == NULL)
                        return 0;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                if (!tag_get(node, tag, offset))
                        return 0;
                if (height == 1)
                        return 1;
                node = rcu_dereference_raw(node->slots[offset]);
                shift -= RADIX_TREE_MAP_SHIFT;
                height--;
        }
}
EXPORT_SYMBOL(radix_tree_tag_get);
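
/*
 * Tag usage sketch (illustrative; tag 0 and the names are placeholders,
 * and the caller serialises against other tree modifiers):
 *
 *      radix_tree_insert(&mytree, index, item);
 *      radix_tree_tag_set(&mytree, index, 0);
 *      ...
 *      if (radix_tree_tag_get(&mytree, index, 0))
 *              radix_tree_tag_clear(&mytree, index, 0);
 */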

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root: radix tree root
 * @iter: iterator state
 * @flags: RADIX_TREE_ITER_* flags and tag index
 * Returns: pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
                             struct radix_tree_iter *iter, unsigned flags)
{
        unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
        struct radix_tree_node *rnode, *node;
        unsigned long index, offset;

        if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
                return NULL;

        /*
         * Catch next_index overflow after ~0UL. iter->index never overflows
         * during iterating; it can be zero only at the beginning.
         * And we cannot overflow iter->next_index in a single step,
         * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
         *
         * This condition is also used by radix_tree_next_slot() to stop
         * contiguous iterating, and to forbid switching to the next chunk.
         */
        index = iter->next_index;
        if (!index && iter->index)
                return NULL;

        rnode = rcu_dereference_raw(root->rnode);
        if (radix_tree_is_indirect_ptr(rnode)) {
                rnode = indirect_to_ptr(rnode);
        } else if (rnode && !index) {
                /* Single-slot tree */
                iter->index = 0;
                iter->next_index = 1;
                iter->tags = 1;
                return (void **)&root->rnode;
        } else
                return NULL;

restart:
        shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
        offset = index >> shift;

        /* Index outside of the tree */
        if (offset >= RADIX_TREE_MAP_SIZE)
                return NULL;

        node = rnode;
        while (1) {
                if ((flags & RADIX_TREE_ITER_TAGGED) ?
                                !test_bit(offset, node->tags[tag]) :
                                !node->slots[offset]) {
                        /* Hole detected */
                        if (flags & RADIX_TREE_ITER_CONTIG)
                                return NULL;

                        if (flags & RADIX_TREE_ITER_TAGGED)
                                offset = radix_tree_find_next_bit(
                                                node->tags[tag],
                                                RADIX_TREE_MAP_SIZE,
                                                offset + 1);
                        else
                                while (++offset < RADIX_TREE_MAP_SIZE) {
                                        if (node->slots[offset])
                                                break;
                                }
                        index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
                        index += offset << shift;
                        /* Overflow after ~0UL */
                        if (!index)
                                return NULL;
                        if (offset == RADIX_TREE_MAP_SIZE)
                                goto restart;
                }

                /* This is a leaf node */
                if (!shift)
                        break;

                node = rcu_dereference_raw(node->slots[offset]);
                if (node == NULL)
                        goto restart;
                shift -= RADIX_TREE_MAP_SHIFT;
                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
        }

        /* Update the iterator state */
        iter->index = index;
        iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

        /* Construct iter->tags bit-mask from node->tags[tag] array */
        if (flags & RADIX_TREE_ITER_TAGGED) {
                unsigned tag_long, tag_bit;

                tag_long = offset / BITS_PER_LONG;
                tag_bit  = offset % BITS_PER_LONG;
                iter->tags = node->tags[tag][tag_long] >> tag_bit;
                /* This never happens if RADIX_TREE_TAG_LONGS == 1 */
                if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
                        /* Pick tags from next element */
                        if (tag_bit)
                                iter->tags |= node->tags[tag][tag_long + 1] <<
                                                (BITS_PER_LONG - tag_bit);
                        /* Clip chunk size, here only BITS_PER_LONG tags */
                        iter->next_index = index + BITS_PER_LONG;
                }
        }

        return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
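
/*
 * Iteration sketch: callers normally reach this function through the
 * radix_tree_for_each_slot() / radix_tree_for_each_tagged() macros in
 * <linux/radix-tree.h> rather than calling it directly ("mytree" is a
 * placeholder):
 *
 *      struct radix_tree_iter iter;
 *      void **slot;
 *
 *      rcu_read_lock();
 *      radix_tree_for_each_slot(slot, &mytree, &iter, 0) {
 *              void *item = radix_tree_deref_slot(slot);
 *              if (!item)
 *                      continue;
 *              ... process the item found at iter.index ...
 *      }
 *      rcu_read_unlock();
 */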

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *                                  tag if item has another tag set
 * @root: radix tree root
 * @first_indexp: pointer to a starting index of a range to scan
 * @last_index: last index of a range to scan
 * @nr_to_tag: maximum number of items to tag
 * @iftag: tag index to test
 * @settag: tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive). For each item in the range that has iftag set,
 * the function also sets settag. The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
                unsigned long *first_indexp, unsigned long last_index,
                unsigned long nr_to_tag,
                unsigned int iftag, unsigned int settag)
{
        unsigned int height = root->height;
        struct radix_tree_node *node = NULL;
        struct radix_tree_node *slot;
        unsigned int shift;
        unsigned long tagged = 0;
        unsigned long index = *first_indexp;

        last_index = min(last_index, radix_tree_maxindex(height));
        if (index > last_index)
                return 0;
        if (!nr_to_tag)
                return 0;
        if (!root_tag_get(root, iftag)) {
                *first_indexp = last_index + 1;
                return 0;
        }
        if (height == 0) {
                *first_indexp = last_index + 1;
                root_tag_set(root, settag);
                return 1;
        }

        shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
        slot = indirect_to_ptr(root->rnode);

        for (;;) {
                unsigned long upindex;
                int offset;

                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                if (!slot->slots[offset])
                        goto next;
                if (!tag_get(slot, iftag, offset))
                        goto next;
                if (shift) {
                        /* Go down one level */
                        shift -= RADIX_TREE_MAP_SHIFT;
                        node = slot;
                        slot = slot->slots[offset];
                        continue;
                }

                /* tag the leaf */
                tagged++;
                tag_set(slot, settag, offset);

                /* walk back up the path tagging interior nodes */
                upindex = index;
                while (node) {
                        upindex >>= RADIX_TREE_MAP_SHIFT;
                        offset = upindex & RADIX_TREE_MAP_MASK;

                        /* stop if we find a node with the tag already set */
                        if (tag_get(node, settag, offset))
                                break;
                        tag_set(node, settag, offset);
                        node = node->parent;
                }

                /*
                 * Small optimization: now clear that node pointer.
                 * Since all of this slot's ancestors now have the tag set
                 * from setting it above, we have no further need to walk
                 * back up the tree setting tags, until we update slot to
                 * point to another radix_tree_node.
                 */
                node = NULL;

next:
                /* Go to next item at level determined by 'shift' */
                index = ((index >> shift) + 1) << shift;
                /* Overflow can happen when last_index is ~0UL... */
                if (index > last_index || !index)
                        break;
                if (tagged >= nr_to_tag)
                        break;
                while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
                        /*
                         * We've fully scanned this node. Go up. Because
                         * last_index is guaranteed to be in the tree, what
                         * we do below cannot wander astray.
                         */
                        slot = slot->parent;
                        shift += RADIX_TREE_MAP_SHIFT;
                }
        }
        /*
         * There is no need to set the root's settag bit when no leaf in
         * the range from *first_indexp to last_index was actually tagged.
         */
        if (tagged > 0)
                root_tag_set(root, settag);
        *first_indexp = index;

        return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
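
/*
 * Usage sketch modelled on the pagecache caller (tag_pages_for_writeback()
 * in mm/page-writeback.c): retag every dirty page in a range as "towrite",
 * in bounded batches so the tree lock is never held for too long:
 *
 *      #define WRITEBACK_TAG_BATCH 4096
 *      unsigned long start = first, tagged;
 *
 *      do {
 *              spin_lock_irq(&mapping->tree_lock);
 *              tagged = radix_tree_range_tag_if_tagged(&mapping->page_tree,
 *                              &start, end, WRITEBACK_TAG_BATCH,
 *                              PAGECACHE_TAG_DIRTY, PAGECACHE_TAG_TOWRITE);
 *              spin_unlock_irq(&mapping->tree_lock);
 *              cond_resched();
 *      } while (tagged >= WRITEBACK_TAG_BATCH && start);
 */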

/**
 * radix_tree_next_hole - find the next hole (not-present entry)
 * @root: tree root
 * @index: index key
 * @max_scan: maximum range to search
 *
 * Search the set [index, min(index+max_scan-1, MAX_INDEX)] for the lowest
 * indexed hole.
 *
 * Returns: the index of the hole if found, otherwise returns an index
 * outside of the set specified (in which case 'return - index >= max_scan'
 * will be true). In rare cases of index wrap-around, 0 will be returned.
 *
 * radix_tree_next_hole may be called under rcu_read_lock. However, like
 * radix_tree_gang_lookup, this will not atomically search a snapshot of
 * the tree at a single point in time. For example, if a hole is created
 * at index 5, then subsequently a hole is created at index 10,
 * radix_tree_next_hole covering both indexes may return 10 if called
 * under rcu_read_lock.
 */
unsigned long radix_tree_next_hole(struct radix_tree_root *root,
                                unsigned long index, unsigned long max_scan)
{
        unsigned long i;

        for (i = 0; i < max_scan; i++) {
                if (!radix_tree_lookup(root, index))
                        break;
                index++;
                if (index == 0)
                        break;
        }

        return index;
}
EXPORT_SYMBOL(radix_tree_next_hole);

/**
 * radix_tree_prev_hole - find the prev hole (not-present entry)
 * @root: tree root
 * @index: index key
 * @max_scan: maximum range to search
 *
 * Search backwards in the range [max(index-max_scan+1, 0), index]
 * for the first hole.
 *
 * Returns: the index of the hole if found, otherwise returns an index
 * outside of the set specified (in which case 'index - return >= max_scan'
 * will be true). In rare cases of wrap-around, ULONG_MAX will be returned.
 *
 * radix_tree_prev_hole may be called under rcu_read_lock. However, like
 * radix_tree_gang_lookup, this will not atomically search a snapshot of
 * the tree at a single point in time. For example, if a hole is created
 * at index 10, then subsequently a hole is created at index 5,
 * radix_tree_prev_hole covering both indexes may return 5 if called under
 * rcu_read_lock.
 */
unsigned long radix_tree_prev_hole(struct radix_tree_root *root,
                                   unsigned long index, unsigned long max_scan)
{
        unsigned long i;

        for (i = 0; i < max_scan; i++) {
                if (!radix_tree_lookup(root, index))
                        break;
                index--;
                if (index == ULONG_MAX)
                        break;
        }

        return index;
}
EXPORT_SYMBOL(radix_tree_prev_hole);

/**
 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 *
 * Performs an index-ascending scan of the tree for present items. Places
 * them at *@results and returns the number of items which were placed at
 * *@results.
 *
 * The implementation is naive.
 *
 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 * rcu_read_lock. In this case, rather than the returned results being
 * an atomic snapshot of the tree at a single point in time, the semantics
 * of an RCU protected gang lookup are as though multiple radix_tree_lookups
 * have been issued individually, with the results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
                        unsigned long first_index, unsigned int max_items)
{
        struct radix_tree_iter iter;
        void **slot;
        unsigned int ret = 0;

        if (unlikely(!max_items))
                return 0;

        radix_tree_for_each_slot(slot, root, &iter, first_index) {
                results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
                if (!results[ret])
                        continue;
                if (++ret == max_items)
                        break;
        }

        return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
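
/*
 * Batched scan sketch (illustrative; "struct myitem", its "index" field
 * and process() are hypothetical): pull out up to 16 items at a time and
 * resume after the last index seen:
 *
 *      struct myitem *batch[16];
 *      unsigned long next = 0;
 *      unsigned int n, i;
 *
 *      while ((n = radix_tree_gang_lookup(&mytree, (void **)batch,
 *                                         next, 16))) {
 *              for (i = 0; i < n; i++)
 *                      process(batch[i]);
 *              next = batch[n - 1]->index + 1;
 *              if (!next)
 *                      break;          (wrapped past ~0UL)
 *      }
 */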

/**
 * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @indices: where their indices should be placed (but usually NULL)
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 *
 * Performs an index-ascending scan of the tree for present items. Places
 * their slots at *@results and returns the number of items which were
 * placed at *@results.
 *
 * The implementation is naive.
 *
 * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 * be dereferenced with radix_tree_deref_slot, and if using only RCU
 * protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
                        void ***results, unsigned long *indices,
                        unsigned long first_index, unsigned int max_items)
{
        struct radix_tree_iter iter;
        void **slot;
        unsigned int ret = 0;

        if (unlikely(!max_items))
                return 0;

        radix_tree_for_each_slot(slot, root, &iter, first_index) {
                results[ret] = slot;
                if (indices)
                        indices[ret] = iter.index;
                if (++ret == max_items)
                        break;
        }

        return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *                              based on a tag
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set. Places the items at *@results and
 * returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
                unsigned long first_index, unsigned int max_items,
                unsigned int tag)
{
        struct radix_tree_iter iter;
        void **slot;
        unsigned int ret = 0;

        if (unlikely(!max_items))
                return 0;

        radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
                results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
                if (!results[ret])
                        continue;
                if (++ret == max_items)
                        break;
        }

        return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 * radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *                                   radix tree based on a tag
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set. Places the slots at *@results and
 * returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
                unsigned long first_index, unsigned int max_items,
                unsigned int tag)
{
        struct radix_tree_iter iter;
        void **slot;
        unsigned int ret = 0;

        if (unlikely(!max_items))
                return 0;

        radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
                results[ret] = slot;
                if (++ret == max_items)
                        break;
        }

        return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
                              unsigned long index, unsigned long *found_index)
{
        unsigned int shift, height;
        unsigned long i;

        height = slot->height;
        shift = (height-1) * RADIX_TREE_MAP_SHIFT;

        for ( ; height > 1; height--) {
                i = (index >> shift) & RADIX_TREE_MAP_MASK;
                for (;;) {
                        if (slot->slots[i] != NULL)
                                break;
                        index &= ~((1UL << shift) - 1);
                        index += 1UL << shift;
                        if (index == 0)
                                goto out;       /* 32-bit wraparound */
                        i++;
                        if (i == RADIX_TREE_MAP_SIZE)
                                goto out;
                }

                shift -= RADIX_TREE_MAP_SHIFT;
                slot = rcu_dereference_raw(slot->slots[i]);
                if (slot == NULL)
                        goto out;
        }

        /* Bottom level: check items */
        for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
                if (slot->slots[i] == item) {
                        *found_index = index + i;
                        index = 0;
                        goto out;
                }
        }
        index += RADIX_TREE_MAP_SIZE;
out:
        return index;
}

/**
 * radix_tree_locate_item - search through radix tree for item
 * @root: radix tree root
 * @item: item to be found
 *
 * Returns index where item was found, or -1 if not found.
 * Caller must hold no lock (since this time-consuming function needs
 * to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
        struct radix_tree_node *node;
        unsigned long max_index;
        unsigned long cur_index = 0;
        unsigned long found_index = -1;

        do {
                rcu_read_lock();
                node = rcu_dereference_raw(root->rnode);
                if (!radix_tree_is_indirect_ptr(node)) {
                        rcu_read_unlock();
                        if (node == item)
                                found_index = 0;
                        break;
                }

                node = indirect_to_ptr(node);
                max_index = radix_tree_maxindex(node->height);
                if (cur_index > max_index) {
                        rcu_read_unlock();
                        break;
                }

                cur_index = __locate(node, item, cur_index, &found_index);
                rcu_read_unlock();
                cond_resched();
        } while (cur_index != 0 && cur_index <= max_index);

        return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
        return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 * radix_tree_shrink - shrink height of a radix tree to minimal
 * @root: radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
        /* try to shrink tree height */
        while (root->height > 0) {
                struct radix_tree_node *to_free = root->rnode;
                struct radix_tree_node *slot;

                BUG_ON(!radix_tree_is_indirect_ptr(to_free));
                to_free = indirect_to_ptr(to_free);

                /*
                 * If the candidate node has more than one child, or if its
                 * child is not at the leftmost slot, we cannot shrink.
                 */
                if (to_free->count != 1)
                        break;
                if (!to_free->slots[0])
                        break;

                /*
                 * We don't need rcu_assign_pointer(), since we are simply
                 * moving the node from one part of the tree to another: if it
                 * was safe to dereference the old pointer to it
                 * (to_free->slots[0]), it will be safe to dereference the new
                 * one (root->rnode) as far as dependent read barriers go.
                 */
                slot = to_free->slots[0];
                if (root->height > 1) {
                        slot->parent = NULL;
                        slot = ptr_to_indirect(slot);
                }
                root->rnode = slot;
                root->height--;

                /*
                 * We have a dilemma here. The node's slot[0] must not be
                 * NULLed in case there are concurrent lookups expecting to
                 * find the item. However if this was a bottom-level node,
                 * then it may be subject to the slot pointer being visible
                 * to callers dereferencing it. If item corresponding to
                 * slot[0] is subsequently deleted, these callers would expect
                 * their slot to become empty sooner or later.
                 *
                 * For example, lockless pagecache will look up a slot, deref
                 * the page pointer, and if the page has 0 refcount it means it
                 * was concurrently deleted from pagecache so try the deref
                 * again. Fortunately there is already a requirement for logic
                 * to retry the entire slot lookup -- the indirect pointer
                 * problem (replacing direct root node with an indirect pointer
                 * also results in a stale slot). So tag the slot as indirect
                 * to force callers to retry.
                 */
                if (root->height == 0)
                        *((unsigned long *)&to_free->slots[0]) |=
                                                RADIX_TREE_INDIRECT_PTR;

                radix_tree_node_free(to_free);
        }
}

/**
 * radix_tree_delete - delete an item from a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Remove the item at @index from the radix tree rooted at @root.
 *
 * Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
        struct radix_tree_node *node = NULL;
        struct radix_tree_node *slot = NULL;
        struct radix_tree_node *to_free;
        unsigned int height, shift;
        int tag;
        int uninitialized_var(offset);

        height = root->height;
        if (index > radix_tree_maxindex(height))
                goto out;

        slot = root->rnode;
        if (height == 0) {
                root_tag_clear_all(root);
                root->rnode = NULL;
                goto out;
        }
        slot = indirect_to_ptr(slot);
        shift = height * RADIX_TREE_MAP_SHIFT;

        do {
                if (slot == NULL)
                        goto out;

                shift -= RADIX_TREE_MAP_SHIFT;
                offset = (index >> shift) & RADIX_TREE_MAP_MASK;
                node = slot;
                slot = slot->slots[offset];
        } while (shift);

        if (slot == NULL)
                goto out;

        /*
         * Clear all tags associated with the item to be deleted.
         * This way of doing it would be inefficient, but seldom is any set.
         */
        for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
                if (tag_get(node, tag, offset))
                        radix_tree_tag_clear(root, index, tag);
        }

        to_free = NULL;
        /* Now free the nodes we do not need anymore */
        while (node) {
                node->slots[offset] = NULL;
                node->count--;
                /*
                 * Queue the node for deferred freeing after the
                 * last reference to it disappears (set NULL, above).
                 */
                if (to_free)
                        radix_tree_node_free(to_free);

                if (node->count) {
                        if (node == indirect_to_ptr(root->rnode))
                                radix_tree_shrink(root);
                        goto out;
                }

                /* Node with zero slots in use so free it */
                to_free = node;

                index >>= RADIX_TREE_MAP_SHIFT;
                offset = index & RADIX_TREE_MAP_MASK;
                node = node->parent;
        }

        root_tag_clear_all(root);
        root->height = 0;
        root->rnode = NULL;
        if (to_free)
                radix_tree_node_free(to_free);

out:
        return slot;
}
EXPORT_SYMBOL(radix_tree_delete);

/**
 * radix_tree_tagged - test whether any items in the tree are tagged
 * @root: radix tree root
 * @tag: tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
        return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *node)
{
        memset(node, 0, sizeof(struct radix_tree_node));
}

static __init unsigned long __maxindex(unsigned int height)
{
        unsigned int width = height * RADIX_TREE_MAP_SHIFT;
        int shift = RADIX_TREE_INDEX_BITS - width;

        if (shift < 0)
                return ~0UL;
        if (shift >= BITS_PER_LONG)
                return 0UL;
        return ~0UL >> shift;
}

static __init void radix_tree_init_maxindex(void)
{
        unsigned int i;

        for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
                height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
                                unsigned long action,
                                void *hcpu)
{
        int cpu = (long)hcpu;
        struct radix_tree_preload *rtp;

        /* Free per-cpu pool of preloaded nodes */
        if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
                rtp = &per_cpu(radix_tree_preloads, cpu);
                while (rtp->nr) {
                        kmem_cache_free(radix_tree_node_cachep,
                                        rtp->nodes[rtp->nr-1]);
                        rtp->nodes[rtp->nr-1] = NULL;
                        rtp->nr--;
                }
        }
        return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
        radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
                        sizeof(struct radix_tree_node), 0,
                        SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
                        radix_tree_node_ctor);
        radix_tree_init_maxindex();
        hotcpu_notifier(radix_tree_callback, 0);
}