radix-tree: introduce radix_tree_load_root()
lib/radix-tree.c
/*
 * Copyright (C) 2001 Momchil Velikov
 * Portions Copyright (C) 2001 Christoph Hellwig
 * Copyright (C) 2005 SGI, Christoph Lameter
 * Copyright (C) 2006 Nick Piggin
 * Copyright (C) 2012 Konstantin Khlebnikov
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2, or (at
 * your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

#include <linux/errno.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/radix-tree.h>
#include <linux/percpu.h>
#include <linux/slab.h>
#include <linux/kmemleak.h>
#include <linux/notifier.h>
#include <linux/cpu.h>
#include <linux/string.h>
#include <linux/bitops.h>
#include <linux/rcupdate.h>
#include <linux/preempt.h>	/* in_interrupt() */


/*
 * The height_to_maxindex array needs to be one deeper than the maximum
 * path as height 0 holds only 1 entry.
 */
static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;

/*
 * Radix tree node cache.
 */
static struct kmem_cache *radix_tree_node_cachep;

/*
 * The radix tree is variable-height, so an insert operation not only has
 * to build the branch to its corresponding item, it also has to build the
 * branch to existing items if the size has to be increased (by
 * radix_tree_extend).
 *
 * The worst case is a zero height tree with just a single item at index 0,
 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
 * Hence:
 */
#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)

/*
 * Per-cpu pool of preloaded nodes
 */
struct radix_tree_preload {
	int nr;
	/* nodes->private_data points to next preallocated node */
	struct radix_tree_node *nodes;
};
static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };

static inline void *ptr_to_indirect(void *ptr)
{
	return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
}

static inline void *indirect_to_ptr(void *ptr)
{
	return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
}

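/*
 * Illustrative sketch (not part of the original file): ptr_to_indirect()
 * tags a node pointer by setting its least significant bit, which is free
 * because slab-allocated nodes are at least word-aligned.  A round trip
 * looks like this:
 */
#if 0
static void indirect_ptr_example(struct radix_tree_node *node)
{
	void *tagged = ptr_to_indirect(node);

	BUG_ON(!radix_tree_is_indirect_ptr(tagged));	/* bit 0 is now set */
	BUG_ON(indirect_to_ptr(tagged) != node);	/* round trip is lossless */
}
#endif
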
#ifdef CONFIG_RADIX_TREE_MULTIORDER
/* Sibling slots point directly to another slot in the same node */
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	void **ptr = node;
	return (parent->slots <= ptr) &&
			(ptr < parent->slots + RADIX_TREE_MAP_SIZE);
}
#else
static inline bool is_sibling_entry(struct radix_tree_node *parent, void *node)
{
	return false;
}
#endif

static inline unsigned long get_slot_offset(struct radix_tree_node *parent,
						 void **slot)
{
	return slot - parent->slots;
}

static unsigned radix_tree_descend(struct radix_tree_node *parent,
			struct radix_tree_node **nodep, unsigned offset)
{
	void **entry = rcu_dereference_raw(parent->slots[offset]);

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	if (radix_tree_is_indirect_ptr(entry)) {
		unsigned long siboff = get_slot_offset(parent, entry);
		if (siboff < RADIX_TREE_MAP_SIZE) {
			offset = siboff;
			entry = rcu_dereference_raw(parent->slots[offset]);
		}
	}
#endif

	*nodep = (void *)entry;
	return offset;
}

static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
{
	return root->gfp_mask & __GFP_BITS_MASK;
}

static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__set_bit(offset, node->tags[tag]);
}

static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	__clear_bit(offset, node->tags[tag]);
}

static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
		int offset)
{
	return test_bit(offset, node->tags[tag]);
}

static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
{
	root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
}

static inline void root_tag_clear_all(struct radix_tree_root *root)
{
	root->gfp_mask &= __GFP_BITS_MASK;
}

static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
{
	return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
}

/*
 * Returns 1 if any slot in the node has this tag set.
 * Otherwise returns 0.
 */
static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
{
	int idx;
	for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
		if (node->tags[tag][idx])
			return 1;
	}
	return 0;
}

/**
 * radix_tree_find_next_bit - find the next set bit in a memory region
 *
 * @addr: The address to base the search on
 * @size: The bitmap size in bits
 * @offset: The bitnumber to start searching at
 *
 * Unrollable variant of find_next_bit() for constant size arrays.
 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
 * Returns next bit offset, or size if nothing found.
 */
static __always_inline unsigned long
radix_tree_find_next_bit(const unsigned long *addr,
			 unsigned long size, unsigned long offset)
{
	if (!__builtin_constant_p(size))
		return find_next_bit(addr, size, offset);

	if (offset < size) {
		unsigned long tmp;

		addr += offset / BITS_PER_LONG;
		tmp = *addr >> (offset % BITS_PER_LONG);
		if (tmp)
			return __ffs(tmp) + offset;
		offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
		while (offset < size) {
			tmp = *++addr;
			if (tmp)
				return __ffs(tmp) + offset;
			offset += BITS_PER_LONG;
		}
	}
	return size;
}

#if 0
static void dump_node(void *slot, int height, int offset)
{
	struct radix_tree_node *node;
	int i;

	if (!slot)
		return;

	if (height == 0) {
		pr_debug("radix entry %p offset %d\n", slot, offset);
		return;
	}

	node = indirect_to_ptr(slot);
	pr_debug("radix node: %p offset %d tags %lx %lx %lx path %x count %d parent %p\n",
		slot, offset, node->tags[0][0], node->tags[1][0],
		node->tags[2][0], node->path, node->count, node->parent);

	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++)
		dump_node(node->slots[i], height - 1, i);
}

/* For debug */
static void radix_tree_dump(struct radix_tree_root *root)
{
	pr_debug("radix root: %p height %d rnode %p tags %x\n",
			root, root->height, root->rnode,
			root->gfp_mask >> __GFP_BITS_SHIFT);
	if (!radix_tree_is_indirect_ptr(root->rnode))
		return;
	dump_node(root->rnode, root->height, 0);
}
#endif

/*
 * This assumes that the caller has performed appropriate preallocation, and
 * that the caller has pinned this thread of control to the current CPU.
 */
static struct radix_tree_node *
radix_tree_node_alloc(struct radix_tree_root *root)
{
	struct radix_tree_node *ret = NULL;
	gfp_t gfp_mask = root_gfp_mask(root);

	/*
	 * Preload code isn't irq safe and it doesn't make sense to use
	 * preloading during an interrupt anyway as all the allocations have
	 * to be atomic. So just do normal allocation when in interrupt.
	 */
	if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) {
		struct radix_tree_preload *rtp;

		/*
		 * Even if the caller has preloaded, try to allocate from the
		 * cache first for the new node to get accounted.
		 */
		ret = kmem_cache_alloc(radix_tree_node_cachep,
				       gfp_mask | __GFP_ACCOUNT | __GFP_NOWARN);
		if (ret)
			goto out;

		/*
		 * Provided the caller has preloaded here, we will always
		 * succeed in getting a node here (and never reach
		 * kmem_cache_alloc)
		 */
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr) {
			ret = rtp->nodes;
			rtp->nodes = ret->private_data;
			ret->private_data = NULL;
			rtp->nr--;
		}
		/*
		 * Update the allocation stack trace as this is more useful
		 * for debugging.
		 */
		kmemleak_update_trace(ret);
		goto out;
	}
	ret = kmem_cache_alloc(radix_tree_node_cachep,
			       gfp_mask | __GFP_ACCOUNT);
out:
	BUG_ON(radix_tree_is_indirect_ptr(ret));
	return ret;
}

static void radix_tree_node_rcu_free(struct rcu_head *head)
{
	struct radix_tree_node *node =
			container_of(head, struct radix_tree_node, rcu_head);
	int i;

	/*
	 * must only free zeroed nodes into the slab. radix_tree_shrink
	 * can leave us with a non-NULL entry in the first slot, so clear
	 * that here to make sure.
	 */
	for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
		tag_clear(node, i, 0);

	node->slots[0] = NULL;
	node->count = 0;

	kmem_cache_free(radix_tree_node_cachep, node);
}

static inline void
radix_tree_node_free(struct radix_tree_node *node)
{
	call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail. On
 * success, return zero, with preemption disabled. On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
static int __radix_tree_preload(gfp_t gfp_mask)
{
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;
	int ret = -ENOMEM;

	preempt_disable();
	rtp = this_cpu_ptr(&radix_tree_preloads);
	while (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
		preempt_enable();
		node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
		if (node == NULL)
			goto out;
		preempt_disable();
		rtp = this_cpu_ptr(&radix_tree_preloads);
		if (rtp->nr < RADIX_TREE_PRELOAD_SIZE) {
			node->private_data = rtp->nodes;
			rtp->nodes = node;
			rtp->nr++;
		} else {
			kmem_cache_free(radix_tree_node_cachep, node);
		}
	}
	ret = 0;
out:
	return ret;
}

/*
 * Load up this CPU's radix_tree_node buffer with sufficient objects to
 * ensure that the addition of a single element in the tree cannot fail. On
 * success, return zero, with preemption disabled. On error, return -ENOMEM
 * with preemption not disabled.
 *
 * To make use of this facility, the radix tree must be initialised without
 * __GFP_DIRECT_RECLAIM being passed to INIT_RADIX_TREE().
 */
int radix_tree_preload(gfp_t gfp_mask)
{
	/* Warn on non-sensical use... */
	WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask));
	return __radix_tree_preload(gfp_mask);
}
EXPORT_SYMBOL(radix_tree_preload);

/*
 * The same as the function above, except we don't guarantee that preloading
 * happens; we do it only if we decide it helps. On success, return zero with
 * preemption disabled. On error, return -ENOMEM with preemption not disabled.
 */
int radix_tree_maybe_preload(gfp_t gfp_mask)
{
	if (gfpflags_allow_blocking(gfp_mask))
		return __radix_tree_preload(gfp_mask);
	/* Preloading doesn't help anything with this gfp mask, skip it */
	preempt_disable();
	return 0;
}
EXPORT_SYMBOL(radix_tree_maybe_preload);
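
/*
 * Illustrative sketch (not part of the original file): the usual calling
 * pattern for the preload API.  Preloading runs in a sleepable context;
 * the insert itself runs under a spinlock with preemption disabled, and
 * radix_tree_preload_end() (a preempt_enable() wrapper in
 * <linux/radix-tree.h>) closes the section.  The lock and tree here are
 * assumptions made for the example.
 */
#if 0
static int example_locked_insert(struct radix_tree_root *tree,
				 spinlock_t *lock,
				 unsigned long index, void *item)
{
	int err = radix_tree_preload(GFP_KERNEL);

	if (err)
		return err;
	spin_lock(lock);
	err = radix_tree_insert(tree, index, item);
	spin_unlock(lock);
	radix_tree_preload_end();
	return err;
}
#endif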

/*
 * Return the maximum key which can be stored in a
 * radix tree with height HEIGHT.
 */
static inline unsigned long radix_tree_maxindex(unsigned int height)
{
	return height_to_maxindex[height];
}

static inline unsigned long node_maxindex(struct radix_tree_node *node)
{
	return radix_tree_maxindex(node->path & RADIX_TREE_HEIGHT_MASK);
}

static unsigned radix_tree_load_root(struct radix_tree_root *root,
		struct radix_tree_node **nodep, unsigned long *maxindex)
{
	struct radix_tree_node *node = rcu_dereference_raw(root->rnode);

	*nodep = node;

	if (likely(radix_tree_is_indirect_ptr(node))) {
		node = indirect_to_ptr(node);
		*maxindex = node_maxindex(node);
		return (node->path & RADIX_TREE_HEIGHT_MASK) *
			RADIX_TREE_MAP_SHIFT;
	}

	*maxindex = 0;
	return 0;
}
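
/*
 * Illustrative sketch (not part of the original file): how a walk might
 * begin with the radix_tree_load_root() helper introduced here.  It hands
 * back the root node, the largest index the current tree can hold and the
 * shift at which the descent starts, so the out-of-range check collapses
 * to a single branch.
 */
#if 0
static void *example_walk_start(struct radix_tree_root *root,
				unsigned long index)
{
	struct radix_tree_node *node;
	unsigned long maxindex;
	unsigned shift = radix_tree_load_root(root, &node, &maxindex);

	if (index > maxindex)	/* index cannot be present in this tree */
		return NULL;
	/* ... otherwise descend from @node, starting at @shift ... */
	return node;
}
#endif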

/*
 * Extend a radix tree so it can store key @index.
 */
static int radix_tree_extend(struct radix_tree_root *root,
				unsigned long index, unsigned order)
{
	struct radix_tree_node *node;
	struct radix_tree_node *slot;
	unsigned int height;
	int tag;

	/* Figure out what the height should be. */
	height = root->height + 1;
	while (index > radix_tree_maxindex(height))
		height++;

	if ((root->rnode == NULL) && (order == 0)) {
		root->height = height;
		goto out;
	}

	do {
		unsigned int newheight;
		if (!(node = radix_tree_node_alloc(root)))
			return -ENOMEM;

		/* Propagate the aggregated tag info into the new root */
		for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
			if (root_tag_get(root, tag))
				tag_set(node, tag, 0);
		}

		/* Increase the height. */
		newheight = root->height + 1;
		BUG_ON(newheight & ~RADIX_TREE_HEIGHT_MASK);
		node->path = newheight;
		node->count = 1;
		node->parent = NULL;
		slot = root->rnode;
		if (radix_tree_is_indirect_ptr(slot) && newheight > 1) {
			slot = indirect_to_ptr(slot);
			slot->parent = node;
			slot = ptr_to_indirect(slot);
		}
		node->slots[0] = slot;
		node = ptr_to_indirect(node);
		rcu_assign_pointer(root->rnode, node);
		root->height = newheight;
	} while (height > root->height);
out:
	return 0;
}

/**
 * __radix_tree_create - create a slot in a radix tree
 * @root: radix tree root
 * @index: index key
 * @order: index occupies 2^order aligned slots
 * @nodep: returns node
 * @slotp: returns slot
 *
 * Create, if necessary, and return the node and slot for an item
 * at position @index in the radix tree @root.
 *
 * Until there is more than one item in the tree, no nodes are
 * allocated and @root->rnode is used as a direct slot instead of
 * pointing to a node, in which case *@nodep will be NULL.
 *
 * Returns -ENOMEM, or 0 for success.
 */
int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
			unsigned order, struct radix_tree_node **nodep,
			void ***slotp)
{
	struct radix_tree_node *node = NULL, *slot;
	unsigned int height, shift, offset;
	int error;

	/* Make sure the tree is high enough. */
	if (index > radix_tree_maxindex(root->height)) {
		error = radix_tree_extend(root, index, order);
		if (error)
			return error;
	}

	slot = root->rnode;

	height = root->height;
	shift = height * RADIX_TREE_MAP_SHIFT;

	offset = 0;			/* uninitialised var warning */
	while (shift > order) {
		if (slot == NULL) {
			/* Have to add a child node. */
			if (!(slot = radix_tree_node_alloc(root)))
				return -ENOMEM;
			slot->path = height;
			slot->parent = node;
			if (node) {
				rcu_assign_pointer(node->slots[offset],
							ptr_to_indirect(slot));
				node->count++;
				slot->path |= offset << RADIX_TREE_HEIGHT_SHIFT;
			} else
				rcu_assign_pointer(root->rnode,
							ptr_to_indirect(slot));
		} else if (!radix_tree_is_indirect_ptr(slot))
			break;

		/* Go a level down */
		height--;
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = indirect_to_ptr(slot);
		slot = node->slots[offset];
	}

#ifdef CONFIG_RADIX_TREE_MULTIORDER
	/* Insert pointers to the canonical entry */
	if (order > shift) {
		int i, n = 1 << (order - shift);
		offset = offset & ~(n - 1);
		slot = ptr_to_indirect(&node->slots[offset]);
		for (i = 0; i < n; i++) {
			if (node->slots[offset + i])
				return -EEXIST;
		}

		for (i = 1; i < n; i++) {
			rcu_assign_pointer(node->slots[offset + i], slot);
			node->count++;
		}
	}
#endif

	if (nodep)
		*nodep = node;
	if (slotp)
		*slotp = node ? node->slots + offset : (void **)&root->rnode;
	return 0;
}

/**
 * __radix_tree_insert - insert into a radix tree
 * @root: radix tree root
 * @index: index key
 * @order: key covers the 2^order indices around index
 * @item: item to insert
 *
 * Insert an item into the radix tree at position @index.
 */
int __radix_tree_insert(struct radix_tree_root *root, unsigned long index,
			unsigned order, void *item)
{
	struct radix_tree_node *node;
	void **slot;
	int error;

	BUG_ON(radix_tree_is_indirect_ptr(item));

	error = __radix_tree_create(root, index, order, &node, &slot);
	if (error)
		return error;
	if (*slot != NULL)
		return -EEXIST;
	rcu_assign_pointer(*slot, item);

	if (node) {
		node->count++;
		BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
		BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
	} else {
		BUG_ON(root_tag_get(root, 0));
		BUG_ON(root_tag_get(root, 1));
	}

	return 0;
}
EXPORT_SYMBOL(__radix_tree_insert);

/**
 * __radix_tree_lookup - lookup an item in a radix tree
 * @root: radix tree root
 * @index: index key
 * @nodep: returns node
 * @slotp: returns slot
 *
 * Lookup and return the item at position @index in the radix
 * tree @root.
 *
 * Until there is more than one item in the tree, no nodes are
 * allocated and @root->rnode is used as a direct slot instead of
 * pointing to a node, in which case *@nodep will be NULL.
 */
void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
			  struct radix_tree_node **nodep, void ***slotp)
{
	struct radix_tree_node *node, *parent;
	unsigned int height, shift;
	void **slot;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return NULL;

	if (!radix_tree_is_indirect_ptr(node)) {
		if (index > 0)
			return NULL;

		if (nodep)
			*nodep = NULL;
		if (slotp)
			*slotp = (void **)&root->rnode;
		return node;
	}
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return NULL;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	do {
		parent = node;
		slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
		node = rcu_dereference_raw(*slot);
		if (node == NULL)
			return NULL;
		if (!radix_tree_is_indirect_ptr(node))
			break;
		node = indirect_to_ptr(node);

		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	} while (height > 0);

	if (nodep)
		*nodep = parent;
	if (slotp)
		*slotp = slot;
	return node;
}

/**
 * radix_tree_lookup_slot - lookup a slot in a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Returns: the slot corresponding to the position @index in the
 * radix tree @root. This is useful for update-if-exists operations.
 *
 * This function can be called under rcu_read_lock iff the slot is not
 * modified by radix_tree_replace_slot, otherwise it must be called
 * exclusive from other writers. Any dereference of the slot must be done
 * using radix_tree_deref_slot.
 */
void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
{
	void **slot;

	if (!__radix_tree_lookup(root, index, NULL, &slot))
		return NULL;
	return slot;
}
EXPORT_SYMBOL(radix_tree_lookup_slot);

/**
 * radix_tree_lookup - perform lookup operation on a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Lookup the item at the position @index in the radix tree @root.
 *
 * This function can be called under rcu_read_lock, however the caller
 * must manage lifetimes of leaf nodes (eg. RCU may also be used to free
 * them safely). No RCU barriers are required to access or modify the
 * returned item, however.
 */
void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
{
	return __radix_tree_lookup(root, index, NULL, NULL);
}
EXPORT_SYMBOL(radix_tree_lookup);
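
/*
 * Illustrative sketch (not part of the original file): a lockless reader.
 * Only rcu_read_lock() is needed for the lookup itself; keeping the item
 * alive beyond rcu_read_unlock() is the caller's job, typically by taking
 * a reference while still inside the read-side section.
 */
#if 0
static void *example_rcu_lookup(struct radix_tree_root *tree,
				unsigned long index)
{
	void *item;

	rcu_read_lock();
	item = radix_tree_lookup(tree, index);
	/* take a reference on @item here if it must outlive the section */
	rcu_read_unlock();
	return item;
}
#endif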

/**
 * radix_tree_tag_set - set a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index
 *
 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree, from
 * the root all the way down to the leaf node.
 *
 * Returns the address of the tagged item. Setting a tag on a not-present
 * item is a bug.
 */
void *radix_tree_tag_set(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *slot;

	height = root->height;
	BUG_ON(index > radix_tree_maxindex(height));

	slot = indirect_to_ptr(root->rnode);
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	while (height > 0) {
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(slot, tag, offset))
			tag_set(slot, tag, offset);
		slot = slot->slots[offset];
		BUG_ON(slot == NULL);
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		slot = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}

	/* set the root's tag bit */
	if (slot && !root_tag_get(root, tag))
		root_tag_set(root, tag);

	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_set);

/**
 * radix_tree_tag_clear - clear a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index
 *
 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
 * corresponding to @index in the radix tree. If
 * this causes the leaf node to have no tags set then clear the tag in the
 * next-to-leaf node, etc.
 *
 * Returns the address of the tagged item on success, else NULL. ie:
 * has the same return value and semantics as radix_tree_lookup().
 */
void *radix_tree_tag_clear(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot = NULL;
	unsigned int height, shift;
	int uninitialized_var(offset);

	height = root->height;
	if (index > radix_tree_maxindex(height))
		goto out;

	shift = height * RADIX_TREE_MAP_SHIFT;
	slot = root->rnode;

	while (shift) {
		if (slot == NULL)
			goto out;
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		slot = indirect_to_ptr(slot);

		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		node = slot;
		slot = slot->slots[offset];
	}

	if (slot == NULL)
		goto out;

	while (node) {
		if (!tag_get(node, tag, offset))
			goto out;
		tag_clear(node, tag, offset);
		if (any_tag_set(node, tag))
			goto out;

		index >>= RADIX_TREE_MAP_SHIFT;
		offset = index & RADIX_TREE_MAP_MASK;
		node = node->parent;
	}

	/* clear the root's tag bit */
	if (root_tag_get(root, tag))
		root_tag_clear(root, tag);

out:
	return slot;
}
EXPORT_SYMBOL(radix_tree_tag_clear);

/**
 * radix_tree_tag_get - get a tag on a radix tree node
 * @root: radix tree root
 * @index: index key
 * @tag: tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Return values:
 *
 *  0: tag not present or not set
 *  1: tag set
 *
 * Note that the return value of this function may not be relied on, even if
 * the RCU lock is held, unless tag modification and node deletion are excluded
 * from concurrency.
 */
int radix_tree_tag_get(struct radix_tree_root *root,
			unsigned long index, unsigned int tag)
{
	unsigned int height, shift;
	struct radix_tree_node *node;

	/* check the root's tag bit */
	if (!root_tag_get(root, tag))
		return 0;

	node = rcu_dereference_raw(root->rnode);
	if (node == NULL)
		return 0;

	if (!radix_tree_is_indirect_ptr(node))
		return (index == 0);
	node = indirect_to_ptr(node);

	height = node->path & RADIX_TREE_HEIGHT_MASK;
	if (index > radix_tree_maxindex(height))
		return 0;

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; ; ) {
		int offset;

		if (node == NULL)
			return 0;
		node = indirect_to_ptr(node);

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!tag_get(node, tag, offset))
			return 0;
		if (height == 1)
			return 1;
		node = rcu_dereference_raw(node->slots[offset]);
		if (!radix_tree_is_indirect_ptr(node))
			return 1;
		shift -= RADIX_TREE_MAP_SHIFT;
		height--;
	}
}
EXPORT_SYMBOL(radix_tree_tag_get);
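
/*
 * Illustrative sketch (not part of the original file): setting, testing
 * and clearing a tag on a present item.  Tag 0 is an arbitrary pick for
 * the example; real users name their tags (e.g. PAGECACHE_TAG_DIRTY in
 * the page cache).
 */
#if 0
static void example_tags(struct radix_tree_root *tree, unsigned long index)
{
	/* the item at @index must already be present */
	radix_tree_tag_set(tree, index, 0);
	BUG_ON(!radix_tree_tag_get(tree, index, 0));
	radix_tree_tag_clear(tree, index, 0);
	BUG_ON(radix_tree_tagged(tree, 0));	/* assuming nothing else was tagged */
}
#endif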

/**
 * radix_tree_next_chunk - find next chunk of slots for iteration
 *
 * @root: radix tree root
 * @iter: iterator state
 * @flags: RADIX_TREE_ITER_* flags and tag index
 * Returns: pointer to chunk first slot, or NULL if iteration is over
 */
void **radix_tree_next_chunk(struct radix_tree_root *root,
			     struct radix_tree_iter *iter, unsigned flags)
{
	unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
	struct radix_tree_node *rnode, *node;
	unsigned long index, offset, height;

	if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
		return NULL;

	/*
	 * Catch next_index overflow after ~0UL. iter->index never overflows
	 * during iteration; it can be zero only at the beginning.
	 * And we cannot overflow iter->next_index in a single step,
	 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
	 *
	 * This condition is also used by radix_tree_next_slot() to stop
	 * contiguous iteration, and to forbid switching to the next chunk.
	 */
	index = iter->next_index;
	if (!index && iter->index)
		return NULL;

	rnode = rcu_dereference_raw(root->rnode);
	if (radix_tree_is_indirect_ptr(rnode)) {
		rnode = indirect_to_ptr(rnode);
	} else if (rnode && !index) {
		/* Single-slot tree */
		iter->index = 0;
		iter->next_index = 1;
		iter->tags = 1;
		return (void **)&root->rnode;
	} else
		return NULL;

restart:
	height = rnode->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	offset = index >> shift;

	/* Index outside of the tree */
	if (offset >= RADIX_TREE_MAP_SIZE)
		return NULL;

	node = rnode;
	while (1) {
		struct radix_tree_node *slot;
		if ((flags & RADIX_TREE_ITER_TAGGED) ?
				!test_bit(offset, node->tags[tag]) :
				!node->slots[offset]) {
			/* Hole detected */
			if (flags & RADIX_TREE_ITER_CONTIG)
				return NULL;

			if (flags & RADIX_TREE_ITER_TAGGED)
				offset = radix_tree_find_next_bit(
						node->tags[tag],
						RADIX_TREE_MAP_SIZE,
						offset + 1);
			else
				while (++offset < RADIX_TREE_MAP_SIZE) {
					if (node->slots[offset])
						break;
				}
			index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
			index += offset << shift;
			/* Overflow after ~0UL */
			if (!index)
				return NULL;
			if (offset == RADIX_TREE_MAP_SIZE)
				goto restart;
		}

		/* This is leaf-node */
		if (!shift)
			break;

		slot = rcu_dereference_raw(node->slots[offset]);
		if (slot == NULL)
			goto restart;
		if (!radix_tree_is_indirect_ptr(slot))
			break;
		node = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
	}

	/* Update the iterator state */
	iter->index = index;
	iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;

	/* Construct iter->tags bit-mask from node->tags[tag] array */
	if (flags & RADIX_TREE_ITER_TAGGED) {
		unsigned tag_long, tag_bit;

		tag_long = offset / BITS_PER_LONG;
		tag_bit  = offset % BITS_PER_LONG;
		iter->tags = node->tags[tag][tag_long] >> tag_bit;
		/* This never happens if RADIX_TREE_TAG_LONGS == 1 */
		if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
			/* Pick tags from next element */
			if (tag_bit)
				iter->tags |= node->tags[tag][tag_long + 1] <<
						(BITS_PER_LONG - tag_bit);
			/* Clip chunk size, here only BITS_PER_LONG tags */
			iter->next_index = index + BITS_PER_LONG;
		}
	}

	return node->slots + offset;
}
EXPORT_SYMBOL(radix_tree_next_chunk);
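
/*
 * Illustrative sketch (not part of the original file): normal users never
 * call radix_tree_next_chunk() directly; they go through the iterator
 * macros in <linux/radix-tree.h>, which drive it under the hood.
 */
#if 0
static unsigned long example_count_items(struct radix_tree_root *tree)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned long count = 0;

	radix_tree_for_each_slot(slot, tree, &iter, 0)
		count++;
	return count;
}
#endif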

/**
 * radix_tree_range_tag_if_tagged - for each item in given range set given
 *				    tag if item has another tag set
 * @root: radix tree root
 * @first_indexp: pointer to a starting index of a range to scan
 * @last_index: last index of a range to scan
 * @nr_to_tag: maximum number of items to tag
 * @iftag: tag index to test
 * @settag: tag index to set if tested tag is set
 *
 * This function scans the range of the radix tree from first_index to
 * last_index (inclusive). For each item in the range on which iftag is set,
 * the function also sets settag. The function stops either after tagging
 * nr_to_tag items or after reaching last_index.
 *
 * The tags must be set from the leaf level only and propagated back up the
 * path to the root. We must do this so that we resolve the full path before
 * setting any tags on intermediate nodes. If we set tags as we descend, then
 * we can get to the leaf node and find that the index that has the iftag
 * set is outside the range we are scanning. This results in dangling tags and
 * can lead to problems with later tag operations (e.g. livelocks on lookups).
 *
 * The function returns the number of leaves where the tag was set and sets
 * *first_indexp to the first unscanned index.
 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
 * be prepared to handle that.
 */
unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
		unsigned long *first_indexp, unsigned long last_index,
		unsigned long nr_to_tag,
		unsigned int iftag, unsigned int settag)
{
	unsigned int height = root->height;
	struct radix_tree_node *node = NULL;
	struct radix_tree_node *slot;
	unsigned int shift;
	unsigned long tagged = 0;
	unsigned long index = *first_indexp;

	last_index = min(last_index, radix_tree_maxindex(height));
	if (index > last_index)
		return 0;
	if (!nr_to_tag)
		return 0;
	if (!root_tag_get(root, iftag)) {
		*first_indexp = last_index + 1;
		return 0;
	}
	if (height == 0) {
		*first_indexp = last_index + 1;
		root_tag_set(root, settag);
		return 1;
	}

	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
	slot = indirect_to_ptr(root->rnode);

	for (;;) {
		unsigned long upindex;
		int offset;

		offset = (index >> shift) & RADIX_TREE_MAP_MASK;
		if (!slot->slots[offset])
			goto next;
		if (!tag_get(slot, iftag, offset))
			goto next;
		if (shift) {
			node = slot;
			slot = slot->slots[offset];
			if (radix_tree_is_indirect_ptr(slot)) {
				slot = indirect_to_ptr(slot);
				shift -= RADIX_TREE_MAP_SHIFT;
				continue;
			} else {
				slot = node;
				node = node->parent;
			}
		}

		/* tag the leaf */
		tagged += 1 << shift;
		tag_set(slot, settag, offset);

		/* walk back up the path tagging interior nodes */
		upindex = index;
		while (node) {
			upindex >>= RADIX_TREE_MAP_SHIFT;
			offset = upindex & RADIX_TREE_MAP_MASK;

			/* stop if we find a node with the tag already set */
			if (tag_get(node, settag, offset))
				break;
			tag_set(node, settag, offset);
			node = node->parent;
		}

		/*
		 * Small optimization: now clear that node pointer.
		 * Since all of this slot's ancestors now have the tag set
		 * from setting it above, we have no further need to walk
		 * back up the tree setting tags, until we update slot to
		 * point to another radix_tree_node.
		 */
		node = NULL;

next:
		/* Go to next item at level determined by 'shift' */
		index = ((index >> shift) + 1) << shift;
		/* Overflow can happen when last_index is ~0UL... */
		if (index > last_index || !index)
			break;
		if (tagged >= nr_to_tag)
			break;
		while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
			/*
			 * We've fully scanned this node. Go up. Because
			 * last_index is guaranteed to be in the tree, what
			 * we do below cannot wander astray.
			 */
			slot = slot->parent;
			shift += RADIX_TREE_MAP_SHIFT;
		}
	}
	/*
	 * There is no need to set the root's settag bit if no leaf within
	 * the range from *first_indexp to last_index was tagged with settag.
	 */
	if (tagged > 0)
		root_tag_set(root, settag);
	*first_indexp = index;

	return tagged;
}
EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
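
/*
 * Illustrative sketch (not part of the original file), modelled on how
 * tag_pages_for_writeback() in mm/page-writeback.c uses this function:
 * convert at most a batch of iftag items to settag per locked section,
 * then drop the lock and resume from the updated start index.  The batch
 * size, lock and tag values are assumptions made for the example.
 */
#if 0
#define EXAMPLE_TAG_BATCH	4096

static void example_retag_range(struct radix_tree_root *tree,
				unsigned long start, unsigned long end,
				spinlock_t *lock)
{
	unsigned long tagged;

	do {
		spin_lock_irq(lock);
		tagged = radix_tree_range_tag_if_tagged(tree, &start, end,
					EXAMPLE_TAG_BATCH, 0, 1);
		spin_unlock_irq(lock);
		cond_resched();
	} while (tagged >= EXAMPLE_TAG_BATCH && start);  /* start == 0: wrapped */
}
#endif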

/**
 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 *
 * Performs an index-ascending scan of the tree for present items. Places
 * them at *@results and returns the number of items which were placed at
 * *@results.
 *
 * The implementation is naive.
 *
 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under
 * rcu_read_lock. In this case, rather than the returned results being
 * an atomic snapshot of the tree at a single point in time, the semantics
 * of an RCU protected gang lookup are as though multiple radix_tree_lookups
 * have been issued in individual locks, and results stored in 'results'.
 */
unsigned int
radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup);
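
/*
 * Illustrative sketch (not part of the original file): fetching one batch
 * of items under RCU.  The pr_debug() stands in for whatever per-item work
 * a real caller would do.
 */
#if 0
static unsigned int example_first_batch(struct radix_tree_root *tree,
					void **results, unsigned int max)
{
	unsigned int i, nr;

	rcu_read_lock();
	nr = radix_tree_gang_lookup(tree, results, 0, max);
	rcu_read_unlock();

	for (i = 0; i < nr; i++)
		pr_debug("found item %p\n", results[i]);
	return nr;
}
#endif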

/**
 * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @indices: where their indices should be placed (but usually NULL)
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 *
 * Performs an index-ascending scan of the tree for present items. Places
 * their slots at *@results and returns the number of items which were
 * placed at *@results.
 *
 * The implementation is naive.
 *
 * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
 * be dereferenced with radix_tree_deref_slot, and if using only RCU
 * protection, radix_tree_deref_slot may fail requiring a retry.
 */
unsigned int
radix_tree_gang_lookup_slot(struct radix_tree_root *root,
			void ***results, unsigned long *indices,
			unsigned long first_index, unsigned int max_items)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_slot(slot, root, &iter, first_index) {
		results[ret] = slot;
		if (indices)
			indices[ret] = iter.index;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_slot);

/**
 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
 *				based on a tag
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set. Places the items at *@results and
 * returns the number of items which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = rcu_dereference_raw(*slot);
		if (!results[ret])
			continue;
		if (radix_tree_is_indirect_ptr(results[ret])) {
			slot = radix_tree_iter_retry(&iter);
			continue;
		}
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag);

/**
 * radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
 *				     radix tree based on a tag
 * @root: radix tree root
 * @results: where the results of the lookup are placed
 * @first_index: start the lookup from this key
 * @max_items: place up to this many items at *results
 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
 *
 * Performs an index-ascending scan of the tree for present items which
 * have the tag indexed by @tag set. Places the slots at *@results and
 * returns the number of slots which were placed at *@results.
 */
unsigned int
radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
		unsigned long first_index, unsigned int max_items,
		unsigned int tag)
{
	struct radix_tree_iter iter;
	void **slot;
	unsigned int ret = 0;

	if (unlikely(!max_items))
		return 0;

	radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
		results[ret] = slot;
		if (++ret == max_items)
			break;
	}

	return ret;
}
EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);

#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
#include <linux/sched.h> /* for cond_resched() */

/*
 * This linear search is at present only useful to shmem_unuse_inode().
 */
static unsigned long __locate(struct radix_tree_node *slot, void *item,
			      unsigned long index, unsigned long *found_index)
{
	unsigned int shift, height;
	unsigned long i;

	height = slot->path & RADIX_TREE_HEIGHT_MASK;
	shift = (height - 1) * RADIX_TREE_MAP_SHIFT;

	for ( ; height > 1; height--) {
		i = (index >> shift) & RADIX_TREE_MAP_MASK;
		for (;;) {
			if (slot->slots[i] != NULL)
				break;
			index &= ~((1UL << shift) - 1);
			index += 1UL << shift;
			if (index == 0)
				goto out;	/* 32-bit wraparound */
			i++;
			if (i == RADIX_TREE_MAP_SIZE)
				goto out;
		}

		slot = rcu_dereference_raw(slot->slots[i]);
		if (slot == NULL)
			goto out;
		if (!radix_tree_is_indirect_ptr(slot)) {
			if (slot == item) {
				*found_index = index + i;
				index = 0;
			} else {
				index += shift;
			}
			goto out;
		}
		slot = indirect_to_ptr(slot);
		shift -= RADIX_TREE_MAP_SHIFT;
	}

	/* Bottom level: check items */
	for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
		if (slot->slots[i] == item) {
			*found_index = index + i;
			index = 0;
			goto out;
		}
	}
	index += RADIX_TREE_MAP_SIZE;
out:
	return index;
}

/**
 * radix_tree_locate_item - search through radix tree for item
 * @root: radix tree root
 * @item: item to be found
 *
 * Returns index where item was found, or -1 if not found.
 * Caller must hold no lock (since this time-consuming function needs
 * to be preemptible), and must check afterwards if item is still there.
 */
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	struct radix_tree_node *node;
	unsigned long max_index;
	unsigned long cur_index = 0;
	unsigned long found_index = -1;

	do {
		rcu_read_lock();
		node = rcu_dereference_raw(root->rnode);
		if (!radix_tree_is_indirect_ptr(node)) {
			rcu_read_unlock();
			if (node == item)
				found_index = 0;
			break;
		}

		node = indirect_to_ptr(node);
		max_index = radix_tree_maxindex(node->path &
						RADIX_TREE_HEIGHT_MASK);
		if (cur_index > max_index) {
			rcu_read_unlock();
			break;
		}

		cur_index = __locate(node, item, cur_index, &found_index);
		rcu_read_unlock();
		cond_resched();
	} while (cur_index != 0 && cur_index <= max_index);

	return found_index;
}
#else
unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
{
	return -1;
}
#endif /* CONFIG_SHMEM && CONFIG_SWAP */

/**
 * radix_tree_shrink - shrink height of a radix tree to minimum
 * @root: radix tree root
 */
static inline void radix_tree_shrink(struct radix_tree_root *root)
{
	/* try to shrink tree height */
	while (root->height > 0) {
		struct radix_tree_node *to_free = root->rnode;
		struct radix_tree_node *slot;

		BUG_ON(!radix_tree_is_indirect_ptr(to_free));
		to_free = indirect_to_ptr(to_free);

		/*
		 * If the candidate node has more than one child, or its
		 * child is not at the leftmost slot, or the child is a
		 * multiorder entry, we cannot shrink.
		 */
		if (to_free->count != 1)
			break;
		slot = to_free->slots[0];
		if (!slot)
			break;

		/*
		 * We don't need rcu_assign_pointer(), since we are simply
		 * moving the node from one part of the tree to another: if it
		 * was safe to dereference the old pointer to it
		 * (to_free->slots[0]), it will be safe to dereference the new
		 * one (root->rnode) as far as dependent read barriers go.
		 */
		if (root->height > 1) {
			if (!radix_tree_is_indirect_ptr(slot))
				break;

			slot = indirect_to_ptr(slot);
			slot->parent = NULL;
			slot = ptr_to_indirect(slot);
		}
		root->rnode = slot;
		root->height--;

		/*
		 * We have a dilemma here. The node's slot[0] must not be
		 * NULLed in case there are concurrent lookups expecting to
		 * find the item. However if this was a bottom-level node,
		 * then it may be subject to the slot pointer being visible
		 * to callers dereferencing it. If the item corresponding to
		 * slot[0] is subsequently deleted, these callers would expect
		 * their slot to become empty sooner or later.
		 *
		 * For example, lockless pagecache will look up a slot, deref
		 * the page pointer, and if the page has 0 refcount it means it
		 * was concurrently deleted from pagecache so try the deref
		 * again. Fortunately there is already a requirement for logic
		 * to retry the entire slot lookup -- the indirect pointer
		 * problem (replacing direct root node with an indirect pointer
		 * also results in a stale slot). So tag the slot as indirect
		 * to force callers to retry.
		 */
		if (root->height == 0)
			*((unsigned long *)&to_free->slots[0]) |=
						RADIX_TREE_INDIRECT_PTR;

		radix_tree_node_free(to_free);
	}
}

/**
 * __radix_tree_delete_node - try to free node after clearing a slot
 * @root: radix tree root
 * @node: node containing @index
 *
 * After clearing the slot at @index in @node from radix tree
 * rooted at @root, call this function to attempt freeing the
 * node and shrinking the tree.
 *
 * Returns %true if @node was freed, %false otherwise.
 */
bool __radix_tree_delete_node(struct radix_tree_root *root,
			      struct radix_tree_node *node)
{
	bool deleted = false;

	do {
		struct radix_tree_node *parent;

		if (node->count) {
			if (node == indirect_to_ptr(root->rnode)) {
				radix_tree_shrink(root);
				if (root->height == 0)
					deleted = true;
			}
			return deleted;
		}

		parent = node->parent;
		if (parent) {
			unsigned int offset;

			offset = node->path >> RADIX_TREE_HEIGHT_SHIFT;
			parent->slots[offset] = NULL;
			parent->count--;
		} else {
			root_tag_clear_all(root);
			root->height = 0;
			root->rnode = NULL;
		}

		radix_tree_node_free(node);
		deleted = true;

		node = parent;
	} while (node);

	return deleted;
}

static inline void delete_sibling_entries(struct radix_tree_node *node,
					void *ptr, unsigned offset)
{
#ifdef CONFIG_RADIX_TREE_MULTIORDER
	int i;
	for (i = 1; offset + i < RADIX_TREE_MAP_SIZE; i++) {
		if (node->slots[offset + i] != ptr)
			break;
		node->slots[offset + i] = NULL;
		node->count--;
	}
#endif
}

/**
 * radix_tree_delete_item - delete an item from a radix tree
 * @root: radix tree root
 * @index: index key
 * @item: expected item
 *
 * Remove @item at @index from the radix tree rooted at @root.
 *
 * Returns the address of the deleted item, or NULL if it was not present
 * or the entry at the given @index was not @item.
 */
void *radix_tree_delete_item(struct radix_tree_root *root,
			     unsigned long index, void *item)
{
	struct radix_tree_node *node;
	unsigned int offset;
	void **slot;
	void *entry;
	int tag;

	entry = __radix_tree_lookup(root, index, &node, &slot);
	if (!entry)
		return NULL;

	if (item && entry != item)
		return NULL;

	if (!node) {
		root_tag_clear_all(root);
		root->rnode = NULL;
		return entry;
	}

	offset = get_slot_offset(node, slot);

	/*
	 * Clear all tags associated with the item to be deleted.
	 * This way of doing it would be inefficient, but seldom is any set.
	 */
	for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
		if (tag_get(node, tag, offset))
			radix_tree_tag_clear(root, index, tag);
	}

	delete_sibling_entries(node, ptr_to_indirect(slot), offset);
	node->slots[offset] = NULL;
	node->count--;

	__radix_tree_delete_node(root, node);

	return entry;
}
EXPORT_SYMBOL(radix_tree_delete_item);

/**
 * radix_tree_delete - delete an item from a radix tree
 * @root: radix tree root
 * @index: index key
 *
 * Remove the item at @index from the radix tree rooted at @root.
 *
 * Returns the address of the deleted item, or NULL if it was not present.
 */
void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
{
	return radix_tree_delete_item(root, index, NULL);
}
EXPORT_SYMBOL(radix_tree_delete);
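
/*
 * Illustrative sketch (not part of the original file): conditional removal
 * with radix_tree_delete_item(), which only clears the slot if it still
 * holds the expected item -- handy when the lookup that produced @item
 * dropped the tree lock in between.
 */
#if 0
static bool example_remove_if_unchanged(struct radix_tree_root *tree,
					unsigned long index, void *item)
{
	return radix_tree_delete_item(tree, index, item) == item;
}
#endif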

/**
 * radix_tree_tagged - test whether any items in the tree are tagged
 * @root: radix tree root
 * @tag: tag to test
 */
int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
{
	return root_tag_get(root, tag);
}
EXPORT_SYMBOL(radix_tree_tagged);

static void
radix_tree_node_ctor(void *arg)
{
	struct radix_tree_node *node = arg;

	memset(node, 0, sizeof(*node));
	INIT_LIST_HEAD(&node->private_list);
}

static __init unsigned long __maxindex(unsigned int height)
{
	unsigned int width = height * RADIX_TREE_MAP_SHIFT;
	int shift = RADIX_TREE_INDEX_BITS - width;

	if (shift < 0)
		return ~0UL;
	if (shift >= BITS_PER_LONG)
		return 0UL;
	return ~0UL >> shift;
}

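/*
 * Worked example (added for illustration): on a 64-bit build, where
 * RADIX_TREE_MAP_SHIFT is typically 6, each level of height contributes
 * 6 index bits, so:
 *
 *	height 0  -> width  0 -> maxindex 0
 *	height 1  -> width  6 -> maxindex 2^6  - 1 = 63
 *	height 2  -> width 12 -> maxindex 2^12 - 1 = 4095
 *	...
 *	height 11 -> width 66, shift goes negative -> maxindex ~0UL
 */
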
static __init void radix_tree_init_maxindex(void)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
		height_to_maxindex[i] = __maxindex(i);
}

static int radix_tree_callback(struct notifier_block *nfb,
				unsigned long action,
				void *hcpu)
{
	int cpu = (long)hcpu;
	struct radix_tree_preload *rtp;
	struct radix_tree_node *node;

	/* Free per-cpu pool of preloaded nodes */
	if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
		rtp = &per_cpu(radix_tree_preloads, cpu);
		while (rtp->nr) {
			node = rtp->nodes;
			rtp->nodes = node->private_data;
			kmem_cache_free(radix_tree_node_cachep, node);
			rtp->nr--;
		}
	}
	return NOTIFY_OK;
}

void __init radix_tree_init(void)
{
	radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
			sizeof(struct radix_tree_node), 0,
			SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
			radix_tree_node_ctor);
	radix_tree_init_maxindex();
	hotcpu_notifier(radix_tree_callback, 0);
}