lib: radix_tree: tree node interface
[deliverable/linux.git] / lib / radix-tree.c
1da177e4
LT
1/*
2 * Copyright (C) 2001 Momchil Velikov
3 * Portions Copyright (C) 2001 Christoph Hellwig
cde53535 4 * Copyright (C) 2005 SGI, Christoph Lameter
7cf9c2c7 5 * Copyright (C) 2006 Nick Piggin
78c1d784 6 * Copyright (C) 2012 Konstantin Khlebnikov
1da177e4
LT
7 *
8 * This program is free software; you can redistribute it and/or
9 * modify it under the terms of the GNU General Public License as
10 * published by the Free Software Foundation; either version 2, or (at
11 * your option) any later version.
12 *
13 * This program is distributed in the hope that it will be useful, but
14 * WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 * General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License
19 * along with this program; if not, write to the Free Software
20 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
21 */
22
23#include <linux/errno.h>
24#include <linux/init.h>
25#include <linux/kernel.h>
8bc3bcc9 26#include <linux/export.h>
1da177e4
LT
27#include <linux/radix-tree.h>
28#include <linux/percpu.h>
29#include <linux/slab.h>
30#include <linux/notifier.h>
31#include <linux/cpu.h>
1da177e4
LT
32#include <linux/string.h>
33#include <linux/bitops.h>
7cf9c2c7 34#include <linux/rcupdate.h>
5e4c0d97 35#include <linux/hardirq.h> /* in_interrupt() */
1da177e4
LT
36
37
26fb1589
JM
38/*
39 * The height_to_maxindex array needs to be one deeper than the maximum
40 * path as height 0 holds only 1 entry.
41 */
42static unsigned long height_to_maxindex[RADIX_TREE_MAX_PATH + 1] __read_mostly;
1da177e4
LT
43
44/*
45 * Radix tree node cache.
46 */
e18b890b 47static struct kmem_cache *radix_tree_node_cachep;
1da177e4 48
55368052
NP
49/*
50 * The radix tree is variable-height, so an insert operation not only has
51 * to build the branch to its corresponding item, it also has to build the
52 * branch to existing items if the size has to be increased (by
53 * radix_tree_extend).
54 *
55 * The worst case is a zero height tree with just a single item at index 0,
56 * and then inserting an item at index ULONG_MAX. This requires 2 new branches
57 * of RADIX_TREE_MAX_PATH size to be created, with only the root node shared.
58 * Hence:
59 */
60#define RADIX_TREE_PRELOAD_SIZE (RADIX_TREE_MAX_PATH * 2 - 1)
61
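/*
 * Worked example (illustrative, not in the original source): with
 * RADIX_TREE_MAP_SHIFT == 6 (the non-CONFIG_BASE_SMALL value) on a
 * 64-bit machine, RADIX_TREE_MAX_PATH is DIV_ROUND_UP(64, 6) == 11,
 * so RADIX_TREE_PRELOAD_SIZE comes to 11 * 2 - 1 == 21 preallocated
 * nodes per CPU.
 */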
1da177e4
LT
62/*
63 * Per-cpu pool of preloaded nodes
64 */
65struct radix_tree_preload {
66 int nr;
55368052 67 struct radix_tree_node *nodes[RADIX_TREE_PRELOAD_SIZE];
1da177e4 68};
8cef7d57 69static DEFINE_PER_CPU(struct radix_tree_preload, radix_tree_preloads) = { 0, };
1da177e4 70
27d20fdd
NP
71static inline void *ptr_to_indirect(void *ptr)
72{
73 return (void *)((unsigned long)ptr | RADIX_TREE_INDIRECT_PTR);
74}
75
76static inline void *indirect_to_ptr(void *ptr)
77{
78 return (void *)((unsigned long)ptr & ~RADIX_TREE_INDIRECT_PTR);
79}
80
612d6c19
NP
81static inline gfp_t root_gfp_mask(struct radix_tree_root *root)
82{
83 return root->gfp_mask & __GFP_BITS_MASK;
84}
85
643b52b9
NP
86static inline void tag_set(struct radix_tree_node *node, unsigned int tag,
87 int offset)
88{
89 __set_bit(offset, node->tags[tag]);
90}
91
92static inline void tag_clear(struct radix_tree_node *node, unsigned int tag,
93 int offset)
94{
95 __clear_bit(offset, node->tags[tag]);
96}
97
98static inline int tag_get(struct radix_tree_node *node, unsigned int tag,
99 int offset)
100{
101 return test_bit(offset, node->tags[tag]);
102}
103
104static inline void root_tag_set(struct radix_tree_root *root, unsigned int tag)
105{
106 root->gfp_mask |= (__force gfp_t)(1 << (tag + __GFP_BITS_SHIFT));
107}
108
109static inline void root_tag_clear(struct radix_tree_root *root, unsigned int tag)
110{
111 root->gfp_mask &= (__force gfp_t)~(1 << (tag + __GFP_BITS_SHIFT));
112}
113
114static inline void root_tag_clear_all(struct radix_tree_root *root)
115{
116 root->gfp_mask &= __GFP_BITS_MASK;
117}
118
119static inline int root_tag_get(struct radix_tree_root *root, unsigned int tag)
120{
121 return (__force unsigned)root->gfp_mask & (1 << (tag + __GFP_BITS_SHIFT));
122}
123
124/*
125 * Returns 1 if any slot in the node has this tag set.
126 * Otherwise returns 0.
127 */
128static inline int any_tag_set(struct radix_tree_node *node, unsigned int tag)
129{
130 int idx;
131 for (idx = 0; idx < RADIX_TREE_TAG_LONGS; idx++) {
132 if (node->tags[tag][idx])
133 return 1;
134 }
135 return 0;
136}
78c1d784
KK
137
138/**
139 * radix_tree_find_next_bit - find the next set bit in a memory region
140 *
141 * @addr: The address to base the search on
142 * @size: The bitmap size in bits
143 * @offset: The bitnumber to start searching at
144 *
145 * Unrollable variant of find_next_bit() for constant size arrays.
146 * Tail bits starting from size to roundup(size, BITS_PER_LONG) must be zero.
147 * Returns next bit offset, or size if nothing found.
148 */
149static __always_inline unsigned long
150radix_tree_find_next_bit(const unsigned long *addr,
151 unsigned long size, unsigned long offset)
152{
153 if (!__builtin_constant_p(size))
154 return find_next_bit(addr, size, offset);
155
156 if (offset < size) {
157 unsigned long tmp;
158
159 addr += offset / BITS_PER_LONG;
160 tmp = *addr >> (offset % BITS_PER_LONG);
161 if (tmp)
162 return __ffs(tmp) + offset;
163 offset = (offset + BITS_PER_LONG) & ~(BITS_PER_LONG - 1);
164 while (offset < size) {
165 tmp = *++addr;
166 if (tmp)
167 return __ffs(tmp) + offset;
168 offset += BITS_PER_LONG;
169 }
170 }
171 return size;
172}
173
1da177e4
LT
174/*
175 * This assumes that the caller has performed appropriate preallocation, and
176 * that the caller has pinned this thread of control to the current CPU.
177 */
178static struct radix_tree_node *
179radix_tree_node_alloc(struct radix_tree_root *root)
180{
e2848a0e 181 struct radix_tree_node *ret = NULL;
612d6c19 182 gfp_t gfp_mask = root_gfp_mask(root);
1da177e4 183
5e4c0d97
JK
184 /*
 185 * Preload code isn't IRQ safe and it doesn't make sense to use
 186 * preloading in interrupt context anyway, as all the allocations have
 187 * to be atomic. So just do a normal allocation when in interrupt.
188 */
189 if (!(gfp_mask & __GFP_WAIT) && !in_interrupt()) {
1da177e4
LT
190 struct radix_tree_preload *rtp;
191
e2848a0e
NP
192 /*
 193 * Provided the caller has preloaded, we will always
194 * succeed in getting a node here (and never reach
195 * kmem_cache_alloc)
196 */
1da177e4
LT
197 rtp = &__get_cpu_var(radix_tree_preloads);
198 if (rtp->nr) {
199 ret = rtp->nodes[rtp->nr - 1];
200 rtp->nodes[rtp->nr - 1] = NULL;
201 rtp->nr--;
202 }
203 }
e2848a0e 204 if (ret == NULL)
488514d1 205 ret = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
e2848a0e 206
c0bc9875 207 BUG_ON(radix_tree_is_indirect_ptr(ret));
1da177e4
LT
208 return ret;
209}
210
7cf9c2c7
NP
211static void radix_tree_node_rcu_free(struct rcu_head *head)
212{
213 struct radix_tree_node *node =
214 container_of(head, struct radix_tree_node, rcu_head);
b6dd0865 215 int i;
643b52b9
NP
216
217 /*
218 * must only free zeroed nodes into the slab. radix_tree_shrink
219 * can leave us with a non-NULL entry in the first slot, so clear
220 * that here to make sure.
221 */
b6dd0865
DC
222 for (i = 0; i < RADIX_TREE_MAX_TAGS; i++)
223 tag_clear(node, i, 0);
224
643b52b9
NP
225 node->slots[0] = NULL;
226 node->count = 0;
227
7cf9c2c7
NP
228 kmem_cache_free(radix_tree_node_cachep, node);
229}
230
1da177e4
LT
231static inline void
232radix_tree_node_free(struct radix_tree_node *node)
233{
7cf9c2c7 234 call_rcu(&node->rcu_head, radix_tree_node_rcu_free);
1da177e4
LT
235}
236
237/*
238 * Load up this CPU's radix_tree_node buffer with sufficient objects to
239 * ensure that the addition of a single element in the tree cannot fail. On
240 * success, return zero, with preemption disabled. On error, return -ENOMEM
241 * with preemption not disabled.
b34df792
DH
242 *
243 * To make use of this facility, the radix tree must be initialised without
244 * __GFP_WAIT being passed to INIT_RADIX_TREE().
1da177e4 245 */
5e4c0d97 246static int __radix_tree_preload(gfp_t gfp_mask)
1da177e4
LT
247{
248 struct radix_tree_preload *rtp;
249 struct radix_tree_node *node;
250 int ret = -ENOMEM;
251
252 preempt_disable();
253 rtp = &__get_cpu_var(radix_tree_preloads);
254 while (rtp->nr < ARRAY_SIZE(rtp->nodes)) {
255 preempt_enable();
488514d1 256 node = kmem_cache_alloc(radix_tree_node_cachep, gfp_mask);
1da177e4
LT
257 if (node == NULL)
258 goto out;
259 preempt_disable();
260 rtp = &__get_cpu_var(radix_tree_preloads);
261 if (rtp->nr < ARRAY_SIZE(rtp->nodes))
262 rtp->nodes[rtp->nr++] = node;
263 else
264 kmem_cache_free(radix_tree_node_cachep, node);
265 }
266 ret = 0;
267out:
268 return ret;
269}
5e4c0d97
JK
270
271/*
272 * Load up this CPU's radix_tree_node buffer with sufficient objects to
273 * ensure that the addition of a single element in the tree cannot fail. On
274 * success, return zero, with preemption disabled. On error, return -ENOMEM
275 * with preemption not disabled.
276 *
277 * To make use of this facility, the radix tree must be initialised without
278 * __GFP_WAIT being passed to INIT_RADIX_TREE().
279 */
280int radix_tree_preload(gfp_t gfp_mask)
281{
 282 /* Warn on nonsensical use... */
283 WARN_ON_ONCE(!(gfp_mask & __GFP_WAIT));
284 return __radix_tree_preload(gfp_mask);
285}
d7f0923d 286EXPORT_SYMBOL(radix_tree_preload);
1da177e4 287
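/*
 * Illustrative usage sketch, not part of the original file (my_lock,
 * my_tree, index and item are hypothetical).  The usual pattern is to
 * preload outside the lock, then insert with the lock held:
 *
 *	if (radix_tree_preload(GFP_KERNEL))
 *		return -ENOMEM;
 *	spin_lock(&my_lock);
 *	error = radix_tree_insert(&my_tree, index, item);
 *	spin_unlock(&my_lock);
 *	radix_tree_preload_end();	-- re-enables preemption
 */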
5e4c0d97
JK
288/*
 289 * The same as radix_tree_preload(), except preloading is not guaranteed:
 290 * we do it only if we decide it helps. On success, return zero with preemption
291 * disabled. On error, return -ENOMEM with preemption not disabled.
292 */
293int radix_tree_maybe_preload(gfp_t gfp_mask)
294{
295 if (gfp_mask & __GFP_WAIT)
296 return __radix_tree_preload(gfp_mask);
297 /* Preloading doesn't help anything with this gfp mask, skip it */
298 preempt_disable();
299 return 0;
300}
301EXPORT_SYMBOL(radix_tree_maybe_preload);
302
1da177e4
LT
303/*
 304 * Return the maximum key which can be stored in a
305 * radix tree with height HEIGHT.
306 */
307static inline unsigned long radix_tree_maxindex(unsigned int height)
308{
309 return height_to_maxindex[height];
310}
311
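/*
 * For example (illustrative, not in the original source), with
 * RADIX_TREE_MAP_SHIFT == 6 on a 64-bit machine: maxindex(0) == 0,
 * maxindex(1) == 63, maxindex(2) == 4095; i.e. a tree of height h
 * covers indices 0 .. 2^(6*h) - 1, saturating at ~0UL once 6*h
 * reaches BITS_PER_LONG.
 */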
312/*
313 * Extend a radix tree so it can store key @index.
314 */
315static int radix_tree_extend(struct radix_tree_root *root, unsigned long index)
316{
317 struct radix_tree_node *node;
e2bdb933 318 struct radix_tree_node *slot;
1da177e4 319 unsigned int height;
1da177e4
LT
320 int tag;
321
322 /* Figure out what the height should be. */
323 height = root->height + 1;
324 while (index > radix_tree_maxindex(height))
325 height++;
326
327 if (root->rnode == NULL) {
328 root->height = height;
329 goto out;
330 }
331
1da177e4 332 do {
7cf9c2c7 333 unsigned int newheight;
1da177e4
LT
334 if (!(node = radix_tree_node_alloc(root)))
335 return -ENOMEM;
336
1da177e4 337 /* Propagate the aggregated tag info into the new root */
daff89f3 338 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
612d6c19 339 if (root_tag_get(root, tag))
1da177e4
LT
340 tag_set(node, tag, 0);
341 }
342
e2bdb933 343 /* Increase the height. */
7cf9c2c7
NP
344 newheight = root->height+1;
345 node->height = newheight;
1da177e4 346 node->count = 1;
e2bdb933
HD
347 node->parent = NULL;
348 slot = root->rnode;
349 if (newheight > 1) {
350 slot = indirect_to_ptr(slot);
351 slot->parent = node;
352 }
353 node->slots[0] = slot;
27d20fdd 354 node = ptr_to_indirect(node);
7cf9c2c7
NP
355 rcu_assign_pointer(root->rnode, node);
356 root->height = newheight;
1da177e4
LT
357 } while (height > root->height);
358out:
359 return 0;
360}
361
362/**
139e5616 363 * __radix_tree_create - create a slot in a radix tree
1da177e4
LT
364 * @root: radix tree root
365 * @index: index key
139e5616
JW
366 * @nodep: returns node
367 * @slotp: returns slot
1da177e4 368 *
139e5616
JW
369 * Create, if necessary, and return the node and slot for an item
370 * at position @index in the radix tree @root.
371 *
372 * Until there is more than one item in the tree, no nodes are
373 * allocated and @root->rnode is used as a direct slot instead of
374 * pointing to a node, in which case *@nodep will be NULL.
375 *
376 * Returns -ENOMEM, or 0 for success.
1da177e4 377 */
139e5616
JW
378int __radix_tree_create(struct radix_tree_root *root, unsigned long index,
379 struct radix_tree_node **nodep, void ***slotp)
1da177e4 380{
201b6264 381 struct radix_tree_node *node = NULL, *slot;
139e5616 382 unsigned int height, shift, offset;
1da177e4
LT
383 int error;
384
385 /* Make sure the tree is high enough. */
612d6c19 386 if (index > radix_tree_maxindex(root->height)) {
1da177e4
LT
387 error = radix_tree_extend(root, index);
388 if (error)
389 return error;
390 }
391
27d20fdd 392 slot = indirect_to_ptr(root->rnode);
c0bc9875 393
1da177e4
LT
394 height = root->height;
395 shift = (height-1) * RADIX_TREE_MAP_SHIFT;
396
397 offset = 0; /* uninitialised var warning */
612d6c19 398 while (height > 0) {
201b6264 399 if (slot == NULL) {
1da177e4 400 /* Have to add a child node. */
201b6264 401 if (!(slot = radix_tree_node_alloc(root)))
1da177e4 402 return -ENOMEM;
7cf9c2c7 403 slot->height = height;
e2bdb933 404 slot->parent = node;
201b6264 405 if (node) {
7cf9c2c7 406 rcu_assign_pointer(node->slots[offset], slot);
1da177e4 407 node->count++;
201b6264 408 } else
27d20fdd 409 rcu_assign_pointer(root->rnode, ptr_to_indirect(slot));
1da177e4
LT
410 }
411
412 /* Go a level down */
413 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
201b6264
CL
414 node = slot;
415 slot = node->slots[offset];
1da177e4
LT
416 shift -= RADIX_TREE_MAP_SHIFT;
417 height--;
612d6c19 418 }
1da177e4 419
139e5616
JW
420 if (nodep)
421 *nodep = node;
422 if (slotp)
423 *slotp = node ? node->slots + offset : (void **)&root->rnode;
424 return 0;
425}
426
427/**
428 * radix_tree_insert - insert into a radix tree
429 * @root: radix tree root
430 * @index: index key
431 * @item: item to insert
432 *
433 * Insert an item into the radix tree at position @index.
434 */
435int radix_tree_insert(struct radix_tree_root *root,
436 unsigned long index, void *item)
437{
438 struct radix_tree_node *node;
439 void **slot;
440 int error;
441
442 BUG_ON(radix_tree_is_indirect_ptr(item));
443
444 error = __radix_tree_create(root, index, &node, &slot);
445 if (error)
446 return error;
447 if (*slot != NULL)
1da177e4 448 return -EEXIST;
139e5616 449 rcu_assign_pointer(*slot, item);
201b6264 450
612d6c19
NP
451 if (node) {
452 node->count++;
139e5616
JW
453 BUG_ON(tag_get(node, 0, index & RADIX_TREE_MAP_MASK));
454 BUG_ON(tag_get(node, 1, index & RADIX_TREE_MAP_MASK));
612d6c19 455 } else {
612d6c19
NP
456 BUG_ON(root_tag_get(root, 0));
457 BUG_ON(root_tag_get(root, 1));
458 }
1da177e4 459
1da177e4
LT
460 return 0;
461}
462EXPORT_SYMBOL(radix_tree_insert);
463
139e5616
JW
464/**
465 * __radix_tree_lookup - lookup an item in a radix tree
466 * @root: radix tree root
467 * @index: index key
468 * @nodep: returns node
469 * @slotp: returns slot
470 *
471 * Lookup and return the item at position @index in the radix
472 * tree @root.
473 *
474 * Until there is more than one item in the tree, no nodes are
475 * allocated and @root->rnode is used as a direct slot instead of
476 * pointing to a node, in which case *@nodep will be NULL.
7cf9c2c7 477 */
139e5616
JW
478void *__radix_tree_lookup(struct radix_tree_root *root, unsigned long index,
479 struct radix_tree_node **nodep, void ***slotp)
1da177e4 480{
139e5616 481 struct radix_tree_node *node, *parent;
1da177e4 482 unsigned int height, shift;
139e5616 483 void **slot;
612d6c19 484
2676a58c 485 node = rcu_dereference_raw(root->rnode);
7cf9c2c7 486 if (node == NULL)
1da177e4
LT
487 return NULL;
488
c0bc9875 489 if (!radix_tree_is_indirect_ptr(node)) {
7cf9c2c7
NP
490 if (index > 0)
491 return NULL;
139e5616
JW
492
493 if (nodep)
494 *nodep = NULL;
495 if (slotp)
496 *slotp = (void **)&root->rnode;
497 return node;
7cf9c2c7 498 }
27d20fdd 499 node = indirect_to_ptr(node);
7cf9c2c7
NP
500
501 height = node->height;
502 if (index > radix_tree_maxindex(height))
503 return NULL;
612d6c19 504
1da177e4 505 shift = (height-1) * RADIX_TREE_MAP_SHIFT;
1da177e4 506
7cf9c2c7 507 do {
139e5616
JW
508 parent = node;
509 slot = node->slots + ((index >> shift) & RADIX_TREE_MAP_MASK);
2676a58c 510 node = rcu_dereference_raw(*slot);
7cf9c2c7 511 if (node == NULL)
1da177e4
LT
512 return NULL;
513
1da177e4
LT
514 shift -= RADIX_TREE_MAP_SHIFT;
515 height--;
7cf9c2c7 516 } while (height > 0);
1da177e4 517
139e5616
JW
518 if (nodep)
519 *nodep = parent;
520 if (slotp)
521 *slotp = slot;
522 return node;
b72b71c6
HS
523}
524
525/**
526 * radix_tree_lookup_slot - lookup a slot in a radix tree
527 * @root: radix tree root
528 * @index: index key
529 *
530 * Returns: the slot corresponding to the position @index in the
531 * radix tree @root. This is useful for update-if-exists operations.
532 *
533 * This function can be called under rcu_read_lock iff the slot is not
534 * modified by radix_tree_replace_slot, otherwise it must be called
535 * exclusive from other writers. Any dereference of the slot must be done
536 * using radix_tree_deref_slot.
537 */
538void **radix_tree_lookup_slot(struct radix_tree_root *root, unsigned long index)
539{
139e5616
JW
540 void **slot;
541
542 if (!__radix_tree_lookup(root, index, NULL, &slot))
543 return NULL;
544 return slot;
a4331366 545}
a4331366
HR
546EXPORT_SYMBOL(radix_tree_lookup_slot);
547
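/*
 * Illustrative RCU-side sketch, not part of the original file (my_tree
 * is hypothetical): dereference the slot and retry if a concurrent
 * shrink has made the entry stale:
 *
 *	rcu_read_lock();
 * repeat:
 *	slot = radix_tree_lookup_slot(&my_tree, index);
 *	item = slot ? radix_tree_deref_slot(slot) : NULL;
 *	if (radix_tree_deref_retry(item))
 *		goto repeat;
 *	rcu_read_unlock();
 */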
548/**
549 * radix_tree_lookup - perform lookup operation on a radix tree
550 * @root: radix tree root
551 * @index: index key
552 *
553 * Lookup the item at the position @index in the radix tree @root.
7cf9c2c7
NP
554 *
555 * This function can be called under rcu_read_lock, however the caller
556 * must manage lifetimes of leaf nodes (eg. RCU may also be used to free
557 * them safely). No RCU barriers are required to access or modify the
558 * returned item, however.
a4331366
HR
559 */
560void *radix_tree_lookup(struct radix_tree_root *root, unsigned long index)
561{
139e5616 562 return __radix_tree_lookup(root, index, NULL, NULL);
1da177e4
LT
563}
564EXPORT_SYMBOL(radix_tree_lookup);
565
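/*
 * Illustrative sketch, not part of the original file (my_tree and
 * my_get_ref() are hypothetical): a lookup under RCU must pin the item
 * before the read-side critical section ends:
 *
 *	rcu_read_lock();
 *	item = radix_tree_lookup(&my_tree, index);
 *	if (item && !my_get_ref(item))
 *		item = NULL;	-- raced with deletion and free
 *	rcu_read_unlock();
 */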
566/**
567 * radix_tree_tag_set - set a tag on a radix tree node
568 * @root: radix tree root
569 * @index: index key
570 * @tag: tag index
571 *
daff89f3
JC
572 * Set the search tag (which must be < RADIX_TREE_MAX_TAGS)
 573 * corresponding to @index in the radix tree, from
1da177e4
LT
574 * the root all the way down to the leaf node.
575 *
576 * Returns the address of the tagged item. Setting a tag on a not-present
577 * item is a bug.
578 */
579void *radix_tree_tag_set(struct radix_tree_root *root,
daff89f3 580 unsigned long index, unsigned int tag)
1da177e4
LT
581{
582 unsigned int height, shift;
201b6264 583 struct radix_tree_node *slot;
1da177e4
LT
584
585 height = root->height;
4c91c364 586 BUG_ON(index > radix_tree_maxindex(height));
1da177e4 587
27d20fdd 588 slot = indirect_to_ptr(root->rnode);
612d6c19 589 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
1da177e4
LT
590
591 while (height > 0) {
592 int offset;
593
594 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
d5274261
NP
595 if (!tag_get(slot, tag, offset))
596 tag_set(slot, tag, offset);
201b6264
CL
597 slot = slot->slots[offset];
598 BUG_ON(slot == NULL);
1da177e4
LT
599 shift -= RADIX_TREE_MAP_SHIFT;
600 height--;
601 }
602
612d6c19
NP
603 /* set the root's tag bit */
604 if (slot && !root_tag_get(root, tag))
605 root_tag_set(root, tag);
606
201b6264 607 return slot;
1da177e4
LT
608}
609EXPORT_SYMBOL(radix_tree_tag_set);
610
611/**
612 * radix_tree_tag_clear - clear a tag on a radix tree node
613 * @root: radix tree root
614 * @index: index key
615 * @tag: tag index
616 *
daff89f3
JC
617 * Clear the search tag (which must be < RADIX_TREE_MAX_TAGS)
618 * corresponding to @index in the radix tree. If
1da177e4
LT
619 * this causes the leaf node to have no tags set then clear the tag in the
620 * next-to-leaf node, etc.
621 *
 622 * Returns the address of the tagged item on success, else NULL; i.e. it
623 * has the same return value and semantics as radix_tree_lookup().
624 */
625void *radix_tree_tag_clear(struct radix_tree_root *root,
daff89f3 626 unsigned long index, unsigned int tag)
1da177e4 627{
e2bdb933 628 struct radix_tree_node *node = NULL;
612d6c19 629 struct radix_tree_node *slot = NULL;
1da177e4 630 unsigned int height, shift;
e2bdb933 631 int uninitialized_var(offset);
1da177e4
LT
632
633 height = root->height;
634 if (index > radix_tree_maxindex(height))
635 goto out;
636
e2bdb933 637 shift = height * RADIX_TREE_MAP_SHIFT;
27d20fdd 638 slot = indirect_to_ptr(root->rnode);
1da177e4 639
e2bdb933 640 while (shift) {
201b6264 641 if (slot == NULL)
1da177e4
LT
642 goto out;
643
e2bdb933 644 shift -= RADIX_TREE_MAP_SHIFT;
1da177e4 645 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
e2bdb933 646 node = slot;
201b6264 647 slot = slot->slots[offset];
1da177e4
LT
648 }
649
612d6c19 650 if (slot == NULL)
1da177e4
LT
651 goto out;
652
e2bdb933
HD
653 while (node) {
654 if (!tag_get(node, tag, offset))
d5274261 655 goto out;
e2bdb933
HD
656 tag_clear(node, tag, offset);
657 if (any_tag_set(node, tag))
6e954b9e 658 goto out;
e2bdb933
HD
659
660 index >>= RADIX_TREE_MAP_SHIFT;
661 offset = index & RADIX_TREE_MAP_MASK;
662 node = node->parent;
612d6c19
NP
663 }
664
665 /* clear the root's tag bit */
666 if (root_tag_get(root, tag))
667 root_tag_clear(root, tag);
668
1da177e4 669out:
612d6c19 670 return slot;
1da177e4
LT
671}
672EXPORT_SYMBOL(radix_tree_tag_clear);
673
1da177e4 674/**
32605a18
MT
675 * radix_tree_tag_get - get a tag on a radix tree node
676 * @root: radix tree root
677 * @index: index key
daff89f3 678 * @tag: tag index (< RADIX_TREE_MAX_TAGS)
1da177e4 679 *
32605a18 680 * Return values:
1da177e4 681 *
612d6c19
NP
682 * 0: tag not present or not set
683 * 1: tag set
ce82653d
DH
684 *
685 * Note that the return value of this function may not be relied on, even if
686 * the RCU lock is held, unless tag modification and node deletion are excluded
687 * from concurrency.
1da177e4
LT
688 */
689int radix_tree_tag_get(struct radix_tree_root *root,
daff89f3 690 unsigned long index, unsigned int tag)
1da177e4
LT
691{
692 unsigned int height, shift;
7cf9c2c7 693 struct radix_tree_node *node;
1da177e4 694
612d6c19
NP
695 /* check the root's tag bit */
696 if (!root_tag_get(root, tag))
697 return 0;
698
2676a58c 699 node = rcu_dereference_raw(root->rnode);
7cf9c2c7
NP
700 if (node == NULL)
701 return 0;
702
c0bc9875 703 if (!radix_tree_is_indirect_ptr(node))
7cf9c2c7 704 return (index == 0);
27d20fdd 705 node = indirect_to_ptr(node);
7cf9c2c7
NP
706
707 height = node->height;
708 if (index > radix_tree_maxindex(height))
709 return 0;
612d6c19 710
1da177e4 711 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
1da177e4
LT
712
713 for ( ; ; ) {
714 int offset;
715
7cf9c2c7 716 if (node == NULL)
1da177e4
LT
717 return 0;
718
719 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
7cf9c2c7 720 if (!tag_get(node, tag, offset))
3fa36acb 721 return 0;
ce82653d 722 if (height == 1)
3fa36acb 723 return 1;
2676a58c 724 node = rcu_dereference_raw(node->slots[offset]);
1da177e4
LT
725 shift -= RADIX_TREE_MAP_SHIFT;
726 height--;
727 }
728}
729EXPORT_SYMBOL(radix_tree_tag_get);
1da177e4 730
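/*
 * Tag usage sketch (illustrative, not part of the original file; MY_TAG
 * must be < RADIX_TREE_MAX_TAGS, and tag modification requires write-side
 * exclusion by the caller):
 *
 *	radix_tree_tag_set(&my_tree, index, MY_TAG);
 *	...
 *	if (radix_tree_tag_get(&my_tree, index, MY_TAG))
 *		radix_tree_tag_clear(&my_tree, index, MY_TAG);
 */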
78c1d784
KK
731/**
732 * radix_tree_next_chunk - find next chunk of slots for iteration
733 *
734 * @root: radix tree root
735 * @iter: iterator state
736 * @flags: RADIX_TREE_ITER_* flags and tag index
737 * Returns: pointer to chunk first slot, or NULL if iteration is over
738 */
739void **radix_tree_next_chunk(struct radix_tree_root *root,
740 struct radix_tree_iter *iter, unsigned flags)
741{
742 unsigned shift, tag = flags & RADIX_TREE_ITER_TAG_MASK;
743 struct radix_tree_node *rnode, *node;
744 unsigned long index, offset;
745
746 if ((flags & RADIX_TREE_ITER_TAGGED) && !root_tag_get(root, tag))
747 return NULL;
748
749 /*
750 * Catch next_index overflow after ~0UL. iter->index never overflows
751 * during iterating; it can be zero only at the beginning.
752 * And we cannot overflow iter->next_index in a single step,
753 * because RADIX_TREE_MAP_SHIFT < BITS_PER_LONG.
fffaee36
KK
754 *
 755 * This condition is also used by radix_tree_next_slot() to stop
 756 * contiguous iterating, and to forbid switching to the next chunk.
78c1d784
KK
757 */
758 index = iter->next_index;
759 if (!index && iter->index)
760 return NULL;
761
762 rnode = rcu_dereference_raw(root->rnode);
763 if (radix_tree_is_indirect_ptr(rnode)) {
764 rnode = indirect_to_ptr(rnode);
765 } else if (rnode && !index) {
766 /* Single-slot tree */
767 iter->index = 0;
768 iter->next_index = 1;
769 iter->tags = 1;
770 return (void **)&root->rnode;
771 } else
772 return NULL;
773
774restart:
775 shift = (rnode->height - 1) * RADIX_TREE_MAP_SHIFT;
776 offset = index >> shift;
777
778 /* Index outside of the tree */
779 if (offset >= RADIX_TREE_MAP_SIZE)
780 return NULL;
781
782 node = rnode;
783 while (1) {
784 if ((flags & RADIX_TREE_ITER_TAGGED) ?
785 !test_bit(offset, node->tags[tag]) :
786 !node->slots[offset]) {
787 /* Hole detected */
788 if (flags & RADIX_TREE_ITER_CONTIG)
789 return NULL;
790
791 if (flags & RADIX_TREE_ITER_TAGGED)
792 offset = radix_tree_find_next_bit(
793 node->tags[tag],
794 RADIX_TREE_MAP_SIZE,
795 offset + 1);
796 else
797 while (++offset < RADIX_TREE_MAP_SIZE) {
798 if (node->slots[offset])
799 break;
800 }
801 index &= ~((RADIX_TREE_MAP_SIZE << shift) - 1);
802 index += offset << shift;
803 /* Overflow after ~0UL */
804 if (!index)
805 return NULL;
806 if (offset == RADIX_TREE_MAP_SIZE)
807 goto restart;
808 }
809
 810 /* This is a leaf node */
811 if (!shift)
812 break;
813
814 node = rcu_dereference_raw(node->slots[offset]);
815 if (node == NULL)
816 goto restart;
817 shift -= RADIX_TREE_MAP_SHIFT;
818 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
819 }
820
821 /* Update the iterator state */
822 iter->index = index;
823 iter->next_index = (index | RADIX_TREE_MAP_MASK) + 1;
824
825 /* Construct iter->tags bit-mask from node->tags[tag] array */
826 if (flags & RADIX_TREE_ITER_TAGGED) {
827 unsigned tag_long, tag_bit;
828
829 tag_long = offset / BITS_PER_LONG;
830 tag_bit = offset % BITS_PER_LONG;
831 iter->tags = node->tags[tag][tag_long] >> tag_bit;
832 /* This never happens if RADIX_TREE_TAG_LONGS == 1 */
833 if (tag_long < RADIX_TREE_TAG_LONGS - 1) {
834 /* Pick tags from next element */
835 if (tag_bit)
836 iter->tags |= node->tags[tag][tag_long + 1] <<
837 (BITS_PER_LONG - tag_bit);
838 /* Clip chunk size, here only BITS_PER_LONG tags */
839 iter->next_index = index + BITS_PER_LONG;
840 }
841 }
842
843 return node->slots + offset;
844}
845EXPORT_SYMBOL(radix_tree_next_chunk);
846
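/*
 * Callers normally reach radix_tree_next_chunk() through the iterator
 * macros in linux/radix-tree.h.  Illustrative sketch, not part of the
 * original file (my_tree is hypothetical):
 *
 *	struct radix_tree_iter iter;
 *	void **slot;
 *
 *	rcu_read_lock();
 *	radix_tree_for_each_slot(slot, &my_tree, &iter, 0) {
 *		void *item = radix_tree_deref_slot(slot);
 *		-- iter.index holds the index of the current slot
 *	}
 *	rcu_read_unlock();
 */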
ebf8aa44
JK
847/**
848 * radix_tree_range_tag_if_tagged - for each item in given range set given
849 * tag if item has another tag set
850 * @root: radix tree root
851 * @first_indexp: pointer to a starting index of a range to scan
852 * @last_index: last index of a range to scan
853 * @nr_to_tag: maximum number items to tag
854 * @iftag: tag index to test
855 * @settag: tag index to set if tested tag is set
856 *
857 * This function scans range of radix tree from first_index to last_index
858 * (inclusive). For each item in the range if iftag is set, the function sets
859 * also settag. The function stops either after tagging nr_to_tag items or
860 * after reaching last_index.
861 *
144dcfc0
DC
862 * The tags must be set from the leaf level only and propagated back up the
863 * path to the root. We must do this so that we resolve the full path before
864 * setting any tags on intermediate nodes. If we set tags as we descend, then
865 * we can get to the leaf node and find that the index that has the iftag
 866 * set is outside the range we are scanning. This results in dangling tags and
867 * can lead to problems with later tag operations (e.g. livelocks on lookups).
868 *
ebf8aa44
JK
869 * The function returns number of leaves where the tag was set and sets
870 * *first_indexp to the first unscanned index.
d5ed3a4a
JK
871 * WARNING! *first_indexp can wrap if last_index is ULONG_MAX. Caller must
872 * be prepared to handle that.
ebf8aa44
JK
873 */
874unsigned long radix_tree_range_tag_if_tagged(struct radix_tree_root *root,
875 unsigned long *first_indexp, unsigned long last_index,
876 unsigned long nr_to_tag,
877 unsigned int iftag, unsigned int settag)
878{
144dcfc0 879 unsigned int height = root->height;
e2bdb933 880 struct radix_tree_node *node = NULL;
144dcfc0
DC
881 struct radix_tree_node *slot;
882 unsigned int shift;
883 unsigned long tagged = 0;
884 unsigned long index = *first_indexp;
ebf8aa44
JK
885
886 last_index = min(last_index, radix_tree_maxindex(height));
887 if (index > last_index)
888 return 0;
889 if (!nr_to_tag)
890 return 0;
891 if (!root_tag_get(root, iftag)) {
892 *first_indexp = last_index + 1;
893 return 0;
894 }
895 if (height == 0) {
896 *first_indexp = last_index + 1;
897 root_tag_set(root, settag);
898 return 1;
899 }
900
901 shift = (height - 1) * RADIX_TREE_MAP_SHIFT;
27d20fdd 902 slot = indirect_to_ptr(root->rnode);
ebf8aa44
JK
903
904 for (;;) {
e2bdb933 905 unsigned long upindex;
ebf8aa44
JK
906 int offset;
907
908 offset = (index >> shift) & RADIX_TREE_MAP_MASK;
909 if (!slot->slots[offset])
910 goto next;
911 if (!tag_get(slot, iftag, offset))
912 goto next;
e2bdb933 913 if (shift) {
144dcfc0 914 /* Go down one level */
144dcfc0 915 shift -= RADIX_TREE_MAP_SHIFT;
e2bdb933 916 node = slot;
144dcfc0
DC
917 slot = slot->slots[offset];
918 continue;
919 }
920
921 /* tag the leaf */
922 tagged++;
ebf8aa44 923 tag_set(slot, settag, offset);
144dcfc0
DC
924
925 /* walk back up the path tagging interior nodes */
e2bdb933
HD
926 upindex = index;
927 while (node) {
928 upindex >>= RADIX_TREE_MAP_SHIFT;
929 offset = upindex & RADIX_TREE_MAP_MASK;
930
144dcfc0 931 /* stop if we find a node with the tag already set */
e2bdb933 932 if (tag_get(node, settag, offset))
144dcfc0 933 break;
e2bdb933
HD
934 tag_set(node, settag, offset);
935 node = node->parent;
ebf8aa44 936 }
144dcfc0 937
e2bdb933
HD
938 /*
939 * Small optimization: now clear that node pointer.
940 * Since all of this slot's ancestors now have the tag set
941 * from setting it above, we have no further need to walk
942 * back up the tree setting tags, until we update slot to
943 * point to another radix_tree_node.
944 */
945 node = NULL;
946
ebf8aa44
JK
947next:
948 /* Go to next item at level determined by 'shift' */
949 index = ((index >> shift) + 1) << shift;
d5ed3a4a
JK
950 /* Overflow can happen when last_index is ~0UL... */
951 if (index > last_index || !index)
ebf8aa44
JK
952 break;
953 if (tagged >= nr_to_tag)
954 break;
955 while (((index >> shift) & RADIX_TREE_MAP_MASK) == 0) {
956 /*
957 * We've fully scanned this node. Go up. Because
958 * last_index is guaranteed to be in the tree, what
959 * we do below cannot wander astray.
960 */
e2bdb933 961 slot = slot->parent;
ebf8aa44
JK
962 shift += RADIX_TREE_MAP_SHIFT;
963 }
964 }
965 /*
ac15ee69
TO
 966 * There is no need to set the root tag if settag was not set on any
 967 * slot within the range from *first_indexp to last_index.
ebf8aa44 968 */
ac15ee69
TO
969 if (tagged > 0)
970 root_tag_set(root, settag);
ebf8aa44
JK
971 *first_indexp = index;
972
973 return tagged;
974}
975EXPORT_SYMBOL(radix_tree_range_tag_if_tagged);
976
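/*
 * Hedged example, not part of the original file (my_tree and the tag
 * names are hypothetical): this is the pattern used by writeback-style
 * code, which retags "dirty" entries in a range as "to write" before
 * processing them:
 *
 *	tagged = radix_tree_range_tag_if_tagged(&my_tree, &start, end,
 *			BATCH_SIZE, MY_DIRTY_TAG, MY_TOWRITE_TAG);
 *	-- on return, start points at the first unscanned index
 */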
1da177e4
LT
977/**
978 * radix_tree_gang_lookup - perform multiple lookup on a radix tree
979 * @root: radix tree root
980 * @results: where the results of the lookup are placed
981 * @first_index: start the lookup from this key
982 * @max_items: place up to this many items at *results
983 *
984 * Performs an index-ascending scan of the tree for present items. Places
985 * them at *@results and returns the number of items which were placed at
986 * *@results.
987 *
988 * The implementation is naive.
7cf9c2c7
NP
989 *
990 * Like radix_tree_lookup, radix_tree_gang_lookup may be called under
991 * rcu_read_lock. In this case, rather than the returned results being
992 * an atomic snapshot of the tree at a single point in time, the semantics
993 * of an RCU protected gang lookup are as though multiple radix_tree_lookups
 994 * have been issued under individual RCU read locks, with the results
 995 * stored in 'results'.
1da177e4
LT
995 */
996unsigned int
997radix_tree_gang_lookup(struct radix_tree_root *root, void **results,
998 unsigned long first_index, unsigned int max_items)
999{
cebbd29e
KK
1000 struct radix_tree_iter iter;
1001 void **slot;
1002 unsigned int ret = 0;
7cf9c2c7 1003
cebbd29e 1004 if (unlikely(!max_items))
7cf9c2c7 1005 return 0;
1da177e4 1006
cebbd29e
KK
1007 radix_tree_for_each_slot(slot, root, &iter, first_index) {
1008 results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
1009 if (!results[ret])
1010 continue;
1011 if (++ret == max_items)
1da177e4 1012 break;
1da177e4 1013 }
7cf9c2c7 1014
1da177e4
LT
1015 return ret;
1016}
1017EXPORT_SYMBOL(radix_tree_gang_lookup);
1018
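/*
 * Illustrative sketch, not part of the original file (my_tree is
 * hypothetical): a batched scan in ascending index order.
 *
 *	void *batch[16];
 *	unsigned int n;
 *
 *	n = radix_tree_gang_lookup(&my_tree, batch, first_index, 16);
 *	-- up to 16 present items, in ascending index order; to continue
 *	-- the scan, derive the next start index from the last item
 */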
47feff2c
NP
1019/**
1020 * radix_tree_gang_lookup_slot - perform multiple slot lookup on radix tree
1021 * @root: radix tree root
1022 * @results: where the results of the lookup are placed
6328650b 1023 * @indices: where their indices should be placed (but usually NULL)
47feff2c
NP
1024 * @first_index: start the lookup from this key
1025 * @max_items: place up to this many items at *results
1026 *
1027 * Performs an index-ascending scan of the tree for present items. Places
1028 * their slots at *@results and returns the number of items which were
1029 * placed at *@results.
1030 *
1031 * The implementation is naive.
1032 *
1033 * Like radix_tree_gang_lookup as far as RCU and locking goes. Slots must
1034 * be dereferenced with radix_tree_deref_slot, and if using only RCU
1035 * protection, radix_tree_deref_slot may fail requiring a retry.
1036 */
1037unsigned int
6328650b
HD
1038radix_tree_gang_lookup_slot(struct radix_tree_root *root,
1039 void ***results, unsigned long *indices,
47feff2c
NP
1040 unsigned long first_index, unsigned int max_items)
1041{
cebbd29e
KK
1042 struct radix_tree_iter iter;
1043 void **slot;
1044 unsigned int ret = 0;
47feff2c 1045
cebbd29e 1046 if (unlikely(!max_items))
47feff2c
NP
1047 return 0;
1048
cebbd29e
KK
1049 radix_tree_for_each_slot(slot, root, &iter, first_index) {
1050 results[ret] = slot;
6328650b 1051 if (indices)
cebbd29e
KK
1052 indices[ret] = iter.index;
1053 if (++ret == max_items)
47feff2c 1054 break;
47feff2c
NP
1055 }
1056
1057 return ret;
1058}
1059EXPORT_SYMBOL(radix_tree_gang_lookup_slot);
1060
1da177e4
LT
1061/**
1062 * radix_tree_gang_lookup_tag - perform multiple lookup on a radix tree
1063 * based on a tag
1064 * @root: radix tree root
1065 * @results: where the results of the lookup are placed
1066 * @first_index: start the lookup from this key
1067 * @max_items: place up to this many items at *results
daff89f3 1068 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
1da177e4
LT
1069 *
1070 * Performs an index-ascending scan of the tree for present items which
1071 * have the tag indexed by @tag set. Places the items at *@results and
1072 * returns the number of items which were placed at *@results.
1073 */
1074unsigned int
1075radix_tree_gang_lookup_tag(struct radix_tree_root *root, void **results,
daff89f3
JC
1076 unsigned long first_index, unsigned int max_items,
1077 unsigned int tag)
1da177e4 1078{
cebbd29e
KK
1079 struct radix_tree_iter iter;
1080 void **slot;
1081 unsigned int ret = 0;
612d6c19 1082
cebbd29e 1083 if (unlikely(!max_items))
7cf9c2c7
NP
1084 return 0;
1085
cebbd29e
KK
1086 radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
1087 results[ret] = indirect_to_ptr(rcu_dereference_raw(*slot));
1088 if (!results[ret])
1089 continue;
1090 if (++ret == max_items)
1da177e4 1091 break;
1da177e4 1092 }
7cf9c2c7 1093
1da177e4
LT
1094 return ret;
1095}
1096EXPORT_SYMBOL(radix_tree_gang_lookup_tag);
1097
47feff2c
NP
1098/**
1099 * radix_tree_gang_lookup_tag_slot - perform multiple slot lookup on a
1100 * radix tree based on a tag
1101 * @root: radix tree root
1102 * @results: where the results of the lookup are placed
1103 * @first_index: start the lookup from this key
1104 * @max_items: place up to this many items at *results
1105 * @tag: the tag index (< RADIX_TREE_MAX_TAGS)
1106 *
1107 * Performs an index-ascending scan of the tree for present items which
1108 * have the tag indexed by @tag set. Places the slots at *@results and
1109 * returns the number of slots which were placed at *@results.
1110 */
1111unsigned int
1112radix_tree_gang_lookup_tag_slot(struct radix_tree_root *root, void ***results,
1113 unsigned long first_index, unsigned int max_items,
1114 unsigned int tag)
1115{
cebbd29e
KK
1116 struct radix_tree_iter iter;
1117 void **slot;
1118 unsigned int ret = 0;
47feff2c 1119
cebbd29e 1120 if (unlikely(!max_items))
47feff2c
NP
1121 return 0;
1122
cebbd29e
KK
1123 radix_tree_for_each_tagged(slot, root, &iter, first_index, tag) {
1124 results[ret] = slot;
1125 if (++ret == max_items)
47feff2c 1126 break;
47feff2c
NP
1127 }
1128
1129 return ret;
1130}
1131EXPORT_SYMBOL(radix_tree_gang_lookup_tag_slot);
1132
e504f3fd
HD
1133#if defined(CONFIG_SHMEM) && defined(CONFIG_SWAP)
1134#include <linux/sched.h> /* for cond_resched() */
1135
1136/*
1137 * This linear search is at present only useful to shmem_unuse_inode().
1138 */
1139static unsigned long __locate(struct radix_tree_node *slot, void *item,
1140 unsigned long index, unsigned long *found_index)
1141{
1142 unsigned int shift, height;
1143 unsigned long i;
1144
1145 height = slot->height;
1146 shift = (height-1) * RADIX_TREE_MAP_SHIFT;
1147
1148 for ( ; height > 1; height--) {
1149 i = (index >> shift) & RADIX_TREE_MAP_MASK;
1150 for (;;) {
1151 if (slot->slots[i] != NULL)
1152 break;
1153 index &= ~((1UL << shift) - 1);
1154 index += 1UL << shift;
1155 if (index == 0)
1156 goto out; /* 32-bit wraparound */
1157 i++;
1158 if (i == RADIX_TREE_MAP_SIZE)
1159 goto out;
1160 }
1161
1162 shift -= RADIX_TREE_MAP_SHIFT;
1163 slot = rcu_dereference_raw(slot->slots[i]);
1164 if (slot == NULL)
1165 goto out;
1166 }
1167
1168 /* Bottom level: check items */
1169 for (i = 0; i < RADIX_TREE_MAP_SIZE; i++) {
1170 if (slot->slots[i] == item) {
1171 *found_index = index + i;
1172 index = 0;
1173 goto out;
1174 }
1175 }
1176 index += RADIX_TREE_MAP_SIZE;
1177out:
1178 return index;
1179}
1180
1181/**
1182 * radix_tree_locate_item - search through radix tree for item
1183 * @root: radix tree root
1184 * @item: item to be found
1185 *
1186 * Returns index where item was found, or -1 if not found.
1187 * Caller must hold no lock (since this time-consuming function needs
1188 * to be preemptible), and must check afterwards if item is still there.
1189 */
1190unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
1191{
1192 struct radix_tree_node *node;
1193 unsigned long max_index;
1194 unsigned long cur_index = 0;
1195 unsigned long found_index = -1;
1196
1197 do {
1198 rcu_read_lock();
1199 node = rcu_dereference_raw(root->rnode);
1200 if (!radix_tree_is_indirect_ptr(node)) {
1201 rcu_read_unlock();
1202 if (node == item)
1203 found_index = 0;
1204 break;
1205 }
1206
1207 node = indirect_to_ptr(node);
1208 max_index = radix_tree_maxindex(node->height);
5f30fc94
HD
1209 if (cur_index > max_index) {
1210 rcu_read_unlock();
e504f3fd 1211 break;
5f30fc94 1212 }
e504f3fd
HD
1213
1214 cur_index = __locate(node, item, cur_index, &found_index);
1215 rcu_read_unlock();
1216 cond_resched();
1217 } while (cur_index != 0 && cur_index <= max_index);
1218
1219 return found_index;
1220}
1221#else
1222unsigned long radix_tree_locate_item(struct radix_tree_root *root, void *item)
1223{
1224 return -1;
1225}
1226#endif /* CONFIG_SHMEM && CONFIG_SWAP */
47feff2c 1227
a5f51c96
NP
1228/**
 1229 * radix_tree_shrink - shrink the height of a radix tree to the minimum
 1230 * @root: radix tree root
1231 */
1232static inline void radix_tree_shrink(struct radix_tree_root *root)
1233{
1234 /* try to shrink tree height */
c0bc9875 1235 while (root->height > 0) {
a5f51c96 1236 struct radix_tree_node *to_free = root->rnode;
e2bdb933 1237 struct radix_tree_node *slot;
a5f51c96 1238
c0bc9875 1239 BUG_ON(!radix_tree_is_indirect_ptr(to_free));
27d20fdd 1240 to_free = indirect_to_ptr(to_free);
c0bc9875
NP
1241
1242 /*
 1243 * If the candidate node has more than one child, or its child
 1244 * is not at the leftmost slot, we cannot shrink.
1245 */
1246 if (to_free->count != 1)
1247 break;
1248 if (!to_free->slots[0])
1249 break;
1250
7cf9c2c7
NP
1251 /*
1252 * We don't need rcu_assign_pointer(), since we are simply
27d20fdd
NP
1253 * moving the node from one part of the tree to another: if it
1254 * was safe to dereference the old pointer to it
7cf9c2c7 1255 * (to_free->slots[0]), it will be safe to dereference the new
27d20fdd 1256 * one (root->rnode) as far as dependent read barriers go.
7cf9c2c7 1257 */
e2bdb933
HD
1258 slot = to_free->slots[0];
1259 if (root->height > 1) {
1260 slot->parent = NULL;
1261 slot = ptr_to_indirect(slot);
1262 }
1263 root->rnode = slot;
a5f51c96 1264 root->height--;
27d20fdd
NP
1265
1266 /*
1267 * We have a dilemma here. The node's slot[0] must not be
1268 * NULLed in case there are concurrent lookups expecting to
1269 * find the item. However if this was a bottom-level node,
1270 * then it may be subject to the slot pointer being visible
1271 * to callers dereferencing it. If item corresponding to
1272 * slot[0] is subsequently deleted, these callers would expect
1273 * their slot to become empty sooner or later.
1274 *
1275 * For example, lockless pagecache will look up a slot, deref
1276 * the page pointer, and if the page is 0 refcount it means it
1277 * was concurrently deleted from pagecache so try the deref
1278 * again. Fortunately there is already a requirement for logic
1279 * to retry the entire slot lookup -- the indirect pointer
1280 * problem (replacing direct root node with an indirect pointer
1281 * also results in a stale slot). So tag the slot as indirect
1282 * to force callers to retry.
1283 */
1284 if (root->height == 0)
1285 *((unsigned long *)&to_free->slots[0]) |=
1286 RADIX_TREE_INDIRECT_PTR;
1287
a5f51c96
NP
1288 radix_tree_node_free(to_free);
1289 }
1290}
1291
139e5616
JW
1292/**
1293 * __radix_tree_delete_node - try to free node after clearing a slot
1294 * @root: radix tree root
1295 * @index: index key
1296 * @node: node containing @index
1297 *
1298 * After clearing the slot at @index in @node from radix tree
1299 * rooted at @root, call this function to attempt freeing the
1300 * node and shrinking the tree.
1301 *
1302 * Returns %true if @node was freed, %false otherwise.
1303 */
1304bool __radix_tree_delete_node(struct radix_tree_root *root, unsigned long index,
1305 struct radix_tree_node *node)
1306{
1307 bool deleted = false;
1308
1309 do {
1310 struct radix_tree_node *parent;
1311
1312 if (node->count) {
1313 if (node == indirect_to_ptr(root->rnode)) {
1314 radix_tree_shrink(root);
1315 if (root->height == 0)
1316 deleted = true;
1317 }
1318 return deleted;
1319 }
1320
1321 parent = node->parent;
1322 if (parent) {
1323 index >>= RADIX_TREE_MAP_SHIFT;
1324
1325 parent->slots[index & RADIX_TREE_MAP_MASK] = NULL;
1326 parent->count--;
1327 } else {
1328 root_tag_clear_all(root);
1329 root->height = 0;
1330 root->rnode = NULL;
1331 }
1332
1333 radix_tree_node_free(node);
1334 deleted = true;
1335
1336 node = parent;
1337 } while (node);
1338
1339 return deleted;
1340}
1341
1da177e4 1342/**
53c59f26 1343 * radix_tree_delete_item - delete an item from a radix tree
1da177e4
LT
1344 * @root: radix tree root
1345 * @index: index key
53c59f26 1346 * @item: expected item
1da177e4 1347 *
53c59f26 1348 * Remove @item at @index from the radix tree rooted at @root.
1da177e4 1349 *
53c59f26
JW
1350 * Returns the address of the deleted item, or NULL if it was not present
1351 * or the entry at the given @index was not @item.
1da177e4 1352 */
53c59f26
JW
1353void *radix_tree_delete_item(struct radix_tree_root *root,
1354 unsigned long index, void *item)
1da177e4 1355{
139e5616
JW
1356 struct radix_tree_node *node;
1357 unsigned int offset;
1358 void **slot;
1359 void *entry;
d5274261 1360 int tag;
1da177e4 1361
139e5616
JW
1362 entry = __radix_tree_lookup(root, index, &node, &slot);
1363 if (!entry)
1364 return NULL;
1da177e4 1365
139e5616
JW
1366 if (item && entry != item)
1367 return NULL;
1368
1369 if (!node) {
612d6c19
NP
1370 root_tag_clear_all(root);
1371 root->rnode = NULL;
139e5616 1372 return entry;
612d6c19 1373 }
1da177e4 1374
139e5616 1375 offset = index & RADIX_TREE_MAP_MASK;
53c59f26 1376
1da177e4 1377 /*
e2bdb933
HD
1378 * Clear all tags associated with the item to be deleted.
1379 * This way of doing it would be inefficient, but seldom is any set.
1da177e4 1380 */
daff89f3 1381 for (tag = 0; tag < RADIX_TREE_MAX_TAGS; tag++) {
e2bdb933 1382 if (tag_get(node, tag, offset))
612d6c19 1383 radix_tree_tag_clear(root, index, tag);
d5274261 1384 }
1da177e4 1385
139e5616
JW
1386 node->slots[offset] = NULL;
1387 node->count--;
e2bdb933 1388
139e5616 1389 __radix_tree_delete_node(root, index, node);
612d6c19 1390
139e5616 1391 return entry;
1da177e4 1392}
53c59f26
JW
1393EXPORT_SYMBOL(radix_tree_delete_item);
1394
1395/**
1396 * radix_tree_delete - delete an item from a radix tree
1397 * @root: radix tree root
1398 * @index: index key
1399 *
1400 * Remove the item at @index from the radix tree rooted at @root.
1401 *
1402 * Returns the address of the deleted item, or NULL if it was not present.
1403 */
1404void *radix_tree_delete(struct radix_tree_root *root, unsigned long index)
1405{
1406 return radix_tree_delete_item(root, index, NULL);
1407}
1da177e4
LT
1408EXPORT_SYMBOL(radix_tree_delete);
1409
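/*
 * Illustrative sketch, not part of the original file (my_tree and
 * my_lock are hypothetical).  Deletion requires write-side exclusion;
 * freed nodes are released via RCU, so concurrent lookups stay safe:
 *
 *	spin_lock(&my_lock);
 *	item = radix_tree_delete(&my_tree, index);
 *	spin_unlock(&my_lock);
 *	-- item is the deleted entry, or NULL if none was present
 */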
1410/**
1411 * radix_tree_tagged - test whether any items in the tree are tagged
1412 * @root: radix tree root
1413 * @tag: tag to test
1414 */
daff89f3 1415int radix_tree_tagged(struct radix_tree_root *root, unsigned int tag)
1da177e4 1416{
612d6c19 1417 return root_tag_get(root, tag);
1da177e4
LT
1418}
1419EXPORT_SYMBOL(radix_tree_tagged);
1420
1421static void
51cc5068 1422radix_tree_node_ctor(void *node)
1da177e4
LT
1423{
1424 memset(node, 0, sizeof(struct radix_tree_node));
1425}
1426
1427static __init unsigned long __maxindex(unsigned int height)
1428{
430d275a
PL
1429 unsigned int width = height * RADIX_TREE_MAP_SHIFT;
1430 int shift = RADIX_TREE_INDEX_BITS - width;
1431
1432 if (shift < 0)
1433 return ~0UL;
1434 if (shift >= BITS_PER_LONG)
1435 return 0UL;
1436 return ~0UL >> shift;
1da177e4
LT
1437}
1438
1439static __init void radix_tree_init_maxindex(void)
1440{
1441 unsigned int i;
1442
1443 for (i = 0; i < ARRAY_SIZE(height_to_maxindex); i++)
1444 height_to_maxindex[i] = __maxindex(i);
1445}
1446
1da177e4
LT
1447static int radix_tree_callback(struct notifier_block *nfb,
1448 unsigned long action,
1449 void *hcpu)
1450{
1451 int cpu = (long)hcpu;
1452 struct radix_tree_preload *rtp;
1453
 1454 /* Free per-cpu pool of preloaded nodes */
8bb78442 1455 if (action == CPU_DEAD || action == CPU_DEAD_FROZEN) {
1da177e4
LT
1456 rtp = &per_cpu(radix_tree_preloads, cpu);
1457 while (rtp->nr) {
1458 kmem_cache_free(radix_tree_node_cachep,
1459 rtp->nodes[rtp->nr-1]);
1460 rtp->nodes[rtp->nr-1] = NULL;
1461 rtp->nr--;
1462 }
1463 }
1464 return NOTIFY_OK;
1465}
1da177e4
LT
1466
1467void __init radix_tree_init(void)
1468{
1469 radix_tree_node_cachep = kmem_cache_create("radix_tree_node",
1470 sizeof(struct radix_tree_node), 0,
488514d1
CL
1471 SLAB_PANIC | SLAB_RECLAIM_ACCOUNT,
1472 radix_tree_node_ctor);
1da177e4
LT
1473 radix_tree_init_maxindex();
1474 hotcpu_notifier(radix_tree_callback, 0);
1475}