1 /*
2 * GPL HEADER START
3 *
4 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License version 2 only,
8 * as published by the Free Software Foundation.
9 *
10 * This program is distributed in the hope that it will be useful, but
11 * WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * General Public License version 2 for more details (a copy is included
14 * in the LICENSE file that accompanied this code).
15 *
16 * You should have received a copy of the GNU General Public License
17 * version 2 along with this program; If not, see
18 * http://www.sun.com/software/products/lustre/docs/GPLv2.pdf
19 *
20 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
21 * CA 95054 USA or visit www.sun.com if you need additional information or
22 * have any questions.
23 *
24 * GPL HEADER END
25 */
26 /*
27 * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
28 * Use is subject to license terms.
29 */
30 /*
31 * This file is part of Lustre, http://www.lustre.org/
32 * Lustre is a trademark of Sun Microsystems, Inc.
33 *
34 * lustre/ldlm/interval_tree.c
35 *
36 * Interval tree library used by ldlm extent lock code
37 *
38 * Author: Huang Wei <huangwei@clusterfs.com>
39 * Author: Jay Xiong <jinshan.xiong@sun.com>
40 */
41 #include <lustre_dlm.h>
42 #include <obd_support.h>
43 #include <interval_tree.h>
44
45 enum {
46 INTERVAL_RED = 0,
47 INTERVAL_BLACK = 1
48 };
49
50 static inline int node_is_left_child(struct interval_node *node)
51 {
52 LASSERT(node->in_parent != NULL);
53 return node == node->in_parent->in_left;
54 }
55
56 static inline int node_is_right_child(struct interval_node *node)
57 {
58 LASSERT(node->in_parent != NULL);
59 return node == node->in_parent->in_right;
60 }
61
62 static inline int node_is_red(struct interval_node *node)
63 {
64 return node->in_color == INTERVAL_RED;
65 }
66
67 static inline int node_is_black(struct interval_node *node)
68 {
69 return node->in_color == INTERVAL_BLACK;
70 }
71
72 static inline int extent_compare(struct interval_node_extent *e1,
73 struct interval_node_extent *e2)
74 {
75 int rc;
76 if (e1->start == e2->start) {
77 if (e1->end < e2->end)
78 rc = -1;
79 else if (e1->end > e2->end)
80 rc = 1;
81 else
82 rc = 0;
83 } else {
84 if (e1->start < e2->start)
85 rc = -1;
86 else
87 rc = 1;
88 }
89 return rc;
90 }
91
92 static inline int extent_equal(struct interval_node_extent *e1,
93 struct interval_node_extent *e2)
94 {
95 return (e1->start == e2->start) && (e1->end == e2->end);
96 }
97
98 static inline int extent_overlapped(struct interval_node_extent *e1,
99 struct interval_node_extent *e2)
100 {
101 return (e1->start <= e2->end) && (e2->start <= e1->end);
102 }
103
104 static inline int node_compare(struct interval_node *n1,
105 struct interval_node *n2)
106 {
107 return extent_compare(&n1->in_extent, &n2->in_extent);
108 }
109
110 static inline int node_equal(struct interval_node *n1,
111 struct interval_node *n2)
112 {
113 return extent_equal(&n1->in_extent, &n2->in_extent);
114 }
115
116 static inline __u64 max_u64(__u64 x, __u64 y)
117 {
118 return x > y ? x : y;
119 }
120
121 static inline __u64 min_u64(__u64 x, __u64 y)
122 {
123 return x < y ? x : y;
124 }
125
126 #define interval_for_each(node, root) \
127 for (node = interval_first(root); node != NULL; \
128 node = interval_next(node))
129
130 #define interval_for_each_reverse(node, root) \
131 for (node = interval_last(root); node != NULL; \
132 node = interval_prev(node))
133
134 static struct interval_node *interval_first(struct interval_node *node)
135 {
136 if (!node)
137 return NULL;
138 while (node->in_left)
139 node = node->in_left;
140 return node;
141 }
142
143 static struct interval_node *interval_last(struct interval_node *node)
144 {
145 if (!node)
146 return NULL;
147 while (node->in_right)
148 node = node->in_right;
149 return node;
150 }
151
152 static struct interval_node *interval_next(struct interval_node *node)
153 {
154 if (!node)
155 return NULL;
156 if (node->in_right)
157 return interval_first(node->in_right);
158 while (node->in_parent && node_is_right_child(node))
159 node = node->in_parent;
160 return node->in_parent;
161 }
162
163 static struct interval_node *interval_prev(struct interval_node *node)
164 {
165 if (!node)
166 return NULL;
167
168 if (node->in_left)
169 return interval_last(node->in_left);
170
171 while (node->in_parent && node_is_left_child(node))
172 node = node->in_parent;
173
174 return node->in_parent;
175 }
176
177 enum interval_iter interval_iterate(struct interval_node *root,
178 interval_callback_t func,
179 void *data)
180 {
181 struct interval_node *node;
182 enum interval_iter rc = INTERVAL_ITER_CONT;
183
184 interval_for_each(node, root) {
185 rc = func(node, data);
186 if (rc == INTERVAL_ITER_STOP)
187 break;
188 }
189
190 return rc;
191 }
192 EXPORT_SYMBOL(interval_iterate);
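
/*
 * Illustrative sketch, not part of the original file: an interval_callback_t
 * for interval_iterate() that stops at the first extent spanning at least a
 * given width.  struct example_width_arg and example_width_cb() are
 * hypothetical names invented for this example.
 *
 * struct example_width_arg {
 *	__u64 min_width;
 *	struct interval_node *found;
 * };
 *
 * static enum interval_iter example_width_cb(struct interval_node *node,
 *					      void *data)
 * {
 *	struct example_width_arg *arg = data;
 *
 *	if (interval_high(node) - interval_low(node) >= arg->min_width) {
 *		arg->found = node;
 *		return INTERVAL_ITER_STOP;
 *	}
 *	return INTERVAL_ITER_CONT;
 * }
 *
 * A caller would invoke it as: interval_iterate(root, example_width_cb, &arg);
 */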
193
194 enum interval_iter interval_iterate_reverse(struct interval_node *root,
195 interval_callback_t func,
196 void *data)
197 {
198 struct interval_node *node;
199 enum interval_iter rc = INTERVAL_ITER_CONT;
200
201 interval_for_each_reverse(node, root) {
202 rc = func(node, data);
203 if (rc == INTERVAL_ITER_STOP)
204 break;
205 }
206
207 return rc;
208 }
209 EXPORT_SYMBOL(interval_iterate_reverse);
210
211 /* Try to find a node with the same interval in the tree; if found,
212 * return a pointer to that node, otherwise return NULL. */
213 struct interval_node *interval_find(struct interval_node *root,
214 struct interval_node_extent *ex)
215 {
216 struct interval_node *walk = root;
217 int rc;
218
219 while (walk) {
220 rc = extent_compare(ex, &walk->in_extent);
221 if (rc == 0)
222 break;
223 else if (rc < 0)
224 walk = walk->in_left;
225 else
226 walk = walk->in_right;
227 }
228
229 return walk;
230 }
231 EXPORT_SYMBOL(interval_find);
232
233 static void __rotate_change_maxhigh(struct interval_node *node,
234 struct interval_node *rotate)
235 {
236 __u64 left_max, right_max;
237
238 rotate->in_max_high = node->in_max_high;
239 left_max = node->in_left ? node->in_left->in_max_high : 0;
240 right_max = node->in_right ? node->in_right->in_max_high : 0;
241 node->in_max_high = max_u64(interval_high(node),
242 max_u64(left_max, right_max));
243 }
244
245 /* The left rotation "pivots" around the link from node to node->right, and
246 * - node will be linked to node->right's left child, and
247 * - node->right's left child will be linked to node's right child. */
248 static void __rotate_left(struct interval_node *node,
249 struct interval_node **root)
250 {
251 struct interval_node *right = node->in_right;
252 struct interval_node *parent = node->in_parent;
253
254 node->in_right = right->in_left;
255 if (node->in_right)
256 right->in_left->in_parent = node;
257
258 right->in_left = node;
259 right->in_parent = parent;
260 if (parent) {
261 if (node_is_left_child(node))
262 parent->in_left = right;
263 else
264 parent->in_right = right;
265 } else {
266 *root = right;
267 }
268 node->in_parent = right;
269
270 /* update max_high for node and right */
271 __rotate_change_maxhigh(node, right);
272 }
273
274 /* The right rotation "pivots" around the link from node to node->left, and
275 * - node will be linked to node->left's right child, and
276 * - node->left's right child will be linked to node's left child. */
277 static void __rotate_right(struct interval_node *node,
278 struct interval_node **root)
279 {
280 struct interval_node *left = node->in_left;
281 struct interval_node *parent = node->in_parent;
282
283 node->in_left = left->in_right;
284 if (node->in_left)
285 left->in_right->in_parent = node;
286 left->in_right = node;
287
288 left->in_parent = parent;
289 if (parent) {
290 if (node_is_right_child(node))
291 parent->in_right = left;
292 else
293 parent->in_left = left;
294 } else {
295 *root = left;
296 }
297 node->in_parent = left;
298
299 /* update max_high for node and left */
300 __rotate_change_maxhigh(node, left);
301 }
302
303 #define interval_swap(a, b) do { \
304 struct interval_node *c = a; a = b; b = c; \
305 } while (0)
306
307 /*
308 * Operations INSERT and DELETE, when run on a tree with n keys,
309 * take O(log n) time. Because they modify the tree, the result
310 * may violate the red-black properties. To restore these properties,
311 * we must change the colors of some of the nodes in the tree
312 * and also change the pointer structure.
313 */
314 static void interval_insert_color(struct interval_node *node,
315 struct interval_node **root)
316 {
317 struct interval_node *parent, *gparent;
318
319 while ((parent = node->in_parent) && node_is_red(parent)) {
320 gparent = parent->in_parent;
321 /* Parent is RED, so gparent must not be NULL */
322 if (node_is_left_child(parent)) {
323 struct interval_node *uncle;
324 uncle = gparent->in_right;
325 if (uncle && node_is_red(uncle)) {
326 uncle->in_color = INTERVAL_BLACK;
327 parent->in_color = INTERVAL_BLACK;
328 gparent->in_color = INTERVAL_RED;
329 node = gparent;
330 continue;
331 }
332
333 if (parent->in_right == node) {
334 __rotate_left(parent, root);
335 interval_swap(node, parent);
336 }
337
338 parent->in_color = INTERVAL_BLACK;
339 gparent->in_color = INTERVAL_RED;
340 __rotate_right(gparent, root);
341 } else {
342 struct interval_node *uncle;
343 uncle = gparent->in_left;
344 if (uncle && node_is_red(uncle)) {
345 uncle->in_color = INTERVAL_BLACK;
346 parent->in_color = INTERVAL_BLACK;
347 gparent->in_color = INTERVAL_RED;
348 node = gparent;
349 continue;
350 }
351
352 if (node_is_left_child(node)) {
353 __rotate_right(parent, root);
354 interval_swap(node, parent);
355 }
356
357 parent->in_color = INTERVAL_BLACK;
358 gparent->in_color = INTERVAL_RED;
359 __rotate_left(gparent, root);
360 }
361 }
362
363 (*root)->in_color = INTERVAL_BLACK;
364 }
365
366 struct interval_node *interval_insert(struct interval_node *node,
367 struct interval_node **root)
368
369 {
370 struct interval_node **p, *parent = NULL;
371
372 LASSERT(!interval_is_intree(node));
373 p = root;
374 while (*p) {
375 parent = *p;
376 if (node_equal(parent, node))
377 return parent;
378
379 /* max_high field must be updated after each iteration */
380 if (parent->in_max_high < interval_high(node))
381 parent->in_max_high = interval_high(node);
382
383 if (node_compare(node, parent) < 0)
384 p = &parent->in_left;
385 else
386 p = &parent->in_right;
387 }
388
389 /* link node into the tree */
390 node->in_parent = parent;
391 node->in_color = INTERVAL_RED;
392 node->in_left = node->in_right = NULL;
393 *p = node;
394
395 interval_insert_color(node, root);
396 node->in_intree = 1;
397
398 return NULL;
399 }
400 EXPORT_SYMBOL(interval_insert);
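
/*
 * Illustrative sketch, not part of the original file: how a caller might
 * initialize a node and insert it.  example_root and example_add_extent()
 * are hypothetical names; interval_set() is the extent initialization
 * helper declared in interval_tree.h.
 *
 * static struct interval_node *example_root;
 *
 * static int example_add_extent(struct interval_node *node,
 *				 __u64 start, __u64 end)
 * {
 *	struct interval_node *dup;
 *
 *	interval_set(node, start, end);
 *	dup = interval_insert(node, &example_root);
 *	if (dup)		// a node with an equal extent already exists
 *		return -EEXIST;
 *	return 0;
 * }
 */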
401
402 static inline int node_is_black_or_0(struct interval_node *node)
403 {
404 return !node || node_is_black(node);
405 }
406
407 static void interval_erase_color(struct interval_node *node,
408 struct interval_node *parent,
409 struct interval_node **root)
410 {
411 struct interval_node *tmp;
412
413 while (node_is_black_or_0(node) && node != *root) {
414 if (parent->in_left == node) {
415 tmp = parent->in_right;
416 if (node_is_red(tmp)) {
417 tmp->in_color = INTERVAL_BLACK;
418 parent->in_color = INTERVAL_RED;
419 __rotate_left(parent, root);
420 tmp = parent->in_right;
421 }
422 if (node_is_black_or_0(tmp->in_left) &&
423 node_is_black_or_0(tmp->in_right)) {
424 tmp->in_color = INTERVAL_RED;
425 node = parent;
426 parent = node->in_parent;
427 } else {
428 if (node_is_black_or_0(tmp->in_right)) {
429 struct interval_node *o_left;
430 if ((o_left = tmp->in_left))
431 o_left->in_color = INTERVAL_BLACK;
432 tmp->in_color = INTERVAL_RED;
433 __rotate_right(tmp, root);
434 tmp = parent->in_right;
435 }
436 tmp->in_color = parent->in_color;
437 parent->in_color = INTERVAL_BLACK;
438 if (tmp->in_right)
439 tmp->in_right->in_color = INTERVAL_BLACK;
440 __rotate_left(parent, root);
441 node = *root;
442 break;
443 }
444 } else {
445 tmp = parent->in_left;
446 if (node_is_red(tmp)) {
447 tmp->in_color = INTERVAL_BLACK;
448 parent->in_color = INTERVAL_RED;
449 __rotate_right(parent, root);
450 tmp = parent->in_left;
451 }
452 if (node_is_black_or_0(tmp->in_left) &&
453 node_is_black_or_0(tmp->in_right)) {
454 tmp->in_color = INTERVAL_RED;
455 node = parent;
456 parent = node->in_parent;
457 } else {
458 if (node_is_black_or_0(tmp->in_left)) {
459 struct interval_node *o_right;
460 if ((o_right = tmp->in_right))
461 o_right->in_color = INTERVAL_BLACK;
462 tmp->in_color = INTERVAL_RED;
463 __rotate_left(tmp, root);
464 tmp = parent->in_left;
465 }
466 tmp->in_color = parent->in_color;
467 parent->in_color = INTERVAL_BLACK;
468 if (tmp->in_left)
469 tmp->in_left->in_color = INTERVAL_BLACK;
470 __rotate_right(parent, root);
471 node = *root;
472 break;
473 }
474 }
475 }
476 if (node)
477 node->in_color = INTERVAL_BLACK;
478 }
479
480 /*
481 * If the max_high value of @node has changed, this function traverses
482 * the path from @node up to the root, updating max_high along the way.
483 */
484 static void update_maxhigh(struct interval_node *node,
485 __u64 old_maxhigh)
486 {
487 __u64 left_max, right_max;
488
489 while (node) {
490 left_max = node->in_left ? node->in_left->in_max_high : 0;
491 right_max = node->in_right ? node->in_right->in_max_high : 0;
492 node->in_max_high = max_u64(interval_high(node),
493 max_u64(left_max, right_max));
494
495 if (node->in_max_high >= old_maxhigh)
496 break;
497 node = node->in_parent;
498 }
499 }
500
501 void interval_erase(struct interval_node *node,
502 struct interval_node **root)
503 {
504 struct interval_node *child, *parent;
505 int color;
506
507 LASSERT(interval_is_intree(node));
508 node->in_intree = 0;
509 if (!node->in_left) {
510 child = node->in_right;
511 } else if (!node->in_right) {
512 child = node->in_left;
513 } else { /* Both left and right child are not NULL */
514 struct interval_node *old = node;
515
516 node = interval_next(node);
517 child = node->in_right;
518 parent = node->in_parent;
519 color = node->in_color;
520
521 if (child)
522 child->in_parent = parent;
523 if (parent == old)
524 parent->in_right = child;
525 else
526 parent->in_left = child;
527
528 node->in_color = old->in_color;
529 node->in_right = old->in_right;
530 node->in_left = old->in_left;
531 node->in_parent = old->in_parent;
532
533 if (old->in_parent) {
534 if (node_is_left_child(old))
535 old->in_parent->in_left = node;
536 else
537 old->in_parent->in_right = node;
538 } else {
539 *root = node;
540 }
541
542 old->in_left->in_parent = node;
543 if (old->in_right)
544 old->in_right->in_parent = node;
545 update_maxhigh(child ? : parent, node->in_max_high);
546 update_maxhigh(node, old->in_max_high);
547 if (parent == old)
548 parent = node;
549 goto color;
550 }
551 parent = node->in_parent;
552 color = node->in_color;
553
554 if (child)
555 child->in_parent = parent;
556 if (parent) {
557 if (node_is_left_child(node))
558 parent->in_left = child;
559 else
560 parent->in_right = child;
561 } else {
562 *root = child;
563 }
564
565 update_maxhigh(child ? : parent, node->in_max_high);
566
567 color:
568 if (color == INTERVAL_BLACK)
569 interval_erase_color(child, parent, root);
570 }
571 EXPORT_SYMBOL(interval_erase);
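
/*
 * Illustrative sketch, not part of the original file: removing the node that
 * matches a given extent, if any.  example_remove_extent() is a hypothetical
 * helper built on interval_find() and interval_erase().
 *
 * static int example_remove_extent(struct interval_node **root,
 *				    struct interval_node_extent *ext)
 * {
 *	struct interval_node *node;
 *
 *	node = interval_find(*root, ext);
 *	if (!node)
 *		return -ENOENT;
 *	interval_erase(node, root);
 *	return 0;
 * }
 */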
572
573 static inline int interval_may_overlap(struct interval_node *node,
574 struct interval_node_extent *ext)
575 {
576 return (ext->start <= node->in_max_high &&
577 ext->end >= interval_low(node));
578 }
579
580 /*
581 * This function finds all intervals that overlap interval ext,
582 * and calls func to handle the resulting intervals one by one.
583 * In Lustre, this function is used to find all conflicting locks in
584 * the granted queue and add them to the AST work list.
585 * The equivalent recursive implementation:
586 * {
587 * if (node == NULL)
588 * return 0;
589 * if (ext->end < interval_low(node)) {
590 * interval_search(node->in_left, ext, func, data);
591 * } else if (interval_may_overlap(node, ext)) {
592 * if (extent_overlapped(ext, &node->in_extent))
593 * func(node, data);
594 * interval_search(node->in_left, ext, func, data);
595 * interval_search(node->in_right, ext, func, data);
596 * }
597 * return 0;
598 * }
599 *
600 */
601 enum interval_iter interval_search(struct interval_node *node,
602 struct interval_node_extent *ext,
603 interval_callback_t func,
604 void *data)
605 {
606 struct interval_node *parent;
607 enum interval_iter rc = INTERVAL_ITER_CONT;
608
609 LASSERT(ext != NULL);
610 LASSERT(func != NULL);
611
612 while (node) {
613 if (ext->end < interval_low(node)) {
614 if (node->in_left) {
615 node = node->in_left;
616 continue;
617 }
618 } else if (interval_may_overlap(node, ext)) {
619 if (extent_overlapped(ext, &node->in_extent)) {
620 rc = func(node, data);
621 if (rc == INTERVAL_ITER_STOP)
622 break;
623 }
624
625 if (node->in_left) {
626 node = node->in_left;
627 continue;
628 }
629 if (node->in_right) {
630 node = node->in_right;
631 continue;
632 }
633 }
634
635 parent = node->in_parent;
636 while (parent) {
637 if (node_is_left_child(node) &&
638 parent->in_right) {
639 /* If we ever descended into a left child, the parent
640 * satisfied either ext->end < interval_low(parent) or
641 * may_overlap(parent). In the former case the right
642 * subtree cannot overlap either, so stop early and
643 * re-check may_overlap(parent) after this loop. */
644 node = parent->in_right;
645 break;
646 }
647 node = parent;
648 parent = parent->in_parent;
649 }
650 if (parent == NULL || !interval_may_overlap(parent, ext))
651 break;
652 }
653
654 return rc;
655 }
656 EXPORT_SYMBOL(interval_search);
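
/*
 * Illustrative sketch, not part of the original file: counting the intervals
 * that overlap a given extent with interval_search().  example_count_cb()
 * and example_count_overlaps() are hypothetical names.
 *
 * static enum interval_iter example_count_cb(struct interval_node *node,
 *					      void *data)
 * {
 *	(*(int *)data)++;
 *	return INTERVAL_ITER_CONT;	// keep searching
 * }
 *
 * static int example_count_overlaps(struct interval_node *root,
 *				     __u64 start, __u64 end)
 * {
 *	struct interval_node_extent ext = { .start = start, .end = end };
 *	int count = 0;
 *
 *	interval_search(root, &ext, example_count_cb, &count);
 *	return count;
 * }
 */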
657
658 static enum interval_iter interval_overlap_cb(struct interval_node *n,
659 void *args)
660 {
661 *(int *)args = 1;
662 return INTERVAL_ITER_STOP;
663 }
664
665 int interval_is_overlapped(struct interval_node *root,
666 struct interval_node_extent *ext)
667 {
668 int has = 0;
669 (void)interval_search(root, ext, interval_overlap_cb, &has);
670 return has;
671 }
672 EXPORT_SYMBOL(interval_is_overlapped);
673
674 /* Don't expand the low end. Expanding downwards is expensive and rarely
675 * useful, because programs seldom do I/O backward.
676 *
677 * The recursive algorithm of expanding low:
678 * expand_low {
679 * struct interval_node *tmp;
680 * static __u64 res = 0;
681 *
682 * if (root == NULL)
683 * return res;
684 * if (root->in_max_high < low) {
685 * res = max_u64(root->in_max_high + 1, res);
686 * return res;
687 * } else if (low < interval_low(root)) {
688 * interval_expand_low(root->in_left, low);
689 * return res;
690 * }
691 *
692 * if (interval_high(root) < low)
693 * res = max_u64(interval_high(root) + 1, res);
694 * interval_expand_low(root->in_left, low);
695 * interval_expand_low(root->in_right, low);
696 *
697 * return res;
698 * }
699 *
700 * It's easy to eliminate the recursion; see interval_search for
701 * an example. -jay
702 */
703 static inline __u64 interval_expand_low(struct interval_node *root, __u64 low)
704 {
705 /* Only the empty tree is handled for now; any other tree leaves @low unchanged. */
706 if (root == NULL)
707 return 0;
708 return low;
709 }
710
711 static inline __u64 interval_expand_high(struct interval_node *node, __u64 high)
712 {
713 __u64 result = ~0;
714
715 while (node != NULL) {
716 if (node->in_max_high < high)
717 break;
718
719 if (interval_low(node) > high) {
720 result = interval_low(node) - 1;
721 node = node->in_left;
722 } else {
723 node = node->in_right;
724 }
725 }
726
727 return result;
728 }
729
730 /* Expand @ext without overlapping any extent in the tree; @limiter bounds the expansion. */
731 void interval_expand(struct interval_node *root,
732 struct interval_node_extent *ext,
733 struct interval_node_extent *limiter)
734 {
735 /* The interval_is_overlapped() assertions are expensive because they may
736 * traverse many nodes looking for an overlapping one. */
737 LASSERT(interval_is_overlapped(root, ext) == 0);
738 if (!limiter || limiter->start < ext->start)
739 ext->start = interval_expand_low(root, ext->start);
740 if (!limiter || limiter->end > ext->end)
741 ext->end = interval_expand_high(root, ext->end);
742 LASSERT(interval_is_overlapped(root, ext) == 0);
743 }
744 EXPORT_SYMBOL(interval_expand);
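
/*
 * Illustrative sketch, not part of the original file: growing a requested
 * extent as far as the tree of granted extents allows.  granted_root,
 * req_start and req_end are hypothetical; OBD_OBJECT_EOF is the usual
 * Lustre end-of-object constant.
 *
 * struct interval_node_extent want = { .start = req_start, .end = req_end };
 * struct interval_node_extent limit = { .start = 0, .end = OBD_OBJECT_EOF };
 *
 * // @want must not overlap anything already in the tree.
 * interval_expand(granted_root, &want, &limit);
 */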