drivers/staging/lustre/lustre/ldlm/interval_tree.c
/*
 * GPL HEADER START
 *
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 only,
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License version 2 for more details (a copy is included
 * in the LICENSE file that accompanied this code).
 *
 * You should have received a copy of the GNU General Public License
 * version 2 along with this program; If not, see
 * http://www.sun.com/software/products/lustre/docs/GPLv2.pdf
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 * GPL HEADER END
 */
/*
 * Copyright (c) 2008, 2010, Oracle and/or its affiliates. All rights reserved.
 * Use is subject to license terms.
 */
/*
 * This file is part of Lustre, http://www.lustre.org/
 * Lustre is a trademark of Sun Microsystems, Inc.
 *
 * lustre/ldlm/interval_tree.c
 *
 * Interval tree library used by ldlm extent lock code
 *
 * Author: Huang Wei <huangwei@clusterfs.com>
 * Author: Jay Xiong <jinshan.xiong@sun.com>
 */
#include "../include/lustre_dlm.h"
#include "../include/obd_support.h"
#include "../include/interval_tree.h"

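/*
 * Illustrative usage sketch (not lifted from any real caller; my_cb and the
 * local names below are made up, locking and error handling are omitted).
 * The caller owns the node storage (ldlm embeds struct interval_node in its
 * own lock structures) and must initialize the extent and in_max_high before
 * inserting, e.g. via the interval_set() helper in interval_tree.h:
 *
 *	static enum interval_iter my_cb(struct interval_node *n, void *data)
 *	{
 *		(*(int *)data)++;
 *		return INTERVAL_ITER_CONT;
 *	}
 *
 *	struct interval_node *root = NULL;
 *	struct interval_node node;
 *	struct interval_node_extent ext = { .start = 0, .end = 4095 };
 *	int hits = 0;
 *
 *	memset(&node, 0, sizeof(node));
 *	node.in_extent = ext;
 *	node.in_max_high = ext.end;
 *	if (interval_insert(&node, &root) == NULL) {
 *		interval_search(root, &ext, my_cb, &hits);
 *		interval_erase(&node, &root);
 *	}
 */
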
enum {
	INTERVAL_RED = 0,
	INTERVAL_BLACK = 1
};

static inline int node_is_left_child(struct interval_node *node)
{
	LASSERT(node->in_parent != NULL);
	return node == node->in_parent->in_left;
}

static inline int node_is_right_child(struct interval_node *node)
{
	LASSERT(node->in_parent != NULL);
	return node == node->in_parent->in_right;
}

static inline int node_is_red(struct interval_node *node)
{
	return node->in_color == INTERVAL_RED;
}

static inline int node_is_black(struct interval_node *node)
{
	return node->in_color == INTERVAL_BLACK;
}

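/*
 * Total order on extents: primarily by start, then by end.  This is the key
 * ordering of the tree, so two nodes are considered equal only when both
 * ends match exactly.
 */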
static inline int extent_compare(struct interval_node_extent *e1,
				 struct interval_node_extent *e2)
{
	int rc;

	if (e1->start == e2->start) {
		if (e1->end < e2->end)
			rc = -1;
		else if (e1->end > e2->end)
			rc = 1;
		else
			rc = 0;
	} else {
		if (e1->start < e2->start)
			rc = -1;
		else
			rc = 1;
	}
	return rc;
}

static inline int extent_equal(struct interval_node_extent *e1,
			       struct interval_node_extent *e2)
{
	return (e1->start == e2->start) && (e1->end == e2->end);
}

static inline int extent_overlapped(struct interval_node_extent *e1,
				    struct interval_node_extent *e2)
{
	return (e1->start <= e2->end) && (e2->start <= e1->end);
}

static inline int node_compare(struct interval_node *n1,
			       struct interval_node *n2)
{
	return extent_compare(&n1->in_extent, &n2->in_extent);
}

static inline int node_equal(struct interval_node *n1,
			     struct interval_node *n2)
{
	return extent_equal(&n1->in_extent, &n2->in_extent);
}

static inline __u64 max_u64(__u64 x, __u64 y)
{
	return x > y ? x : y;
}

static inline __u64 min_u64(__u64 x, __u64 y)
{
	return x < y ? x : y;
}

#define interval_for_each(node, root)				\
	for (node = interval_first(root); node != NULL;	\
	     node = interval_next(node))

#define interval_for_each_reverse(node, root)			\
	for (node = interval_last(root); node != NULL;		\
	     node = interval_prev(node))

static struct interval_node *interval_first(struct interval_node *node)
{
	if (!node)
		return NULL;
	while (node->in_left)
		node = node->in_left;
	return node;
}

static struct interval_node *interval_last(struct interval_node *node)
{
	if (!node)
		return NULL;
	while (node->in_right)
		node = node->in_right;
	return node;
}

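/*
 * In-order successor: the leftmost node of the right subtree if there is
 * one, otherwise the first ancestor reached from its left child.
 * interval_prev() is the mirror image.
 */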
static struct interval_node *interval_next(struct interval_node *node)
{
	if (!node)
		return NULL;
	if (node->in_right)
		return interval_first(node->in_right);
	while (node->in_parent && node_is_right_child(node))
		node = node->in_parent;
	return node->in_parent;
}

static struct interval_node *interval_prev(struct interval_node *node)
{
	if (!node)
		return NULL;

	if (node->in_left)
		return interval_last(node->in_left);

	while (node->in_parent && node_is_left_child(node))
		node = node->in_parent;

	return node->in_parent;
}

enum interval_iter interval_iterate(struct interval_node *root,
				    interval_callback_t func,
				    void *data)
{
	struct interval_node *node;
	enum interval_iter rc = INTERVAL_ITER_CONT;

	interval_for_each(node, root) {
		rc = func(node, data);
		if (rc == INTERVAL_ITER_STOP)
			break;
	}

	return rc;
}
EXPORT_SYMBOL(interval_iterate);

enum interval_iter interval_iterate_reverse(struct interval_node *root,
					    interval_callback_t func,
					    void *data)
{
	struct interval_node *node;
	enum interval_iter rc = INTERVAL_ITER_CONT;

	interval_for_each_reverse(node, root) {
		rc = func(node, data);
		if (rc == INTERVAL_ITER_STOP)
			break;
	}

	return rc;
}
EXPORT_SYMBOL(interval_iterate_reverse);

/*
 * Try to find a node with the same interval in the tree; if found, return
 * a pointer to that node, otherwise return NULL.
 */
struct interval_node *interval_find(struct interval_node *root,
				    struct interval_node_extent *ex)
{
	struct interval_node *walk = root;
	int rc;

	while (walk) {
		rc = extent_compare(ex, &walk->in_extent);
		if (rc == 0)
			break;
		else if (rc < 0)
			walk = walk->in_left;
		else
			walk = walk->in_right;
	}

	return walk;
}
EXPORT_SYMBOL(interval_find);

static void __rotate_change_maxhigh(struct interval_node *node,
				    struct interval_node *rotate)
{
	__u64 left_max, right_max;

	rotate->in_max_high = node->in_max_high;
	left_max = node->in_left ? node->in_left->in_max_high : 0;
	right_max = node->in_right ? node->in_right->in_max_high : 0;
	node->in_max_high = max_u64(interval_high(node),
				    max_u64(left_max, right_max));
}

/*
 * The left rotation "pivots" around the link from node to node->right:
 * - node is linked as node->right's left child, and
 * - node->right's old left child is linked as node's right child.
 */
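/*
 * Illustration, with y == node->in_right (a, b and c are arbitrary
 * subtrees):
 *
 *            node                              y
 *           /    \                            / \
 *          a      y       --left-->       node   c
 *                / \                     /      \
 *               b   c                  a        b
 */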
static void __rotate_left(struct interval_node *node,
			  struct interval_node **root)
{
	struct interval_node *right = node->in_right;
	struct interval_node *parent = node->in_parent;

	node->in_right = right->in_left;
	if (node->in_right)
		right->in_left->in_parent = node;

	right->in_left = node;
	right->in_parent = parent;
	if (parent) {
		if (node_is_left_child(node))
			parent->in_left = right;
		else
			parent->in_right = right;
	} else {
		*root = right;
	}
	node->in_parent = right;

	/* update max_high for node and right */
	__rotate_change_maxhigh(node, right);
}

/*
 * The right rotation "pivots" around the link from node to node->left:
 * - node is linked as node->left's right child, and
 * - node->left's old right child is linked as node's left child.
 */
static void __rotate_right(struct interval_node *node,
			   struct interval_node **root)
{
	struct interval_node *left = node->in_left;
	struct interval_node *parent = node->in_parent;

	node->in_left = left->in_right;
	if (node->in_left)
		left->in_right->in_parent = node;
	left->in_right = node;

	left->in_parent = parent;
	if (parent) {
		if (node_is_right_child(node))
			parent->in_right = left;
		else
			parent->in_left = left;
	} else {
		*root = left;
	}
	node->in_parent = left;

	/* update max_high for node and left */
	__rotate_change_maxhigh(node, left);
}

#define interval_swap(a, b) do {			\
	struct interval_node *c = a; a = b; b = c;	\
} while (0)

/*
 * The INSERT and DELETE operations, when run on a tree with n keys, take
 * O(log n) time.  Because they modify the tree, the result may violate the
 * red-black properties.  To restore these properties, we must change the
 * colors of some of the nodes in the tree and also change the pointer
 * structure.
 */
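/*
 * Insertion fixup, the classic cases (mirrored when the parent is a right
 * child):
 * 1. red uncle: recolor parent and uncle black, grandparent red, and
 *    continue from the grandparent;
 * 2. black uncle, node is the "inner" grandchild: rotate the parent so we
 *    fall through to case 3;
 * 3. black uncle, node is the "outer" grandchild: recolor and rotate the
 *    grandparent.
 */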
static void interval_insert_color(struct interval_node *node,
				  struct interval_node **root)
{
	struct interval_node *parent, *gparent;

	while ((parent = node->in_parent) && node_is_red(parent)) {
		gparent = parent->in_parent;
		/* Parent is RED, so gparent must not be NULL */
		if (node_is_left_child(parent)) {
			struct interval_node *uncle;

			uncle = gparent->in_right;
			if (uncle && node_is_red(uncle)) {
				uncle->in_color = INTERVAL_BLACK;
				parent->in_color = INTERVAL_BLACK;
				gparent->in_color = INTERVAL_RED;
				node = gparent;
				continue;
			}

			if (parent->in_right == node) {
				__rotate_left(parent, root);
				interval_swap(node, parent);
			}

			parent->in_color = INTERVAL_BLACK;
			gparent->in_color = INTERVAL_RED;
			__rotate_right(gparent, root);
		} else {
			struct interval_node *uncle;

			uncle = gparent->in_left;
			if (uncle && node_is_red(uncle)) {
				uncle->in_color = INTERVAL_BLACK;
				parent->in_color = INTERVAL_BLACK;
				gparent->in_color = INTERVAL_RED;
				node = gparent;
				continue;
			}

			if (node_is_left_child(node)) {
				__rotate_right(parent, root);
				interval_swap(node, parent);
			}

			parent->in_color = INTERVAL_BLACK;
			gparent->in_color = INTERVAL_RED;
			__rotate_left(gparent, root);
		}
	}

	(*root)->in_color = INTERVAL_BLACK;
}

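/*
 * Link @node into the tree rooted at *@root.  Returns NULL on success, or
 * the node that is already in the tree if an interval with exactly the same
 * [start, end] exists; in that case @node is not inserted.
 */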
struct interval_node *interval_insert(struct interval_node *node,
				      struct interval_node **root)
{
	struct interval_node **p, *parent = NULL;

	LASSERT(!interval_is_intree(node));
	p = root;
	while (*p) {
		parent = *p;
		if (node_equal(parent, node))
			return parent;

		/* max_high field must be updated after each iteration */
		if (parent->in_max_high < interval_high(node))
			parent->in_max_high = interval_high(node);

		if (node_compare(node, parent) < 0)
			p = &parent->in_left;
		else
			p = &parent->in_right;
	}

	/* link node into the tree */
	node->in_parent = parent;
	node->in_color = INTERVAL_RED;
	node->in_left = node->in_right = NULL;
	*p = node;

	interval_insert_color(node, root);
	node->in_intree = 1;

	return NULL;
}
EXPORT_SYMBOL(interval_insert);

static inline int node_is_black_or_0(struct interval_node *node)
{
	return !node || node_is_black(node);
}

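/*
 * Standard red-black deletion fixup: @node (which may be NULL) carries an
 * extra "black" after a black node was removed; push it up the tree,
 * recoloring and rotating until the red-black properties hold again.
 * @parent must be passed explicitly because @node may be NULL.
 */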
static void interval_erase_color(struct interval_node *node,
				 struct interval_node *parent,
				 struct interval_node **root)
{
	struct interval_node *tmp;

	while (node_is_black_or_0(node) && node != *root) {
		if (parent->in_left == node) {
			tmp = parent->in_right;
			if (node_is_red(tmp)) {
				tmp->in_color = INTERVAL_BLACK;
				parent->in_color = INTERVAL_RED;
				__rotate_left(parent, root);
				tmp = parent->in_right;
			}
			if (node_is_black_or_0(tmp->in_left) &&
			    node_is_black_or_0(tmp->in_right)) {
				tmp->in_color = INTERVAL_RED;
				node = parent;
				parent = node->in_parent;
			} else {
				if (node_is_black_or_0(tmp->in_right)) {
					struct interval_node *o_left;

					o_left = tmp->in_left;
					if (o_left)
						o_left->in_color = INTERVAL_BLACK;
					tmp->in_color = INTERVAL_RED;
					__rotate_right(tmp, root);
					tmp = parent->in_right;
				}
				tmp->in_color = parent->in_color;
				parent->in_color = INTERVAL_BLACK;
				if (tmp->in_right)
					tmp->in_right->in_color = INTERVAL_BLACK;
				__rotate_left(parent, root);
				node = *root;
				break;
			}
		} else {
			tmp = parent->in_left;
			if (node_is_red(tmp)) {
				tmp->in_color = INTERVAL_BLACK;
				parent->in_color = INTERVAL_RED;
				__rotate_right(parent, root);
				tmp = parent->in_left;
			}
			if (node_is_black_or_0(tmp->in_left) &&
			    node_is_black_or_0(tmp->in_right)) {
				tmp->in_color = INTERVAL_RED;
				node = parent;
				parent = node->in_parent;
			} else {
				if (node_is_black_or_0(tmp->in_left)) {
					struct interval_node *o_right;

					o_right = tmp->in_right;
					if (o_right)
						o_right->in_color = INTERVAL_BLACK;
					tmp->in_color = INTERVAL_RED;
					__rotate_left(tmp, root);
					tmp = parent->in_left;
				}
				tmp->in_color = parent->in_color;
				parent->in_color = INTERVAL_BLACK;
				if (tmp->in_left)
					tmp->in_left->in_color = INTERVAL_BLACK;
				__rotate_right(parent, root);
				node = *root;
				break;
			}
		}
	}
	if (node)
		node->in_color = INTERVAL_BLACK;
}

/*
 * If the in_max_high value of @node has changed, walk the path from @node
 * up to the root and refresh in_max_high for the whole tree.
 */
static void update_maxhigh(struct interval_node *node,
			   __u64 old_maxhigh)
{
	__u64 left_max, right_max;

	while (node) {
		left_max = node->in_left ? node->in_left->in_max_high : 0;
		right_max = node->in_right ? node->in_right->in_max_high : 0;
		node->in_max_high = max_u64(interval_high(node),
					    max_u64(left_max, right_max));

		if (node->in_max_high >= old_maxhigh)
			break;
		node = node->in_parent;
	}
}

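/*
 * Unlink @node from the tree.  A node with two children is replaced by its
 * in-order successor, interval_next(node); afterwards in_max_high is
 * refreshed along both affected paths, and the red-black fixup runs if a
 * black node was removed from the tree.
 */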
void interval_erase(struct interval_node *node,
		    struct interval_node **root)
{
	struct interval_node *child, *parent;
	int color;

	LASSERT(interval_is_intree(node));
	node->in_intree = 0;
	if (!node->in_left) {
		child = node->in_right;
	} else if (!node->in_right) {
		child = node->in_left;
	} else { /* Both left and right children are non-NULL */
		struct interval_node *old = node;

		node = interval_next(node);
		child = node->in_right;
		parent = node->in_parent;
		color = node->in_color;

		if (child)
			child->in_parent = parent;
		if (parent == old)
			parent->in_right = child;
		else
			parent->in_left = child;

		node->in_color = old->in_color;
		node->in_right = old->in_right;
		node->in_left = old->in_left;
		node->in_parent = old->in_parent;

		if (old->in_parent) {
			if (node_is_left_child(old))
				old->in_parent->in_left = node;
			else
				old->in_parent->in_right = node;
		} else {
			*root = node;
		}

		old->in_left->in_parent = node;
		if (old->in_right)
			old->in_right->in_parent = node;
		update_maxhigh(child ? : parent, node->in_max_high);
		update_maxhigh(node, old->in_max_high);
		if (parent == old)
			parent = node;
		goto color;
	}
	parent = node->in_parent;
	color = node->in_color;

	if (child)
		child->in_parent = parent;
	if (parent) {
		if (node_is_left_child(node))
			parent->in_left = child;
		else
			parent->in_right = child;
	} else {
		*root = child;
	}

	update_maxhigh(child ? : parent, node->in_max_high);

color:
	if (color == INTERVAL_BLACK)
		interval_erase_color(child, parent, root);
}
EXPORT_SYMBOL(interval_erase);

static inline int interval_may_overlap(struct interval_node *node,
				       struct interval_node_extent *ext)
{
	return (ext->start <= node->in_max_high &&
		ext->end >= interval_low(node));
}

/*
 * This function finds all intervals that overlap the interval @ext and
 * calls @func on each of them.  In Lustre this is used to find all
 * conflicting locks in the granted queue and add them to the AST work list.
 *
 * The equivalent recursive version, for reference:
 *
 * {
 *	if (node == NULL)
 *		return 0;
 *	if (ext->end < interval_low(node)) {
 *		interval_search(node->in_left, ext, func, data);
 *	} else if (interval_may_overlap(node, ext)) {
 *		if (extent_overlapped(ext, &node->in_extent))
 *			func(node, data);
 *		interval_search(node->in_left, ext, func, data);
 *		interval_search(node->in_right, ext, func, data);
 *	}
 *	return 0;
 * }
 */
enum interval_iter interval_search(struct interval_node *node,
				   struct interval_node_extent *ext,
				   interval_callback_t func,
				   void *data)
{
	struct interval_node *parent;
	enum interval_iter rc = INTERVAL_ITER_CONT;

	LASSERT(ext != NULL);
	LASSERT(func != NULL);

	while (node) {
		if (ext->end < interval_low(node)) {
			if (node->in_left) {
				node = node->in_left;
				continue;
			}
		} else if (interval_may_overlap(node, ext)) {
			if (extent_overlapped(ext, &node->in_extent)) {
				rc = func(node, data);
				if (rc == INTERVAL_ITER_STOP)
					break;
			}

			if (node->in_left) {
				node = node->in_left;
				continue;
			}
			if (node->in_right) {
				node = node->in_right;
				continue;
			}
		}

		parent = node->in_parent;
		while (parent) {
			if (node_is_left_child(node) &&
			    parent->in_right) {
				/*
				 * We only ever descend into a left child
				 * when the parent satisfied either
				 * ext->end < interval_low(parent) or
				 * interval_may_overlap(parent).  In the
				 * former case there is nothing more to
				 * visit, so stop ascending here and let the
				 * interval_may_overlap(parent) check after
				 * this loop decide.
				 */
				node = parent->in_right;
				break;
			}
			node = parent;
			parent = parent->in_parent;
		}
		if (parent == NULL || !interval_may_overlap(parent, ext))
			break;
	}

	return rc;
}
EXPORT_SYMBOL(interval_search);

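/* Callback for interval_is_overlapped(): record the first hit and stop. */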
static enum interval_iter interval_overlap_cb(struct interval_node *n,
					      void *args)
{
	*(int *)args = 1;
	return INTERVAL_ITER_STOP;
}

int interval_is_overlapped(struct interval_node *root,
			   struct interval_node_extent *ext)
{
	int has = 0;

	(void)interval_search(root, ext, interval_overlap_cb, &has);
	return has;
}
EXPORT_SYMBOL(interval_is_overlapped);

/*
 * Don't expand the low end: expanding downwards is expensive and of little
 * use for most extents, because programs seldom do backward IO.
 *
 * The recursive algorithm for expanding low would be:
 * expand_low {
 *	struct interval_node *tmp;
 *	static __u64 res = 0;
 *
 *	if (root == NULL)
 *		return res;
 *	if (root->in_max_high < low) {
 *		res = max_u64(root->in_max_high + 1, res);
 *		return res;
 *	} else if (low < interval_low(root)) {
 *		interval_expand_low(root->in_left, low);
 *		return res;
 *	}
 *
 *	if (interval_high(root) < low)
 *		res = max_u64(interval_high(root) + 1, res);
 *	interval_expand_low(root->in_left, low);
 *	interval_expand_low(root->in_right, low);
 *
 *	return res;
 * }
 *
 * It is easy to eliminate the recursion; see interval_search() for an
 * example. -jay
 */
static inline __u64 interval_expand_low(struct interval_node *root, __u64 low)
{
	/* For now we only care about the empty-tree case. */
	if (root == NULL)
		return 0;
	return low;
}

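/*
 * Expand the high end: return one less than the lowest start of any
 * interval in the tree that begins strictly above @high, or ~0 if no such
 * interval exists.  Assuming an extent ending at @high does not already
 * overlap anything in the tree (interval_expand() asserts this), growing
 * the end up to the returned value cannot create an overlap.
 */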
static inline __u64 interval_expand_high(struct interval_node *node, __u64 high)
{
	__u64 result = ~0;

	while (node != NULL) {
		if (node->in_max_high < high)
			break;

		if (interval_low(node) > high) {
			result = interval_low(node) - 1;
			node = node->in_left;
		} else {
			node = node->in_right;
		}
	}

	return result;
}

/*
 * Expand @ext as far as possible without overlapping any interval already
 * in the tree.  Expansion in a direction is only attempted when @limiter
 * leaves room on that side.
 */
void interval_expand(struct interval_node *root,
		     struct interval_node_extent *ext,
		     struct interval_node_extent *limiter)
{
	/*
	 * These interval_is_overlapped() assertions are expensive, because
	 * they may visit many nodes before finding an overlapping one.
	 */
	LASSERT(interval_is_overlapped(root, ext) == 0);
	if (!limiter || limiter->start < ext->start)
		ext->start = interval_expand_low(root, ext->start);
	if (!limiter || limiter->end > ext->end)
		ext->end = interval_expand_high(root, ext->end);
	LASSERT(interval_is_overlapped(root, ext) == 0);
}
EXPORT_SYMBOL(interval_expand);