// SPDX-License-Identifier: GPL-2.0-only
#include <linux/interval_tree.h>
#include <linux/interval_tree_generic.h>
#include <linux/compiler.h>
#include <linux/export.h>

#define START(node) ((node)->start)
#define LAST(node)  ((node)->last)

INTERVAL_TREE_DEFINE(struct interval_tree_node, rb,
                     unsigned long, __subtree_last,
                     START, LAST,, interval_tree)

EXPORT_SYMBOL_GPL(interval_tree_insert);
EXPORT_SYMBOL_GPL(interval_tree_remove);
EXPORT_SYMBOL_GPL(interval_tree_iter_first);
EXPORT_SYMBOL_GPL(interval_tree_iter_next);
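
/*
 * Illustrative usage sketch with a hypothetical caller and made-up index
 * values: embed a struct interval_tree_node, insert it into an
 * rb_root_cached tree, and walk every node overlapping [start, last] with
 * the iter helpers exported above.
 *
 *        struct rb_root_cached itree = RB_ROOT_CACHED;
 *        struct interval_tree_node node = { .start = 10, .last = 19 };
 *        struct interval_tree_node *cur;
 *
 *        interval_tree_insert(&node, &itree);
 *        for (cur = interval_tree_iter_first(&itree, 0, ULONG_MAX); cur;
 *             cur = interval_tree_iter_next(cur, 0, ULONG_MAX))
 *                pr_info("overlap [%lu, %lu]\n", cur->start, cur->last);
 */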

#ifdef CONFIG_INTERVAL_TREE_SPAN_ITER
/*
 * Roll nodes[1] into nodes[0] by advancing nodes[1] to the end of a contiguous
 * span of nodes. This makes nodes[0]->last the last index of the contiguous
 * used span that started at the original nodes[1]->start, and leaves nodes[1]
 * as the first node starting the next used span. The indexes between
 * nodes[0]->last and nodes[1]->start form a hole span. nodes[1] must not be
 * NULL on entry.
 */
static void
interval_tree_span_iter_next_gap(struct interval_tree_span_iter *state)
{
        struct interval_tree_node *cur = state->nodes[1];

        state->nodes[0] = cur;
        do {
                if (cur->last > state->nodes[0]->last)
                        state->nodes[0] = cur;
                cur = interval_tree_iter_next(cur, state->first_index,
                                              state->last_index);
        } while (cur && (state->nodes[0]->last >= cur->start ||
                         state->nodes[0]->last + 1 == cur->start));
        state->nodes[1] = cur;
}
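
/*
 * Worked example with made-up index values: with intervals [5,7], [8,10] and
 * [13,15] in the tree and nodes[1] == [5,7] on entry, the loop walks to
 * [8,10] (adjacent, since 7 + 1 == 8) and stops at [13,15]. On return
 * nodes[0] is [8,10], so nodes[0]->last == 10 ends the used span that began
 * at index 5, nodes[1] is [13,15], and indexes 11..12 in between form the
 * hole span.
 */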

void interval_tree_span_iter_first(struct interval_tree_span_iter *iter,
                                   struct rb_root_cached *itree,
                                   unsigned long first_index,
                                   unsigned long last_index)
{
        iter->first_index = first_index;
        iter->last_index = last_index;
        iter->nodes[0] = NULL;
        iter->nodes[1] =
                interval_tree_iter_first(itree, first_index, last_index);
        if (!iter->nodes[1]) {
                /* No nodes intersect the span, whole span is hole */
                iter->start_hole = first_index;
                iter->last_hole = last_index;
                iter->is_hole = 1;
                return;
        }
        if (iter->nodes[1]->start > first_index) {
                /* Leading hole on first iteration */
                iter->start_hole = first_index;
                iter->last_hole = iter->nodes[1]->start - 1;
                iter->is_hole = 1;
                interval_tree_span_iter_next_gap(iter);
                return;
        }

        /* Starting inside a used range */
        iter->start_used = first_index;
        iter->is_hole = 0;
        interval_tree_span_iter_next_gap(iter);
        iter->last_used = iter->nodes[0]->last;
        if (iter->last_used >= last_index) {
                iter->last_used = last_index;
                iter->nodes[0] = NULL;
                iter->nodes[1] = NULL;
        }
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_first);
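
/*
 * Illustrative usage sketch with a hypothetical caller: a span iterator
 * reports alternating used and hole spans covering [first_index, last_index].
 * The interval_tree_for_each_span() helper in <linux/interval_tree.h> wraps
 * the first/next/done calls:
 *
 *        struct interval_tree_span_iter span;
 *
 *        interval_tree_for_each_span(&span, &itree, 0, ULONG_MAX) {
 *                if (span.is_hole)
 *                        pr_info("hole [%lu, %lu]\n",
 *                                span.start_hole, span.last_hole);
 *                else
 *                        pr_info("used [%lu, %lu]\n",
 *                                span.start_used, span.last_used);
 *        }
 */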

void interval_tree_span_iter_next(struct interval_tree_span_iter *iter)
{
        if (!iter->nodes[0] && !iter->nodes[1]) {
                iter->is_hole = -1;
                return;
        }

        if (iter->is_hole) {
                iter->start_used = iter->last_hole + 1;
                iter->last_used = iter->nodes[0]->last;
                if (iter->last_used >= iter->last_index) {
                        iter->last_used = iter->last_index;
                        iter->nodes[0] = NULL;
                        iter->nodes[1] = NULL;
                }
                iter->is_hole = 0;
                return;
        }

        if (!iter->nodes[1]) {
                /* Trailing hole */
                iter->start_hole = iter->nodes[0]->last + 1;
                iter->last_hole = iter->last_index;
                iter->nodes[0] = NULL;
                iter->is_hole = 1;
                return;
        }

        /* must have both nodes[0] and [1], interior hole */
        iter->start_hole = iter->nodes[0]->last + 1;
        iter->last_hole = iter->nodes[1]->start - 1;
        iter->is_hole = 1;
        interval_tree_span_iter_next_gap(iter);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_next);

/*
 * Advance the iterator index to a specific position. The returned used/hole is
 * updated to start at new_index. This is faster than calling
 * interval_tree_span_iter_first() as it can avoid full searches in several
 * cases where the iterator is already set.
 */
void interval_tree_span_iter_advance(struct interval_tree_span_iter *iter,
                                     struct rb_root_cached *itree,
                                     unsigned long new_index)
{
        if (iter->is_hole == -1)
                return;

        iter->first_index = new_index;
        if (new_index > iter->last_index) {
                iter->is_hole = -1;
                return;
        }

        /* Rely on the union aliasing hole/used */
        if (iter->start_hole <= new_index && new_index <= iter->last_hole) {
                iter->start_hole = new_index;
                return;
        }
        if (new_index == iter->last_hole + 1)
                interval_tree_span_iter_next(iter);
        else
                interval_tree_span_iter_first(iter, itree, new_index,
                                              iter->last_index);
}
EXPORT_SYMBOL_GPL(interval_tree_span_iter_advance);
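
/*
 * Worked example with made-up values: if the iterator currently reports the
 * hole [11,12] from the interval set sketched above, then
 * interval_tree_span_iter_advance(&span, &itree, 12) only trims the current
 * hole to start at 12; advancing to 13 (one past the hole) steps to the next
 * used span via interval_tree_span_iter_next(); and a target further away
 * falls back to a fresh interval_tree_span_iter_first() search.
 */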
#endif