Btrfs: use fastpath in extent state ops as much as possible
author Liu Bo <liubo2009@cn.fujitsu.com>
Thu, 10 May 2012 10:10:39 +0000 (18:10 +0800)
committer Josef Bacik <josef@redhat.com>
Wed, 30 May 2012 14:23:34 +0000 (10:23 -0400)
Fully utilize the extent state's new helper functions so that the
fastpath is taken as much as possible.
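
For reference, the fastpath here leans on the next_state() helper added
earlier in this series; a rough sketch of it (the exact body in
fs/btrfs/extent_io.c may differ slightly):

        /*
         * Sketch: return the extent_state that follows 'state' in the
         * tree, or NULL if it is the last one.
         */
        static struct extent_state *next_state(struct extent_state *state)
        {
                struct rb_node *next = rb_next(&state->rb_node);

                if (next)
                        return rb_entry(next, struct extent_state, rb_node);
                return NULL;
        }

clear_state_bit() was likewise changed to hand back (roughly) the next
state of the one it just cleared, which is why the callers below can
assign its return value and jump straight to hit_next without an extra
rb_next() lookup or the old prealloc check.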

Signed-off-by: Liu Bo <liubo2009@cn.fujitsu.com>
Reviewed-by: Josef Bacik <josef@redhat.com>
fs/btrfs/extent_io.c

index 7af9343..69a527c 100644
@@ -569,10 +569,8 @@ hit_next:
                if (err)
                        goto out;
                if (state->end <= end) {
-                       clear_state_bit(tree, state, &bits, wake);
-                       if (last_end == (u64)-1)
-                               goto out;
-                       start = last_end + 1;
+                       state = clear_state_bit(tree, state, &bits, wake);
+                       goto next;
                }
                goto search_again;
        }
@@ -780,7 +778,6 @@ hit_next:
         * Just lock what we found and keep going
         */
        if (state->start == start && state->end <= end) {
-               struct rb_node *next_node;
                if (state->state & exclusive_bits) {
                        *failed_start = state->start;
                        err = -EEXIST;
@@ -788,20 +785,15 @@ hit_next:
                }
 
                set_state_bits(tree, state, &bits);
-
                cache_state(state, cached_state);
                merge_state(tree, state);
                if (last_end == (u64)-1)
                        goto out;
-
                start = last_end + 1;
-               next_node = rb_next(&state->rb_node);
-               if (next_node && start < end && prealloc && !need_resched()) {
-                       state = rb_entry(next_node, struct extent_state,
-                                        rb_node);
-                       if (state->start == start)
-                               goto hit_next;
-               }
+               state = next_state(state);
+               if (start < end && state && state->start == start &&
+                   !need_resched())
+                       goto hit_next;
                goto search_again;
        }
 
@@ -844,6 +836,10 @@ hit_next:
                        if (last_end == (u64)-1)
                                goto out;
                        start = last_end + 1;
+                       state = next_state(state);
+                       if (start < end && state && state->start == start &&
+                           !need_resched())
+                               goto hit_next;
                }
                goto search_again;
        }
@@ -993,21 +989,14 @@ hit_next:
         * Just lock what we found and keep going
         */
        if (state->start == start && state->end <= end) {
-               struct rb_node *next_node;
-
                set_state_bits(tree, state, &bits);
-               clear_state_bit(tree, state, &clear_bits, 0);
+               state = clear_state_bit(tree, state, &clear_bits, 0);
                if (last_end == (u64)-1)
                        goto out;
-
                start = last_end + 1;
-               next_node = rb_next(&state->rb_node);
-               if (next_node && start < end && prealloc && !need_resched()) {
-                       state = rb_entry(next_node, struct extent_state,
-                                        rb_node);
-                       if (state->start == start)
-                               goto hit_next;
-               }
+               if (start < end && state && state->start == start &&
+                   !need_resched())
+                       goto hit_next;
                goto search_again;
        }
 
@@ -1041,10 +1030,13 @@ hit_next:
                        goto out;
                if (state->end <= end) {
                        set_state_bits(tree, state, &bits);
-                       clear_state_bit(tree, state, &clear_bits, 0);
+                       state = clear_state_bit(tree, state, &clear_bits, 0);
                        if (last_end == (u64)-1)
                                goto out;
                        start = last_end + 1;
+                       if (start < end && state && state->start == start &&
+                           !need_resched())
+                               goto hit_next;
                }
                goto search_again;
        }
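
Taken together, the added lines converge on a single pattern in
clear_extent_bit, set_extent_bit and convert_extent_bit; a condensed,
editorial sketch (comments are mine, not part of the patch):

        state = clear_state_bit(tree, state, &clear_bits, 0);
        /* or: set_state_bits(...); state = next_state(state); */
        if (last_end == (u64)-1)
                goto out;
        start = last_end + 1;
        if (start < end && state && state->start == start &&
            !need_resched())
                goto hit_next;          /* fastpath: next state is adjacent, reuse it */
        goto search_again;              /* otherwise re-walk the tree from 'start' */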