unsigned int nofs_flag;
u64 *alloc_offsets = NULL;
u64 *caps = NULL;
+ unsigned long *active = NULL;
u64 last_alloc = 0;
u32 num_sequential = 0, num_conventional = 0;
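This hunk adds a per-stripe `active` bitmap to btrfs_load_block_group_zone_info(): one bit per stripe in the chunk map, set when that stripe's device zone is active. The helpers used throughout the patch are the stock linux/bitmap.h API; the wrapper function in this sketch is only an illustration:

#include <linux/bitmap.h>
#include <linux/errno.h>
#include <linux/slab.h>

/* Illustrative sketch of the bitmap helpers the patch relies on. */
static int active_bitmap_sketch(unsigned int nbits)
{
	unsigned long *active = bitmap_zalloc(nbits, GFP_NOFS); /* zero-filled */
	int ret;

	if (!active)
		return -ENOMEM;
	__set_bit(0, active);	/* non-atomic set; the bitmap is still private */
	ret = test_bit(0, active) ? 0 : -EINVAL;
	bitmap_free(active);	/* kfree()-based and NULL-safe */
	return ret;
}

Further down, once the preceding error checks have passed (the goto/brace below are their trailing context), the bitmap is allocated up front so every later path can rely on it: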
goto out;
}
+ active = bitmap_zalloc(map->num_stripes, GFP_NOFS);
+ if (!active) {
+ ret = -ENOMEM;
+ goto out;
+ }
+
for (i = 0; i < map->num_stripes; i++) {
bool is_sequential;
struct blk_zone zone;
[...]
/* Partially used zone */
alloc_offsets[i] =
((zone.wp - zone.start) << SECTOR_SHIFT);
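+ /* A zone whose write pointer sits inside the zone is implicitly active. */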
+ __set_bit(i, active);
break;
}
+
+ /*
+ * Consider the zone active when the device imposes no limit
+ * on the number of active zones (max_active_zones == 0).
+ */
+ if (!device->zone_info->max_active_zones)
+ __set_bit(i, active);
}
if (num_sequential > 0)
[...]
}
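After the per-stripe scan (intervening context elided), the profile switch consolidates the stripe data into the block group. For BTRFS_BLOCK_GROUP_SINGLE there is exactly one stripe, so bit 0 becomes the block group's active state: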
cache->alloc_offset = alloc_offsets[0];
cache->zone_capacity = caps[0];
+ cache->zone_is_active = test_bit(0, active);
break;
case BTRFS_BLOCK_GROUP_DUP:
case BTRFS_BLOCK_GROUP_RAID1:
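The multi-device profiles are untouched by this hunk; as the unchanged context below shows, they still leave through `goto out`. Once the switch has settled `zone_is_active`, an active block group is linked into the filesystem-wide list: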
goto out;
}
+ if (cache->zone_is_active) {
+ btrfs_get_block_group(cache);
+ spin_lock(&fs_info->zone_active_bgs_lock);
+ list_add_tail(&cache->active_bg_list, &fs_info->zone_active_bgs);
+ spin_unlock(&fs_info->zone_active_bgs_lock);
+ }
+
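Taking a block-group reference before the insertion ties the group's lifetime to its list membership, and the list is only ever touched under zone_active_bgs_lock. A sketch of the symmetric removal a teardown path would need, assuming the same locking rule (that path is outside this excerpt):

spin_lock(&fs_info->zone_active_bgs_lock);
list_del_init(&cache->active_bg_list);
spin_unlock(&fs_info->zone_active_bgs_lock);
btrfs_put_block_group(cache);	/* drop the reference taken at insertion */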
out:
if (cache->alloc_offset > fs_info->zone_size) {
btrfs_err(fs_info,
[...]
kfree(cache->physical_map);
cache->physical_map = NULL;
}
+ bitmap_free(active);
kfree(caps);
kfree(alloc_offsets);
free_extent_map(em);
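Since `active` is initialized to NULL and bitmap_free() is kfree()-based, the single bitmap_free() call in the exit path is safe on every goto-out branch, including the ones taken before the bitmap is allocated.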