author    Vladimir Sementsov-Ogievskiy <vsementsov@virtuozzo.com>  2019-01-15 18:26:50 -0500
committer John Snow <jsnow@redhat.com>  2019-01-15 18:26:50 -0500
commit    a78a1a48cd1a2e86f23f113bb05e2d3dd8ae0bf6 (patch)
tree      2e2007b9051da3cdab632a4f29af3c8f1e777f3e /util/hbitmap.c
parent    fa9c2da29404be9baeb7b8f88fed3cb232688cd9 (diff)
dirty-bitmap: add bdrv_dirty_bitmap_next_dirty_area
The new function is an alternative to bdrv_dirty_iter_next_area(), which is incorrect and less efficient (see the later commit "block/mirror: fix and improve do_sync_target_write" for details).

Signed-off-by: Vladimir Sementsov-Ogievskiy <vsementsov@virtuozzo.com>
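For illustration, here is a minimal sketch (not part of this patch) of how a caller might walk every dirty area with the new primitive. The walk_dirty_areas() wrapper and the process_area() callback are hypothetical names; only hbitmap_next_dirty_area() itself comes from this commit:

#include "qemu/hbitmap.h"

/* Hypothetical consumer of one dirty area; not a QEMU API. */
static void process_area(uint64_t start, uint64_t count);

/* Sketch: visit all dirty areas of hb, in order, within [0, size). */
static void walk_dirty_areas(const HBitmap *hb, uint64_t size)
{
    uint64_t off = 0;

    while (off < size) {
        uint64_t start = off;              /* in: where to begin searching */
        uint64_t count = size - off;       /* in: how far to search */

        if (!hbitmap_next_dirty_area(hb, &start, &count)) {
            break;                         /* no dirty bits left in the window */
        }

        /* out: [start, start + count) is a contiguous dirty area */
        process_area(start, count);
        off = start + count;               /* resume just past this area */
    }
}

The key point is that *start and *count are both in/out parameters: on input they bound the search window, on output they describe the first dirty area found inside it.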
Diffstat (limited to 'util/hbitmap.c')
-rw-r--r--  util/hbitmap.c  39
1 file changed, 39 insertions, 0 deletions
diff --git a/util/hbitmap.c b/util/hbitmap.c
index 09b3719e44..fa356522c4 100644
--- a/util/hbitmap.c
+++ b/util/hbitmap.c
@@ -246,6 +246,45 @@ int64_t hbitmap_next_zero(const HBitmap *hb, uint64_t start, uint64_t count)
     return res;
 }
 
+bool hbitmap_next_dirty_area(const HBitmap *hb, uint64_t *start,
+                             uint64_t *count)
+{
+    HBitmapIter hbi;
+    int64_t first_dirty_off, area_end;
+    uint32_t granularity = 1UL << hb->granularity;
+    uint64_t end;
+
+    if (*start >= hb->orig_size || *count == 0) {
+        return false;
+    }
+
+    end = *count > hb->orig_size - *start ? hb->orig_size : *start + *count;
+
+    hbitmap_iter_init(&hbi, hb, *start);
+    first_dirty_off = hbitmap_iter_next(&hbi, false);
+
+    if (first_dirty_off < 0 || first_dirty_off >= end) {
+        return false;
+    }
+
+    if (first_dirty_off + granularity >= end) {
+        area_end = end;
+    } else {
+        area_end = hbitmap_next_zero(hb, first_dirty_off + granularity,
+                                     end - first_dirty_off - granularity);
+        if (area_end < 0) {
+            area_end = end;
+        }
+    }
+
+    if (first_dirty_off > *start) {
+        *start = first_dirty_off;
+    }
+    *count = area_end - *start;
+
+    return true;
+}
+
 bool hbitmap_empty(const HBitmap *hb)
 {
     return hb->count == 0;
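As a concrete illustration of the in/out contract, here is a short sketch (again not part of the patch; it only assumes the pre-existing hbitmap_alloc()/hbitmap_set()/hbitmap_free() API from util/hbitmap.c):

#include "qemu/osdep.h"
#include "qemu/hbitmap.h"

/* Sketch: with granularity 2^0, dirty bits [100, 130) and a search
 * window of [0, 200), the call reports start == 100 and count == 30. */
static void next_dirty_area_example(void)
{
    HBitmap *hb = hbitmap_alloc(1000, 0);   /* 1000 bits, granularity 0 */
    uint64_t start = 0, count = 200;

    hbitmap_set(hb, 100, 30);               /* mark [100, 130) dirty */

    if (hbitmap_next_dirty_area(hb, &start, &count)) {
        /* start is now 100, count is now 30 */
    }

    hbitmap_free(hb);
}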