Showing with 43 additions and 3 deletions.
  1. +13 −0 include/qemu/hbitmap.h
  2. +11 −0 tests/test-hbitmap.c
  3. +19 −3 util/hbitmap.c
@@ -145,6 +145,19 @@ void hbitmap_reset_all(HBitmap *hb);
*/
bool hbitmap_get(const HBitmap *hb, uint64_t item);

/**
* hbitmap_is_serializable:
* @hb: HBitmap which should be (de-)serialized.
*
* Returns whether the bitmap can actually be (de-)serialized. Other
* (de-)serialization functions may only be invoked if this function returns
* true.
*
* Calling (de-)serialization functions does not affect a bitmap's
* (de-)serializability.
*/
bool hbitmap_is_serializable(const HBitmap *hb);

/**
* hbitmap_serialization_granularity:
* @hb: HBitmap to operate on.
@@ -744,6 +744,8 @@ static void test_hbitmap_serialize_granularity(TestHBitmapData *data,
int r;

hbitmap_test_init(data, L3 * 2, 3);
g_assert(hbitmap_is_serializable(data->hb));

r = hbitmap_serialization_granularity(data->hb);
g_assert_cmpint(r, ==, 64 << 3);
}
@@ -768,6 +770,8 @@ static void hbitmap_test_serialize_range(TestHBitmapData *data,
if (count) {
hbitmap_set(data->hb, pos, count);
}

g_assert(hbitmap_is_serializable(data->hb));
hbitmap_serialize_part(data->hb, buf, 0, data->size);

/* Serialized buffer is inherently LE, convert it back manually to test */
@@ -788,6 +792,8 @@ static void hbitmap_test_serialize_range(TestHBitmapData *data,
memset(buf, 0, buf_size);
hbitmap_serialize_part(data->hb, buf, 0, data->size);
hbitmap_reset_all(data->hb);

g_assert(hbitmap_is_serializable(data->hb));
hbitmap_deserialize_part(data->hb, buf, 0, data->size, true);

for (i = 0; i < data->size; i++) {
@@ -810,6 +816,7 @@ static void test_hbitmap_serialize_basic(TestHBitmapData *data,
int num_positions = sizeof(positions) / sizeof(positions[0]);

hbitmap_test_init(data, L3, 0);
g_assert(hbitmap_is_serializable(data->hb));
buf_size = hbitmap_serialization_size(data->hb, 0, data->size);
buf = g_malloc0(buf_size);

@@ -841,6 +848,8 @@ static void test_hbitmap_serialize_part(TestHBitmapData *data,
hbitmap_set(data->hb, positions[i], 1);
}

g_assert(hbitmap_is_serializable(data->hb));

for (i = 0; i < data->size; i += buf_size) {
unsigned long *el = (unsigned long *)buf;
hbitmap_serialize_part(data->hb, buf, i, buf_size);
@@ -879,6 +888,8 @@ static void test_hbitmap_serialize_zeroes(TestHBitmapData *data,
hbitmap_set(data->hb, positions[i], L1);
}

g_assert(hbitmap_is_serializable(data->hb));

for (i = 0; i < num_positions; i++) {
hbitmap_deserialize_zeroes(data->hb, positions[i], min_l1, true);
hbitmap_iter_init(&iter, data->hb, 0);
@@ -387,6 +387,24 @@ void hbitmap_reset_all(HBitmap *hb)
hb->count = 0;
}

bool hbitmap_is_serializable(const HBitmap *hb)
{
    /* A bitmap may only be (de-)serialized if every serialized chunk can be
     * aligned to 64 bits, which keeps the endianness conversion safe on both
     * 32 bit and 64 bit hosts.
     *
     * hbitmap_serialization_granularity() expresses that alignment in items
     * covered (e.g. sectors) as:
     *     64 << hb->granularity
     * For that shifted value to remain representable in a uint64_t, the
     * granularity has to stay below 64 - 6 (ld(64) == 6, since 1 << 6 == 64),
     * i.e. below 58. Bitmaps violating this bound cannot be serialized. */

    return hb->granularity < 64 - 6;
}

bool hbitmap_get(const HBitmap *hb, uint64_t item)
{
/* Compute position and bit in the last layer. */
@@ -399,9 +417,7 @@ bool hbitmap_get(const HBitmap *hb, uint64_t item)

uint64_t hbitmap_serialization_granularity(const HBitmap *hb)
{
/* Must hold true so that the shift below is defined
* (ld(64) == 6, i.e. 1 << 6 == 64) */
assert(hb->granularity < 64 - 6);
assert(hbitmap_is_serializable(hb));

/* Require at least 64 bit granularity to be safe on both 64 bit and 32 bit
* hosts. */