Lines matching "start" in QEMU's util/hbitmap.c. Only the matching lines of
each function appear; elided code is marked with /* ... */.

From a comment illustrating granularity-1 behaviour (each bitmap bit covers
two items, and iteration reports the first item of each set group):

 *    set(start=0, count=9)     11111000  (iter: 0, 2, 4, 6, 8)
 *    reset(start=1, count=3)   00111000  (iter: 4, 6, 8)
 *    set(start=9, count=2)     00111100  (iter: 4, 6, 8, 10)
 *    reset(start=5, count=5)   00000000

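A minimal sketch of the same sequence, assuming the public API declared in
include/qemu/hbitmap.h (hbitmap_alloc, hbitmap_set, hbitmap_reset,
hbitmap_iter_init, hbitmap_iter_next, hbitmap_free). The comment's
reset(start=1, count=3) would trip the alignment asserts shown in
hbitmap_reset() below, so the sketch uses an aligned range with the same
resulting state:

#include "qemu/osdep.h"
#include "qemu/hbitmap.h"

static void print_dirty(HBitmap *bm)
{
    HBitmapIter hbi;
    int64_t off;

    hbitmap_iter_init(&hbi, bm, 0);
    while ((off = hbitmap_iter_next(&hbi)) >= 0) {
        printf(" %" PRId64, off);  /* first item of each set group */
    }
    printf("\n");
}

int main(void)
{
    /* 16 items, granularity 1: each bitmap bit covers 2^1 = 2 items. */
    HBitmap *bm = hbitmap_alloc(16, 1);

    hbitmap_set(bm, 0, 9);      /* 11111000 */
    print_dirty(bm);            /* -> 0 2 4 6 8 */

    hbitmap_reset(bm, 0, 4);    /* 00111000, aligned variant of reset(1, 3) */
    print_dirty(bm);            /* -> 4 6 8 */

    hbitmap_free(bm);
    return 0;
}
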
int64_t hbitmap_next_dirty(const HBitmap *hb, int64_t start, int64_t count)
{
    /* ... */
    assert(start >= 0 && count >= 0);

    if (start >= hb->orig_size || count == 0) {
        return -1;
    }

    /* Clamp the search window to the bitmap size. */
    end = count > hb->orig_size - start ? hb->orig_size : start + count;

    hbitmap_iter_init(&hbi, hb, start);
    /* ... return -1 if the first set bit lies at or past end ... */
    return MAX(start, first_dirty_off);
}

int64_t hbitmap_next_zero(const HBitmap *hb, int64_t start, int64_t count)
{
    size_t pos = (start >> hb->granularity) >> BITS_PER_LEVEL;
    /* ... */
    assert(start >= 0 && count >= 0);

    if (start >= hb->orig_size || count == 0) {
        return -1;
    }

    end_bit = count > hb->orig_size - start ?
                hb->size :
                ((start + count - 1) >> hb->granularity) + 1;
    /* ... */

    /* There may be some zero bits in @cur before @start.  We are not
     * interested in them, so set them before scanning. */
    start_bit_offset = (start >> hb->granularity) & (BITS_PER_LONG - 1);
    /* ... */
    assert((start >> hb->granularity) < hb->size);

    /* ... scan last-level words for a cleared bit, giving res ... */
    if (res < start) {
        /* The zero bit is inside start's own granularity group. */
        assert(((start - res) >> hb->granularity) == 0);
        return start;
    }
    /* ... */
}

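A quick probe of the two primitives' contract (illustrative values; bm is
assumed to be an HBitmap of size 100 with granularity 0, so offsets map
1:1 to bits):

hbitmap_set(bm, 10, 10);                         /* dirty: [10, 20) */

int64_t d = hbitmap_next_dirty(bm, 0, 100);      /* == 10 */
int64_t z = hbitmap_next_zero(bm, 10, 90);       /* == 20 */
int64_t n = hbitmap_next_dirty(bm, 20, 80);      /* == -1, nothing dirty */
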
bool hbitmap_next_dirty_area(const HBitmap *hb, int64_t start, int64_t end,
                             int64_t max_dirty_count,
                             int64_t *dirty_start, int64_t *dirty_count)
{
    /* ... */
    assert(start >= 0 && end >= 0 && max_dirty_count > 0);
    /* ... */
    if (start >= end) {
        return false;
    }

    start = hbitmap_next_dirty(hb, start, end - start);
    if (start < 0) {
        return false;
    }

    end = start + MIN(end - start, max_dirty_count);
    /* ... trim end back to the next zero bit, if one exists ... */
    next_zero = hbitmap_next_zero(hb, start, end - start);
    /* ... */
    *dirty_start = start;
    *dirty_count = end - start;
    return true;
}

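The function is built for loops that carve a bitmap into bounded dirty
chunks. A sketch of that calling pattern (bm, len, and process() are
assumed names, not from this file):

int64_t off = 0, dirty_start, dirty_count;

while (hbitmap_next_dirty_area(bm, off, len, 64 * 1024,
                               &dirty_start, &dirty_count)) {
    process(dirty_start, dirty_count);   /* hypothetical consumer */
    off = dirty_start + dirty_count;     /* resume after this area */
}
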
bool hbitmap_status(const HBitmap *hb, int64_t start, int64_t count,
                    int64_t *pnum)
{
    /* ... */
    assert(start >= 0);
    /* ... */
    assert(start + count <= hb->orig_size);

    next_dirty = hbitmap_next_dirty(hb, start, count);
    /* ... if nothing is dirty: *pnum = count, return false ... */
    if (next_dirty > start) {
        *pnum = next_dirty - start;   /* clean run up to the first dirty bit */
        return false;
    }

    assert(next_dirty == start);

    next_zero = hbitmap_next_zero(hb, start, count);
    /* ... if everything is dirty: *pnum = count, return true ... */
    assert(next_zero > start);
    *pnum = next_zero - start;        /* dirty run up to the first zero bit */
    return true;
}

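hbitmap_status() reports whether the queried range starts dirty or clean
and how long that uniform run is, which supports block-status style loops.
A sketch (bm, len, and handle_run() are assumed names):

int64_t off = 0, pnum;

while (off < len) {
    bool dirty = hbitmap_status(bm, off, len - off, &pnum);
    handle_run(off, pnum, dirty);   /* [off, off + pnum) is uniform */
    off += pnum;
}
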
/* Count the number of set bits between start and end, not accounting for
 * the granularity. */
static uint64_t hb_count_between(HBitmap *hb, uint64_t start, uint64_t last)
{
    /* ... */
    hbitmap_iter_init(&hbi, hb, start << hb->granularity);
    /* ... */
}

/* Setting starts at the last layer and propagates up if an element
 * changes. */
static inline bool hb_set_elem(unsigned long *elem, uint64_t start, uint64_t last)
{
    /* ... */
    assert((last >> BITS_PER_LEVEL) == (start >> BITS_PER_LEVEL));
    assert(start <= last);

    mask = 2UL << (last & (BITS_PER_LONG - 1));
    mask -= 1UL << (start & (BITS_PER_LONG - 1));
    /* ... OR the mask into *elem and report the change ... */
}

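The two mask lines select a contiguous bit range within one word. A worked
instance, assuming start = 3 and last = 5 land in the same word on a
64-bit host:

/* mask  = 2UL << (5 & 63);    -> ...01000000
 * mask -= 1UL << (3 & 63);    -> ...00111000, i.e. bits 3..5 inclusive
 */
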
static bool hb_set_between(HBitmap *hb, int level, uint64_t start,
                           uint64_t last)
{
    size_t pos = start >> BITS_PER_LEVEL;
    /* ... when the range spans more than one word: ... */
    uint64_t next = (start | (BITS_PER_LONG - 1)) + 1;
    changed |= hb_set_elem(&hb->levels[level][i], start, next - 1);
    /* ... fill the whole words in between ... */
    start = next;
    /* ... then the final (possibly partial) word: ... */
    changed |= hb_set_elem(&hb->levels[level][i], start, last);
    /* ... if anything changed, update the level above ... */
}

void hbitmap_set(HBitmap *hb, uint64_t start, uint64_t count)
{
    /* ... */
    uint64_t last = start + count - 1;
    /* ... */
    trace_hbitmap_set(hb, start, count,
                      start >> hb->granularity, last >> hb->granularity);

    /* Compute the range in the last layer and set it. */
    first = start >> hb->granularity;
    /* ... shift last, update hb->count, then: ... */
    if (hb_set_between(hb, HBITMAP_LEVELS - 1, first, last) && hb->meta) {
        hbitmap_set(hb->meta, start, count);
    }
}

static inline bool hb_reset_elem(unsigned long *elem, uint64_t start, uint64_t last)
{
    /* ... */
    assert((last >> BITS_PER_LEVEL) == (start >> BITS_PER_LEVEL));
    assert(start <= last);

    mask = 2UL << (last & (BITS_PER_LONG - 1));
    mask -= 1UL << (start & (BITS_PER_LONG - 1));
    /* ... clear the masked bits and report the change ... */
}

static bool hb_reset_between(HBitmap *hb, int level, uint64_t start,
                             uint64_t last)
{
    size_t pos = start >> BITS_PER_LEVEL;
    /* ... when the range spans more than one word: ... */
    uint64_t next = (start | (BITS_PER_LONG - 1)) + 1;
    /* A reset propagates upward only if the whole word became zero. */
    if (hb_reset_elem(&hb->levels[level][i], start, next - 1)) {
        /* ... */
    }
    /* ... clear the whole words in between ... */
    start = next;
    /* ... then the final (possibly partial) word: ... */
    if (hb_reset_elem(&hb->levels[level][i], start, last)) {
        /* ... */
    }
    /* ... */
}

void hbitmap_reset(HBitmap *hb, uint64_t start, uint64_t count)
{
    /* ... */
    uint64_t last = start + count - 1;
    /* ... */
    /* Resets must not touch neighbouring granularity groups, so the range
     * has to be aligned (only the tail may end at orig_size instead). */
    assert(QEMU_IS_ALIGNED(start, gran));
    assert(QEMU_IS_ALIGNED(count, gran) || (start + count == hb->orig_size));

    trace_hbitmap_reset(hb, start, count,
                        start >> hb->granularity, last >> hb->granularity);

    first = start >> hb->granularity;
    /* ... clear bits via hb_reset_between(); on any change, mark the meta
     * bitmap, which records modifications of either polarity: ... */
    if (hb_reset_between(hb, HBITMAP_LEVELS - 1, first, last) && hb->meta) {
        hbitmap_set(hb->meta, start, count);
    }
}

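The asymmetry between the two entry points is deliberate: hbitmap_set() may
round a range outward to whole granularity groups, while hbitmap_reset()
asserts alignment so rounding can never silently clear a neighbour's bits.
A sketch (assumed 16-item bitmap with granularity 1):

HBitmap *bm = hbitmap_alloc(16, 1);   /* 2 items per bitmap bit */

hbitmap_set(bm, 3, 1);      /* rounds out: marks the whole group {2, 3} */
hbitmap_reset(bm, 2, 2);    /* OK: start and count aligned to the group */
/* hbitmap_reset(bm, 3, 1) would trip assert(QEMU_IS_ALIGNED(start, gran)) */
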
/* Start should be aligned to serialization granularity; chunk size should
 * be aligned too, except for the last chunk. */
static void serialization_chunk(const HBitmap *hb,
                                uint64_t start, uint64_t count,
                                unsigned long **first_el, uint64_t *el_count)
{
    uint64_t last = start + count - 1;
    /* ... */
    assert((start & (gran - 1)) == 0);
    /* ... */
    /* Convert to positions of unsigned longs in the last level. */
    start = (start >> hb->granularity) >> BITS_PER_LEVEL;
    /* ... same conversion for last ... */
    *first_el = &hb->levels[HBITMAP_LEVELS - 1][start];
    *el_count = last - start + 1;
}

uint64_t hbitmap_serialization_size(const HBitmap *hb,
                                    uint64_t start, uint64_t count)
{
    /* ... */
    serialization_chunk(hb, start, count, &cur, &el_count);
    return el_count * sizeof(unsigned long);
}

void hbitmap_serialize_part(const HBitmap *hb, uint8_t *buf,
                            uint64_t start, uint64_t count)
{
    /* ... */
    serialization_chunk(hb, start, count, &cur, &el_count);
    /* ... copy el_count words out through buf ... */
}

void hbitmap_deserialize_part(HBitmap *hb, uint8_t *buf,
                              uint64_t start, uint64_t count, bool finish)
{
    /* ... */
    serialization_chunk(hb, start, count, &cur, &el_count);
    /* ... copy words in; rebuild the upper levels when finish is true ... */
}

void hbitmap_deserialize_zeroes(HBitmap *hb, uint64_t start, uint64_t count,
                                bool finish)
{
    /* ... */
    serialization_chunk(hb, start, count, &first, &el_count);
    /* ... zero the chunk ... */
}

void hbitmap_deserialize_ones(HBitmap *hb, uint64_t start, uint64_t count,
                              bool finish)
{
    /* ... */
    serialization_chunk(hb, start, count, &first, &el_count);
    /* ... fill the chunk with ones ... */
}

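A round-trip sketch tying the serialization helpers together (src and dst
are assumed to be HBitmaps of identical size and granularity; g_malloc0 and
g_free are glib, which QEMU uses throughout; start 0 with the full length
satisfies the alignment rule above):

uint64_t len = 16;   /* illustrative: serialize the whole bitmap */
uint64_t buf_size = hbitmap_serialization_size(src, 0, len);
uint8_t *buf = g_malloc0(buf_size);

hbitmap_serialize_part(src, buf, 0, len);
hbitmap_deserialize_part(dst, buf, 0, len, true);  /* finish immediately */
g_free(buf);
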
From hbitmap_truncate(), handling the shrinking case:

    /* Don't clear partial granularity groups;
     * start at the first full one. */
    uint64_t start = ROUND_UP(num_elements, UINT64_C(1) << hb->granularity);
    uint64_t fix_count = (hb->size << hb->granularity) - start;
    /* ... */
    hbitmap_reset(hb, start, fix_count);