1
2
3
4
5
6
7
8
9
10
11
12#include "qemu/osdep.h"
13#include "qemu/hbitmap.h"
14#include "qemu/host-utils.h"
15#include "trace.h"
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
/* HBitmaps provide an array of bits.  The bits are stored as usual in an
 * array of unsigned longs, but HBitmap is also optimized to provide fast
 * iteration over set bits; going from one bit to the next is O(logB n)
 * worst case, with B = sizeof(long) * CHAR_BIT: the result is low enough
 * that the number of levels is in fact fixed (HBITMAP_LEVELS).
 */
struct HBitmap {
    /* Number of total bits in the bottom level.  */
    uint64_t size;

    /* Number of set bits in the bottom level.  */
    uint64_t count;

    /* A scaling factor.  Given a granularity of G, each bit in the bitmap
     * represents a group of 2^G elements; positions passed to the public
     * API are shifted right by G before indexing the bottom level (see
     * hbitmap_set/hbitmap_get).
     */
    int granularity;

    /* A meta dirty bitmap that tracks which bits of this bitmap changed;
     * hbitmap_set()/hbitmap_reset() mark it whenever they actually flip
     * bits.  NULL unless hbitmap_create_meta() was called.
     */
    HBitmap *meta;

    /* A number of progressively less coarse bitmaps (i.e. level 0 is the
     * coarsest).  Each bit in level N represents a word in level N+1 that
     * has a set bit, except the last level where each bit represents the
     * actual bitmap.  This invariant is maintained by hb_set_between()
     * and hb_reset_between() and exploited by the iterator.
     */
    unsigned long *levels[HBITMAP_LEVELS];

    /* The length of each levels[] array, in unsigned longs.  */
    uint64_t sizes[HBITMAP_LEVELS];
};
97
98
99
100
/* Advance hbi to the next nonzero word and return it.  hbi->pos
 * is updated.  Returns zero if we reach the end of the bitmap.
 */
unsigned long hbitmap_iter_skip_words(HBitmapIter *hbi)
{
    size_t pos = hbi->pos;
    const HBitmap *hb = hbi->hb;
    unsigned i = HBITMAP_LEVELS - 1;

    unsigned long cur;
    do {
        /* Climb up the levels until one still has cached unvisited bits.  */
        cur = hbi->cur[--i];
        pos >>= BITS_PER_LEVEL;
    } while (cur == 0);

    /* Check for end of iteration.  We always use fewer than BITS_PER_LONG
     * bits in the level 0 bitmap; thus we can repurpose the most significant
     * bit as a sentinel.  The sentinel is set in hbitmap_alloc() and is
     * the only bit left once all real bits have been consumed.
     */
    if (i == 0 && cur == (1UL << (BITS_PER_LONG - 1))) {
        return 0;
    }
    for (; i < HBITMAP_LEVELS - 1; i++) {
        /* Shift back pos to the left, matching the right shifts above.
         * The lowest set bit of cur selects the word to descend into.
         */
        assert(cur);
        pos = (pos << BITS_PER_LEVEL) + ctzl(cur);
        hbi->cur[i] = cur & (cur - 1);   /* clear lowest set bit: consumed */

        /* Set up next level for iteration.  */
        cur = hb->levels[i + 1][pos];
    }

    hbi->pos = pos;
    trace_hbitmap_iter_skip_words(hbi->hb, hbi, pos, cur);

    assert(cur);
    return cur;
}
141
/* Initialize @hbi to iterate over @hb, starting at the group containing
 * item @first (un-scaled units; asserts it is within the bitmap).
 */
void hbitmap_iter_init(HBitmapIter *hbi, const HBitmap *hb, uint64_t first)
{
    unsigned i, bit;
    uint64_t pos;

    hbi->hb = hb;
    pos = first >> hb->granularity;
    assert(pos < hb->size);
    hbi->pos = pos >> BITS_PER_LEVEL;
    hbi->granularity = hb->granularity;

    for (i = HBITMAP_LEVELS; i-- > 0; ) {
        bit = pos & (BITS_PER_LONG - 1);
        pos >>= BITS_PER_LEVEL;

        /* Drop bits representing items before first.  */
        hbi->cur[i] = hb->levels[i][pos] & ~((1UL << bit) - 1);

        /* We have already added level i+1, so the lowest set bit has
         * been processed.  Clear it.
         */
        if (i != HBITMAP_LEVELS - 1) {
            hbi->cur[i] &= ~(1UL << bit);
        }
    }
}
168
169bool hbitmap_empty(const HBitmap *hb)
170{
171 return hb->count == 0;
172}
173
174int hbitmap_granularity(const HBitmap *hb)
175{
176 return hb->granularity;
177}
178
179uint64_t hbitmap_count(const HBitmap *hb)
180{
181 return hb->count << hb->granularity;
182}
183
184
185
186
/* Count the number of set bits in the range [start, last], inclusive.
 * Both bounds are in already-granularity-scaled bitmap cell units.
 */
static uint64_t hb_count_between(HBitmap *hb, uint64_t start, uint64_t last)
{
    HBitmapIter hbi;
    uint64_t count = 0;
    uint64_t end = last + 1;
    unsigned long cur;
    size_t pos;

    /* hbitmap_iter_init() shifts right by granularity, so undo it here.  */
    hbitmap_iter_init(&hbi, hb, start << hb->granularity);
    for (;;) {
        pos = hbitmap_iter_next_word(&hbi, &cur);
        if (pos >= (end >> BITS_PER_LEVEL)) {
            break;
        }
        count += ctpopl(cur);
    }

    if (pos == (end >> BITS_PER_LEVEL)) {
        /* Drop bits representing the END-th and subsequent items.  */
        int bit = end & (BITS_PER_LONG - 1);
        cur &= (1UL << bit) - 1;
        count += ctpopl(cur);
    }

    return count;
}
213
214
215
216
217static inline bool hb_set_elem(unsigned long *elem, uint64_t start, uint64_t last)
218{
219 unsigned long mask;
220 unsigned long old;
221
222 assert((last >> BITS_PER_LEVEL) == (start >> BITS_PER_LEVEL));
223 assert(start <= last);
224
225 mask = 2UL << (last & (BITS_PER_LONG - 1));
226 mask -= 1UL << (start & (BITS_PER_LONG - 1));
227 old = *elem;
228 *elem |= mask;
229 return old != *elem;
230}
231
232
233
/* The recursive workhorse for hbitmap_set (depth is bounded by
 * HBITMAP_LEVELS).  Sets bits [start, last] (cell units) of @level and,
 * when anything changed, repeats on the coarser level above so that the
 * "bit set iff child word nonzero" invariant holds.
 * Returns true if anything was changed.
 */
static bool hb_set_between(HBitmap *hb, int level, uint64_t start,
                           uint64_t last)
{
    size_t pos = start >> BITS_PER_LEVEL;
    size_t lastpos = last >> BITS_PER_LEVEL;
    bool changed = false;
    size_t i;

    i = pos;
    if (i < lastpos) {
        /* Partial first word, then fill whole words until lastpos.  */
        uint64_t next = (start | (BITS_PER_LONG - 1)) + 1;
        changed |= hb_set_elem(&hb->levels[level][i], start, next - 1);
        for (;;) {
            start = next;
            next += BITS_PER_LONG;
            if (++i == lastpos) {
                break;
            }
            /* A word that was all-zero counts as a change.  */
            changed |= (hb->levels[level][i] == 0);
            hb->levels[level][i] = ~0UL;
        }
    }
    /* The (possibly partial) last word.  */
    changed |= hb_set_elem(&hb->levels[level][i], start, last);

    /* If there was any change in this layer, the one above may need an
     * update too.
     */
    if (level > 0 && changed) {
        hb_set_between(hb, level - 1, pos, lastpos);
    }
    return changed;
}
266
/* Set the bits covering @count items starting at @start (un-scaled
 * units).  Updates the cached population count, and when bits actually
 * changed, records the region in the meta bitmap (if any).
 */
void hbitmap_set(HBitmap *hb, uint64_t start, uint64_t count)
{
    /* Compute range in the last layer.  */
    uint64_t first, n;
    uint64_t last = start + count - 1;

    trace_hbitmap_set(hb, start, count,
                      start >> hb->granularity, last >> hb->granularity);

    first = start >> hb->granularity;
    last >>= hb->granularity;
    assert(last < hb->size);
    n = last - first + 1;

    /* Only the bits that were previously clear add to the count.  */
    hb->count += n - hb_count_between(hb, first, last);
    if (hb_set_between(hb, HBITMAP_LEVELS - 1, first, last) &&
        hb->meta) {
        hbitmap_set(hb->meta, start, count);
    }
}
287
288
289
290
291static inline bool hb_reset_elem(unsigned long *elem, uint64_t start, uint64_t last)
292{
293 unsigned long mask;
294 bool blanked;
295
296 assert((last >> BITS_PER_LEVEL) == (start >> BITS_PER_LEVEL));
297 assert(start <= last);
298
299 mask = 2UL << (last & (BITS_PER_LONG - 1));
300 mask -= 1UL << (start & (BITS_PER_LONG - 1));
301 blanked = *elem != 0 && ((*elem & ~mask) == 0);
302 *elem &= ~mask;
303 return blanked;
304}
305
306
307
/* The recursive workhorse for hbitmap_reset (depth is bounded by
 * HBITMAP_LEVELS).  Clears bits [start, last] (cell units) of @level and
 * propagates to the coarser level above when words were blanked.
 * Returns true if anything was changed.
 */
static bool hb_reset_between(HBitmap *hb, int level, uint64_t start,
                             uint64_t last)
{
    size_t pos = start >> BITS_PER_LEVEL;
    size_t lastpos = last >> BITS_PER_LEVEL;
    bool changed = false;
    size_t i;

    i = pos;
    if (i < lastpos) {
        uint64_t next = (start | (BITS_PER_LONG - 1)) + 1;

        /* This needs more care than hbitmap_set's counterpart: the parent
         * bit for a word must stay set unless the word became entirely
         * zero.  If the first word was not blanked, exclude it from the
         * recursion range by advancing pos; the analogous trimming of
         * lastpos happens below for the final word.
         */
        if (hb_reset_elem(&hb->levels[level][i], start, next - 1)) {
            changed = true;
        } else {
            pos++;
        }

        for (;;) {
            start = next;
            next += BITS_PER_LONG;
            if (++i == lastpos) {
                break;
            }
            /* Whole interior words are simply zeroed.  */
            changed |= (hb->levels[level][i] != 0);
            hb->levels[level][i] = 0UL;
        }
    }

    /* Same as above, this time for the last word.  */
    if (hb_reset_elem(&hb->levels[level][i], start, last)) {
        changed = true;
    } else {
        lastpos--;
    }

    if (level > 0 && changed) {
        hb_reset_between(hb, level - 1, pos, lastpos);
    }

    return changed;

}
356
/* Clear the bits covering @count items starting at @start (un-scaled
 * units).  Updates the cached population count, and when bits actually
 * changed, records the region in the meta bitmap — the meta bitmap
 * tracks modifications in either direction, hence hbitmap_set() on it.
 */
void hbitmap_reset(HBitmap *hb, uint64_t start, uint64_t count)
{
    /* Compute range in the last layer.  */
    uint64_t first;
    uint64_t last = start + count - 1;

    trace_hbitmap_reset(hb, start, count,
                        start >> hb->granularity, last >> hb->granularity);

    first = start >> hb->granularity;
    last >>= hb->granularity;
    assert(last < hb->size);

    /* Only previously-set bits leave the count.  */
    hb->count -= hb_count_between(hb, first, last);
    if (hb_reset_between(hb, HBITMAP_LEVELS - 1, first, last) &&
        hb->meta) {
        hbitmap_set(hb->meta, start, count);
    }
}
376
377void hbitmap_reset_all(HBitmap *hb)
378{
379 unsigned int i;
380
381
382 for (i = HBITMAP_LEVELS; --i >= 1; ) {
383 memset(hb->levels[i], 0, hb->sizes[i] * sizeof(unsigned long));
384 }
385
386 hb->levels[0][0] = 1UL << (BITS_PER_LONG - 1);
387 hb->count = 0;
388}
389
390bool hbitmap_get(const HBitmap *hb, uint64_t item)
391{
392
393 uint64_t pos = item >> hb->granularity;
394 unsigned long bit = 1UL << (pos & (BITS_PER_LONG - 1));
395 assert(pos < hb->size);
396
397 return (hb->levels[HBITMAP_LEVELS - 1][pos >> BITS_PER_LEVEL] & bit) != 0;
398}
399
400uint64_t hbitmap_serialization_granularity(const HBitmap *hb)
401{
402
403
404 assert(hb->granularity < 64 - 6);
405
406
407
408 return UINT64_C(64) << hb->granularity;
409}
410
411
412
413
/* Locate the bottom-level words backing the serialized range
 * [start, start + count) of items: *first_el points at the first word,
 * *el_count receives the number of words.  @start must be aligned to the
 * serialization granularity, and @count too unless the range ends exactly
 * at the bitmap's end.
 */
static void serialization_chunk(const HBitmap *hb,
                                uint64_t start, uint64_t count,
                                unsigned long **first_el, uint64_t *el_count)
{
    uint64_t last = start + count - 1;
    uint64_t gran = hbitmap_serialization_granularity(hb);

    assert((start & (gran - 1)) == 0);
    assert((last >> hb->granularity) < hb->size);
    /* A non-final chunk must also end on an element boundary.  */
    if ((last >> hb->granularity) != hb->size - 1) {
        assert((count & (gran - 1)) == 0);
    }

    /* Convert item positions to bottom-level word indices.  */
    start = (start >> hb->granularity) >> BITS_PER_LEVEL;
    last = (last >> hb->granularity) >> BITS_PER_LEVEL;

    *first_el = &hb->levels[HBITMAP_LEVELS - 1][start];
    *el_count = last - start + 1;
}
433
434uint64_t hbitmap_serialization_size(const HBitmap *hb,
435 uint64_t start, uint64_t count)
436{
437 uint64_t el_count;
438 unsigned long *cur;
439
440 if (!count) {
441 return 0;
442 }
443 serialization_chunk(hb, start, count, &cur, &el_count);
444
445 return el_count * sizeof(unsigned long);
446}
447
448void hbitmap_serialize_part(const HBitmap *hb, uint8_t *buf,
449 uint64_t start, uint64_t count)
450{
451 uint64_t el_count;
452 unsigned long *cur, *end;
453
454 if (!count) {
455 return;
456 }
457 serialization_chunk(hb, start, count, &cur, &el_count);
458 end = cur + el_count;
459
460 while (cur != end) {
461 unsigned long el =
462 (BITS_PER_LONG == 32 ? cpu_to_le32(*cur) : cpu_to_le64(*cur));
463
464 memcpy(buf, &el, sizeof(el));
465 buf += sizeof(el);
466 cur++;
467 }
468}
469
/* Read little-endian serialized words from @buf straight into the
 * bottom-level words covering [start, start + count) items, converting
 * to host endianness in place.  The coarser levels and the cached count
 * stay stale until hbitmap_deserialize_finish() runs — done here when
 * @finish is true.
 */
void hbitmap_deserialize_part(HBitmap *hb, uint8_t *buf,
                              uint64_t start, uint64_t count,
                              bool finish)
{
    uint64_t el_count;
    unsigned long *cur, *end;

    if (!count) {
        return;
    }
    serialization_chunk(hb, start, count, &cur, &el_count);
    end = cur + el_count;

    while (cur != end) {
        memcpy(cur, buf, sizeof(*cur));

        /* Byte-swap in place to host order.  */
        if (BITS_PER_LONG == 32) {
            le32_to_cpus((uint32_t *)cur);
        } else {
            le64_to_cpus((uint64_t *)cur);
        }

        buf += sizeof(unsigned long);
        cur++;
    }
    if (finish) {
        hbitmap_deserialize_finish(hb);
    }
}
499
500void hbitmap_deserialize_zeroes(HBitmap *hb, uint64_t start, uint64_t count,
501 bool finish)
502{
503 uint64_t el_count;
504 unsigned long *first;
505
506 if (!count) {
507 return;
508 }
509 serialization_chunk(hb, start, count, &first, &el_count);
510
511 memset(first, 0, el_count * sizeof(unsigned long));
512 if (finish) {
513 hbitmap_deserialize_finish(hb);
514 }
515}
516
517void hbitmap_deserialize_finish(HBitmap *bitmap)
518{
519 int64_t i, size, prev_size;
520 int lev;
521
522
523
524 size = MAX((bitmap->size + BITS_PER_LONG - 1) >> BITS_PER_LEVEL, 1);
525 for (lev = HBITMAP_LEVELS - 1; lev-- > 0; ) {
526 prev_size = size;
527 size = MAX((size + BITS_PER_LONG - 1) >> BITS_PER_LEVEL, 1);
528 memset(bitmap->levels[lev], 0, size * sizeof(unsigned long));
529
530 for (i = 0; i < prev_size; ++i) {
531 if (bitmap->levels[lev + 1][i]) {
532 bitmap->levels[lev][i >> BITS_PER_LEVEL] |=
533 1UL << (i & (BITS_PER_LONG - 1));
534 }
535 }
536 }
537
538 bitmap->levels[0][0] |= 1UL << (BITS_PER_LONG - 1);
539}
540
541void hbitmap_free(HBitmap *hb)
542{
543 unsigned i;
544 assert(!hb->meta);
545 for (i = HBITMAP_LEVELS; i-- > 0; ) {
546 g_free(hb->levels[i]);
547 }
548 g_free(hb);
549}
550
/* Allocate a bitmap covering @size items, with each bit standing for a
 * group of 2^granularity items.  All bits start clear.
 */
HBitmap *hbitmap_alloc(uint64_t size, int granularity)
{
    HBitmap *hb = g_new0(struct HBitmap, 1);
    unsigned i;

    assert(granularity >= 0 && granularity < 64);
    /* Round up to one bottom-level bit per 2^granularity items.  */
    size = (size + (1ULL << granularity) - 1) >> granularity;
    assert(size <= ((uint64_t)1 << HBITMAP_LOG_MAX_SIZE));

    hb->size = size;
    hb->granularity = granularity;
    /* Each level is BITS_PER_LONG times coarser than the one below.  */
    for (i = HBITMAP_LEVELS; i-- > 0; ) {
        size = MAX((size + BITS_PER_LONG - 1) >> BITS_PER_LEVEL, 1);
        hb->sizes[i] = size;
        hb->levels[i] = g_new0(unsigned long, size);
    }

    /* We necessarily have free bits in level 0 due to the definition
     * of HBITMAP_LEVELS, so use one for a sentinel.  This speeds up
     * hbitmap_iter_skip_words.
     */
    assert(size == 1);
    hb->levels[0][0] |= 1UL << (BITS_PER_LONG - 1);
    return hb;
}
576
/* Grow or shrink @hb to cover @size items (un-scaled units), resizing
 * the level arrays as needed and keeping the meta bitmap in sync.
 */
void hbitmap_truncate(HBitmap *hb, uint64_t size)
{
    bool shrink;
    unsigned i;
    uint64_t num_elements = size;
    uint64_t old;

    /* Size comes in as logical elements, adjust for granularity.  */
    size = (size + (1ULL << hb->granularity) - 1) >> hb->granularity;
    assert(size <= ((uint64_t)1 << HBITMAP_LOG_MAX_SIZE));
    shrink = size < hb->size;

    /* Nothing to do for an unchanged cell count.  */
    if (size == hb->size) {
        return;
    }

    /* If we're losing bits, clear those bits before we invalidate all of
     * our invariants.  This keeps the bit count consistent and prevents
     * carrying garbage bits beyond the bounds of the new bitmap.
     */
    if (shrink) {
        /* Don't clear partial granularity groups; start at the first
         * full one after the new end.
         */
        uint64_t start = ROUND_UP(num_elements, UINT64_C(1) << hb->granularity);
        uint64_t fix_count = (hb->size << hb->granularity) - start;

        assert(fix_count);
        hbitmap_reset(hb, start, fix_count);
    }

    hb->size = size;
    for (i = HBITMAP_LEVELS; i-- > 0; ) {
        size = MAX(BITS_TO_LONGS(size), 1);
        if (hb->sizes[i] == size) {
            /* Coarser levels can only be unchanged too.  */
            break;
        }
        old = hb->sizes[i];
        hb->sizes[i] = size;
        hb->levels[i] = g_realloc(hb->levels[i], size * sizeof(unsigned long));
        if (!shrink) {
            /* The freshly grown tail must start out clear.  */
            memset(&hb->levels[i][old], 0x00,
                   (size - old) * sizeof(*hb->levels[i]));
        }
    }
    if (hb->meta) {
        hbitmap_truncate(hb->meta, hb->size << hb->granularity);
    }
}
626
627
628
629
630
631
632
633
634
635bool hbitmap_merge(HBitmap *a, const HBitmap *b)
636{
637 int i;
638 uint64_t j;
639
640 if ((a->size != b->size) || (a->granularity != b->granularity)) {
641 return false;
642 }
643
644 if (hbitmap_count(b) == 0) {
645 return true;
646 }
647
648
649
650
651
652 for (i = HBITMAP_LEVELS - 1; i >= 0; i--) {
653 for (j = 0; j < a->sizes[i]; j++) {
654 a->levels[i][j] |= b->levels[i][j];
655 }
656 }
657
658 return true;
659}
660
661HBitmap *hbitmap_create_meta(HBitmap *hb, int chunk_size)
662{
663 assert(!(chunk_size & (chunk_size - 1)));
664 assert(!hb->meta);
665 hb->meta = hbitmap_alloc(hb->size << hb->granularity,
666 hb->granularity + ctz32(chunk_size));
667 return hb->meta;
668}
669
670void hbitmap_free_meta(HBitmap *hb)
671{
672 assert(hb->meta);
673 hbitmap_free(hb->meta);
674 hb->meta = NULL;
675}
676