1
2
3
4
5
6
7
8
9
10
11
12#include "qemu/osdep.h"
13#include <glib.h>
14#include "qemu/hbitmap.h"
15#include "qemu/host-utils.h"
16#include "trace.h"
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
/* A hierarchical bitmap: the bits proper live in the last level, and each
 * higher level has one bit per word of the level below, set whenever that
 * word is nonzero.  This lets iteration skip large empty regions quickly.
 */
struct HBitmap {
    /* Number of usable bits in the bottom level (i.e. number of items
     * after dividing by the granularity).  */
    uint64_t size;

    /* Number of set bits in the bottom level.  */
    uint64_t count;

    /* A scaling factor: each bit in the bottom level represents
     * (1 << granularity) items.  */
    int granularity;

    /* Optional meta bitmap that records which words of this bitmap were
     * modified by set/reset; NULL unless hbitmap_create_meta was called.
     * NOTE(review): set on hbitmap_set AND hbitmap_reset — see those
     * functions.  */
    HBitmap *meta;

    /* The tree of bitmap levels; levels[HBITMAP_LEVELS - 1] holds the
     * actual bits, levels[0] is the root (a single word with a sentinel
     * bit in its MSB, see hbitmap_alloc).  */
    unsigned long *levels[HBITMAP_LEVELS];

    /* Number of unsigned longs allocated for each level.  */
    uint64_t sizes[HBITMAP_LEVELS];
};
98
99
100
101
/* Advance hbi to the next nonzero word in the bottom level and return that
 * word; hbi->pos is updated to the word's index.  Returns 0 when the end of
 * the bitmap is reached.
 */
unsigned long hbitmap_iter_skip_words(HBitmapIter *hbi)
{
    size_t pos = hbi->pos;
    const HBitmap *hb = hbi->hb;
    unsigned i = HBITMAP_LEVELS - 1;

    unsigned long cur;
    do {
        /* Climb up until a level with pending bits is found.  */
        cur = hbi->cur[--i];
        pos >>= BITS_PER_LEVEL;
    } while (cur == 0);

    /* Check for end of iteration.  Level 0 uses fewer than BITS_PER_LONG
     * bits, so its most significant bit is repurposed as a sentinel (set in
     * hbitmap_alloc); reaching it with no other bit set means we are done.
     * The sentinel also guarantees the loop above terminates without an
     * explicit bound check on i.
     */
    if (i == 0 && cur == (1UL << (BITS_PER_LONG - 1))) {
        return 0;
    }
    for (; i < HBITMAP_LEVELS - 1; i++) {
        /* Shift pos back left, undoing the right shifts above; the index
         * of this word's least significant set bit supplies the low-order
         * bits of the position in the next level down.
         */
        assert(cur);
        pos = (pos << BITS_PER_LEVEL) + ctzl(cur);
        hbi->cur[i] = cur & (cur - 1);   /* clear the bit just consumed */

        /* Load the corresponding word of the next level for iteration.  */
        cur = hb->levels[i + 1][pos];
    }

    hbi->pos = pos;
    trace_hbitmap_iter_skip_words(hbi->hb, hbi, pos, cur);

    assert(cur);
    return cur;
}
142
/* Initialize hbi to iterate over hb, starting at item index `first`
 * (an unscaled index; it is divided by the granularity here).
 */
void hbitmap_iter_init(HBitmapIter *hbi, const HBitmap *hb, uint64_t first)
{
    unsigned i, bit;
    uint64_t pos;

    hbi->hb = hb;
    pos = first >> hb->granularity;
    assert(pos < hb->size);
    hbi->pos = pos >> BITS_PER_LEVEL;
    hbi->granularity = hb->granularity;

    for (i = HBITMAP_LEVELS; i-- > 0; ) {
        bit = pos & (BITS_PER_LONG - 1);
        pos >>= BITS_PER_LEVEL;

        /* Drop bits representing items before `first`.  */
        hbi->cur[i] = hb->levels[i][pos] & ~((1UL << bit) - 1);

        /* For levels above the bottom one, the lowest set bit has already
         * been accounted for by descending into level i + 1, so clear it
         * to avoid visiting it again.
         */
        if (i != HBITMAP_LEVELS - 1) {
            hbi->cur[i] &= ~(1UL << bit);
        }
    }
}
169
170bool hbitmap_empty(const HBitmap *hb)
171{
172 return hb->count == 0;
173}
174
/* Return the bitmap's granularity: each bit covers (1 << granularity)
 * items.
 */
int hbitmap_granularity(const HBitmap *hb)
{
    return hb->granularity;
}
179
/* Return the number of items covered by set bits: the internal set-bit
 * count scaled back up by the granularity.
 */
uint64_t hbitmap_count(const HBitmap *hb)
{
    return hb->count << hb->granularity;
}
184
185
186
187
/* Count the number of set bits in the range [start, last], inclusive.
 * Both positions are bit indexes in the bottom level, i.e. already
 * adjusted for the granularity.
 */
static uint64_t hb_count_between(HBitmap *hb, uint64_t start, uint64_t last)
{
    HBitmapIter hbi;
    uint64_t count = 0;
    uint64_t end = last + 1;
    unsigned long cur;
    size_t pos;

    hbitmap_iter_init(&hbi, hb, start << hb->granularity);
    for (;;) {
        pos = hbitmap_iter_next_word(&hbi, &cur);
        /* Stop before the word that contains `end`.  */
        if (pos >= (end >> BITS_PER_LEVEL)) {
            break;
        }
        count += ctpopl(cur);
    }

    if (pos == (end >> BITS_PER_LEVEL)) {
        /* Add bits of the final, partial word, below position `end`.
         * If end is word-aligned the mask is 0 and nothing is added.  */
        int bit = end & (BITS_PER_LONG - 1);
        cur &= (1UL << bit) - 1;
        count += ctpopl(cur);
    }

    return count;
}
214
215
216
217
218static inline bool hb_set_elem(unsigned long *elem, uint64_t start, uint64_t last)
219{
220 unsigned long mask;
221 unsigned long old;
222
223 assert((last >> BITS_PER_LEVEL) == (start >> BITS_PER_LEVEL));
224 assert(start <= last);
225
226 mask = 2UL << (last & (BITS_PER_LONG - 1));
227 mask -= 1UL << (start & (BITS_PER_LONG - 1));
228 old = *elem;
229 *elem |= mask;
230 return old != *elem;
231}
232
233
234
/* Set bits [start, last] in the given level and propagate the change
 * upwards.  The recursion depth is bounded by HBITMAP_LEVELS.  Returns
 * true if any bit actually changed.
 */
static bool hb_set_between(HBitmap *hb, int level, uint64_t start,
                           uint64_t last)
{
    size_t pos = start >> BITS_PER_LEVEL;
    size_t lastpos = last >> BITS_PER_LEVEL;
    bool changed = false;
    size_t i;

    i = pos;
    if (i < lastpos) {
        /* `next` is the first bit position of the word after `start`.  */
        uint64_t next = (start | (BITS_PER_LONG - 1)) + 1;
        changed |= hb_set_elem(&hb->levels[level][i], start, next - 1);
        for (;;) {
            start = next;
            next += BITS_PER_LONG;
            if (++i == lastpos) {
                break;
            }
            /* Whole words in the middle of the range are filled directly. */
            changed |= (hb->levels[level][i] == 0);
            hb->levels[level][i] = ~0UL;
        }
    }
    /* Final (possibly only) word, ending at `last`.  */
    changed |= hb_set_elem(&hb->levels[level][i], start, last);

    /* If there was any change in this layer, the corresponding summary
     * bits in the level above may need to be set too.
     */
    if (level > 0 && changed) {
        hb_set_between(hb, level - 1, pos, lastpos);
    }
    return changed;
}
267
268void hbitmap_set(HBitmap *hb, uint64_t start, uint64_t count)
269{
270
271 uint64_t first, n;
272 uint64_t last = start + count - 1;
273
274 trace_hbitmap_set(hb, start, count,
275 start >> hb->granularity, last >> hb->granularity);
276
277 first = start >> hb->granularity;
278 last >>= hb->granularity;
279 assert(last < hb->size);
280 n = last - first + 1;
281
282 hb->count += n - hb_count_between(hb, first, last);
283 if (hb_set_between(hb, HBITMAP_LEVELS - 1, first, last) &&
284 hb->meta) {
285 hbitmap_set(hb->meta, start, count);
286 }
287}
288
289
290
291
292static inline bool hb_reset_elem(unsigned long *elem, uint64_t start, uint64_t last)
293{
294 unsigned long mask;
295 bool blanked;
296
297 assert((last >> BITS_PER_LEVEL) == (start >> BITS_PER_LEVEL));
298 assert(start <= last);
299
300 mask = 2UL << (last & (BITS_PER_LONG - 1));
301 mask -= 1UL << (start & (BITS_PER_LONG - 1));
302 blanked = *elem != 0 && ((*elem & ~mask) == 0);
303 *elem &= ~mask;
304 return blanked;
305}
306
307
308
/* Clear bits [start, last] in the given level and propagate upwards.
 * Returns true if any bit actually changed.
 */
static bool hb_reset_between(HBitmap *hb, int level, uint64_t start,
                             uint64_t last)
{
    size_t pos = start >> BITS_PER_LEVEL;
    size_t lastpos = last >> BITS_PER_LEVEL;
    bool changed = false;
    size_t i;

    i = pos;
    if (i < lastpos) {
        uint64_t next = (start | (BITS_PER_LONG - 1)) + 1;

        /* Clearing needs a subtler test than setting: the summary bit in
         * the level above may only be cleared if the whole word below
         * became zero.  If the first word still has bits set, shrink the
         * upper-level range from the left so its summary bit survives.
         */
        if (hb_reset_elem(&hb->levels[level][i], start, next - 1)) {
            changed = true;
        } else {
            pos++;
        }

        for (;;) {
            start = next;
            next += BITS_PER_LONG;
            if (++i == lastpos) {
                break;
            }
            /* Whole words in the middle of the range are zeroed directly. */
            changed |= (hb->levels[level][i] != 0);
            hb->levels[level][i] = 0UL;
        }
    }

    /* Same idea for the last word: keep its upper-level summary bit if
     * bits remain, by shrinking the range from the right.  */
    if (hb_reset_elem(&hb->levels[level][i], start, last)) {
        changed = true;
    } else {
        lastpos--;
    }

    if (level > 0 && changed) {
        hb_reset_between(hb, level - 1, pos, lastpos);
    }

    return changed;

}
357
358void hbitmap_reset(HBitmap *hb, uint64_t start, uint64_t count)
359{
360
361 uint64_t first;
362 uint64_t last = start + count - 1;
363
364 trace_hbitmap_reset(hb, start, count,
365 start >> hb->granularity, last >> hb->granularity);
366
367 first = start >> hb->granularity;
368 last >>= hb->granularity;
369 assert(last < hb->size);
370
371 hb->count -= hb_count_between(hb, first, last);
372 if (hb_reset_between(hb, HBITMAP_LEVELS - 1, first, last) &&
373 hb->meta) {
374 hbitmap_set(hb->meta, start, count);
375 }
376}
377
378void hbitmap_reset_all(HBitmap *hb)
379{
380 unsigned int i;
381
382
383 for (i = HBITMAP_LEVELS; --i >= 1; ) {
384 memset(hb->levels[i], 0, hb->sizes[i] * sizeof(unsigned long));
385 }
386
387 hb->levels[0][0] = 1UL << (BITS_PER_LONG - 1);
388 hb->count = 0;
389}
390
391bool hbitmap_get(const HBitmap *hb, uint64_t item)
392{
393
394 uint64_t pos = item >> hb->granularity;
395 unsigned long bit = 1UL << (pos & (BITS_PER_LONG - 1));
396 assert(pos < hb->size);
397
398 return (hb->levels[HBITMAP_LEVELS - 1][pos >> BITS_PER_LEVEL] & bit) != 0;
399}
400
401uint64_t hbitmap_serialization_granularity(const HBitmap *hb)
402{
403
404
405 assert(hb->granularity < 64 - 6);
406
407
408
409 return UINT64_C(64) << hb->granularity;
410}
411
412
413
414
/* Map a serialization range [start, start + count) of items to the span of
 * bottom-level words that stores it: *first_el points at the first word and
 * *el_count is the number of words.
 */
static void serialization_chunk(const HBitmap *hb,
                                uint64_t start, uint64_t count,
                                unsigned long **first_el, uint64_t *el_count)
{
    uint64_t last = start + count - 1;
    uint64_t gran = hbitmap_serialization_granularity(hb);

    /* The chunk must start aligned to the serialization granularity; the
     * size must be aligned too, unless the chunk runs to the very end of
     * the bitmap.  */
    assert((start & (gran - 1)) == 0);
    assert((last >> hb->granularity) < hb->size);
    if ((last >> hb->granularity) != hb->size - 1) {
        assert((count & (gran - 1)) == 0);
    }

    /* Convert item indexes to word indexes in the bottom level.  */
    start = (start >> hb->granularity) >> BITS_PER_LEVEL;
    last = (last >> hb->granularity) >> BITS_PER_LEVEL;

    *first_el = &hb->levels[HBITMAP_LEVELS - 1][start];
    *el_count = last - start + 1;
}
434
435uint64_t hbitmap_serialization_size(const HBitmap *hb,
436 uint64_t start, uint64_t count)
437{
438 uint64_t el_count;
439 unsigned long *cur;
440
441 if (!count) {
442 return 0;
443 }
444 serialization_chunk(hb, start, count, &cur, &el_count);
445
446 return el_count * sizeof(unsigned long);
447}
448
449void hbitmap_serialize_part(const HBitmap *hb, uint8_t *buf,
450 uint64_t start, uint64_t count)
451{
452 uint64_t el_count;
453 unsigned long *cur, *end;
454
455 if (!count) {
456 return;
457 }
458 serialization_chunk(hb, start, count, &cur, &el_count);
459 end = cur + el_count;
460
461 while (cur != end) {
462 unsigned long el =
463 (BITS_PER_LONG == 32 ? cpu_to_le32(*cur) : cpu_to_le64(*cur));
464
465 memcpy(buf, &el, sizeof(el));
466 buf += sizeof(el);
467 cur++;
468 }
469}
470
/* Load serialized little-endian data from buf into the bottom-level words
 * covering [start, start + count).  The interior levels and the count are
 * NOT updated here; pass finish=true on the last chunk (or call
 * hbitmap_deserialize_finish yourself) to restore the invariants.
 */
void hbitmap_deserialize_part(HBitmap *hb, uint8_t *buf,
                              uint64_t start, uint64_t count,
                              bool finish)
{
    uint64_t el_count;
    unsigned long *cur, *end;

    if (!count) {
        return;
    }
    serialization_chunk(hb, start, count, &cur, &el_count);
    end = cur + el_count;

    while (cur != end) {
        memcpy(cur, buf, sizeof(*cur));

        /* Serialized data is little-endian; convert in place to host order. */
        if (BITS_PER_LONG == 32) {
            le32_to_cpus((uint32_t *)cur);
        } else {
            le64_to_cpus((uint64_t *)cur);
        }

        buf += sizeof(unsigned long);
        cur++;
    }
    if (finish) {
        hbitmap_deserialize_finish(hb);
    }
}
500
501void hbitmap_deserialize_zeroes(HBitmap *hb, uint64_t start, uint64_t count,
502 bool finish)
503{
504 uint64_t el_count;
505 unsigned long *first;
506
507 if (!count) {
508 return;
509 }
510 serialization_chunk(hb, start, count, &first, &el_count);
511
512 memset(first, 0, el_count * sizeof(unsigned long));
513 if (finish) {
514 hbitmap_deserialize_finish(hb);
515 }
516}
517
518void hbitmap_deserialize_finish(HBitmap *bitmap)
519{
520 int64_t i, size, prev_size;
521 int lev;
522
523
524
525 size = MAX((bitmap->size + BITS_PER_LONG - 1) >> BITS_PER_LEVEL, 1);
526 for (lev = HBITMAP_LEVELS - 1; lev-- > 0; ) {
527 prev_size = size;
528 size = MAX((size + BITS_PER_LONG - 1) >> BITS_PER_LEVEL, 1);
529 memset(bitmap->levels[lev], 0, size * sizeof(unsigned long));
530
531 for (i = 0; i < prev_size; ++i) {
532 if (bitmap->levels[lev + 1][i]) {
533 bitmap->levels[lev][i >> BITS_PER_LEVEL] |=
534 1UL << (i & (BITS_PER_LONG - 1));
535 }
536 }
537 }
538
539 bitmap->levels[0][0] |= 1UL << (BITS_PER_LONG - 1);
540}
541
542void hbitmap_free(HBitmap *hb)
543{
544 unsigned i;
545 assert(!hb->meta);
546 for (i = HBITMAP_LEVELS; i-- > 0; ) {
547 g_free(hb->levels[i]);
548 }
549 g_free(hb);
550}
551
/* Allocate a bitmap covering `size` items, where each bit represents
 * (1 << granularity) items.  All bits start clear.
 */
HBitmap *hbitmap_alloc(uint64_t size, int granularity)
{
    HBitmap *hb = g_new0(struct HBitmap, 1);
    unsigned i;

    assert(granularity >= 0 && granularity < 64);
    /* Scale the size down by the granularity, rounding up.  */
    size = (size + (1ULL << granularity) - 1) >> granularity;
    assert(size <= ((uint64_t)1 << HBITMAP_LOG_MAX_SIZE));

    hb->size = size;
    hb->granularity = granularity;
    /* Each level is a factor BITS_PER_LONG smaller than the one below.  */
    for (i = HBITMAP_LEVELS; i-- > 0; ) {
        size = MAX((size + BITS_PER_LONG - 1) >> BITS_PER_LEVEL, 1);
        hb->sizes[i] = size;
        hb->levels[i] = g_new0(unsigned long, size);
    }

    /* Level 0 collapses to a single word, so there are necessarily free
     * bits in it; use the most significant one as a sentinel so the
     * iterator (hbitmap_iter_skip_words) can detect the end of the bitmap
     * without extra bound checks.
     */
    assert(size == 1);
    hb->levels[0][0] |= 1UL << (BITS_PER_LONG - 1);
    return hb;
}
577
/* Resize the bitmap to cover `size` items (unscaled), growing or
 * shrinking each level's storage as needed.  A meta bitmap, if present,
 * is resized to match.
 */
void hbitmap_truncate(HBitmap *hb, uint64_t size)
{
    bool shrink;
    unsigned i;
    uint64_t num_elements = size;
    uint64_t old;

    /* Size comes in as logical elements; adjust for the granularity.  */
    size = (size + (1ULL << hb->granularity) - 1) >> hb->granularity;
    assert(size <= ((uint64_t)1 << HBITMAP_LOG_MAX_SIZE));
    shrink = size < hb->size;

    /* Bit count is identical after rounding; nothing to do.  */
    if (size == hb->size) {
        return;
    }

    /* When shrinking, clear the bits beyond the new end before the size
     * field and the level arrays are changed: this keeps hb->count
     * consistent and prevents garbage bits from surviving past the end
     * of the map.
     */
    if (shrink) {
        /* Don't clear partial granularity groups; start at the first
         * full one past the new end.  */
        uint64_t start = ROUND_UP(num_elements, UINT64_C(1) << hb->granularity);
        uint64_t fix_count = (hb->size << hb->granularity) - start;

        assert(fix_count);
        hbitmap_reset(hb, start, fix_count);
    }

    hb->size = size;
    /* Walk up from the bottom level, reallocating until a level's word
     * count is unchanged (all levels above it are then unchanged too).  */
    for (i = HBITMAP_LEVELS; i-- > 0; ) {
        size = MAX(BITS_TO_LONGS(size), 1);
        if (hb->sizes[i] == size) {
            break;
        }
        old = hb->sizes[i];
        hb->sizes[i] = size;
        hb->levels[i] = g_realloc(hb->levels[i], size * sizeof(unsigned long));
        if (!shrink) {
            /* Zero the newly added tail of the level.  */
            memset(&hb->levels[i][old], 0x00,
                   (size - old) * sizeof(*hb->levels[i]));
        }
    }
    if (hb->meta) {
        hbitmap_truncate(hb->meta, hb->size << hb->granularity);
    }
}
627
628
629
630
631
632
633
634
635
636bool hbitmap_merge(HBitmap *a, const HBitmap *b)
637{
638 int i;
639 uint64_t j;
640
641 if ((a->size != b->size) || (a->granularity != b->granularity)) {
642 return false;
643 }
644
645 if (hbitmap_count(b) == 0) {
646 return true;
647 }
648
649
650
651
652
653 for (i = HBITMAP_LEVELS - 1; i >= 0; i--) {
654 for (j = 0; j < a->sizes[i]; j++) {
655 a->levels[i][j] |= b->levels[i][j];
656 }
657 }
658
659 return true;
660}
661
/* Create a meta bitmap for hb: one meta bit tracks modifications to
 * chunk_size bits of hb (chunk_size must be a power of two).  The meta
 * bitmap is updated by hbitmap_set/hbitmap_reset on hb.
 */
HBitmap *hbitmap_create_meta(HBitmap *hb, int chunk_size)
{
    /* chunk_size must be a power of two.  */
    assert(!(chunk_size & (chunk_size - 1)));
    assert(!hb->meta);
    /* The meta bitmap covers the same item range, at a granularity coarser
     * by log2(chunk_size).  */
    hb->meta = hbitmap_alloc(hb->size << hb->granularity,
                             hb->granularity + ctz32(chunk_size));
    return hb->meta;
}
670
671void hbitmap_free_meta(HBitmap *hb)
672{
673 assert(hb->meta);
674 hbitmap_free(hb->meta);
675 hb->meta = NULL;
676}
677