qemu/util/bitmap.c
/*
 * Bitmap Module
 *
 * Stolen from linux/src/lib/bitmap.c
 *
 * Copyright (C) 2010 Corentin Chary
 *
 * This source code is licensed under the GNU General Public License,
 * Version 2.
 */

#include "qemu/bitops.h"
#include "qemu/bitmap.h"

/*
 * Bitmaps provide an array of bits, implemented using an
 * array of unsigned longs.  The number of valid bits in a
 * given bitmap does _not_ need to be an exact multiple of
 * BITS_PER_LONG.
 *
 * The possible unused bits in the last, partially used word
 * of a bitmap are 'don't care'.  The implementation makes
 * no particular effort to keep them zero.  It ensures that
 * their value will not affect the results of any operation.
 * The bitmap operations that return Boolean (bitmap_empty,
 * for example) or scalar (bitmap_weight, for example) results
 * carefully filter out these unused bits from impacting their
 * results.
 *
 * These operations actually hold to a slightly stronger rule:
 * if the bitmaps passed to these operations have no unused
 * bits set, then the bitmaps they produce will not have any
 * unused bits set either.
 *
 * The byte ordering of bitmaps is more natural on little
 * endian architectures.
 */

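/*
 * For illustration (assuming 64-bit unsigned long): a 68-bit bitmap
 * occupies two words.  Bits 4..63 of the second word are 'don't care';
 * BITMAP_LAST_WORD_MASK(68) selects only the four valid bits of that
 * word before a Boolean or scalar result is computed.
 */

/* Return 1 if no bit below 'bits' is set in 'bitmap', 0 otherwise. */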
int slow_bitmap_empty(const unsigned long *bitmap, int bits)
{
    int k, lim = bits/BITS_PER_LONG;

    for (k = 0; k < lim; ++k) {
        if (bitmap[k]) {
            return 0;
        }
    }
    if (bits % BITS_PER_LONG) {
        if (bitmap[k] & BITMAP_LAST_WORD_MASK(bits)) {
            return 0;
        }
    }

    return 1;
}

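/* Return 1 if every bit below 'bits' is set in 'bitmap', 0 otherwise. */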
int slow_bitmap_full(const unsigned long *bitmap, int bits)
{
    int k, lim = bits/BITS_PER_LONG;

    for (k = 0; k < lim; ++k) {
        if (~bitmap[k]) {
            return 0;
        }
    }

    if (bits % BITS_PER_LONG) {
        if (~bitmap[k] & BITMAP_LAST_WORD_MASK(bits)) {
            return 0;
        }
    }

    return 1;
}

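/* Return 1 if the first 'bits' bits of 'bitmap1' and 'bitmap2' are equal. */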
int slow_bitmap_equal(const unsigned long *bitmap1,
                      const unsigned long *bitmap2, int bits)
{
    int k, lim = bits/BITS_PER_LONG;

    for (k = 0; k < lim; ++k) {
        if (bitmap1[k] != bitmap2[k]) {
            return 0;
        }
    }

    if (bits % BITS_PER_LONG) {
        if ((bitmap1[k] ^ bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits)) {
            return 0;
        }
    }

    return 1;
}

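/* Store the complement of the first 'bits' bits of 'src' into 'dst'. */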
void slow_bitmap_complement(unsigned long *dst, const unsigned long *src,
                            int bits)
{
    int k, lim = bits/BITS_PER_LONG;

    for (k = 0; k < lim; ++k) {
        dst[k] = ~src[k];
    }

    if (bits % BITS_PER_LONG) {
        dst[k] = ~src[k] & BITMAP_LAST_WORD_MASK(bits);
    }
}

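/* dst = bitmap1 & bitmap2; returns 1 if any bit of the result is set. */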
int slow_bitmap_and(unsigned long *dst, const unsigned long *bitmap1,
                    const unsigned long *bitmap2, int bits)
{
    int k;
    int nr = BITS_TO_LONGS(bits);
    unsigned long result = 0;

    for (k = 0; k < nr; k++) {
        result |= (dst[k] = bitmap1[k] & bitmap2[k]);
    }
    return result != 0;
}

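/* dst = bitmap1 | bitmap2. */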
void slow_bitmap_or(unsigned long *dst, const unsigned long *bitmap1,
                    const unsigned long *bitmap2, int bits)
{
    int k;
    int nr = BITS_TO_LONGS(bits);

    for (k = 0; k < nr; k++) {
        dst[k] = bitmap1[k] | bitmap2[k];
    }
}

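/* dst = bitmap1 ^ bitmap2. */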
void slow_bitmap_xor(unsigned long *dst, const unsigned long *bitmap1,
                     const unsigned long *bitmap2, int bits)
{
    int k;
    int nr = BITS_TO_LONGS(bits);

    for (k = 0; k < nr; k++) {
        dst[k] = bitmap1[k] ^ bitmap2[k];
    }
}

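/* dst = bitmap1 & ~bitmap2; returns 1 if any bit of the result is set. */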
int slow_bitmap_andnot(unsigned long *dst, const unsigned long *bitmap1,
                       const unsigned long *bitmap2, int bits)
{
    int k;
    int nr = BITS_TO_LONGS(bits);
    unsigned long result = 0;

    for (k = 0; k < nr; k++) {
        result |= (dst[k] = bitmap1[k] & ~bitmap2[k]);
    }
    return result != 0;
}

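/* Mask covering bit 'start' and all higher bits within its word. */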
#define BITMAP_FIRST_WORD_MASK(start) (~0UL << ((start) % BITS_PER_LONG))

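/* Set 'nr' consecutive bits in 'map', starting at bit 'start'. */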
void bitmap_set(unsigned long *map, int start, int nr)
{
    unsigned long *p = map + BIT_WORD(start);
    const int size = start + nr;
    int bits_to_set = BITS_PER_LONG - (start % BITS_PER_LONG);
    unsigned long mask_to_set = BITMAP_FIRST_WORD_MASK(start);

    while (nr - bits_to_set >= 0) {
        *p |= mask_to_set;
        nr -= bits_to_set;
        bits_to_set = BITS_PER_LONG;
        mask_to_set = ~0UL;
        p++;
    }
    if (nr) {
        mask_to_set &= BITMAP_LAST_WORD_MASK(size);
        *p |= mask_to_set;
    }
}

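/* Clear 'nr' consecutive bits in 'map', starting at bit 'start'. */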
void bitmap_clear(unsigned long *map, int start, int nr)
{
    unsigned long *p = map + BIT_WORD(start);
    const int size = start + nr;
    int bits_to_clear = BITS_PER_LONG - (start % BITS_PER_LONG);
    unsigned long mask_to_clear = BITMAP_FIRST_WORD_MASK(start);

    while (nr - bits_to_clear >= 0) {
        *p &= ~mask_to_clear;
        nr -= bits_to_clear;
        bits_to_clear = BITS_PER_LONG;
        mask_to_clear = ~0UL;
        p++;
    }
    if (nr) {
        mask_to_clear &= BITMAP_LAST_WORD_MASK(size);
        *p &= ~mask_to_clear;
    }
}

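/* Round 'x' up to a multiple of 'mask' + 1; 'mask' is alignment - 1. */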
#define ALIGN_MASK(x,mask)      (((x)+(mask))&~(mask))

/**
 * bitmap_find_next_zero_area - find a contiguous aligned zero area
 * @map: The address to base the search on
 * @size: The bitmap size in bits
 * @start: The bit number to start searching at
 * @nr: The number of zeroed bits we're looking for
 * @align_mask: Alignment mask for zero area
 *
 * The @align_mask should be one less than a power of 2; the effect is that
 * the bit offset of every zero area this function finds is a multiple of
 * that power of 2. An @align_mask of 0 means no alignment is required.
 */
unsigned long bitmap_find_next_zero_area(unsigned long *map,
                                         unsigned long size,
                                         unsigned long start,
                                         unsigned int nr,
                                         unsigned long align_mask)
{
    unsigned long index, end, i;
again:
    index = find_next_zero_bit(map, size, start);

    /* Align allocation */
    index = ALIGN_MASK(index, align_mask);

    end = index + nr;
    if (end > size) {
        return end;
    }
    i = find_next_bit(map, end, index);
    if (i < end) {
        start = i + 1;
        goto again;
    }
    return index;
}

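/*
 * Example (hypothetical caller, assuming a 1024-bit 'map'): to claim an
 * 8-bit-aligned run of 16 zero bits with bitmap_find_next_zero_area():
 *
 *     unsigned long pos;
 *
 *     pos = bitmap_find_next_zero_area(map, 1024, 0, 16, 7);
 *     if (pos < 1024) {
 *         bitmap_set(map, pos, 16);
 *     }
 */

/* Return 1 if 'bitmap1' and 'bitmap2' share a set bit below 'bits'. */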
int slow_bitmap_intersects(const unsigned long *bitmap1,
                           const unsigned long *bitmap2, int bits)
{
    int k, lim = bits/BITS_PER_LONG;

    for (k = 0; k < lim; ++k) {
        if (bitmap1[k] & bitmap2[k]) {
            return 1;
        }
    }

    if (bits % BITS_PER_LONG) {
        if ((bitmap1[k] & bitmap2[k]) & BITMAP_LAST_WORD_MASK(bits)) {
            return 1;
        }
    }
    return 0;
}