linux/include/asm-powerpc/bitops.h
/*
 * PowerPC atomic bit operations.
 *
 * Merged version by David Gibson <david@gibson.dropbear.id.au>.
 * Based on ppc64 versions by: Dave Engebretsen, Todd Inglett, Don
 * Reed, Pat McCarthy, Peter Bergner, Anton Blanchard.  They
 * originally took it from the ppc32 code.
 *
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so on a
 * ppc64 system the bits end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 * and on ppc32:
 *   |31.....0|63....32|95....64|127...96|159..128|191..160|223..192|255..224|
 *
 * There are a few little-endian macros used mostly for filesystem
 * bitmaps; these work on similar bit array layouts, but
 * byte-oriented:
 *   |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
 *
 * The main difference is that bits 3-5 (64b) or 3-4 (32b) of the bit
 * number field need to be reversed compared to the big-endian bit
 * fields.  This can be achieved by XOR with 0x38 (64b) or 0x18 (32b).
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#ifndef _ASM_POWERPC_BITOPS_H
#define _ASM_POWERPC_BITOPS_H

#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>

/*
 * clear_bit doesn't imply a memory barrier
 */
#define smp_mb__before_clear_bit()      smp_mb()
#define smp_mb__after_clear_bit()       smp_mb()
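
/*
 * Illustrative sketch, not part of the original header: clear_bit()
 * is atomic but unordered, so releasing a flag after updating shared
 * data needs the explicit barrier.  example_release and
 * EXAMPLE_BUSY_BIT are hypothetical names.
 */
#if 0   /* example only */
#define EXAMPLE_BUSY_BIT        0

static void example_release(unsigned long *flags)
{
        /* order all prior stores before the flag is seen as clear */
        smp_mb__before_clear_bit();
        clear_bit(EXAMPLE_BUSY_BIT, flags);
}
#endif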

#define BITOP_MASK(nr)          (1UL << ((nr) % BITS_PER_LONG))
#define BITOP_WORD(nr)          ((nr) / BITS_PER_LONG)
#define BITOP_LE_SWIZZLE        ((BITS_PER_LONG-1) & ~0x7)
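
/*
 * Worked example (illustrative, not part of the original header): on
 * ppc64, BITOP_LE_SWIZZLE == (64-1) & ~0x7 == 0x38, so the XOR flips
 * bits 3-5 of the bit number:
 *
 *   LE bit 0 (byte 0, bit 0) ->  0 ^ 0x38 == long bit 56
 *   LE bit 7 (byte 0, bit 7) ->  7 ^ 0x38 == long bit 63
 *   LE bit 8 (byte 1, bit 0) ->  8 ^ 0x38 == long bit 48
 *
 * On ppc32 the constant is (32-1) & ~0x7 == 0x18, flipping bits 3-4.
 */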

static __inline__ void set_bit(int nr, volatile unsigned long *addr)
{
        unsigned long old;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
"1:"    PPC_LLARX "%0,0,%3      # set_bit\n"
        "or     %0,%0,%2\n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%0,0,%3\n"
        "bne-   1b"
        : "=&r" (old), "+m" (*p)
        : "r" (mask), "r" (p)
        : "cc" );
}
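
/*
 * Informal C equivalent (illustrative, not from the original source):
 * the larx/stcx. loop above retries until the reservation taken by
 * the load survives to the store, i.e. roughly
 *
 *      do {
 *              old = load_reserved(p);                 // lwarx/ldarx
 *      } while (!store_conditional(p, old | mask));    // stwcx./stdcx.
 *
 * where load_reserved/store_conditional are hypothetical helpers
 * standing in for the reservation instructions.
 */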

static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
{
        unsigned long old;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
"1:"    PPC_LLARX "%0,0,%3      # clear_bit\n"
        "andc   %0,%0,%2\n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%0,0,%3\n"
        "bne-   1b"
        : "=&r" (old), "+m" (*p)
        : "r" (mask), "r" (p)
        : "cc" );
}

static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
{
        unsigned long old;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:"    PPC_LLARX "%0,0,%3      # clear_bit_unlock\n"
        "andc   %0,%0,%2\n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%0,0,%3\n"
        "bne-   1b"
        : "=&r" (old), "+m" (*p)
        : "r" (mask), "r" (p)
        : "cc", "memory");
}

static __inline__ void change_bit(int nr, volatile unsigned long *addr)
{
        unsigned long old;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
"1:"    PPC_LLARX "%0,0,%3      # change_bit\n"
        "xor    %0,%0,%2\n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%0,0,%3\n"
        "bne-   1b"
        : "=&r" (old), "+m" (*p)
        : "r" (mask), "r" (p)
        : "cc" );
}

static __inline__ int test_and_set_bit(unsigned long nr,
                                       volatile unsigned long *addr)
{
        unsigned long old, t;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:"    PPC_LLARX "%0,0,%3              # test_and_set_bit\n"
        "or     %1,%0,%2 \n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%1,0,%3 \n"
        "bne-   1b"
        ISYNC_ON_SMP
        : "=&r" (old), "=&r" (t)
        : "r" (mask), "r" (p)
        : "cc", "memory");

        return (old & mask) != 0;
}

static __inline__ int test_and_set_bit_lock(unsigned long nr,
                                       volatile unsigned long *addr)
{
        unsigned long old, t;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
"1:"    PPC_LLARX "%0,0,%3              # test_and_set_bit_lock\n"
        "or     %1,%0,%2 \n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%1,0,%3 \n"
        "bne-   1b"
        ISYNC_ON_SMP
        : "=&r" (old), "=&r" (t)
        : "r" (mask), "r" (p)
        : "cc", "memory");

        return (old & mask) != 0;
}
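
/*
 * Illustrative sketch, not part of the original header: together,
 * test_and_set_bit_lock() (acquire) and clear_bit_unlock() (release)
 * can implement a simple bit spinlock.  example_bit_spinlock is a
 * hypothetical name; cpu_relax() is declared elsewhere.
 */
#if 0   /* example only */
static void example_bit_spinlock(unsigned long *word)
{
        while (test_and_set_bit_lock(0, word))  /* spin until acquired */
                cpu_relax();
        /* ... critical section ... */
        clear_bit_unlock(0, word);              /* orders prior stores */
}
#endif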

static __inline__ int test_and_clear_bit(unsigned long nr,
                                         volatile unsigned long *addr)
{
        unsigned long old, t;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:"    PPC_LLARX "%0,0,%3              # test_and_clear_bit\n"
        "andc   %1,%0,%2 \n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%1,0,%3 \n"
        "bne-   1b"
        ISYNC_ON_SMP
        : "=&r" (old), "=&r" (t)
        : "r" (mask), "r" (p)
        : "cc", "memory");

        return (old & mask) != 0;
}

static __inline__ int test_and_change_bit(unsigned long nr,
                                          volatile unsigned long *addr)
{
        unsigned long old, t;
        unsigned long mask = BITOP_MASK(nr);
        unsigned long *p = ((unsigned long *)addr) + BITOP_WORD(nr);

        __asm__ __volatile__(
        LWSYNC_ON_SMP
"1:"    PPC_LLARX "%0,0,%3              # test_and_change_bit\n"
        "xor    %1,%0,%2 \n"
        PPC405_ERR77(0,%3)
        PPC_STLCX "%1,0,%3 \n"
        "bne-   1b"
        ISYNC_ON_SMP
        : "=&r" (old), "=&r" (t)
        : "r" (mask), "r" (p)
        : "cc", "memory");

        return (old & mask) != 0;
}

static __inline__ void set_bits(unsigned long mask, unsigned long *addr)
{
        unsigned long old;

        __asm__ __volatile__(
"1:"    PPC_LLARX "%0,0,%3         # set_bits\n"
        "or     %0,%0,%2\n"
        PPC_STLCX "%0,0,%3\n"
        "bne-   1b"
        : "=&r" (old), "+m" (*addr)
        : "r" (mask), "r" (addr)
        : "cc");
}
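
/*
 * Usage note (illustrative, not from the original source): unlike
 * set_bit(), set_bits() takes a mask rather than a bit number and
 * updates exactly one word, e.g. set_bits(0x5UL, &word) atomically
 * sets bits 0 and 2 of word.  Note this variant is unordered and,
 * as written above, omits the PPC405_ERR77 workaround.
 */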

#include <asm-generic/bitops/non-atomic.h>

static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
{
        __asm__ __volatile__(LWSYNC_ON_SMP "" ::: "memory");
        __clear_bit(nr, addr);
}

/*
 * Return the zero-based bit position (LE, not IBM bit numbering) of
 * the most significant 1-bit in an unsigned long.
 */
static __inline__ __attribute__((const))
int __ilog2(unsigned long x)
{
        int lz;

        asm (PPC_CNTLZL "%0,%1" : "=r" (lz) : "r" (x));
        return BITS_PER_LONG - 1 - lz;
}
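
/*
 * Worked example (illustrative, not from the original source): for
 * x == 0x40 on ppc64, cntlzd counts 57 leading zeroes, so __ilog2()
 * returns 64 - 1 - 57 == 6.  For x == 0 the count is BITS_PER_LONG
 * and the result is -1.
 */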

static inline __attribute__((const))
int __ilog2_u32(u32 n)
{
        int bit;
        asm ("cntlzw %0,%1" : "=r" (bit) : "r" (n));
        return 31 - bit;
}

#ifdef __powerpc64__
static inline __attribute__((const))
int __ilog2_u64(u64 n)
{
        int bit;
        asm ("cntlzd %0,%1" : "=r" (bit) : "r" (n));
        return 63 - bit;
}
#endif

/*
 * Determines the bit position of the least significant 0 bit in the
 * specified unsigned long.  The returned bit position will be
 * zero-based, starting from the right side (63/31 - 0).
 */
static __inline__ unsigned long ffz(unsigned long x)
{
        /* no zero exists anywhere in the word */
        if ((x = ~x) == 0)
                return BITS_PER_LONG;

        /*
         * Calculate the bit position of the least significant '1' bit in x
         * (since x has been inverted, this is actually the least significant
         * '0' bit in the original x).  Note: (x & -x) gives us a mask that
         * is the least significant (RIGHT-most) 1-bit of the value in x.
         */
        return __ilog2(x & -x);
}
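
/*
 * Worked example (illustrative, not from the original source):
 * ffz(0x0f) inverts x to ...fff0, (x & -x) isolates the lowest set
 * bit, 0x10, and __ilog2(0x10) returns 4 -- the position of the
 * lowest 0 bit in the original value.
 */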

static __inline__ int __ffs(unsigned long x)
{
        return __ilog2(x & -x);
}

/*
 * ffs: find first bit set.  This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static __inline__ int ffs(int x)
{
        unsigned long i = (unsigned long)x;
        return __ilog2(i & -i) + 1;
}
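
/*
 * Edge-case note (illustrative, not from the original source): for
 * x == 0 the expression reduces to __ilog2(0) + 1 == -1 + 1 == 0,
 * matching the libc convention that ffs(0) == 0.  Likewise
 * ffs(1) == 1 and ffs(0x8000) == 16.
 */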

/*
 * fls: find last (most-significant) bit set.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static __inline__ int fls(unsigned int x)
{
        int lz;

        asm ("cntlzw %0,%1" : "=r" (lz) : "r" (x));
        return 32 - lz;
}
#include <asm-generic/bitops/fls64.h>

#include <asm-generic/bitops/hweight.h>

#define find_first_zero_bit(addr, size) find_next_zero_bit((addr), (size), 0)
unsigned long find_next_zero_bit(const unsigned long *addr,
                                 unsigned long size, unsigned long offset);
/**
 * find_first_bit - find the first set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit-number of the first set bit, not the number of the byte
 * containing a bit.
 */
#define find_first_bit(addr, size) find_next_bit((addr), (size), 0)
unsigned long find_next_bit(const unsigned long *addr,
                            unsigned long size, unsigned long offset);

/* Little-endian versions */

static __inline__ int test_le_bit(unsigned long nr,
                                  __const__ unsigned long *addr)
{
        __const__ unsigned char *tmp = (__const__ unsigned char *) addr;
        return (tmp[nr >> 3] >> (nr & 7)) & 1;
}
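
/*
 * Worked example (illustrative, not from the original source):
 * test_le_bit(9, addr) reads memory byte 1 (9 >> 3) and tests its
 * bit 1 (9 & 7), giving the same answer regardless of the host
 * word size or the endianness of the underlying long array.
 */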

#define __set_le_bit(nr, addr) \
        __set_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define __clear_le_bit(nr, addr) \
        __clear_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))

#define test_and_set_le_bit(nr, addr) \
        test_and_set_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define test_and_clear_le_bit(nr, addr) \
        test_and_clear_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))

#define __test_and_set_le_bit(nr, addr) \
        __test_and_set_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))
#define __test_and_clear_le_bit(nr, addr) \
        __test_and_clear_bit((nr) ^ BITOP_LE_SWIZZLE, (addr))

#define find_first_zero_le_bit(addr, size) generic_find_next_zero_le_bit((addr), (size), 0)
unsigned long generic_find_next_zero_le_bit(const unsigned long *addr,
                                    unsigned long size, unsigned long offset);

/* Bitmap functions for the ext2 filesystem */

#define ext2_set_bit(nr,addr) \
        __test_and_set_le_bit((nr), (unsigned long*)addr)
#define ext2_clear_bit(nr, addr) \
        __test_and_clear_le_bit((nr), (unsigned long*)addr)

#define ext2_set_bit_atomic(lock, nr, addr) \
        test_and_set_le_bit((nr), (unsigned long*)addr)
#define ext2_clear_bit_atomic(lock, nr, addr) \
        test_and_clear_le_bit((nr), (unsigned long*)addr)

#define ext2_test_bit(nr, addr)      test_le_bit((nr),(unsigned long*)addr)

#define ext2_find_first_zero_bit(addr, size) \
        find_first_zero_le_bit((unsigned long*)addr, size)
#define ext2_find_next_zero_bit(addr, size, off) \
        generic_find_next_zero_le_bit((unsigned long*)addr, size, off)
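
/*
 * Illustrative sketch, not part of the original header: a typical
 * free-bit scan over an ext2-style bitmap.  example_alloc_block,
 * bitmap and nr_blocks are hypothetical names; the non-atomic
 * ext2_set_bit() assumes the caller holds the relevant lock.
 */
#if 0   /* example only */
static int example_alloc_block(unsigned long *bitmap, int nr_blocks)
{
        int blk = ext2_find_first_zero_bit(bitmap, nr_blocks);

        if (blk >= nr_blocks)
                return -1;              /* no free block */
        ext2_set_bit(blk, bitmap);
        return blk;
}
#endif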

/* Bitmap functions for the minix filesystem.  */

#define minix_test_and_set_bit(nr,addr) \
        __test_and_set_le_bit(nr, (unsigned long *)addr)
#define minix_set_bit(nr,addr) \
        __set_le_bit(nr, (unsigned long *)addr)
#define minix_test_and_clear_bit(nr,addr) \
        __test_and_clear_le_bit(nr, (unsigned long *)addr)
#define minix_test_bit(nr,addr) \
        test_le_bit(nr, (unsigned long *)addr)

#define minix_find_first_zero_bit(addr,size) \
        find_first_zero_le_bit((unsigned long *)addr, size)

#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#endif /* _ASM_POWERPC_BITOPS_H */