linux/arch/mips/include/asm/bitops.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>              /* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if _MIPS_SZLONG == 32
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL            "ll     "
#define __SC            "sc     "
#define __INS           "ins    "
#define __EXT           "ext    "
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL            "lld    "
#define __SC            "scd    "
#define __INS           "dins   "
#define __EXT           "dext   "
#endif
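
/*
 * A worked example of how a bit number is decomposed below (illustrative
 * only, not part of the original header): with 64-bit longs, SZLONG_LOG is 6
 * and SZLONG_MASK is 63UL, so for nr = 200 the word index is nr >> 6 = 3 and
 * the bit offset within that word is nr & 63 = 8.  The atomic helpers below
 * all use exactly this split:
 *
 *      unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
 *      int bit = nr & SZLONG_MASK;
 */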

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
                            volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
                                 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
                              volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
                               volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_set_bit(nr, addr);
}
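
/*
 * Illustrative usage sketch (not part of the original header; the bitmap
 * name and bit index below are made up for the example):
 *
 *      static unsigned long my_flags[BITS_TO_LONGS(128)];
 *
 *      set_bit(42, my_flags);          // atomically sets bit 42; implies
 *                                      // no memory barrier on its own
 */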

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (~(1UL << bit)));
                } while (unlikely(!temp));
        } else
                __mips_clear_bit(nr, addr);
}

/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_atomic();
        clear_bit(nr, addr);
}
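
/*
 * Illustrative unlock sketch (not part of the original header; the lock word
 * and bit number are made up): releasing a bit used as a lock needs the
 * release barrier that plain clear_bit() omits, which is exactly what
 * clear_bit_unlock() provides.
 *
 *      clear_bit_unlock(0, &my_lock_word);
 *
 *      // equivalent open-coded form:
 *      smp_mb__before_atomic();
 *      clear_bit(0, &my_lock_word);
 */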

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # change_bit    \n"
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_change_bit(nr, addr);
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
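
/*
 * Illustrative usage sketch (not part of the original header; the flag word
 * and bit index are made up): test_and_set_bit() is the usual way to claim
 * a once-only action, since exactly one caller sees the old value as 0.
 *
 *      if (!test_and_set_bit(0, &init_done))
 *              do_one_time_init();     // only the first caller gets here
 */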

/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit_lock(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
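
/*
 * Illustrative bit-lock sketch (not part of the original header; the lock
 * word and bit number are made up): test_and_set_bit_lock() paired with
 * clear_bit_unlock() gives acquire/release ordering, so no extra barriers
 * are needed around the critical section.
 *
 *      while (test_and_set_bit_lock(0, &my_lock_word))
 *              cpu_relax();            // spin until the old value was 0
 *      ...critical section...
 *      clear_bit_unlock(0, &my_lock_word);
 */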

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : "memory");
                } while (unlikely(!temp));
#endif
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_clear_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
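
/*
 * Illustrative usage sketch (not part of the original header; the flag word
 * and bit index are made up): test_and_clear_bit() atomically consumes a
 * pending flag, so concurrent setters and the consumer never lose an event.
 *
 *      if (test_and_clear_bit(3, &pending_work))
 *              process_work();         // runs only if the bit was set
 */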

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "\t%2, %1                       \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_change_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation.  It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_llsc();
        __clear_bit(nr, addr);
}

/*
 * Return the bit position (0..BITS_PER_LONG-1) of the most significant 1 bit
 * in a word.  Undefined if no 1 bit exists, so code should check against 0
 * first.
 */
static inline unsigned long __fls(unsigned long word)
{
        int num;

        if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 31 - num;
        }

        if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       dclz    %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 63 - num;
        }

        num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
        if (!(word & (~0ul << 32))) {
                num -= 32;
                word <<= 32;
        }
#endif
        if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
                num -= 16;
                word <<= 16;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
                num -= 8;
                word <<= 8;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
                num -= 4;
                word <<= 4;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
                num -= 2;
                word <<= 2;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-1))))
                num -= 1;
        return num;
}
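
/*
 * Worked example of the fallback binary search above (illustrative only):
 * for word = 0x00f0 on a 32-bit kernel, the top 16 and top 8 bits are clear,
 * so num drops from 31 to 15 to 7 while word is shifted left to 0xf0000000;
 * the remaining tests all see a set bit, so __fls() returns 7, the index of
 * the highest 1 bit in 0x00f0.
 */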

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
        return __fls(word & -word);
}
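
/*
 * Worked example (illustrative only): word & -word isolates the lowest set
 * bit, because in two's complement -word flips every bit above the lowest 1
 * and keeps that 1 in place.  For word = 0b101100, word & -word = 0b100, and
 * __fls(0b100) = 2, which is indeed the index of the lowest set bit.
 */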

/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int x)
{
        int r;

        if (!__builtin_constant_p(x) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (x)
                : "r" (x));

                return 32 - x;
        }

        r = 32;
        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}
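
/*
 * Worked example (illustrative only): fls() is 1-based while __fls() is
 * 0-based, and fls() tolerates a zero argument.  For x = 0x00f0 the clz path
 * counts 24 leading zeros, so fls() returns 32 - 24 = 8, one more than
 * __fls(0x00f0) = 7; fls(0) returns 0.
 */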

#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */