linux/arch/mips/include/asm/bitops.h
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>              /* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/llsc.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
                            volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
                                 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
                              volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
                               volatile unsigned long *addr);


/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       " __INS "%0, %3, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit), "r" (~0));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # set_bit       \n"
                        "       or      %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_set_bit(nr, addr);
}

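/*
 * Editorial illustration (not part of the original header): typical use of
 * set_bit() to publish a flag in a shared bitmap.  The bitmap and bit
 * number below are hypothetical.
 */
#if 0   /* example only, never compiled */
static unsigned long example_flags;             /* shared between CPUs */

static void example_mark_ready(void)
{
        /* Atomic read-modify-write of one word; other bits are preserved. */
        set_bit(3, &example_flags);
}
#endif
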
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        int bit = nr & SZLONG_MASK;
        unsigned long temp;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
                do {
                        __asm__ __volatile__(
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       " __INS "%0, $0, %2, 1                  \n"
                        "       " __SC "%0, %1                          \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (bit));
                } while (unlikely(!temp));
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
        } else if (kernel_uses_llsc) {
                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # clear_bit     \n"
                        "       and     %0, %2                          \n"
                        "       " __SC "%0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (~(1UL << bit)));
                } while (unlikely(!temp));
        } else
                __mips_clear_bit(nr, addr);
}

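/*
 * Editorial illustration: as noted above, clear_bit() has no implicit
 * barrier, so a caller using the bit for synchronisation adds one
 * explicitly.  Names below are hypothetical.
 */
#if 0   /* example only, never compiled */
static void example_clear_pending(unsigned long *flags)
{
        smp_mb__before_atomic();        /* order earlier stores first */
        clear_bit(0, flags);
}
#endif
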
/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_atomic();
        clear_bit(nr, addr);
}

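/*
 * Editorial illustration: clear_bit_unlock() as the release half of a bit
 * lock, pairing with test_and_set_bit_lock() below.  Hypothetical helper.
 */
#if 0   /* example only, never compiled */
static void example_bit_unlock(unsigned long *word)
{
        /*
         * All stores made while holding the "lock" bit become visible
         * to the next locker before bit 0 is seen clear.
         */
        clear_bit_unlock(0, word);
}
#endif
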
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                      \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                : "ir" (1UL << bit));
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1          # change_bit    \n"
                        "       xor     %0, %2                          \n"
                        "       " __SC  "%0, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
                        : "ir" (1UL << bit));
                } while (unlikely(!temp));
        } else
                __mips_change_bit(nr, addr);
}

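/*
 * Editorial illustration: change_bit() toggling a bit without a racy
 * read-modify-write sequence.  The state word and bit are hypothetical.
 */
#if 0   /* example only, never compiled */
static void example_toggle_polarity(unsigned long *state, unsigned int nr)
{
        change_bit(nr, state);          /* 0 -> 1 or 1 -> 0, atomically */
}
#endif
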
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}

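/*
 * Editorial illustration: test_and_set_bit() as a "claim exactly once"
 * primitive; the nonzero return tells the loser the bit was already set.
 * Names are hypothetical.
 */
#if 0   /* example only, never compiled */
static int example_try_claim(unsigned long *flags)
{
        if (test_and_set_bit(0, flags))
                return 0;               /* already claimed by someone else */
        return 1;                       /* we set the bit and own it */
}
#endif
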
/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+m" (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL "%0, %1  # test_and_set_bit      \n"
                        "       or      %2, %0, %3                      \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_set_bit_lock(nr, addr);

        smp_llsc_mb();

        return res != 0;
}
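
/*
 * Editorial illustration: spinning on test_and_set_bit_lock() to take a
 * bit lock; the acquire semantics order the critical section after the
 * successful set.  Pairs with clear_bit_unlock() above.  Hypothetical
 * helper; real code would also call cpu_relax() in the loop.
 */
#if 0   /* example only, never compiled */
static void example_bit_lock(unsigned long *word)
{
        while (test_and_set_bit_lock(0, word))
                ;                       /* bit already set, keep spinning */
}
#endif
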
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
        } else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       " __EXT "%2, %0, %3, 1                  \n"
                        "       " __INS "%0, $0, %3, 1                  \n"
                        "       " __SC  "%0, %1                         \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "ir" (bit)
                        : "memory");
                } while (unlikely(!temp));
#endif
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_clear_bit    \n"
                        "       or      %2, %0, %3                      \n"
                        "       xor     %2, %3                          \n"
                        "       " __SC  "%2, %1                         \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_clear_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}

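/*
 * Editorial illustration: test_and_clear_bit() consuming a pending-work
 * flag, so only one caller acts on each posting.  Names are hypothetical.
 */
#if 0   /* example only, never compiled */
static int example_fetch_pending(unsigned long *pending)
{
        return test_and_clear_bit(0, pending);  /* 1 if work was posted */
}
#endif
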
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        int bit = nr & SZLONG_MASK;
        unsigned long res;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    arch=r4000                              \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                : "r" (1UL << bit)
                : "memory");
        } else if (kernel_uses_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    "MIPS_ISA_ARCH_LEVEL"           \n"
                        "       " __LL  "%0, %1 # test_and_change_bit   \n"
                        "       xor     %2, %0, %3                      \n"
                        "       " __SC  "\t%2, %1                       \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
                        : "r" (1UL << bit)
                        : "memory");
                } while (unlikely(!res));

                res = temp & (1UL << bit);
        } else
                res = __mips_test_and_change_bit(nr, addr);

        smp_llsc_mb();

        return res != 0;
}

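/*
 * Editorial illustration: test_and_change_bit() flipping a bit while
 * reporting its previous value, e.g. for a hypothetical ping-pong buffer
 * selector.
 */
#if 0   /* example only, never compiled */
static int example_flip_selector(unsigned long *sel)
{
        /* Returns the buffer index that was in use before the flip. */
        return test_and_change_bit(0, sel);
}
#endif
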
#include <asm-generic/bitops/non-atomic.h>

/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation. It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
        smp_mb__before_llsc();
        __clear_bit(nr, addr);
}

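/*
 * Editorial illustration: __clear_bit_unlock() is only safe when no other
 * CPU can touch the other bits of the word, e.g. a lock bit in a word that
 * the lock itself protects.  Hypothetical helper.
 */
#if 0   /* example only, never compiled */
static void example_unlock_private_word(unsigned long *word)
{
        __clear_bit_unlock(0, word);    /* non-atomic store with release */
}
#endif
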
/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 */
static inline unsigned long __fls(unsigned long word)
{
        int num;

        if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 31 - num;
        }

        if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
            __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       dclz    %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (num)
                : "r" (word));

                return 63 - num;
        }

        num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
        if (!(word & (~0ul << 32))) {
                num -= 32;
                word <<= 32;
        }
#endif
        if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
                num -= 16;
                word <<= 16;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
                num -= 8;
                word <<= 8;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
                num -= 4;
                word <<= 4;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
                num -= 2;
                word <<= 2;
        }
        if (!(word & (~0ul << (BITS_PER_LONG-1))))
                num -= 1;
        return num;
}

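/*
 * Editorial illustration: __fls() picking the highest set bit of a
 * non-zero mask, e.g. the highest pending level in a hypothetical
 * interrupt word.  The caller must ensure the argument is non-zero.
 */
#if 0   /* example only, never compiled */
static unsigned long example_highest_pending(unsigned long pending)
{
        return __fls(pending);          /* e.g. __fls(0x90) == 7 */
}
#endif
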
/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
        return __fls(word & -word);
}

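/*
 * Editorial illustration: __ffs() returning the lowest set bit, again
 * only valid for a non-zero word, e.g. when walking a bitmap by hand.
 */
#if 0   /* example only, never compiled */
static unsigned long example_lowest_pending(unsigned long pending)
{
        return __ffs(pending);          /* e.g. __ffs(0x90) == 4 */
}
#endif
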
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int x)
{
        int r;

        if (!__builtin_constant_p(x) &&
            __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
                __asm__(
                "       .set    push                                    \n"
                "       .set    "MIPS_ISA_LEVEL"                        \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (x)
                : "r" (x));

                return 32 - x;
        }

        r = 32;
        if (!x)
                return 0;
        if (!(x & 0xffff0000u)) {
                x <<= 16;
                r -= 16;
        }
        if (!(x & 0xff000000u)) {
                x <<= 8;
                r -= 8;
        }
        if (!(x & 0xf0000000u)) {
                x <<= 4;
                r -= 4;
        }
        if (!(x & 0xc0000000u)) {
                x <<= 2;
                r -= 2;
        }
        if (!(x & 0x80000000u)) {
                x <<= 1;
                r -= 1;
        }
        return r;
}

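/*
 * Editorial illustration: fls() is 1-based and defined for zero, which
 * makes it handy for computing how many bits are needed to hold a value.
 */
#if 0   /* example only, never compiled */
static int example_bits_needed(unsigned int x)
{
        return fls(x);                  /* fls(0) == 0, fls(255) == 8 */
}
#endif
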
#include <asm-generic/bitops/fls64.h>

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as the libc and compiler builtin ffs
 * routines, therefore differs in spirit from ffz() (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}

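/*
 * Editorial illustration: ffs() follows the libc convention, so it is
 * 1-based and returns 0 for an all-zero argument.
 */
#if 0   /* example only, never compiled */
static int example_first_set(int mask)
{
        return ffs(mask);               /* ffs(0) == 0, ffs(0x90) == 5 */
}
#endif
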
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>

#include <asm/arch_hweight.h>
#include <asm-generic/bitops/const_hweight.h>

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */