linux/arch/alpha/include/asm/bitops.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_BITOPS_H
#define _ALPHA_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/compiler.h>
#include <asm/barrier.h>

/*
 * Copyright 1994, Linus Torvalds.
 */

/*
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic. All bit operations return 0 if the bit
 * was cleared before the operation and != 0 if it was not.
 *
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 *
 * bit 0 is the LSB of addr; bit 64 is the LSB of (addr+1).
 */

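/*
 * ldl_l (load-locked) and stl_c (store-conditional) form Alpha's
 * LL/SC pair: stl_c writes 0 back into its source register if the
 * lock was lost, so "beq %0,2f" branches to the retry stub that
 * .subsection 2 places out of line, off the hot path.
 *
 * Index math example: nr = 70 selects the int at ((int *)addr) + 2
 * (70 >> 5 == 2) and bit 70 & 31 == 6 within it.
 */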
static inline void
set_bit(unsigned long nr, volatile void * addr)
{
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%3\n"
        "       bis %0,%2,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m)
        :"Ir" (1UL << (nr & 31)), "m" (*m));
}

/*
 * WARNING: non atomic version.
 */
static inline void
__set_bit(unsigned long nr, volatile void * addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        *m |= 1 << (nr & 31);
}

static inline void
clear_bit(unsigned long nr, volatile void * addr)
{
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%3\n"
        "       bic %0,%2,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m)
        :"Ir" (1UL << (nr & 31)), "m" (*m));
}

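/*
 * clear_bit_unlock() has release semantics: the smp_mb() orders all
 * memory accesses in the critical section before the bit is cleared.
 * It pairs with test_and_set_bit_lock() below.
 */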
static inline void
clear_bit_unlock(unsigned long nr, volatile void * addr)
{
        smp_mb();
        clear_bit(nr, addr);
}

/*
 * WARNING: non atomic version.
 */
static __inline__ void
__clear_bit(unsigned long nr, volatile void * addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        *m &= ~(1 << (nr & 31));
}

static inline void
__clear_bit_unlock(unsigned long nr, volatile void * addr)
{
        smp_mb();
        __clear_bit(nr, addr);
}

static inline void
change_bit(unsigned long nr, volatile void * addr)
{
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%3\n"
        "       xor %0,%2,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m)
        :"Ir" (1UL << (nr & 31)), "m" (*m));
}

/*
 * WARNING: non atomic version.
 */
static __inline__ void
__change_bit(unsigned long nr, volatile void * addr)
{
        int *m = ((int *) addr) + (nr >> 5);

        *m ^= 1 << (nr & 31);
}

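/*
 * test_and_set_bit() takes an early exit: if "and %0,%3,%2" finds the
 * bit already set, "bne %2,2f" skips the store entirely.  On SMP the
 * mb instructions before and after the operation make it a full
 * memory barrier.
 */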
static inline int
test_and_set_bit(unsigned long nr, volatile void *addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       bne %2,2f\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
        "2:\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

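/*
 * test_and_set_bit_lock() omits the leading barrier, giving acquire
 * semantics: only the trailing mb is needed to order the critical
 * section after the lock.  A minimal bit-lock sketch (illustrative
 * only; the kernel's bit_spin_lock() is the real interface):
 *
 *      while (test_and_set_bit_lock(0, &word))
 *              cpu_relax();
 *      ...critical section...
 *      clear_bit_unlock(0, &word);
 */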
static inline int
test_and_set_bit_lock(unsigned long nr, volatile void *addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       bne %2,2f\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
        "2:\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

/*
 * WARNING: non atomic version.
 */
static inline int
__test_and_set_bit(unsigned long nr, volatile void * addr)
{
        unsigned long mask = 1 << (nr & 0x1f);
        int *m = ((int *) addr) + (nr >> 5);
        int old = *m;

        *m = old | mask;
        return (old & mask) != 0;
}

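/*
 * Symmetric early exit: "beq %2,2f" skips the store when the bit is
 * already clear.
 */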
static inline int
test_and_clear_bit(unsigned long nr, volatile void * addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       beq %2,2f\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
        "2:\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

/*
 * WARNING: non atomic version.
 */
static inline int
__test_and_clear_bit(unsigned long nr, volatile void * addr)
{
        unsigned long mask = 1 << (nr & 0x1f);
        int *m = ((int *) addr) + (nr >> 5);
        int old = *m;

        *m = old & ~mask;
        return (old & mask) != 0;
}

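/*
 * No early exit here: a change always has to store, so the loop body
 * is unconditional.
 */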
static inline int
test_and_change_bit(unsigned long nr, volatile void * addr)
{
        unsigned long oldbit;
        unsigned long temp;
        int *m = ((int *) addr) + (nr >> 5);

        __asm__ __volatile__(
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        "1:     ldl_l %0,%4\n"
        "       and %0,%3,%2\n"
        "       xor %0,%3,%0\n"
        "       stl_c %0,%1\n"
        "       beq %0,3f\n"
#ifdef CONFIG_SMP
        "       mb\n"
#endif
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        :"=&r" (temp), "=m" (*m), "=&r" (oldbit)
        :"Ir" (1UL << (nr & 31)), "m" (*m) : "memory");

        return oldbit != 0;
}

/*
 * WARNING: non atomic version.
 */
static __inline__ int
__test_and_change_bit(unsigned long nr, volatile void * addr)
{
        unsigned long mask = 1 << (nr & 0x1f);
        int *m = ((int *) addr) + (nr >> 5);
        int old = *m;

        *m = old ^ mask;
        return (old & mask) != 0;
}

static inline int
test_bit(int nr, const volatile void * addr)
{
        return (1UL & (((const int *) addr)[nr >> 5] >> (nr & 31))) != 0UL;
}

/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so code should check against ~0UL first.
 *
 * Do a binary search on the bits.  Due to the nature of large
 * constants on the alpha, it is worthwhile to split the search.
 */
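/*
 * ffz_b() isolates the lowest zero bit of x and then computes its
 * index by binary weighting.  Example: x = 0xFB (bit 2 clear):
 * ~x & -~x == 0x04; 0x04 & 0xAA == 0, 0x04 & 0xCC == 0x04,
 * 0x04 & 0xF0 == 0, so sum == 2 + 0 + 0 == 2.
 */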
static inline unsigned long ffz_b(unsigned long x)
{
        unsigned long sum, x1, x2, x4;

        x = ~x & -~x;           /* set first 0 bit, clear others */
        x1 = x & 0xAA;
        x2 = x & 0xCC;
        x4 = x & 0xF0;
        sum = x2 ? 2 : 0;
        sum += (x4 != 0) * 4;
        sum += (x1 != 0);

        return sum;
}

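/*
 * cmpbge(word, ~0UL) sets bit i of the result iff byte i of word is
 * 0xff, so ffz_b() on that mask yields the first byte containing a
 * zero; extbl pulls that byte out and a second ffz_b() finds the
 * zero bit within it.
 */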
static inline unsigned long ffz(unsigned long word)
{
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
        /* Whee.  EV67 can calculate it directly.  */
        return __kernel_cttz(~word);
#else
        unsigned long bits, qofs, bofs;

        bits = __kernel_cmpbge(word, ~0UL);
        qofs = ffz_b(bits);
        bits = __kernel_extbl(word, qofs);
        bofs = ffz_b(bits);

        return qofs*8 + bofs;
#endif
}

/*
 * __ffs = Find First set bit in word.  Undefined if no set bit exists.
 */
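/*
 * Dual of ffz: cmpbge(0, word) flags the all-zero bytes, ffz_b() on
 * that mask finds the first nonzero byte, and ffz_b(~bits) finds the
 * first set bit within it.
 */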
static inline unsigned long __ffs(unsigned long word)
{
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
        /* Whee.  EV67 can calculate it directly.  */
        return __kernel_cttz(word);
#else
        unsigned long bits, qofs, bofs;

        bits = __kernel_cmpbge(0, word);
        qofs = ffz_b(bits);
        bits = __kernel_extbl(word, qofs);
        bofs = ffz_b(~bits);

        return qofs*8 + bofs;
#endif
}

#ifdef __KERNEL__

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above __ffs.
 */
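/*
 * Examples: ffs(0) == 0, ffs(1) == 1, ffs(0x80000000) == 32,
 * whereas __ffs(1) == 0 and __ffs(0) is undefined.
 */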

static inline int ffs(int word)
{
        int result = __ffs(word) + 1;
        return word ? result : 0;
}

/*
 * fls: find last bit set.
 */
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
static inline int fls64(unsigned long word)
{
        return 64 - __kernel_ctlz(word);
}
#else
extern const unsigned char __flsm1_tab[256];

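/*
 * __flsm1_tab[i] holds the bit index of the highest set bit of i,
 * i.e. fls(i) - 1 (presumably 0 for i == 0 so that fls64(0) == 0).
 * cmpbge(x, 0x0101...01UL) flags the nonzero bytes of x, the table
 * picks the highest such byte, extbl extracts it, and the final
 * (x != 0) converts the 0-based bit index into the 1-based fls
 * convention.
 */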
static inline int fls64(unsigned long x)
{
        unsigned long t, a, r;

        t = __kernel_cmpbge (x, 0x0101010101010101UL);
        a = __flsm1_tab[t];
        t = __kernel_extbl (x, a);
        r = a*8 + __flsm1_tab[t] + (x != 0);

        return r;
}
#endif

static inline unsigned long __fls(unsigned long x)
{
        return fls64(x) - 1;
}

static inline int fls(unsigned int x)
{
        return fls64(x);
}

/*
 * hweightN: returns the Hamming weight (i.e. the number
 * of bits set) of an N-bit word
 */
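/*
 * Example: hweight8(0xb7) == 6, since 0xb7 == 10110111 in binary.
 */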

#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
/* Whee.  EV67 can calculate it directly.  */
static inline unsigned long __arch_hweight64(unsigned long w)
{
        return __kernel_ctpop(w);
}

static inline unsigned int __arch_hweight32(unsigned int w)
{
        return __arch_hweight64(w);
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
        return __arch_hweight64(w & 0xffff);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
        return __arch_hweight64(w & 0xff);
}
#else
#include <asm-generic/bitops/arch_hweight.h>
#endif

#include <asm-generic/bitops/const_hweight.h>

#endif /* __KERNEL__ */

#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

/*
 * Every architecture must define this function. It's the fastest
 * way of searching a 100-bit bitmap.  It's guaranteed that at least
 * one of the 100 bits is set.
 */
static inline unsigned long
sched_find_first_bit(const unsigned long b[2])
{
        unsigned long b0, b1, ofs, tmp;

        b0 = b[0];
        b1 = b[1];
        ofs = (b0 ? 0 : 64);
        tmp = (b0 ? b0 : b1);

        return __ffs(tmp) + ofs;
}
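
/*
 * Example: with b[0] == 0 and b[1] == 0x20, the first set bit is
 * bit 69 (64 + __ffs(0x20)), which is what this returns.
 */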

#include <asm-generic/bitops/le.h>

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* __KERNEL__ */

#endif /* _ALPHA_BITOPS_H */