linux/arch/avr32/include/asm/bitops.h
/*
 * Copyright (C) 2004-2006 Atmel Corporation
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef __ASM_AVR32_BITOPS_H
#define __ASM_AVR32_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm/byteorder.h>
#include <asm/barrier.h>

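/*
 * All of the atomic operations below are built on the AVR32
 * "ssrf 5"/"stcond" pair: "ssrf 5" sets the L (lock) flag in the
 * status register, and "stcond" performs the store only if L is still
 * set.  Returning from an interrupt or exception clears L, so an
 * interrupted read-modify-write sequence fails its conditional store
 * and the "brne 1b" retries the whole sequence.
 */
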
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile void * addr)
{
        unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
        unsigned long tmp;

        if (__builtin_constant_p(nr)) {
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %2\n"
                        "       sbr     %0, %3\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p)
                        : "m"(*p), "i"(nr)
                        : "cc");
        } else {
                unsigned long mask = 1UL << (nr % BITS_PER_LONG);
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %2\n"
                        "       or      %0, %3\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p)
                        : "m"(*p), "r"(mask)
                        : "cc");
        }
}
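
/*
 * Example usage (a minimal sketch; the bitmap name is made up):
 *
 *      static DECLARE_BITMAP(pending, 64);
 *
 *      set_bit(42, pending);   atomically sets bit 10 of pending[1]
 */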

/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(int nr, volatile void * addr)
{
        unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
        unsigned long tmp;

        if (__builtin_constant_p(nr)) {
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %2\n"
                        "       cbr     %0, %3\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p)
                        : "m"(*p), "i"(nr)
                        : "cc");
        } else {
                unsigned long mask = 1UL << (nr % BITS_PER_LONG);
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %2\n"
                        "       andn    %0, %3\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p)
                        : "m"(*p), "r"(mask)
                        : "cc");
        }
}
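
/*
 * Example of the barrier pairing described above, when clear_bit()
 * releases a lock-like flag (a sketch; LOCK_BIT and flags are made up):
 *
 *      smp_mb__before_atomic();        order prior stores before release
 *      clear_bit(LOCK_BIT, &flags);
 */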

/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile void * addr)
{
        unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
        unsigned long mask = 1UL << (nr % BITS_PER_LONG);
        unsigned long tmp;

        asm volatile(
                "1:     ssrf    5\n"
                "       ld.w    %0, %2\n"
                "       eor     %0, %3\n"
                "       stcond  %1, %0\n"
                "       brne    1b"
                : "=&r"(tmp), "=o"(*p)
                : "m"(*p), "r"(mask)
                : "cc");
}
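
/*
 * Example usage (a sketch; the names are made up):
 *
 *      change_bit(LED_BIT, &gpio_shadow);      toggle, old value not needed
 */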

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(int nr, volatile void * addr)
{
        unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
        unsigned long mask = 1UL << (nr % BITS_PER_LONG);
        unsigned long tmp, old;

        if (__builtin_constant_p(nr)) {
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %3\n"
                        "       mov     %2, %0\n"
                        "       sbr     %0, %4\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p), "=&r"(old)
                        : "m"(*p), "i"(nr)
                        : "memory", "cc");
        } else {
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %2, %3\n"
                        "       or      %0, %2, %4\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p), "=&r"(old)
                        : "m"(*p), "r"(mask)
                        : "memory", "cc");
        }

        return (old & mask) != 0;
}
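
/*
 * Typical pattern (a sketch; the names are made up): use the returned
 * old value as a one-shot guard so only one caller proceeds.
 *
 *      if (test_and_set_bit(0, &initialized))
 *              return;                 bit was already set; lost the race
 *      do_one_time_setup();            hypothetical init routine
 */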

/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(int nr, volatile void * addr)
{
        unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
        unsigned long mask = 1UL << (nr % BITS_PER_LONG);
        unsigned long tmp, old;

        if (__builtin_constant_p(nr)) {
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %3\n"
                        "       mov     %2, %0\n"
                        "       cbr     %0, %4\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p), "=&r"(old)
                        : "m"(*p), "i"(nr)
                        : "memory", "cc");
        } else {
                asm volatile(
                        "1:     ssrf    5\n"
                        "       ld.w    %0, %3\n"
                        "       mov     %2, %0\n"
                        "       andn    %0, %4\n"
                        "       stcond  %1, %0\n"
                        "       brne    1b"
                        : "=&r"(tmp), "=o"(*p), "=&r"(old)
                        : "m"(*p), "r"(mask)
                        : "memory", "cc");
        }

        return (old & mask) != 0;
}
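
/*
 * Typical pattern (a sketch; the names are made up): atomically consume
 * a pending flag so that exactly one caller observes it.
 *
 *      if (test_and_clear_bit(EV_PENDING, &events))
 *              handle_event();         hypothetical handler
 */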

/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile void * addr)
{
        unsigned long *p = ((unsigned long *)addr) + nr / BITS_PER_LONG;
        unsigned long mask = 1UL << (nr % BITS_PER_LONG);
        unsigned long tmp, old;

        asm volatile(
                "1:     ssrf    5\n"
                "       ld.w    %2, %3\n"
                "       eor     %0, %2, %4\n"
                "       stcond  %1, %0\n"
                "       brne    1b"
                : "=&r"(tmp), "=o"(*p), "=&r"(old)
                : "m"(*p), "r"(mask)
                : "memory", "cc");

        return (old & mask) != 0;
}
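
/*
 * Example usage (a sketch; the names are made up): toggle a bit and
 * observe its previous state in one atomic step.
 *
 *      if (test_and_change_bit(MODE_BIT, &state))
 *              ...                     bit was set before the toggle
 */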

#include <asm-generic/bitops/non-atomic.h>

/* Find First bit Set */
static inline unsigned long __ffs(unsigned long word)
{
        unsigned long result;

        asm("brev %1\n\t"
            "clz %0,%1"
            : "=r"(result), "=&r"(word)
            : "1"(word));
        return result;
}

/* Find First Zero */
static inline unsigned long ffz(unsigned long word)
{
        return __ffs(~word);
}

/* Find Last bit Set */
static inline int fls(unsigned long word)
{
        unsigned long result;

        asm("clz %0,%1" : "=r"(result) : "r"(word));
        return 32 - result;
}

static inline int __fls(unsigned long word)
{
        return fls(word) - 1;
}
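
/*
 * Concrete values for the routines above (bit 0 is the least
 * significant bit; __ffs(0) is undefined, as in the generic version):
 *
 *      __ffs(0x40)  == 6       index of lowest set bit
 *      ffz(0xffff)  == 16      index of lowest zero bit
 *      fls(0x40)    == 7       1-based index of highest set bit
 *      __fls(0x40)  == 6       0-based index of highest set bit
 *      fls(0)       == 0
 */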

unsigned long find_first_zero_bit(const unsigned long *addr,
                                  unsigned long size);
#define find_first_zero_bit find_first_zero_bit

unsigned long find_next_zero_bit(const unsigned long *addr,
                                 unsigned long size,
                                 unsigned long offset);
#define find_next_zero_bit find_next_zero_bit

unsigned long find_first_bit(const unsigned long *addr,
                             unsigned long size);
#define find_first_bit find_first_bit

unsigned long find_next_bit(const unsigned long *addr,
                            unsigned long size,
                            unsigned long offset);
#define find_next_bit find_next_bit

/*
 * ffs: find first bit set. This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from the above ffz (man ffs).
 *
 * The difference is that bit numbering starts at 1, and if no bit is set,
 * the function returns 0.
 */
static inline int ffs(unsigned long word)
{
        if (word == 0)
                return 0;
        return __ffs(word) + 1;
}
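
/*
 * Concrete values illustrating the 1-based numbering:
 *
 *      ffs(0)    == 0          no bit set
 *      ffs(1)    == 1          bit 0 set
 *      ffs(0x40) == 7          bit 6 set
 */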

#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>

extern unsigned long find_next_zero_bit_le(const void *addr,
                unsigned long size, unsigned long offset);
#define find_next_zero_bit_le find_next_zero_bit_le

extern unsigned long find_next_bit_le(const void *addr,
                unsigned long size, unsigned long offset);
#define find_next_bit_le find_next_bit_le

#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __ASM_AVR32_BITOPS_H */