linux/arch/mn10300/include/asm/bitops.h
/* MN10300 bit operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 *
 * These have to be done with inline assembly: that way the bit-setting
 * is guaranteed to be atomic.  The test_and_xxx operations return 0 if
 * the bit was clear before the operation and non-zero if it was set.
 *
 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
#ifndef __ASM_BITOPS_H
#define __ASM_BITOPS_H

#include <asm/cpu-regs.h>

#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

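/*
 * Worked example of the addressing (illustrative, not part of the
 * original header): for nr == 35, the assembly-based ops below act on
 * byte (35 >> 3) == 4 at bit (35 & 7) == 3, while the C helpers act on
 * 32-bit word (35 >> 5) == 1 at bit (35 & 31) == 3.  Assuming the
 * little-endian word layout implied by mixing byte- and word-based
 * helpers in this file, both refer to the same bit in memory.
 */
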
/*
 * set bit
 */
#define __set_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	asm volatile("bset %2,(%1) # set_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define set_bit(nr, addr) __set_bit((nr), (addr))

/*
 * clear bit
 */
#define ___clear_bit(nr, addr)					\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	_a += (nr) >> 3;					\
								\
	asm volatile("bclr %2,(%1) # clear_bit reg"		\
		     : "=m"(*_a)				\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
})

#define clear_bit(nr, addr) ___clear_bit((nr), (addr))


static inline void __clear_bit(int nr, volatile void *addr)
{
	unsigned int *a = (unsigned int *) addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a &= ~mask;
}

/*
 * test bit
 */
static inline int test_bit(int nr, const volatile void *addr)
{
	return 1UL & (((const unsigned int *) addr)[nr >> 5] >> (nr & 31));
}
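
/*
 * Example usage (illustrative only; "flags" is a made-up bitmap, not
 * part of this header).  set_bit() and clear_bit() are atomic;
 * test_bit() is a plain read:
 *
 *	static unsigned long flags[1];
 *
 *	set_bit(3, flags);
 *	if (test_bit(3, flags))
 *		clear_bit(3, flags);
 */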

/*
 * change bit
 */
static inline void __change_bit(int nr, volatile void *addr)
{
	int	mask;
	unsigned int *a = (unsigned int *) addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	*a ^= mask;
}

extern void change_bit(int nr, volatile void *addr);

/*
 * test and set bit
 */
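/*
 * Both test_and_set_bit() and test_and_clear_bit() rely on BSET/BCLR
 * ANDing the mask with the original byte to set the Z flag before the
 * modified byte is stored back: Z ends up clear iff the bit was
 * already set, so !(epsw & EPSW_FLAG_Z) yields the bit's old value.
 */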
#define __test_and_set_bit(nr, addr)				\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	unsigned epsw;						\
	_a += (nr) >> 3;					\
								\
	asm volatile("bset %3,(%2) # test_set_bit reg\n"	\
		     "mov epsw,%1"				\
		     : "=m"(*_a), "=d"(epsw)			\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
								\
	!(epsw & EPSW_FLAG_Z);					\
})

#define test_and_set_bit(nr, addr) __test_and_set_bit((nr), (addr))

/*
 * test and clear bit
 */
#define __test_and_clear_bit(nr, addr)				\
({								\
	volatile unsigned char *_a = (unsigned char *)(addr);	\
	const unsigned shift = (nr) & 7;			\
	unsigned epsw;						\
	_a += (nr) >> 3;					\
								\
	asm volatile("bclr %3,(%2) # test_clear_bit reg\n"	\
		     "mov epsw,%1"				\
		     : "=m"(*_a), "=d"(epsw)			\
		     : "a"(_a), "d"(1 << shift), "m"(*_a)	\
		     : "memory", "cc");				\
								\
	!(epsw & EPSW_FLAG_Z);					\
})

#define test_and_clear_bit(nr, addr) __test_and_clear_bit((nr), (addr))

/*
 * test and change bit
 */
static inline int __test_and_change_bit(int nr, volatile void *addr)
{
	int	mask, retval;
	unsigned int *a = (unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;

	return retval;
}

extern int test_and_change_bit(int nr, volatile void *addr);

#include <asm-generic/bitops/lock.h>

#ifdef __KERNEL__

/**
 * __ffs - find first bit set
 * @x: the word to search
 *
 * - return 31..0 to indicate the index of the least significant bit set
 * - if no bits are set in x, the result is undefined
 */
static inline __attribute__((const))
unsigned long __ffs(unsigned long x)
{
	int bit;
	asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(x & -x));
	return bit;
}
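
/*
 * Worked example (illustrative): __ffs(0x28) first isolates the lowest
 * set bit, 0x28 & -0x28 == 0x08; BSCH then reports the index of that
 * single remaining bit, so the result is 3.
 */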

/*
 * special slimline version of fls() for calculating ilog2_u32()
 * - note: no protection against n == 0
 */
static inline __attribute__((const))
int __ilog2_u32(u32 n)
{
	int bit;
	asm("bsch %2,%0" : "=r"(bit) : "0"(0), "r"(n));
	return bit;
}

/**
 * fls - find last bit set
 * @x: the word to search
 *
 * This is defined the same way as ffs:
 * - return 32..1 to indicate the position of the most significant bit
 *   set (one more than its bit index)
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int fls(int x)
{
	return (x != 0) ? __ilog2_u32(x) + 1 : 0;
}
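
/*
 * Example (illustrative): fls(0x28) == 6, since the most significant
 * set bit of 0x28 is bit 5; fls(0) == 0.
 */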

/**
 * __fls - find last (most-significant) set bit in a long word
 * @word: the word to search
 *
 * Undefined if no set bit exists, so code should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	return __ilog2_u32(word);
}

/**
 * ffs - find first bit set
 * @x: the word to search
 *
 * - return 32..1 to indicate the position of the least significant bit
 *   set (one more than its bit index)
 * - return 0 to indicate no bits set
 */
static inline __attribute__((const))
int ffs(int x)
{
	/* Note: (x & -x) gives us a mask that is the least significant
	 * (rightmost) 1-bit of the value in x.
	 */
	return fls(x & -x);
}
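
/*
 * Example (illustrative): ffs(0x28) == 4, one more than the index of
 * the least significant set bit (bit 3); ffs(0) == 0.
 */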

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>

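/*
 * XORing the bit number with 0x18 toggles bits 3 and 4 of nr, i.e. it
 * reverses the byte order of the bit numbering within each 32-bit word
 * while leaving the bit-within-byte position alone; presumably this
 * maps ext2's bitmap bit numbering onto the native test_and_*_bit()
 * numbering used above.
 */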
#define ext2_set_bit_atomic(lock, nr, addr) \
	test_and_set_bit((nr) ^ 0x18, (addr))
#define ext2_clear_bit_atomic(lock, nr, addr) \
	test_and_clear_bit((nr) ^ 0x18, (addr))

#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/minix-le.h>

#endif /* __KERNEL__ */
#endif /* __ASM_BITOPS_H */