linux/arch/metag/include/asm/bitops.h
#ifndef __ASM_METAG_BITOPS_H
#define __ASM_METAG_BITOPS_H

#include <linux/compiler.h>
#include <asm/barrier.h>
#include <asm/global_lock.h>

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

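/*
 * Example ordering (an illustrative sketch only; "ctx", "result" and
 * BUSY_BIT are invented names, not part of this header): a caller that
 * publishes data and then releases a flag bit supplies the barrier
 * explicitly, e.g.
 *
 *	ctx->result = val;
 *	smp_mb__before_clear_bit();
 *	clear_bit(BUSY_BIT, &ctx->flags);
 *
 * so the stores above cannot be reordered by the compiler past the
 * clear_bit().
 */
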
#ifdef CONFIG_SMP
/*
 * These functions are the basis of our bit ops.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__global_lock1(flags);
	fence();
	*p |= mask;
	__global_unlock1(flags);
}
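
/*
 * Usage sketch (the bitmap name below is hypothetical): the bit number is
 * split into a word index (bit >> 5) and a mask within that 32-bit word
 * (1UL << (bit & 31)), so bitmaps spanning several words work naturally:
 *
 *	static unsigned long feature_map[2];
 *
 *	set_bit(37, feature_map);	atomically sets bit 5 of feature_map[1]
 */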

static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__global_lock1(flags);
	fence();
	*p &= ~mask;
	__global_unlock1(flags);
}

static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__global_lock1(flags);
	fence();
	*p ^= mask;
	__global_unlock1(flags);
}
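
/*
 * Illustrative sketch (names invented for the example): change_bit()
 * atomically XORs the mask into the word holding the bit, e.g. toggling a
 * polarity flag shared between threads:
 *
 *	change_bit(POLARITY_BIT, &status_bits);
 */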

static inline int test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long old;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__global_lock1(flags);
	old = *p;
	if (!(old & mask)) {
		fence();
		*p = old | mask;
	}
	__global_unlock1(flags);

	return (old & mask) != 0;
}
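
/*
 * Sketch of a common caller pattern (BUSY_BIT and dev_flags are invented
 * names): because test_and_set_bit() returns the previous bit value, it
 * can act as a simple try-lock on a flag word:
 *
 *	if (test_and_set_bit(BUSY_BIT, &dev_flags))
 *		return -EBUSY;
 *	...critical work...
 *	clear_bit(BUSY_BIT, &dev_flags);
 *
 * Note that the implementation above only rewrites the word when the bit
 * was previously clear, hence the "if (!(old & mask))" guard around the
 * store.
 */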

static inline int test_and_clear_bit(unsigned int bit,
				     volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long old;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__global_lock1(flags);
	old = *p;
	if (old & mask) {
		fence();
		*p = old & ~mask;
	}
	__global_unlock1(flags);

	return (old & mask) != 0;
}
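
/*
 * Illustrative use (hypothetical names): consuming a "work pending" flag
 * exactly once even when several threads race to service it:
 *
 *	if (test_and_clear_bit(PENDING_BIT, &pending_flags))
 *		process_pending_work();
 *
 * Only the caller that observed the bit set clears it and runs the
 * handler; the others see 0 returned and skip the work.
 */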

static inline int test_and_change_bit(unsigned int bit,
				      volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long old;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	__global_lock1(flags);
	fence();
	old = *p;
	*p = old ^ mask;
	__global_unlock1(flags);

	return (old & mask) != 0;
}
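
/*
 * Sketch (names not from this file): test_and_change_bit() toggles the bit
 * and reports its previous state, e.g. flipping a double-buffer selector:
 *
 *	old = test_and_change_bit(BUFFER_SEL_BIT, &buf_state);
 *
 * Unlike test_and_set_bit()/test_and_clear_bit() above, the word is
 * rewritten unconditionally, so fence() is issued on every call.
 */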

#else
#include <asm-generic/bitops/atomic.h>
#endif /* CONFIG_SMP */

#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/lock.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* __ASM_METAG_BITOPS_H */