/* linux/arch/metag/include/asm/bitops.h */
   1#ifndef __ASM_METAG_BITOPS_H
   2#define __ASM_METAG_BITOPS_H
   3
   4#include <linux/compiler.h>
   5#include <asm/barrier.h>
   6#include <asm/global_lock.h>
   7
   8#ifdef CONFIG_SMP
   9/*
  10 * These functions are the basis of our bit ops.
  11 */
  12static inline void set_bit(unsigned int bit, volatile unsigned long *p)
  13{
  14        unsigned long flags;
  15        unsigned long mask = 1UL << (bit & 31);
  16
  17        p += bit >> 5;
  18
  19        __global_lock1(flags);
  20        fence();
  21        *p |= mask;
  22        __global_unlock1(flags);
  23}
  24
  25static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
  26{
  27        unsigned long flags;
  28        unsigned long mask = 1UL << (bit & 31);
  29
  30        p += bit >> 5;
  31
  32        __global_lock1(flags);
  33        fence();
  34        *p &= ~mask;
  35        __global_unlock1(flags);
  36}
  37
  38static inline void change_bit(unsigned int bit, volatile unsigned long *p)
  39{
  40        unsigned long flags;
  41        unsigned long mask = 1UL << (bit & 31);
  42
  43        p += bit >> 5;
  44
  45        __global_lock1(flags);
  46        fence();
  47        *p ^= mask;
  48        __global_unlock1(flags);
  49}
  50
/*
 * Atomically set bit @bit at @p and return its previous value
 * (non-zero if the bit was already set).
 *
 * Atomicity on SMP is provided by the hardware global lock.  Note the
 * store (and the fence() preceding it) is skipped entirely when the
 * bit is already set, so the word is only written when it changes.
 */
static inline int test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
        unsigned long flags;
        unsigned long old;
        unsigned long mask = 1UL << (bit & 31);

        /* step to the 32-bit word that holds the bit */
        p += bit >> 5;

        __global_lock1(flags);
        old = *p;
        if (!(old & mask)) {
                /* fence before the locked store; see asm/barrier.h */
                fence();
                *p = old | mask;
        }
        __global_unlock1(flags);

        return (old & mask) != 0;
}
  69
/*
 * Atomically clear bit @bit at @p and return its previous value
 * (non-zero if the bit was set).
 *
 * Atomicity on SMP is provided by the hardware global lock.  Note the
 * store (and the fence() preceding it) is skipped entirely when the
 * bit is already clear, so the word is only written when it changes.
 */
static inline int test_and_clear_bit(unsigned int bit,
                                     volatile unsigned long *p)
{
        unsigned long flags;
        unsigned long old;
        unsigned long mask = 1UL << (bit & 31);

        /* step to the 32-bit word that holds the bit */
        p += bit >> 5;

        __global_lock1(flags);
        old = *p;
        if (old & mask) {
                /* fence before the locked store; see asm/barrier.h */
                fence();
                *p = old & ~mask;
        }
        __global_unlock1(flags);

        return (old & mask) != 0;
}
  89
/*
 * Atomically toggle bit @bit at @p and return its previous value
 * (non-zero if the bit was set).
 *
 * Atomicity on SMP is provided by the hardware global lock.  Unlike
 * the test_and_set/test_and_clear variants, the word is always
 * written (a toggle always changes it), so fence() is unconditional.
 */
static inline int test_and_change_bit(unsigned int bit,
                                      volatile unsigned long *p)
{
        unsigned long flags;
        unsigned long old;
        unsigned long mask = 1UL << (bit & 31);

        /* step to the 32-bit word that holds the bit */
        p += bit >> 5;

        __global_lock1(flags);
        /* fence before the locked store; see asm/barrier.h */
        fence();
        old = *p;
        *p = old ^ mask;
        __global_unlock1(flags);

        return (old & mask) != 0;
}
 107
 108#else
 109#include <asm-generic/bitops/atomic.h>
 110#endif /* CONFIG_SMP */
 111
 112#include <asm-generic/bitops/non-atomic.h>
 113#include <asm-generic/bitops/find.h>
 114#include <asm-generic/bitops/ffs.h>
 115#include <asm-generic/bitops/__ffs.h>
 116#include <asm-generic/bitops/ffz.h>
 117#include <asm-generic/bitops/fls.h>
 118#include <asm-generic/bitops/__fls.h>
 119#include <asm-generic/bitops/fls64.h>
 120#include <asm-generic/bitops/hweight.h>
 121#include <asm-generic/bitops/lock.h>
 122#include <asm-generic/bitops/sched.h>
 123#include <asm-generic/bitops/le.h>
 124#include <asm-generic/bitops/ext2-atomic.h>
 125
 126#endif /* __ASM_METAG_BITOPS_H */
 127