linux/arch/blackfin/include/asm/bitops.h
/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef _BLACKFIN_BITOPS_H
#define _BLACKFIN_BITOPS_H

#include <linux/compiler.h>

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/const_hweight.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/ext2-atomic.h>

#ifndef CONFIG_SMP
#include <linux/irqflags.h>

/*
 * clear_bit may not imply a memory barrier
 */
#ifndef smp_mb__before_clear_bit
#define smp_mb__before_clear_bit()      smp_mb()
#define smp_mb__after_clear_bit()       smp_mb()
#endif
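/*
 * On !SMP the generic atomic bitops below are expected to fall back to
 * local_irq_save()/local_irq_restore() around the read-modify-write,
 * which is why <linux/irqflags.h> is included above.
 */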
#include <asm-generic/bitops/atomic.h>
#include <asm-generic/bitops/non-atomic.h>
#else

#include <asm/barrier.h>
#include <asm/byteorder.h>      /* swab32 */
#include <linux/linkage.h>

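/*
 * SMP-safe bit primitives, implemented in assembly outside this header.
 * The wrappers further down rely on the test_* variants returning the
 * previous state of the bit.
 */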
asmlinkage int __raw_bit_set_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_clear_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_toggle_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_set_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_clear_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_toggle_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_asm(const volatile unsigned long *addr, int nr);

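/*
 * Each wrapper splits the bit number into a word index (nr >> 5 selects
 * the 32-bit word) and a bit index within that word (nr & 0x1f), then
 * hands the update to the assembly helpers above.
 */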
static inline void set_bit(int nr, volatile unsigned long *addr)
{
        volatile unsigned long *a = addr + (nr >> 5);
        __raw_bit_set_asm(a, nr & 0x1f);
}

static inline void clear_bit(int nr, volatile unsigned long *addr)
{
        volatile unsigned long *a = addr + (nr >> 5);
        __raw_bit_clear_asm(a, nr & 0x1f);
}

static inline void change_bit(int nr, volatile unsigned long *addr)
{
        volatile unsigned long *a = addr + (nr >> 5);
        __raw_bit_toggle_asm(a, nr & 0x1f);
}

static inline int test_bit(int nr, const volatile unsigned long *addr)
{
        volatile const unsigned long *a = addr + (nr >> 5);
        return __raw_bit_test_asm(a, nr & 0x1f) != 0;
}

static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
        volatile unsigned long *a = addr + (nr >> 5);
        return __raw_bit_test_set_asm(a, nr & 0x1f);
}

static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
        volatile unsigned long *a = addr + (nr >> 5);
        return __raw_bit_test_clear_asm(a, nr & 0x1f);
}

static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
        volatile unsigned long *a = addr + (nr >> 5);
        return __raw_bit_test_toggle_asm(a, nr & 0x1f);
}

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()      barrier()
#define smp_mb__after_clear_bit()       barrier()

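/*
 * Pull in the generic non-atomic helpers (__set_bit() and friends).  The
 * generic header also defines a plain test_bit(); renaming it to
 * __skip_test_bit for the duration of the include keeps it from clashing
 * with the SMP-safe test_bit() defined above.
 */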
#define test_bit __skip_test_bit
#include <asm-generic/bitops/non-atomic.h>
#undef test_bit

#endif /* CONFIG_SMP */

/* Needs to be after test_bit and friends */
#include <asm-generic/bitops/le.h>

/*
 * hweightN: returns the Hamming weight (i.e. the number
 * of bits set) of an N-bit word
 */

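/*
 * The Blackfin ONES instruction counts the bits set in a 32-bit register
 * and leaves the count in the low half of the destination; the second
 * statement zero-extends that 16-bit result to a full 32-bit value.
 */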
static inline unsigned int __arch_hweight32(unsigned int w)
{
        unsigned int res;

        __asm__ ("%0.l = ONES %1;"
                "%0 = %0.l (Z);"
                : "=d" (res) : "d" (w));
        return res;
}

static inline unsigned int __arch_hweight64(__u64 w)
{
        return __arch_hweight32((unsigned int)(w >> 32)) +
               __arch_hweight32((unsigned int)w);
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
        return __arch_hweight32(w & 0xffff);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
        return __arch_hweight32(w & 0xff);
}

#endif                          /* _BLACKFIN_BITOPS_H */