#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#include <asm/cmpxchg.h>

#ifdef CONFIG_SMP
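/*
 * On SMP Blackfin the cores do not keep their caches coherent, so the
 * atomic operations are backed by out-of-line assembly helpers rather
 * than the generic implementations.
 */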

#include <asm/barrier.h>
#include <linux/linkage.h>
#include <linux/types.h>

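/*
 * Assembly-implemented primitives: the "add" helper returns the new
 * counter value, while the "xadd" and bitwise helpers return the value
 * held before the operation, matching the atomic_*_return() and
 * atomic_fetch_*() interfaces defined on top of them below.
 */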
asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);
asmlinkage int __raw_atomic_add_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_xadd_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_and_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_or_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);
asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

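/* Read via the uncached fetch helper so a stale cached value is never seen. */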
#define atomic_read(v) __raw_uncached_fetch_asm(&(v)->counter)

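/* Arithmetic ops that return the new counter value. */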
#define atomic_add_return(i, v) __raw_atomic_add_asm(&(v)->counter, i)
#define atomic_sub_return(i, v) __raw_atomic_add_asm(&(v)->counter, -(i))

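/* Arithmetic ops that return the counter value prior to the operation. */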
#define atomic_fetch_add(i, v) __raw_atomic_xadd_asm(&(v)->counter, i)
#define atomic_fetch_sub(i, v) __raw_atomic_xadd_asm(&(v)->counter, -(i))

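/* Bitwise updates that discard the old value. */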
#define atomic_or(i, v) (void)__raw_atomic_or_asm(&(v)->counter, i)
#define atomic_and(i, v) (void)__raw_atomic_and_asm(&(v)->counter, i)
#define atomic_xor(i, v) (void)__raw_atomic_xor_asm(&(v)->counter, i)

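/* Bitwise updates that return the old value. */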
#define atomic_fetch_or(i, v) __raw_atomic_or_asm(&(v)->counter, i)
#define atomic_fetch_and(i, v) __raw_atomic_and_asm(&(v)->counter, i)
#define atomic_fetch_xor(i, v) __raw_atomic_xor_asm(&(v)->counter, i)

#endif /* CONFIG_SMP */

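/*
 * The generic header provides the !SMP implementations and any
 * operations not defined above.
 */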
#include <asm-generic/atomic.h>

#endif /* __ARCH_BLACKFIN_ATOMIC__ */