#ifndef __ARCH_H8300_CMPXCHG__
#define __ARCH_H8300_CMPXCHG__

#include <linux/irqflags.h>

#define xchg(ptr, x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), \
				    sizeof(*(ptr))))

struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
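
/*
 * __xg() casts the target to a large dummy structure so that the "m"
 * constraints below name the pointed-to object itself, not just its
 * address; this keeps GCC from caching the old value in a register
 * across the asm statement.
 */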
static inline unsigned long __xchg(unsigned long x,
				   volatile void *ptr, int size)
{
	unsigned long tmp, flags;

	/*
	 * On this uniprocessor port, masking interrupts around the
	 * load/store pair is enough to make the exchange atomic.
	 */
	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__
			("mov.b %2,%0\n\t"	/* tmp = *ptr */
			 "mov.b %1,%2"		/* *ptr = x */
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	case 2:
		__asm__ __volatile__
			("mov.w %2,%0\n\t"
			 "mov.w %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	case 4:
		__asm__ __volatile__
			("mov.l %2,%0\n\t"
			 "mov.l %1,%2"
			 : "=&r" (tmp) : "r" (x), "m" (*__xg(ptr)));
		break;
	default:
		tmp = 0;	/* unsupported size: only 1, 2 and 4 bytes work */
	}
	local_irq_restore(flags);
	return tmp;
}
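
/*
 * Usage sketch (illustrative, not part of the original header): xchg()
 * stores a new value and hands back the old one in a single atomic
 * step. The variable name below is hypothetical.
 *
 *	static unsigned long pending_flags;
 *
 *	unsigned long old = xchg(&pending_flags, 0UL);
 *	(old now holds the flags that were pending; the word is zeroed)
 */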

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr),		       \
						     (unsigned long)(o),       \
						     (unsigned long)(n),       \
						     sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
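
/*
 * Usage sketch (illustrative only): cmpxchg_local() compares *ptr with
 * an expected value and stores the new one only on a match, returning
 * whatever was read. It is atomic only wrt the current CPU. The
 * variable name below is hypothetical.
 *
 *	static int state;
 *
 *	if (cmpxchg_local(&state, 0, 1) == 0)
 *		(we won the 0 -> 1 transition)
 */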

/*
 * On a uniprocessor, a cmpxchg that is atomic wrt the local CPU is
 * atomic system-wide, so the generic implementation is sufficient.
 */
#ifndef CONFIG_SMP
#include <asm-generic/cmpxchg.h>
#endif

/* Exchange the counter of an atomic_t via xchg() above. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
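
/*
 * Usage sketch (illustrative only, hypothetical variable): swap out an
 * atomic_t counter and observe its previous value.
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *
 *	int old = atomic_xchg(&refs, 0);	(old == previous count)
 */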

#endif /* __ARCH_H8300_CMPXCHG__ */