#ifndef __ASM_SH_CMPXCHG_LLSC_H
#define __ASM_SH_CMPXCHG_LLSC_H

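/*
 * Atomically exchange the 32-bit word at *m with val and return the
 * old value.  movli.l/movco.l form a load-locked/store-conditional
 * pair: if the conditional store fails (T bit clear), bf loops back
 * and retries.  The trailing synco acts as a memory barrier.
 */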
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:					\n\t"
		"movli.l	@%2, %0	! xchg_u32	\n\t"
		"mov		%0, %1			\n\t"
		"mov		%3, %0			\n\t"
		"movco.l	%0, @%2			\n\t"
		"bf		1b			\n\t"
		"synco					\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (val)
		: "t", "memory"
	);

	return retval;
}

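/*
 * Atomically compare the word at *m with old and, if they are equal,
 * store new; the value read from *m is returned either way.  When the
 * comparison fails, the branch to 2: skips the update of %0, so the
 * movco.l simply writes back the value that was just loaded.
 */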
static inline unsigned long
__cmpxchg_u32(volatile u32 *m, unsigned long old, unsigned long new)
{
	unsigned long retval;
	unsigned long tmp;

	__asm__ __volatile__ (
		"1:						\n\t"
		"movli.l	@%2, %0	! __cmpxchg_u32		\n\t"
		"mov		%0, %1				\n\t"
		"cmp/eq		%1, %3				\n\t"
		"bf		2f				\n\t"
		"mov		%4, %0				\n\t"
		"2:						\n\t"
		"movco.l	%0, @%2				\n\t"
		"bf		1b				\n\t"
		"synco						\n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (m), "r" (old), "r" (new)
		: "t", "memory"
	);

	return retval;
}

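/*
 * asm/cmpxchg-xchg.h supplies the 1- and 2-byte xchg variants by
 * building them on top of the 32-bit cmpxchg above.
 */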
#include <asm/cmpxchg-xchg.h>

#endif /* __ASM_SH_CMPXCHG_LLSC_H */