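/*
 * Blackfin system.h: memory barriers, xchg()/cmpxchg() primitives, and the
 * switch_to() context-switch macro for the Blackfin architecture.
 */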
#ifndef _BLACKFIN_SYSTEM_H
#define _BLACKFIN_SYSTEM_H

#include <linux/linkage.h>
#include <linux/irqflags.h>
#include <mach/anomaly.h>
#include <asm/cache.h>
#include <asm/pda.h>
#include <asm/irq.h>

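/*
 * Force strict CPU ordering.  On Blackfin the plain barriers reduce to
 * compiler barriers; only the SMP variants below need extra work.
 */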
#define nop()	__asm__ __volatile__ ("nop;\n\t" : : )
#define mb()	__asm__ __volatile__ ("" : : : "memory")
#define rmb()	__asm__ __volatile__ ("" : : : "memory")
#define wmb()	__asm__ __volatile__ ("" : : : "memory")
#define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
#define read_barrier_depends()	do { } while (0)

#ifdef CONFIG_SMP
asmlinkage unsigned long __raw_xchg_1_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_2_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_4_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_cmpxchg_1_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_2_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_4_asm(volatile void *ptr,
					unsigned long new, unsigned long old);

#ifdef __ARCH_SYNC_CORE_DCACHE
/*
 * When core data caches must be synchronized in software, the SMP barriers
 * also check/mark the cross-core cache state.
 */
# define smp_mb()	do { barrier(); smp_check_barrier(); smp_mark_barrier(); } while (0)
# define smp_rmb()	do { barrier(); smp_check_barrier(); } while (0)
# define smp_wmb()	do { barrier(); smp_mark_barrier(); } while (0)
# define smp_read_barrier_depends()	do { barrier(); smp_check_barrier(); } while (0)
#else
# define smp_mb()	barrier()
# define smp_rmb()	barrier()
# define smp_wmb()	barrier()
# define smp_read_barrier_depends()	barrier()
#endif

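/*
 * __xchg() atomically exchanges *ptr with x on SMP by delegating to the
 * size-specific __raw_xchg_*_asm() helpers; size must be 1, 2, or 4 bytes
 * (the xchg() wrapper below passes sizeof(*(ptr))).
 */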
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp;

	switch (size) {
	case 1:
		tmp = __raw_xchg_1_asm(ptr, x);
		break;
	case 2:
		tmp = __raw_xchg_2_asm(ptr, x);
		break;
	case 4:
		tmp = __raw_xchg_4_asm(ptr, x);
		break;
	}

	return tmp;
}

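/*
 * Atomic compare-and-exchange: if *ptr equals old, store new in *ptr.
 * The value previously read from *ptr is returned, so success is detected
 * by comparing the return value with old.
 */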
static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	unsigned long tmp;

	switch (size) {
	case 1:
		tmp = __raw_cmpxchg_1_asm(ptr, new, old);
		break;
	case 2:
		tmp = __raw_cmpxchg_2_asm(ptr, new, old);
		break;
	case 4:
		tmp = __raw_cmpxchg_4_asm(ptr, new, old);
		break;
	}

	return tmp;
}
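/*
 * cmpxchg(ptr, o, n) casts the result back to the pointed-to type.  A
 * typical (purely illustrative) lock-free update loop looks like:
 *
 *	old = *p;
 *	while ((tmp = cmpxchg(p, old, old + 1)) != old)
 *		old = tmp;
 */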
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
		(unsigned long)(n), sizeof(*(ptr))))

#else /* !CONFIG_SMP */

#define smp_mb()	barrier()
#define smp_rmb()	barrier()
#define smp_wmb()	barrier()
#define smp_read_barrier_depends()	do { } while (0)

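/*
 * The __xg() cast makes the asm operands refer to a large dummy structure,
 * telling gcc that the whole object behind ptr may be accessed, so it will
 * not keep a stale copy cached in a register across the exchange.
 */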
struct __xchg_dummy {
	unsigned long a[100];
};
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

#include <mach/blackfin.h>

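/*
 * UP __xchg(): the load/store pair is made atomic by disabling hardware
 * interrupts around it with local_irq_save_hw()/local_irq_restore_hw().
 */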
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
{
	unsigned long tmp = 0;
	unsigned long flags;

	local_irq_save_hw(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__
			("%0 = b%2 (z);\n\t"
			 "b%2 = %1;\n\t"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("%0 = w%2 (z);\n\t"
			 "w%2 = %1;\n\t"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("%0 = %2;\n\t"
			 "%2 = %1;\n\t"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	local_irq_restore_hw(flags);
	return tmp;
}

#include <asm-generic/cmpxchg-local.h>

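/*
 * cmpxchg_local() and cmpxchg64_local() are atomic with respect to the
 * current CPU only, so the generic (interrupt-disabling) implementations
 * are sufficient here.
 */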
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o), \
		(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

#include <asm-generic/cmpxchg.h>

#endif /* !CONFIG_SMP */

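/*
 * xchg() dispatches on sizeof(*(ptr)) to the appropriate __xchg() case;
 * tas() is the traditional test-and-set, implemented as xchg(ptr, 1).
 */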
#define xchg(ptr, x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))
#define tas(ptr) ((void)xchg((ptr), 1))

#define prepare_to_switch() do { } while (0)

#include <asm/l1layout.h>
#include <asm/mem_map.h>

asmlinkage struct task_struct *resume(struct task_struct *prev, struct task_struct *next);

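/*
 * switch_to() hands the CPU from prev to next via resume().  On UP builds
 * the contents of the shared L1 scratchpad area (L1_SCRATCH_TASK_INFO) are
 * copied into the outgoing task's thread_info and refilled from the
 * incoming task's, so every task sees its own l1_task_info there.
 */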
#ifndef CONFIG_SMP
#define switch_to(prev, next, last) \
do { \
	memcpy(&task_thread_info(prev)->l1_task_info, L1_SCRATCH_TASK_INFO, \
	       sizeof *L1_SCRATCH_TASK_INFO); \
	memcpy(L1_SCRATCH_TASK_INFO, &task_thread_info(next)->l1_task_info, \
	       sizeof *L1_SCRATCH_TASK_INFO); \
	(last) = resume(prev, next); \
} while (0)
#else
#define switch_to(prev, next, last) \
do { \
	(last) = resume(prev, next); \
} while (0)
#endif

#endif /* _BLACKFIN_SYSTEM_H */