#ifndef _M68KNOMMU_SYSTEM_H
#define _M68KNOMMU_SYSTEM_H

#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/entry.h>
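/*
 * switch_to() hands 'prev' to resume() in %a0 and 'next' in %a1;
 * resume() performs the actual context switch and returns the
 * previously running task in %d1, which is passed back through
 * 'last'.  Every register not preserved by resume() is listed as
 * clobbered.
 */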
asmlinkage void resume(void);
#define switch_to(prev,next,last) \
{ \
	void *_last; \
	__asm__ __volatile__( \
		"movel %1, %%a0\n\t" \
		"movel %2, %%a1\n\t" \
		"jbsr resume\n\t" \
		"movel %%d1, %0\n\t" \
		: "=d" (_last) \
		: "d" (prev), "d" (next) \
		: "cc", "d0", "d1", "d2", "d3", "d4", "d5", "a0", "a1"); \
	(last) = _last; \
}

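/*
 * ColdFire cannot apply immediate logic operations to the status
 * register directly, so the interrupt priority mask (SR bits 8-10)
 * is updated by way of %d0.
 */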
#ifdef CONFIG_COLDFIRE
#define local_irq_enable() __asm__ __volatile__ ( \
	"move %/sr,%%d0\n\t" \
	"andi.l #0xf8ff,%%d0\n\t" \
	"move %%d0,%/sr\n" \
	: /* no outputs */ \
	: /* no inputs */ \
	: "cc", "%d0", "memory")
#define local_irq_disable() __asm__ __volatile__ ( \
	"move %/sr,%%d0\n\t" \
	"ori.l #0x0700,%%d0\n\t" \
	"move %%d0,%/sr\n" \
	: /* no outputs */ \
	: /* no inputs */ \
	: "cc", "%d0", "memory")

#define local_irq_save(x) __asm__ __volatile__ ( \
	"movew %%sr,%0\n\t" \
	"movew #0x0700,%%d0\n\t" \
	"or.l %0,%%d0\n\t" \
	"movew %%d0,%/sr" \
	: "=d" (x) \
	: /* no inputs */ \
	: "cc", "%d0", "memory")
#else

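/*
 * 0xf8ff clears the three interrupt priority bits (8-10) of the
 * status register; and-ing it into %sr enables all interrupt levels.
 */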
#define ALLOWINT 0xf8ff

#define local_irq_enable() asm volatile ("andiw %0,%%sr": : "i" (ALLOWINT) : "memory")
#define local_irq_disable() asm volatile ("oriw #0x0700,%%sr": : : "memory")
#endif

#define local_save_flags(x) asm volatile ("movew %%sr,%0":"=d" (x) : : "memory")
#define local_irq_restore(x) asm volatile ("movew %0,%%sr": :"d" (x) : "memory")

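/*
 * Fallback for configurations that did not define local_irq_save()
 * above.  Typical usage:
 *
 *	unsigned long flags;
 *	local_irq_save(flags);
 *	... critical section ...
 *	local_irq_restore(flags);
 */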
#ifndef local_irq_save
#define local_irq_save(x) do { local_save_flags(x); local_irq_disable(); } while (0)
#endif

#define irqs_disabled() \
({ \
	unsigned long flags; \
	local_save_flags(flags); \
	((flags & 0x0700) == 0x0700); \
})

#define iret() __asm__ __volatile__ ("rte": : :"memory", "sp", "cc")
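/*
 * These uniprocessor cores only need the barriers to stop the
 * compiler from reordering (hence the empty asm with a "memory"
 * clobber); no CPU synchronizing instruction is issued.
 */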
#define nop() asm volatile ("nop"::)
#define mb() asm volatile ("" : : :"memory")
#define rmb() asm volatile ("" : : :"memory")
#define wmb() asm volatile ("" : : :"memory")
#define set_mb(var, value) do { xchg(&var, value); } while (0)

#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#else
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_read_barrier_depends() do { } while (0)
#endif

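/*
 * xchg() dispatches on operand size to __xchg() below.  The
 * __xchg_dummy cast lets the "m" constraint refer to the pointed-to
 * object without the compiler needing to know its actual size.
 */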
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

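/*
 * Without read-modify-write instructions, the exchange is made atomic
 * with respect to interrupts by masking them around the two moves.
 */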
#ifndef CONFIG_RMW_INSNS
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long tmp, flags;

	local_irq_save(flags);

	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "moveb %1,%2"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "movew %1,%2"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "movel %1,%2"
			 : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	local_irq_restore(flags);
	return tmp;
}
#else
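/*
 * With RMW instructions available, use a compare-and-swap (cas) retry
 * loop instead of masking interrupts.
 */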
static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
{
	switch (size) {
	case 1:
		__asm__ __volatile__
			("moveb %2,%0\n\t"
			 "1:\n\t"
			 "casb %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		__asm__ __volatile__
			("movew %2,%0\n\t"
			 "1:\n\t"
			 "casw %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		__asm__ __volatile__
			("movel %2,%0\n\t"
			 "1:\n\t"
			 "casl %0,%1,%2\n\t"
			 "jne 1b"
			 : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	return x;
}
#endif
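/*
 * Atomic compare and exchange: compare *p with 'old'; if they are
 * equal, store 'new' in *p.  Returns the initial value of *p, so
 * success is detected by comparing the return value with 'old'.
 */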
#define __HAVE_ARCH_CMPXCHG 1

static __inline__ unsigned long
cmpxchg(volatile int *p, int old, int new)
{
	unsigned long flags;
	int prev;

	local_irq_save(flags);
	if ((prev = *p) == old)
		*p = new;
	local_irq_restore(flags);
	return prev;
}

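/*
 * HARD_RESET_NOW() restarts the CPU.  Each platform has its own way
 * in: some jump through the reset vector, others poke a dedicated
 * reset register.
 */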
#ifdef CONFIG_M68332
#define HARD_RESET_NOW() ({ \
	local_irq_disable(); \
	asm(" \
		movew #0x0000, 0xfffa6a; \
		reset; \
		moveal #0, %a0; \
		movec %a0, %vbr; \
		moveal 0, %sp; \
		moveal 4, %a0; \
		jmp (%a0); \
	"); \
})
#endif

#if defined(CONFIG_M68328) || defined(CONFIG_M68EZ328) || \
    defined(CONFIG_M68360) || defined(CONFIG_M68VZ328)
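/*
 * 68x28: poke the interrupt controller at 0xFFFFF300, then restart
 * through the initial SP/PC pair kept at 0x10c00000 in FLASH.
 */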
#define HARD_RESET_NOW() ({ \
	local_irq_disable(); \
	asm(" \
		moveal #0x10c00000, %a0; \
		moveb #0, 0xFFFFF300; \
		moveal 0(%a0), %sp; \
		moveal 4(%a0), %a0; \
		jmp (%a0); \
	"); \
})
#endif

#ifdef CONFIG_COLDFIRE
#if defined(CONFIG_M5272) && defined(CONFIG_NETtel)
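/*
 * NETtel/5272 boards: jump straight to a known entry point in FLASH
 * rather than going through the reset vector.
 */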
#define HARD_RESET_NOW() ({ \
	asm(" \
		movew #0x2700, %sr; \
		jmp 0xf0000400; \
	"); \
})
#elif defined(CONFIG_NETtel) || defined(CONFIG_eLIA) || \
      defined(CONFIG_SECUREEDGEMP3) || defined(CONFIG_CLEOPATRA)
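/*
 * Board-specific sequence: write to two memory-mapped control
 * registers, then jump through the FLASH vector at 0xf0000004.
 */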
#define HARD_RESET_NOW() ({ \
	asm(" \
		movew #0x2700, %sr; \
		moveal #0x10000044, %a0; \
		movel #0xffffffff, (%a0); \
		moveal #0x10000001, %a0; \
		moveb #0x00, (%a0); \
		moveal #0xf0000004, %a0; \
		moveal (%a0), %a0; \
		jmp (%a0); \
	"); \
})
#elif defined(CONFIG_M5272)
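/*
 * 5272: compute the FLASH base from the chip-select registers at
 * MBAR+0x40/0x44, fetch the reset vector at offset 4 within FLASH,
 * and jump to it.
 */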
#define HARD_RESET_NOW() ({ \
	asm(" \
		movew #0x2700, %%sr; \
		move.l %0+0x40, %%d0; \
		and.l %0+0x44, %%d0; \
		andi.l #0xfffff000, %%d0; \
		mov.l %%d0, %%a0; \
		or.l 4(%%a0), %%d0; \
		mov.l %%d0, %%a0; \
		jmp (%%a0);" \
		: /* no outputs */ \
		: "o" (*(char *)MCF_MBAR)); \
})
#elif defined(CONFIG_M528x)
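/*
 * 528x: set the soft-reset bit (bit 7) in the Reset Control Register
 * at MCF_IPSBAR + 0x110000 and spin until the reset takes effect.
 */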
#define HARD_RESET_NOW() \
({ \
	volatile unsigned char *reset; \
	asm("move.w #0x2700, %sr"); \
	reset = (volatile unsigned char *)(MCF_IPSBAR + 0x110000); \
	while (1) \
		*reset |= (0x01 << 7); \
})
#elif defined(CONFIG_M523x)
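/*
 * 523x: write the soft-reset bit directly to the Reset Control
 * Register at its default IPSBAR-relative location (0x40110000).
 */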
#define HARD_RESET_NOW() ({ \
	asm(" \
		movew #0x2700, %sr; \
		movel #0x01000000, %sp; \
		moveal #0x40110000, %a0; \
		moveb #0x80, (%a0); \
	"); \
})
#elif defined(CONFIG_M520x)
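/*
 * 520x: same soft-reset scheme as the 528x, with the Reset Control
 * Register at MCF_IPSBAR + 0xA0000.
 */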
#define HARD_RESET_NOW() \
({ \
	volatile unsigned char *reset; \
	asm("move.w #0x2700, %sr"); \
	reset = (volatile unsigned char *)(MCF_IPSBAR + 0xA0000); \
	while (1) \
		*reset |= 0x80; \
})
#else
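/*
 * Default ColdFire reset: reload the initial PC from the exception
 * vector at address 4 and jump to it.
 */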
#define HARD_RESET_NOW() ({ \
	asm(" \
		movew #0x2700, %sr; \
		moveal #0x4, %a0; \
		moveal (%a0), %a0; \
		jmp (%a0); \
	"); \
})
#endif
#endif /* CONFIG_COLDFIRE */

#define arch_align_stack(x) (x)

#endif /* _M68KNOMMU_SYSTEM_H */