#ifndef _ASM_IA64_PARAVIRT_PRIVOP_H
#define _ASM_IA64_PARAVIRT_PRIVOP_H

#ifdef CONFIG_PARAVIRT

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <asm/kregs.h>

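/*
 * Replacement of privileged operations and intrinsics.  A paravirtualized
 * guest supplies its own handlers through this table; the native
 * implementations remain reachable as ia64_native_*.
 */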
struct pv_cpu_ops {
	void (*fc)(void *addr);
	unsigned long (*thash)(unsigned long addr);
	unsigned long (*get_cpuid)(int index);
	unsigned long (*get_pmd)(int index);
	unsigned long (*getreg)(int reg);
	void (*setreg)(int reg, unsigned long val);
	void (*ptcga)(unsigned long addr, unsigned long size);
	unsigned long (*get_rr)(unsigned long index);
	void (*set_rr)(unsigned long index, unsigned long val);
	void (*set_rr0_to_rr4)(unsigned long val0, unsigned long val1,
			       unsigned long val2, unsigned long val3,
			       unsigned long val4);
	void (*ssm_i)(void);
	void (*rsm_i)(void);
	unsigned long (*get_psr_i)(void);
	void (*intrin_local_irq_restore)(unsigned long flags);
};

extern struct pv_cpu_ops pv_cpu_ops;

extern void ia64_native_setreg_func(int regnum, unsigned long val);
extern unsigned long ia64_native_getreg_func(int regnum);

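/*
 * Without ASM_SUPPORTED the patched-bundle versions below are not
 * available, so these operations fall back to plain indirect calls
 * through pv_cpu_ops.
 */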
#ifndef ASM_SUPPORTED
#define paravirt_ssm_i()		pv_cpu_ops.ssm_i()
#define paravirt_rsm_i()		pv_cpu_ops.rsm_i()
#define __paravirt_getreg(reg)		pv_cpu_ops.getreg(reg)
#endif

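/*
 * Only manipulation of psr.i is routed through the paravirt hooks;
 * all other mask bits go straight to the native ssm/rsm.
 */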
#define paravirt_ssm(mask)			\
	do {					\
		if ((mask) == IA64_PSR_I)	\
			paravirt_ssm_i();	\
		else				\
			ia64_native_ssm(mask);	\
	} while (0)

#define paravirt_rsm(mask)			\
	do {					\
		if ((mask) == IA64_PSR_I)	\
			paravirt_rsm_i();	\
		else				\
			ia64_native_rsm(mask);	\
	} while (0)

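/*
 * Reads of _IA64_REG_IP stay native: going through the out-of-line hook
 * would return the IP inside the helper instead of at the call site.
 */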
#define paravirt_getreg(reg)					\
	({							\
		unsigned long res;				\
		if ((reg) == _IA64_REG_IP)			\
			res = ia64_native_getreg(_IA64_REG_IP);	\
		else						\
			res = __paravirt_getreg(reg);		\
		res;						\
	})

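/*
 * Addresses of the hand-written assembly entry points.  A paravirt
 * instance fills this in and registers it via paravirt_cpu_asm_init().
 */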
struct pv_cpu_asm_switch {
	unsigned long switch_to;
	unsigned long leave_syscall;
	unsigned long work_processed_syscall;
	unsigned long leave_kernel;
};
void paravirt_cpu_asm_init(const struct pv_cpu_asm_switch *cpu_asm_switch);

#endif /* __ASSEMBLY__ */

#define IA64_PARAVIRT_ASM_FUNC(name)	paravirt_ ## name

#else

#define IA64_PARAVIRT_ASM_FUNC(name)	ia64_native_ ## name

#endif /* CONFIG_PARAVIRT */

#if defined(CONFIG_PARAVIRT) && defined(ASM_SUPPORTED)
#define paravirt_dv_serialize_data()	ia64_dv_serialize_data()
#else
#define paravirt_dv_serialize_data()
#endif

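/*
 * Assembly entry points resolve to either the paravirtualized or the
 * native implementation, depending on CONFIG_PARAVIRT.
 */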
#define ia64_switch_to			IA64_PARAVIRT_ASM_FUNC(switch_to)
#define ia64_leave_syscall		IA64_PARAVIRT_ASM_FUNC(leave_syscall)
#define ia64_work_processed_syscall	\
	IA64_PARAVIRT_ASM_FUNC(work_processed_syscall)
#define ia64_leave_kernel		IA64_PARAVIRT_ASM_FUNC(leave_kernel)

#if defined(CONFIG_PARAVIRT)

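/*
 * Patch-site type identifiers.  Each privileged-operation call site is
 * tagged with one of these so the patching code can recognize it and
 * rewrite it in place.
 */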
#define PARAVIRT_PATCH_TYPE_FC				1
#define PARAVIRT_PATCH_TYPE_THASH			2
#define PARAVIRT_PATCH_TYPE_GET_CPUID			3
#define PARAVIRT_PATCH_TYPE_GET_PMD			4
#define PARAVIRT_PATCH_TYPE_PTCGA			5
#define PARAVIRT_PATCH_TYPE_GET_RR			6
#define PARAVIRT_PATCH_TYPE_SET_RR			7
#define PARAVIRT_PATCH_TYPE_SET_RR0_TO_RR4		8
#define PARAVIRT_PATCH_TYPE_SSM_I			9
#define PARAVIRT_PATCH_TYPE_RSM_I			10
#define PARAVIRT_PATCH_TYPE_GET_PSR_I			11
#define PARAVIRT_PATCH_TYPE_INTRIN_LOCAL_IRQ_RESTORE	12

/* getreg/setreg sites use base type + register number */
#define PARAVIRT_PATCH_TYPE_GETREG			0x10000000
#define PARAVIRT_PATCH_TYPE_SETREG			0x20000000

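/*
 * Branch-target patch types: call sites that branch to the assembly
 * entry points listed in struct pv_cpu_asm_switch.
 */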
#define PARAVIRT_PATCH_TYPE_BR_START			0x30000000
#define PARAVIRT_PATCH_TYPE_BR_SWITCH_TO		\
	(PARAVIRT_PATCH_TYPE_BR_START + 0)
#define PARAVIRT_PATCH_TYPE_BR_LEAVE_SYSCALL		\
	(PARAVIRT_PATCH_TYPE_BR_START + 1)
#define PARAVIRT_PATCH_TYPE_BR_WORK_PROCESSED_SYSCALL	\
	(PARAVIRT_PATCH_TYPE_BR_START + 2)
#define PARAVIRT_PATCH_TYPE_BR_LEAVE_KERNEL		\
	(PARAVIRT_PATCH_TYPE_BR_START + 3)

#ifdef ASM_SUPPORTED
#include <asm/paravirt_patch.h>

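/*
 * Bundle template for calling a pv_cpu_ops hook out of line: r2 is
 * loaded with the address of the pv_cpu_ops slot, the function
 * descriptor found there supplies the entry point (b7) and the callee's
 * gp, the return address (local label 1 after the branch) is passed in
 * b6, and gp is restored after the hook returns.  Arguments and return
 * values travel in r8 and up, as arranged by the PARAVIRT_BR* helpers
 * below.  paravirt_alt_bundle() marks the site so it can later be
 * patched with a more direct sequence.
 */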
#define __PARAVIRT_BR				\
	";;\n"					\
	"{ .mlx\n"				\
	"nop 0\n"				\
	"movl r2 = %[op_addr]\n"		\
	";;\n"					\
	"}\n"					\
	"1:\n"					\
	"{ .mii\n"				\
	"ld8 r2 = [r2]\n"			\
	"mov r17 = ip\n"			\
	"mov r16 = gp\n"			\
	";;\n"					\
	"}\n"					\
	"{ .mii\n"				\
	"ld8 r3 = [r2], 8\n"			\
	"adds r17 = 1f - 1b, r17\n"		\
	";;\n"					\
	"mov b7 = r3\n"				\
	"}\n"					\
	"{ .mib\n"				\
	"ld8 gp = [r2]\n"			\
	"mov b6 = r17\n"			\
	"br.cond.sptk.few b7\n"			\
	"}\n"					\
	"1:\n"					\
	"{ .mii\n"				\
	"mov gp = r16\n"			\
	"nop 0\n"				\
	"nop 0\n"				\
	";;\n"					\
	"}\n"

#define PARAVIRT_OP(op)				\
	[op_addr] "i"(&pv_cpu_ops.op)

#define PARAVIRT_TYPE(type)			\
	PARAVIRT_PATCH_TYPE_ ## type

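/*
 * Registers carrying arguments and return values (r8 and, depending on
 * the operation, r9-r14) are deliberately absent from the clobber lists
 * below; they are declared as explicit register operands instead.
 */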
#define PARAVIRT_REG_CLOBBERS0				\
	"r2", "r3", "r9", "r10", "r11", "r14",		\
		"r15", "r16", "r17"

#define PARAVIRT_REG_CLOBBERS1				\
	"r2", "r3", "r9", "r10", "r11", "r14",		\
		"r15", "r16", "r17"

#define PARAVIRT_REG_CLOBBERS2				\
	"r2", "r3", "r10", "r11", "r14",		\
		"r15", "r16", "r17"

#define PARAVIRT_REG_CLOBBERS5				\
	"r2", "r3",					\
		"r15", "r16", "r17"

#define PARAVIRT_BR_CLOBBERS				\
	"b6", "b7"

#define PARAVIRT_PR_CLOBBERS				\
	"p6", "p7", "p8", "p9", "p10", "p11", "p12", "p13", "p14", "p15"

#define PARAVIRT_AR_CLOBBERS				\
	"ar.ccv"

#define PARAVIRT_CLOBBERS0				\
		PARAVIRT_REG_CLOBBERS0,			\
		PARAVIRT_BR_CLOBBERS,			\
		PARAVIRT_PR_CLOBBERS,			\
		PARAVIRT_AR_CLOBBERS,			\
		"memory"

#define PARAVIRT_CLOBBERS1				\
		PARAVIRT_REG_CLOBBERS1,			\
		PARAVIRT_BR_CLOBBERS,			\
		PARAVIRT_PR_CLOBBERS,			\
		PARAVIRT_AR_CLOBBERS,			\
		"memory"

#define PARAVIRT_CLOBBERS2				\
		PARAVIRT_REG_CLOBBERS2,			\
		PARAVIRT_BR_CLOBBERS,			\
		PARAVIRT_PR_CLOBBERS,			\
		PARAVIRT_AR_CLOBBERS,			\
		"memory"

#define PARAVIRT_CLOBBERS5				\
		PARAVIRT_REG_CLOBBERS5,			\
		PARAVIRT_BR_CLOBBERS,			\
		PARAVIRT_PR_CLOBBERS,			\
		PARAVIRT_AR_CLOBBERS,			\
		"memory"

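/*
 * Each PARAVIRT_BRn helper expands to the register setup plus the asm
 * statement for a hook taking n arguments; it forms the body of the
 * inline wrappers defined further down.  The result, when there is one,
 * comes back in r8 (ia64_intri_res).
 */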
#define PARAVIRT_BR0(op, type)					\
	register unsigned long ia64_clobber asm ("r8");		\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber)			\
		      : PARAVIRT_OP(op)				\
		      : PARAVIRT_CLOBBERS0)

#define PARAVIRT_BR0_RET(op, type)				\
	register unsigned long ia64_intri_res asm ("r8");	\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_intri_res)			\
		      : PARAVIRT_OP(op)				\
		      : PARAVIRT_CLOBBERS0)

#define PARAVIRT_BR1(op, type, arg1)				\
	register unsigned long __##arg1 asm ("r8") = arg1;	\
	register unsigned long ia64_clobber asm ("r8");		\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber)			\
		      : PARAVIRT_OP(op), "0"(__##arg1)		\
		      : PARAVIRT_CLOBBERS1)

#define PARAVIRT_BR1_RET(op, type, arg1)			\
	register unsigned long ia64_intri_res asm ("r8");	\
	register unsigned long __##arg1 asm ("r8") = arg1;	\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_intri_res)			\
		      : PARAVIRT_OP(op), "0"(__##arg1)		\
		      : PARAVIRT_CLOBBERS1)

#define PARAVIRT_BR1_VOID(op, type, arg1)			\
	register void *__##arg1 asm ("r8") = arg1;		\
	register unsigned long ia64_clobber asm ("r8");		\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber)			\
		      : PARAVIRT_OP(op), "0"(__##arg1)		\
		      : PARAVIRT_CLOBBERS1)

#define PARAVIRT_BR2(op, type, arg1, arg2)			\
	register unsigned long __##arg1 asm ("r8") = arg1;	\
	register unsigned long __##arg2 asm ("r9") = arg2;	\
	register unsigned long ia64_clobber1 asm ("r8");	\
	register unsigned long ia64_clobber2 asm ("r9");	\
	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,	\
					  PARAVIRT_TYPE(type))	\
		      : "=r"(ia64_clobber1), "=r"(ia64_clobber2) \
		      : PARAVIRT_OP(op), "0"(__##arg1), "1"(__##arg2) \
		      : PARAVIRT_CLOBBERS2)

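/*
 * Generators for the paravirt_<op>() inline wrappers, one per hook
 * signature in struct pv_cpu_ops.
 */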
#define PARAVIRT_DEFINE_CPU_OP0(op, type)		\
	static inline void				\
	paravirt_ ## op (void)				\
	{						\
		PARAVIRT_BR0(op, type);			\
	}

#define PARAVIRT_DEFINE_CPU_OP0_RET(op, type)		\
	static inline unsigned long			\
	paravirt_ ## op (void)				\
	{						\
		PARAVIRT_BR0_RET(op, type);		\
		return ia64_intri_res;			\
	}

#define PARAVIRT_DEFINE_CPU_OP1_VOID(op, type)		\
	static inline void				\
	paravirt_ ## op (void *arg1)			\
	{						\
		PARAVIRT_BR1_VOID(op, type, arg1);	\
	}

#define PARAVIRT_DEFINE_CPU_OP1(op, type)		\
	static inline void				\
	paravirt_ ## op (unsigned long arg1)		\
	{						\
		PARAVIRT_BR1(op, type, arg1);		\
	}

#define PARAVIRT_DEFINE_CPU_OP1_RET(op, type)		\
	static inline unsigned long			\
	paravirt_ ## op (unsigned long arg1)		\
	{						\
		PARAVIRT_BR1_RET(op, type, arg1);	\
		return ia64_intri_res;			\
	}

#define PARAVIRT_DEFINE_CPU_OP2(op, type)		\
	static inline void				\
	paravirt_ ## op (unsigned long arg1,		\
			 unsigned long arg2)		\
	{						\
		PARAVIRT_BR2(op, type, arg1, arg2);	\
	}

PARAVIRT_DEFINE_CPU_OP1_VOID(fc, FC);
PARAVIRT_DEFINE_CPU_OP1_RET(thash, THASH)
PARAVIRT_DEFINE_CPU_OP1_RET(get_cpuid, GET_CPUID)
PARAVIRT_DEFINE_CPU_OP1_RET(get_pmd, GET_PMD)
PARAVIRT_DEFINE_CPU_OP2(ptcga, PTCGA)
PARAVIRT_DEFINE_CPU_OP1_RET(get_rr, GET_RR)
PARAVIRT_DEFINE_CPU_OP2(set_rr, SET_RR)
PARAVIRT_DEFINE_CPU_OP0(ssm_i, SSM_I)
PARAVIRT_DEFINE_CPU_OP0(rsm_i, RSM_I)
PARAVIRT_DEFINE_CPU_OP0_RET(get_psr_i, GET_PSR_I)
PARAVIRT_DEFINE_CPU_OP1(intrin_local_irq_restore, INTRIN_LOCAL_IRQ_RESTORE)

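/*
 * set_rr0_to_rr4 takes five arguments (r8-r11 and r14), so it is
 * written out by hand instead of being generated by the macros above.
 */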
static inline void
paravirt_set_rr0_to_rr4(unsigned long val0, unsigned long val1,
			unsigned long val2, unsigned long val3,
			unsigned long val4)
{
	register unsigned long __val0 asm ("r8") = val0;
	register unsigned long __val1 asm ("r9") = val1;
	register unsigned long __val2 asm ("r10") = val2;
	register unsigned long __val3 asm ("r11") = val3;
	register unsigned long __val4 asm ("r14") = val4;

	register unsigned long ia64_clobber0 asm ("r8");
	register unsigned long ia64_clobber1 asm ("r9");
	register unsigned long ia64_clobber2 asm ("r10");
	register unsigned long ia64_clobber3 asm ("r11");
	register unsigned long ia64_clobber4 asm ("r14");

	asm volatile (paravirt_alt_bundle(__PARAVIRT_BR,
					  PARAVIRT_TYPE(SET_RR0_TO_RR4))
		      : "=r"(ia64_clobber0),
			"=r"(ia64_clobber1),
			"=r"(ia64_clobber2),
			"=r"(ia64_clobber3),
			"=r"(ia64_clobber4)
		      : PARAVIRT_OP(set_rr0_to_rr4),
			"0"(__val0), "1"(__val1), "2"(__val2),
			"3"(__val3), "4"(__val4)
		      : PARAVIRT_CLOBBERS5);
}

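/*
 * For getreg/setreg the register number is folded into the patch type
 * (PARAVIRT_PATCH_TYPE_GETREG/SETREG + reg), so each call site can be
 * patched according to the specific register it touches.
 */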
#define __paravirt_getreg(reg)					\
	({							\
		register unsigned long ia64_intri_res asm ("r8"); \
		register unsigned long __reg asm ("r8") = (reg); \
								\
		asm volatile (paravirt_alt_bundle(__PARAVIRT_BR, \
						  PARAVIRT_TYPE(GETREG) \
						  + (reg))	\
			      : "=r"(ia64_intri_res)		\
			      : PARAVIRT_OP(getreg), "0"(__reg)	\
			      : PARAVIRT_CLOBBERS1);		\
								\
		ia64_intri_res;					\
	})

#define paravirt_setreg(reg, val)				\
	do {							\
		register unsigned long __val asm ("r8") = val;	\
		register unsigned long __reg asm ("r9") = reg;	\
		register unsigned long ia64_clobber1 asm ("r8"); \
		register unsigned long ia64_clobber2 asm ("r9"); \
								\
		asm volatile (paravirt_alt_bundle(__PARAVIRT_BR, \
						  PARAVIRT_TYPE(SETREG) \
						  + (reg))	\
			      : "=r"(ia64_clobber1),		\
				"=r"(ia64_clobber2)		\
			      : PARAVIRT_OP(setreg),		\
				"1"(__reg), "0"(__val)		\
			      : PARAVIRT_CLOBBERS2);		\
	} while (0)

#endif /* ASM_SUPPORTED */
#endif /* CONFIG_PARAVIRT */

#endif /* _ASM_IA64_PARAVIRT_PRIVOP_H */