1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
/*
 * Client-request interface to Valgrind (derived from Valgrind's
 * valgrind.h).  Including this header lets code detect whether it is
 * running under Valgrind and communicate with the tool via "client
 * requests" encoded as magic no-op instruction sequences.
 */
#ifndef __VALGRIND_H
#define __VALGRIND_H

/*
 * Version of the Valgrind distribution this header was taken from.
 * Allows clients to adapt to interface changes between releases.
 */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    16

/* Needed for the variadic function-wrapping machinery further down. */
#include <stdarg.h>
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
/*
 * Platform detection: exactly one PLAT_<arch>_<os> macro is defined for
 * the compilation target.  Everything is #undef'd first so no stale
 * definition (e.g. from the command line) can leak in.  If the target is
 * not recognised, CONFIG_VALGRIND is undefined so that all client
 * requests compile away to their default values.
 */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ is defined for both 32- and 64-bit MinGW. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big-endian ppc64 uses the ELFv1 ABI (function descriptors, TOC in r2). */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little-endian ppc64 uses the ELFv2 ABI. */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* Unsupported platform: disable Valgrind support entirely so that the
   generic no-op definitions below are used instead of inline asm. */
#  undef CONFIG_VALGRIND
#endif
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
/*
 * Statement-form wrappers around VALGRIND_DO_CLIENT_REQUEST_EXPR (which
 * each platform section below defines).  VALGRIND_DO_CLIENT_REQUEST
 * stores the request's result in _zzq_rlval; the _STMT variant discards
 * the result (passing 0 as the default).
 */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#if !IS_ENABLED(CONFIG_VALGRIND)

/*
 * Valgrind support compiled out: every client request evaluates to its
 * default value and generates no code at all.
 */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
168
169#else
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
/*
 * x86 (32-bit), GCC-style inline asm.
 *
 * The "special instruction preamble" rotates %edi by 3+13+29+19 = 64
 * bits, i.e. it leaves %edi unchanged.  On real hardware the whole
 * sequence is a no-op; Valgrind's JIT recognises it and treats the
 * register-to-itself exchange that follows as a client-request marker.
 */
#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
    || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
    || defined(PLAT_x86_solaris)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif
271
272
273
274
/*
 * x86 Win32 with a non-GNU compiler.  Only MSVC's __asm syntax is
 * supported; the magic preamble is the same %edi rotation sequence as
 * the GCC variant above, spelled in MSVC inline-assembly form.
 */
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned int nraddr;
   }
   OrigFn;

#if defined(_MSC_VER)

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     __asm rol edi, 3  __asm rol edi, 13          \
                     __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/*
 * Out-of-line helper because MSVC __asm blocks cannot appear inside a
 * statement expression.  Marshals the six request words into an array,
 * then executes the magic sequence with eax = &args, edx = default;
 * under Valgrind, edx comes back holding the request's result.
 */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Not implementable with MSVC inline asm; expands to a compile error. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm { __SPECIAL_INSTRUCTION_PREAMBLE                        \
            __asm xchg edi,edi                                    \
    }                                                             \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif
345
346
347
/*
 * amd64 (x86-64), GCC-style inline asm.  Same scheme as x86 but with
 * 64-bit rotations of %rdi (3+13+61+51 = 128 bits, so %rdi is
 * preserved) and 64-bit argument/result words.
 */
#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
    || defined(PLAT_amd64_solaris) \
    || (defined(PLAT_amd64_win64) && defined(__GNUC__))

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned long int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    }

#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif

/* amd64 Win64 requires GCC-style asm; nothing else is supported. */
#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

#error Unsupported compiler.

#endif
419
420
421
/*
 * ppc32-linux.  The preamble is a sequence of rlwinm rotations of r0
 * that sum to a full 64-bit-equivalent no-op; the "or N,N,N" that
 * follows selects which client-request operation Valgrind performs.
 */
#if defined(PLAT_ppc32_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({         unsigned int  _zzq_args[6];                          \
             unsigned int  _zzq_result;                           \
             unsigned int* _zzq_ptr;                              \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif
488
489
490
/*
 * ppc64be-linux (ELFv1 ABI).  Like ppc32 but with 64-bit rotldi no-ops.
 * OrigFn also carries r2 (the TOC pointer) because ELFv1 function
 * descriptors require the callee's TOC to be restored on redirection.
 */
#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr;
      unsigned long int r2;  /* wrapped function's TOC pointer */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 (the TOC) */      \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif
567
/*
 * ppc64le-linux (ELFv2 ABI).  Identical encoding to ppc64be, except the
 * no-redirect branch target register is R12 (ELFv2's global entry-point
 * convention) instead of R11.
 */
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr;
      unsigned long int r2;  /* wrapped function's TOC pointer */
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({         unsigned long int  _zzq_args[6];                     \
             unsigned long int  _zzq_result;                      \
             unsigned long int* _zzq_ptr;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 (the TOC) */      \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif
644
645
646
/*
 * arm-linux (AArch32).  The preamble rotates r12 by 3+13+29+19 = 64
 * bits (a no-op on real hardware); the "orr rN, rN, rN" that follows
 * selects the client-request operation.
 */
#if defined(PLAT_arm_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int  _zzq_args[6];                          \
    volatile unsigned int  _zzq_result;                           \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif
712
713
714
/*
 * arm64-linux (AArch64).  The preamble rotates x12 by 3+13+51+61 = 128
 * bits, leaving it unchanged; "orr xN, xN, xN" selects the operation.
 */
#if defined(PLAT_arm64_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned long int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int  _zzq_args[6];                     \
    volatile unsigned long int  _zzq_result;                      \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif
781
782
783
/*
 * s390x-linux.  The preamble is four register-to-self loads ("lr N,N"),
 * harmless on real hardware; the specific "lr" that follows it encodes
 * which client-request operation Valgrind should perform.
 */
#if defined(PLAT_s390x_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned long int nraddr;
   }
   OrigFn;

/* Four no-op loads forming the magic marker sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* Operation selectors that follow the preamble. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long int _zzq_args[6];                      \
   volatile unsigned long int _zzq_result;                       \
   _zzq_args[0] = (unsigned long int)(_zzq_request);             \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
   __asm__ volatile(/* r2 = address of the argument block */     \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default value */                     \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long int __addr;                            \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif
858
859
860
/*
 * mips32-linux.  The preamble is four shifts of $zero (architectural
 * no-ops); the "or $N, $N, $N" that follows selects the operation.
 */
#if defined(PLAT_mips32_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE          \
                     "srl $0, $0, 13\n\t"       \
                     "srl $0, $0, 29\n\t"       \
                     "srl $0, $0, 3\n\t"        \
                     "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = client_request ( $12 ) */           \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t"     /*result*/            \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
        __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE           \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir *%t9 */                     \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)


#endif
931
932
933
/*
 * mips64-linux.  Same scheme as mips32 but the preamble uses 64-bit
 * dsll shifts of $zero and the argument/result words are 64-bit.
 */
#if defined(PLAT_mips64_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned long nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"            \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
  __extension__                                                     \
  ({ volatile unsigned long int _zzq_args[6];                       \
    volatile unsigned long int _zzq_result;                         \
    _zzq_args[0] = (unsigned long int)(_zzq_request);               \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                  \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                  \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                  \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                  \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                  \
        __asm__ volatile("move $11, %1\n\t" /*default*/             \
                         "move $12, %2\n\t" /*ptr*/                 \
                         __SPECIAL_INSTRUCTION_PREAMBLE             \
                         /* $11 = client_request ( $12 ) */         \
                         "or $13, $13, $13\n\t"                     \
                         "move %0, $11\n\t"     /*result*/          \
                         : "=r" (_zzq_result)                       \
                         : "r" (_zzq_default), "r" (&_zzq_args[0])  \
                         : "$11", "$12", "memory");                 \
    _zzq_result;                                                    \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                     \
    volatile unsigned long int __addr;                              \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* $11 = guest_NRADDR */                       \
                     "or $14, $14, $14\n\t"                         \
                     "move %0, $11"     /*result*/                  \
                     : "=r" (__addr)                                \
                     :                                              \
                     : "$11");                                      \
    _zzq_orig->nraddr = __addr;                                     \
  }

#define VALGRIND_CALL_NOREDIR_T9                                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE                 \
                     /* call-noredir $25 (t9) */                    \
                     "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif
1000
/*
 * nanomips-linux.  The preamble is four 32-bit shifts of $zero (no-ops
 * on real hardware); "or[32] $tN, $tN, $tN" selects the operation.
 */
#if defined(PLAT_nanomips_linux)

/* Records the original (non-redirected) entry address of a wrapped fn. */
typedef
   struct {
      unsigned int nraddr;
   }
   OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3 \n\t"  \
                                       "srl[32] $zero, $zero, 19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $a7, %1\n\t" /* default */             \
                     "move $t0, %2\n\t" /* ptr */                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = client_request( $t0 ) */            \
                     "or[32] $t0, $t0, $t0\n\t"                   \
                     "move %0, $a7\n\t"     /* result */          \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$a7", "$t0", "memory");                   \
    _zzq_result;                                                  \
  })

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = guest_NRADDR */                     \
                     "or[32] $t1, $t1, $t1\n\t"                   \
                     "move %0, $a7"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$a7");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }

#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or[32] $t2, $t2, $t2\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or[32] $t3, $t3, $t3\n\t"                   \
                    );                                            \
 } while (0)

#endif
1070
1071
1072#endif
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
/* Paste four tokens into one identifier. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/*
 * Produce the magic symbol names Valgrind's core scans for when
 * deciding which functions to wrap.  The "ZU"/"ZZ" infix says whether
 * soname/fnname are given literally (ZU) or Z-encoded (ZZ); "_vgw" is
 * the wrap prefix, "00000" a format version.
 */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/*
 * Obtain the original (pre-redirection) address of the function being
 * wrapped, for use with the CALL_FN_* macros below.  Usable only inside
 * a wrapper created with I_WRAP_SONAME_FNNAME_*.
 */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/*
 * As above but for function *replacement* ("_vgr" prefix): the named
 * function is replaced outright rather than wrapped.
 */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1131
1132
1133
1134
1135#define CALL_FN_v_v(fnptr) \
1136 do { volatile unsigned long _junk; \
1137 CALL_FN_W_v(_junk,fnptr); } while (0)
1138
1139#define CALL_FN_v_W(fnptr, arg1) \
1140 do { volatile unsigned long _junk; \
1141 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1142
1143#define CALL_FN_v_WW(fnptr, arg1,arg2) \
1144 do { volatile unsigned long _junk; \
1145 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1146
1147#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1148 do { volatile unsigned long _junk; \
1149 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1150
1151#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1152 do { volatile unsigned long _junk; \
1153 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1154
1155#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1156 do { volatile unsigned long _junk; \
1157 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1158
1159#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1160 do { volatile unsigned long _junk; \
1161 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1162
1163#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1164 do { volatile unsigned long _junk; \
1165 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1166
1167
1168
/*
 * x86 support for calling the original function from inside a wrapper
 * without being re-redirected.
 */
#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
    || defined(PLAT_x86_solaris)

/* Registers the callee may trash besides eax (the return register). */
#define __CALLER_SAVED_REGS "ecx", "edx"

/*
 * Align esp down to a 16-byte boundary before pushing arguments,
 * saving the old esp in edi; every CALL_FN_* below therefore lists
 * edi as clobbered and restores esp from it afterwards.  The "subl"
 * padding in each macro keeps the final esp 16-byte aligned at the
 * point of the call, as the Darwin/modern SysV ABIs require.
 */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1186
1187
1188
1189
1190#define CALL_FN_W_v(lval, orig) \
1191 do { \
1192 volatile OrigFn _orig = (orig); \
1193 volatile unsigned long _argvec[1]; \
1194 volatile unsigned long _res; \
1195 _argvec[0] = (unsigned long)_orig.nraddr; \
1196 __asm__ volatile( \
1197 VALGRIND_ALIGN_STACK \
1198 "movl (%%eax), %%eax\n\t" \
1199 VALGRIND_CALL_NOREDIR_EAX \
1200 VALGRIND_RESTORE_STACK \
1201 : "=a" (_res) \
1202 : "a" (&_argvec[0]) \
1203 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1204 ); \
1205 lval = (__typeof__(lval)) _res; \
1206 } while (0)
1207
1208#define CALL_FN_W_W(lval, orig, arg1) \
1209 do { \
1210 volatile OrigFn _orig = (orig); \
1211 volatile unsigned long _argvec[2]; \
1212 volatile unsigned long _res; \
1213 _argvec[0] = (unsigned long)_orig.nraddr; \
1214 _argvec[1] = (unsigned long)(arg1); \
1215 __asm__ volatile( \
1216 VALGRIND_ALIGN_STACK \
1217 "subl $12, %%esp\n\t" \
1218 "pushl 4(%%eax)\n\t" \
1219 "movl (%%eax), %%eax\n\t" \
1220 VALGRIND_CALL_NOREDIR_EAX \
1221 VALGRIND_RESTORE_STACK \
1222 : "=a" (_res) \
1223 : "a" (&_argvec[0]) \
1224 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1225 ); \
1226 lval = (__typeof__(lval)) _res; \
1227 } while (0)
1228
1229#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1230 do { \
1231 volatile OrigFn _orig = (orig); \
1232 volatile unsigned long _argvec[3]; \
1233 volatile unsigned long _res; \
1234 _argvec[0] = (unsigned long)_orig.nraddr; \
1235 _argvec[1] = (unsigned long)(arg1); \
1236 _argvec[2] = (unsigned long)(arg2); \
1237 __asm__ volatile( \
1238 VALGRIND_ALIGN_STACK \
1239 "subl $8, %%esp\n\t" \
1240 "pushl 8(%%eax)\n\t" \
1241 "pushl 4(%%eax)\n\t" \
1242 "movl (%%eax), %%eax\n\t" \
1243 VALGRIND_CALL_NOREDIR_EAX \
1244 VALGRIND_RESTORE_STACK \
1245 : "=a" (_res) \
1246 : "a" (&_argvec[0]) \
1247 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1248 ); \
1249 lval = (__typeof__(lval)) _res; \
1250 } while (0)
1251
1252#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1253 do { \
1254 volatile OrigFn _orig = (orig); \
1255 volatile unsigned long _argvec[4]; \
1256 volatile unsigned long _res; \
1257 _argvec[0] = (unsigned long)_orig.nraddr; \
1258 _argvec[1] = (unsigned long)(arg1); \
1259 _argvec[2] = (unsigned long)(arg2); \
1260 _argvec[3] = (unsigned long)(arg3); \
1261 __asm__ volatile( \
1262 VALGRIND_ALIGN_STACK \
1263 "subl $4, %%esp\n\t" \
1264 "pushl 12(%%eax)\n\t" \
1265 "pushl 8(%%eax)\n\t" \
1266 "pushl 4(%%eax)\n\t" \
1267 "movl (%%eax), %%eax\n\t" \
1268 VALGRIND_CALL_NOREDIR_EAX \
1269 VALGRIND_RESTORE_STACK \
1270 : "=a" (_res) \
1271 : "a" (&_argvec[0]) \
1272 : "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1273 ); \
1274 lval = (__typeof__(lval)) _res; \
1275 } while (0)
1276
/* x86 (cdecl): 4-arg call, same scheme as CALL_FN_W_WWW.  No pad
   needed: the 16 bytes of pushed args already keep ESP 16-aligned.
   Result in EAX. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1302
/* x86 (cdecl): 5-arg call, same scheme as CALL_FN_W_WWW.
   "subl $12" pads 20 bytes of args up to 32, keeping ESP 16-aligned
   at the call.  Result in EAX. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1331
/* x86 (cdecl): 6-arg call, same scheme as CALL_FN_W_WWW.
   "subl $8" pads 24 bytes of args up to 32 for 16-alignment.
   Result in EAX. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1362
/* x86 (cdecl): 7-arg call, same scheme as CALL_FN_W_WWW.
   "subl $4" pads 28 bytes of args up to 32 for 16-alignment.
   Result in EAX. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1396
/* x86 (cdecl): 8-arg call, same scheme as CALL_FN_W_WWW.  No pad
   needed: 32 bytes of args is already a 16-byte multiple.
   Result in EAX. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1431
/* x86 (cdecl): 9-arg call, same scheme as CALL_FN_W_WWW.
   "subl $12" pads 36 bytes of args up to 48 for 16-alignment.
   Result in EAX. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1469
/* x86 (cdecl): 10-arg call, same scheme as CALL_FN_W_WWW.
   "subl $8" pads 40 bytes of args up to 48 for 16-alignment.
   Result in EAX. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1509
/* x86 (cdecl): 11-arg call, same scheme as CALL_FN_W_WWW.
   "subl $4" pads 44 bytes of args up to 48 for 16-alignment.
   Result in EAX. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1552
/* x86 (cdecl): 12-arg call, same scheme as CALL_FN_W_WWW.  No pad
   needed: 48 bytes of args is already a 16-byte multiple.
   Result in EAX. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"                                \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "edi"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1596
1597#endif
1598
1599
1600
1601#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1602 || defined(PLAT_amd64_solaris)
1603
1604
1605
1606
/* amd64: integer registers the callee may freely trash under the
   System V AMD64 calling convention, listed as asm clobbers by the
   CALL_FN_* macros below.  RAX is omitted because it is the "=a"
   output operand of those asms. */
#define __CALLER_SAVED_REGS "rcx", "rdx", "rsi",                  \
                            "rdi", "r8", "r9", "r10", "r11"
1609
1610
1611
1612
1613
1614
1615
1616
1617
1618
1619
1620
1621
1622
1623
1624
1625
1626
1627
1628
1629
1630
1631
1632
1633
1634
1635
1636
1637
1638
1639
1640
1641
1642
1643
1644
1645
1646
1647
1648
1649
1650
1651
1652
1653
1654
1655
1656
1657
1658
1659
1660
1661
1662
1663
/* amd64 DWARF CFI support.  When the assembler understands .cfi_*
   directives, __FRAME_POINTER feeds the caller's CFA
   (__builtin_dwarf_cfa()) into the asm as an extra input (%2 below).
   The prologue parks RBP in R15, points RBP at that CFA, and emits
   CFI describing it so a stack unwind can walk through the wrapper
   call; the epilogue restores RBP and the remembered CFI state.
   R15 is therefore clobbered by every CALL_FN_* that uses these.
   Without CFI-capable assembly all three expand to nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1680
1681
1682
1683
1684
1685
/* amd64: save RSP in R14, then round RSP down to a 16-byte boundary
   as the ABI requires at a call; RESTORE puts the saved value back.
   This is why every amd64 CALL_FN_* lists "r14" as a clobber. */
#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
1709
1710
1711
1712
1713
1714
1715
/* amd64 (SysV): call a 0-arg function returning a word.
   _argvec[0] holds the target address.  "subq $128" steps over the
   ABI red zone so the callee cannot trample data the caller may
   have spilled there; the target is loaded into RAX and called via
   the no-redirect sequence; result returns in RAX.  R14 (saved RSP,
   VALGRIND_ALIGN_STACK) and R15 (saved RBP, CFI prologue) are
   clobbered. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1736
/* amd64 (SysV): 1-arg call, same scheme as CALL_FN_W_v; arg1 goes
   in RDI per the ABI.  Result in RAX. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1759
/* amd64 (SysV): 2-arg call, same scheme as CALL_FN_W_v; args in
   RDI, RSI.  Result in RAX. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1784
/* amd64 (SysV): 3-arg call, same scheme as CALL_FN_W_v; args in
   RDI, RSI, RDX.  Result in RAX. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1811
/* amd64 (SysV): 4-arg call, same scheme as CALL_FN_W_v; args in
   RDI, RSI, RDX, RCX.  Result in RAX. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1840
/* amd64 (SysV): 5-arg call, same scheme as CALL_FN_W_v; args in
   RDI, RSI, RDX, RCX, R8.  Result in RAX. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1871
/* amd64 (SysV): 6-arg call, same scheme as CALL_FN_W_v; args fill
   all six ABI argument registers RDI,RSI,RDX,RCX,R8,R9.
   Result in RAX. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1904
/* amd64 (SysV): 7-arg call; first six args go in registers, arg7
   is pushed on the stack.  "subq $136" = 128 (red zone) + 8 so that
   the single 8-byte push leaves RSP 16-aligned at the call.
   Result in RAX. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1940
/* amd64 (SysV): 8-arg call; args 7-8 are pushed on the stack.
   "subq $128" (red zone) plus two 8-byte pushes keeps RSP
   16-aligned at the call.  Result in RAX. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1978
/* amd64 (SysV): 9-arg call; args 7-9 are pushed on the stack.
   "subq $136" = red zone + 8 pad so three 8-byte pushes keep RSP
   16-aligned at the call.  Result in RAX. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2018
/* amd64 (SysV): 10-arg call; args 7-10 are pushed on the stack.
   "subq $128" (red zone) plus four 8-byte pushes keeps RSP
   16-aligned at the call.  Result in RAX. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2060
/* amd64 (SysV): 11-arg call; args 7-11 are pushed on the stack.
   "subq $136" = red zone + 8 pad so five 8-byte pushes keep RSP
   16-aligned at the call.  Result in RAX. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2104
/* amd64 (SysV): 12-arg call; args 7-12 are pushed on the stack.
   "subq $128" (red zone) plus six 8-byte pushes keeps RSP
   16-aligned at the call.  Result in RAX. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"                                \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : "=a" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2150
2151#endif
2152
2153
2154
2155#if defined(PLAT_ppc32_linux)
2156
2157
2158
2159
2160
2161
2162
2163
2164
2165
2166
2167
2168
2169
2170
2171
2172
2173
2174
2175
2176
2177
2178
2179
2180
/* ppc32: registers the callee may trash (volatile per the 32-bit
   PowerPC SysV ABI), listed as asm clobbers by the CALL_FN_*
   macros below: link/count/fixed-point-exception registers, all
   condition-register fields, and the volatile GPRs. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2186
2187
2188
2189
2190
2191
/* ppc32: save the stack pointer (r1) in r28, then clear its low
   four bits (rlwinm mask 0..27) to round down to a 16-byte
   boundary; RESTORE copies r28 back.  This is why every ppc32
   CALL_FN_* lists "r28" as a clobber. */
#define VALGRIND_ALIGN_STACK                                      \
      "mr 28,1\n\t"                                               \
      "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "mr 1,28\n\t"
2197
2198
2199
2200
/* ppc32: call a 0-arg function returning a word.  r11 is pointed
   at _argvec, the target address is loaded from slot 0 into r11,
   and the call goes through the branch-and-link no-redirect
   sequence; the result is copied from r3 into the output. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2220
/* ppc32: 1-arg call, same scheme as CALL_FN_W_v; arg1 is loaded
   into r3 (first ABI argument register).  The target address is
   loaded into r11 last, after the argument loads, since the loads
   use r11 as the base.  Result in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2242
/* ppc32: 2-arg call, same scheme as CALL_FN_W_v; args go in
   r3, r4.  Result in r3. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2266
/* ppc32: 3-arg call, same scheme as CALL_FN_W_v; args go in
   r3-r5.  Result in r3. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2292
/* ppc32: 4-arg call, same scheme as CALL_FN_W_v; args go in
   r3-r6.  Result in r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2320
/* ppc32: 5-arg call, same scheme as CALL_FN_W_v; args go in
   r3-r7.  Result in r3. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2350
/* ppc32: 6-arg call, same scheme as CALL_FN_W_v; args go in
   r3-r8.  Result in r3. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2382
/* ppc32-linux: seven word args, loaded into r3..r9 from
   _argvec[1..7] (word offsets 4..28); still entirely in registers. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2417
/* ppc32-linux: eight word args, loaded into r3..r10 from
   _argvec[1..8] (word offsets 4..32) — the full register argument
   set; the last case with no stack spill. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t"                                      \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2454
/* ppc32-linux: nine word args.  Args 1-8 go in r3..r10 as before;
   the ninth must go on the stack, so the asm drops SP by 16 (keeping
   16-byte alignment) and stores arg9 (offset 36 in _argvec) at 8(1)
   — the first parameter word above the back-chain/LR words of the
   fresh frame per the ppc32 SVR4 ABI.  VALGRIND_RESTORE_STACK
   discards the temporary frame afterwards. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t"                                      \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2497
/* ppc32-linux: ten word args.  Args 1-8 in r3..r10; arg9 and arg10
   are spilled to the 16-byte temporary frame at 8(1) and 12(1)
   respectively (stack args are stored highest-first so r3 can be
   reused as scratch before the real args are loaded). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t"                                      \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2544
/* ppc32-linux: eleven word args.  Three stack args now need room, so
   SP drops by 32 (rounded up from 3 words to keep 16-byte stack
   alignment); arg9/arg10/arg11 are stored at 8(1)/12(1)/16(1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t"                                      \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2595
/* ppc32-linux: twelve word args.  SP drops by 32; the four stack
   args arg9..arg12 are stored at 8(1)..20(1), highest-index first. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,     \
                                  arg12)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"                                        \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"                                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t"                                      \
         "lwz 11,0(11)\n\t"                                       \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2650
2651#endif
2652
2653
2654
2655#if defined(PLAT_ppc64be_linux)
2656
2657
2658
2659
/* ppc64be-linux: registers the called function may trash (declared
   as clobbers in every CALL_FN_* asm below): link register, count
   register, XER, all condition-register fields, and the volatile
   GPRs. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"

/* Save SP in r28 (hence "r28" in every clobber list below) and
   round it down to a 16-byte boundary; RESTORE puts the saved value
   back.  rldicr 1,1,0,59 clears the low 4 bits of r1. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2676
2677
2678
2679
/* ppc64be-linux: call a 0-argument function.  _argvec layout: [1] =
   callee TOC pointer (_orig.r2), [2] = target address; [0] is a
   spare slot the asm uses to save the caller's TOC.  %1 points at
   &_argvec[2], so -16(11) is slot [0], -8(11) is slot [1], 0(11)
   is the target, and 8(11) onward are the args.  The asm saves the
   caller's r2, installs the callee TOC, calls via r11, then restores
   r2 before returning the result from r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2705
/* ppc64be-linux: one doubleword arg, loaded into r3 from 8(11)
   (i.e. _argvec[3]); otherwise identical TOC save/restore dance to
   CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2733
/* ppc64be-linux: two doubleword args -> r3, r4 from offsets 8, 16
   relative to &_argvec[2]. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2763
/* ppc64be-linux: three doubleword args -> r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2795
/* ppc64be-linux: four doubleword args -> r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2829
/* ppc64be-linux: five doubleword args -> r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2865
/* ppc64be-linux: six doubleword args -> r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2903
/* ppc64be-linux: seven doubleword args -> r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2944
/* ppc64be-linux: eight doubleword args -> r3..r10, the full
   register argument set; last case with no stack spill. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2987
/* ppc64be-linux: nine doubleword args.  Args 1-8 in r3..r10; the
   asm drops SP by 128 and stores arg9 (offset 72 from &_argvec[2])
   at 112(1) — the ninth doubleword parameter slot, since the ELFv1
   parameter save area starts at SP+48.  The TOC save/restore is as
   in the smaller variants. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3036
/* ppc64be-linux: ten doubleword args; arg9/arg10 spilled to the
   parameter save area at 112(1)/120(1), highest-index first so r3
   can be reused as scratch before the register args are loaded. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3089
/* ppc64be-linux: eleven doubleword args; SP drops by 144 and
   arg9..arg11 are spilled to 112(1)..128(1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld  3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3146
/* ppc64be-linux: twelve doubleword args; SP drops by 144 and
   arg9..arg12 are spilled to 112(1)..136(1). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,     \
                                  arg12)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld  3,96(11)\n\t"                                       \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld  3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3207
3208#endif
3209
3210
3211#if defined(PLAT_ppc64le_linux)
3212
3213
3214
3215
/* ppc64le-linux: registers the called function may trash (declared
   as clobbers in every CALL_FN_* asm below). */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"

/* Save SP in r28 (hence "r28" in every clobber list below) and
   round it down to a 16-byte boundary; RESTORE puts the saved value
   back.  rldicr 1,1,0,59 clears the low 4 bits of r1. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
3232
3233
3234
3235
/* ppc64le-linux: call a 0-argument function.  Same _argvec layout
   as the BE variant ([0] = TOC save slot, [1] = callee TOC, [2] =
   target address, %1 = &_argvec[2]), but the target address and the
   no-redirect branch use r12 rather than r11 — under the little-
   endian (ELFv2) convention the function entry address is expected
   in r12. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3261
/* ppc64le-linux: one doubleword arg -> r3 from 8(12). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3289
/* ppc64le-linux: two doubleword args -> r3, r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(12)\n\t" /* arg2->r4 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3319
/* ppc64le-linux: three doubleword args -> r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(12)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(12)\n\t" /* arg3->r5 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3351
/* ppc64le-linux: four doubleword args -> r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(12)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(12)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(12)\n\t" /* arg4->r6 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3385
/* ppc64le-linux: five doubleword args -> r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(12)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(12)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(12)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(12)\n\t" /* arg5->r7 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3421
/* ppc64le-linux: six doubleword args -> r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(12)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(12)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(12)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(12)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(12)\n\t" /* arg6->r8 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3459
/* ppc64le-linux: seven doubleword args -> r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds the caller's TOC during the call */     \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld   2,-8(12)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(12)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(12)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(12)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(12)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(12)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(12)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(12)\n\t" /* arg7->r9 */                      \
         "ld  12, 0(12)\n\t"  /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : "=r" (_res)                                            \
         : "r" (&_argvec[2])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3500
/* ppc64le (ELFv2): call an 8-arg word-returning function through the
   Valgrind no-redirect gate.  Args 1-8 go in r3-r10 (all argument
   registers used), the target address in r12; r2 is switched to the
   callee's TOC pointer for the call and restored afterwards. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3+8];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds the current r2 across the call;        \
         [1] = callee TOC, [2] = target addr, [2+i] = arg i. */  \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"       /* r12 = &_argvec[2] */            \
         "std 2,-16(12)\n\t"  /* save tocptr in _argvec[0] */    \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */          \
         "ld 3, 8(12)\n\t"    /* arg1 -> r3 */                   \
         "ld 4, 16(12)\n\t"   /* arg2 -> r4 */                   \
         "ld 5, 24(12)\n\t"   /* arg3 -> r5 */                   \
         "ld 6, 32(12)\n\t"   /* arg4 -> r6 */                   \
         "ld 7, 40(12)\n\t"   /* arg5 -> r7 */                   \
         "ld 8, 48(12)\n\t"   /* arg6 -> r8 */                   \
         "ld 9, 56(12)\n\t"   /* arg7 -> r9 */                   \
         "ld 10, 64(12)\n\t"  /* arg8 -> r10 */                  \
         "ld 12, 0(12)\n\t"   /* target -> r12 */                \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"        /* result r3 -> _res */            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */               \
         VALGRIND_RESTORE_STACK                                  \
         : "=r" (_res)                                           \
         : "r" (&_argvec[2])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3543
/* ppc64le (ELFv2): call a 9-arg word-returning function.  Args 1-8
   go in r3-r10; arg9 is copied into the callee's stack parameter
   area (a 128-byte frame is opened with addi below).  The target
   address goes in r12 and r2 is switched to the callee's TOC. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3+9];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds the current r2 across the call;        \
         [1] = callee TOC, [2] = target addr, [2+i] = arg i. */  \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"       /* r12 = &_argvec[2] */            \
         "std 2,-16(12)\n\t"  /* save tocptr in _argvec[0] */    \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */          \
         "addi 1,1,-128\n\t"  /* expand stack frame */           \
         /* arg9 -> callee's stack parameter area */             \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args 1-8 -> r3-r10 */                                \
         "ld 3, 8(12)\n\t"                                       \
         "ld 4, 16(12)\n\t"                                      \
         "ld 5, 24(12)\n\t"                                      \
         "ld 6, 32(12)\n\t"                                      \
         "ld 7, 40(12)\n\t"                                      \
         "ld 8, 48(12)\n\t"                                      \
         "ld 9, 56(12)\n\t"                                      \
         "ld 10, 64(12)\n\t"                                     \
         "ld 12, 0(12)\n\t"   /* target -> r12 */                \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"        /* result r3 -> _res */            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */               \
         VALGRIND_RESTORE_STACK                                  \
         : "=r" (_res)                                           \
         : "r" (&_argvec[2])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3592
/* ppc64le (ELFv2): call a 10-arg word-returning function.  Args 1-8
   go in r3-r10; args 9-10 are copied into the callee's stack
   parameter area (128-byte frame opened with addi below).  The
   target address goes in r12 and r2 is switched to the callee's
   TOC. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3+10];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds the current r2 across the call;        \
         [1] = callee TOC, [2] = target addr, [2+i] = arg i. */  \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"       /* r12 = &_argvec[2] */            \
         "std 2,-16(12)\n\t"  /* save tocptr in _argvec[0] */    \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */          \
         "addi 1,1,-128\n\t"  /* expand stack frame */           \
         /* arg10 -> callee's stack parameter area */            \
         "ld 3,80(12)\n\t"                                       \
         "std 3,104(1)\n\t"                                      \
         /* arg9 -> callee's stack parameter area */             \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args 1-8 -> r3-r10 */                                \
         "ld 3, 8(12)\n\t"                                       \
         "ld 4, 16(12)\n\t"                                      \
         "ld 5, 24(12)\n\t"                                      \
         "ld 6, 32(12)\n\t"                                      \
         "ld 7, 40(12)\n\t"                                      \
         "ld 8, 48(12)\n\t"                                      \
         "ld 9, 56(12)\n\t"                                      \
         "ld 10, 64(12)\n\t"                                     \
         "ld 12, 0(12)\n\t"   /* target -> r12 */                \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"        /* result r3 -> _res */            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */               \
         VALGRIND_RESTORE_STACK                                  \
         : "=r" (_res)                                           \
         : "r" (&_argvec[2])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3645
/* ppc64le (ELFv2): call an 11-arg word-returning function.  Args
   1-8 go in r3-r10; args 9-11 are copied into the callee's stack
   parameter area (144-byte frame opened with addi below).  The
   target address goes in r12 and r2 is switched to the callee's
   TOC. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3+11];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds the current r2 across the call;        \
         [1] = callee TOC, [2] = target addr, [2+i] = arg i. */  \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      _argvec[2+11] = (unsigned long)arg11;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"       /* r12 = &_argvec[2] */            \
         "std 2,-16(12)\n\t"  /* save tocptr in _argvec[0] */    \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */          \
         "addi 1,1,-144\n\t"  /* expand stack frame */           \
         /* arg11 -> callee's stack parameter area */            \
         "ld 3,88(12)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* arg10 -> callee's stack parameter area */            \
         "ld 3,80(12)\n\t"                                       \
         "std 3,104(1)\n\t"                                      \
         /* arg9 -> callee's stack parameter area */             \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args 1-8 -> r3-r10 */                                \
         "ld 3, 8(12)\n\t"                                       \
         "ld 4, 16(12)\n\t"                                      \
         "ld 5, 24(12)\n\t"                                      \
         "ld 6, 32(12)\n\t"                                      \
         "ld 7, 40(12)\n\t"                                      \
         "ld 8, 48(12)\n\t"                                      \
         "ld 9, 56(12)\n\t"                                      \
         "ld 10, 64(12)\n\t"                                     \
         "ld 12, 0(12)\n\t"   /* target -> r12 */                \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"        /* result r3 -> _res */            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */               \
         VALGRIND_RESTORE_STACK                                  \
         : "=r" (_res)                                           \
         : "r" (&_argvec[2])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3702
/* ppc64le (ELFv2): call a 12-arg word-returning function.  Args
   1-8 go in r3-r10; args 9-12 are copied into the callee's stack
   parameter area (144-byte frame opened with addi below).  The
   target address goes in r12 and r2 is switched to the callee's
   TOC. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3+12];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds the current r2 across the call;        \
         [1] = callee TOC, [2] = target addr, [2+i] = arg i. */  \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      _argvec[2+11] = (unsigned long)arg11;                      \
      _argvec[2+12] = (unsigned long)arg12;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"       /* r12 = &_argvec[2] */            \
         "std 2,-16(12)\n\t"  /* save tocptr in _argvec[0] */    \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */          \
         "addi 1,1,-144\n\t"  /* expand stack frame */           \
         /* arg12 -> callee's stack parameter area */            \
         "ld 3,96(12)\n\t"                                       \
         "std 3,120(1)\n\t"                                      \
         /* arg11 -> callee's stack parameter area */            \
         "ld 3,88(12)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* arg10 -> callee's stack parameter area */            \
         "ld 3,80(12)\n\t"                                       \
         "std 3,104(1)\n\t"                                      \
         /* arg9 -> callee's stack parameter area */             \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args 1-8 -> r3-r10 */                                \
         "ld 3, 8(12)\n\t"                                       \
         "ld 4, 16(12)\n\t"                                      \
         "ld 5, 24(12)\n\t"                                      \
         "ld 6, 32(12)\n\t"                                      \
         "ld 7, 40(12)\n\t"                                      \
         "ld 8, 48(12)\n\t"                                      \
         "ld 9, 56(12)\n\t"                                      \
         "ld 10, 64(12)\n\t"                                     \
         "ld 12, 0(12)\n\t"   /* target -> r12 */                \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"        /* result r3 -> _res */            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */               \
         VALGRIND_RESTORE_STACK                                  \
         : "=r" (_res)                                           \
         : "r" (&_argvec[2])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r28"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3763
3764#endif
3765
3766
3767
3768#if defined(PLAT_arm_linux)
3769
3770
/* Registers trashed by an arm-linux call through these wrappers:
   r0-r3 (AAPCS argument/scratch), r12 (ip) and r14 (lr).  r4 is
   included because the CALL_FN_* macros below use it to carry the
   target address. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3772
3773
3774
3775
3776
3777
3778
3779
3780
3781
3782
3783
3784
3785
/* Save the original sp in r10 (restored afterwards) and round sp
   down to an 8-byte boundary, as required before pushing outgoing
   stack arguments. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4,  sp\n\t" \
   "bic r4,  r4, #7\n\t" \
   "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp,  r10\n\t"
3793
3794
3795
3796
/* arm: call a 0-arg word-returning function.  The target address is
   loaded into r4 for the no-redirect branch; the result comes back
   in r0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r4, [%1] \n\t"  /* target -> r4 */                 \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3815
/* arm: call a 1-arg word-returning function.  arg1 goes in r0 per
   the AAPCS; the target address goes in r4; the result comes back
   in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"  /* arg1 -> r0 */               \
         "ldr r4, [%1] \n\t"      /* target -> r4 */             \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3836
/* arm: call a 2-arg word-returning function.  args 1-2 go in r0-r1
   per the AAPCS; the target address goes in r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"  /* arg1 -> r0 */               \
         "ldr r1, [%1, #8] \n\t"  /* arg2 -> r1 */               \
         "ldr r4, [%1] \n\t"      /* target -> r4 */             \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3859
/* arm: call a 3-arg word-returning function.  args 1-3 go in r0-r2
   per the AAPCS; the target address goes in r4. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3884
/* arm: call a 4-arg word-returning function.  args 1-4 fill all
   four AAPCS argument registers r0-r3; the target address goes in
   r4. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3911
/* arm: call a 5-arg word-returning function.  args 1-4 go in r0-r3,
   arg5 is pushed on the stack; the extra "sub sp, #4" pads the
   single pushed word so sp stays 8-byte aligned.  Target in r4. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"     /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "push {r0} \n\t"          /* arg5 -> stack */           \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3942
/* arm: call a 6-arg word-returning function.  args 1-4 go in r0-r3,
   args 5-6 are pushed on the stack (an even count, so sp stays
   8-byte aligned with no padding).  Target in r4. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "ldr r1, [%1, #24] \n\t"  /* arg6 */                    \
         "push {r0, r1} \n\t"      /* args 5-6 -> stack */       \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3974
/* arm: call a 7-arg word-returning function.  args 1-4 go in r0-r3,
   args 5-7 are pushed on the stack with 4 pad bytes ("sub sp, #4")
   to keep sp 8-byte aligned.  Target in r4. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"     /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "ldr r1, [%1, #24] \n\t"  /* arg6 */                    \
         "ldr r2, [%1, #28] \n\t"  /* arg7 */                    \
         "push {r0, r1, r2} \n\t"  /* args 5-7 -> stack */       \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4010
/* arm: call an 8-arg word-returning function.  args 1-4 go in
   r0-r3, args 5-8 are pushed on the stack (an even count, so sp
   stays 8-byte aligned).  Target in r4. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #20] \n\t"      /* arg5 */                \
         "ldr r1, [%1, #24] \n\t"      /* arg6 */                \
         "ldr r2, [%1, #28] \n\t"      /* arg7 */                \
         "ldr r3, [%1, #32] \n\t"      /* arg8 */                \
         "push {r0, r1, r2, r3} \n\t"  /* args 5-8 -> stack */   \
         "ldr r0, [%1, #4] \n\t"       /* arg1 -> r0 */          \
         "ldr r1, [%1, #8] \n\t"       /* arg2 -> r1 */          \
         "ldr r2, [%1, #12] \n\t"      /* arg3 -> r2 */          \
         "ldr r3, [%1, #16] \n\t"      /* arg4 -> r3 */          \
         "ldr r4, [%1] \n\t"           /* target -> r4 */        \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4047
/* arm: call a 9-arg word-returning function.  args 1-4 go in r0-r3,
   args 5-9 are pushed on the stack with 4 pad bytes ("sub sp, #4")
   to keep sp 8-byte aligned.  Target in r4. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"   /* pad to keep sp 8-aligned */  \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "ldr r1, [%1, #24] \n\t"  /* arg6 */                    \
         "ldr r2, [%1, #28] \n\t"  /* arg7 */                    \
         "ldr r3, [%1, #32] \n\t"  /* arg8 */                    \
         "ldr r4, [%1, #36] \n\t"  /* arg9 */                    \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args 5-9 */        \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4087
/* arm: call a 10-arg word-returning function.  args 1-4 go in
   r0-r3, args 5-10 are pushed on the stack (six words, an even
   count, so sp stays 8-byte aligned).  arg10 is pushed first so it
   ends up highest in memory.  Target in r4. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #40] \n\t"  /* arg10 */                   \
         "push {r0} \n\t"          /* arg10 -> stack */          \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "ldr r1, [%1, #24] \n\t"  /* arg6 */                    \
         "ldr r2, [%1, #28] \n\t"  /* arg7 */                    \
         "ldr r3, [%1, #32] \n\t"  /* arg8 */                    \
         "ldr r4, [%1, #36] \n\t"  /* arg9 */                    \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args 5-9 */        \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4129
/* arm: call an 11-arg word-returning function.  args 1-4 go in
   r0-r3, args 5-11 are pushed on the stack with 4 pad bytes
   ("sub sp, #4") to keep sp 8-byte aligned; args 10-11 are pushed
   first so they end up highest in memory.  Target in r4. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #4 \n\t"   /* pad to keep sp 8-aligned */  \
         "ldr r0, [%1, #40] \n\t"  /* arg10 */                   \
         "ldr r1, [%1, #44] \n\t"  /* arg11 */                   \
         "push {r0, r1} \n\t"      /* args 10-11 -> stack */     \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "ldr r1, [%1, #24] \n\t"  /* arg6 */                    \
         "ldr r2, [%1, #28] \n\t"  /* arg7 */                    \
         "ldr r3, [%1, #32] \n\t"  /* arg8 */                    \
         "ldr r4, [%1, #36] \n\t"  /* arg9 */                    \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args 5-9 */        \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4175
/* arm: call a 12-arg word-returning function.  args 1-4 go in
   r0-r3, args 5-12 are pushed on the stack (eight words, an even
   count, so sp stays 8-byte aligned); args 10-12 are pushed first
   so they end up highest in memory.  Target in r4. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr r0, [%1, #40] \n\t"  /* arg10 */                   \
         "ldr r1, [%1, #44] \n\t"  /* arg11 */                   \
         "ldr r2, [%1, #48] \n\t"  /* arg12 */                   \
         "push {r0, r1, r2} \n\t"  /* args 10-12 -> stack */     \
         "ldr r0, [%1, #20] \n\t"  /* arg5 */                    \
         "ldr r1, [%1, #24] \n\t"  /* arg6 */                    \
         "ldr r2, [%1, #28] \n\t"  /* arg7 */                    \
         "ldr r3, [%1, #32] \n\t"  /* arg8 */                    \
         "ldr r4, [%1, #36] \n\t"  /* arg9 */                    \
         "push {r0, r1, r2, r3, r4} \n\t"  /* args 5-9 */        \
         "ldr r0, [%1, #4] \n\t"   /* arg1 -> r0 */              \
         "ldr r1, [%1, #8] \n\t"   /* arg2 -> r1 */              \
         "ldr r2, [%1, #12] \n\t"  /* arg3 -> r2 */              \
         "ldr r3, [%1, #16] \n\t"  /* arg4 -> r3 */              \
         "ldr r4, [%1] \n\t"       /* target -> r4 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, r0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "r10"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4222
4223#endif
4224
4225
4226
4227#if defined(PLAT_arm64_linux)
4228
4229
/* Registers trashed by an arm64-linux call through these wrappers:
   the AAPCS64 argument/scratch registers x0-x17, x18 (platform
   reg), x30 (lr) and all SIMD registers v0-v31.  NOTE(review):
   x19/x20/x21 are normally callee-saved; x19-x20 are listed here
   (and x21 added per-macro below) presumably because Valgrind's
   no-redirect trampoline may use them -- confirm against the
   VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 definition. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"
4238
4239
4240
/* Save the original sp in x21 (restored afterwards) and round sp
   down to a 16-byte boundary, as AArch64 requires at a public
   interface. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp,  x21\n\t"
4246
4247
4248
4249
/* arm64: call a 0-arg word-returning function.  The target address
   is loaded into x8 for the no-redirect branch; the result comes
   back in x0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x8, [%1] \n\t"  /* target -> x8 */                 \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4268
/* arm64: call a 1-arg word-returning function.  arg1 goes in x0 per
   AAPCS64; the target address goes in x8. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"  /* arg1 -> x0 */               \
         "ldr x8, [%1] \n\t"      /* target -> x8 */             \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4289
/* arm64: call a 2-arg word-returning function.  args 1-2 go in
   x0-x1 per AAPCS64; the target address goes in x8. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4312
/* arm64: call a 3-arg word-returning function.  args 1-3 go in
   x0-x2 per AAPCS64; the target address goes in x8. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x2, [%1, #24] \n\t"  /* arg3 -> x2 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0\n"                                          \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4337
/* arm64: call a 4-arg word-returning function.  args 1-4 go in
   x0-x3 per AAPCS64; the target address goes in x8. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x2, [%1, #24] \n\t"  /* arg3 -> x2 */              \
         "ldr x3, [%1, #32] \n\t"  /* arg4 -> x3 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4364
/* arm64: call a 5-arg word-returning function.  args 1-5 go in
   x0-x4 per AAPCS64; the target address goes in x8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x2, [%1, #24] \n\t"  /* arg3 -> x2 */              \
         "ldr x3, [%1, #32] \n\t"  /* arg4 -> x3 */              \
         "ldr x4, [%1, #40] \n\t"  /* arg5 -> x4 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4393
/* arm64: call a 6-arg word-returning function.  args 1-6 go in
   x0-x5 per AAPCS64; the target address goes in x8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x2, [%1, #24] \n\t"  /* arg3 -> x2 */              \
         "ldr x3, [%1, #32] \n\t"  /* arg4 -> x3 */              \
         "ldr x4, [%1, #40] \n\t"  /* arg5 -> x4 */              \
         "ldr x5, [%1, #48] \n\t"  /* arg6 -> x5 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4424
/* arm64: call a 7-arg word-returning function.  args 1-7 go in
   x0-x6 per AAPCS64; the target address goes in x8. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x2, [%1, #24] \n\t"  /* arg3 -> x2 */              \
         "ldr x3, [%1, #32] \n\t"  /* arg4 -> x3 */              \
         "ldr x4, [%1, #40] \n\t"  /* arg5 -> x4 */              \
         "ldr x5, [%1, #48] \n\t"  /* arg6 -> x5 */              \
         "ldr x6, [%1, #56] \n\t"  /* arg7 -> x6 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4458
/* arm64: call an 8-arg word-returning function.  args 1-8 fill all
   eight AAPCS64 argument registers x0-x7; the target address goes
   in x8. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "ldr x0, [%1, #8] \n\t"   /* arg1 -> x0 */              \
         "ldr x1, [%1, #16] \n\t"  /* arg2 -> x1 */              \
         "ldr x2, [%1, #24] \n\t"  /* arg3 -> x2 */              \
         "ldr x3, [%1, #32] \n\t"  /* arg4 -> x3 */              \
         "ldr x4, [%1, #40] \n\t"  /* arg5 -> x4 */              \
         "ldr x5, [%1, #48] \n\t"  /* arg6 -> x5 */              \
         "ldr x6, [%1, #56] \n\t"  /* arg7 -> x6 */              \
         "ldr x7, [%1, #64] \n\t"  /* arg8 -> x7 */              \
         "ldr x8, [%1] \n\t"       /* target -> x8 */            \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : "=r" (_res)                                           \
         : "0" (&_argvec[0])                                     \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"            \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4494
/* arm64-linux: 9-argument call; args 1..8 in x0..x7, arg 9 passed on
   the stack.  The "sub sp, sp, #0x20" reserves 32 bytes (keeps the
   16-byte sp alignment required by AAPCS64); VALGRIND_RESTORE_STACK
   undoes the adjustment afterwards. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 -> stack */            \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target -> x8 */                  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4534
/* arm64-linux: 10-argument call; args 1..8 in x0..x7, args 9..10 in
   the 32-byte stack area at [sp, #0] and [sp, #8] (x8 is reused as a
   scratch register before finally receiving the target address). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 -> stack */            \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 -> stack */           \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target -> x8 */                  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4577
/* arm64-linux: 11-argument call; args 1..8 in x0..x7, args 9..11 in a
   48-byte (16-byte aligned) stack area at [sp, #0..#16]. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 -> stack */            \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 -> stack */           \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"  /* arg11 -> stack */           \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target -> x8 */                  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4623
/* arm64-linux: 12-argument call; args 1..8 in x0..x7, args 9..12 in a
   48-byte stack area at [sp, #0..#24]. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"  /* arg9 -> stack */            \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"  /* arg10 -> stack */           \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"  /* arg11 -> stack */           \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"  /* arg12 -> stack */           \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target -> x8 */                  \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "cc", "memory", __CALLER_SAVED_REGS, "x21"             \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4673
4674#endif
4675
4676
4677
4678#if defined(PLAT_s390x_linux)
4679
4680
4681
4682
4683
/* Helpers shared by the s390x CALL_FN_* macros below.  When the
   compiler emits DWARF CFI for inline asm, the prologue copies the
   argvec pointer into r1, saves the frame pointer r11 into r7,
   installs the caller's CFA (passed via __FRAME_POINTER as operand %2)
   in r11, and informs the unwinder; the epilogue restores r11 and the
   remembered CFI state.  Without CFI support only the r1 setup is
   needed and __FRAME_POINTER expands to nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      ".cfi_remember_state\n\t"                                   \
      "lgr 1,%1\n\t"   /* argvec ptr -> r1 */                     \
      "lgr 7,11\n\t"   /* save frame pointer r11 in r7 */         \
      "lgr 11,%2\n\t"  /* caller's CFA -> r11 */                  \
      ".cfi_def_cfa r11, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "lgr 11, 7\n\t"  /* restore r11 */                          \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE                                   \
      "lgr 1,%1\n\t"
#  define VALGRIND_CFI_EPILOGUE
#endif
4702
4703
4704
4705
4706
4707
4708
4709
4710
4711
/* s390x: registers the CALL_FN_* asm below may trash -- the
   call-clobbered GPRs r0-r5 and r14 plus, when the vector facility is
   compiled in, the whole vector register file; otherwise the
   call-clobbered FPRs f0-f7. */
#if defined(__VX__) || defined(__S390_VX__)
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14",   \
      "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7",             \
      "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15",       \
      "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23",     \
      "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14",   \
      "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
4722
4723
4724
4725
4726
4727
4728
4729
4730
4731
/* s390x-linux: zero-argument call, bypassing redirection.  r1 is set
   up by VALGRIND_CFI_PROLOGUE to point at _argvec, then reloaded with
   the target address for VALGRIND_CALL_NOREDIR_R1; the "aghi 15,-160"
   allocates the callee's register save area (160 bytes per the s390x
   ELF ABI -- NOTE(review): confirm).  Result is returned in r2.  r7
   is clobbered because the CFI prologue saves r11 there. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 1, 0(1)\n\t"  /* target -> r1 */                     \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "d" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"7"                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4752
4753
/* s390x-linux: 1-argument call; arg1 -> r2, target -> r1, result in
   r2.  r7 is clobbered by the CFI prologue's save of r11. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"7"                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4776
/* s390x-linux: 2-argument call; args -> r2,r3, target -> r1, result
   in r2. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"7"                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4801
/* s390x-linux: 3-argument call; args -> r2..r4, target -> r1, result
   in r2. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)               \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"7"                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4828
/* s390x-linux: 4-argument call; args -> r2..r5, target -> r1, result
   in r2. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"7"                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4857
/* s390x-linux: 5-argument call; args -> r2..r6.  r6 is additionally
   listed as clobbered from here on because it is used as the fifth
   parameter register. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4888
/* s390x-linux: 6-argument call; args 1..5 -> r2..r6, arg6 copied via
   mvc into the stack parameter area at 160(r15) (the frame is grown
   by 8 bytes to 168 to make room). */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-168\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,168\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4922
/* s390x-linux: 7-argument call; args 1..5 -> r2..r6, args 6..7 to the
   stack parameter area at 160(r15) and 168(r15). */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7)                                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-176\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,176\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4958
/* s390x-linux: 8-argument call; args 1..5 -> r2..r6, args 6..8 to the
   stack parameter area at 160..176(r15). */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7 ,arg8)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-184\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,184\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4996
/* s390x-linux: 9-argument call; args 1..5 -> r2..r6, args 6..9 to the
   stack parameter area at 160..184(r15). */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7 ,arg8, arg9)                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-192\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,192\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5036
/* s390x-linux: 10-argument call; args 1..5 -> r2..r6, args 6..10 to
   the stack parameter area at 160..192(r15). */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10)              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-200\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,200\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5078
/* s390x-linux: 11-argument call; args 1..5 -> r2..r6, args 6..11 to
   the stack parameter area at 160..200(r15). */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-208\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "mvc 200(8,15), 88(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,208\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5122
/* s390x-linux: 12-argument call; args 1..5 -> r2..r6, args 6..12 to
   the stack parameter area at 160..208(r15). */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-216\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "mvc 200(8,15), 88(1)\n\t"                               \
         "mvc 208(8,15), 96(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "aghi 15,216\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         "lgr %0, 2\n\t"                                          \
         : "=d" (_res)                                            \
         : "a" (&_argvec[0]) __FRAME_POINTER                      \
         : "cc", "memory", __CALLER_SAVED_REGS,"6","7"            \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5168
5169
5170#endif
5171
5172
5173
5174#if defined(PLAT_mips32_linux)
5175
5176
/* mips32 (o32): registers the CALL_FN_* asm below may trash -- the
   call-clobbered set $2-$15, $24, $25 and the return address $31. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5180
5181
5182
5183
/* mips32-linux: zero-argument call, bypassing redirection.  Saves gp
   ($28) and ra ($31) on the stack, lowers sp by 16 for the o32
   outgoing-argument area, loads the target into $25 (t9, the PIC call
   convention register) for VALGRIND_CALL_NOREDIR_T9, then restores
   gp/ra and takes the result from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16\n\t"                                  \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5208
/* mips32-linux: 1-argument call; arg1 -> $4 (a0), target -> $25 (t9),
   result from $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5235
/* mips32-linux: 2-argument call; args -> $4,$5 (a0,a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5264
/* mips32-linux: 3-argument call; args -> $4..$6 (a0..a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5295
/* mips32-linux: 4-argument call; args fill all four o32 argument
   registers $4..$7 (a0..a3). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5328
/* mips32-linux: 5-argument call; args 1..4 -> $4..$7, arg5 stored to
   the o32 outgoing-argument slot at 16($29) ($4 is used as a scratch
   to stage the stack argument before being reloaded with arg1). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 -> stack */                \
         "subu $29, $29, 24\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 24 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* mips32-linux: 6-argument call; args 1..4 -> $4..$7, args 5..6 to
   the outgoing-argument slots at 16($29) and 20($29). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 -> stack */                \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 -> stack */                \
         "nop\n\t"                                                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5403
/* mips32-linux: 7-argument call; args 1..4 -> $4..$7, args 5..7 to
   the outgoing-argument slots at 16..24($29). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 -> stack */                \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 -> stack */                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 -> stack */                \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5446
/* mips32-linux: 8-argument call; args 1..4 -> $4..$7, args 5..8 to
   the outgoing-argument slots at 16..28($29). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 -> stack */                \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 -> stack */                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 -> stack */                \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 -> stack */                \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5492
/* mips32-linux: 9-argument call; args 1..4 -> $4..$7, args 5..9 to
   the outgoing-argument slots at 16..32($29). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"  /* arg5 -> stack */                \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"  /* arg6 -> stack */                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"  /* arg7 -> stack */                \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"  /* arg8 -> stack */                \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"  /* arg9 -> stack */                \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target -> t9 */                 \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5541
5542#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5543 arg7,arg8,arg9,arg10) \
5544 do { \
5545 volatile OrigFn _orig = (orig); \
5546 volatile unsigned long _argvec[11]; \
5547 volatile unsigned long _res; \
5548 _argvec[0] = (unsigned long)_orig.nraddr; \
5549 _argvec[1] = (unsigned long)(arg1); \
5550 _argvec[2] = (unsigned long)(arg2); \
5551 _argvec[3] = (unsigned long)(arg3); \
5552 _argvec[4] = (unsigned long)(arg4); \
5553 _argvec[5] = (unsigned long)(arg5); \
5554 _argvec[6] = (unsigned long)(arg6); \
5555 _argvec[7] = (unsigned long)(arg7); \
5556 _argvec[8] = (unsigned long)(arg8); \
5557 _argvec[9] = (unsigned long)(arg9); \
5558 _argvec[10] = (unsigned long)(arg10); \
5559 __asm__ volatile( \
5560 "subu $29, $29, 8 \n\t" \
5561 "sw $28, 0($29) \n\t" \
5562 "sw $31, 4($29) \n\t" \
5563 "lw $4, 20(%1) \n\t" \
5564 "subu $29, $29, 48\n\t" \
5565 "sw $4, 16($29) \n\t" \
5566 "lw $4, 24(%1) \n\t" \
5567 "sw $4, 20($29) \n\t" \
5568 "lw $4, 28(%1) \n\t" \
5569 "sw $4, 24($29) \n\t" \
5570 "lw $4, 32(%1) \n\t" \
5571 "sw $4, 28($29) \n\t" \
5572 "lw $4, 36(%1) \n\t" \
5573 "sw $4, 32($29) \n\t" \
5574 "lw $4, 40(%1) \n\t" \
5575 "sw $4, 36($29) \n\t" \
5576 "lw $4, 4(%1) \n\t" \
5577 "lw $5, 8(%1) \n\t" \
5578 "lw $6, 12(%1) \n\t" \
5579 "lw $7, 16(%1) \n\t" \
5580 "lw $25, 0(%1) \n\t" \
5581 VALGRIND_CALL_NOREDIR_T9 \
5582 "addu $29, $29, 48 \n\t" \
5583 "lw $28, 0($29) \n\t" \
5584 "lw $31, 4($29) \n\t" \
5585 "addu $29, $29, 8 \n\t" \
5586 "move %0, $2\n" \
5587 : "=r" (_res) \
5588 : "0" (&_argvec[0]) \
5589 : "memory", __CALLER_SAVED_REGS \
5590 ); \
5591 lval = (__typeof__(lval)) _res; \
5592 } while (0)
5593
5594#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5595 arg6,arg7,arg8,arg9,arg10, \
5596 arg11) \
5597 do { \
5598 volatile OrigFn _orig = (orig); \
5599 volatile unsigned long _argvec[12]; \
5600 volatile unsigned long _res; \
5601 _argvec[0] = (unsigned long)_orig.nraddr; \
5602 _argvec[1] = (unsigned long)(arg1); \
5603 _argvec[2] = (unsigned long)(arg2); \
5604 _argvec[3] = (unsigned long)(arg3); \
5605 _argvec[4] = (unsigned long)(arg4); \
5606 _argvec[5] = (unsigned long)(arg5); \
5607 _argvec[6] = (unsigned long)(arg6); \
5608 _argvec[7] = (unsigned long)(arg7); \
5609 _argvec[8] = (unsigned long)(arg8); \
5610 _argvec[9] = (unsigned long)(arg9); \
5611 _argvec[10] = (unsigned long)(arg10); \
5612 _argvec[11] = (unsigned long)(arg11); \
5613 __asm__ volatile( \
5614 "subu $29, $29, 8 \n\t" \
5615 "sw $28, 0($29) \n\t" \
5616 "sw $31, 4($29) \n\t" \
5617 "lw $4, 20(%1) \n\t" \
5618 "subu $29, $29, 48\n\t" \
5619 "sw $4, 16($29) \n\t" \
5620 "lw $4, 24(%1) \n\t" \
5621 "sw $4, 20($29) \n\t" \
5622 "lw $4, 28(%1) \n\t" \
5623 "sw $4, 24($29) \n\t" \
5624 "lw $4, 32(%1) \n\t" \
5625 "sw $4, 28($29) \n\t" \
5626 "lw $4, 36(%1) \n\t" \
5627 "sw $4, 32($29) \n\t" \
5628 "lw $4, 40(%1) \n\t" \
5629 "sw $4, 36($29) \n\t" \
5630 "lw $4, 44(%1) \n\t" \
5631 "sw $4, 40($29) \n\t" \
5632 "lw $4, 4(%1) \n\t" \
5633 "lw $5, 8(%1) \n\t" \
5634 "lw $6, 12(%1) \n\t" \
5635 "lw $7, 16(%1) \n\t" \
5636 "lw $25, 0(%1) \n\t" \
5637 VALGRIND_CALL_NOREDIR_T9 \
5638 "addu $29, $29, 48 \n\t" \
5639 "lw $28, 0($29) \n\t" \
5640 "lw $31, 4($29) \n\t" \
5641 "addu $29, $29, 8 \n\t" \
5642 "move %0, $2\n" \
5643 : "=r" (_res) \
5644 : "0" (&_argvec[0]) \
5645 : "memory", __CALLER_SAVED_REGS \
5646 ); \
5647 lval = (__typeof__(lval)) _res; \
5648 } while (0)
5649
5650#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5651 arg6,arg7,arg8,arg9,arg10, \
5652 arg11,arg12) \
5653 do { \
5654 volatile OrigFn _orig = (orig); \
5655 volatile unsigned long _argvec[13]; \
5656 volatile unsigned long _res; \
5657 _argvec[0] = (unsigned long)_orig.nraddr; \
5658 _argvec[1] = (unsigned long)(arg1); \
5659 _argvec[2] = (unsigned long)(arg2); \
5660 _argvec[3] = (unsigned long)(arg3); \
5661 _argvec[4] = (unsigned long)(arg4); \
5662 _argvec[5] = (unsigned long)(arg5); \
5663 _argvec[6] = (unsigned long)(arg6); \
5664 _argvec[7] = (unsigned long)(arg7); \
5665 _argvec[8] = (unsigned long)(arg8); \
5666 _argvec[9] = (unsigned long)(arg9); \
5667 _argvec[10] = (unsigned long)(arg10); \
5668 _argvec[11] = (unsigned long)(arg11); \
5669 _argvec[12] = (unsigned long)(arg12); \
5670 __asm__ volatile( \
5671 "subu $29, $29, 8 \n\t" \
5672 "sw $28, 0($29) \n\t" \
5673 "sw $31, 4($29) \n\t" \
5674 "lw $4, 20(%1) \n\t" \
5675 "subu $29, $29, 56\n\t" \
5676 "sw $4, 16($29) \n\t" \
5677 "lw $4, 24(%1) \n\t" \
5678 "sw $4, 20($29) \n\t" \
5679 "lw $4, 28(%1) \n\t" \
5680 "sw $4, 24($29) \n\t" \
5681 "lw $4, 32(%1) \n\t" \
5682 "sw $4, 28($29) \n\t" \
5683 "lw $4, 36(%1) \n\t" \
5684 "sw $4, 32($29) \n\t" \
5685 "lw $4, 40(%1) \n\t" \
5686 "sw $4, 36($29) \n\t" \
5687 "lw $4, 44(%1) \n\t" \
5688 "sw $4, 40($29) \n\t" \
5689 "lw $4, 48(%1) \n\t" \
5690 "sw $4, 44($29) \n\t" \
5691 "lw $4, 4(%1) \n\t" \
5692 "lw $5, 8(%1) \n\t" \
5693 "lw $6, 12(%1) \n\t" \
5694 "lw $7, 16(%1) \n\t" \
5695 "lw $25, 0(%1) \n\t" \
5696 VALGRIND_CALL_NOREDIR_T9 \
5697 "addu $29, $29, 56 \n\t" \
5698 "lw $28, 0($29) \n\t" \
5699 "lw $31, 4($29) \n\t" \
5700 "addu $29, $29, 8 \n\t" \
5701 "move %0, $2\n" \
5702 : "=r" (_res) \
5703 : "r" (&_argvec[0]) \
5704 : "memory", __CALLER_SAVED_REGS \
5705 ); \
5706 lval = (__typeof__(lval)) _res; \
5707 } while (0)
5708
5709#endif
5710
5711
5712
5713#if defined(PLAT_nanomips_linux)
5714
5715
/* nanoMIPS: registers the wrapped call may clobber; listed in the
   asm clobber list of every CALL_FN_* macro in this section so the
   compiler keeps no live values in them across the call. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"
5719
5720
5721
5722
/* nanoMIPS: call wrappers for 0..8 word-sized arguments.  All
   arguments fit in registers here: the target address is loaded
   into $t9 from _argvec[0], arguments 1..N into $a0..$a7 from
   _argvec[1..N], the call is made through
   VALGRIND_CALL_NOREDIR_T9, and the result is read back from $a0. */

/* 0 args. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 args. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* 6 args. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: last register-only case ($a0..$a7 all used). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         "lw $a7,32(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5948
/* nanoMIPS: call wrappers for 9..12 arguments.  Args 1-8 go in
   $a0..$a7 as before; args 9..N are spilled to a fixed 16-byte
   stack area ("addiu $sp, $sp, -16") using $t9 as scratch, before
   $t9 is finally loaded with the target address.  The stack area is
   released after the result has been copied out of $a0. */

/* 9 args: arg9 spilled to 0($sp). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10 args: arg9/arg10 spilled to 0($sp)/4($sp). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"                                    \
         "lw $t9,40(%1) \n\t"                                     \
         "sw $t9, 4($sp) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: arg9..arg11 spilled to 0/4/8($sp). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"                                    \
         "lw $t9,40(%1) \n\t"                                     \
         "sw $t9, 4($sp) \n\t"                                    \
         "lw $t9,44(%1) \n\t"                                     \
         "sw $t9, 8($sp) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: arg9..arg12 spilled to 0/4/8/12($sp) -- fills the whole
   16-byte area. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"                                    \
         "lw $t9,40(%1) \n\t"                                     \
         "sw $t9, 4($sp) \n\t"                                    \
         "lw $t9,44(%1) \n\t"                                     \
         "sw $t9, 8($sp) \n\t"                                    \
         "lw $t9,48(%1) \n\t"                                     \
         "sw $t9,12($sp) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6124
6125#endif
6126
6127
6128
6129#if defined(PLAT_mips64_linux)
6130
6131
/* MIPS64: registers the wrapped call may clobber; appears in the
   asm clobber list of every CALL_FN_* macro in this section. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"


/* Widen a value to the 64-bit register representation: cast through
   (long) first so pointer-sized values are sign-extended into the
   long long slot, matching how the hardware holds them. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
6140
/* MIPS64 (n64): call wrappers for 0..8 doubleword arguments.  All
   arguments fit in registers: args 1..N are loaded into $4..$11
   (a0..a7) from _argvec[1..N] (8-byte slots), the target address
   goes into $25 (t9), the call is made via VALGRIND_CALL_NOREDIR_T9,
   and the result is taken from $2 (v0).
   NOTE(review): CALL_FN_W_WW and CALL_FN_W_WWW assign _orig.nraddr
   without MIPS64_LONG2REG_CAST, unlike their siblings; the implicit
   conversion to unsigned long long yields the same stored value --
   confirm before normalizing. */

/* 0 args.  The "0" constraint ties the argvec pointer to %0; safe
   because %0 is only written by the final "move %0, $2". */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[1];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "0" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 1 arg. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[2];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 2 args. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[3];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = _orig.nraddr;                                  \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)


/* 3 args. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[4];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = _orig.nraddr;                                  \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 4 args. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[5];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 5 args. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[6];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 6 args. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[7];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 7 args. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[8];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 8 args: last register-only case ($4..$11 all used). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[9];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6368
/* MIPS64 (n64): call wrappers for 9..12 arguments.  Args 1-8 go in
   $4..$11 as above; args 9..N are copied, via $4 as scratch, into a
   stack area of 8 bytes per extra arg carved out with
   "dsubu $29, ..." and released with the matching "daddu $29, ...". */

/* 9 args: arg9 spilled to 0($29); 8-byte area. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[10];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 10 args: arg9/arg10 spilled; 16-byte area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[11];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 11 args: arg9..arg11 spilled; 24-byte area. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[12];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)

/* 12 args: arg9..arg12 spilled; 32-byte area. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[13];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 32\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 96(%1)\n\t"                                      \
         "sd $4, 24($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 32\n\t"                                 \
         "move %0, $2\n"                                          \
         : "=r" (_res)                                            \
         : "r" (&_argvec[0])                                      \
         : "memory", __CALLER_SAVED_REGS                          \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6544
6545#endif
6546
6547
6548
6549
6550
6551
6552
6553
6554
6555
6556
6557
6558
6559
6560
6561
/* Each Valgrind tool derives its client-request numbers from a base
   built out of two characters (e.g. 'M','C' for memcheck); the two
   characters occupy the top two bytes of the request code. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool whose base characters
   are 'a','b' (only the top 16 bits are compared). */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6566
6567
6568
6569
6570
6571
/* Client-request codes handled by the Valgrind core itself (as opposed
   to tool-specific requests, which use VG_USERREQ_TOOL_BASE ranges). */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  NOTE(review): per the
             Valgrind manual, the first argument passed to the called
             function is the ThreadId of the running thread, so
             CLIENT_CALL0 actually invokes a 1-argument function, etc.
             -- confirm against the installed Valgrind version. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression-testing suites, e.g. to send
             Valgrind's output to /dev/null and still count errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a
             monitor command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change the value of a dynamic
             command-line option. */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* Heap-emulation requests: can be interpreted by any tool
             that tracks malloc() et al. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory-pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to the Valgrind log.  The *_VALIST_BY_REF
             variants pass the va_list by reference (they are the ones
             actually used by VALGRIND_PRINTF below). */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,

          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack registration support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support (PDB debug info). */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single Word
             argument which is the delta applied to this thread's
             error-disablement counter (see the DISABLE/ENABLE macros
             below, which pass 1 and -1). */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection. */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,

          /* Used when self-hosting (running Valgrind on Valgrind):
             tells the outer Valgrind about the inner's threads. */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
6653
/* __extension__ is a GNU keyword (used below to silence pedantic
   warnings); define it away on non-GNU compilers. */
#if !defined(__GNUC__)
#  define __extension__
#endif
6657
6658
6659
6660
6661
6662
/* Returns the number of Valgrind layers this code is running under:
   0 when running natively, 1 under Valgrind, 2 under Valgrind running
   under Valgrind (self-hosting), etc.  The first request argument (0)
   is the value returned when not running under Valgrind. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \


/* Discard translations of code in the address range
   [_qzz_addr, _qzz_addr + _qzz_len).  Needed after the client has
   modified or unmapped code that Valgrind may already have
   translated. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)

/* Self-hosting support: tell the outer Valgrind about the inner
   Valgrind's thread state at _qzz_addr. */
#define VALGRIND_INNER_THREADS(_qzz_addr)                              \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,          \
                                   _qzz_addr, 0, 0, 0, 0)
6680
6681
6682
6683
6684
6685
6686
/* VALGRIND_PRINTF(format, ...): printf-style output to the Valgrind
   log when running under Valgrind; a no-op returning 0 otherwise (and
   also when CONFIG_VALGRIND is compiled out).  Per the Valgrind
   manual, the return value is the number of characters printed --
   confirm against the installed version.
   NOTE(review): in the #if below '&&' binds tighter than '||', so the
   !defined(_MSC_VER) guard applies only to the __INTEL_COMPILER arm;
   this matches upstream valgrind.h and is left as-is. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused, and the
   unused attribute silences warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if !IS_ENABLED(CONFIG_VALGRIND)
   /* Valgrind support compiled out: swallow the format string. */
   (void)format;
   return 0;
#else
#if defined(_MSC_VER) || defined(__MINGW64__)
   /* On Win64, 'unsigned long' is 32-bit; use uintptr_t instead. */
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif
}
6727
/* Like VALGRIND_PRINTF, but also prints a stack backtrace of the
   calling thread after the message.  No-op returning 0 when not
   running under Valgrind or when CONFIG_VALGRIND is compiled out.
   (Same #if precedence quirk as VALGRIND_PRINTF; matches upstream.) */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if !IS_ENABLED(CONFIG_VALGRIND)
   /* Valgrind support compiled out: swallow the format string. */
   (void)format;
   return 0;
#else
#if defined(_MSC_VER) || defined(__MINGW64__)
   /* On Win64, 'unsigned long' is 32-bit; use uintptr_t instead. */
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif
}
6766
6767
6768
6769
6770
6771
6772
6773
6774
6775
6776
6777
6778
6779
6780
6781
6782
6783
6784
6785
6786
6787
6788
6789
6790
/* Run a function of 0..3 word-sized arguments on the real CPU rather
   than the simulated one.  The first request argument (0) is the value
   returned when not running under Valgrind.
   NOTE(review): per the Valgrind manual the core passes the running
   ThreadId as a hidden first argument, so NON_SIMD_CALL0 actually
   invokes a 1-argument function, etc. -- confirm against vg docs. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
6815
6816
6817
6818
6819
/* Returns the number of errors the tool has recorded so far
   (0 when not running under Valgrind, or if the tool does not record
   errors).  Useful in regression suites. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6825
6826
6827
6828
6829
6830
6831
6832
6833
6834
6835
6836
6837
6838
6839
6840
6841
6842
6843
6844
6845
6846
6847
6848
6849
6850
6851
6852
6853
6854
6855
6856
6857
6858
6859
6860
6861
6862
6863
6864
6865
6866
6867
6868
6869
6870
6871
6872
6873
6874
6875
6876
6877
6878
6879
6880
6881
6882
6883
6884
6885
6886
6887
6888
6889
6890
6891
6892
6893
6894
6895
6896
6897
6898
6899
6900
6901
6902
6903
6904
6905
6906
6907
6908
6909
6910
6911
6912
6913
6914
6915
6916
6917
6918
6919
6920
6921
6922
6923
6924
6925
6926
/* Tell a malloc-tracking tool (e.g. memcheck) that a custom allocator
   has handed out a block: addr is the start of the usable region,
   sizeB its size, rzB the allocator's redzone size, and is_zeroed
   non-zero if the block is zero-filled (calloc-like).
   NOTE(review): parameter semantics per the Valgrind manual --
   confirm against the installed version. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* Tell the tool that a MALLOCLIKE block was resized in place from
   oldSizeB to newSizeB bytes (realloc-like, address unchanged). */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* Tell the tool that a MALLOCLIKE block was freed.  rzB should match
   the value passed to VALGRIND_MALLOCLIKE_BLOCK. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)

/* Create a memory pool anchored at 'pool' (see also the _EXT variant
   below, which additionally takes flags). */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, 0, 0)
6949
6950
6951
6952
6953
6954
6955
6956
6957
6958
6959
6960
6961
6962
6963
6964
6965
6966
6967
6968
6969
6970
6971
6972
6973
6974
6975
/* Flags for VALGRIND_CREATE_MEMPOOL_EXT.
   NOTE(review): flag semantics per the Valgrind manual (AUTO_FREE:
   freeing a pool chunk auto-frees the second-level blocks inside it;
   METAPOOL: the pool is a meta-pool backing another allocator) --
   confirm against the installed version. */
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
/* Create a memory pool with the given flags (ORed combination of the
   flags above), specifying extended behaviour. */
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,    \
                                   pool, rzB, is_zeroed, flags, 0)

/* Destroy a previously created memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,   \
                                   pool, 0, 0, 0, 0)

/* Record an allocation of 'size' bytes at 'addr' within the pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,     \
                                   pool, addr, size, 0, 0)

/* Record the freeing of the chunk at 'addr' within the pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,      \
                                   pool, addr, 0, 0, 0)

/* Free all pool chunks lying outside [addr, addr+size). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,      \
                                   pool, addr, size, 0, 0)

/* Move the pool anchor from poolA to poolB. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,      \
                                   poolA, poolB, 0, 0, 0)

/* Record that the chunk previously at addrA has moved to addrB and
   now has the given size. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,    \
                                   pool, addrA, addrB, size, 0)

/* Returns 1 if the pool is known to Valgrind, else 0 (also 0 when not
   running under Valgrind). */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7017
7018
7019
7020
/* Register a new stack covering [start, end).  Returns a stack id
   usable with VALGRIND_STACK_DEREGISTER / VALGRIND_STACK_CHANGE
   (0 when not running under Valgrind). */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Deregister a previously registered stack, identified by its id. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end addresses of a previously registered
   stack, identified by its id. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Wine support: load PDB debug info for a mapped PE image. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)

/* Map a code address to a source file name and line number, writing
   the result into buf64 (a 64-byte buffer).
   NOTE(review): buffer size and return semantics per the Valgrind
   manual -- confirm. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
7052
7053
7054
7055
7056
7057
7058
7059
7060
/* Disable error reporting for this thread: adds 1 (the delta) to the
   thread's error-disablement counter, so calls nest -- each DISABLE
   must be balanced by an ENABLE. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting for this thread: applies a delta of -1 to
   the disablement counter. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)

/* Execute a gdbserver monitor command given as a string.
   NOTE(review): per the Valgrind manual, returns 1 if the command was
   recognised, 0 otherwise (and 0 when not under Valgrind) --
   confirm against the installed version. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND,  \
                                   command, 0, 0, 0, 0)

/* Change the value of a dynamic command-line option; 'option' is an
   option string (see the Valgrind manual for which options may be
   changed at runtime). */
#define VALGRIND_CLO_CHANGE(option)                                     \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,              \
                                   option, 0, 0, 0, 0)
7087
7088
/* Tidy up: undefine every PLAT_* platform-selection macro defined near
   the top of this file, so none of them leak into client code that
   includes this header.  This list must mirror the #undef list at the
   top of the file. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
/* Fix: PLAT_arm64_linux was missing from this cleanup list even though
   it is defined above for __aarch64__ Linux, so it leaked to includers
   on arm64.  Upstream valgrind.h undefines it here. */
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
7105
7106#endif
7107