1 /* SPDX-License-Identifier: GPL-2.0+ AND bzip2-1.0.6 */
3 This file is part of Valgrind, a dynamic binary instrumentation
6 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
7 Copyright (C) 2021 Sean Anderson <seanga2@gmail.com>
10 /* This file is for inclusion into client (your!) code.
12 You can use these macros to manipulate and query Valgrind's
13 execution inside your own programs.
15 The resulting executables will still run without Valgrind, just a
16 little bit more slowly than they otherwise would, but otherwise
17 unchanged. When not running on valgrind, each client request
18 consumes very few (eg. 7) instructions, so the resulting performance
19 loss is negligible unless you plan to execute client requests
20 millions of times per second. Nevertheless, if that is still a
21 problem, you can compile with the NVALGRIND symbol defined (gcc
22 -DNVALGRIND) so that client requests are not even compiled in. */
28 /* ------------------------------------------------------------------ */
29 /* VERSION NUMBER OF VALGRIND */
30 /* ------------------------------------------------------------------ */
32 /* Specify Valgrind's version number, so that user code can
33 conditionally compile based on our version number. Note that these
34 were introduced at version 3.6 and so do not exist in version 3.5
35 or earlier. The recommended way to use them to check for "version
   X.Y or later" is (eg.)
38 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
39 && (__VALGRIND_MAJOR__ > 3 \
40 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
42 #define __VALGRIND_MAJOR__ 3
43 #define __VALGRIND_MINOR__ 16
48 /* Nb: this file might be included in a file compiled with -ansi. So
49 we can't use C++ style "//" comments nor the "asm" keyword (instead we use "__asm__"). */
52 /* Derive some tags indicating what the target platform is. Note
53 that in this file we're using the compiler's CPP symbols for
54 identifying architectures, which are different to the ones we use
55 within the rest of Valgrind. Note, __powerpc__ is active for both
56 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
57 latter (on Linux, that is).
59 Misc note: how to find out what's predefined in gcc by default:
60 gcc -Wp,-dM somefile.c
*/
62 #undef PLAT_x86_darwin
63 #undef PLAT_amd64_darwin
65 #undef PLAT_amd64_win64
67 #undef PLAT_amd64_linux
68 #undef PLAT_ppc32_linux
69 #undef PLAT_ppc64be_linux
70 #undef PLAT_ppc64le_linux
72 #undef PLAT_arm64_linux
73 #undef PLAT_s390x_linux
74 #undef PLAT_mips32_linux
75 #undef PLAT_mips64_linux
76 #undef PLAT_nanomips_linux
77 #undef PLAT_x86_solaris
78 #undef PLAT_amd64_solaris
81 #if defined(__APPLE__) && defined(__i386__)
82 # define PLAT_x86_darwin 1
83 #elif defined(__APPLE__) && defined(__x86_64__)
84 # define PLAT_amd64_darwin 1
85 #elif (defined(__MINGW32__) && defined(__i386__)) \
86 || defined(__CYGWIN32__) \
87 || (defined(_WIN32) && defined(_M_IX86))
88 # define PLAT_x86_win32 1
89 #elif (defined(__MINGW32__) && defined(__x86_64__)) \
90 || (defined(_WIN32) && defined(_M_X64))
91 /* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
92 # define PLAT_amd64_win64 1
93 #elif defined(__linux__) && defined(__i386__)
94 # define PLAT_x86_linux 1
95 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
96 # define PLAT_amd64_linux 1
97 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
98 # define PLAT_ppc32_linux 1
99 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
100 /* Big Endian uses ELF version 1 */
101 # define PLAT_ppc64be_linux 1
102 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
103 /* Little Endian uses ELF version 2 */
104 # define PLAT_ppc64le_linux 1
105 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
106 # define PLAT_arm_linux 1
107 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
108 # define PLAT_arm64_linux 1
109 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
110 # define PLAT_s390x_linux 1
111 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
112 # define PLAT_mips64_linux 1
113 #elif defined(__linux__) && defined(__mips__) && (__mips==32)
114 # define PLAT_mips32_linux 1
115 #elif defined(__linux__) && defined(__nanomips__)
116 # define PLAT_nanomips_linux 1
117 #elif defined(__sun) && defined(__i386__)
118 # define PLAT_x86_solaris 1
119 #elif defined(__sun) && defined(__x86_64__)
120 # define PLAT_amd64_solaris 1
122 /* If we're not compiling for our target platform, don't generate any inline asms. */
124 # if IS_ENABLED(CONFIG_VALGRIND)
125 # error "Unsupported platform for valgrind"
130 /* ------------------------------------------------------------------ */
131 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
132 /* in here of use to end-users -- skip to the next section. */
133 /* ------------------------------------------------------------------ */
136 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
137 * request. Accepts both pointers and integers as arguments.
139 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
140 * client request that does not return a value.
142 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
143 * client request and whose value equals the client request result. Accepts
144 * both pointers and integers as arguments. Note that such calls are not
145 * necessarily pure functions -- they may have side effects.
 */
148 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
149 _zzq_request, _zzq_arg1, _zzq_arg2, \
150 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
151 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
152 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
153 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
155 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
156 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
157 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
158 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
159 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
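/* A minimal usage sketch, purely for illustration: the request code
   VG_USERREQ__EXAMPLE and the variables addr/len are hypothetical and
   are not defined by this header.

     unsigned long ok;

     ok = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__EXAMPLE,
                                          addr, len, 0, 0, 0);
     VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__EXAMPLE,
                                     addr, len, 0, 0, 0);

   When the program runs natively, the EXPR form simply evaluates to
   the default value (0 here) and the STMT form has no effect; under
   Valgrind, the tool handling the request supplies the result. */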
161 #if !IS_ENABLED(CONFIG_VALGRIND)
163 /* Define NVALGRIND to completely remove the Valgrind magic sequence
164 from the compiled code (analogous to NDEBUG's effects on assert()). */
166 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
167 _zzq_default, _zzq_request, \
168 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
171 #else /* ! CONFIG_VALGRIND */
173 /* The following defines the magic code sequences which the JITter
174 spots and handles magically. Don't look too closely at them as
175 they will rot your brain.
177 The assembly code sequences for all architectures are in this one
178 file. This is because this file must be stand-alone, and we don't
179 want to have multiple files.
181 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
182 value gets put in the return slot, so that everything works when
183 this is executed not under Valgrind. Args are passed in a memory
184 block, and so there's no intrinsic limit to the number that could
185 be passed, but it's currently five.
188 _zzq_rlval result lvalue
189 _zzq_default default value (result returned when running on real CPU)
190 _zzq_request request code
191 _zzq_arg1..5 request params
193 The other two macros are used to support function wrapping, and are
194 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
195 guest's NRADDR pseudo-register and whatever other information is
196 needed to safely call the original function from the wrapper: on
197 ppc64-linux, the R2 value at the divert point is also needed. This
198 information is abstracted into a user-visible type, OrigFn.
200 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
201 guest, but guarantees that the branch instruction will not be
202 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
203 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
204 complete inline asm, since it needs to be combined with more magic
205 inline asm stuff to be useful. */
208 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
210 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
211 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
212 || defined(PLAT_x86_solaris)
216 unsigned int nraddr; /* where's the code? */
220 #define __SPECIAL_INSTRUCTION_PREAMBLE \
221 "roll $3, %%edi ; roll $13, %%edi\n\t" \
222 "roll $29, %%edi ; roll $19, %%edi\n\t"
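/* The four rotate amounts sum to 64, so this sequence leaves %edi
   unchanged and is harmless if executed on a real CPU; Valgrind's
   translator merely recognises it as a marker for the special
   instruction that follows. The preambles for the other architectures
   below are built on the same idea. */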
224 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
225 _zzq_default, _zzq_request, \
226 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
228 ({volatile unsigned int _zzq_args[6]; \
229 volatile unsigned int _zzq_result; \
230 _zzq_args[0] = (unsigned int)(_zzq_request); \
231 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
232 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
233 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
234 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
235 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
236 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
237 /* %EDX = client_request ( %EAX ) */ \
238 "xchgl %%ebx,%%ebx" \
239 : "=d" (_zzq_result) \
240 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
246 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
247 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
248 volatile unsigned int __addr; \
249 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
250 /* %EAX = guest_NRADDR */ \
251 "xchgl %%ecx,%%ecx" \
256 _zzq_orig->nraddr = __addr; \
259 #define VALGRIND_CALL_NOREDIR_EAX \
260 __SPECIAL_INSTRUCTION_PREAMBLE \
261 /* call-noredir *%EAX */ \
262 "xchgl %%edx,%%edx\n\t"
264 #define VALGRIND_VEX_INJECT_IR() \
266 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
267 "xchgl %%edi,%%edi\n\t" \
268 : : : "cc", "memory" \
272 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
273 || PLAT_x86_solaris */
275 /* ------------------------- x86-Win32 ------------------------- */
277 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
281 unsigned int nraddr; /* where's the code? */
285 #if defined(_MSC_VER)
287 #define __SPECIAL_INSTRUCTION_PREAMBLE \
288 __asm rol edi, 3 __asm rol edi, 13 \
289 __asm rol edi, 29 __asm rol edi, 19
291 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
292 _zzq_default, _zzq_request, \
293 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
294 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
295 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
296 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
297 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
299 static __inline uintptr_t
300 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
301 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
302 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
305 volatile uintptr_t _zzq_args[6];
306 volatile unsigned int _zzq_result;
307 _zzq_args[0] = (uintptr_t)(_zzq_request);
308 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
309 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
310 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
311 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
312 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
313 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
314 __SPECIAL_INSTRUCTION_PREAMBLE
315 /* %EDX = client_request ( %EAX ) */
317 __asm mov _zzq_result, edx
322 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
323 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
324 volatile unsigned int __addr; \
325 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
326 /* %EAX = guest_NRADDR */ \
328 __asm mov __addr, eax \
330 _zzq_orig->nraddr = __addr; \
333 #define VALGRIND_CALL_NOREDIR_EAX ERROR
335 #define VALGRIND_VEX_INJECT_IR() \
337 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
343 #error Unsupported compiler.
346 #endif /* PLAT_x86_win32 */
348 /* ----------------- amd64-{linux,darwin,solaris} --------------- */
350 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
351 || defined(PLAT_amd64_solaris) \
352 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
356 unsigned long int nraddr; /* where's the code? */
360 #define __SPECIAL_INSTRUCTION_PREAMBLE \
361 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
362 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
364 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
365 _zzq_default, _zzq_request, \
366 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
368 ({ volatile unsigned long int _zzq_args[6]; \
369 volatile unsigned long int _zzq_result; \
370 _zzq_args[0] = (unsigned long int)(_zzq_request); \
371 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
372 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
373 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
374 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
375 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
376 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
377 /* %RDX = client_request ( %RAX ) */ \
378 "xchgq %%rbx,%%rbx" \
379 : "=d" (_zzq_result) \
380 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
386 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
387 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
388 volatile unsigned long int __addr; \
389 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
390 /* %RAX = guest_NRADDR */ \
391 "xchgq %%rcx,%%rcx" \
396 _zzq_orig->nraddr = __addr; \
399 #define VALGRIND_CALL_NOREDIR_RAX \
400 __SPECIAL_INSTRUCTION_PREAMBLE \
401 /* call-noredir *%RAX */ \
402 "xchgq %%rdx,%%rdx\n\t"
404 #define VALGRIND_VEX_INJECT_IR() \
406 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
407 "xchgq %%rdi,%%rdi\n\t" \
408 : : : "cc", "memory" \
412 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
414 /* ------------------------- amd64-Win64 ------------------------- */
416 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
418 #error Unsupported compiler.
420 #endif /* PLAT_amd64_win64 */
422 /* ------------------------ ppc32-linux ------------------------ */
424 #if defined(PLAT_ppc32_linux)
428 unsigned int nraddr; /* where's the code? */
432 #define __SPECIAL_INSTRUCTION_PREAMBLE \
433 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
434 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
436 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
437 _zzq_default, _zzq_request, \
438 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
441 ({ unsigned int _zzq_args[6]; \
442 unsigned int _zzq_result; \
443 unsigned int* _zzq_ptr; \
444 _zzq_args[0] = (unsigned int)(_zzq_request); \
445 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
446 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
447 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
448 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
449 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
450 _zzq_ptr = _zzq_args; \
451 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
452 "mr 4,%2\n\t" /*ptr*/ \
453 __SPECIAL_INSTRUCTION_PREAMBLE \
454 /* %R3 = client_request ( %R4 ) */ \
456 "mr %0,3" /*result*/ \
457 : "=b" (_zzq_result) \
458 : "b" (_zzq_default), "b" (_zzq_ptr) \
459 : "cc", "memory", "r3", "r4"); \
463 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
464 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
465 unsigned int __addr; \
466 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
467 /* %R3 = guest_NRADDR */ \
472 : "cc", "memory", "r3" \
474 _zzq_orig->nraddr = __addr; \
477 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
478 __SPECIAL_INSTRUCTION_PREAMBLE \
479 /* branch-and-link-to-noredir *%R11 */ \
482 #define VALGRIND_VEX_INJECT_IR() \
484 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
489 #endif /* PLAT_ppc32_linux */
491 /* ------------------------ ppc64-linux ------------------------ */
493 #if defined(PLAT_ppc64be_linux)
497 unsigned long int nraddr; /* where's the code? */
498 unsigned long int r2; /* what tocptr do we need? */
502 #define __SPECIAL_INSTRUCTION_PREAMBLE \
503 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
504 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
506 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
507 _zzq_default, _zzq_request, \
508 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
511 ({ unsigned long int _zzq_args[6]; \
512 unsigned long int _zzq_result; \
513 unsigned long int* _zzq_ptr; \
514 _zzq_args[0] = (unsigned long int)(_zzq_request); \
515 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
516 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
517 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
518 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
519 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
520 _zzq_ptr = _zzq_args; \
521 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
522 "mr 4,%2\n\t" /*ptr*/ \
523 __SPECIAL_INSTRUCTION_PREAMBLE \
524 /* %R3 = client_request ( %R4 ) */ \
526 "mr %0,3" /*result*/ \
527 : "=b" (_zzq_result) \
528 : "b" (_zzq_default), "b" (_zzq_ptr) \
529 : "cc", "memory", "r3", "r4"); \
533 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
534 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
535 unsigned long int __addr; \
536 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
537 /* %R3 = guest_NRADDR */ \
542 : "cc", "memory", "r3" \
544 _zzq_orig->nraddr = __addr; \
545 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
546 /* %R3 = guest_NRADDR_GPR2 */ \
551 : "cc", "memory", "r3" \
553 _zzq_orig->r2 = __addr; \
556 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
557 __SPECIAL_INSTRUCTION_PREAMBLE \
558 /* branch-and-link-to-noredir *%R11 */ \
561 #define VALGRIND_VEX_INJECT_IR() \
563 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
568 #endif /* PLAT_ppc64be_linux */
570 #if defined(PLAT_ppc64le_linux)
574 unsigned long int nraddr; /* where's the code? */
575 unsigned long int r2; /* what tocptr do we need? */
579 #define __SPECIAL_INSTRUCTION_PREAMBLE \
580 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
581 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
583 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
584 _zzq_default, _zzq_request, \
585 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
588 ({ unsigned long int _zzq_args[6]; \
589 unsigned long int _zzq_result; \
590 unsigned long int* _zzq_ptr; \
591 _zzq_args[0] = (unsigned long int)(_zzq_request); \
592 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
593 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
594 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
595 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
596 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
597 _zzq_ptr = _zzq_args; \
598 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
599 "mr 4,%2\n\t" /*ptr*/ \
600 __SPECIAL_INSTRUCTION_PREAMBLE \
601 /* %R3 = client_request ( %R4 ) */ \
603 "mr %0,3" /*result*/ \
604 : "=b" (_zzq_result) \
605 : "b" (_zzq_default), "b" (_zzq_ptr) \
606 : "cc", "memory", "r3", "r4"); \
610 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
611 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
612 unsigned long int __addr; \
613 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
614 /* %R3 = guest_NRADDR */ \
619 : "cc", "memory", "r3" \
621 _zzq_orig->nraddr = __addr; \
622 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
623 /* %R3 = guest_NRADDR_GPR2 */ \
628 : "cc", "memory", "r3" \
630 _zzq_orig->r2 = __addr; \
633 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
634 __SPECIAL_INSTRUCTION_PREAMBLE \
635 /* branch-and-link-to-noredir *%R12 */ \
638 #define VALGRIND_VEX_INJECT_IR() \
640 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
645 #endif /* PLAT_ppc64le_linux */
647 /* ------------------------- arm-linux ------------------------- */
649 #if defined(PLAT_arm_linux)
653 unsigned int nraddr; /* where's the code? */
657 #define __SPECIAL_INSTRUCTION_PREAMBLE \
658 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
659 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
661 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
662 _zzq_default, _zzq_request, \
663 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
666 ({volatile unsigned int _zzq_args[6]; \
667 volatile unsigned int _zzq_result; \
668 _zzq_args[0] = (unsigned int)(_zzq_request); \
669 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
670 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
671 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
672 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
673 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
674 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
675 "mov r4, %2\n\t" /*ptr*/ \
676 __SPECIAL_INSTRUCTION_PREAMBLE \
677 /* R3 = client_request ( R4 ) */ \
678 "orr r10, r10, r10\n\t" \
679 "mov %0, r3" /*result*/ \
680 : "=r" (_zzq_result) \
681 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
682 : "cc","memory", "r3", "r4"); \
686 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
687 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
688 unsigned int __addr; \
689 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
690 /* R3 = guest_NRADDR */ \
691 "orr r11, r11, r11\n\t" \
695 : "cc", "memory", "r3" \
697 _zzq_orig->nraddr = __addr; \
700 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
701 __SPECIAL_INSTRUCTION_PREAMBLE \
702 /* branch-and-link-to-noredir *%R4 */ \
703 "orr r12, r12, r12\n\t"
705 #define VALGRIND_VEX_INJECT_IR() \
707 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
708 "orr r9, r9, r9\n\t" \
709 : : : "cc", "memory" \
713 #endif /* PLAT_arm_linux */
715 /* ------------------------ arm64-linux ------------------------- */
717 #if defined(PLAT_arm64_linux)
721 unsigned long int nraddr; /* where's the code? */
725 #define __SPECIAL_INSTRUCTION_PREAMBLE \
726 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
727 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
729 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
730 _zzq_default, _zzq_request, \
731 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
734 ({volatile unsigned long int _zzq_args[6]; \
735 volatile unsigned long int _zzq_result; \
736 _zzq_args[0] = (unsigned long int)(_zzq_request); \
737 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
738 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
739 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
740 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
741 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
742 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
743 "mov x4, %2\n\t" /*ptr*/ \
744 __SPECIAL_INSTRUCTION_PREAMBLE \
745 /* X3 = client_request ( X4 ) */ \
746 "orr x10, x10, x10\n\t" \
747 "mov %0, x3" /*result*/ \
748 : "=r" (_zzq_result) \
749 : "r" ((unsigned long int)(_zzq_default)), \
750 "r" (&_zzq_args[0]) \
751 : "cc","memory", "x3", "x4"); \
755 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
756 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
757 unsigned long int __addr; \
758 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
759 /* X3 = guest_NRADDR */ \
760 "orr x11, x11, x11\n\t" \
764 : "cc", "memory", "x3" \
766 _zzq_orig->nraddr = __addr; \
769 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
770 __SPECIAL_INSTRUCTION_PREAMBLE \
771 /* branch-and-link-to-noredir X8 */ \
772 "orr x12, x12, x12\n\t"
774 #define VALGRIND_VEX_INJECT_IR() \
776 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
777 "orr x9, x9, x9\n\t" \
778 : : : "cc", "memory" \
782 #endif /* PLAT_arm64_linux */
784 /* ------------------------ s390x-linux ------------------------ */
786 #if defined(PLAT_s390x_linux)
790 unsigned long int nraddr; /* where's the code? */
794 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
795 * code. This detection is implemented in platform specific toIR.c
796 * (e.g. VEX/priv/guest_s390_decoder.c). */
798 #define __SPECIAL_INSTRUCTION_PREAMBLE \
804 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
805 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
806 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
807 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
809 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
810 _zzq_default, _zzq_request, \
811 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
813 ({volatile unsigned long int _zzq_args[6]; \
814 volatile unsigned long int _zzq_result; \
815 _zzq_args[0] = (unsigned long int)(_zzq_request); \
816 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
817 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
818 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
819 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
820 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
821 __asm__ volatile(/* r2 = args */ \
825 __SPECIAL_INSTRUCTION_PREAMBLE \
826 __CLIENT_REQUEST_CODE \
829 : "=d" (_zzq_result) \
830 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
831 : "cc", "2", "3", "memory" \
836 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
837 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
838 volatile unsigned long int __addr; \
839 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
840 __GET_NR_CONTEXT_CODE \
844 : "cc", "3", "memory" \
846 _zzq_orig->nraddr = __addr; \
849 #define VALGRIND_CALL_NOREDIR_R1 \
850 __SPECIAL_INSTRUCTION_PREAMBLE \
853 #define VALGRIND_VEX_INJECT_IR() \
855 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
856 __VEX_INJECT_IR_CODE); \
859 #endif /* PLAT_s390x_linux */
861 /* ------------------------- mips32-linux ---------------- */
863 #if defined(PLAT_mips32_linux)
867 unsigned int nraddr; /* where's the code? */
875 #define __SPECIAL_INSTRUCTION_PREAMBLE \
876 "srl $0, $0, 13\n\t" \
877 "srl $0, $0, 29\n\t" \
878 "srl $0, $0, 3\n\t" \
881 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
882 _zzq_default, _zzq_request, \
883 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
885 ({ volatile unsigned int _zzq_args[6]; \
886 volatile unsigned int _zzq_result; \
887 _zzq_args[0] = (unsigned int)(_zzq_request); \
888 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
889 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
890 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
891 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
892 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
893 __asm__ volatile("move $11, %1\n\t" /*default*/ \
894 "move $12, %2\n\t" /*ptr*/ \
895 __SPECIAL_INSTRUCTION_PREAMBLE \
896 /* T3 = client_request ( T4 ) */ \
897 "or $13, $13, $13\n\t" \
898 "move %0, $11\n\t" /*result*/ \
899 : "=r" (_zzq_result) \
900 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
901 : "$11", "$12", "memory"); \
905 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
906 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
907 volatile unsigned int __addr; \
908 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
909 /* %t9 = guest_NRADDR */ \
910 "or $14, $14, $14\n\t" \
911 "move %0, $11" /*result*/ \
916 _zzq_orig->nraddr = __addr; \
919 #define VALGRIND_CALL_NOREDIR_T9 \
920 __SPECIAL_INSTRUCTION_PREAMBLE \
921 /* call-noredir *%t9 */ \
922 "or $15, $15, $15\n\t"
924 #define VALGRIND_VEX_INJECT_IR() \
926 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
927 "or $11, $11, $11\n\t" \
932 #endif /* PLAT_mips32_linux */
934 /* ------------------------- mips64-linux ---------------- */
936 #if defined(PLAT_mips64_linux)
940 unsigned long nraddr; /* where's the code? */
948 #define __SPECIAL_INSTRUCTION_PREAMBLE \
949 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
950 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
952 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
953 _zzq_default, _zzq_request, \
954 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
956 ({ volatile unsigned long int _zzq_args[6]; \
957 volatile unsigned long int _zzq_result; \
958 _zzq_args[0] = (unsigned long int)(_zzq_request); \
959 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
960 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
961 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
962 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
963 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
964 __asm__ volatile("move $11, %1\n\t" /*default*/ \
965 "move $12, %2\n\t" /*ptr*/ \
966 __SPECIAL_INSTRUCTION_PREAMBLE \
967 /* $11 = client_request ( $12 ) */ \
968 "or $13, $13, $13\n\t" \
969 "move %0, $11\n\t" /*result*/ \
970 : "=r" (_zzq_result) \
971 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
972 : "$11", "$12", "memory"); \
976 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
977 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
978 volatile unsigned long int __addr; \
979 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
980 /* $11 = guest_NRADDR */ \
981 "or $14, $14, $14\n\t" \
982 "move %0, $11" /*result*/ \
986 _zzq_orig->nraddr = __addr; \
989 #define VALGRIND_CALL_NOREDIR_T9 \
990 __SPECIAL_INSTRUCTION_PREAMBLE \
991 /* call-noredir $25 */ \
992 "or $15, $15, $15\n\t"
994 #define VALGRIND_VEX_INJECT_IR() \
996 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
997 "or $11, $11, $11\n\t" \
1001 #endif /* PLAT_mips64_linux */
1003 #if defined(PLAT_nanomips_linux)
1007 unsigned int nraddr; /* where's the code? */
/*
1011 8000 c04d srl zero, zero, 13
1012 8000 c05d srl zero, zero, 29
1013 8000 c043 srl zero, zero, 3
1014 8000 c053 srl zero, zero, 19
*/
1017 #define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
1018 "srl[32] $zero, $zero, 29 \n\t" \
1019 "srl[32] $zero, $zero, 3 \n\t" \
1020 "srl[32] $zero, $zero, 19 \n\t"
1022 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1023 _zzq_default, _zzq_request, \
1024 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1026 ({ volatile unsigned int _zzq_args[6]; \
1027 volatile unsigned int _zzq_result; \
1028 _zzq_args[0] = (unsigned int)(_zzq_request); \
1029 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
1030 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
1031 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
1032 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
1033 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
1034 __asm__ volatile("move $a7, %1\n\t" /* default */ \
1035 "move $t0, %2\n\t" /* ptr */ \
1036 __SPECIAL_INSTRUCTION_PREAMBLE \
1037 /* $a7 = client_request( $t0 ) */ \
1038 "or[32] $t0, $t0, $t0\n\t" \
1039 "move %0, $a7\n\t" /* result */ \
1040 : "=r" (_zzq_result) \
1041 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1042 : "$a7", "$t0", "memory"); \
1046 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1047 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1048 volatile unsigned long int __addr; \
1049 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1050 /* $a7 = guest_NRADDR */ \
1051 "or[32] $t1, $t1, $t1\n\t" \
1052 "move %0, $a7" /*result*/ \
1056 _zzq_orig->nraddr = __addr; \
1059 #define VALGRIND_CALL_NOREDIR_T9 \
1060 __SPECIAL_INSTRUCTION_PREAMBLE \
1061 /* call-noredir $25 */ \
1062 "or[32] $t2, $t2, $t2\n\t"
1064 #define VALGRIND_VEX_INJECT_IR() \
1066 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1067 "or[32] $t3, $t3, $t3\n\t" \
1072 /* Insert assembly code for other platforms here... */
1074 #endif /* CONFIG_VALGRIND */
1077 /* ------------------------------------------------------------------ */
1078 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1079 /* ugly. It's the least-worst tradeoff I can think of. */
1080 /* ------------------------------------------------------------------ */
1082 /* This section defines magic (a.k.a. appalling-hack) macros for making
1083 guaranteed-no-redirection calls, so as to get from function
1084 wrappers to the functions they are wrapping. The whole point is to
1085 construct standard call sequences, but to do the call itself with a
1086 special no-redirect call pseudo-instruction that the JIT
1087 understands and handles specially. This section is long and
1088 repetitious, and I can't see a way to make it shorter.
1090 The naming scheme is as follows:
1092 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1094 'W' stands for "word" and 'v' for "void". Hence there are
1095 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1096 and for each, the possibility of returning a word-typed result, or no result. */
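/* For example, CALL_FN_W_WW(ret, fn, a, b) calls the two-argument,
   word-returning function described by fn with arguments (a, b) and
   assigns the result to ret, while CALL_FN_v_W(fn, a) calls a
   one-argument function returning void. */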
1100 /* Use these to write the name of your wrapper. NOTE: duplicates
1101 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1102 the default behaviour equivalence class tag "0000" into the name.
1103 See pub_tool_redir.h for details -- normally you don't need to
1104 think about this, though. */
1106 /* Use an extra level of macroisation so as to ensure the soname/fnname
1107 args are fully macro-expanded before pasting them together. */
1108 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1110 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1111 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1113 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1114 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1116 /* Use this macro from within a wrapper function to collect the
1117 context (address and possibly other info) of the original function.
1118 Once you have that you can then use it in one of the CALL_FN_
1119 macros. The type of the argument _lval is OrigFn. */
1120 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
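/* A wrapping sketch along the lines of the example in the Valgrind
   manual; the target "int foo(int, int)" and the soname "NONE" (used
   for code in the main executable) are illustrative assumptions:

     int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x, int y)
     {
        int    result;
        OrigFn fn;
        VALGRIND_GET_ORIG_FN(fn);
        CALL_FN_W_WW(result, fn, x, y);
        return result;
     }
*/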
1122 /* Also provide end-user facilities for function replacement, rather
1123 than wrapping. A replacement function differs from a wrapper in
1124 that it has no way to get hold of the original function being
1125 called, and hence no way to call onwards to it. In a replacement
1126 function, VALGRIND_GET_ORIG_FN always returns zero. */
1128 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1129 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1131 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1132 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
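/* Sketch of a replacement function, with a hypothetical target
   "int foo(int)" in an object whose soname is "libfoo.so.1" (both
   names are illustrative only):

     int I_REPLACE_SONAME_FNNAME_ZU(libfooZdsoZd1, foo)(int x)
     {
        return x + 1;
     }

   Note the Z-encoding of the soname ('.' is written "Zd"); see
   pub_tool_redir.h for the full encoding rules. */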
1134 /* Derivatives of the main macros below, for calling functions returning void. */
1137 #define CALL_FN_v_v(fnptr) \
1138 do { volatile unsigned long _junk; \
1139 CALL_FN_W_v(_junk,fnptr); } while (0)
1141 #define CALL_FN_v_W(fnptr, arg1) \
1142 do { volatile unsigned long _junk; \
1143 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1145 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1146 do { volatile unsigned long _junk; \
1147 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1149 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1150 do { volatile unsigned long _junk; \
1151 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1153 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1154 do { volatile unsigned long _junk; \
1155 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1157 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1158 do { volatile unsigned long _junk; \
1159 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1161 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1162 do { volatile unsigned long _junk; \
1163 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1165 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1166 do { volatile unsigned long _junk; \
1167 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1169 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1171 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1172 || defined(PLAT_x86_solaris)
1174 /* These regs are trashed by the hidden call. No need to mention eax
1175 as gcc can already see that, and mentioning it causes gcc to bomb. */
1176 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1178 /* Macros to save and align the stack before making a function
1179 call and restore it afterwards as gcc may not keep the stack
1180 pointer aligned if it doesn't realise calls are being made
1181 to other functions. */
1183 #define VALGRIND_ALIGN_STACK \
1184 "movl %%esp,%%edi\n\t" \
1185 "andl $0xfffffff0,%%esp\n\t"
1186 #define VALGRIND_RESTORE_STACK \
1187 "movl %%edi,%%esp\n\t"
1189 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned long) == 4. */
1192 #define CALL_FN_W_v(lval, orig) \
1194 volatile OrigFn _orig = (orig); \
1195 volatile unsigned long _argvec[1]; \
1196 volatile unsigned long _res; \
1197 _argvec[0] = (unsigned long)_orig.nraddr; \
1199 VALGRIND_ALIGN_STACK \
1200 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1201 VALGRIND_CALL_NOREDIR_EAX \
1202 VALGRIND_RESTORE_STACK \
1203 : /*out*/ "=a" (_res) \
1204 : /*in*/ "a" (&_argvec[0]) \
1205 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1207 lval = (__typeof__(lval)) _res; \
1210 #define CALL_FN_W_W(lval, orig, arg1) \
1212 volatile OrigFn _orig = (orig); \
1213 volatile unsigned long _argvec[2]; \
1214 volatile unsigned long _res; \
1215 _argvec[0] = (unsigned long)_orig.nraddr; \
1216 _argvec[1] = (unsigned long)(arg1); \
1218 VALGRIND_ALIGN_STACK \
1219 "subl $12, %%esp\n\t" \
1220 "pushl 4(%%eax)\n\t" \
1221 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1222 VALGRIND_CALL_NOREDIR_EAX \
1223 VALGRIND_RESTORE_STACK \
1224 : /*out*/ "=a" (_res) \
1225 : /*in*/ "a" (&_argvec[0]) \
1226 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1228 lval = (__typeof__(lval)) _res; \
1231 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1233 volatile OrigFn _orig = (orig); \
1234 volatile unsigned long _argvec[3]; \
1235 volatile unsigned long _res; \
1236 _argvec[0] = (unsigned long)_orig.nraddr; \
1237 _argvec[1] = (unsigned long)(arg1); \
1238 _argvec[2] = (unsigned long)(arg2); \
1240 VALGRIND_ALIGN_STACK \
1241 "subl $8, %%esp\n\t" \
1242 "pushl 8(%%eax)\n\t" \
1243 "pushl 4(%%eax)\n\t" \
1244 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1245 VALGRIND_CALL_NOREDIR_EAX \
1246 VALGRIND_RESTORE_STACK \
1247 : /*out*/ "=a" (_res) \
1248 : /*in*/ "a" (&_argvec[0]) \
1249 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1251 lval = (__typeof__(lval)) _res; \
1254 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1256 volatile OrigFn _orig = (orig); \
1257 volatile unsigned long _argvec[4]; \
1258 volatile unsigned long _res; \
1259 _argvec[0] = (unsigned long)_orig.nraddr; \
1260 _argvec[1] = (unsigned long)(arg1); \
1261 _argvec[2] = (unsigned long)(arg2); \
1262 _argvec[3] = (unsigned long)(arg3); \
1264 VALGRIND_ALIGN_STACK \
1265 "subl $4, %%esp\n\t" \
1266 "pushl 12(%%eax)\n\t" \
1267 "pushl 8(%%eax)\n\t" \
1268 "pushl 4(%%eax)\n\t" \
1269 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1270 VALGRIND_CALL_NOREDIR_EAX \
1271 VALGRIND_RESTORE_STACK \
1272 : /*out*/ "=a" (_res) \
1273 : /*in*/ "a" (&_argvec[0]) \
1274 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1276 lval = (__typeof__(lval)) _res; \
1279 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1281 volatile OrigFn _orig = (orig); \
1282 volatile unsigned long _argvec[5]; \
1283 volatile unsigned long _res; \
1284 _argvec[0] = (unsigned long)_orig.nraddr; \
1285 _argvec[1] = (unsigned long)(arg1); \
1286 _argvec[2] = (unsigned long)(arg2); \
1287 _argvec[3] = (unsigned long)(arg3); \
1288 _argvec[4] = (unsigned long)(arg4); \
1290 VALGRIND_ALIGN_STACK \
1291 "pushl 16(%%eax)\n\t" \
1292 "pushl 12(%%eax)\n\t" \
1293 "pushl 8(%%eax)\n\t" \
1294 "pushl 4(%%eax)\n\t" \
1295 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1296 VALGRIND_CALL_NOREDIR_EAX \
1297 VALGRIND_RESTORE_STACK \
1298 : /*out*/ "=a" (_res) \
1299 : /*in*/ "a" (&_argvec[0]) \
1300 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1302 lval = (__typeof__(lval)) _res; \
1305 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1307 volatile OrigFn _orig = (orig); \
1308 volatile unsigned long _argvec[6]; \
1309 volatile unsigned long _res; \
1310 _argvec[0] = (unsigned long)_orig.nraddr; \
1311 _argvec[1] = (unsigned long)(arg1); \
1312 _argvec[2] = (unsigned long)(arg2); \
1313 _argvec[3] = (unsigned long)(arg3); \
1314 _argvec[4] = (unsigned long)(arg4); \
1315 _argvec[5] = (unsigned long)(arg5); \
1317 VALGRIND_ALIGN_STACK \
1318 "subl $12, %%esp\n\t" \
1319 "pushl 20(%%eax)\n\t" \
1320 "pushl 16(%%eax)\n\t" \
1321 "pushl 12(%%eax)\n\t" \
1322 "pushl 8(%%eax)\n\t" \
1323 "pushl 4(%%eax)\n\t" \
1324 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1325 VALGRIND_CALL_NOREDIR_EAX \
1326 VALGRIND_RESTORE_STACK \
1327 : /*out*/ "=a" (_res) \
1328 : /*in*/ "a" (&_argvec[0]) \
1329 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1331 lval = (__typeof__(lval)) _res; \
1334 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1336 volatile OrigFn _orig = (orig); \
1337 volatile unsigned long _argvec[7]; \
1338 volatile unsigned long _res; \
1339 _argvec[0] = (unsigned long)_orig.nraddr; \
1340 _argvec[1] = (unsigned long)(arg1); \
1341 _argvec[2] = (unsigned long)(arg2); \
1342 _argvec[3] = (unsigned long)(arg3); \
1343 _argvec[4] = (unsigned long)(arg4); \
1344 _argvec[5] = (unsigned long)(arg5); \
1345 _argvec[6] = (unsigned long)(arg6); \
1347 VALGRIND_ALIGN_STACK \
1348 "subl $8, %%esp\n\t" \
1349 "pushl 24(%%eax)\n\t" \
1350 "pushl 20(%%eax)\n\t" \
1351 "pushl 16(%%eax)\n\t" \
1352 "pushl 12(%%eax)\n\t" \
1353 "pushl 8(%%eax)\n\t" \
1354 "pushl 4(%%eax)\n\t" \
1355 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1356 VALGRIND_CALL_NOREDIR_EAX \
1357 VALGRIND_RESTORE_STACK \
1358 : /*out*/ "=a" (_res) \
1359 : /*in*/ "a" (&_argvec[0]) \
1360 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1362 lval = (__typeof__(lval)) _res; \
1365 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1368 volatile OrigFn _orig = (orig); \
1369 volatile unsigned long _argvec[8]; \
1370 volatile unsigned long _res; \
1371 _argvec[0] = (unsigned long)_orig.nraddr; \
1372 _argvec[1] = (unsigned long)(arg1); \
1373 _argvec[2] = (unsigned long)(arg2); \
1374 _argvec[3] = (unsigned long)(arg3); \
1375 _argvec[4] = (unsigned long)(arg4); \
1376 _argvec[5] = (unsigned long)(arg5); \
1377 _argvec[6] = (unsigned long)(arg6); \
1378 _argvec[7] = (unsigned long)(arg7); \
1380 VALGRIND_ALIGN_STACK \
1381 "subl $4, %%esp\n\t" \
1382 "pushl 28(%%eax)\n\t" \
1383 "pushl 24(%%eax)\n\t" \
1384 "pushl 20(%%eax)\n\t" \
1385 "pushl 16(%%eax)\n\t" \
1386 "pushl 12(%%eax)\n\t" \
1387 "pushl 8(%%eax)\n\t" \
1388 "pushl 4(%%eax)\n\t" \
1389 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1390 VALGRIND_CALL_NOREDIR_EAX \
1391 VALGRIND_RESTORE_STACK \
1392 : /*out*/ "=a" (_res) \
1393 : /*in*/ "a" (&_argvec[0]) \
1394 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1396 lval = (__typeof__(lval)) _res; \
1399 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1402 volatile OrigFn _orig = (orig); \
1403 volatile unsigned long _argvec[9]; \
1404 volatile unsigned long _res; \
1405 _argvec[0] = (unsigned long)_orig.nraddr; \
1406 _argvec[1] = (unsigned long)(arg1); \
1407 _argvec[2] = (unsigned long)(arg2); \
1408 _argvec[3] = (unsigned long)(arg3); \
1409 _argvec[4] = (unsigned long)(arg4); \
1410 _argvec[5] = (unsigned long)(arg5); \
1411 _argvec[6] = (unsigned long)(arg6); \
1412 _argvec[7] = (unsigned long)(arg7); \
1413 _argvec[8] = (unsigned long)(arg8); \
1415 VALGRIND_ALIGN_STACK \
1416 "pushl 32(%%eax)\n\t" \
1417 "pushl 28(%%eax)\n\t" \
1418 "pushl 24(%%eax)\n\t" \
1419 "pushl 20(%%eax)\n\t" \
1420 "pushl 16(%%eax)\n\t" \
1421 "pushl 12(%%eax)\n\t" \
1422 "pushl 8(%%eax)\n\t" \
1423 "pushl 4(%%eax)\n\t" \
1424 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1425 VALGRIND_CALL_NOREDIR_EAX \
1426 VALGRIND_RESTORE_STACK \
1427 : /*out*/ "=a" (_res) \
1428 : /*in*/ "a" (&_argvec[0]) \
1429 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1431 lval = (__typeof__(lval)) _res; \
1434 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1437 volatile OrigFn _orig = (orig); \
1438 volatile unsigned long _argvec[10]; \
1439 volatile unsigned long _res; \
1440 _argvec[0] = (unsigned long)_orig.nraddr; \
1441 _argvec[1] = (unsigned long)(arg1); \
1442 _argvec[2] = (unsigned long)(arg2); \
1443 _argvec[3] = (unsigned long)(arg3); \
1444 _argvec[4] = (unsigned long)(arg4); \
1445 _argvec[5] = (unsigned long)(arg5); \
1446 _argvec[6] = (unsigned long)(arg6); \
1447 _argvec[7] = (unsigned long)(arg7); \
1448 _argvec[8] = (unsigned long)(arg8); \
1449 _argvec[9] = (unsigned long)(arg9); \
1451 VALGRIND_ALIGN_STACK \
1452 "subl $12, %%esp\n\t" \
1453 "pushl 36(%%eax)\n\t" \
1454 "pushl 32(%%eax)\n\t" \
1455 "pushl 28(%%eax)\n\t" \
1456 "pushl 24(%%eax)\n\t" \
1457 "pushl 20(%%eax)\n\t" \
1458 "pushl 16(%%eax)\n\t" \
1459 "pushl 12(%%eax)\n\t" \
1460 "pushl 8(%%eax)\n\t" \
1461 "pushl 4(%%eax)\n\t" \
1462 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1463 VALGRIND_CALL_NOREDIR_EAX \
1464 VALGRIND_RESTORE_STACK \
1465 : /*out*/ "=a" (_res) \
1466 : /*in*/ "a" (&_argvec[0]) \
1467 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1469 lval = (__typeof__(lval)) _res; \
1472 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1473 arg7,arg8,arg9,arg10) \
1475 volatile OrigFn _orig = (orig); \
1476 volatile unsigned long _argvec[11]; \
1477 volatile unsigned long _res; \
1478 _argvec[0] = (unsigned long)_orig.nraddr; \
1479 _argvec[1] = (unsigned long)(arg1); \
1480 _argvec[2] = (unsigned long)(arg2); \
1481 _argvec[3] = (unsigned long)(arg3); \
1482 _argvec[4] = (unsigned long)(arg4); \
1483 _argvec[5] = (unsigned long)(arg5); \
1484 _argvec[6] = (unsigned long)(arg6); \
1485 _argvec[7] = (unsigned long)(arg7); \
1486 _argvec[8] = (unsigned long)(arg8); \
1487 _argvec[9] = (unsigned long)(arg9); \
1488 _argvec[10] = (unsigned long)(arg10); \
1490 VALGRIND_ALIGN_STACK \
1491 "subl $8, %%esp\n\t" \
1492 "pushl 40(%%eax)\n\t" \
1493 "pushl 36(%%eax)\n\t" \
1494 "pushl 32(%%eax)\n\t" \
1495 "pushl 28(%%eax)\n\t" \
1496 "pushl 24(%%eax)\n\t" \
1497 "pushl 20(%%eax)\n\t" \
1498 "pushl 16(%%eax)\n\t" \
1499 "pushl 12(%%eax)\n\t" \
1500 "pushl 8(%%eax)\n\t" \
1501 "pushl 4(%%eax)\n\t" \
1502 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1503 VALGRIND_CALL_NOREDIR_EAX \
1504 VALGRIND_RESTORE_STACK \
1505 : /*out*/ "=a" (_res) \
1506 : /*in*/ "a" (&_argvec[0]) \
1507 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1509 lval = (__typeof__(lval)) _res; \
1512 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1513 arg6,arg7,arg8,arg9,arg10, \
1516 volatile OrigFn _orig = (orig); \
1517 volatile unsigned long _argvec[12]; \
1518 volatile unsigned long _res; \
1519 _argvec[0] = (unsigned long)_orig.nraddr; \
1520 _argvec[1] = (unsigned long)(arg1); \
1521 _argvec[2] = (unsigned long)(arg2); \
1522 _argvec[3] = (unsigned long)(arg3); \
1523 _argvec[4] = (unsigned long)(arg4); \
1524 _argvec[5] = (unsigned long)(arg5); \
1525 _argvec[6] = (unsigned long)(arg6); \
1526 _argvec[7] = (unsigned long)(arg7); \
1527 _argvec[8] = (unsigned long)(arg8); \
1528 _argvec[9] = (unsigned long)(arg9); \
1529 _argvec[10] = (unsigned long)(arg10); \
1530 _argvec[11] = (unsigned long)(arg11); \
1532 VALGRIND_ALIGN_STACK \
1533 "subl $4, %%esp\n\t" \
1534 "pushl 44(%%eax)\n\t" \
1535 "pushl 40(%%eax)\n\t" \
1536 "pushl 36(%%eax)\n\t" \
1537 "pushl 32(%%eax)\n\t" \
1538 "pushl 28(%%eax)\n\t" \
1539 "pushl 24(%%eax)\n\t" \
1540 "pushl 20(%%eax)\n\t" \
1541 "pushl 16(%%eax)\n\t" \
1542 "pushl 12(%%eax)\n\t" \
1543 "pushl 8(%%eax)\n\t" \
1544 "pushl 4(%%eax)\n\t" \
1545 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1546 VALGRIND_CALL_NOREDIR_EAX \
1547 VALGRIND_RESTORE_STACK \
1548 : /*out*/ "=a" (_res) \
1549 : /*in*/ "a" (&_argvec[0]) \
1550 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1552 lval = (__typeof__(lval)) _res; \
1555 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1556 arg6,arg7,arg8,arg9,arg10, \
1559 volatile OrigFn _orig = (orig); \
1560 volatile unsigned long _argvec[13]; \
1561 volatile unsigned long _res; \
1562 _argvec[0] = (unsigned long)_orig.nraddr; \
1563 _argvec[1] = (unsigned long)(arg1); \
1564 _argvec[2] = (unsigned long)(arg2); \
1565 _argvec[3] = (unsigned long)(arg3); \
1566 _argvec[4] = (unsigned long)(arg4); \
1567 _argvec[5] = (unsigned long)(arg5); \
1568 _argvec[6] = (unsigned long)(arg6); \
1569 _argvec[7] = (unsigned long)(arg7); \
1570 _argvec[8] = (unsigned long)(arg8); \
1571 _argvec[9] = (unsigned long)(arg9); \
1572 _argvec[10] = (unsigned long)(arg10); \
1573 _argvec[11] = (unsigned long)(arg11); \
1574 _argvec[12] = (unsigned long)(arg12); \
1576 VALGRIND_ALIGN_STACK \
1577 "pushl 48(%%eax)\n\t" \
1578 "pushl 44(%%eax)\n\t" \
1579 "pushl 40(%%eax)\n\t" \
1580 "pushl 36(%%eax)\n\t" \
1581 "pushl 32(%%eax)\n\t" \
1582 "pushl 28(%%eax)\n\t" \
1583 "pushl 24(%%eax)\n\t" \
1584 "pushl 20(%%eax)\n\t" \
1585 "pushl 16(%%eax)\n\t" \
1586 "pushl 12(%%eax)\n\t" \
1587 "pushl 8(%%eax)\n\t" \
1588 "pushl 4(%%eax)\n\t" \
1589 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1590 VALGRIND_CALL_NOREDIR_EAX \
1591 VALGRIND_RESTORE_STACK \
1592 : /*out*/ "=a" (_res) \
1593 : /*in*/ "a" (&_argvec[0]) \
1594 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1596 lval = (__typeof__(lval)) _res; \
1599 #endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1601 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1603 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1604 || defined(PLAT_amd64_solaris)
1606 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1608 /* These regs are trashed by the hidden call. */
1609 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1610 "rdi", "r8", "r9", "r10", "r11"
1612 /* This is all pretty complex. It's so as to make stack unwinding
1613 work reliably. See bug 243270. The basic problem is the sub and
1614 add of 128 of %rsp in all of the following macros. If gcc believes
1615 the CFA is in %rsp, then unwinding may fail, because what's at the
1616 CFA is not what gcc "expected" when it constructs the CFIs for the
1617 places where the macros are instantiated.
1619 But we can't just add a CFI annotation to increase the CFA offset
1620 by 128, to match the sub of 128 from %rsp, because we don't know
1621 whether gcc has chosen %rsp as the CFA at that point, or whether it
1622 has chosen some other register (eg, %rbp). In the latter case,
1623 adding a CFI annotation to change the CFA offset is simply wrong.
1625 So the solution is to get hold of the CFA using
1626 __builtin_dwarf_cfa(), put it in a known register, and add a
1627 CFI annotation to say what the register is. We choose %rbp for
1628 this (perhaps perversely), because:
1630 (1) %rbp is already subject to unwinding. If a new register was
1631 chosen then the unwinder would have to unwind it in all stack
1632 traces, which is expensive, and
1634 (2) %rbp is already subject to precise exception updates in the
1635 JIT. If a new register was chosen, we'd have to have precise
1636 exceptions for it too, which reduces performance of the generated code.
1639 However .. one extra complication. We can't just whack the result
1640 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1641 list of trashed registers at the end of the inline assembly
1642 fragments; gcc won't allow %rbp to appear in that list. Hence
1643 instead we need to stash %rbp in %r15 for the duration of the asm,
1644 and say that %r15 is trashed instead. gcc seems happy to go with that.
1647 Oh .. and this all needs to be conditionalised so that it is
1648 unchanged from before this commit, when compiled with older gccs
1649 that don't support __builtin_dwarf_cfa. Furthermore, since
1650 this header file is freestanding, it has to be independent of
1651 config.h, and so the following conditionalisation cannot depend on
1652 configure time checks.
1654 Although it's not clear from
1655 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1656 this expression excludes Darwin.
1657 .cfi directives in Darwin assembly appear to be completely
1658 different and I haven't investigated how they work.
1660 For even more entertainment value, note we have to use the
1661 completely undocumented __builtin_dwarf_cfa(), which appears to
1662 really compute the CFA, whereas __builtin_frame_address(0) claims
1663 to but actually doesn't. See
1664 https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
1666 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1667 # define __FRAME_POINTER \
1668 ,"r"(__builtin_dwarf_cfa())
1669 # define VALGRIND_CFI_PROLOGUE \
1670 "movq %%rbp, %%r15\n\t" \
1671 "movq %2, %%rbp\n\t" \
1672 ".cfi_remember_state\n\t" \
1673 ".cfi_def_cfa rbp, 0\n\t"
1674 # define VALGRIND_CFI_EPILOGUE \
1675 "movq %%r15, %%rbp\n\t" \
1676 ".cfi_restore_state\n\t"
1678 # define __FRAME_POINTER
1679 # define VALGRIND_CFI_PROLOGUE
1680 # define VALGRIND_CFI_EPILOGUE
1683 /* Macros to save and align the stack before making a function
1684 call and restore it afterwards as gcc may not keep the stack
1685 pointer aligned if it doesn't realise calls are being made
1686 to other functions. */
1688 #define VALGRIND_ALIGN_STACK \
1689 "movq %%rsp,%%r14\n\t" \
1690 "andq $0xfffffffffffffff0,%%rsp\n\t"
1691 #define VALGRIND_RESTORE_STACK \
1692 "movq %%r14,%%rsp\n\t"
1694 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned long) == 8. */
1697 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1698 macros. In order not to trash the stack redzone, we need to drop
1699 %rsp by 128 before the hidden call, and restore afterwards. The
1700 nastiness is that it is only by luck that the stack still appears
1701 to be unwindable during the hidden call - since then the behaviour
1702 of any routine using this macro does not match what the CFI data says.
1705 Why is this important? Imagine that a wrapper has a stack
1706 allocated local and passes a pointer to that local to the hidden call.
1707 Because gcc does not know about the hidden call, it may allocate
1708 that local in the redzone. Unfortunately the hidden call may then
1709 trash it before it comes to use it. So we must step clear of the
1710 redzone, for the duration of the hidden call, to make it safe.
1712 Probably the same problem afflicts the other redzone-style ABIs too
1713 (ppc64-linux); but for those, the stack is
1714 self-describing (none of this CFI nonsense) so at least messing
1715 with the stack pointer doesn't give a danger of non-unwindable stacks. */
1718 #define CALL_FN_W_v(lval, orig) \
1720 volatile OrigFn _orig = (orig); \
1721 volatile unsigned long _argvec[1]; \
1722 volatile unsigned long _res; \
1723 _argvec[0] = (unsigned long)_orig.nraddr; \
1725 VALGRIND_CFI_PROLOGUE \
1726 VALGRIND_ALIGN_STACK \
1727 "subq $128,%%rsp\n\t" \
1728 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1729 VALGRIND_CALL_NOREDIR_RAX \
1730 VALGRIND_RESTORE_STACK \
1731 VALGRIND_CFI_EPILOGUE \
1732 : /*out*/ "=a" (_res) \
1733 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1734 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1736 lval = (__typeof__(lval)) _res; \
1739 #define CALL_FN_W_W(lval, orig, arg1) \
1741 volatile OrigFn _orig = (orig); \
1742 volatile unsigned long _argvec[2]; \
1743 volatile unsigned long _res; \
1744 _argvec[0] = (unsigned long)_orig.nraddr; \
1745 _argvec[1] = (unsigned long)(arg1); \
1747 VALGRIND_CFI_PROLOGUE \
1748 VALGRIND_ALIGN_STACK \
1749 "subq $128,%%rsp\n\t" \
1750 "movq 8(%%rax), %%rdi\n\t" \
1751 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1752 VALGRIND_CALL_NOREDIR_RAX \
1753 VALGRIND_RESTORE_STACK \
1754 VALGRIND_CFI_EPILOGUE \
1755 : /*out*/ "=a" (_res) \
1756 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1757 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1759 lval = (__typeof__(lval)) _res; \
1762 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1764 volatile OrigFn _orig = (orig); \
1765 volatile unsigned long _argvec[3]; \
1766 volatile unsigned long _res; \
1767 _argvec[0] = (unsigned long)_orig.nraddr; \
1768 _argvec[1] = (unsigned long)(arg1); \
1769 _argvec[2] = (unsigned long)(arg2); \
1771 VALGRIND_CFI_PROLOGUE \
1772 VALGRIND_ALIGN_STACK \
1773 "subq $128,%%rsp\n\t" \
1774 "movq 16(%%rax), %%rsi\n\t" \
1775 "movq 8(%%rax), %%rdi\n\t" \
1776 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1777 VALGRIND_CALL_NOREDIR_RAX \
1778 VALGRIND_RESTORE_STACK \
1779 VALGRIND_CFI_EPILOGUE \
1780 : /*out*/ "=a" (_res) \
1781 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1782 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1784 lval = (__typeof__(lval)) _res; \
1787 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1789 volatile OrigFn _orig = (orig); \
1790 volatile unsigned long _argvec[4]; \
1791 volatile unsigned long _res; \
1792 _argvec[0] = (unsigned long)_orig.nraddr; \
1793 _argvec[1] = (unsigned long)(arg1); \
1794 _argvec[2] = (unsigned long)(arg2); \
1795 _argvec[3] = (unsigned long)(arg3); \
1797 VALGRIND_CFI_PROLOGUE \
1798 VALGRIND_ALIGN_STACK \
1799 "subq $128,%%rsp\n\t" \
1800 "movq 24(%%rax), %%rdx\n\t" \
1801 "movq 16(%%rax), %%rsi\n\t" \
1802 "movq 8(%%rax), %%rdi\n\t" \
1803 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1804 VALGRIND_CALL_NOREDIR_RAX \
1805 VALGRIND_RESTORE_STACK \
1806 VALGRIND_CFI_EPILOGUE \
1807 : /*out*/ "=a" (_res) \
1808 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1809 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1811 lval = (__typeof__(lval)) _res; \
1814 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1816 volatile OrigFn _orig = (orig); \
1817 volatile unsigned long _argvec[5]; \
1818 volatile unsigned long _res; \
1819 _argvec[0] = (unsigned long)_orig.nraddr; \
1820 _argvec[1] = (unsigned long)(arg1); \
1821 _argvec[2] = (unsigned long)(arg2); \
1822 _argvec[3] = (unsigned long)(arg3); \
1823 _argvec[4] = (unsigned long)(arg4); \
1825 VALGRIND_CFI_PROLOGUE \
1826 VALGRIND_ALIGN_STACK \
1827 "subq $128,%%rsp\n\t" \
1828 "movq 32(%%rax), %%rcx\n\t" \
1829 "movq 24(%%rax), %%rdx\n\t" \
1830 "movq 16(%%rax), %%rsi\n\t" \
1831 "movq 8(%%rax), %%rdi\n\t" \
1832 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1833 VALGRIND_CALL_NOREDIR_RAX \
1834 VALGRIND_RESTORE_STACK \
1835 VALGRIND_CFI_EPILOGUE \
1836 : /*out*/ "=a" (_res) \
1837 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1838 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1840 lval = (__typeof__(lval)) _res; \
1843 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1845 volatile OrigFn _orig = (orig); \
1846 volatile unsigned long _argvec[6]; \
1847 volatile unsigned long _res; \
1848 _argvec[0] = (unsigned long)_orig.nraddr; \
1849 _argvec[1] = (unsigned long)(arg1); \
1850 _argvec[2] = (unsigned long)(arg2); \
1851 _argvec[3] = (unsigned long)(arg3); \
1852 _argvec[4] = (unsigned long)(arg4); \
1853 _argvec[5] = (unsigned long)(arg5); \
1855 VALGRIND_CFI_PROLOGUE \
1856 VALGRIND_ALIGN_STACK \
1857 "subq $128,%%rsp\n\t" \
1858 "movq 40(%%rax), %%r8\n\t" \
1859 "movq 32(%%rax), %%rcx\n\t" \
1860 "movq 24(%%rax), %%rdx\n\t" \
1861 "movq 16(%%rax), %%rsi\n\t" \
1862 "movq 8(%%rax), %%rdi\n\t" \
1863 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1864 VALGRIND_CALL_NOREDIR_RAX \
1865 VALGRIND_RESTORE_STACK \
1866 VALGRIND_CFI_EPILOGUE \
1867 : /*out*/ "=a" (_res) \
1868 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1869 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1871 lval = (__typeof__(lval)) _res; \
1874 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1876 volatile OrigFn _orig = (orig); \
1877 volatile unsigned long _argvec[7]; \
1878 volatile unsigned long _res; \
1879 _argvec[0] = (unsigned long)_orig.nraddr; \
1880 _argvec[1] = (unsigned long)(arg1); \
1881 _argvec[2] = (unsigned long)(arg2); \
1882 _argvec[3] = (unsigned long)(arg3); \
1883 _argvec[4] = (unsigned long)(arg4); \
1884 _argvec[5] = (unsigned long)(arg5); \
1885 _argvec[6] = (unsigned long)(arg6); \
1887 VALGRIND_CFI_PROLOGUE \
1888 VALGRIND_ALIGN_STACK \
1889 "subq $128,%%rsp\n\t" \
1890 "movq 48(%%rax), %%r9\n\t" \
1891 "movq 40(%%rax), %%r8\n\t" \
1892 "movq 32(%%rax), %%rcx\n\t" \
1893 "movq 24(%%rax), %%rdx\n\t" \
1894 "movq 16(%%rax), %%rsi\n\t" \
1895 "movq 8(%%rax), %%rdi\n\t" \
1896 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1897 VALGRIND_CALL_NOREDIR_RAX \
1898 VALGRIND_RESTORE_STACK \
1899 VALGRIND_CFI_EPILOGUE \
1900 : /*out*/ "=a" (_res) \
1901 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1902 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1904 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
1910 volatile OrigFn _orig = (orig); \
1911 volatile unsigned long _argvec[8]; \
1912 volatile unsigned long _res; \
1913 _argvec[0] = (unsigned long)_orig.nraddr; \
1914 _argvec[1] = (unsigned long)(arg1); \
1915 _argvec[2] = (unsigned long)(arg2); \
1916 _argvec[3] = (unsigned long)(arg3); \
1917 _argvec[4] = (unsigned long)(arg4); \
1918 _argvec[5] = (unsigned long)(arg5); \
1919 _argvec[6] = (unsigned long)(arg6); \
1920 _argvec[7] = (unsigned long)(arg7); \
1922 VALGRIND_CFI_PROLOGUE \
1923 VALGRIND_ALIGN_STACK \
1924 "subq $136,%%rsp\n\t" \
1925 "pushq 56(%%rax)\n\t" \
1926 "movq 48(%%rax), %%r9\n\t" \
1927 "movq 40(%%rax), %%r8\n\t" \
1928 "movq 32(%%rax), %%rcx\n\t" \
1929 "movq 24(%%rax), %%rdx\n\t" \
1930 "movq 16(%%rax), %%rsi\n\t" \
1931 "movq 8(%%rax), %%rdi\n\t" \
1932 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1933 VALGRIND_CALL_NOREDIR_RAX \
1934 VALGRIND_RESTORE_STACK \
1935 VALGRIND_CFI_EPILOGUE \
1936 : /*out*/ "=a" (_res) \
1937 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1938 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1940 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
1946 volatile OrigFn _orig = (orig); \
1947 volatile unsigned long _argvec[9]; \
1948 volatile unsigned long _res; \
1949 _argvec[0] = (unsigned long)_orig.nraddr; \
1950 _argvec[1] = (unsigned long)(arg1); \
1951 _argvec[2] = (unsigned long)(arg2); \
1952 _argvec[3] = (unsigned long)(arg3); \
1953 _argvec[4] = (unsigned long)(arg4); \
1954 _argvec[5] = (unsigned long)(arg5); \
1955 _argvec[6] = (unsigned long)(arg6); \
1956 _argvec[7] = (unsigned long)(arg7); \
1957 _argvec[8] = (unsigned long)(arg8); \
1959 VALGRIND_CFI_PROLOGUE \
1960 VALGRIND_ALIGN_STACK \
1961 "subq $128,%%rsp\n\t" \
1962 "pushq 64(%%rax)\n\t" \
1963 "pushq 56(%%rax)\n\t" \
1964 "movq 48(%%rax), %%r9\n\t" \
1965 "movq 40(%%rax), %%r8\n\t" \
1966 "movq 32(%%rax), %%rcx\n\t" \
1967 "movq 24(%%rax), %%rdx\n\t" \
1968 "movq 16(%%rax), %%rsi\n\t" \
1969 "movq 8(%%rax), %%rdi\n\t" \
1970 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1971 VALGRIND_CALL_NOREDIR_RAX \
1972 VALGRIND_RESTORE_STACK \
1973 VALGRIND_CFI_EPILOGUE \
1974 : /*out*/ "=a" (_res) \
1975 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1976 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1978 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
1984 volatile OrigFn _orig = (orig); \
1985 volatile unsigned long _argvec[10]; \
1986 volatile unsigned long _res; \
1987 _argvec[0] = (unsigned long)_orig.nraddr; \
1988 _argvec[1] = (unsigned long)(arg1); \
1989 _argvec[2] = (unsigned long)(arg2); \
1990 _argvec[3] = (unsigned long)(arg3); \
1991 _argvec[4] = (unsigned long)(arg4); \
1992 _argvec[5] = (unsigned long)(arg5); \
1993 _argvec[6] = (unsigned long)(arg6); \
1994 _argvec[7] = (unsigned long)(arg7); \
1995 _argvec[8] = (unsigned long)(arg8); \
1996 _argvec[9] = (unsigned long)(arg9); \
1998 VALGRIND_CFI_PROLOGUE \
1999 VALGRIND_ALIGN_STACK \
2000 "subq $136,%%rsp\n\t" \
2001 "pushq 72(%%rax)\n\t" \
2002 "pushq 64(%%rax)\n\t" \
2003 "pushq 56(%%rax)\n\t" \
2004 "movq 48(%%rax), %%r9\n\t" \
2005 "movq 40(%%rax), %%r8\n\t" \
2006 "movq 32(%%rax), %%rcx\n\t" \
2007 "movq 24(%%rax), %%rdx\n\t" \
2008 "movq 16(%%rax), %%rsi\n\t" \
2009 "movq 8(%%rax), %%rdi\n\t" \
2010 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2011 VALGRIND_CALL_NOREDIR_RAX \
2012 VALGRIND_RESTORE_STACK \
2013 VALGRIND_CFI_EPILOGUE \
2014 : /*out*/ "=a" (_res) \
2015 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2016 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2018 lval = (__typeof__(lval)) _res; \
2021 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2022 arg7,arg8,arg9,arg10) \
2024 volatile OrigFn _orig = (orig); \
2025 volatile unsigned long _argvec[11]; \
2026 volatile unsigned long _res; \
2027 _argvec[0] = (unsigned long)_orig.nraddr; \
2028 _argvec[1] = (unsigned long)(arg1); \
2029 _argvec[2] = (unsigned long)(arg2); \
2030 _argvec[3] = (unsigned long)(arg3); \
2031 _argvec[4] = (unsigned long)(arg4); \
2032 _argvec[5] = (unsigned long)(arg5); \
2033 _argvec[6] = (unsigned long)(arg6); \
2034 _argvec[7] = (unsigned long)(arg7); \
2035 _argvec[8] = (unsigned long)(arg8); \
2036 _argvec[9] = (unsigned long)(arg9); \
2037 _argvec[10] = (unsigned long)(arg10); \
2039 VALGRIND_CFI_PROLOGUE \
2040 VALGRIND_ALIGN_STACK \
2041 "subq $128,%%rsp\n\t" \
2042 "pushq 80(%%rax)\n\t" \
2043 "pushq 72(%%rax)\n\t" \
2044 "pushq 64(%%rax)\n\t" \
2045 "pushq 56(%%rax)\n\t" \
2046 "movq 48(%%rax), %%r9\n\t" \
2047 "movq 40(%%rax), %%r8\n\t" \
2048 "movq 32(%%rax), %%rcx\n\t" \
2049 "movq 24(%%rax), %%rdx\n\t" \
2050 "movq 16(%%rax), %%rsi\n\t" \
2051 "movq 8(%%rax), %%rdi\n\t" \
2052 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2053 VALGRIND_CALL_NOREDIR_RAX \
2054 VALGRIND_RESTORE_STACK \
2055 VALGRIND_CFI_EPILOGUE \
2056 : /*out*/ "=a" (_res) \
2057 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2058 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2060 lval = (__typeof__(lval)) _res; \
2063 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2064 arg7,arg8,arg9,arg10,arg11) \
2066 volatile OrigFn _orig = (orig); \
2067 volatile unsigned long _argvec[12]; \
2068 volatile unsigned long _res; \
2069 _argvec[0] = (unsigned long)_orig.nraddr; \
2070 _argvec[1] = (unsigned long)(arg1); \
2071 _argvec[2] = (unsigned long)(arg2); \
2072 _argvec[3] = (unsigned long)(arg3); \
2073 _argvec[4] = (unsigned long)(arg4); \
2074 _argvec[5] = (unsigned long)(arg5); \
2075 _argvec[6] = (unsigned long)(arg6); \
2076 _argvec[7] = (unsigned long)(arg7); \
2077 _argvec[8] = (unsigned long)(arg8); \
2078 _argvec[9] = (unsigned long)(arg9); \
2079 _argvec[10] = (unsigned long)(arg10); \
2080 _argvec[11] = (unsigned long)(arg11); \
2082 VALGRIND_CFI_PROLOGUE \
2083 VALGRIND_ALIGN_STACK \
2084 "subq $136,%%rsp\n\t" \
2085 "pushq 88(%%rax)\n\t" \
2086 "pushq 80(%%rax)\n\t" \
2087 "pushq 72(%%rax)\n\t" \
2088 "pushq 64(%%rax)\n\t" \
2089 "pushq 56(%%rax)\n\t" \
2090 "movq 48(%%rax), %%r9\n\t" \
2091 "movq 40(%%rax), %%r8\n\t" \
2092 "movq 32(%%rax), %%rcx\n\t" \
2093 "movq 24(%%rax), %%rdx\n\t" \
2094 "movq 16(%%rax), %%rsi\n\t" \
2095 "movq 8(%%rax), %%rdi\n\t" \
2096 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2097 VALGRIND_CALL_NOREDIR_RAX \
2098 VALGRIND_RESTORE_STACK \
2099 VALGRIND_CFI_EPILOGUE \
2100 : /*out*/ "=a" (_res) \
2101 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2102 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2104 lval = (__typeof__(lval)) _res; \
2107 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2108 arg7,arg8,arg9,arg10,arg11,arg12) \
2110 volatile OrigFn _orig = (orig); \
2111 volatile unsigned long _argvec[13]; \
2112 volatile unsigned long _res; \
2113 _argvec[0] = (unsigned long)_orig.nraddr; \
2114 _argvec[1] = (unsigned long)(arg1); \
2115 _argvec[2] = (unsigned long)(arg2); \
2116 _argvec[3] = (unsigned long)(arg3); \
2117 _argvec[4] = (unsigned long)(arg4); \
2118 _argvec[5] = (unsigned long)(arg5); \
2119 _argvec[6] = (unsigned long)(arg6); \
2120 _argvec[7] = (unsigned long)(arg7); \
2121 _argvec[8] = (unsigned long)(arg8); \
2122 _argvec[9] = (unsigned long)(arg9); \
2123 _argvec[10] = (unsigned long)(arg10); \
2124 _argvec[11] = (unsigned long)(arg11); \
2125 _argvec[12] = (unsigned long)(arg12); \
2127 VALGRIND_CFI_PROLOGUE \
2128 VALGRIND_ALIGN_STACK \
2129 "subq $128,%%rsp\n\t" \
2130 "pushq 96(%%rax)\n\t" \
2131 "pushq 88(%%rax)\n\t" \
2132 "pushq 80(%%rax)\n\t" \
2133 "pushq 72(%%rax)\n\t" \
2134 "pushq 64(%%rax)\n\t" \
2135 "pushq 56(%%rax)\n\t" \
2136 "movq 48(%%rax), %%r9\n\t" \
2137 "movq 40(%%rax), %%r8\n\t" \
2138 "movq 32(%%rax), %%rcx\n\t" \
2139 "movq 24(%%rax), %%rdx\n\t" \
2140 "movq 16(%%rax), %%rsi\n\t" \
2141 "movq 8(%%rax), %%rdi\n\t" \
2142 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2143 VALGRIND_CALL_NOREDIR_RAX \
2144 VALGRIND_RESTORE_STACK \
2145 VALGRIND_CFI_EPILOGUE \
2146 : /*out*/ "=a" (_res) \
2147 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2148 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2150 lval = (__typeof__(lval)) _res; \
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
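/* A usage sketch (illustrative only): the CALL_FN_W_* macros are meant
   to be used from inside a function wrapper, after the wrapper has
   fetched an OrigFn describing the intercepted function.  Assuming the
   usual wrapper machinery declared elsewhere in this header
   (VALGRIND_GET_ORIG_FN and the I_WRAP_SONAME_FNNAME_ZU naming macro),
   a wrapper for a hypothetical "int foo(int)" in libfoo.so might look
   roughly like this:

      int I_WRAP_SONAME_FNNAME_ZU(libfooZdso, foo) ( int x )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }

   The wrapper calls the real foo through fn without being re-redirected
   back into itself.  The function, library and argument types above are
   purely illustrative. */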
/* ------------------------ ppc32-linux ------------------------ */

#if defined(PLAT_ppc32_linux)
/* This is useful for finding out about the on-stack stuff:

   extern int f9  ( int,int,int,int,int,int,int,int,int );
   extern int f10 ( int,int,int,int,int,int,int,int,int,int );
   extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
   extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );

   int g9 ( void ) {
      return f9(11,22,33,44,55,66,77,88,99);
   }
   int g10 ( void ) {
      return f10(11,22,33,44,55,66,77,88,99,110);
   }
   int g11 ( void ) {
      return f11(11,22,33,44,55,66,77,88,99,110,121);
   }
   int g12 ( void ) {
      return f12(11,22,33,44,55,66,77,88,99,110,121,132);
   }
*/
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"                           \
   "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"
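/* Note: "mr 28,1" above stashes the incoming stack pointer in r28
   (which is why r28 appears in the clobber lists below), and
   "rlwinm 1,1,0,0,27" keeps only bits 0..27 of r1, clearing its low
   four bits, i.e. it rounds the stack pointer down to a 16-byte
   boundary.  VALGRIND_RESTORE_STACK simply copies r28 back into r1. */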
/* These CALL_FN_ macros assume that on ppc32-linux,
   sizeof(unsigned long) == 4. */
2203 #define CALL_FN_W_v(lval, orig) \
2205 volatile OrigFn _orig = (orig); \
2206 volatile unsigned long _argvec[1]; \
2207 volatile unsigned long _res; \
2208 _argvec[0] = (unsigned long)_orig.nraddr; \
2210 VALGRIND_ALIGN_STACK \
2212 "lwz 11,0(11)\n\t" /* target->r11 */ \
2213 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2214 VALGRIND_RESTORE_STACK \
2216 : /*out*/ "=r" (_res) \
2217 : /*in*/ "r" (&_argvec[0]) \
2218 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2220 lval = (__typeof__(lval)) _res; \
2223 #define CALL_FN_W_W(lval, orig, arg1) \
2225 volatile OrigFn _orig = (orig); \
2226 volatile unsigned long _argvec[2]; \
2227 volatile unsigned long _res; \
2228 _argvec[0] = (unsigned long)_orig.nraddr; \
2229 _argvec[1] = (unsigned long)arg1; \
2231 VALGRIND_ALIGN_STACK \
2233 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2234 "lwz 11,0(11)\n\t" /* target->r11 */ \
2235 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2236 VALGRIND_RESTORE_STACK \
2238 : /*out*/ "=r" (_res) \
2239 : /*in*/ "r" (&_argvec[0]) \
2240 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2242 lval = (__typeof__(lval)) _res; \
2245 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2247 volatile OrigFn _orig = (orig); \
2248 volatile unsigned long _argvec[3]; \
2249 volatile unsigned long _res; \
2250 _argvec[0] = (unsigned long)_orig.nraddr; \
2251 _argvec[1] = (unsigned long)arg1; \
2252 _argvec[2] = (unsigned long)arg2; \
2254 VALGRIND_ALIGN_STACK \
2256 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2258 "lwz 11,0(11)\n\t" /* target->r11 */ \
2259 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2260 VALGRIND_RESTORE_STACK \
2262 : /*out*/ "=r" (_res) \
2263 : /*in*/ "r" (&_argvec[0]) \
2264 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2266 lval = (__typeof__(lval)) _res; \
2269 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2271 volatile OrigFn _orig = (orig); \
2272 volatile unsigned long _argvec[4]; \
2273 volatile unsigned long _res; \
2274 _argvec[0] = (unsigned long)_orig.nraddr; \
2275 _argvec[1] = (unsigned long)arg1; \
2276 _argvec[2] = (unsigned long)arg2; \
2277 _argvec[3] = (unsigned long)arg3; \
2279 VALGRIND_ALIGN_STACK \
2281 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2283 "lwz 5,12(11)\n\t" \
2284 "lwz 11,0(11)\n\t" /* target->r11 */ \
2285 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2286 VALGRIND_RESTORE_STACK \
2288 : /*out*/ "=r" (_res) \
2289 : /*in*/ "r" (&_argvec[0]) \
2290 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2292 lval = (__typeof__(lval)) _res; \
2295 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2297 volatile OrigFn _orig = (orig); \
2298 volatile unsigned long _argvec[5]; \
2299 volatile unsigned long _res; \
2300 _argvec[0] = (unsigned long)_orig.nraddr; \
2301 _argvec[1] = (unsigned long)arg1; \
2302 _argvec[2] = (unsigned long)arg2; \
2303 _argvec[3] = (unsigned long)arg3; \
2304 _argvec[4] = (unsigned long)arg4; \
2306 VALGRIND_ALIGN_STACK \
2308 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2310 "lwz 5,12(11)\n\t" \
2311 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2312 "lwz 11,0(11)\n\t" /* target->r11 */ \
2313 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2314 VALGRIND_RESTORE_STACK \
2316 : /*out*/ "=r" (_res) \
2317 : /*in*/ "r" (&_argvec[0]) \
2318 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2320 lval = (__typeof__(lval)) _res; \
2323 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2325 volatile OrigFn _orig = (orig); \
2326 volatile unsigned long _argvec[6]; \
2327 volatile unsigned long _res; \
2328 _argvec[0] = (unsigned long)_orig.nraddr; \
2329 _argvec[1] = (unsigned long)arg1; \
2330 _argvec[2] = (unsigned long)arg2; \
2331 _argvec[3] = (unsigned long)arg3; \
2332 _argvec[4] = (unsigned long)arg4; \
2333 _argvec[5] = (unsigned long)arg5; \
2335 VALGRIND_ALIGN_STACK \
2337 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2339 "lwz 5,12(11)\n\t" \
2340 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2341 "lwz 7,20(11)\n\t" \
2342 "lwz 11,0(11)\n\t" /* target->r11 */ \
2343 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2344 VALGRIND_RESTORE_STACK \
2346 : /*out*/ "=r" (_res) \
2347 : /*in*/ "r" (&_argvec[0]) \
2348 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2350 lval = (__typeof__(lval)) _res; \
2353 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2355 volatile OrigFn _orig = (orig); \
2356 volatile unsigned long _argvec[7]; \
2357 volatile unsigned long _res; \
2358 _argvec[0] = (unsigned long)_orig.nraddr; \
2359 _argvec[1] = (unsigned long)arg1; \
2360 _argvec[2] = (unsigned long)arg2; \
2361 _argvec[3] = (unsigned long)arg3; \
2362 _argvec[4] = (unsigned long)arg4; \
2363 _argvec[5] = (unsigned long)arg5; \
2364 _argvec[6] = (unsigned long)arg6; \
2366 VALGRIND_ALIGN_STACK \
2368 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2370 "lwz 5,12(11)\n\t" \
2371 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2372 "lwz 7,20(11)\n\t" \
2373 "lwz 8,24(11)\n\t" \
2374 "lwz 11,0(11)\n\t" /* target->r11 */ \
2375 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2376 VALGRIND_RESTORE_STACK \
2378 : /*out*/ "=r" (_res) \
2379 : /*in*/ "r" (&_argvec[0]) \
2380 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2382 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
2388 volatile OrigFn _orig = (orig); \
2389 volatile unsigned long _argvec[8]; \
2390 volatile unsigned long _res; \
2391 _argvec[0] = (unsigned long)_orig.nraddr; \
2392 _argvec[1] = (unsigned long)arg1; \
2393 _argvec[2] = (unsigned long)arg2; \
2394 _argvec[3] = (unsigned long)arg3; \
2395 _argvec[4] = (unsigned long)arg4; \
2396 _argvec[5] = (unsigned long)arg5; \
2397 _argvec[6] = (unsigned long)arg6; \
2398 _argvec[7] = (unsigned long)arg7; \
2400 VALGRIND_ALIGN_STACK \
2402 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2404 "lwz 5,12(11)\n\t" \
2405 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2406 "lwz 7,20(11)\n\t" \
2407 "lwz 8,24(11)\n\t" \
2408 "lwz 9,28(11)\n\t" \
2409 "lwz 11,0(11)\n\t" /* target->r11 */ \
2410 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2411 VALGRIND_RESTORE_STACK \
2413 : /*out*/ "=r" (_res) \
2414 : /*in*/ "r" (&_argvec[0]) \
2415 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2417 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
2423 volatile OrigFn _orig = (orig); \
2424 volatile unsigned long _argvec[9]; \
2425 volatile unsigned long _res; \
2426 _argvec[0] = (unsigned long)_orig.nraddr; \
2427 _argvec[1] = (unsigned long)arg1; \
2428 _argvec[2] = (unsigned long)arg2; \
2429 _argvec[3] = (unsigned long)arg3; \
2430 _argvec[4] = (unsigned long)arg4; \
2431 _argvec[5] = (unsigned long)arg5; \
2432 _argvec[6] = (unsigned long)arg6; \
2433 _argvec[7] = (unsigned long)arg7; \
2434 _argvec[8] = (unsigned long)arg8; \
2436 VALGRIND_ALIGN_STACK \
2438 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2440 "lwz 5,12(11)\n\t" \
2441 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2442 "lwz 7,20(11)\n\t" \
2443 "lwz 8,24(11)\n\t" \
2444 "lwz 9,28(11)\n\t" \
2445 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2446 "lwz 11,0(11)\n\t" /* target->r11 */ \
2447 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2448 VALGRIND_RESTORE_STACK \
2450 : /*out*/ "=r" (_res) \
2451 : /*in*/ "r" (&_argvec[0]) \
2452 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2454 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
2460 volatile OrigFn _orig = (orig); \
2461 volatile unsigned long _argvec[10]; \
2462 volatile unsigned long _res; \
2463 _argvec[0] = (unsigned long)_orig.nraddr; \
2464 _argvec[1] = (unsigned long)arg1; \
2465 _argvec[2] = (unsigned long)arg2; \
2466 _argvec[3] = (unsigned long)arg3; \
2467 _argvec[4] = (unsigned long)arg4; \
2468 _argvec[5] = (unsigned long)arg5; \
2469 _argvec[6] = (unsigned long)arg6; \
2470 _argvec[7] = (unsigned long)arg7; \
2471 _argvec[8] = (unsigned long)arg8; \
2472 _argvec[9] = (unsigned long)arg9; \
2474 VALGRIND_ALIGN_STACK \
2476 "addi 1,1,-16\n\t" \
2478 "lwz 3,36(11)\n\t" \
2481 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2483 "lwz 5,12(11)\n\t" \
2484 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2485 "lwz 7,20(11)\n\t" \
2486 "lwz 8,24(11)\n\t" \
2487 "lwz 9,28(11)\n\t" \
2488 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2489 "lwz 11,0(11)\n\t" /* target->r11 */ \
2490 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2491 VALGRIND_RESTORE_STACK \
2493 : /*out*/ "=r" (_res) \
2494 : /*in*/ "r" (&_argvec[0]) \
2495 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2497 lval = (__typeof__(lval)) _res; \
2500 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2501 arg7,arg8,arg9,arg10) \
2503 volatile OrigFn _orig = (orig); \
2504 volatile unsigned long _argvec[11]; \
2505 volatile unsigned long _res; \
2506 _argvec[0] = (unsigned long)_orig.nraddr; \
2507 _argvec[1] = (unsigned long)arg1; \
2508 _argvec[2] = (unsigned long)arg2; \
2509 _argvec[3] = (unsigned long)arg3; \
2510 _argvec[4] = (unsigned long)arg4; \
2511 _argvec[5] = (unsigned long)arg5; \
2512 _argvec[6] = (unsigned long)arg6; \
2513 _argvec[7] = (unsigned long)arg7; \
2514 _argvec[8] = (unsigned long)arg8; \
2515 _argvec[9] = (unsigned long)arg9; \
2516 _argvec[10] = (unsigned long)arg10; \
2518 VALGRIND_ALIGN_STACK \
2520 "addi 1,1,-16\n\t" \
2522 "lwz 3,40(11)\n\t" \
2525 "lwz 3,36(11)\n\t" \
2528 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2530 "lwz 5,12(11)\n\t" \
2531 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2532 "lwz 7,20(11)\n\t" \
2533 "lwz 8,24(11)\n\t" \
2534 "lwz 9,28(11)\n\t" \
2535 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2536 "lwz 11,0(11)\n\t" /* target->r11 */ \
2537 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2538 VALGRIND_RESTORE_STACK \
2540 : /*out*/ "=r" (_res) \
2541 : /*in*/ "r" (&_argvec[0]) \
2542 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2544 lval = (__typeof__(lval)) _res; \
2547 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2548 arg7,arg8,arg9,arg10,arg11) \
2550 volatile OrigFn _orig = (orig); \
2551 volatile unsigned long _argvec[12]; \
2552 volatile unsigned long _res; \
2553 _argvec[0] = (unsigned long)_orig.nraddr; \
2554 _argvec[1] = (unsigned long)arg1; \
2555 _argvec[2] = (unsigned long)arg2; \
2556 _argvec[3] = (unsigned long)arg3; \
2557 _argvec[4] = (unsigned long)arg4; \
2558 _argvec[5] = (unsigned long)arg5; \
2559 _argvec[6] = (unsigned long)arg6; \
2560 _argvec[7] = (unsigned long)arg7; \
2561 _argvec[8] = (unsigned long)arg8; \
2562 _argvec[9] = (unsigned long)arg9; \
2563 _argvec[10] = (unsigned long)arg10; \
2564 _argvec[11] = (unsigned long)arg11; \
2566 VALGRIND_ALIGN_STACK \
2568 "addi 1,1,-32\n\t" \
2570 "lwz 3,44(11)\n\t" \
2573 "lwz 3,40(11)\n\t" \
2576 "lwz 3,36(11)\n\t" \
2579 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2581 "lwz 5,12(11)\n\t" \
2582 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2583 "lwz 7,20(11)\n\t" \
2584 "lwz 8,24(11)\n\t" \
2585 "lwz 9,28(11)\n\t" \
2586 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2587 "lwz 11,0(11)\n\t" /* target->r11 */ \
2588 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2589 VALGRIND_RESTORE_STACK \
2591 : /*out*/ "=r" (_res) \
2592 : /*in*/ "r" (&_argvec[0]) \
2593 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2595 lval = (__typeof__(lval)) _res; \
2598 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2599 arg7,arg8,arg9,arg10,arg11,arg12) \
2601 volatile OrigFn _orig = (orig); \
2602 volatile unsigned long _argvec[13]; \
2603 volatile unsigned long _res; \
2604 _argvec[0] = (unsigned long)_orig.nraddr; \
2605 _argvec[1] = (unsigned long)arg1; \
2606 _argvec[2] = (unsigned long)arg2; \
2607 _argvec[3] = (unsigned long)arg3; \
2608 _argvec[4] = (unsigned long)arg4; \
2609 _argvec[5] = (unsigned long)arg5; \
2610 _argvec[6] = (unsigned long)arg6; \
2611 _argvec[7] = (unsigned long)arg7; \
2612 _argvec[8] = (unsigned long)arg8; \
2613 _argvec[9] = (unsigned long)arg9; \
2614 _argvec[10] = (unsigned long)arg10; \
2615 _argvec[11] = (unsigned long)arg11; \
2616 _argvec[12] = (unsigned long)arg12; \
2618 VALGRIND_ALIGN_STACK \
2620 "addi 1,1,-32\n\t" \
2622 "lwz 3,48(11)\n\t" \
2625 "lwz 3,44(11)\n\t" \
2628 "lwz 3,40(11)\n\t" \
2631 "lwz 3,36(11)\n\t" \
2634 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2636 "lwz 5,12(11)\n\t" \
2637 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2638 "lwz 7,20(11)\n\t" \
2639 "lwz 8,24(11)\n\t" \
2640 "lwz 9,28(11)\n\t" \
2641 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2642 "lwz 11,0(11)\n\t" /* target->r11 */ \
2643 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2644 VALGRIND_RESTORE_STACK \
2646 : /*out*/ "=r" (_res) \
2647 : /*in*/ "r" (&_argvec[0]) \
2648 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2650 lval = (__typeof__(lval)) _res; \
#endif /* PLAT_ppc32_linux */

/* ------------------------ ppc64-linux ------------------------ */

#if defined(PLAT_ppc64be_linux)
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"                           \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */
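/* A note on the _argvec layout used by the macros below: _argvec[0] is
   scratch space in which the caller's r2 (TOC pointer) is saved across
   the call, _argvec[1] holds the target's own TOC pointer (taken from
   OrigFn.r2), _argvec[2] holds the target address, and _argvec[3]
   onwards hold the arguments.  The asm blocks are handed &_argvec[2],
   so the saved r2 sits at offset -16, the replacement TOC at offset -8,
   the target at offset 0 and arg1 at offset 8, matching the std/ld
   offsets used in the code. */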
2682 #define CALL_FN_W_v(lval, orig) \
2684 volatile OrigFn _orig = (orig); \
2685 volatile unsigned long _argvec[3+0]; \
2686 volatile unsigned long _res; \
2687 /* _argvec[0] holds current r2 across the call */ \
2688 _argvec[1] = (unsigned long)_orig.r2; \
2689 _argvec[2] = (unsigned long)_orig.nraddr; \
2691 VALGRIND_ALIGN_STACK \
2693 "std 2,-16(11)\n\t" /* save tocptr */ \
2694 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2695 "ld 11, 0(11)\n\t" /* target->r11 */ \
2696 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2699 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2700 VALGRIND_RESTORE_STACK \
2701 : /*out*/ "=r" (_res) \
2702 : /*in*/ "r" (&_argvec[2]) \
2703 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2705 lval = (__typeof__(lval)) _res; \
2708 #define CALL_FN_W_W(lval, orig, arg1) \
2710 volatile OrigFn _orig = (orig); \
2711 volatile unsigned long _argvec[3+1]; \
2712 volatile unsigned long _res; \
2713 /* _argvec[0] holds current r2 across the call */ \
2714 _argvec[1] = (unsigned long)_orig.r2; \
2715 _argvec[2] = (unsigned long)_orig.nraddr; \
2716 _argvec[2+1] = (unsigned long)arg1; \
2718 VALGRIND_ALIGN_STACK \
2720 "std 2,-16(11)\n\t" /* save tocptr */ \
2721 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2722 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2723 "ld 11, 0(11)\n\t" /* target->r11 */ \
2724 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2727 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2728 VALGRIND_RESTORE_STACK \
2729 : /*out*/ "=r" (_res) \
2730 : /*in*/ "r" (&_argvec[2]) \
2731 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2733 lval = (__typeof__(lval)) _res; \
2736 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2738 volatile OrigFn _orig = (orig); \
2739 volatile unsigned long _argvec[3+2]; \
2740 volatile unsigned long _res; \
2741 /* _argvec[0] holds current r2 across the call */ \
2742 _argvec[1] = (unsigned long)_orig.r2; \
2743 _argvec[2] = (unsigned long)_orig.nraddr; \
2744 _argvec[2+1] = (unsigned long)arg1; \
2745 _argvec[2+2] = (unsigned long)arg2; \
2747 VALGRIND_ALIGN_STACK \
2749 "std 2,-16(11)\n\t" /* save tocptr */ \
2750 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2751 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2752 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2753 "ld 11, 0(11)\n\t" /* target->r11 */ \
2754 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2757 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2758 VALGRIND_RESTORE_STACK \
2759 : /*out*/ "=r" (_res) \
2760 : /*in*/ "r" (&_argvec[2]) \
2761 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2763 lval = (__typeof__(lval)) _res; \
2766 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2768 volatile OrigFn _orig = (orig); \
2769 volatile unsigned long _argvec[3+3]; \
2770 volatile unsigned long _res; \
2771 /* _argvec[0] holds current r2 across the call */ \
2772 _argvec[1] = (unsigned long)_orig.r2; \
2773 _argvec[2] = (unsigned long)_orig.nraddr; \
2774 _argvec[2+1] = (unsigned long)arg1; \
2775 _argvec[2+2] = (unsigned long)arg2; \
2776 _argvec[2+3] = (unsigned long)arg3; \
2778 VALGRIND_ALIGN_STACK \
2780 "std 2,-16(11)\n\t" /* save tocptr */ \
2781 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2782 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2783 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2784 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2785 "ld 11, 0(11)\n\t" /* target->r11 */ \
2786 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2789 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2790 VALGRIND_RESTORE_STACK \
2791 : /*out*/ "=r" (_res) \
2792 : /*in*/ "r" (&_argvec[2]) \
2793 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2795 lval = (__typeof__(lval)) _res; \
2798 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2800 volatile OrigFn _orig = (orig); \
2801 volatile unsigned long _argvec[3+4]; \
2802 volatile unsigned long _res; \
2803 /* _argvec[0] holds current r2 across the call */ \
2804 _argvec[1] = (unsigned long)_orig.r2; \
2805 _argvec[2] = (unsigned long)_orig.nraddr; \
2806 _argvec[2+1] = (unsigned long)arg1; \
2807 _argvec[2+2] = (unsigned long)arg2; \
2808 _argvec[2+3] = (unsigned long)arg3; \
2809 _argvec[2+4] = (unsigned long)arg4; \
2811 VALGRIND_ALIGN_STACK \
2813 "std 2,-16(11)\n\t" /* save tocptr */ \
2814 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2815 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2816 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2817 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2818 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2819 "ld 11, 0(11)\n\t" /* target->r11 */ \
2820 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2823 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2824 VALGRIND_RESTORE_STACK \
2825 : /*out*/ "=r" (_res) \
2826 : /*in*/ "r" (&_argvec[2]) \
2827 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2829 lval = (__typeof__(lval)) _res; \
2832 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2834 volatile OrigFn _orig = (orig); \
2835 volatile unsigned long _argvec[3+5]; \
2836 volatile unsigned long _res; \
2837 /* _argvec[0] holds current r2 across the call */ \
2838 _argvec[1] = (unsigned long)_orig.r2; \
2839 _argvec[2] = (unsigned long)_orig.nraddr; \
2840 _argvec[2+1] = (unsigned long)arg1; \
2841 _argvec[2+2] = (unsigned long)arg2; \
2842 _argvec[2+3] = (unsigned long)arg3; \
2843 _argvec[2+4] = (unsigned long)arg4; \
2844 _argvec[2+5] = (unsigned long)arg5; \
2846 VALGRIND_ALIGN_STACK \
2848 "std 2,-16(11)\n\t" /* save tocptr */ \
2849 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2850 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2851 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2852 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2853 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2854 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2855 "ld 11, 0(11)\n\t" /* target->r11 */ \
2856 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2859 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2860 VALGRIND_RESTORE_STACK \
2861 : /*out*/ "=r" (_res) \
2862 : /*in*/ "r" (&_argvec[2]) \
2863 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2865 lval = (__typeof__(lval)) _res; \
2868 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2870 volatile OrigFn _orig = (orig); \
2871 volatile unsigned long _argvec[3+6]; \
2872 volatile unsigned long _res; \
2873 /* _argvec[0] holds current r2 across the call */ \
2874 _argvec[1] = (unsigned long)_orig.r2; \
2875 _argvec[2] = (unsigned long)_orig.nraddr; \
2876 _argvec[2+1] = (unsigned long)arg1; \
2877 _argvec[2+2] = (unsigned long)arg2; \
2878 _argvec[2+3] = (unsigned long)arg3; \
2879 _argvec[2+4] = (unsigned long)arg4; \
2880 _argvec[2+5] = (unsigned long)arg5; \
2881 _argvec[2+6] = (unsigned long)arg6; \
2883 VALGRIND_ALIGN_STACK \
2885 "std 2,-16(11)\n\t" /* save tocptr */ \
2886 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2887 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2888 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2889 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2890 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2891 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2892 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2893 "ld 11, 0(11)\n\t" /* target->r11 */ \
2894 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2897 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2898 VALGRIND_RESTORE_STACK \
2899 : /*out*/ "=r" (_res) \
2900 : /*in*/ "r" (&_argvec[2]) \
2901 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2903 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
2909 volatile OrigFn _orig = (orig); \
2910 volatile unsigned long _argvec[3+7]; \
2911 volatile unsigned long _res; \
2912 /* _argvec[0] holds current r2 across the call */ \
2913 _argvec[1] = (unsigned long)_orig.r2; \
2914 _argvec[2] = (unsigned long)_orig.nraddr; \
2915 _argvec[2+1] = (unsigned long)arg1; \
2916 _argvec[2+2] = (unsigned long)arg2; \
2917 _argvec[2+3] = (unsigned long)arg3; \
2918 _argvec[2+4] = (unsigned long)arg4; \
2919 _argvec[2+5] = (unsigned long)arg5; \
2920 _argvec[2+6] = (unsigned long)arg6; \
2921 _argvec[2+7] = (unsigned long)arg7; \
2923 VALGRIND_ALIGN_STACK \
2925 "std 2,-16(11)\n\t" /* save tocptr */ \
2926 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2927 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2928 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2929 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2930 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2931 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2932 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2933 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2934 "ld 11, 0(11)\n\t" /* target->r11 */ \
2935 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2938 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2939 VALGRIND_RESTORE_STACK \
2940 : /*out*/ "=r" (_res) \
2941 : /*in*/ "r" (&_argvec[2]) \
2942 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2944 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
2950 volatile OrigFn _orig = (orig); \
2951 volatile unsigned long _argvec[3+8]; \
2952 volatile unsigned long _res; \
2953 /* _argvec[0] holds current r2 across the call */ \
2954 _argvec[1] = (unsigned long)_orig.r2; \
2955 _argvec[2] = (unsigned long)_orig.nraddr; \
2956 _argvec[2+1] = (unsigned long)arg1; \
2957 _argvec[2+2] = (unsigned long)arg2; \
2958 _argvec[2+3] = (unsigned long)arg3; \
2959 _argvec[2+4] = (unsigned long)arg4; \
2960 _argvec[2+5] = (unsigned long)arg5; \
2961 _argvec[2+6] = (unsigned long)arg6; \
2962 _argvec[2+7] = (unsigned long)arg7; \
2963 _argvec[2+8] = (unsigned long)arg8; \
2965 VALGRIND_ALIGN_STACK \
2967 "std 2,-16(11)\n\t" /* save tocptr */ \
2968 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2969 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2970 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2971 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2972 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2973 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2974 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2975 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2976 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2977 "ld 11, 0(11)\n\t" /* target->r11 */ \
2978 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2981 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2982 VALGRIND_RESTORE_STACK \
2983 : /*out*/ "=r" (_res) \
2984 : /*in*/ "r" (&_argvec[2]) \
2985 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2987 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
2993 volatile OrigFn _orig = (orig); \
2994 volatile unsigned long _argvec[3+9]; \
2995 volatile unsigned long _res; \
2996 /* _argvec[0] holds current r2 across the call */ \
2997 _argvec[1] = (unsigned long)_orig.r2; \
2998 _argvec[2] = (unsigned long)_orig.nraddr; \
2999 _argvec[2+1] = (unsigned long)arg1; \
3000 _argvec[2+2] = (unsigned long)arg2; \
3001 _argvec[2+3] = (unsigned long)arg3; \
3002 _argvec[2+4] = (unsigned long)arg4; \
3003 _argvec[2+5] = (unsigned long)arg5; \
3004 _argvec[2+6] = (unsigned long)arg6; \
3005 _argvec[2+7] = (unsigned long)arg7; \
3006 _argvec[2+8] = (unsigned long)arg8; \
3007 _argvec[2+9] = (unsigned long)arg9; \
3009 VALGRIND_ALIGN_STACK \
3011 "std 2,-16(11)\n\t" /* save tocptr */ \
3012 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3013 "addi 1,1,-128\n\t" /* expand stack frame */ \
3016 "std 3,112(1)\n\t" \
3018 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3019 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3020 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3021 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3022 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3023 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3024 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3025 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3026 "ld 11, 0(11)\n\t" /* target->r11 */ \
3027 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3030 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3031 VALGRIND_RESTORE_STACK \
3032 : /*out*/ "=r" (_res) \
3033 : /*in*/ "r" (&_argvec[2]) \
3034 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3036 lval = (__typeof__(lval)) _res; \
3039 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3040 arg7,arg8,arg9,arg10) \
3042 volatile OrigFn _orig = (orig); \
3043 volatile unsigned long _argvec[3+10]; \
3044 volatile unsigned long _res; \
3045 /* _argvec[0] holds current r2 across the call */ \
3046 _argvec[1] = (unsigned long)_orig.r2; \
3047 _argvec[2] = (unsigned long)_orig.nraddr; \
3048 _argvec[2+1] = (unsigned long)arg1; \
3049 _argvec[2+2] = (unsigned long)arg2; \
3050 _argvec[2+3] = (unsigned long)arg3; \
3051 _argvec[2+4] = (unsigned long)arg4; \
3052 _argvec[2+5] = (unsigned long)arg5; \
3053 _argvec[2+6] = (unsigned long)arg6; \
3054 _argvec[2+7] = (unsigned long)arg7; \
3055 _argvec[2+8] = (unsigned long)arg8; \
3056 _argvec[2+9] = (unsigned long)arg9; \
3057 _argvec[2+10] = (unsigned long)arg10; \
3059 VALGRIND_ALIGN_STACK \
3061 "std 2,-16(11)\n\t" /* save tocptr */ \
3062 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3063 "addi 1,1,-128\n\t" /* expand stack frame */ \
3066 "std 3,120(1)\n\t" \
3069 "std 3,112(1)\n\t" \
3071 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3072 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3073 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3074 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3075 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3076 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3077 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3078 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3079 "ld 11, 0(11)\n\t" /* target->r11 */ \
3080 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3083 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3084 VALGRIND_RESTORE_STACK \
3085 : /*out*/ "=r" (_res) \
3086 : /*in*/ "r" (&_argvec[2]) \
3087 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3089 lval = (__typeof__(lval)) _res; \
3092 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3093 arg7,arg8,arg9,arg10,arg11) \
3095 volatile OrigFn _orig = (orig); \
3096 volatile unsigned long _argvec[3+11]; \
3097 volatile unsigned long _res; \
3098 /* _argvec[0] holds current r2 across the call */ \
3099 _argvec[1] = (unsigned long)_orig.r2; \
3100 _argvec[2] = (unsigned long)_orig.nraddr; \
3101 _argvec[2+1] = (unsigned long)arg1; \
3102 _argvec[2+2] = (unsigned long)arg2; \
3103 _argvec[2+3] = (unsigned long)arg3; \
3104 _argvec[2+4] = (unsigned long)arg4; \
3105 _argvec[2+5] = (unsigned long)arg5; \
3106 _argvec[2+6] = (unsigned long)arg6; \
3107 _argvec[2+7] = (unsigned long)arg7; \
3108 _argvec[2+8] = (unsigned long)arg8; \
3109 _argvec[2+9] = (unsigned long)arg9; \
3110 _argvec[2+10] = (unsigned long)arg10; \
3111 _argvec[2+11] = (unsigned long)arg11; \
3113 VALGRIND_ALIGN_STACK \
3115 "std 2,-16(11)\n\t" /* save tocptr */ \
3116 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3117 "addi 1,1,-144\n\t" /* expand stack frame */ \
3120 "std 3,128(1)\n\t" \
3123 "std 3,120(1)\n\t" \
3126 "std 3,112(1)\n\t" \
3128 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3129 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3130 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3131 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3132 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3133 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3134 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3135 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3136 "ld 11, 0(11)\n\t" /* target->r11 */ \
3137 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3140 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3141 VALGRIND_RESTORE_STACK \
3142 : /*out*/ "=r" (_res) \
3143 : /*in*/ "r" (&_argvec[2]) \
3144 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3146 lval = (__typeof__(lval)) _res; \
3149 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3150 arg7,arg8,arg9,arg10,arg11,arg12) \
3152 volatile OrigFn _orig = (orig); \
3153 volatile unsigned long _argvec[3+12]; \
3154 volatile unsigned long _res; \
3155 /* _argvec[0] holds current r2 across the call */ \
3156 _argvec[1] = (unsigned long)_orig.r2; \
3157 _argvec[2] = (unsigned long)_orig.nraddr; \
3158 _argvec[2+1] = (unsigned long)arg1; \
3159 _argvec[2+2] = (unsigned long)arg2; \
3160 _argvec[2+3] = (unsigned long)arg3; \
3161 _argvec[2+4] = (unsigned long)arg4; \
3162 _argvec[2+5] = (unsigned long)arg5; \
3163 _argvec[2+6] = (unsigned long)arg6; \
3164 _argvec[2+7] = (unsigned long)arg7; \
3165 _argvec[2+8] = (unsigned long)arg8; \
3166 _argvec[2+9] = (unsigned long)arg9; \
3167 _argvec[2+10] = (unsigned long)arg10; \
3168 _argvec[2+11] = (unsigned long)arg11; \
3169 _argvec[2+12] = (unsigned long)arg12; \
3171 VALGRIND_ALIGN_STACK \
3173 "std 2,-16(11)\n\t" /* save tocptr */ \
3174 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3175 "addi 1,1,-144\n\t" /* expand stack frame */ \
3178 "std 3,136(1)\n\t" \
3181 "std 3,128(1)\n\t" \
3184 "std 3,120(1)\n\t" \
3187 "std 3,112(1)\n\t" \
3189 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3190 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3191 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3192 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3193 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3194 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3195 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3196 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3197 "ld 11, 0(11)\n\t" /* target->r11 */ \
3198 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3201 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3202 VALGRIND_RESTORE_STACK \
3203 : /*out*/ "=r" (_res) \
3204 : /*in*/ "r" (&_argvec[2]) \
3205 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3207 lval = (__typeof__(lval)) _res; \
#endif /* PLAT_ppc64be_linux */

/* ------------------------- ppc64le-linux ----------------------- */
#if defined(PLAT_ppc64le_linux)
/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"                           \
   "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */
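/* Unlike the big-endian (ELFv1) code above, the variants below load the
   target address into r12 and branch via r12 rather than r11.  The
   ELFv2 ABI used on ppc64le expects r12 to hold the entry address of
   the called function, so that the callee's global entry point can
   compute its TOC pointer from it. */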
3238 #define CALL_FN_W_v(lval, orig) \
3240 volatile OrigFn _orig = (orig); \
3241 volatile unsigned long _argvec[3+0]; \
3242 volatile unsigned long _res; \
3243 /* _argvec[0] holds current r2 across the call */ \
3244 _argvec[1] = (unsigned long)_orig.r2; \
3245 _argvec[2] = (unsigned long)_orig.nraddr; \
3247 VALGRIND_ALIGN_STACK \
3249 "std 2,-16(12)\n\t" /* save tocptr */ \
3250 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3251 "ld 12, 0(12)\n\t" /* target->r12 */ \
3252 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3255 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3256 VALGRIND_RESTORE_STACK \
3257 : /*out*/ "=r" (_res) \
3258 : /*in*/ "r" (&_argvec[2]) \
3259 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3261 lval = (__typeof__(lval)) _res; \
3264 #define CALL_FN_W_W(lval, orig, arg1) \
3266 volatile OrigFn _orig = (orig); \
3267 volatile unsigned long _argvec[3+1]; \
3268 volatile unsigned long _res; \
3269 /* _argvec[0] holds current r2 across the call */ \
3270 _argvec[1] = (unsigned long)_orig.r2; \
3271 _argvec[2] = (unsigned long)_orig.nraddr; \
3272 _argvec[2+1] = (unsigned long)arg1; \
3274 VALGRIND_ALIGN_STACK \
3276 "std 2,-16(12)\n\t" /* save tocptr */ \
3277 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3278 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3279 "ld 12, 0(12)\n\t" /* target->r12 */ \
3280 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3283 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3284 VALGRIND_RESTORE_STACK \
3285 : /*out*/ "=r" (_res) \
3286 : /*in*/ "r" (&_argvec[2]) \
3287 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3289 lval = (__typeof__(lval)) _res; \
3292 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3294 volatile OrigFn _orig = (orig); \
3295 volatile unsigned long _argvec[3+2]; \
3296 volatile unsigned long _res; \
3297 /* _argvec[0] holds current r2 across the call */ \
3298 _argvec[1] = (unsigned long)_orig.r2; \
3299 _argvec[2] = (unsigned long)_orig.nraddr; \
3300 _argvec[2+1] = (unsigned long)arg1; \
3301 _argvec[2+2] = (unsigned long)arg2; \
3303 VALGRIND_ALIGN_STACK \
3305 "std 2,-16(12)\n\t" /* save tocptr */ \
3306 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3307 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3308 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3309 "ld 12, 0(12)\n\t" /* target->r12 */ \
3310 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3313 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3314 VALGRIND_RESTORE_STACK \
3315 : /*out*/ "=r" (_res) \
3316 : /*in*/ "r" (&_argvec[2]) \
3317 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3319 lval = (__typeof__(lval)) _res; \
3322 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3324 volatile OrigFn _orig = (orig); \
3325 volatile unsigned long _argvec[3+3]; \
3326 volatile unsigned long _res; \
3327 /* _argvec[0] holds current r2 across the call */ \
3328 _argvec[1] = (unsigned long)_orig.r2; \
3329 _argvec[2] = (unsigned long)_orig.nraddr; \
3330 _argvec[2+1] = (unsigned long)arg1; \
3331 _argvec[2+2] = (unsigned long)arg2; \
3332 _argvec[2+3] = (unsigned long)arg3; \
3334 VALGRIND_ALIGN_STACK \
3336 "std 2,-16(12)\n\t" /* save tocptr */ \
3337 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3338 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3339 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3340 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3341 "ld 12, 0(12)\n\t" /* target->r12 */ \
3342 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3345 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3346 VALGRIND_RESTORE_STACK \
3347 : /*out*/ "=r" (_res) \
3348 : /*in*/ "r" (&_argvec[2]) \
3349 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3351 lval = (__typeof__(lval)) _res; \
3354 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3356 volatile OrigFn _orig = (orig); \
3357 volatile unsigned long _argvec[3+4]; \
3358 volatile unsigned long _res; \
3359 /* _argvec[0] holds current r2 across the call */ \
3360 _argvec[1] = (unsigned long)_orig.r2; \
3361 _argvec[2] = (unsigned long)_orig.nraddr; \
3362 _argvec[2+1] = (unsigned long)arg1; \
3363 _argvec[2+2] = (unsigned long)arg2; \
3364 _argvec[2+3] = (unsigned long)arg3; \
3365 _argvec[2+4] = (unsigned long)arg4; \
3367 VALGRIND_ALIGN_STACK \
3369 "std 2,-16(12)\n\t" /* save tocptr */ \
3370 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3371 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3372 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3373 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3374 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3375 "ld 12, 0(12)\n\t" /* target->r12 */ \
3376 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3379 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3380 VALGRIND_RESTORE_STACK \
3381 : /*out*/ "=r" (_res) \
3382 : /*in*/ "r" (&_argvec[2]) \
3383 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3385 lval = (__typeof__(lval)) _res; \
3388 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3390 volatile OrigFn _orig = (orig); \
3391 volatile unsigned long _argvec[3+5]; \
3392 volatile unsigned long _res; \
3393 /* _argvec[0] holds current r2 across the call */ \
3394 _argvec[1] = (unsigned long)_orig.r2; \
3395 _argvec[2] = (unsigned long)_orig.nraddr; \
3396 _argvec[2+1] = (unsigned long)arg1; \
3397 _argvec[2+2] = (unsigned long)arg2; \
3398 _argvec[2+3] = (unsigned long)arg3; \
3399 _argvec[2+4] = (unsigned long)arg4; \
3400 _argvec[2+5] = (unsigned long)arg5; \
3402 VALGRIND_ALIGN_STACK \
3404 "std 2,-16(12)\n\t" /* save tocptr */ \
3405 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3406 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3407 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3408 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3409 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3410 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3411 "ld 12, 0(12)\n\t" /* target->r12 */ \
3412 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3415 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3416 VALGRIND_RESTORE_STACK \
3417 : /*out*/ "=r" (_res) \
3418 : /*in*/ "r" (&_argvec[2]) \
3419 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3421 lval = (__typeof__(lval)) _res; \
3424 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3426 volatile OrigFn _orig = (orig); \
3427 volatile unsigned long _argvec[3+6]; \
3428 volatile unsigned long _res; \
3429 /* _argvec[0] holds current r2 across the call */ \
3430 _argvec[1] = (unsigned long)_orig.r2; \
3431 _argvec[2] = (unsigned long)_orig.nraddr; \
3432 _argvec[2+1] = (unsigned long)arg1; \
3433 _argvec[2+2] = (unsigned long)arg2; \
3434 _argvec[2+3] = (unsigned long)arg3; \
3435 _argvec[2+4] = (unsigned long)arg4; \
3436 _argvec[2+5] = (unsigned long)arg5; \
3437 _argvec[2+6] = (unsigned long)arg6; \
3439 VALGRIND_ALIGN_STACK \
3441 "std 2,-16(12)\n\t" /* save tocptr */ \
3442 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3443 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3444 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3445 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3446 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3447 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3448 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3449 "ld 12, 0(12)\n\t" /* target->r12 */ \
3450 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3453 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3454 VALGRIND_RESTORE_STACK \
3455 : /*out*/ "=r" (_res) \
3456 : /*in*/ "r" (&_argvec[2]) \
3457 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3459 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
3465 volatile OrigFn _orig = (orig); \
3466 volatile unsigned long _argvec[3+7]; \
3467 volatile unsigned long _res; \
3468 /* _argvec[0] holds current r2 across the call */ \
3469 _argvec[1] = (unsigned long)_orig.r2; \
3470 _argvec[2] = (unsigned long)_orig.nraddr; \
3471 _argvec[2+1] = (unsigned long)arg1; \
3472 _argvec[2+2] = (unsigned long)arg2; \
3473 _argvec[2+3] = (unsigned long)arg3; \
3474 _argvec[2+4] = (unsigned long)arg4; \
3475 _argvec[2+5] = (unsigned long)arg5; \
3476 _argvec[2+6] = (unsigned long)arg6; \
3477 _argvec[2+7] = (unsigned long)arg7; \
3479 VALGRIND_ALIGN_STACK \
3481 "std 2,-16(12)\n\t" /* save tocptr */ \
3482 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3483 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3484 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3485 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3486 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3487 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3488 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3489 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3490 "ld 12, 0(12)\n\t" /* target->r12 */ \
3491 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3494 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3495 VALGRIND_RESTORE_STACK \
3496 : /*out*/ "=r" (_res) \
3497 : /*in*/ "r" (&_argvec[2]) \
3498 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3500 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
3506 volatile OrigFn _orig = (orig); \
3507 volatile unsigned long _argvec[3+8]; \
3508 volatile unsigned long _res; \
3509 /* _argvec[0] holds current r2 across the call */ \
3510 _argvec[1] = (unsigned long)_orig.r2; \
3511 _argvec[2] = (unsigned long)_orig.nraddr; \
3512 _argvec[2+1] = (unsigned long)arg1; \
3513 _argvec[2+2] = (unsigned long)arg2; \
3514 _argvec[2+3] = (unsigned long)arg3; \
3515 _argvec[2+4] = (unsigned long)arg4; \
3516 _argvec[2+5] = (unsigned long)arg5; \
3517 _argvec[2+6] = (unsigned long)arg6; \
3518 _argvec[2+7] = (unsigned long)arg7; \
3519 _argvec[2+8] = (unsigned long)arg8; \
3521 VALGRIND_ALIGN_STACK \
3523 "std 2,-16(12)\n\t" /* save tocptr */ \
3524 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3525 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3526 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3527 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3528 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3529 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3530 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3531 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3532 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3533 "ld 12, 0(12)\n\t" /* target->r12 */ \
3534 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3537 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3538 VALGRIND_RESTORE_STACK \
3539 : /*out*/ "=r" (_res) \
3540 : /*in*/ "r" (&_argvec[2]) \
3541 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3543 lval = (__typeof__(lval)) _res; \
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
3549 volatile OrigFn _orig = (orig); \
3550 volatile unsigned long _argvec[3+9]; \
3551 volatile unsigned long _res; \
3552 /* _argvec[0] holds current r2 across the call */ \
3553 _argvec[1] = (unsigned long)_orig.r2; \
3554 _argvec[2] = (unsigned long)_orig.nraddr; \
3555 _argvec[2+1] = (unsigned long)arg1; \
3556 _argvec[2+2] = (unsigned long)arg2; \
3557 _argvec[2+3] = (unsigned long)arg3; \
3558 _argvec[2+4] = (unsigned long)arg4; \
3559 _argvec[2+5] = (unsigned long)arg5; \
3560 _argvec[2+6] = (unsigned long)arg6; \
3561 _argvec[2+7] = (unsigned long)arg7; \
3562 _argvec[2+8] = (unsigned long)arg8; \
3563 _argvec[2+9] = (unsigned long)arg9; \
3565 VALGRIND_ALIGN_STACK \
3567 "std 2,-16(12)\n\t" /* save tocptr */ \
3568 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3569 "addi 1,1,-128\n\t" /* expand stack frame */ \
3574 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3575 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3576 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3577 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3578 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3579 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3580 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3581 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3582 "ld 12, 0(12)\n\t" /* target->r12 */ \
3583 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3586 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3587 VALGRIND_RESTORE_STACK \
3588 : /*out*/ "=r" (_res) \
3589 : /*in*/ "r" (&_argvec[2]) \
3590 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3592 lval = (__typeof__(lval)) _res; \
3595 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3596 arg7,arg8,arg9,arg10) \
3598 volatile OrigFn _orig = (orig); \
3599 volatile unsigned long _argvec[3+10]; \
3600 volatile unsigned long _res; \
3601 /* _argvec[0] holds current r2 across the call */ \
3602 _argvec[1] = (unsigned long)_orig.r2; \
3603 _argvec[2] = (unsigned long)_orig.nraddr; \
3604 _argvec[2+1] = (unsigned long)arg1; \
3605 _argvec[2+2] = (unsigned long)arg2; \
3606 _argvec[2+3] = (unsigned long)arg3; \
3607 _argvec[2+4] = (unsigned long)arg4; \
3608 _argvec[2+5] = (unsigned long)arg5; \
3609 _argvec[2+6] = (unsigned long)arg6; \
3610 _argvec[2+7] = (unsigned long)arg7; \
3611 _argvec[2+8] = (unsigned long)arg8; \
3612 _argvec[2+9] = (unsigned long)arg9; \
3613 _argvec[2+10] = (unsigned long)arg10; \
3615 VALGRIND_ALIGN_STACK \
3617 "std 2,-16(12)\n\t" /* save tocptr */ \
3618 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3619 "addi 1,1,-128\n\t" /* expand stack frame */ \
3622 "std 3,104(1)\n\t" \
3627 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3628 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3629 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3630 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3631 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3632 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3633 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3634 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3635 "ld 12, 0(12)\n\t" /* target->r12 */ \
3636 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3639 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3640 VALGRIND_RESTORE_STACK \
3641 : /*out*/ "=r" (_res) \
3642 : /*in*/ "r" (&_argvec[2]) \
3643 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3645 lval = (__typeof__(lval)) _res; \
3648 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3649 arg7,arg8,arg9,arg10,arg11) \
3651 volatile OrigFn _orig = (orig); \
3652 volatile unsigned long _argvec[3+11]; \
3653 volatile unsigned long _res; \
3654 /* _argvec[0] holds current r2 across the call */ \
3655 _argvec[1] = (unsigned long)_orig.r2; \
3656 _argvec[2] = (unsigned long)_orig.nraddr; \
3657 _argvec[2+1] = (unsigned long)arg1; \
3658 _argvec[2+2] = (unsigned long)arg2; \
3659 _argvec[2+3] = (unsigned long)arg3; \
3660 _argvec[2+4] = (unsigned long)arg4; \
3661 _argvec[2+5] = (unsigned long)arg5; \
3662 _argvec[2+6] = (unsigned long)arg6; \
3663 _argvec[2+7] = (unsigned long)arg7; \
3664 _argvec[2+8] = (unsigned long)arg8; \
3665 _argvec[2+9] = (unsigned long)arg9; \
3666 _argvec[2+10] = (unsigned long)arg10; \
3667 _argvec[2+11] = (unsigned long)arg11; \
3669 VALGRIND_ALIGN_STACK \
3671 "std 2,-16(12)\n\t" /* save tocptr */ \
3672 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3673 "addi 1,1,-144\n\t" /* expand stack frame */ \
3676 "std 3,112(1)\n\t" \
3679 "std 3,104(1)\n\t" \
3684 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3685 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3686 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3687 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3688 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3689 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3690 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3691 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3692 "ld 12, 0(12)\n\t" /* target->r12 */ \
3693 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3696 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3697 VALGRIND_RESTORE_STACK \
3698 : /*out*/ "=r" (_res) \
3699 : /*in*/ "r" (&_argvec[2]) \
3700 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3702 lval = (__typeof__(lval)) _res; \
3705 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3706 arg7,arg8,arg9,arg10,arg11,arg12) \
3708 volatile OrigFn _orig = (orig); \
3709 volatile unsigned long _argvec[3+12]; \
3710 volatile unsigned long _res; \
3711 /* _argvec[0] holds current r2 across the call */ \
3712 _argvec[1] = (unsigned long)_orig.r2; \
3713 _argvec[2] = (unsigned long)_orig.nraddr; \
3714 _argvec[2+1] = (unsigned long)arg1; \
3715 _argvec[2+2] = (unsigned long)arg2; \
3716 _argvec[2+3] = (unsigned long)arg3; \
3717 _argvec[2+4] = (unsigned long)arg4; \
3718 _argvec[2+5] = (unsigned long)arg5; \
3719 _argvec[2+6] = (unsigned long)arg6; \
3720 _argvec[2+7] = (unsigned long)arg7; \
3721 _argvec[2+8] = (unsigned long)arg8; \
3722 _argvec[2+9] = (unsigned long)arg9; \
3723 _argvec[2+10] = (unsigned long)arg10; \
3724 _argvec[2+11] = (unsigned long)arg11; \
3725 _argvec[2+12] = (unsigned long)arg12; \
3727 VALGRIND_ALIGN_STACK \
3729 "std 2,-16(12)\n\t" /* save tocptr */ \
3730 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3731 "addi 1,1,-144\n\t" /* expand stack frame */ \
3734 "std 3,120(1)\n\t" \
3737 "std 3,112(1)\n\t" \
3740 "std 3,104(1)\n\t" \
3745 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3746 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3747 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3748 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3749 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3750 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3751 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3752 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3753 "ld 12, 0(12)\n\t" /* target->r12 */ \
3754 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3757 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3758 VALGRIND_RESTORE_STACK \
3759 : /*out*/ "=r" (_res) \
3760 : /*in*/ "r" (&_argvec[2]) \
3761 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3763 lval = (__typeof__(lval)) _res; \
3766 #endif /* PLAT_ppc64le_linux */
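/* Illustrative sketch (not tied to any particular platform): the
   CALL_FN_W_* macros defined in each platform section are normally
   used from inside a function wrapper.  For a hypothetical
   two-argument function "foo" the pattern is roughly:

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x, int y )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_WW(result, fn, x, y);
         return result;
      }

   using the OrigFn type and the VALGRIND_GET_ORIG_FN and
   I_WRAP_SONAME_FNNAME_ZU macros declared elsewhere in this file.
   The result is produced as an unsigned-long-sized value and cast
   back to the declared type of lval. */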
3768 /* ------------------------- arm-linux ------------------------- */
3770 #if defined(PLAT_arm_linux)
3772 /* These regs are trashed by the hidden call. */
3773 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3775 /* Macros to save and align the stack before making a function
3776 call and restore it afterwards as gcc may not keep the stack
3777 pointer aligned if it doesn't realise calls are being made
3778 to other functions. */
3780 /* This is a bit tricky. We store the original stack pointer in r10
3781 as it is callee-saves. gcc doesn't allow the use of r11 for some
3782 reason. Also, we can't directly "bic" the stack pointer in thumb
3783 mode since r13 isn't an allowed register number in that context.
3784 So use r4 as a temporary, since that is about to get trashed
3785 anyway, just after each use of this macro. Side effect is we need
3786 to be very careful about any future changes, since
3787 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4,  sp\n\t" \
   "bic r4,  r4, #7\n\t" \
   "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp,  r10\n\t"
/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4. */
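/* Illustrative note on the pattern used below: _argvec[0] holds the
   target address and _argvec[1..N] hold the arguments.  Args 1..4 are
   loaded into r0..r3; any further args are pushed onto the stack, with
   an extra "sub sp, sp, #4" whenever an odd number of words is pushed
   so that sp stays 8-byte aligned across the call.  For example,
   CALL_FN_W_5W pushes a single word (arg5) and therefore needs the
   padding adjustment, whereas CALL_FN_W_6W pushes two (arg5, arg6)
   and does not. */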
3799 #define CALL_FN_W_v(lval, orig) \
3801 volatile OrigFn _orig = (orig); \
3802 volatile unsigned long _argvec[1]; \
3803 volatile unsigned long _res; \
3804 _argvec[0] = (unsigned long)_orig.nraddr; \
3806 VALGRIND_ALIGN_STACK \
3807 "ldr r4, [%1] \n\t" /* target->r4 */ \
3808 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3809 VALGRIND_RESTORE_STACK \
3811 : /*out*/ "=r" (_res) \
3812 : /*in*/ "0" (&_argvec[0]) \
3813 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3815 lval = (__typeof__(lval)) _res; \
3818 #define CALL_FN_W_W(lval, orig, arg1) \
3820 volatile OrigFn _orig = (orig); \
3821 volatile unsigned long _argvec[2]; \
3822 volatile unsigned long _res; \
3823 _argvec[0] = (unsigned long)_orig.nraddr; \
3824 _argvec[1] = (unsigned long)(arg1); \
3826 VALGRIND_ALIGN_STACK \
3827 "ldr r0, [%1, #4] \n\t" \
3828 "ldr r4, [%1] \n\t" /* target->r4 */ \
3829 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3830 VALGRIND_RESTORE_STACK \
3832 : /*out*/ "=r" (_res) \
3833 : /*in*/ "0" (&_argvec[0]) \
3834 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3836 lval = (__typeof__(lval)) _res; \
3839 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3841 volatile OrigFn _orig = (orig); \
3842 volatile unsigned long _argvec[3]; \
3843 volatile unsigned long _res; \
3844 _argvec[0] = (unsigned long)_orig.nraddr; \
3845 _argvec[1] = (unsigned long)(arg1); \
3846 _argvec[2] = (unsigned long)(arg2); \
3848 VALGRIND_ALIGN_STACK \
3849 "ldr r0, [%1, #4] \n\t" \
3850 "ldr r1, [%1, #8] \n\t" \
3851 "ldr r4, [%1] \n\t" /* target->r4 */ \
3852 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3853 VALGRIND_RESTORE_STACK \
3855 : /*out*/ "=r" (_res) \
3856 : /*in*/ "0" (&_argvec[0]) \
3857 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3859 lval = (__typeof__(lval)) _res; \
3862 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3864 volatile OrigFn _orig = (orig); \
3865 volatile unsigned long _argvec[4]; \
3866 volatile unsigned long _res; \
3867 _argvec[0] = (unsigned long)_orig.nraddr; \
3868 _argvec[1] = (unsigned long)(arg1); \
3869 _argvec[2] = (unsigned long)(arg2); \
3870 _argvec[3] = (unsigned long)(arg3); \
3872 VALGRIND_ALIGN_STACK \
3873 "ldr r0, [%1, #4] \n\t" \
3874 "ldr r1, [%1, #8] \n\t" \
3875 "ldr r2, [%1, #12] \n\t" \
3876 "ldr r4, [%1] \n\t" /* target->r4 */ \
3877 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3878 VALGRIND_RESTORE_STACK \
3880 : /*out*/ "=r" (_res) \
3881 : /*in*/ "0" (&_argvec[0]) \
3882 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3884 lval = (__typeof__(lval)) _res; \
3887 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3889 volatile OrigFn _orig = (orig); \
3890 volatile unsigned long _argvec[5]; \
3891 volatile unsigned long _res; \
3892 _argvec[0] = (unsigned long)_orig.nraddr; \
3893 _argvec[1] = (unsigned long)(arg1); \
3894 _argvec[2] = (unsigned long)(arg2); \
3895 _argvec[3] = (unsigned long)(arg3); \
3896 _argvec[4] = (unsigned long)(arg4); \
3898 VALGRIND_ALIGN_STACK \
3899 "ldr r0, [%1, #4] \n\t" \
3900 "ldr r1, [%1, #8] \n\t" \
3901 "ldr r2, [%1, #12] \n\t" \
3902 "ldr r3, [%1, #16] \n\t" \
3903 "ldr r4, [%1] \n\t" /* target->r4 */ \
3904 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3905 VALGRIND_RESTORE_STACK \
3907 : /*out*/ "=r" (_res) \
3908 : /*in*/ "0" (&_argvec[0]) \
3909 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3911 lval = (__typeof__(lval)) _res; \
3914 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3916 volatile OrigFn _orig = (orig); \
3917 volatile unsigned long _argvec[6]; \
3918 volatile unsigned long _res; \
3919 _argvec[0] = (unsigned long)_orig.nraddr; \
3920 _argvec[1] = (unsigned long)(arg1); \
3921 _argvec[2] = (unsigned long)(arg2); \
3922 _argvec[3] = (unsigned long)(arg3); \
3923 _argvec[4] = (unsigned long)(arg4); \
3924 _argvec[5] = (unsigned long)(arg5); \
3926 VALGRIND_ALIGN_STACK \
3927 "sub sp, sp, #4 \n\t" \
3928 "ldr r0, [%1, #20] \n\t" \
3930 "ldr r0, [%1, #4] \n\t" \
3931 "ldr r1, [%1, #8] \n\t" \
3932 "ldr r2, [%1, #12] \n\t" \
3933 "ldr r3, [%1, #16] \n\t" \
3934 "ldr r4, [%1] \n\t" /* target->r4 */ \
3935 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3936 VALGRIND_RESTORE_STACK \
3938 : /*out*/ "=r" (_res) \
3939 : /*in*/ "0" (&_argvec[0]) \
3940 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3942 lval = (__typeof__(lval)) _res; \
3945 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3947 volatile OrigFn _orig = (orig); \
3948 volatile unsigned long _argvec[7]; \
3949 volatile unsigned long _res; \
3950 _argvec[0] = (unsigned long)_orig.nraddr; \
3951 _argvec[1] = (unsigned long)(arg1); \
3952 _argvec[2] = (unsigned long)(arg2); \
3953 _argvec[3] = (unsigned long)(arg3); \
3954 _argvec[4] = (unsigned long)(arg4); \
3955 _argvec[5] = (unsigned long)(arg5); \
3956 _argvec[6] = (unsigned long)(arg6); \
3958 VALGRIND_ALIGN_STACK \
3959 "ldr r0, [%1, #20] \n\t" \
3960 "ldr r1, [%1, #24] \n\t" \
3961 "push {r0, r1} \n\t" \
3962 "ldr r0, [%1, #4] \n\t" \
3963 "ldr r1, [%1, #8] \n\t" \
3964 "ldr r2, [%1, #12] \n\t" \
3965 "ldr r3, [%1, #16] \n\t" \
3966 "ldr r4, [%1] \n\t" /* target->r4 */ \
3967 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3968 VALGRIND_RESTORE_STACK \
3970 : /*out*/ "=r" (_res) \
3971 : /*in*/ "0" (&_argvec[0]) \
3972 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3974 lval = (__typeof__(lval)) _res; \
3977 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3980 volatile OrigFn _orig = (orig); \
3981 volatile unsigned long _argvec[8]; \
3982 volatile unsigned long _res; \
3983 _argvec[0] = (unsigned long)_orig.nraddr; \
3984 _argvec[1] = (unsigned long)(arg1); \
3985 _argvec[2] = (unsigned long)(arg2); \
3986 _argvec[3] = (unsigned long)(arg3); \
3987 _argvec[4] = (unsigned long)(arg4); \
3988 _argvec[5] = (unsigned long)(arg5); \
3989 _argvec[6] = (unsigned long)(arg6); \
3990 _argvec[7] = (unsigned long)(arg7); \
3992 VALGRIND_ALIGN_STACK \
3993 "sub sp, sp, #4 \n\t" \
3994 "ldr r0, [%1, #20] \n\t" \
3995 "ldr r1, [%1, #24] \n\t" \
3996 "ldr r2, [%1, #28] \n\t" \
3997 "push {r0, r1, r2} \n\t" \
3998 "ldr r0, [%1, #4] \n\t" \
3999 "ldr r1, [%1, #8] \n\t" \
4000 "ldr r2, [%1, #12] \n\t" \
4001 "ldr r3, [%1, #16] \n\t" \
4002 "ldr r4, [%1] \n\t" /* target->r4 */ \
4003 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4004 VALGRIND_RESTORE_STACK \
4006 : /*out*/ "=r" (_res) \
4007 : /*in*/ "0" (&_argvec[0]) \
4008 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4010 lval = (__typeof__(lval)) _res; \
4013 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4016 volatile OrigFn _orig = (orig); \
4017 volatile unsigned long _argvec[9]; \
4018 volatile unsigned long _res; \
4019 _argvec[0] = (unsigned long)_orig.nraddr; \
4020 _argvec[1] = (unsigned long)(arg1); \
4021 _argvec[2] = (unsigned long)(arg2); \
4022 _argvec[3] = (unsigned long)(arg3); \
4023 _argvec[4] = (unsigned long)(arg4); \
4024 _argvec[5] = (unsigned long)(arg5); \
4025 _argvec[6] = (unsigned long)(arg6); \
4026 _argvec[7] = (unsigned long)(arg7); \
4027 _argvec[8] = (unsigned long)(arg8); \
4029 VALGRIND_ALIGN_STACK \
4030 "ldr r0, [%1, #20] \n\t" \
4031 "ldr r1, [%1, #24] \n\t" \
4032 "ldr r2, [%1, #28] \n\t" \
4033 "ldr r3, [%1, #32] \n\t" \
4034 "push {r0, r1, r2, r3} \n\t" \
4035 "ldr r0, [%1, #4] \n\t" \
4036 "ldr r1, [%1, #8] \n\t" \
4037 "ldr r2, [%1, #12] \n\t" \
4038 "ldr r3, [%1, #16] \n\t" \
4039 "ldr r4, [%1] \n\t" /* target->r4 */ \
4040 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4041 VALGRIND_RESTORE_STACK \
4043 : /*out*/ "=r" (_res) \
4044 : /*in*/ "0" (&_argvec[0]) \
4045 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4047 lval = (__typeof__(lval)) _res; \
4050 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4053 volatile OrigFn _orig = (orig); \
4054 volatile unsigned long _argvec[10]; \
4055 volatile unsigned long _res; \
4056 _argvec[0] = (unsigned long)_orig.nraddr; \
4057 _argvec[1] = (unsigned long)(arg1); \
4058 _argvec[2] = (unsigned long)(arg2); \
4059 _argvec[3] = (unsigned long)(arg3); \
4060 _argvec[4] = (unsigned long)(arg4); \
4061 _argvec[5] = (unsigned long)(arg5); \
4062 _argvec[6] = (unsigned long)(arg6); \
4063 _argvec[7] = (unsigned long)(arg7); \
4064 _argvec[8] = (unsigned long)(arg8); \
4065 _argvec[9] = (unsigned long)(arg9); \
4067 VALGRIND_ALIGN_STACK \
4068 "sub sp, sp, #4 \n\t" \
4069 "ldr r0, [%1, #20] \n\t" \
4070 "ldr r1, [%1, #24] \n\t" \
4071 "ldr r2, [%1, #28] \n\t" \
4072 "ldr r3, [%1, #32] \n\t" \
4073 "ldr r4, [%1, #36] \n\t" \
4074 "push {r0, r1, r2, r3, r4} \n\t" \
4075 "ldr r0, [%1, #4] \n\t" \
4076 "ldr r1, [%1, #8] \n\t" \
4077 "ldr r2, [%1, #12] \n\t" \
4078 "ldr r3, [%1, #16] \n\t" \
4079 "ldr r4, [%1] \n\t" /* target->r4 */ \
4080 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4081 VALGRIND_RESTORE_STACK \
4083 : /*out*/ "=r" (_res) \
4084 : /*in*/ "0" (&_argvec[0]) \
4085 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4087 lval = (__typeof__(lval)) _res; \
4090 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4091 arg7,arg8,arg9,arg10) \
4093 volatile OrigFn _orig = (orig); \
4094 volatile unsigned long _argvec[11]; \
4095 volatile unsigned long _res; \
4096 _argvec[0] = (unsigned long)_orig.nraddr; \
4097 _argvec[1] = (unsigned long)(arg1); \
4098 _argvec[2] = (unsigned long)(arg2); \
4099 _argvec[3] = (unsigned long)(arg3); \
4100 _argvec[4] = (unsigned long)(arg4); \
4101 _argvec[5] = (unsigned long)(arg5); \
4102 _argvec[6] = (unsigned long)(arg6); \
4103 _argvec[7] = (unsigned long)(arg7); \
4104 _argvec[8] = (unsigned long)(arg8); \
4105 _argvec[9] = (unsigned long)(arg9); \
4106 _argvec[10] = (unsigned long)(arg10); \
4108 VALGRIND_ALIGN_STACK \
4109 "ldr r0, [%1, #40] \n\t" \
4111 "ldr r0, [%1, #20] \n\t" \
4112 "ldr r1, [%1, #24] \n\t" \
4113 "ldr r2, [%1, #28] \n\t" \
4114 "ldr r3, [%1, #32] \n\t" \
4115 "ldr r4, [%1, #36] \n\t" \
4116 "push {r0, r1, r2, r3, r4} \n\t" \
4117 "ldr r0, [%1, #4] \n\t" \
4118 "ldr r1, [%1, #8] \n\t" \
4119 "ldr r2, [%1, #12] \n\t" \
4120 "ldr r3, [%1, #16] \n\t" \
4121 "ldr r4, [%1] \n\t" /* target->r4 */ \
4122 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4123 VALGRIND_RESTORE_STACK \
4125 : /*out*/ "=r" (_res) \
4126 : /*in*/ "0" (&_argvec[0]) \
4127 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4129 lval = (__typeof__(lval)) _res; \
4132 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4133 arg6,arg7,arg8,arg9,arg10, \
4136 volatile OrigFn _orig = (orig); \
4137 volatile unsigned long _argvec[12]; \
4138 volatile unsigned long _res; \
4139 _argvec[0] = (unsigned long)_orig.nraddr; \
4140 _argvec[1] = (unsigned long)(arg1); \
4141 _argvec[2] = (unsigned long)(arg2); \
4142 _argvec[3] = (unsigned long)(arg3); \
4143 _argvec[4] = (unsigned long)(arg4); \
4144 _argvec[5] = (unsigned long)(arg5); \
4145 _argvec[6] = (unsigned long)(arg6); \
4146 _argvec[7] = (unsigned long)(arg7); \
4147 _argvec[8] = (unsigned long)(arg8); \
4148 _argvec[9] = (unsigned long)(arg9); \
4149 _argvec[10] = (unsigned long)(arg10); \
4150 _argvec[11] = (unsigned long)(arg11); \
4152 VALGRIND_ALIGN_STACK \
4153 "sub sp, sp, #4 \n\t" \
4154 "ldr r0, [%1, #40] \n\t" \
4155 "ldr r1, [%1, #44] \n\t" \
4156 "push {r0, r1} \n\t" \
4157 "ldr r0, [%1, #20] \n\t" \
4158 "ldr r1, [%1, #24] \n\t" \
4159 "ldr r2, [%1, #28] \n\t" \
4160 "ldr r3, [%1, #32] \n\t" \
4161 "ldr r4, [%1, #36] \n\t" \
4162 "push {r0, r1, r2, r3, r4} \n\t" \
4163 "ldr r0, [%1, #4] \n\t" \
4164 "ldr r1, [%1, #8] \n\t" \
4165 "ldr r2, [%1, #12] \n\t" \
4166 "ldr r3, [%1, #16] \n\t" \
4167 "ldr r4, [%1] \n\t" /* target->r4 */ \
4168 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4169 VALGRIND_RESTORE_STACK \
4171 : /*out*/ "=r" (_res) \
4172 : /*in*/ "0" (&_argvec[0]) \
4173 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4175 lval = (__typeof__(lval)) _res; \
4178 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4179 arg6,arg7,arg8,arg9,arg10, \
4182 volatile OrigFn _orig = (orig); \
4183 volatile unsigned long _argvec[13]; \
4184 volatile unsigned long _res; \
4185 _argvec[0] = (unsigned long)_orig.nraddr; \
4186 _argvec[1] = (unsigned long)(arg1); \
4187 _argvec[2] = (unsigned long)(arg2); \
4188 _argvec[3] = (unsigned long)(arg3); \
4189 _argvec[4] = (unsigned long)(arg4); \
4190 _argvec[5] = (unsigned long)(arg5); \
4191 _argvec[6] = (unsigned long)(arg6); \
4192 _argvec[7] = (unsigned long)(arg7); \
4193 _argvec[8] = (unsigned long)(arg8); \
4194 _argvec[9] = (unsigned long)(arg9); \
4195 _argvec[10] = (unsigned long)(arg10); \
4196 _argvec[11] = (unsigned long)(arg11); \
4197 _argvec[12] = (unsigned long)(arg12); \
4199 VALGRIND_ALIGN_STACK \
4200 "ldr r0, [%1, #40] \n\t" \
4201 "ldr r1, [%1, #44] \n\t" \
4202 "ldr r2, [%1, #48] \n\t" \
4203 "push {r0, r1, r2} \n\t" \
4204 "ldr r0, [%1, #20] \n\t" \
4205 "ldr r1, [%1, #24] \n\t" \
4206 "ldr r2, [%1, #28] \n\t" \
4207 "ldr r3, [%1, #32] \n\t" \
4208 "ldr r4, [%1, #36] \n\t" \
4209 "push {r0, r1, r2, r3, r4} \n\t" \
4210 "ldr r0, [%1, #4] \n\t" \
4211 "ldr r1, [%1, #8] \n\t" \
4212 "ldr r2, [%1, #12] \n\t" \
4213 "ldr r3, [%1, #16] \n\t" \
4214 "ldr r4, [%1] \n\t" /* target->r4 */ \
4215 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4216 VALGRIND_RESTORE_STACK \
4218 : /*out*/ "=r" (_res) \
4219 : /*in*/ "0" (&_argvec[0]) \
4220 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4222 lval = (__typeof__(lval)) _res; \
4225 #endif /* PLAT_arm_linux */
4227 /* ------------------------ arm64-linux ------------------------ */
4229 #if defined(PLAT_arm64_linux)
4231 /* These regs are trashed by the hidden call. */
4232 #define __CALLER_SAVED_REGS \
4233 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4234 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4235 "x18", "x19", "x20", "x30", \
4236 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4237 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4238 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4239 "v26", "v27", "v28", "v29", "v30", "v31"
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp,  x21\n\t"
4249 /* These CALL_FN_ macros assume that on arm64-linux,
4250 sizeof(unsigned long) == 8. */
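/* Illustrative note on the pattern used below: args 1..8 are loaded
   into x0..x7 from _argvec[1..8], the target address is loaded into x8
   last, and the call is made via VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8.
   Args 9 and beyond are stored at [sp, #0], [sp, #8], ... after
   reserving a 16-byte-aligned block, hence "sub sp, sp, #0x20" for one
   or two stacked args and "sub sp, sp, #0x30" for three or four. */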
4252 #define CALL_FN_W_v(lval, orig) \
4254 volatile OrigFn _orig = (orig); \
4255 volatile unsigned long _argvec[1]; \
4256 volatile unsigned long _res; \
4257 _argvec[0] = (unsigned long)_orig.nraddr; \
4259 VALGRIND_ALIGN_STACK \
4260 "ldr x8, [%1] \n\t" /* target->x8 */ \
4261 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4262 VALGRIND_RESTORE_STACK \
4264 : /*out*/ "=r" (_res) \
4265 : /*in*/ "0" (&_argvec[0]) \
4266 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4268 lval = (__typeof__(lval)) _res; \
4271 #define CALL_FN_W_W(lval, orig, arg1) \
4273 volatile OrigFn _orig = (orig); \
4274 volatile unsigned long _argvec[2]; \
4275 volatile unsigned long _res; \
4276 _argvec[0] = (unsigned long)_orig.nraddr; \
4277 _argvec[1] = (unsigned long)(arg1); \
4279 VALGRIND_ALIGN_STACK \
4280 "ldr x0, [%1, #8] \n\t" \
4281 "ldr x8, [%1] \n\t" /* target->x8 */ \
4282 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4283 VALGRIND_RESTORE_STACK \
4285 : /*out*/ "=r" (_res) \
4286 : /*in*/ "0" (&_argvec[0]) \
4287 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4289 lval = (__typeof__(lval)) _res; \
4292 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4294 volatile OrigFn _orig = (orig); \
4295 volatile unsigned long _argvec[3]; \
4296 volatile unsigned long _res; \
4297 _argvec[0] = (unsigned long)_orig.nraddr; \
4298 _argvec[1] = (unsigned long)(arg1); \
4299 _argvec[2] = (unsigned long)(arg2); \
4301 VALGRIND_ALIGN_STACK \
4302 "ldr x0, [%1, #8] \n\t" \
4303 "ldr x1, [%1, #16] \n\t" \
4304 "ldr x8, [%1] \n\t" /* target->x8 */ \
4305 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4306 VALGRIND_RESTORE_STACK \
4308 : /*out*/ "=r" (_res) \
4309 : /*in*/ "0" (&_argvec[0]) \
4310 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4312 lval = (__typeof__(lval)) _res; \
4315 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4317 volatile OrigFn _orig = (orig); \
4318 volatile unsigned long _argvec[4]; \
4319 volatile unsigned long _res; \
4320 _argvec[0] = (unsigned long)_orig.nraddr; \
4321 _argvec[1] = (unsigned long)(arg1); \
4322 _argvec[2] = (unsigned long)(arg2); \
4323 _argvec[3] = (unsigned long)(arg3); \
4325 VALGRIND_ALIGN_STACK \
4326 "ldr x0, [%1, #8] \n\t" \
4327 "ldr x1, [%1, #16] \n\t" \
4328 "ldr x2, [%1, #24] \n\t" \
4329 "ldr x8, [%1] \n\t" /* target->x8 */ \
4330 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4331 VALGRIND_RESTORE_STACK \
4333 : /*out*/ "=r" (_res) \
4334 : /*in*/ "0" (&_argvec[0]) \
4335 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4337 lval = (__typeof__(lval)) _res; \
4340 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4342 volatile OrigFn _orig = (orig); \
4343 volatile unsigned long _argvec[5]; \
4344 volatile unsigned long _res; \
4345 _argvec[0] = (unsigned long)_orig.nraddr; \
4346 _argvec[1] = (unsigned long)(arg1); \
4347 _argvec[2] = (unsigned long)(arg2); \
4348 _argvec[3] = (unsigned long)(arg3); \
4349 _argvec[4] = (unsigned long)(arg4); \
4351 VALGRIND_ALIGN_STACK \
4352 "ldr x0, [%1, #8] \n\t" \
4353 "ldr x1, [%1, #16] \n\t" \
4354 "ldr x2, [%1, #24] \n\t" \
4355 "ldr x3, [%1, #32] \n\t" \
4356 "ldr x8, [%1] \n\t" /* target->x8 */ \
4357 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4358 VALGRIND_RESTORE_STACK \
4360 : /*out*/ "=r" (_res) \
4361 : /*in*/ "0" (&_argvec[0]) \
4362 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4364 lval = (__typeof__(lval)) _res; \
4367 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4369 volatile OrigFn _orig = (orig); \
4370 volatile unsigned long _argvec[6]; \
4371 volatile unsigned long _res; \
4372 _argvec[0] = (unsigned long)_orig.nraddr; \
4373 _argvec[1] = (unsigned long)(arg1); \
4374 _argvec[2] = (unsigned long)(arg2); \
4375 _argvec[3] = (unsigned long)(arg3); \
4376 _argvec[4] = (unsigned long)(arg4); \
4377 _argvec[5] = (unsigned long)(arg5); \
4379 VALGRIND_ALIGN_STACK \
4380 "ldr x0, [%1, #8] \n\t" \
4381 "ldr x1, [%1, #16] \n\t" \
4382 "ldr x2, [%1, #24] \n\t" \
4383 "ldr x3, [%1, #32] \n\t" \
4384 "ldr x4, [%1, #40] \n\t" \
4385 "ldr x8, [%1] \n\t" /* target->x8 */ \
4386 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4387 VALGRIND_RESTORE_STACK \
4389 : /*out*/ "=r" (_res) \
4390 : /*in*/ "0" (&_argvec[0]) \
4391 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4393 lval = (__typeof__(lval)) _res; \
4396 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4398 volatile OrigFn _orig = (orig); \
4399 volatile unsigned long _argvec[7]; \
4400 volatile unsigned long _res; \
4401 _argvec[0] = (unsigned long)_orig.nraddr; \
4402 _argvec[1] = (unsigned long)(arg1); \
4403 _argvec[2] = (unsigned long)(arg2); \
4404 _argvec[3] = (unsigned long)(arg3); \
4405 _argvec[4] = (unsigned long)(arg4); \
4406 _argvec[5] = (unsigned long)(arg5); \
4407 _argvec[6] = (unsigned long)(arg6); \
4409 VALGRIND_ALIGN_STACK \
4410 "ldr x0, [%1, #8] \n\t" \
4411 "ldr x1, [%1, #16] \n\t" \
4412 "ldr x2, [%1, #24] \n\t" \
4413 "ldr x3, [%1, #32] \n\t" \
4414 "ldr x4, [%1, #40] \n\t" \
4415 "ldr x5, [%1, #48] \n\t" \
4416 "ldr x8, [%1] \n\t" /* target->x8 */ \
4417 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4418 VALGRIND_RESTORE_STACK \
4420 : /*out*/ "=r" (_res) \
4421 : /*in*/ "0" (&_argvec[0]) \
4422 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4424 lval = (__typeof__(lval)) _res; \
4427 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4430 volatile OrigFn _orig = (orig); \
4431 volatile unsigned long _argvec[8]; \
4432 volatile unsigned long _res; \
4433 _argvec[0] = (unsigned long)_orig.nraddr; \
4434 _argvec[1] = (unsigned long)(arg1); \
4435 _argvec[2] = (unsigned long)(arg2); \
4436 _argvec[3] = (unsigned long)(arg3); \
4437 _argvec[4] = (unsigned long)(arg4); \
4438 _argvec[5] = (unsigned long)(arg5); \
4439 _argvec[6] = (unsigned long)(arg6); \
4440 _argvec[7] = (unsigned long)(arg7); \
4442 VALGRIND_ALIGN_STACK \
4443 "ldr x0, [%1, #8] \n\t" \
4444 "ldr x1, [%1, #16] \n\t" \
4445 "ldr x2, [%1, #24] \n\t" \
4446 "ldr x3, [%1, #32] \n\t" \
4447 "ldr x4, [%1, #40] \n\t" \
4448 "ldr x5, [%1, #48] \n\t" \
4449 "ldr x6, [%1, #56] \n\t" \
4450 "ldr x8, [%1] \n\t" /* target->x8 */ \
4451 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4452 VALGRIND_RESTORE_STACK \
4454 : /*out*/ "=r" (_res) \
4455 : /*in*/ "0" (&_argvec[0]) \
4456 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4458 lval = (__typeof__(lval)) _res; \
4461 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4464 volatile OrigFn _orig = (orig); \
4465 volatile unsigned long _argvec[9]; \
4466 volatile unsigned long _res; \
4467 _argvec[0] = (unsigned long)_orig.nraddr; \
4468 _argvec[1] = (unsigned long)(arg1); \
4469 _argvec[2] = (unsigned long)(arg2); \
4470 _argvec[3] = (unsigned long)(arg3); \
4471 _argvec[4] = (unsigned long)(arg4); \
4472 _argvec[5] = (unsigned long)(arg5); \
4473 _argvec[6] = (unsigned long)(arg6); \
4474 _argvec[7] = (unsigned long)(arg7); \
4475 _argvec[8] = (unsigned long)(arg8); \
4477 VALGRIND_ALIGN_STACK \
4478 "ldr x0, [%1, #8] \n\t" \
4479 "ldr x1, [%1, #16] \n\t" \
4480 "ldr x2, [%1, #24] \n\t" \
4481 "ldr x3, [%1, #32] \n\t" \
4482 "ldr x4, [%1, #40] \n\t" \
4483 "ldr x5, [%1, #48] \n\t" \
4484 "ldr x6, [%1, #56] \n\t" \
4485 "ldr x7, [%1, #64] \n\t" \
4486 "ldr x8, [%1] \n\t" /* target->x8 */ \
4487 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4488 VALGRIND_RESTORE_STACK \
4490 : /*out*/ "=r" (_res) \
4491 : /*in*/ "0" (&_argvec[0]) \
4492 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4494 lval = (__typeof__(lval)) _res; \
4497 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4500 volatile OrigFn _orig = (orig); \
4501 volatile unsigned long _argvec[10]; \
4502 volatile unsigned long _res; \
4503 _argvec[0] = (unsigned long)_orig.nraddr; \
4504 _argvec[1] = (unsigned long)(arg1); \
4505 _argvec[2] = (unsigned long)(arg2); \
4506 _argvec[3] = (unsigned long)(arg3); \
4507 _argvec[4] = (unsigned long)(arg4); \
4508 _argvec[5] = (unsigned long)(arg5); \
4509 _argvec[6] = (unsigned long)(arg6); \
4510 _argvec[7] = (unsigned long)(arg7); \
4511 _argvec[8] = (unsigned long)(arg8); \
4512 _argvec[9] = (unsigned long)(arg9); \
4514 VALGRIND_ALIGN_STACK \
4515 "sub sp, sp, #0x20 \n\t" \
4516 "ldr x0, [%1, #8] \n\t" \
4517 "ldr x1, [%1, #16] \n\t" \
4518 "ldr x2, [%1, #24] \n\t" \
4519 "ldr x3, [%1, #32] \n\t" \
4520 "ldr x4, [%1, #40] \n\t" \
4521 "ldr x5, [%1, #48] \n\t" \
4522 "ldr x6, [%1, #56] \n\t" \
4523 "ldr x7, [%1, #64] \n\t" \
4524 "ldr x8, [%1, #72] \n\t" \
4525 "str x8, [sp, #0] \n\t" \
4526 "ldr x8, [%1] \n\t" /* target->x8 */ \
4527 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4528 VALGRIND_RESTORE_STACK \
4530 : /*out*/ "=r" (_res) \
4531 : /*in*/ "0" (&_argvec[0]) \
4532 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4534 lval = (__typeof__(lval)) _res; \
4537 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4538 arg7,arg8,arg9,arg10) \
4540 volatile OrigFn _orig = (orig); \
4541 volatile unsigned long _argvec[11]; \
4542 volatile unsigned long _res; \
4543 _argvec[0] = (unsigned long)_orig.nraddr; \
4544 _argvec[1] = (unsigned long)(arg1); \
4545 _argvec[2] = (unsigned long)(arg2); \
4546 _argvec[3] = (unsigned long)(arg3); \
4547 _argvec[4] = (unsigned long)(arg4); \
4548 _argvec[5] = (unsigned long)(arg5); \
4549 _argvec[6] = (unsigned long)(arg6); \
4550 _argvec[7] = (unsigned long)(arg7); \
4551 _argvec[8] = (unsigned long)(arg8); \
4552 _argvec[9] = (unsigned long)(arg9); \
4553 _argvec[10] = (unsigned long)(arg10); \
4555 VALGRIND_ALIGN_STACK \
4556 "sub sp, sp, #0x20 \n\t" \
4557 "ldr x0, [%1, #8] \n\t" \
4558 "ldr x1, [%1, #16] \n\t" \
4559 "ldr x2, [%1, #24] \n\t" \
4560 "ldr x3, [%1, #32] \n\t" \
4561 "ldr x4, [%1, #40] \n\t" \
4562 "ldr x5, [%1, #48] \n\t" \
4563 "ldr x6, [%1, #56] \n\t" \
4564 "ldr x7, [%1, #64] \n\t" \
4565 "ldr x8, [%1, #72] \n\t" \
4566 "str x8, [sp, #0] \n\t" \
4567 "ldr x8, [%1, #80] \n\t" \
4568 "str x8, [sp, #8] \n\t" \
4569 "ldr x8, [%1] \n\t" /* target->x8 */ \
4570 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4571 VALGRIND_RESTORE_STACK \
4573 : /*out*/ "=r" (_res) \
4574 : /*in*/ "0" (&_argvec[0]) \
4575 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4577 lval = (__typeof__(lval)) _res; \
4580 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4581 arg7,arg8,arg9,arg10,arg11) \
4583 volatile OrigFn _orig = (orig); \
4584 volatile unsigned long _argvec[12]; \
4585 volatile unsigned long _res; \
4586 _argvec[0] = (unsigned long)_orig.nraddr; \
4587 _argvec[1] = (unsigned long)(arg1); \
4588 _argvec[2] = (unsigned long)(arg2); \
4589 _argvec[3] = (unsigned long)(arg3); \
4590 _argvec[4] = (unsigned long)(arg4); \
4591 _argvec[5] = (unsigned long)(arg5); \
4592 _argvec[6] = (unsigned long)(arg6); \
4593 _argvec[7] = (unsigned long)(arg7); \
4594 _argvec[8] = (unsigned long)(arg8); \
4595 _argvec[9] = (unsigned long)(arg9); \
4596 _argvec[10] = (unsigned long)(arg10); \
4597 _argvec[11] = (unsigned long)(arg11); \
4599 VALGRIND_ALIGN_STACK \
4600 "sub sp, sp, #0x30 \n\t" \
4601 "ldr x0, [%1, #8] \n\t" \
4602 "ldr x1, [%1, #16] \n\t" \
4603 "ldr x2, [%1, #24] \n\t" \
4604 "ldr x3, [%1, #32] \n\t" \
4605 "ldr x4, [%1, #40] \n\t" \
4606 "ldr x5, [%1, #48] \n\t" \
4607 "ldr x6, [%1, #56] \n\t" \
4608 "ldr x7, [%1, #64] \n\t" \
4609 "ldr x8, [%1, #72] \n\t" \
4610 "str x8, [sp, #0] \n\t" \
4611 "ldr x8, [%1, #80] \n\t" \
4612 "str x8, [sp, #8] \n\t" \
4613 "ldr x8, [%1, #88] \n\t" \
4614 "str x8, [sp, #16] \n\t" \
4615 "ldr x8, [%1] \n\t" /* target->x8 */ \
4616 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4617 VALGRIND_RESTORE_STACK \
4619 : /*out*/ "=r" (_res) \
4620 : /*in*/ "0" (&_argvec[0]) \
4621 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4623 lval = (__typeof__(lval)) _res; \
4626 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4627 arg7,arg8,arg9,arg10,arg11, \
4630 volatile OrigFn _orig = (orig); \
4631 volatile unsigned long _argvec[13]; \
4632 volatile unsigned long _res; \
4633 _argvec[0] = (unsigned long)_orig.nraddr; \
4634 _argvec[1] = (unsigned long)(arg1); \
4635 _argvec[2] = (unsigned long)(arg2); \
4636 _argvec[3] = (unsigned long)(arg3); \
4637 _argvec[4] = (unsigned long)(arg4); \
4638 _argvec[5] = (unsigned long)(arg5); \
4639 _argvec[6] = (unsigned long)(arg6); \
4640 _argvec[7] = (unsigned long)(arg7); \
4641 _argvec[8] = (unsigned long)(arg8); \
4642 _argvec[9] = (unsigned long)(arg9); \
4643 _argvec[10] = (unsigned long)(arg10); \
4644 _argvec[11] = (unsigned long)(arg11); \
4645 _argvec[12] = (unsigned long)(arg12); \
4647 VALGRIND_ALIGN_STACK \
4648 "sub sp, sp, #0x30 \n\t" \
4649 "ldr x0, [%1, #8] \n\t" \
4650 "ldr x1, [%1, #16] \n\t" \
4651 "ldr x2, [%1, #24] \n\t" \
4652 "ldr x3, [%1, #32] \n\t" \
4653 "ldr x4, [%1, #40] \n\t" \
4654 "ldr x5, [%1, #48] \n\t" \
4655 "ldr x6, [%1, #56] \n\t" \
4656 "ldr x7, [%1, #64] \n\t" \
4657 "ldr x8, [%1, #72] \n\t" \
4658 "str x8, [sp, #0] \n\t" \
4659 "ldr x8, [%1, #80] \n\t" \
4660 "str x8, [sp, #8] \n\t" \
4661 "ldr x8, [%1, #88] \n\t" \
4662 "str x8, [sp, #16] \n\t" \
4663 "ldr x8, [%1, #96] \n\t" \
4664 "str x8, [sp, #24] \n\t" \
4665 "ldr x8, [%1] \n\t" /* target->x8 */ \
4666 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4667 VALGRIND_RESTORE_STACK \
4669 : /*out*/ "=r" (_res) \
4670 : /*in*/ "0" (&_argvec[0]) \
4671 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4673 lval = (__typeof__(lval)) _res; \
4676 #endif /* PLAT_arm64_linux */
4678 /* ------------------------- s390x-linux ------------------------- */
4680 #if defined(PLAT_s390x_linux)
/* Similar workaround to the one on amd64 (see above), but here we use
   r11 as the frame pointer and save the old r11 in r7. r11 might be
   used for argvec, so we copy argvec into r1, since r1 is clobbered
   after the call anyway. */
4686 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4687 # define __FRAME_POINTER \
4688 ,"d"(__builtin_dwarf_cfa())
4689 # define VALGRIND_CFI_PROLOGUE \
4690 ".cfi_remember_state\n\t" \
4691 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4694 ".cfi_def_cfa r11, 0\n\t"
4695 # define VALGRIND_CFI_EPILOGUE \
4697 ".cfi_restore_state\n\t"
4699 # define __FRAME_POINTER
4700 # define VALGRIND_CFI_PROLOGUE \
4702 # define VALGRIND_CFI_EPILOGUE
4705 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4706 according to the s390 GCC maintainer. (The ABI specification is not
4707 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4708 VALGRIND_RESTORE_STACK are not defined here. */
4710 /* These regs are trashed by the hidden call. Note that we overwrite
4711 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
   function a proper return address. All others are ABI defined call
   clobbers. */
4714 #if defined(__VX__) || defined(__S390_VX__)
4715 #define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
4716 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
4717 "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
4718 "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
4719 "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
      "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
/* Nb: Although r11 is modified in the asm snippets below (inside
   VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
   two reasons:
   (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
       modified
   (2) GCC will complain that r11 cannot appear inside a clobber section,
       when compiled with -O -fno-omit-frame-pointer
*/
4734 #define CALL_FN_W_v(lval, orig) \
4736 volatile OrigFn _orig = (orig); \
4737 volatile unsigned long _argvec[1]; \
4738 volatile unsigned long _res; \
4739 _argvec[0] = (unsigned long)_orig.nraddr; \
4741 VALGRIND_CFI_PROLOGUE \
4742 "aghi 15,-160\n\t" \
4743 "lg 1, 0(1)\n\t" /* target->r1 */ \
4744 VALGRIND_CALL_NOREDIR_R1 \
4746 VALGRIND_CFI_EPILOGUE \
4748 : /*out*/ "=d" (_res) \
4749 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4750 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4752 lval = (__typeof__(lval)) _res; \
/* The calling ABI passes the arguments in r2-r6 and on the stack. */
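/* Illustrative note: with five argument registers (r2..r6), each
   further argument is copied into the parameter area of the enlarged
   stack frame.  CALL_FN_W_6W below therefore grows the frame to 168
   bytes ("aghi 15,-168") and copies arg6 with "mvc 160(8,15), 48(1)";
   every additional argument adds another 8 bytes to the frame and
   another mvc. */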
4756 #define CALL_FN_W_W(lval, orig, arg1) \
4758 volatile OrigFn _orig = (orig); \
4759 volatile unsigned long _argvec[2]; \
4760 volatile unsigned long _res; \
4761 _argvec[0] = (unsigned long)_orig.nraddr; \
4762 _argvec[1] = (unsigned long)arg1; \
4764 VALGRIND_CFI_PROLOGUE \
4765 "aghi 15,-160\n\t" \
4768 VALGRIND_CALL_NOREDIR_R1 \
4770 VALGRIND_CFI_EPILOGUE \
4772 : /*out*/ "=d" (_res) \
4773 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4774 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4776 lval = (__typeof__(lval)) _res; \
4779 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4781 volatile OrigFn _orig = (orig); \
4782 volatile unsigned long _argvec[3]; \
4783 volatile unsigned long _res; \
4784 _argvec[0] = (unsigned long)_orig.nraddr; \
4785 _argvec[1] = (unsigned long)arg1; \
4786 _argvec[2] = (unsigned long)arg2; \
4788 VALGRIND_CFI_PROLOGUE \
4789 "aghi 15,-160\n\t" \
4793 VALGRIND_CALL_NOREDIR_R1 \
4795 VALGRIND_CFI_EPILOGUE \
4797 : /*out*/ "=d" (_res) \
4798 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4799 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4801 lval = (__typeof__(lval)) _res; \
4804 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4806 volatile OrigFn _orig = (orig); \
4807 volatile unsigned long _argvec[4]; \
4808 volatile unsigned long _res; \
4809 _argvec[0] = (unsigned long)_orig.nraddr; \
4810 _argvec[1] = (unsigned long)arg1; \
4811 _argvec[2] = (unsigned long)arg2; \
4812 _argvec[3] = (unsigned long)arg3; \
4814 VALGRIND_CFI_PROLOGUE \
4815 "aghi 15,-160\n\t" \
4820 VALGRIND_CALL_NOREDIR_R1 \
4822 VALGRIND_CFI_EPILOGUE \
4824 : /*out*/ "=d" (_res) \
4825 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4826 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4828 lval = (__typeof__(lval)) _res; \
4831 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4833 volatile OrigFn _orig = (orig); \
4834 volatile unsigned long _argvec[5]; \
4835 volatile unsigned long _res; \
4836 _argvec[0] = (unsigned long)_orig.nraddr; \
4837 _argvec[1] = (unsigned long)arg1; \
4838 _argvec[2] = (unsigned long)arg2; \
4839 _argvec[3] = (unsigned long)arg3; \
4840 _argvec[4] = (unsigned long)arg4; \
4842 VALGRIND_CFI_PROLOGUE \
4843 "aghi 15,-160\n\t" \
4849 VALGRIND_CALL_NOREDIR_R1 \
4851 VALGRIND_CFI_EPILOGUE \
4853 : /*out*/ "=d" (_res) \
4854 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4855 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4857 lval = (__typeof__(lval)) _res; \
4860 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4862 volatile OrigFn _orig = (orig); \
4863 volatile unsigned long _argvec[6]; \
4864 volatile unsigned long _res; \
4865 _argvec[0] = (unsigned long)_orig.nraddr; \
4866 _argvec[1] = (unsigned long)arg1; \
4867 _argvec[2] = (unsigned long)arg2; \
4868 _argvec[3] = (unsigned long)arg3; \
4869 _argvec[4] = (unsigned long)arg4; \
4870 _argvec[5] = (unsigned long)arg5; \
4872 VALGRIND_CFI_PROLOGUE \
4873 "aghi 15,-160\n\t" \
4880 VALGRIND_CALL_NOREDIR_R1 \
4882 VALGRIND_CFI_EPILOGUE \
4884 : /*out*/ "=d" (_res) \
4885 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4886 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4888 lval = (__typeof__(lval)) _res; \
4891 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4894 volatile OrigFn _orig = (orig); \
4895 volatile unsigned long _argvec[7]; \
4896 volatile unsigned long _res; \
4897 _argvec[0] = (unsigned long)_orig.nraddr; \
4898 _argvec[1] = (unsigned long)arg1; \
4899 _argvec[2] = (unsigned long)arg2; \
4900 _argvec[3] = (unsigned long)arg3; \
4901 _argvec[4] = (unsigned long)arg4; \
4902 _argvec[5] = (unsigned long)arg5; \
4903 _argvec[6] = (unsigned long)arg6; \
4905 VALGRIND_CFI_PROLOGUE \
4906 "aghi 15,-168\n\t" \
4912 "mvc 160(8,15), 48(1)\n\t" \
4914 VALGRIND_CALL_NOREDIR_R1 \
4916 VALGRIND_CFI_EPILOGUE \
4918 : /*out*/ "=d" (_res) \
4919 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4920 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4922 lval = (__typeof__(lval)) _res; \
4925 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4928 volatile OrigFn _orig = (orig); \
4929 volatile unsigned long _argvec[8]; \
4930 volatile unsigned long _res; \
4931 _argvec[0] = (unsigned long)_orig.nraddr; \
4932 _argvec[1] = (unsigned long)arg1; \
4933 _argvec[2] = (unsigned long)arg2; \
4934 _argvec[3] = (unsigned long)arg3; \
4935 _argvec[4] = (unsigned long)arg4; \
4936 _argvec[5] = (unsigned long)arg5; \
4937 _argvec[6] = (unsigned long)arg6; \
4938 _argvec[7] = (unsigned long)arg7; \
4940 VALGRIND_CFI_PROLOGUE \
4941 "aghi 15,-176\n\t" \
4947 "mvc 160(8,15), 48(1)\n\t" \
4948 "mvc 168(8,15), 56(1)\n\t" \
4950 VALGRIND_CALL_NOREDIR_R1 \
4952 VALGRIND_CFI_EPILOGUE \
4954 : /*out*/ "=d" (_res) \
4955 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4956 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4958 lval = (__typeof__(lval)) _res; \
4961 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4964 volatile OrigFn _orig = (orig); \
4965 volatile unsigned long _argvec[9]; \
4966 volatile unsigned long _res; \
4967 _argvec[0] = (unsigned long)_orig.nraddr; \
4968 _argvec[1] = (unsigned long)arg1; \
4969 _argvec[2] = (unsigned long)arg2; \
4970 _argvec[3] = (unsigned long)arg3; \
4971 _argvec[4] = (unsigned long)arg4; \
4972 _argvec[5] = (unsigned long)arg5; \
4973 _argvec[6] = (unsigned long)arg6; \
4974 _argvec[7] = (unsigned long)arg7; \
4975 _argvec[8] = (unsigned long)arg8; \
4977 VALGRIND_CFI_PROLOGUE \
4978 "aghi 15,-184\n\t" \
4984 "mvc 160(8,15), 48(1)\n\t" \
4985 "mvc 168(8,15), 56(1)\n\t" \
4986 "mvc 176(8,15), 64(1)\n\t" \
4988 VALGRIND_CALL_NOREDIR_R1 \
4990 VALGRIND_CFI_EPILOGUE \
4992 : /*out*/ "=d" (_res) \
4993 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4994 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4996 lval = (__typeof__(lval)) _res; \
4999 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5000 arg6, arg7 ,arg8, arg9) \
5002 volatile OrigFn _orig = (orig); \
5003 volatile unsigned long _argvec[10]; \
5004 volatile unsigned long _res; \
5005 _argvec[0] = (unsigned long)_orig.nraddr; \
5006 _argvec[1] = (unsigned long)arg1; \
5007 _argvec[2] = (unsigned long)arg2; \
5008 _argvec[3] = (unsigned long)arg3; \
5009 _argvec[4] = (unsigned long)arg4; \
5010 _argvec[5] = (unsigned long)arg5; \
5011 _argvec[6] = (unsigned long)arg6; \
5012 _argvec[7] = (unsigned long)arg7; \
5013 _argvec[8] = (unsigned long)arg8; \
5014 _argvec[9] = (unsigned long)arg9; \
5016 VALGRIND_CFI_PROLOGUE \
5017 "aghi 15,-192\n\t" \
5023 "mvc 160(8,15), 48(1)\n\t" \
5024 "mvc 168(8,15), 56(1)\n\t" \
5025 "mvc 176(8,15), 64(1)\n\t" \
5026 "mvc 184(8,15), 72(1)\n\t" \
5028 VALGRIND_CALL_NOREDIR_R1 \
5030 VALGRIND_CFI_EPILOGUE \
5032 : /*out*/ "=d" (_res) \
5033 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5034 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5036 lval = (__typeof__(lval)) _res; \
5039 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5040 arg6, arg7 ,arg8, arg9, arg10) \
5042 volatile OrigFn _orig = (orig); \
5043 volatile unsigned long _argvec[11]; \
5044 volatile unsigned long _res; \
5045 _argvec[0] = (unsigned long)_orig.nraddr; \
5046 _argvec[1] = (unsigned long)arg1; \
5047 _argvec[2] = (unsigned long)arg2; \
5048 _argvec[3] = (unsigned long)arg3; \
5049 _argvec[4] = (unsigned long)arg4; \
5050 _argvec[5] = (unsigned long)arg5; \
5051 _argvec[6] = (unsigned long)arg6; \
5052 _argvec[7] = (unsigned long)arg7; \
5053 _argvec[8] = (unsigned long)arg8; \
5054 _argvec[9] = (unsigned long)arg9; \
5055 _argvec[10] = (unsigned long)arg10; \
5057 VALGRIND_CFI_PROLOGUE \
5058 "aghi 15,-200\n\t" \
5064 "mvc 160(8,15), 48(1)\n\t" \
5065 "mvc 168(8,15), 56(1)\n\t" \
5066 "mvc 176(8,15), 64(1)\n\t" \
5067 "mvc 184(8,15), 72(1)\n\t" \
5068 "mvc 192(8,15), 80(1)\n\t" \
5070 VALGRIND_CALL_NOREDIR_R1 \
5072 VALGRIND_CFI_EPILOGUE \
5074 : /*out*/ "=d" (_res) \
5075 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5076 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5078 lval = (__typeof__(lval)) _res; \
5081 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5082 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5084 volatile OrigFn _orig = (orig); \
5085 volatile unsigned long _argvec[12]; \
5086 volatile unsigned long _res; \
5087 _argvec[0] = (unsigned long)_orig.nraddr; \
5088 _argvec[1] = (unsigned long)arg1; \
5089 _argvec[2] = (unsigned long)arg2; \
5090 _argvec[3] = (unsigned long)arg3; \
5091 _argvec[4] = (unsigned long)arg4; \
5092 _argvec[5] = (unsigned long)arg5; \
5093 _argvec[6] = (unsigned long)arg6; \
5094 _argvec[7] = (unsigned long)arg7; \
5095 _argvec[8] = (unsigned long)arg8; \
5096 _argvec[9] = (unsigned long)arg9; \
5097 _argvec[10] = (unsigned long)arg10; \
5098 _argvec[11] = (unsigned long)arg11; \
5100 VALGRIND_CFI_PROLOGUE \
5101 "aghi 15,-208\n\t" \
5107 "mvc 160(8,15), 48(1)\n\t" \
5108 "mvc 168(8,15), 56(1)\n\t" \
5109 "mvc 176(8,15), 64(1)\n\t" \
5110 "mvc 184(8,15), 72(1)\n\t" \
5111 "mvc 192(8,15), 80(1)\n\t" \
5112 "mvc 200(8,15), 88(1)\n\t" \
5114 VALGRIND_CALL_NOREDIR_R1 \
5116 VALGRIND_CFI_EPILOGUE \
5118 : /*out*/ "=d" (_res) \
5119 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5120 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5122 lval = (__typeof__(lval)) _res; \
5125 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5126 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5128 volatile OrigFn _orig = (orig); \
5129 volatile unsigned long _argvec[13]; \
5130 volatile unsigned long _res; \
5131 _argvec[0] = (unsigned long)_orig.nraddr; \
5132 _argvec[1] = (unsigned long)arg1; \
5133 _argvec[2] = (unsigned long)arg2; \
5134 _argvec[3] = (unsigned long)arg3; \
5135 _argvec[4] = (unsigned long)arg4; \
5136 _argvec[5] = (unsigned long)arg5; \
5137 _argvec[6] = (unsigned long)arg6; \
5138 _argvec[7] = (unsigned long)arg7; \
5139 _argvec[8] = (unsigned long)arg8; \
5140 _argvec[9] = (unsigned long)arg9; \
5141 _argvec[10] = (unsigned long)arg10; \
5142 _argvec[11] = (unsigned long)arg11; \
5143 _argvec[12] = (unsigned long)arg12; \
5145 VALGRIND_CFI_PROLOGUE \
5146 "aghi 15,-216\n\t" \
5152 "mvc 160(8,15), 48(1)\n\t" \
5153 "mvc 168(8,15), 56(1)\n\t" \
5154 "mvc 176(8,15), 64(1)\n\t" \
5155 "mvc 184(8,15), 72(1)\n\t" \
5156 "mvc 192(8,15), 80(1)\n\t" \
5157 "mvc 200(8,15), 88(1)\n\t" \
5158 "mvc 208(8,15), 96(1)\n\t" \
5160 VALGRIND_CALL_NOREDIR_R1 \
5162 VALGRIND_CFI_EPILOGUE \
5164 : /*out*/ "=d" (_res) \
5165 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5166 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5168 lval = (__typeof__(lval)) _res; \
5172 #endif /* PLAT_s390x_linux */
5174 /* ------------------------- mips32-linux ----------------------- */
5176 #if defined(PLAT_mips32_linux)
5178 /* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4. */
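/* Illustrative note on the o32 pattern used below: args 1..4 go in
   $4..$7 (a0..a3); any further args are stored on the stack starting
   at 16($29), leaving the usual 16-byte home area for the register
   arguments.  The target address is placed in $25 (t9), as
   position-independent callees expect, and $28 (gp) and $31 (ra) are
   saved and restored around the hidden call. */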
5186 #define CALL_FN_W_v(lval, orig) \
5188 volatile OrigFn _orig = (orig); \
5189 volatile unsigned long _argvec[1]; \
5190 volatile unsigned long _res; \
5191 _argvec[0] = (unsigned long)_orig.nraddr; \
5193 "subu $29, $29, 8 \n\t" \
5194 "sw $28, 0($29) \n\t" \
5195 "sw $31, 4($29) \n\t" \
5196 "subu $29, $29, 16 \n\t" \
5197 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5198 VALGRIND_CALL_NOREDIR_T9 \
5199 "addu $29, $29, 16\n\t" \
5200 "lw $28, 0($29) \n\t" \
5201 "lw $31, 4($29) \n\t" \
5202 "addu $29, $29, 8 \n\t" \
5204 : /*out*/ "=r" (_res) \
5205 : /*in*/ "0" (&_argvec[0]) \
5206 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5208 lval = (__typeof__(lval)) _res; \
5211 #define CALL_FN_W_W(lval, orig, arg1) \
5213 volatile OrigFn _orig = (orig); \
5214 volatile unsigned long _argvec[2]; \
5215 volatile unsigned long _res; \
5216 _argvec[0] = (unsigned long)_orig.nraddr; \
5217 _argvec[1] = (unsigned long)(arg1); \
5219 "subu $29, $29, 8 \n\t" \
5220 "sw $28, 0($29) \n\t" \
5221 "sw $31, 4($29) \n\t" \
5222 "subu $29, $29, 16 \n\t" \
5223 "lw $4, 4(%1) \n\t" /* arg1*/ \
5224 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5225 VALGRIND_CALL_NOREDIR_T9 \
5226 "addu $29, $29, 16 \n\t" \
5227 "lw $28, 0($29) \n\t" \
5228 "lw $31, 4($29) \n\t" \
5229 "addu $29, $29, 8 \n\t" \
5231 : /*out*/ "=r" (_res) \
5232 : /*in*/ "0" (&_argvec[0]) \
5233 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5235 lval = (__typeof__(lval)) _res; \
5238 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5240 volatile OrigFn _orig = (orig); \
5241 volatile unsigned long _argvec[3]; \
5242 volatile unsigned long _res; \
5243 _argvec[0] = (unsigned long)_orig.nraddr; \
5244 _argvec[1] = (unsigned long)(arg1); \
5245 _argvec[2] = (unsigned long)(arg2); \
5247 "subu $29, $29, 8 \n\t" \
5248 "sw $28, 0($29) \n\t" \
5249 "sw $31, 4($29) \n\t" \
5250 "subu $29, $29, 16 \n\t" \
5251 "lw $4, 4(%1) \n\t" \
5252 "lw $5, 8(%1) \n\t" \
5253 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5254 VALGRIND_CALL_NOREDIR_T9 \
5255 "addu $29, $29, 16 \n\t" \
5256 "lw $28, 0($29) \n\t" \
5257 "lw $31, 4($29) \n\t" \
5258 "addu $29, $29, 8 \n\t" \
5260 : /*out*/ "=r" (_res) \
5261 : /*in*/ "0" (&_argvec[0]) \
5262 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5264 lval = (__typeof__(lval)) _res; \
5267 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5269 volatile OrigFn _orig = (orig); \
5270 volatile unsigned long _argvec[4]; \
5271 volatile unsigned long _res; \
5272 _argvec[0] = (unsigned long)_orig.nraddr; \
5273 _argvec[1] = (unsigned long)(arg1); \
5274 _argvec[2] = (unsigned long)(arg2); \
5275 _argvec[3] = (unsigned long)(arg3); \
5277 "subu $29, $29, 8 \n\t" \
5278 "sw $28, 0($29) \n\t" \
5279 "sw $31, 4($29) \n\t" \
5280 "subu $29, $29, 16 \n\t" \
5281 "lw $4, 4(%1) \n\t" \
5282 "lw $5, 8(%1) \n\t" \
5283 "lw $6, 12(%1) \n\t" \
5284 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5285 VALGRIND_CALL_NOREDIR_T9 \
5286 "addu $29, $29, 16 \n\t" \
5287 "lw $28, 0($29) \n\t" \
5288 "lw $31, 4($29) \n\t" \
5289 "addu $29, $29, 8 \n\t" \
5291 : /*out*/ "=r" (_res) \
5292 : /*in*/ "0" (&_argvec[0]) \
5293 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5295 lval = (__typeof__(lval)) _res; \
5298 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5300 volatile OrigFn _orig = (orig); \
5301 volatile unsigned long _argvec[5]; \
5302 volatile unsigned long _res; \
5303 _argvec[0] = (unsigned long)_orig.nraddr; \
5304 _argvec[1] = (unsigned long)(arg1); \
5305 _argvec[2] = (unsigned long)(arg2); \
5306 _argvec[3] = (unsigned long)(arg3); \
5307 _argvec[4] = (unsigned long)(arg4); \
5309 "subu $29, $29, 8 \n\t" \
5310 "sw $28, 0($29) \n\t" \
5311 "sw $31, 4($29) \n\t" \
5312 "subu $29, $29, 16 \n\t" \
5313 "lw $4, 4(%1) \n\t" \
5314 "lw $5, 8(%1) \n\t" \
5315 "lw $6, 12(%1) \n\t" \
5316 "lw $7, 16(%1) \n\t" \
5317 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5318 VALGRIND_CALL_NOREDIR_T9 \
5319 "addu $29, $29, 16 \n\t" \
5320 "lw $28, 0($29) \n\t" \
5321 "lw $31, 4($29) \n\t" \
5322 "addu $29, $29, 8 \n\t" \
5324 : /*out*/ "=r" (_res) \
5325 : /*in*/ "0" (&_argvec[0]) \
5326 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5328 lval = (__typeof__(lval)) _res; \
5331 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5333 volatile OrigFn _orig = (orig); \
5334 volatile unsigned long _argvec[6]; \
5335 volatile unsigned long _res; \
5336 _argvec[0] = (unsigned long)_orig.nraddr; \
5337 _argvec[1] = (unsigned long)(arg1); \
5338 _argvec[2] = (unsigned long)(arg2); \
5339 _argvec[3] = (unsigned long)(arg3); \
5340 _argvec[4] = (unsigned long)(arg4); \
5341 _argvec[5] = (unsigned long)(arg5); \
5343 "subu $29, $29, 8 \n\t" \
5344 "sw $28, 0($29) \n\t" \
5345 "sw $31, 4($29) \n\t" \
5346 "lw $4, 20(%1) \n\t" \
5347 "subu $29, $29, 24\n\t" \
5348 "sw $4, 16($29) \n\t" \
5349 "lw $4, 4(%1) \n\t" \
5350 "lw $5, 8(%1) \n\t" \
5351 "lw $6, 12(%1) \n\t" \
5352 "lw $7, 16(%1) \n\t" \
5353 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5354 VALGRIND_CALL_NOREDIR_T9 \
5355 "addu $29, $29, 24 \n\t" \
5356 "lw $28, 0($29) \n\t" \
5357 "lw $31, 4($29) \n\t" \
5358 "addu $29, $29, 8 \n\t" \
5360 : /*out*/ "=r" (_res) \
5361 : /*in*/ "0" (&_argvec[0]) \
5362 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5364 lval = (__typeof__(lval)) _res; \
5366 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5368 volatile OrigFn _orig = (orig); \
5369 volatile unsigned long _argvec[7]; \
5370 volatile unsigned long _res; \
5371 _argvec[0] = (unsigned long)_orig.nraddr; \
5372 _argvec[1] = (unsigned long)(arg1); \
5373 _argvec[2] = (unsigned long)(arg2); \
5374 _argvec[3] = (unsigned long)(arg3); \
5375 _argvec[4] = (unsigned long)(arg4); \
5376 _argvec[5] = (unsigned long)(arg5); \
5377 _argvec[6] = (unsigned long)(arg6); \
5379 "subu $29, $29, 8 \n\t" \
5380 "sw $28, 0($29) \n\t" \
5381 "sw $31, 4($29) \n\t" \
5382 "lw $4, 20(%1) \n\t" \
5383 "subu $29, $29, 32\n\t" \
5384 "sw $4, 16($29) \n\t" \
5385 "lw $4, 24(%1) \n\t" \
5387 "sw $4, 20($29) \n\t" \
5388 "lw $4, 4(%1) \n\t" \
5389 "lw $5, 8(%1) \n\t" \
5390 "lw $6, 12(%1) \n\t" \
5391 "lw $7, 16(%1) \n\t" \
5392 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5393 VALGRIND_CALL_NOREDIR_T9 \
5394 "addu $29, $29, 32 \n\t" \
5395 "lw $28, 0($29) \n\t" \
5396 "lw $31, 4($29) \n\t" \
5397 "addu $29, $29, 8 \n\t" \
5399 : /*out*/ "=r" (_res) \
5400 : /*in*/ "0" (&_argvec[0]) \
5401 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5403 lval = (__typeof__(lval)) _res; \
5406 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5409 volatile OrigFn _orig = (orig); \
5410 volatile unsigned long _argvec[8]; \
5411 volatile unsigned long _res; \
5412 _argvec[0] = (unsigned long)_orig.nraddr; \
5413 _argvec[1] = (unsigned long)(arg1); \
5414 _argvec[2] = (unsigned long)(arg2); \
5415 _argvec[3] = (unsigned long)(arg3); \
5416 _argvec[4] = (unsigned long)(arg4); \
5417 _argvec[5] = (unsigned long)(arg5); \
5418 _argvec[6] = (unsigned long)(arg6); \
5419 _argvec[7] = (unsigned long)(arg7); \
5421 "subu $29, $29, 8 \n\t" \
5422 "sw $28, 0($29) \n\t" \
5423 "sw $31, 4($29) \n\t" \
5424 "lw $4, 20(%1) \n\t" \
5425 "subu $29, $29, 32\n\t" \
5426 "sw $4, 16($29) \n\t" \
5427 "lw $4, 24(%1) \n\t" \
5428 "sw $4, 20($29) \n\t" \
5429 "lw $4, 28(%1) \n\t" \
5430 "sw $4, 24($29) \n\t" \
5431 "lw $4, 4(%1) \n\t" \
5432 "lw $5, 8(%1) \n\t" \
5433 "lw $6, 12(%1) \n\t" \
5434 "lw $7, 16(%1) \n\t" \
5435 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5436 VALGRIND_CALL_NOREDIR_T9 \
5437 "addu $29, $29, 32 \n\t" \
5438 "lw $28, 0($29) \n\t" \
5439 "lw $31, 4($29) \n\t" \
5440 "addu $29, $29, 8 \n\t" \
5442 : /*out*/ "=r" (_res) \
5443 : /*in*/ "0" (&_argvec[0]) \
5444 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5446 lval = (__typeof__(lval)) _res; \
5449 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5452 volatile OrigFn _orig = (orig); \
5453 volatile unsigned long _argvec[9]; \
5454 volatile unsigned long _res; \
5455 _argvec[0] = (unsigned long)_orig.nraddr; \
5456 _argvec[1] = (unsigned long)(arg1); \
5457 _argvec[2] = (unsigned long)(arg2); \
5458 _argvec[3] = (unsigned long)(arg3); \
5459 _argvec[4] = (unsigned long)(arg4); \
5460 _argvec[5] = (unsigned long)(arg5); \
5461 _argvec[6] = (unsigned long)(arg6); \
5462 _argvec[7] = (unsigned long)(arg7); \
5463 _argvec[8] = (unsigned long)(arg8); \
5465 "subu $29, $29, 8 \n\t" \
5466 "sw $28, 0($29) \n\t" \
5467 "sw $31, 4($29) \n\t" \
5468 "lw $4, 20(%1) \n\t" \
5469 "subu $29, $29, 40\n\t" \
5470 "sw $4, 16($29) \n\t" \
5471 "lw $4, 24(%1) \n\t" \
5472 "sw $4, 20($29) \n\t" \
5473 "lw $4, 28(%1) \n\t" \
5474 "sw $4, 24($29) \n\t" \
5475 "lw $4, 32(%1) \n\t" \
5476 "sw $4, 28($29) \n\t" \
5477 "lw $4, 4(%1) \n\t" \
5478 "lw $5, 8(%1) \n\t" \
5479 "lw $6, 12(%1) \n\t" \
5480 "lw $7, 16(%1) \n\t" \
5481 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5482 VALGRIND_CALL_NOREDIR_T9 \
5483 "addu $29, $29, 40 \n\t" \
5484 "lw $28, 0($29) \n\t" \
5485 "lw $31, 4($29) \n\t" \
5486 "addu $29, $29, 8 \n\t" \
5488 : /*out*/ "=r" (_res) \
5489 : /*in*/ "0" (&_argvec[0]) \
5490 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5492 lval = (__typeof__(lval)) _res; \
5495 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5498 volatile OrigFn _orig = (orig); \
5499 volatile unsigned long _argvec[10]; \
5500 volatile unsigned long _res; \
5501 _argvec[0] = (unsigned long)_orig.nraddr; \
5502 _argvec[1] = (unsigned long)(arg1); \
5503 _argvec[2] = (unsigned long)(arg2); \
5504 _argvec[3] = (unsigned long)(arg3); \
5505 _argvec[4] = (unsigned long)(arg4); \
5506 _argvec[5] = (unsigned long)(arg5); \
5507 _argvec[6] = (unsigned long)(arg6); \
5508 _argvec[7] = (unsigned long)(arg7); \
5509 _argvec[8] = (unsigned long)(arg8); \
5510 _argvec[9] = (unsigned long)(arg9); \
5512 "subu $29, $29, 8 \n\t" \
5513 "sw $28, 0($29) \n\t" \
5514 "sw $31, 4($29) \n\t" \
5515 "lw $4, 20(%1) \n\t" \
5516 "subu $29, $29, 40\n\t" \
5517 "sw $4, 16($29) \n\t" \
5518 "lw $4, 24(%1) \n\t" \
5519 "sw $4, 20($29) \n\t" \
5520 "lw $4, 28(%1) \n\t" \
5521 "sw $4, 24($29) \n\t" \
5522 "lw $4, 32(%1) \n\t" \
5523 "sw $4, 28($29) \n\t" \
5524 "lw $4, 36(%1) \n\t" \
5525 "sw $4, 32($29) \n\t" \
5526 "lw $4, 4(%1) \n\t" \
5527 "lw $5, 8(%1) \n\t" \
5528 "lw $6, 12(%1) \n\t" \
5529 "lw $7, 16(%1) \n\t" \
5530 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5531 VALGRIND_CALL_NOREDIR_T9 \
5532 "addu $29, $29, 40 \n\t" \
5533 "lw $28, 0($29) \n\t" \
5534 "lw $31, 4($29) \n\t" \
5535 "addu $29, $29, 8 \n\t" \
5537 : /*out*/ "=r" (_res) \
5538 : /*in*/ "0" (&_argvec[0]) \
5539 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5541 lval = (__typeof__(lval)) _res; \
5544 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5545 arg7,arg8,arg9,arg10) \
5547 volatile OrigFn _orig = (orig); \
5548 volatile unsigned long _argvec[11]; \
5549 volatile unsigned long _res; \
5550 _argvec[0] = (unsigned long)_orig.nraddr; \
5551 _argvec[1] = (unsigned long)(arg1); \
5552 _argvec[2] = (unsigned long)(arg2); \
5553 _argvec[3] = (unsigned long)(arg3); \
5554 _argvec[4] = (unsigned long)(arg4); \
5555 _argvec[5] = (unsigned long)(arg5); \
5556 _argvec[6] = (unsigned long)(arg6); \
5557 _argvec[7] = (unsigned long)(arg7); \
5558 _argvec[8] = (unsigned long)(arg8); \
5559 _argvec[9] = (unsigned long)(arg9); \
5560 _argvec[10] = (unsigned long)(arg10); \
5562 "subu $29, $29, 8 \n\t" \
5563 "sw $28, 0($29) \n\t" \
5564 "sw $31, 4($29) \n\t" \
5565 "lw $4, 20(%1) \n\t" \
5566 "subu $29, $29, 48\n\t" \
5567 "sw $4, 16($29) \n\t" \
5568 "lw $4, 24(%1) \n\t" \
5569 "sw $4, 20($29) \n\t" \
5570 "lw $4, 28(%1) \n\t" \
5571 "sw $4, 24($29) \n\t" \
5572 "lw $4, 32(%1) \n\t" \
5573 "sw $4, 28($29) \n\t" \
5574 "lw $4, 36(%1) \n\t" \
5575 "sw $4, 32($29) \n\t" \
5576 "lw $4, 40(%1) \n\t" \
5577 "sw $4, 36($29) \n\t" \
5578 "lw $4, 4(%1) \n\t" \
5579 "lw $5, 8(%1) \n\t" \
5580 "lw $6, 12(%1) \n\t" \
5581 "lw $7, 16(%1) \n\t" \
5582 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5583 VALGRIND_CALL_NOREDIR_T9 \
5584 "addu $29, $29, 48 \n\t" \
5585 "lw $28, 0($29) \n\t" \
5586 "lw $31, 4($29) \n\t" \
5587 "addu $29, $29, 8 \n\t" \
5589 : /*out*/ "=r" (_res) \
5590 : /*in*/ "0" (&_argvec[0]) \
5591 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5593 lval = (__typeof__(lval)) _res; \
5596 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5597 arg6,arg7,arg8,arg9,arg10, \
5600 volatile OrigFn _orig = (orig); \
5601 volatile unsigned long _argvec[12]; \
5602 volatile unsigned long _res; \
5603 _argvec[0] = (unsigned long)_orig.nraddr; \
5604 _argvec[1] = (unsigned long)(arg1); \
5605 _argvec[2] = (unsigned long)(arg2); \
5606 _argvec[3] = (unsigned long)(arg3); \
5607 _argvec[4] = (unsigned long)(arg4); \
5608 _argvec[5] = (unsigned long)(arg5); \
5609 _argvec[6] = (unsigned long)(arg6); \
5610 _argvec[7] = (unsigned long)(arg7); \
5611 _argvec[8] = (unsigned long)(arg8); \
5612 _argvec[9] = (unsigned long)(arg9); \
5613 _argvec[10] = (unsigned long)(arg10); \
5614 _argvec[11] = (unsigned long)(arg11); \
5616 "subu $29, $29, 8 \n\t" \
5617 "sw $28, 0($29) \n\t" \
5618 "sw $31, 4($29) \n\t" \
5619 "lw $4, 20(%1) \n\t" \
5620 "subu $29, $29, 48\n\t" \
5621 "sw $4, 16($29) \n\t" \
5622 "lw $4, 24(%1) \n\t" \
5623 "sw $4, 20($29) \n\t" \
5624 "lw $4, 28(%1) \n\t" \
5625 "sw $4, 24($29) \n\t" \
5626 "lw $4, 32(%1) \n\t" \
5627 "sw $4, 28($29) \n\t" \
5628 "lw $4, 36(%1) \n\t" \
5629 "sw $4, 32($29) \n\t" \
5630 "lw $4, 40(%1) \n\t" \
5631 "sw $4, 36($29) \n\t" \
5632 "lw $4, 44(%1) \n\t" \
5633 "sw $4, 40($29) \n\t" \
5634 "lw $4, 4(%1) \n\t" \
5635 "lw $5, 8(%1) \n\t" \
5636 "lw $6, 12(%1) \n\t" \
5637 "lw $7, 16(%1) \n\t" \
5638 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5639 VALGRIND_CALL_NOREDIR_T9 \
5640 "addu $29, $29, 48 \n\t" \
5641 "lw $28, 0($29) \n\t" \
5642 "lw $31, 4($29) \n\t" \
5643 "addu $29, $29, 8 \n\t" \
5645 : /*out*/ "=r" (_res) \
5646 : /*in*/ "0" (&_argvec[0]) \
5647 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5649 lval = (__typeof__(lval)) _res; \
5652 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5653 arg6,arg7,arg8,arg9,arg10, \
5656 volatile OrigFn _orig = (orig); \
5657 volatile unsigned long _argvec[13]; \
5658 volatile unsigned long _res; \
5659 _argvec[0] = (unsigned long)_orig.nraddr; \
5660 _argvec[1] = (unsigned long)(arg1); \
5661 _argvec[2] = (unsigned long)(arg2); \
5662 _argvec[3] = (unsigned long)(arg3); \
5663 _argvec[4] = (unsigned long)(arg4); \
5664 _argvec[5] = (unsigned long)(arg5); \
5665 _argvec[6] = (unsigned long)(arg6); \
5666 _argvec[7] = (unsigned long)(arg7); \
5667 _argvec[8] = (unsigned long)(arg8); \
5668 _argvec[9] = (unsigned long)(arg9); \
5669 _argvec[10] = (unsigned long)(arg10); \
5670 _argvec[11] = (unsigned long)(arg11); \
5671 _argvec[12] = (unsigned long)(arg12); \
5673 "subu $29, $29, 8 \n\t" \
5674 "sw $28, 0($29) \n\t" \
5675 "sw $31, 4($29) \n\t" \
5676 "lw $4, 20(%1) \n\t" \
5677 "subu $29, $29, 56\n\t" \
5678 "sw $4, 16($29) \n\t" \
5679 "lw $4, 24(%1) \n\t" \
5680 "sw $4, 20($29) \n\t" \
5681 "lw $4, 28(%1) \n\t" \
5682 "sw $4, 24($29) \n\t" \
5683 "lw $4, 32(%1) \n\t" \
5684 "sw $4, 28($29) \n\t" \
5685 "lw $4, 36(%1) \n\t" \
5686 "sw $4, 32($29) \n\t" \
5687 "lw $4, 40(%1) \n\t" \
5688 "sw $4, 36($29) \n\t" \
5689 "lw $4, 44(%1) \n\t" \
5690 "sw $4, 40($29) \n\t" \
5691 "lw $4, 48(%1) \n\t" \
5692 "sw $4, 44($29) \n\t" \
5693 "lw $4, 4(%1) \n\t" \
5694 "lw $5, 8(%1) \n\t" \
5695 "lw $6, 12(%1) \n\t" \
5696 "lw $7, 16(%1) \n\t" \
5697 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5698 VALGRIND_CALL_NOREDIR_T9 \
5699 "addu $29, $29, 56 \n\t" \
5700 "lw $28, 0($29) \n\t" \
5701 "lw $31, 4($29) \n\t" \
5702 "addu $29, $29, 8 \n\t" \
5704 : /*out*/ "=r" (_res) \
5705 : /*in*/ "r" (&_argvec[0]) \
5706 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5708 lval = (__typeof__(lval)) _res; \
5711 #endif /* PLAT_mips32_linux */
5713 /* ------------------------- nanomips-linux -------------------- */
5715 #if defined(PLAT_nanomips_linux)
5717 /* These regs are trashed by the hidden call. */
5718 #define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
5719 "$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
5722 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned long) == 4. */
5725 #define CALL_FN_W_v(lval, orig) \
5727 volatile OrigFn _orig = (orig); \
5728 volatile unsigned long _argvec[1]; \
5729 volatile unsigned long _res; \
5730 _argvec[0] = (unsigned long)_orig.nraddr; \
5732 "lw $t9, 0(%1)\n\t" \
5733 VALGRIND_CALL_NOREDIR_T9 \
5735 : /*out*/ "=r" (_res) \
5736 : /*in*/ "r" (&_argvec[0]) \
5737 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5739 lval = (__typeof__(lval)) _res; \
5742 #define CALL_FN_W_W(lval, orig, arg1) \
5744 volatile OrigFn _orig = (orig); \
5745 volatile unsigned long _argvec[2]; \
5746 volatile unsigned long _res; \
5747 _argvec[0] = (unsigned long)_orig.nraddr; \
5748 _argvec[1] = (unsigned long)(arg1); \
5750 "lw $t9, 0(%1)\n\t" \
5751 "lw $a0, 4(%1)\n\t" \
5752 VALGRIND_CALL_NOREDIR_T9 \
5754 : /*out*/ "=r" (_res) \
5755 : /*in*/ "r" (&_argvec[0]) \
5756 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5758 lval = (__typeof__(lval)) _res; \
5761 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5763 volatile OrigFn _orig = (orig); \
5764 volatile unsigned long _argvec[3]; \
5765 volatile unsigned long _res; \
5766 _argvec[0] = (unsigned long)_orig.nraddr; \
5767 _argvec[1] = (unsigned long)(arg1); \
5768 _argvec[2] = (unsigned long)(arg2); \
5770 "lw $t9, 0(%1)\n\t" \
5771 "lw $a0, 4(%1)\n\t" \
5772 "lw $a1, 8(%1)\n\t" \
5773 VALGRIND_CALL_NOREDIR_T9 \
5775 : /*out*/ "=r" (_res) \
5776 : /*in*/ "r" (&_argvec[0]) \
5777 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5779 lval = (__typeof__(lval)) _res; \
5782 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5784 volatile OrigFn _orig = (orig); \
5785 volatile unsigned long _argvec[4]; \
5786 volatile unsigned long _res; \
5787 _argvec[0] = (unsigned long)_orig.nraddr; \
5788 _argvec[1] = (unsigned long)(arg1); \
5789 _argvec[2] = (unsigned long)(arg2); \
5790 _argvec[3] = (unsigned long)(arg3); \
5792 "lw $t9, 0(%1)\n\t" \
5793 "lw $a0, 4(%1)\n\t" \
5794 "lw $a1, 8(%1)\n\t" \
5795 "lw $a2,12(%1)\n\t" \
5796 VALGRIND_CALL_NOREDIR_T9 \
5798 : /*out*/ "=r" (_res) \
5799 : /*in*/ "r" (&_argvec[0]) \
5800 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5802 lval = (__typeof__(lval)) _res; \
5805 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5807 volatile OrigFn _orig = (orig); \
5808 volatile unsigned long _argvec[5]; \
5809 volatile unsigned long _res; \
5810 _argvec[0] = (unsigned long)_orig.nraddr; \
5811 _argvec[1] = (unsigned long)(arg1); \
5812 _argvec[2] = (unsigned long)(arg2); \
5813 _argvec[3] = (unsigned long)(arg3); \
5814 _argvec[4] = (unsigned long)(arg4); \
5816 "lw $t9, 0(%1)\n\t" \
5817 "lw $a0, 4(%1)\n\t" \
5818 "lw $a1, 8(%1)\n\t" \
5819 "lw $a2,12(%1)\n\t" \
5820 "lw $a3,16(%1)\n\t" \
5821 VALGRIND_CALL_NOREDIR_T9 \
5823 : /*out*/ "=r" (_res) \
5824 : /*in*/ "r" (&_argvec[0]) \
5825 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5827 lval = (__typeof__(lval)) _res; \
5830 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5832 volatile OrigFn _orig = (orig); \
5833 volatile unsigned long _argvec[6]; \
5834 volatile unsigned long _res; \
5835 _argvec[0] = (unsigned long)_orig.nraddr; \
5836 _argvec[1] = (unsigned long)(arg1); \
5837 _argvec[2] = (unsigned long)(arg2); \
5838 _argvec[3] = (unsigned long)(arg3); \
5839 _argvec[4] = (unsigned long)(arg4); \
5840 _argvec[5] = (unsigned long)(arg5); \
5842 "lw $t9, 0(%1)\n\t" \
5843 "lw $a0, 4(%1)\n\t" \
5844 "lw $a1, 8(%1)\n\t" \
5845 "lw $a2,12(%1)\n\t" \
5846 "lw $a3,16(%1)\n\t" \
5847 "lw $a4,20(%1)\n\t" \
5848 VALGRIND_CALL_NOREDIR_T9 \
5850 : /*out*/ "=r" (_res) \
5851 : /*in*/ "r" (&_argvec[0]) \
5852 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5854 lval = (__typeof__(lval)) _res; \
5856 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5858 volatile OrigFn _orig = (orig); \
5859 volatile unsigned long _argvec[7]; \
5860 volatile unsigned long _res; \
5861 _argvec[0] = (unsigned long)_orig.nraddr; \
5862 _argvec[1] = (unsigned long)(arg1); \
5863 _argvec[2] = (unsigned long)(arg2); \
5864 _argvec[3] = (unsigned long)(arg3); \
5865 _argvec[4] = (unsigned long)(arg4); \
5866 _argvec[5] = (unsigned long)(arg5); \
5867 _argvec[6] = (unsigned long)(arg6); \
5869 "lw $t9, 0(%1)\n\t" \
5870 "lw $a0, 4(%1)\n\t" \
5871 "lw $a1, 8(%1)\n\t" \
5872 "lw $a2,12(%1)\n\t" \
5873 "lw $a3,16(%1)\n\t" \
5874 "lw $a4,20(%1)\n\t" \
5875 "lw $a5,24(%1)\n\t" \
5876 VALGRIND_CALL_NOREDIR_T9 \
5878 : /*out*/ "=r" (_res) \
5879 : /*in*/ "r" (&_argvec[0]) \
5880 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5882 lval = (__typeof__(lval)) _res; \
5885 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5888 volatile OrigFn _orig = (orig); \
5889 volatile unsigned long _argvec[8]; \
5890 volatile unsigned long _res; \
5891 _argvec[0] = (unsigned long)_orig.nraddr; \
5892 _argvec[1] = (unsigned long)(arg1); \
5893 _argvec[2] = (unsigned long)(arg2); \
5894 _argvec[3] = (unsigned long)(arg3); \
5895 _argvec[4] = (unsigned long)(arg4); \
5896 _argvec[5] = (unsigned long)(arg5); \
5897 _argvec[6] = (unsigned long)(arg6); \
5898 _argvec[7] = (unsigned long)(arg7); \
5900 "lw $t9, 0(%1)\n\t" \
5901 "lw $a0, 4(%1)\n\t" \
5902 "lw $a1, 8(%1)\n\t" \
5903 "lw $a2,12(%1)\n\t" \
5904 "lw $a3,16(%1)\n\t" \
5905 "lw $a4,20(%1)\n\t" \
5906 "lw $a5,24(%1)\n\t" \
5907 "lw $a6,28(%1)\n\t" \
5908 VALGRIND_CALL_NOREDIR_T9 \
5910 : /*out*/ "=r" (_res) \
5911 : /*in*/ "r" (&_argvec[0]) \
5912 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5914 lval = (__typeof__(lval)) _res; \
5917 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5920 volatile OrigFn _orig = (orig); \
5921 volatile unsigned long _argvec[9]; \
5922 volatile unsigned long _res; \
5923 _argvec[0] = (unsigned long)_orig.nraddr; \
5924 _argvec[1] = (unsigned long)(arg1); \
5925 _argvec[2] = (unsigned long)(arg2); \
5926 _argvec[3] = (unsigned long)(arg3); \
5927 _argvec[4] = (unsigned long)(arg4); \
5928 _argvec[5] = (unsigned long)(arg5); \
5929 _argvec[6] = (unsigned long)(arg6); \
5930 _argvec[7] = (unsigned long)(arg7); \
5931 _argvec[8] = (unsigned long)(arg8); \
5933 "lw $t9, 0(%1)\n\t" \
5934 "lw $a0, 4(%1)\n\t" \
5935 "lw $a1, 8(%1)\n\t" \
5936 "lw $a2,12(%1)\n\t" \
5937 "lw $a3,16(%1)\n\t" \
5938 "lw $a4,20(%1)\n\t" \
5939 "lw $a5,24(%1)\n\t" \
5940 "lw $a6,28(%1)\n\t" \
5941 "lw $a7,32(%1)\n\t" \
5942 VALGRIND_CALL_NOREDIR_T9 \
5944 : /*out*/ "=r" (_res) \
5945 : /*in*/ "r" (&_argvec[0]) \
5946 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5948 lval = (__typeof__(lval)) _res; \
5951 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5954 volatile OrigFn _orig = (orig); \
5955 volatile unsigned long _argvec[10]; \
5956 volatile unsigned long _res; \
5957 _argvec[0] = (unsigned long)_orig.nraddr; \
5958 _argvec[1] = (unsigned long)(arg1); \
5959 _argvec[2] = (unsigned long)(arg2); \
5960 _argvec[3] = (unsigned long)(arg3); \
5961 _argvec[4] = (unsigned long)(arg4); \
5962 _argvec[5] = (unsigned long)(arg5); \
5963 _argvec[6] = (unsigned long)(arg6); \
5964 _argvec[7] = (unsigned long)(arg7); \
5965 _argvec[8] = (unsigned long)(arg8); \
5966 _argvec[9] = (unsigned long)(arg9); \
5968 "addiu $sp, $sp, -16 \n\t" \
5969 "lw $t9,36(%1) \n\t" \
5970 "sw $t9, 0($sp) \n\t" \
5971 "lw $t9, 0(%1) \n\t" \
5972 "lw $a0, 4(%1) \n\t" \
5973 "lw $a1, 8(%1) \n\t" \
5974 "lw $a2,12(%1) \n\t" \
5975 "lw $a3,16(%1) \n\t" \
5976 "lw $a4,20(%1) \n\t" \
5977 "lw $a5,24(%1) \n\t" \
5978 "lw $a6,28(%1) \n\t" \
5979 "lw $a7,32(%1) \n\t" \
5980 VALGRIND_CALL_NOREDIR_T9 \
5981 "move %0, $a0 \n\t" \
5982 "addiu $sp, $sp, 16 \n\t" \
5983 : /*out*/ "=r" (_res) \
5984 : /*in*/ "r" (&_argvec[0]) \
5985 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5987 lval = (__typeof__(lval)) _res; \
5990 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5991 arg7,arg8,arg9,arg10) \
5993 volatile OrigFn _orig = (orig); \
5994 volatile unsigned long _argvec[11]; \
5995 volatile unsigned long _res; \
5996 _argvec[0] = (unsigned long)_orig.nraddr; \
5997 _argvec[1] = (unsigned long)(arg1); \
5998 _argvec[2] = (unsigned long)(arg2); \
5999 _argvec[3] = (unsigned long)(arg3); \
6000 _argvec[4] = (unsigned long)(arg4); \
6001 _argvec[5] = (unsigned long)(arg5); \
6002 _argvec[6] = (unsigned long)(arg6); \
6003 _argvec[7] = (unsigned long)(arg7); \
6004 _argvec[8] = (unsigned long)(arg8); \
6005 _argvec[9] = (unsigned long)(arg9); \
6006 _argvec[10] = (unsigned long)(arg10); \
6008 "addiu $sp, $sp, -16 \n\t" \
6009 "lw $t9,36(%1) \n\t" \
6010 "sw $t9, 0($sp) \n\t" \
6011 "lw $t9,40(%1) \n\t" \
6012 "sw $t9, 4($sp) \n\t" \
6013 "lw $t9, 0(%1) \n\t" \
6014 "lw $a0, 4(%1) \n\t" \
6015 "lw $a1, 8(%1) \n\t" \
6016 "lw $a2,12(%1) \n\t" \
6017 "lw $a3,16(%1) \n\t" \
6018 "lw $a4,20(%1) \n\t" \
6019 "lw $a5,24(%1) \n\t" \
6020 "lw $a6,28(%1) \n\t" \
6021 "lw $a7,32(%1) \n\t" \
6022 VALGRIND_CALL_NOREDIR_T9 \
6023 "move %0, $a0 \n\t" \
6024 "addiu $sp, $sp, 16 \n\t" \
6025 : /*out*/ "=r" (_res) \
6026 : /*in*/ "r" (&_argvec[0]) \
6027 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6029 lval = (__typeof__(lval)) _res; \
6032 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6033 arg6,arg7,arg8,arg9,arg10, \
6036 volatile OrigFn _orig = (orig); \
6037 volatile unsigned long _argvec[12]; \
6038 volatile unsigned long _res; \
6039 _argvec[0] = (unsigned long)_orig.nraddr; \
6040 _argvec[1] = (unsigned long)(arg1); \
6041 _argvec[2] = (unsigned long)(arg2); \
6042 _argvec[3] = (unsigned long)(arg3); \
6043 _argvec[4] = (unsigned long)(arg4); \
6044 _argvec[5] = (unsigned long)(arg5); \
6045 _argvec[6] = (unsigned long)(arg6); \
6046 _argvec[7] = (unsigned long)(arg7); \
6047 _argvec[8] = (unsigned long)(arg8); \
6048 _argvec[9] = (unsigned long)(arg9); \
6049 _argvec[10] = (unsigned long)(arg10); \
6050 _argvec[11] = (unsigned long)(arg11); \
6052 "addiu $sp, $sp, -16 \n\t" \
6053 "lw $t9,36(%1) \n\t" \
6054 "sw $t9, 0($sp) \n\t" \
6055 "lw $t9,40(%1) \n\t" \
6056 "sw $t9, 4($sp) \n\t" \
6057 "lw $t9,44(%1) \n\t" \
6058 "sw $t9, 8($sp) \n\t" \
6059 "lw $t9, 0(%1) \n\t" \
6060 "lw $a0, 4(%1) \n\t" \
6061 "lw $a1, 8(%1) \n\t" \
6062 "lw $a2,12(%1) \n\t" \
6063 "lw $a3,16(%1) \n\t" \
6064 "lw $a4,20(%1) \n\t" \
6065 "lw $a5,24(%1) \n\t" \
6066 "lw $a6,28(%1) \n\t" \
6067 "lw $a7,32(%1) \n\t" \
6068 VALGRIND_CALL_NOREDIR_T9 \
6069 "move %0, $a0 \n\t" \
6070 "addiu $sp, $sp, 16 \n\t" \
6071 : /*out*/ "=r" (_res) \
6072 : /*in*/ "r" (&_argvec[0]) \
6073 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6075 lval = (__typeof__(lval)) _res; \
6078 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6079 arg6,arg7,arg8,arg9,arg10, \
6082 volatile OrigFn _orig = (orig); \
6083 volatile unsigned long _argvec[13]; \
6084 volatile unsigned long _res; \
6085 _argvec[0] = (unsigned long)_orig.nraddr; \
6086 _argvec[1] = (unsigned long)(arg1); \
6087 _argvec[2] = (unsigned long)(arg2); \
6088 _argvec[3] = (unsigned long)(arg3); \
6089 _argvec[4] = (unsigned long)(arg4); \
6090 _argvec[5] = (unsigned long)(arg5); \
6091 _argvec[6] = (unsigned long)(arg6); \
6092 _argvec[7] = (unsigned long)(arg7); \
6093 _argvec[8] = (unsigned long)(arg8); \
6094 _argvec[9] = (unsigned long)(arg9); \
6095 _argvec[10] = (unsigned long)(arg10); \
6096 _argvec[11] = (unsigned long)(arg11); \
6097 _argvec[12] = (unsigned long)(arg12); \
6099 "addiu $sp, $sp, -16 \n\t" \
6100 "lw $t9,36(%1) \n\t" \
6101 "sw $t9, 0($sp) \n\t" \
6102 "lw $t9,40(%1) \n\t" \
6103 "sw $t9, 4($sp) \n\t" \
6104 "lw $t9,44(%1) \n\t" \
6105 "sw $t9, 8($sp) \n\t" \
6106 "lw $t9,48(%1) \n\t" \
6107 "sw $t9,12($sp) \n\t" \
6108 "lw $t9, 0(%1) \n\t" \
6109 "lw $a0, 4(%1) \n\t" \
6110 "lw $a1, 8(%1) \n\t" \
6111 "lw $a2,12(%1) \n\t" \
6112 "lw $a3,16(%1) \n\t" \
6113 "lw $a4,20(%1) \n\t" \
6114 "lw $a5,24(%1) \n\t" \
6115 "lw $a6,28(%1) \n\t" \
6116 "lw $a7,32(%1) \n\t" \
6117 VALGRIND_CALL_NOREDIR_T9 \
6118 "move %0, $a0 \n\t" \
6119 "addiu $sp, $sp, 16 \n\t" \
6120 : /*out*/ "=r" (_res) \
6121 : /*in*/ "r" (&_argvec[0]) \
6122 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6124 lval = (__typeof__(lval)) _res; \
6127 #endif /* PLAT_nanomips_linux */
6129 /* ------------------------- mips64-linux ------------------------- */
6131 #if defined(PLAT_mips64_linux)
6133 /* These regs are trashed by the hidden call. */
6134 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
6135 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
6138 /* These CALL_FN_ macros assume that on mips64-linux,
6139 sizeof(long long) == 8. */
6141 #define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
6143 #define CALL_FN_W_v(lval, orig) \
6145 volatile OrigFn _orig = (orig); \
6146 volatile unsigned long long _argvec[1]; \
6147 volatile unsigned long long _res; \
6148 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6150 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6151 VALGRIND_CALL_NOREDIR_T9 \
6153 : /*out*/ "=r" (_res) \
6154 : /*in*/ "0" (&_argvec[0]) \
6155 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6157 lval = (__typeof__(lval)) (long)_res; \
6160 #define CALL_FN_W_W(lval, orig, arg1) \
6162 volatile OrigFn _orig = (orig); \
6163 volatile unsigned long long _argvec[2]; \
6164 volatile unsigned long long _res; \
6165 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6166 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6168 "ld $4, 8(%1)\n\t" /* arg1*/ \
6169 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6170 VALGRIND_CALL_NOREDIR_T9 \
6172 : /*out*/ "=r" (_res) \
6173 : /*in*/ "r" (&_argvec[0]) \
6174 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6176 lval = (__typeof__(lval)) (long)_res; \
6179 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6181 volatile OrigFn _orig = (orig); \
6182 volatile unsigned long long _argvec[3]; \
6183 volatile unsigned long long _res; \
6184 _argvec[0] = _orig.nraddr; \
6185 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6186 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6188 "ld $4, 8(%1)\n\t" \
6189 "ld $5, 16(%1)\n\t" \
6190 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6191 VALGRIND_CALL_NOREDIR_T9 \
6193 : /*out*/ "=r" (_res) \
6194 : /*in*/ "r" (&_argvec[0]) \
6195 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6197 lval = (__typeof__(lval)) (long)_res; \
6201 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6203 volatile OrigFn _orig = (orig); \
6204 volatile unsigned long long _argvec[4]; \
6205 volatile unsigned long long _res; \
6206 _argvec[0] = _orig.nraddr; \
6207 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6208 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6209 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6211 "ld $4, 8(%1)\n\t" \
6212 "ld $5, 16(%1)\n\t" \
6213 "ld $6, 24(%1)\n\t" \
6214 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6215 VALGRIND_CALL_NOREDIR_T9 \
6217 : /*out*/ "=r" (_res) \
6218 : /*in*/ "r" (&_argvec[0]) \
6219 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6221 lval = (__typeof__(lval)) (long)_res; \
6224 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6226 volatile OrigFn _orig = (orig); \
6227 volatile unsigned long long _argvec[5]; \
6228 volatile unsigned long long _res; \
6229 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6230 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6231 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6232 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6233 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6235 "ld $4, 8(%1)\n\t" \
6236 "ld $5, 16(%1)\n\t" \
6237 "ld $6, 24(%1)\n\t" \
6238 "ld $7, 32(%1)\n\t" \
6239 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6240 VALGRIND_CALL_NOREDIR_T9 \
6242 : /*out*/ "=r" (_res) \
6243 : /*in*/ "r" (&_argvec[0]) \
6244 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6246 lval = (__typeof__(lval)) (long)_res; \
6249 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6251 volatile OrigFn _orig = (orig); \
6252 volatile unsigned long long _argvec[6]; \
6253 volatile unsigned long long _res; \
6254 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6255 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6256 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6257 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6258 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6259 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6261 "ld $4, 8(%1)\n\t" \
6262 "ld $5, 16(%1)\n\t" \
6263 "ld $6, 24(%1)\n\t" \
6264 "ld $7, 32(%1)\n\t" \
6265 "ld $8, 40(%1)\n\t" \
6266 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6267 VALGRIND_CALL_NOREDIR_T9 \
6269 : /*out*/ "=r" (_res) \
6270 : /*in*/ "r" (&_argvec[0]) \
6271 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6273 lval = (__typeof__(lval)) (long)_res; \
6276 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6278 volatile OrigFn _orig = (orig); \
6279 volatile unsigned long long _argvec[7]; \
6280 volatile unsigned long long _res; \
6281 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6282 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6283 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6284 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6285 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6286 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6287 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6289 "ld $4, 8(%1)\n\t" \
6290 "ld $5, 16(%1)\n\t" \
6291 "ld $6, 24(%1)\n\t" \
6292 "ld $7, 32(%1)\n\t" \
6293 "ld $8, 40(%1)\n\t" \
6294 "ld $9, 48(%1)\n\t" \
6295 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6296 VALGRIND_CALL_NOREDIR_T9 \
6298 : /*out*/ "=r" (_res) \
6299 : /*in*/ "r" (&_argvec[0]) \
6300 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6302 lval = (__typeof__(lval)) (long)_res; \
6305 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6308 volatile OrigFn _orig = (orig); \
6309 volatile unsigned long long _argvec[8]; \
6310 volatile unsigned long long _res; \
6311 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6312 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6313 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6314 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6315 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6316 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6317 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6318 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6320 "ld $4, 8(%1)\n\t" \
6321 "ld $5, 16(%1)\n\t" \
6322 "ld $6, 24(%1)\n\t" \
6323 "ld $7, 32(%1)\n\t" \
6324 "ld $8, 40(%1)\n\t" \
6325 "ld $9, 48(%1)\n\t" \
6326 "ld $10, 56(%1)\n\t" \
6327 "ld $25, 0(%1) \n\t" /* target->t9 */ \
6328 VALGRIND_CALL_NOREDIR_T9 \
6330 : /*out*/ "=r" (_res) \
6331 : /*in*/ "r" (&_argvec[0]) \
6332 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6334 lval = (__typeof__(lval)) (long)_res; \
6337 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6340 volatile OrigFn _orig = (orig); \
6341 volatile unsigned long long _argvec[9]; \
6342 volatile unsigned long long _res; \
6343 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6344 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6345 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6346 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6347 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6348 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6349 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6350 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6351 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6353 "ld $4, 8(%1)\n\t" \
6354 "ld $5, 16(%1)\n\t" \
6355 "ld $6, 24(%1)\n\t" \
6356 "ld $7, 32(%1)\n\t" \
6357 "ld $8, 40(%1)\n\t" \
6358 "ld $9, 48(%1)\n\t" \
6359 "ld $10, 56(%1)\n\t" \
6360 "ld $11, 64(%1)\n\t" \
6361 "ld $25, 0(%1) \n\t" /* target->t9 */ \
6362 VALGRIND_CALL_NOREDIR_T9 \
6364 : /*out*/ "=r" (_res) \
6365 : /*in*/ "r" (&_argvec[0]) \
6366 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6368 lval = (__typeof__(lval)) (long)_res; \
6371 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6374 volatile OrigFn _orig = (orig); \
6375 volatile unsigned long long _argvec[10]; \
6376 volatile unsigned long long _res; \
6377 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6378 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6379 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6380 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6381 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6382 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6383 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6384 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6385 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6386 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6388 "dsubu $29, $29, 8\n\t" \
6389 "ld $4, 72(%1)\n\t" \
6390 "sd $4, 0($29)\n\t" \
6391 "ld $4, 8(%1)\n\t" \
6392 "ld $5, 16(%1)\n\t" \
6393 "ld $6, 24(%1)\n\t" \
6394 "ld $7, 32(%1)\n\t" \
6395 "ld $8, 40(%1)\n\t" \
6396 "ld $9, 48(%1)\n\t" \
6397 "ld $10, 56(%1)\n\t" \
6398 "ld $11, 64(%1)\n\t" \
6399 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6400 VALGRIND_CALL_NOREDIR_T9 \
6401 "daddu $29, $29, 8\n\t" \
6403 : /*out*/ "=r" (_res) \
6404 : /*in*/ "r" (&_argvec[0]) \
6405 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6407 lval = (__typeof__(lval)) (long)_res; \
6410 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6411 arg7,arg8,arg9,arg10) \
6413 volatile OrigFn _orig = (orig); \
6414 volatile unsigned long long _argvec[11]; \
6415 volatile unsigned long long _res; \
6416 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6417 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6418 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6419 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6420 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6421 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6422 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6423 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6424 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6425 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6426 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6428 "dsubu $29, $29, 16\n\t" \
6429 "ld $4, 72(%1)\n\t" \
6430 "sd $4, 0($29)\n\t" \
6431 "ld $4, 80(%1)\n\t" \
6432 "sd $4, 8($29)\n\t" \
6433 "ld $4, 8(%1)\n\t" \
6434 "ld $5, 16(%1)\n\t" \
6435 "ld $6, 24(%1)\n\t" \
6436 "ld $7, 32(%1)\n\t" \
6437 "ld $8, 40(%1)\n\t" \
6438 "ld $9, 48(%1)\n\t" \
6439 "ld $10, 56(%1)\n\t" \
6440 "ld $11, 64(%1)\n\t" \
6441 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6442 VALGRIND_CALL_NOREDIR_T9 \
6443 "daddu $29, $29, 16\n\t" \
6445 : /*out*/ "=r" (_res) \
6446 : /*in*/ "r" (&_argvec[0]) \
6447 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6449 lval = (__typeof__(lval)) (long)_res; \
6452 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6453 arg6,arg7,arg8,arg9,arg10, \
6456 volatile OrigFn _orig = (orig); \
6457 volatile unsigned long long _argvec[12]; \
6458 volatile unsigned long long _res; \
6459 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6460 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6461 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6462 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6463 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6464 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6465 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6466 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6467 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6468 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6469 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6470 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6472 "dsubu $29, $29, 24\n\t" \
6473 "ld $4, 72(%1)\n\t" \
6474 "sd $4, 0($29)\n\t" \
6475 "ld $4, 80(%1)\n\t" \
6476 "sd $4, 8($29)\n\t" \
6477 "ld $4, 88(%1)\n\t" \
6478 "sd $4, 16($29)\n\t" \
6479 "ld $4, 8(%1)\n\t" \
6480 "ld $5, 16(%1)\n\t" \
6481 "ld $6, 24(%1)\n\t" \
6482 "ld $7, 32(%1)\n\t" \
6483 "ld $8, 40(%1)\n\t" \
6484 "ld $9, 48(%1)\n\t" \
6485 "ld $10, 56(%1)\n\t" \
6486 "ld $11, 64(%1)\n\t" \
6487 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6488 VALGRIND_CALL_NOREDIR_T9 \
6489 "daddu $29, $29, 24\n\t" \
6491 : /*out*/ "=r" (_res) \
6492 : /*in*/ "r" (&_argvec[0]) \
6493 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6495 lval = (__typeof__(lval)) (long)_res; \
6498 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6499 arg6,arg7,arg8,arg9,arg10, \
6502 volatile OrigFn _orig = (orig); \
6503 volatile unsigned long long _argvec[13]; \
6504 volatile unsigned long long _res; \
6505 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6506 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6507 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6508 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6509 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6510 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6511 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6512 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6513 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6514 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6515 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6516 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6517 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6519 "dsubu $29, $29, 32\n\t" \
6520 "ld $4, 72(%1)\n\t" \
6521 "sd $4, 0($29)\n\t" \
6522 "ld $4, 80(%1)\n\t" \
6523 "sd $4, 8($29)\n\t" \
6524 "ld $4, 88(%1)\n\t" \
6525 "sd $4, 16($29)\n\t" \
6526 "ld $4, 96(%1)\n\t" \
6527 "sd $4, 24($29)\n\t" \
6528 "ld $4, 8(%1)\n\t" \
6529 "ld $5, 16(%1)\n\t" \
6530 "ld $6, 24(%1)\n\t" \
6531 "ld $7, 32(%1)\n\t" \
6532 "ld $8, 40(%1)\n\t" \
6533 "ld $9, 48(%1)\n\t" \
6534 "ld $10, 56(%1)\n\t" \
6535 "ld $11, 64(%1)\n\t" \
6536 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6537 VALGRIND_CALL_NOREDIR_T9 \
6538 "daddu $29, $29, 32\n\t" \
6540 : /*out*/ "=r" (_res) \
6541 : /*in*/ "r" (&_argvec[0]) \
6542 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6544 lval = (__typeof__(lval)) (long)_res; \
6547 #endif /* PLAT_mips64_linux */
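/* Illustrative sketch only: how the CALL_FN_ macros defined in the
   platform sections above are typically used, assuming the usual valgrind.h
   function-wrapping macros (VALGRIND_GET_ORIG_FN, I_WRAP_SONAME_FN_ZU) are
   available earlier in this header.  The library libfoo.so and the function
   foo() are hypothetical; in the Z-encoded soname, Zd stands for '.'.

      int I_WRAP_SONAME_FN_ZU(libfooZdso, foo)(int x)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }

   The wrapper intercepts calls to foo(); CALL_FN_W_W transfers control to
   the original, non-redirected foo() and places its word-sized return value
   in 'result'. */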
6549 /* ------------------------------------------------------------------ */
6550 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6552 /* ------------------------------------------------------------------ */
6554 /* Some request codes. There are many more of these, but most are not
6555 exposed to end-user view. These are the public ones, all of the
6556 form 0x1000 + small_number.
6558 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6559 ones start at 0x2000.
6562 /* These macros are used by tools -- they must be public, but don't
6563 embed them into other programs. */
6564 #define VG_USERREQ_TOOL_BASE(a,b) \
6565 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
6566 #define VG_IS_TOOL_USERREQ(a, b, v) \
6567 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
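/* As an illustrative sketch (not an existing tool), a tool whose two-letter
   code is 'F','B' would lay out its own request numbers as

      enum {
         VG_USERREQ__FB_DO_SOMETHING = VG_USERREQ_TOOL_BASE('F','B'),
         VG_USERREQ__FB_DO_SOMETHING_ELSE
      };

   and recognise incoming requests with VG_IS_TOOL_USERREQ('F','B', request).
   Real tools such as Memcheck follow this pattern in their own public
   headers. */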
6569 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6570 This enum comprises an ABI exported by Valgrind to programs
6571 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6572 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most relevant group. */
6575 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6576 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6578 /* These allow any function to be called from the simulated
6579 CPU but run on the real CPU. Nb: the first arg passed to
6580 the function is always the ThreadId of the running
6581 thread! So CLIENT_CALL0 actually requires a 1 arg function, CLIENT_CALL1 a 2 arg one, etc. */
6583 VG_USERREQ__CLIENT_CALL0 = 0x1101,
6584 VG_USERREQ__CLIENT_CALL1 = 0x1102,
6585 VG_USERREQ__CLIENT_CALL2 = 0x1103,
6586 VG_USERREQ__CLIENT_CALL3 = 0x1104,
6588 /* Can be useful in regression testing suites -- eg. can
6589 send Valgrind's output to /dev/null and still count errors. */
6591 VG_USERREQ__COUNT_ERRORS = 0x1201,
6593 /* Allows the client program and/or gdbserver to execute a monitor command. */
6595 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6597 /* Allows the client program to change a dynamic command line option. */
6599 VG_USERREQ__CLO_CHANGE = 0x1203,
6601 /* These are useful and can be interpreted by any tool that
6602 tracks malloc() et al, by using vg_replace_malloc.c. */
6603 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
6604 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6605 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6606 /* Memory pool support. */
6607 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6608 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6609 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6610 VG_USERREQ__MEMPOOL_FREE = 0x1306,
6611 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6612 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6613 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6614 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6616 /* Allow printfs to valgrind log. */
6617 /* The first two pass the va_list argument by value, which
6618 assumes it is the same size as or smaller than a UWord,
6619 which generally isn't the case. Hence they are deprecated.
6620 The second two pass the vargs by reference and so are
6621 immune to this problem. */
6622 /* both :: char* fmt, va_list vargs (DEPRECATED) */
6623 VG_USERREQ__PRINTF = 0x1401,
6624 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6625 /* both :: char* fmt, va_list* vargs */
6626 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6627 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6629 /* Stack support. */
6630 VG_USERREQ__STACK_REGISTER = 0x1501,
6631 VG_USERREQ__STACK_DEREGISTER = 0x1502,
6632 VG_USERREQ__STACK_CHANGE = 0x1503,
6635 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6637 /* Querying of debug info. */
6638 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6640 /* Disable/enable error reporting level. Takes a single
6641 Word arg which is the delta to this thread's error
6642 disablement indicator. Hence 1 disables or further
6643 disables errors, and -1 moves back towards enablement.
6644 Other values are not allowed. */
6645 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6647 /* Some requests used internally by Valgrind, such as
6648 self-test or self-hosting. */
6649 /* Initialise IR injection */
6650 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
6651 /* Used by Inner Valgrind to inform Outer Valgrind where to
6652 find the list of inner guest threads */
6653 VG_USERREQ__INNER_THREADS = 0x1902
6656 #if !defined(__GNUC__)
6657 # define __extension__ /* */
6661 /* Returns the number of Valgrinds this code is running under. That
6662 is, 0 if running natively, 1 if running under Valgrind, 2 if
6663 running under Valgrind which is running under another Valgrind, etc. */
6665 #define RUNNING_ON_VALGRIND \
6666 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
6667 VG_USERREQ__RUNNING_ON_VALGRIND, \
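/* Typical use (illustrative sketch; 'iterations' is a hypothetical tuning
   knob): scale down a workload when the program notices it is running under
   Valgrind, since everything executes much more slowly there.

      if (RUNNING_ON_VALGRIND)
         iterations = 100;
      else
         iterations = 100000;
*/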
6671 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6672 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6673 since it provides a way to make sure valgrind will retranslate the
6674 invalidated area. Returns no value. */
6675 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6676 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6677 _qzz_addr, _qzz_len, 0, 0, 0)
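/* Illustrative sketch for a JIT that patches a code buffer in place
   (code_buf, new_code and new_len are hypothetical):

      memcpy(code_buf, new_code, new_len);
      VALGRIND_DISCARD_TRANSLATIONS(code_buf, new_len);

   After the request, re-executing code_buf under Valgrind picks up the new
   instructions rather than a stale translation. */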
6679 #define VALGRIND_INNER_THREADS(_qzz_addr) \
6680 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
6681 _qzz_addr, 0, 0, 0, 0)
6684 /* These requests are for getting Valgrind itself to print something.
6685 Possibly with a backtrace. This is a really ugly hack. The return value
6686 is the number of characters printed, excluding the "**<pid>** " part at the
6687 start and the backtrace (if present). */
6689 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6690 /* Modern GCC will optimize the static routine out if unused,
6691 and unused attribute will shut down warnings about it. */
6692 static int VALGRIND_PRINTF(const char *format, ...)
6693 __attribute__((format(__printf__, 1, 2), __unused__));
6696 #if defined(_MSC_VER)
6699 VALGRIND_PRINTF(const char *format, ...)
6701 #if !IS_ENABLED(CONFIG_VALGRIND)
6704 #else /* CONFIG_VALGRIND */
6705 #if defined(_MSC_VER) || defined(__MINGW64__)
6708 unsigned long _qzz_res;
6711 va_start(vargs, format);
6712 #if defined(_MSC_VER) || defined(__MINGW64__)
6713 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6714 VG_USERREQ__PRINTF_VALIST_BY_REF,
6719 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6720 VG_USERREQ__PRINTF_VALIST_BY_REF,
6721 (unsigned long)format,
6722 (unsigned long)&vargs,
6726 return (int)_qzz_res;
6727 #endif /* CONFIG_VALGRIND */
6730 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6731 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6732 __attribute__((format(__printf__, 1, 2), __unused__));
6735 #if defined(_MSC_VER)
6738 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6740 #if !IS_ENABLED(CONFIG_VALGRIND)
6743 #else /* CONFIG_VALGRIND */
6744 #if defined(_MSC_VER) || defined(__MINGW64__)
6747 unsigned long _qzz_res;
6750 va_start(vargs, format);
6751 #if defined(_MSC_VER) || defined(__MINGW64__)
6752 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6753 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6758 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6759 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6760 (unsigned long)format,
6761 (unsigned long)&vargs,
6765 return (int)_qzz_res;
6766 #endif /* CONFIG_VALGRIND */
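/* Example (illustrative; nrecords and state are hypothetical): the format
   string behaves like printf, but the output goes to Valgrind's log rather
   than to the program's stdout.

      VALGRIND_PRINTF("processed %d records\n", nrecords);
      VALGRIND_PRINTF_BACKTRACE("unexpected state %d, reached from:\n", state);

   Both return the number of characters printed; when not running under
   Valgrind the default return value is 0. */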
6770 /* These requests allow control to move from the simulated CPU to the
6771 real CPU, calling an arbitrary function.
6773 Note that the current ThreadId is inserted as the first argument.
6776 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6778 requires f to have this signature:
6780 Word f(Word tid, Word arg1, Word arg2)
6782 where "Word" is a word-sized type.
6784 Note that these client requests are not entirely reliable. For example,
6785 if you call a function with them that subsequently calls printf(),
6786 there's a high chance Valgrind will crash. Generally, your prospects of
6787 these working are made higher if the called function does not refer to
6788 any global variables, and does not refer to any libc or other functions
6789 (printf et al). Any kind of entanglement with libc or dynamic linking is
6790 likely to have a bad outcome, for tricky reasons which we've grappled
6791 with a lot in the past.
6793 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6794 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6795 VG_USERREQ__CLIENT_CALL0, \
6799 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6800 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6801 VG_USERREQ__CLIENT_CALL1, \
6805 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6806 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6807 VG_USERREQ__CLIENT_CALL2, \
6809 _qyy_arg1, _qyy_arg2, 0, 0)
6811 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6812 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6813 VG_USERREQ__CLIENT_CALL3, \
6815 _qyy_arg1, _qyy_arg2, \
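/* A minimal sketch of the calling convention described above, using long as
   the word-sized type.  add_on_real_cpu() is hypothetical; note that its
   first parameter is the ThreadId that Valgrind inserts automatically.

      static long add_on_real_cpu(long tid, long a, long b)
      {
         (void)tid;
         return a + b;
      }

      long sum = (long)VALGRIND_NON_SIMD_CALL2(add_on_real_cpu, 3, 4);

   Per the warning above, the called function should avoid libc calls and
   global data. */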
6819 /* Counts the number of errors that have been recorded by a tool. Nb:
6820 the tool must record the errors with VG_(maybe_record_error)() or
6821 VG_(unique_error)() for them to be counted. */
6822 #define VALGRIND_COUNT_ERRORS \
6823 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6824 0 /* default return */, \
6825 VG_USERREQ__COUNT_ERRORS, \
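/* Illustrative sketch for a regression test: run the interesting operation
   and check that the tool recorded no new errors (operation_under_test() is
   hypothetical):

      unsigned errs_before = VALGRIND_COUNT_ERRORS;
      operation_under_test();
      if (VALGRIND_COUNT_ERRORS != errs_before)
         VALGRIND_PRINTF("operation introduced new errors\n");
*/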
6828 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6829 when heap blocks are allocated in order to give accurate results. This
6830 happens automatically for the standard allocator functions such as
6831 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete, delete[], etc.
6834 But if your program uses a custom allocator, this doesn't automatically
6835 happen, and Valgrind will not do as well. For example, if you allocate
6836 superblocks with mmap() and then allocate chunks of the superblocks, all
6837 Valgrind's observations will be at the mmap() level and it won't know that
6838 the chunks should be considered separate entities. In Memcheck's case,
6839 that means you probably won't get heap block overrun detection (because
6840 there won't be redzones marked as unaddressable) and you definitely won't
6841 get any leak detection.
6843 The following client requests allow a custom allocator to be annotated so
6844 that it can be handled accurately by Valgrind.
6846 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6847 by a malloc()-like function. For Memcheck (an illustrative case), this does two things:
6850 - It records that the block has been allocated. This means any addresses
6851 within the block mentioned in error messages will be
6852 identified as belonging to the block. It also means that if the block
6853 isn't freed it will be detected by the leak checker.
6855 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6856 not set), or addressable and defined (if 'is_zeroed' is set). This
6857 controls how accesses to the block by the program are handled.
6859 'addr' is the start of the usable block (ie. after any
6860 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6861 can apply redzones -- these are blocks of padding at the start and end of
6862 each block. Adding redzones is recommended as it makes it much more likely
6863 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6864 zeroed (or filled with another predictable value), as is the case for calloc().
6867 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6868 heap block -- that will be used by the client program -- is allocated.
6869 It's best to put it at the outermost level of the allocator if possible;
6870 for example, if you have a function my_alloc() which calls
6871 internal_alloc(), and the client request is put inside internal_alloc(),
6872 stack traces relating to the heap block will contain entries for both
6873 my_alloc() and internal_alloc(), which is probably not what you want.
6875 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6876 custom blocks from within a heap block, B, that has been allocated with
6877 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6878 -- the custom blocks will take precedence.
6880 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6881 Memcheck, it does two things:
6883 - It records that the block has been deallocated. This assumes that the
6884 block was annotated as having been allocated via
6885 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6887 - It marks the block as being unaddressable.
6889 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6890 heap block is deallocated.
6892 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6893 Memcheck, it does four things:
6895 - It records that the size of a block has been changed. This assumes that
6896 the block was annotated as having been allocated via
6897 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6899 - If the block shrunk, it marks the freed memory as being unaddressable.
6901 - If the block grew, it marks the new area as undefined and defines a red
6902 zone past the end of the new block.
6904 - The V-bits of the overlap between the old and the new block are preserved.
6906 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6907 and before deallocation of the old block.
6909 In many cases, these three client requests will not be enough to get your
6910 allocator working well with Memcheck. More specifically, if your allocator
6911 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6912 will be necessary to mark the memory as addressable just before the zeroing
6913 occurs, otherwise you'll get a lot of invalid write errors. For example,
6914 you'll need to do this if your allocator recycles freed blocks, but it
6915 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6916 Alternatively, if your allocator reuses freed blocks for allocator-internal
6917 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6919 Really, what's happening is a blurring of the lines between the client
6920 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6921 memory should be considered unaddressable to the client program, but the
6922 allocator knows more than the rest of the client program and so may be able
6923 to safely access it. Extra client requests are necessary for Valgrind to
6924 understand the distinction between the allocator and the rest of the program.
6927 Ignored if addr == 0.
6929 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
6930 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
6931 addr, sizeB, rzB, is_zeroed, 0)
6933 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6934 Ignored if addr == 0.
6936 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
6937 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
6938 addr, oldSizeB, newSizeB, rzB, 0)
6940 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6941 Ignored if addr == 0.
6943 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
6944 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
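/* A minimal sketch of annotating a custom allocator with the requests above.
   Everything except the client requests is hypothetical (my_alloc, my_free,
   carve_from_superblock, release_to_superblock); RZ is a hypothetical
   16-byte redzone kept on each side of the user block.

      #define RZ 16

      void *my_alloc(size_t size)
      {
         char *raw  = carve_from_superblock(size + 2 * RZ);
         char *user = raw + RZ;
         VALGRIND_MALLOCLIKE_BLOCK(user, size, RZ, 0);
         return user;
      }

      void my_free(void *user, size_t size)
      {
         VALGRIND_FREELIKE_BLOCK(user, RZ);
         release_to_superblock((char *)user - RZ, size + 2 * RZ);
      }

   Passing 0 as is_zeroed says the memory is returned uninitialised, so
   Memcheck will treat its contents as undefined until written. */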
6947 /* Create a memory pool. */
6948 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
6949 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
6950 pool, rzB, is_zeroed, 0, 0)
6952 /* Create a memory pool with some flags specifying extended behaviour.
6953 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
6955 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
6956 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
6957 by the application as superblocks to dole out MALLOC_LIKE blocks using
6958 VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a two-level
6959 pool: the first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
6960 The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
6961 Note that the association between the pool and the second level blocks
6962 is implicit: second level blocks will be located inside first level
6963 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
6964 for such two-level pools, as otherwise valgrind will detect overlapping
6965 memory blocks and abort execution (e.g. during leak search).
6967 Such a meta pool can also be marked as an 'auto free' pool using the flag
6968 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
6969 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
6970 will automatically free the second level blocks that are contained
6971 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
6972 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
6973 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
6974 in the first level block.
6975 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
6976 without the VALGRIND_MEMPOOL_METAPOOL flag.
6978 #define VALGRIND_MEMPOOL_AUTO_FREE 1
6979 #define VALGRIND_MEMPOOL_METAPOOL 2
6980 #define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags) \
6981 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
6982 pool, rzB, is_zeroed, flags, 0)
6984 /* Destroy a memory pool. */
6985 #define VALGRIND_DESTROY_MEMPOOL(pool) \
6986 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
6989 /* Associate a piece of memory with a memory pool. */
6990 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
6991 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
6992 pool, addr, size, 0, 0)
6994 /* Disassociate a piece of memory from a memory pool. */
6995 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
6996 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
6997 pool, addr, 0, 0, 0)
6999 /* Disassociate any pieces outside a particular range. */
7000 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7001 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7002 pool, addr, size, 0, 0)
7004 /* Tell Valgrind that the pool previously anchored at address poolA has moved to anchor address poolB. */
7005 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
7006 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
7007 poolA, poolB, 0, 0, 0)
7009 /* Resize and/or move a piece associated with a memory pool. */
7010 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7011 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7012 pool, addrA, addrB, size, 0)
7014 /* Return 1 if a mempool exists, else 0. */
7015 #define VALGRIND_MEMPOOL_EXISTS(pool) \
7016 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7017 VG_USERREQ__MEMPOOL_EXISTS, \
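/* A minimal sketch of basic (non-metapool) usage.  get_superblock() and
   carve() are hypothetical; the pool handle is simply the anchor address of
   the pool, here the superblock itself:

      char *super = get_superblock(1024 * 1024);
      VALGRIND_CREATE_MEMPOOL(super, 0, 0);

      void *chunk = carve(super, 64);
      VALGRIND_MEMPOOL_ALLOC(super, chunk, 64);

      VALGRIND_MEMPOOL_FREE(super, chunk);
      VALGRIND_DESTROY_MEMPOOL(super);
*/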
7020 /* Mark a piece of memory as being a stack. Returns a stack id.
7021 start is the lowest addressable stack byte, end is the highest
7022 addressable stack byte. */
7023 #define VALGRIND_STACK_REGISTER(start, end) \
7024 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7025 VG_USERREQ__STACK_REGISTER, \
7026 start, end, 0, 0, 0)
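/* Illustrative sketch for a user-level coroutine or fiber stack; the fiber
   machinery (run_fiber_on) is hypothetical:

      static char fiber_stack[64 * 1024];

      unsigned sid = VALGRIND_STACK_REGISTER(fiber_stack,
                                             fiber_stack + sizeof fiber_stack);
      run_fiber_on(fiber_stack);
      VALGRIND_STACK_DEREGISTER(sid);
*/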
7028 /* Unmark the piece of memory associated with a stack id as being a stack. */
7030 #define VALGRIND_STACK_DEREGISTER(id) \
7031 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
7034 /* Change the start and end address of the stack id.
7035 start is the new lowest addressable stack byte, end is the new highest
7036 addressable stack byte. */
7037 #define VALGRIND_STACK_CHANGE(id, start, end) \
7038 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7039 id, start, end, 0, 0)
7041 /* Load PDB debug info for Wine PE image_map. */
7042 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7043 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7044 fd, ptr, total_size, delta, 0)
7046 /* Map a code address to a source file name and line number. buf64
7047 must point to a 64-byte buffer in the caller's address space. The
7048 result will be dumped in there and is guaranteed to be zero
7049 terminated. If no info is found, the first byte is set to zero. */
7050 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7051 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7052 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7053 addr, buf64, 0, 0, 0)
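/* Illustrative sketch (some_code_address is hypothetical):

      char srcloc[64];
      VALGRIND_MAP_IP_TO_SRCLOC(some_code_address, srcloc);
      if (srcloc[0] != 0)
         VALGRIND_PRINTF("resolved to %s\n", srcloc);
*/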
7055 /* Disable error reporting for this thread. Behaves in a stack like
7056 way, so you can safely call this multiple times provided that
7057 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
7058 to re-enable reporting. The first call of this macro disables
7059 reporting. Subsequent calls have no effect except to increase the
7060 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
7061 reporting. Child threads do not inherit this setting from their
7062 parents -- they are always created with reporting enabled. */
7063 #define VALGRIND_DISABLE_ERROR_REPORTING \
7064 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7067 /* Re-enable error reporting, as per comments on
7068 VALGRIND_DISABLE_ERROR_REPORTING. */
7069 #define VALGRIND_ENABLE_ERROR_REPORTING \
7070 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
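/* Illustrative sketch: suppress reports around a region of code known to
   produce uninteresting errors (noisy_third_party_call() is hypothetical);
   the enable must balance the disable, as described above.

      VALGRIND_DISABLE_ERROR_REPORTING;
      noisy_third_party_call();
      VALGRIND_ENABLE_ERROR_REPORTING;
*/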
7073 /* Execute a monitor command from the client program.
7074 If a connection is opened with GDB, the output will be sent
7075 according to the output mode set for vgdb.
7076 If no connection is opened, output will go to the log output.
7077 Returns 1 if command not recognised, 0 otherwise. */
7078 #define VALGRIND_MONITOR_COMMAND(command) \
7079 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7080 command, 0, 0, 0, 0)
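/* Illustrative sketch: the command string is tool-specific.  For example,
   under Memcheck a program can request an on-the-spot leak summary with:

      VALGRIND_MONITOR_COMMAND("leak_check summary");
*/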
7083 /* Change the value of a dynamic command line option.
7084 Note that unknown or not dynamically changeable options
7085 will cause a warning message to be output. */
7086 #define VALGRIND_CLO_CHANGE(option) \
7087 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE, \
7091 #undef PLAT_x86_darwin
7092 #undef PLAT_amd64_darwin
7093 #undef PLAT_x86_win32
7094 #undef PLAT_amd64_win64
7095 #undef PLAT_x86_linux
7096 #undef PLAT_amd64_linux
7097 #undef PLAT_ppc32_linux
7098 #undef PLAT_ppc64be_linux
7099 #undef PLAT_ppc64le_linux
7100 #undef PLAT_arm_linux
7101 #undef PLAT_s390x_linux
7102 #undef PLAT_mips32_linux
7103 #undef PLAT_mips64_linux
7104 #undef PLAT_nanomips_linux
7105 #undef PLAT_x86_solaris
7106 #undef PLAT_amd64_solaris
7108 #endif /* __VALGRIND_H */