----------------------------------------------------------------
Notice that the following BSD-style license applies to this one
file (valgrind.h) only. The rest of Valgrind is licensed under the
terms of the GNU General Public License, version 2, unless
otherwise indicated. See the COPYING file in the source
distribution for details.
----------------------------------------------------------------
This file is part of Valgrind, a dynamic binary instrumentation
Copyright (C) 2000-2012 Julian Seward. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. The origin of this software must not be misrepresented; you must
not claim that you wrote the original software. If you use this
software in a product, an acknowledgment in the product
documentation would be appreciated but is not required.
3. Altered source versions must be plainly marked as such, and must
not be misrepresented as being the original software.
4. The name of the author may not be used to endorse or promote
products derived from this software without specific prior written
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------
Notice that the above BSD-style license applies to this one file
(valgrind.h) only. The entire rest of Valgrind is licensed under
the terms of the GNU General Public License, version 2. See the
COPYING file in the source distribution for details.
----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
/* Version of Valgrind these macros shipped with, so that user code can
   compile conditionally on the Valgrind version.  (Introduced at 3.6;
   absent in 3.5 and earlier.)  Fix: stray extraction line-numbers
   ("91 ", "92 ") fused into the directives made them malformed. */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    8
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_x86_linux
115 #undef PLAT_amd64_linux
116 #undef PLAT_ppc32_linux
117 #undef PLAT_ppc64_linux
118 #undef PLAT_arm_linux
119 #undef PLAT_s390x_linux
120 #undef PLAT_mips32_linux
123 #if defined(__APPLE__) && defined(__i386__)
124 # define PLAT_x86_darwin 1
125 #elif defined(__APPLE__) && defined(__x86_64__)
126 # define PLAT_amd64_darwin 1
127 #elif defined(__MINGW32__) || defined(__CYGWIN32__) \
128 || (defined(_WIN32) && defined(_M_IX86))
129 # define PLAT_x86_win32 1
130 #elif defined(__linux__) && defined(__i386__)
131 # define PLAT_x86_linux 1
132 #elif defined(__linux__) && defined(__x86_64__)
133 # define PLAT_amd64_linux 1
134 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
135 # define PLAT_ppc32_linux 1
136 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
137 # define PLAT_ppc64_linux 1
138 #elif defined(__linux__) && defined(__arm__)
139 # define PLAT_arm_linux 1
140 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
141 # define PLAT_s390x_linux 1
142 #elif defined(__linux__) && defined(__mips__)
143 # define PLAT_mips32_linux 1
145 /* If we're not compiling for our target platform, don't generate
147 # if !defined(NVALGRIND)
153 /* ------------------------------------------------------------------ */
154 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
155 /* in here of use to end-users -- skip to the next section. */
156 /* ------------------------------------------------------------------ */
159 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
160 * request. Accepts both pointers and integers as arguments.
162 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
163 * client request that does not return a value.
165 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
166 * client request and whose value equals the client request result. Accepts
167 * both pointers and integers as arguments. Note that such calls are not
168 * necessarily pure functions -- they may have side effects.
/* Back-compatibility statement form: evaluates the client request and
   assigns its result into _zzq_rlval.  Kept for code written against the
   pre-3.6 interface; new code should call
   VALGRIND_DO_CLIENT_REQUEST_EXPR directly.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
/* Statement form of a client request whose result is ignored; the
   request still runs (passing 0 as the not-running-on-Valgrind default).
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
184 #if defined(NVALGRIND)
186 /* Define NVALGRIND to completely remove the Valgrind magic sequence
187 from the compiled code (analogous to NDEBUG's effects on
189 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
190 _zzq_default, _zzq_request, \
191 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
194 #else /* ! NVALGRIND */
196 /* The following defines the magic code sequences which the JITter
197 spots and handles magically. Don't look too closely at them as
198 they will rot your brain.
200 The assembly code sequences for all architectures is in this one
201 file. This is because this file must be stand-alone, and we don't
202 want to have multiple files.
204 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
205 value gets put in the return slot, so that everything works when
206 this is executed not under Valgrind. Args are passed in a memory
207 block, and so there's no intrinsic limit to the number that could
208 be passed, but it's currently five.
211 _zzq_rlval result lvalue
212 _zzq_default default value (result returned when running on real CPU)
213 _zzq_request request code
214 _zzq_arg1..5 request params
216 The other two macros are used to support function wrapping, and are
217 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
218 guest's NRADDR pseudo-register and whatever other information is
219 needed to safely run the call original from the wrapper: on
220 ppc64-linux, the R2 value at the divert point is also needed. This
221 information is abstracted into a user-visible type, OrigFn.
223 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
224 guest, but guarantees that the branch instruction will not be
225 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
226 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
227 complete inline asm, since it needs to be combined with more magic
228 inline asm stuff to be useful.
231 /* ------------------------- x86-{linux,darwin} ---------------- */
233 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
234 || (defined(PLAT_x86_win32) && defined(__GNUC__))
238 unsigned int nraddr; /* where's the code? */
/* x86 magic-instruction preamble: four rotations of %edi that are a
   net no-op (3+13+29+19 = 64 bits = two full 32-bit turns) but which the
   Valgrind JIT recognises as introducing a special instruction.
   Fix: stray extraction line-numbers fused into the macro text. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"
246 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
247 _zzq_default, _zzq_request, \
248 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
250 ({volatile unsigned int _zzq_args[6]; \
251 volatile unsigned int _zzq_result; \
252 _zzq_args[0] = (unsigned int)(_zzq_request); \
253 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
254 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
255 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
256 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
257 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
258 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
259 /* %EDX = client_request ( %EAX ) */ \
260 "xchgl %%ebx,%%ebx" \
261 : "=d" (_zzq_result) \
262 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
268 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
269 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
270 volatile unsigned int __addr; \
271 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
272 /* %EAX = guest_NRADDR */ \
273 "xchgl %%ecx,%%ecx" \
278 _zzq_orig->nraddr = __addr; \
/* Call the function whose address is in %EAX, with a guarantee that the
   JIT will not redirect the call.  This is a text fragment, not a
   complete inline asm -- it must be embedded in a larger asm sequence.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"
285 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
287 /* ------------------------- x86-Win32 ------------------------- */
289 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
293 unsigned int nraddr; /* where's the code? */
297 #if defined(_MSC_VER)
299 #define __SPECIAL_INSTRUCTION_PREAMBLE \
300 __asm rol edi, 3 __asm rol edi, 13 \
301 __asm rol edi, 29 __asm rol edi, 19
303 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
304 _zzq_default, _zzq_request, \
305 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
306 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
307 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
308 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
309 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
311 static __inline uintptr_t
312 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
313 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
314 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
317 volatile uintptr_t _zzq_args[6];
318 volatile unsigned int _zzq_result;
319 _zzq_args[0] = (uintptr_t)(_zzq_request);
320 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
321 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
322 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
323 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
324 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
325 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
326 __SPECIAL_INSTRUCTION_PREAMBLE
327 /* %EDX = client_request ( %EAX ) */
329 __asm mov _zzq_result, edx
334 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
335 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
336 volatile unsigned int __addr; \
337 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
338 /* %EAX = guest_NRADDR */ \
340 __asm mov __addr, eax \
342 _zzq_orig->nraddr = __addr; \
345 #define VALGRIND_CALL_NOREDIR_EAX ERROR
348 #error Unsupported compiler.
351 #endif /* PLAT_x86_win32 */
353 /* ------------------------ amd64-{linux,darwin} --------------- */
355 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
359 unsigned long long int nraddr; /* where's the code? */
/* amd64 magic-instruction preamble: four rotations of %rdi that are a
   net no-op (3+13+61+51 = 128 bits = two full 64-bit turns), recognised
   by the Valgrind JIT.
   Fix: stray extraction line-numbers fused into the macro text. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
367 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
368 _zzq_default, _zzq_request, \
369 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
371 ({ volatile unsigned long long int _zzq_args[6]; \
372 volatile unsigned long long int _zzq_result; \
373 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
374 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
375 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
376 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
377 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
378 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
379 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
380 /* %RDX = client_request ( %RAX ) */ \
381 "xchgq %%rbx,%%rbx" \
382 : "=d" (_zzq_result) \
383 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
389 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
390 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
391 volatile unsigned long long int __addr; \
392 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
393 /* %RAX = guest_NRADDR */ \
394 "xchgq %%rcx,%%rcx" \
399 _zzq_orig->nraddr = __addr; \
/* Call the function whose address is in %RAX, guaranteed not to be
   redirected by the JIT.  Text fragment only -- must be embedded in a
   larger inline-asm sequence.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"
406 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
408 /* ------------------------ ppc32-linux ------------------------ */
410 #if defined(PLAT_ppc32_linux)
414 unsigned int nraddr; /* where's the code? */
/* ppc32 magic-instruction preamble: four rlwinm rotations of r0 forming
   the no-op sequence the Valgrind JIT recognises.
   Fix: stray extraction line-numbers fused into the macro text. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rlwinm 0,0,3,0,0  ; rlwinm 0,0,13,0,0\n\t"  \
                     "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
422 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
423 _zzq_default, _zzq_request, \
424 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
427 ({ unsigned int _zzq_args[6]; \
428 unsigned int _zzq_result; \
429 unsigned int* _zzq_ptr; \
430 _zzq_args[0] = (unsigned int)(_zzq_request); \
431 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
432 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
433 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
434 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
435 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
436 _zzq_ptr = _zzq_args; \
437 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
438 "mr 4,%2\n\t" /*ptr*/ \
439 __SPECIAL_INSTRUCTION_PREAMBLE \
440 /* %R3 = client_request ( %R4 ) */ \
442 "mr %0,3" /*result*/ \
443 : "=b" (_zzq_result) \
444 : "b" (_zzq_default), "b" (_zzq_ptr) \
445 : "cc", "memory", "r3", "r4"); \
449 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
450 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
451 unsigned int __addr; \
452 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
453 /* %R3 = guest_NRADDR */ \
458 : "cc", "memory", "r3" \
460 _zzq_orig->nraddr = __addr; \
463 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
464 __SPECIAL_INSTRUCTION_PREAMBLE \
465 /* branch-and-link-to-noredir *%R11 */ \
467 #endif /* PLAT_ppc32_linux */
469 /* ------------------------ ppc64-linux ------------------------ */
471 #if defined(PLAT_ppc64_linux)
475 unsigned long long int nraddr; /* where's the code? */
476 unsigned long long int r2; /* what tocptr do we need? */
/* ppc64 magic-instruction preamble: four rotldi rotations of r0 that
   are a net no-op (3+13+61+51 = 128 bits), recognised by the JIT.
   Fix: stray extraction line-numbers fused into the macro text. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
484 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
485 _zzq_default, _zzq_request, \
486 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
489 ({ unsigned long long int _zzq_args[6]; \
490 unsigned long long int _zzq_result; \
491 unsigned long long int* _zzq_ptr; \
492 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
493 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
494 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
495 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
496 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
497 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
498 _zzq_ptr = _zzq_args; \
499 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
500 "mr 4,%2\n\t" /*ptr*/ \
501 __SPECIAL_INSTRUCTION_PREAMBLE \
502 /* %R3 = client_request ( %R4 ) */ \
504 "mr %0,3" /*result*/ \
505 : "=b" (_zzq_result) \
506 : "b" (_zzq_default), "b" (_zzq_ptr) \
507 : "cc", "memory", "r3", "r4"); \
511 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
512 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
513 unsigned long long int __addr; \
514 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
515 /* %R3 = guest_NRADDR */ \
520 : "cc", "memory", "r3" \
522 _zzq_orig->nraddr = __addr; \
523 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
524 /* %R3 = guest_NRADDR_GPR2 */ \
529 : "cc", "memory", "r3" \
531 _zzq_orig->r2 = __addr; \
534 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
535 __SPECIAL_INSTRUCTION_PREAMBLE \
536 /* branch-and-link-to-noredir *%R11 */ \
539 #endif /* PLAT_ppc64_linux */
541 /* ------------------------- arm-linux ------------------------- */
543 #if defined(PLAT_arm_linux)
547 unsigned int nraddr; /* where's the code? */
/* arm magic-instruction preamble: four rotations of r12 that are a net
   no-op (3+13+29+19 = 64 bits), recognised by the Valgrind JIT.
   Fix: stray extraction line-numbers fused into the macro text. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
555 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
556 _zzq_default, _zzq_request, \
557 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
560 ({volatile unsigned int _zzq_args[6]; \
561 volatile unsigned int _zzq_result; \
562 _zzq_args[0] = (unsigned int)(_zzq_request); \
563 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
564 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
565 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
566 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
567 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
568 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
569 "mov r4, %2\n\t" /*ptr*/ \
570 __SPECIAL_INSTRUCTION_PREAMBLE \
571 /* R3 = client_request ( R4 ) */ \
572 "orr r10, r10, r10\n\t" \
573 "mov %0, r3" /*result*/ \
574 : "=r" (_zzq_result) \
575 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
576 : "cc","memory", "r3", "r4"); \
580 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
581 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
582 unsigned int __addr; \
583 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
584 /* R3 = guest_NRADDR */ \
585 "orr r11, r11, r11\n\t" \
589 : "cc", "memory", "r3" \
591 _zzq_orig->nraddr = __addr; \
/* Branch-and-link to the address in R4, guaranteed not to be redirected
   by the JIT.  Text fragment only -- must be embedded in a larger
   inline-asm sequence.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"
599 #endif /* PLAT_arm_linux */
601 /* ------------------------ s390x-linux ------------------------ */
603 #if defined(PLAT_s390x_linux)
607 unsigned long long int nraddr; /* where's the code? */
611 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
612 * code. This detection is implemented in platform specific toIR.c
613 * (e.g. VEX/priv/guest_s390_decoder.c).
615 #define __SPECIAL_INSTRUCTION_PREAMBLE \
/* s390x: the no-op "lr" move that follows the preamble selects which
   special operation the JIT performs.
   Fix: stray extraction line-numbers fused into the macro text. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
625 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
626 _zzq_default, _zzq_request, \
627 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
629 ({volatile unsigned long long int _zzq_args[6]; \
630 volatile unsigned long long int _zzq_result; \
631 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
632 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
633 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
634 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
635 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
636 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
637 __asm__ volatile(/* r2 = args */ \
641 __SPECIAL_INSTRUCTION_PREAMBLE \
642 __CLIENT_REQUEST_CODE \
645 : "=d" (_zzq_result) \
646 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
647 : "cc", "2", "3", "memory" \
652 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
653 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
654 volatile unsigned long long int __addr; \
655 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
656 __GET_NR_CONTEXT_CODE \
660 : "cc", "3", "memory" \
662 _zzq_orig->nraddr = __addr; \
665 #define VALGRIND_CALL_NOREDIR_R1 \
666 __SPECIAL_INSTRUCTION_PREAMBLE \
669 #endif /* PLAT_s390x_linux */
671 /* ------------------------- mips32-linux ---------------- */
673 #if defined(PLAT_mips32_linux)
677 unsigned int nraddr; /* where's the code? */
685 #define __SPECIAL_INSTRUCTION_PREAMBLE \
686 "srl $0, $0, 13\n\t" \
687 "srl $0, $0, 29\n\t" \
688 "srl $0, $0, 3\n\t" \
691 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
692 _zzq_default, _zzq_request, \
693 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
695 ({ volatile unsigned int _zzq_args[6]; \
696 volatile unsigned int _zzq_result; \
697 _zzq_args[0] = (unsigned int)(_zzq_request); \
698 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
699 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
700 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
701 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
702 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
703 __asm__ volatile("move $11, %1\n\t" /*default*/ \
704 "move $12, %2\n\t" /*ptr*/ \
705 __SPECIAL_INSTRUCTION_PREAMBLE \
706 /* T3 = client_request ( T4 ) */ \
707 "or $13, $13, $13\n\t" \
708 "move %0, $11\n\t" /*result*/ \
709 : "=r" (_zzq_result) \
710 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
711 : "cc","memory", "t3", "t4"); \
715 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
716 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
717 volatile unsigned int __addr; \
718 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
719 /* %t9 = guest_NRADDR */ \
720 "or $14, $14, $14\n\t" \
721 "move %0, $11" /*result*/ \
724 : "cc", "memory" , "t3" \
726 _zzq_orig->nraddr = __addr; \
/* mips32: call the function whose address is in $t9, guaranteed not to
   be redirected by the JIT.  Text fragment only.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"
733 #endif /* PLAT_mips32_linux */
735 /* Insert assembly code for other platforms here... */
737 #endif /* NVALGRIND */
740 /* ------------------------------------------------------------------ */
741 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
742 /* ugly. It's the least-worst tradeoff I can think of. */
743 /* ------------------------------------------------------------------ */
745 /* This section defines magic (a.k.a appalling-hack) macros for doing
746 guaranteed-no-redirection macros, so as to get from function
747 wrappers to the functions they are wrapping. The whole point is to
748 construct standard call sequences, but to do the call itself with a
749 special no-redirect call pseudo-instruction that the JIT
750 understands and handles specially. This section is long and
751 repetitious, and I can't see a way to make it shorter.
753 The naming scheme is as follows:
755 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
757 'W' stands for "word" and 'v' for "void". Hence there are
758 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
759 and for each, the possibility of returning a word-typed result, or
763 /* Use these to write the name of your wrapper. NOTE: duplicates
764 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
765 the default behaviour equivalance class tag "0000" into the name.
766 See pub_tool_redir.h for details -- normally you don't need to
767 think about this, though. */
769 /* Use an extra level of macroisation so as to ensure the soname/fnname
770 args are fully macro-expanded before pasting them together. */
/* Use an extra level of macroisation so the soname/fnname arguments are
   fully macro-expanded before being pasted together.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the mangled name of a wrapper function: "_vgw00000" plus the
   Z-encoding flavour (ZU = unencoded soname, ZZ = Z-encoded soname),
   soname, and function name.  The "00000" is the default behavioural
   equivalence-class tag -- see pub_tool_redir.h. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
779 /* Use this macro from within a wrapper function to collect the
780 context (address and possibly other info) of the original function.
781 Once you have that you can then use it in one of the CALL_FN_
782 macros. The type of the argument _lval is OrigFn. */
783 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
785 /* Also provide end-user facilities for function replacement, rather
786 than wrapping. A replacement function differs from a wrapper in
787 that it has no way to get hold of the original function being
788 called, and hence no way to call onwards to it. In a replacement
789 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Build the mangled name of a replacement function ("_vgr" prefix
   instead of "_vgw").  A replacement, unlike a wrapper, has no way to
   reach the original function; VALGRIND_GET_ORIG_FN returns zero in it.
   Fix: stray extraction line-numbers fused into the macro text. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
797 /* Derivatives of the main macros below, for calling functions
/* Derivatives of the main CALL_FN_W_* macros for calling functions of
   arity 0..7 whose result is to be ignored: each forwards to the
   word-returning variant and discards the value into _junk.
   Fix: stray extraction line-numbers fused into the macro text. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
832 /* ------------------------- x86-{linux,darwin} ---------------- */
834 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
836 /* These regs are trashed by the hidden call. No need to mention eax
837 as gcc can already see that, plus causes gcc to bomb. */
838 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
840 /* Macros to save and align the stack before making a function
841 call and restore it afterwards as gcc may not keep the stack
842 pointer aligned if it doesn't realise calls are being made
843 to other functions. */
/* Save %esp into %edi and round it down to a 16-byte boundary before
   the hidden call (gcc may not keep the stack aligned when it cannot
   see calls being made); restore it from %edi afterwards.
   Fix: stray extraction line-numbers fused into the macro text. */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
851 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
854 #define CALL_FN_W_v(lval, orig) \
856 volatile OrigFn _orig = (orig); \
857 volatile unsigned long _argvec[1]; \
858 volatile unsigned long _res; \
859 _argvec[0] = (unsigned long)_orig.nraddr; \
861 VALGRIND_ALIGN_STACK \
862 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
863 VALGRIND_CALL_NOREDIR_EAX \
864 VALGRIND_RESTORE_STACK \
865 : /*out*/ "=a" (_res) \
866 : /*in*/ "a" (&_argvec[0]) \
867 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
869 lval = (__typeof__(lval)) _res; \
872 #define CALL_FN_W_W(lval, orig, arg1) \
874 volatile OrigFn _orig = (orig); \
875 volatile unsigned long _argvec[2]; \
876 volatile unsigned long _res; \
877 _argvec[0] = (unsigned long)_orig.nraddr; \
878 _argvec[1] = (unsigned long)(arg1); \
880 VALGRIND_ALIGN_STACK \
881 "subl $12, %%esp\n\t" \
882 "pushl 4(%%eax)\n\t" \
883 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
884 VALGRIND_CALL_NOREDIR_EAX \
885 VALGRIND_RESTORE_STACK \
886 : /*out*/ "=a" (_res) \
887 : /*in*/ "a" (&_argvec[0]) \
888 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
890 lval = (__typeof__(lval)) _res; \
893 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
895 volatile OrigFn _orig = (orig); \
896 volatile unsigned long _argvec[3]; \
897 volatile unsigned long _res; \
898 _argvec[0] = (unsigned long)_orig.nraddr; \
899 _argvec[1] = (unsigned long)(arg1); \
900 _argvec[2] = (unsigned long)(arg2); \
902 VALGRIND_ALIGN_STACK \
903 "subl $8, %%esp\n\t" \
904 "pushl 8(%%eax)\n\t" \
905 "pushl 4(%%eax)\n\t" \
906 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
907 VALGRIND_CALL_NOREDIR_EAX \
908 VALGRIND_RESTORE_STACK \
909 : /*out*/ "=a" (_res) \
910 : /*in*/ "a" (&_argvec[0]) \
911 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
913 lval = (__typeof__(lval)) _res; \
916 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
918 volatile OrigFn _orig = (orig); \
919 volatile unsigned long _argvec[4]; \
920 volatile unsigned long _res; \
921 _argvec[0] = (unsigned long)_orig.nraddr; \
922 _argvec[1] = (unsigned long)(arg1); \
923 _argvec[2] = (unsigned long)(arg2); \
924 _argvec[3] = (unsigned long)(arg3); \
926 VALGRIND_ALIGN_STACK \
927 "subl $4, %%esp\n\t" \
928 "pushl 12(%%eax)\n\t" \
929 "pushl 8(%%eax)\n\t" \
930 "pushl 4(%%eax)\n\t" \
931 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
932 VALGRIND_CALL_NOREDIR_EAX \
933 VALGRIND_RESTORE_STACK \
934 : /*out*/ "=a" (_res) \
935 : /*in*/ "a" (&_argvec[0]) \
936 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
938 lval = (__typeof__(lval)) _res; \
941 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
943 volatile OrigFn _orig = (orig); \
944 volatile unsigned long _argvec[5]; \
945 volatile unsigned long _res; \
946 _argvec[0] = (unsigned long)_orig.nraddr; \
947 _argvec[1] = (unsigned long)(arg1); \
948 _argvec[2] = (unsigned long)(arg2); \
949 _argvec[3] = (unsigned long)(arg3); \
950 _argvec[4] = (unsigned long)(arg4); \
952 VALGRIND_ALIGN_STACK \
953 "pushl 16(%%eax)\n\t" \
954 "pushl 12(%%eax)\n\t" \
955 "pushl 8(%%eax)\n\t" \
956 "pushl 4(%%eax)\n\t" \
957 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
958 VALGRIND_CALL_NOREDIR_EAX \
959 VALGRIND_RESTORE_STACK \
960 : /*out*/ "=a" (_res) \
961 : /*in*/ "a" (&_argvec[0]) \
962 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
964 lval = (__typeof__(lval)) _res; \
967 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
969 volatile OrigFn _orig = (orig); \
970 volatile unsigned long _argvec[6]; \
971 volatile unsigned long _res; \
972 _argvec[0] = (unsigned long)_orig.nraddr; \
973 _argvec[1] = (unsigned long)(arg1); \
974 _argvec[2] = (unsigned long)(arg2); \
975 _argvec[3] = (unsigned long)(arg3); \
976 _argvec[4] = (unsigned long)(arg4); \
977 _argvec[5] = (unsigned long)(arg5); \
979 VALGRIND_ALIGN_STACK \
980 "subl $12, %%esp\n\t" \
981 "pushl 20(%%eax)\n\t" \
982 "pushl 16(%%eax)\n\t" \
983 "pushl 12(%%eax)\n\t" \
984 "pushl 8(%%eax)\n\t" \
985 "pushl 4(%%eax)\n\t" \
986 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
987 VALGRIND_CALL_NOREDIR_EAX \
988 VALGRIND_RESTORE_STACK \
989 : /*out*/ "=a" (_res) \
990 : /*in*/ "a" (&_argvec[0]) \
991 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
993 lval = (__typeof__(lval)) _res; \
/* x86: call orig fn with 6 stack args (8 + 6*4 = 32, 16-byte aligned). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call orig fn with 7 stack args (4 + 7*4 = 32, 16-byte aligned).
   NOTE(review): restored the "arg7)" parameter-continuation line that
   was missing from this extract. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call orig fn with 8 stack args (8*4 = 32, no pad needed). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call orig fn with 9 stack args (12 + 9*4 = 48, 16-byte aligned). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call orig fn with 10 stack args (8 + 10*4 = 48, 16-byte aligned). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call orig fn with 11 stack args (4 + 11*4 = 48, 16-byte aligned).
   NOTE(review): restored the missing "arg11)" continuation line. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call orig fn with 12 stack args (12*4 = 48, no pad needed).
   NOTE(review): restored the missing "arg11,arg12)" continuation line. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1261 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1263 /* ------------------------ amd64-{linux,darwin} --------------- */
1265 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1267 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1269 /* These regs are trashed by the hidden call. */
/* amd64: registers clobbered by the hidden call (SysV caller-saved set;
   %rax is excluded because it carries the result and is listed as an
   output constraint). */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1273 /* This is all pretty complex. It's so as to make stack unwinding
1274 work reliably. See bug 243270. The basic problem is the sub and
1275 add of 128 of %rsp in all of the following macros. If gcc believes
1276 the CFA is in %rsp, then unwinding may fail, because what's at the
1277 CFA is not what gcc "expected" when it constructs the CFIs for the
1278 places where the macros are instantiated.
1280 But we can't just add a CFI annotation to increase the CFA offset
1281 by 128, to match the sub of 128 from %rsp, because we don't know
1282 whether gcc has chosen %rsp as the CFA at that point, or whether it
1283 has chosen some other register (eg, %rbp). In the latter case,
1284 adding a CFI annotation to change the CFA offset is simply wrong.
1286 So the solution is to get hold of the CFA using
1287 __builtin_dwarf_cfa(), put it in a known register, and add a
1288 CFI annotation to say what the register is. We choose %rbp for
1289 this (perhaps perversely), because:
1291 (1) %rbp is already subject to unwinding. If a new register was
1292 chosen then the unwinder would have to unwind it in all stack
1293 traces, which is expensive, and
1295 (2) %rbp is already subject to precise exception updates in the
1296 JIT. If a new register was chosen, we'd have to have precise
1297 exceptions for it too, which reduces performance of the
1300 However .. one extra complication. We can't just whack the result
1301 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1302 list of trashed registers at the end of the inline assembly
1303 fragments; gcc won't allow %rbp to appear in that list. Hence
1304 instead we need to stash %rbp in %r15 for the duration of the asm,
1305 and say that %r15 is trashed instead. gcc seems happy to go with
1308 Oh .. and this all needs to be conditionalised so that it is
1309 unchanged from before this commit, when compiled with older gccs
1310 that don't support __builtin_dwarf_cfa. Furthermore, since
1311 this header file is freestanding, it has to be independent of
1312 config.h, and so the following conditionalisation cannot depend on
1313 configure time checks.
1315 Although it's not clear from
1316 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1317 this expression excludes Darwin.
1318 .cfi directives in Darwin assembly appear to be completely
1319 different and I haven't investigated how they work.
1321 For even more entertainment value, note we have to use the
1322 completely undocumented __builtin_dwarf_cfa(), which appears to
1323 really compute the CFA, whereas __builtin_frame_address(0) claims
1324 to but actually doesn't. See
1325 https://bugs.kde.org/show_bug.cgi?id=243270#c47
/* When the compiler emits DWARF CFI, stash the CFA in %rbp (saving the
   real %rbp in %r15) so the stack stays unwindable across the hidden
   call; otherwise these expand to nothing.
   NOTE(review): restored the "#else" and "#endif" lines missing from
   this extract. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1344 /* Macros to save and align the stack before making a function
1345 call and restore it afterwards as gcc may not keep the stack
1346 pointer aligned if it doesn't realise calls are being made
1347 to other functions. */
/* Save %rsp in %r14, then force 16-byte alignment for the call;
   VALGRIND_RESTORE_STACK puts the original %rsp back. */
#define VALGRIND_ALIGN_STACK                                      \
   "movq %%rsp,%%r14\n\t"                                         \
   "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "movq %%r14,%%rsp\n\t"
1355 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1358 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1359 macros. In order not to trash the stack redzone, we need to drop
1360 %rsp by 128 before the hidden call, and restore afterwards. The
1361 nastiness is that it is only by luck that the stack still appears
1362 to be unwindable during the hidden call - since then the behaviour
1363 of any routine using this macro does not match what the CFI data
1366 Why is this important? Imagine that a wrapper has a stack
1367 allocated local, and passes to the hidden call, a pointer to it.
1368 Because gcc does not know about the hidden call, it may allocate
1369 that local in the redzone. Unfortunately the hidden call may then
1370 trash it before it comes to use it. So we must step clear of the
1371 redzone, for the duration of the hidden call, to make it safe.
1373 Probably the same problem afflicts the other redzone-style ABIs too
1374 (ppc64-linux); but for those, the stack is
1375 self describing (none of this CFI nonsense) so at least messing
1376 with the stack pointer doesn't give a danger of non-unwindable
/* amd64: call orig fn with no args; drop %rsp by 128 first to step
   clear of the red zone. Result in %rax.
   NOTE(review): restored the do/while and asm wrapper lines lost in
   extraction, and removed stray line-number prefixes. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call orig fn with 1 register arg (%rdi). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call orig fn with 2 register args (%rdi, %rsi). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call orig fn with 3 register args (%rdi, %rsi, %rdx). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call orig fn with 4 register args (%rdi, %rsi, %rdx, %rcx). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call orig fn with 5 register args (adds %r8). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call orig fn with 6 register args (adds %r9); all in regs. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: 6 register args + arg7 on the stack; 136 + one 8-byte push
   keeps %rsp 16-byte aligned past the 128-byte red-zone skip.
   NOTE(review): restored the missing "arg7)" continuation line. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: 6 register args + args 7-8 on the stack (128 + 2 pushes = 144,
   16-byte aligned). NOTE(review): restored missing "arg7,arg8)" line. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: 6 register args + args 7-9 on the stack (136 + 3 pushes = 160,
   16-byte aligned). NOTE(review): restored missing "arg7,arg8,arg9)". */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: 6 register args + args 7-10 on the stack (128 + 4 pushes = 160). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: 6 register args + args 7-11 on the stack (136 + 5 pushes = 176). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: 6 register args + args 7-12 on the stack (128 + 6 pushes = 176). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1814 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
1816 /* ------------------------ ppc32-linux ------------------------ */
1818 #if defined(PLAT_ppc32_linux)
1820 /* This is useful for finding out about the on-stack stuff:
1822 extern int f9 ( int,int,int,int,int,int,int,int,int );
1823 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
1824 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
1825 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
1828 return f9(11,22,33,44,55,66,77,88,99);
1831 return f10(11,22,33,44,55,66,77,88,99,110);
1834 return f11(11,22,33,44,55,66,77,88,99,110,121);
1837 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
1841 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
1843 /* These regs are trashed by the hidden call. */
/* ppc32: registers trashed by the hidden call.
   NOTE(review): restored the final "r11","r12","r13" line missing
   from this extract. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
1850 /* Macros to save and align the stack before making a function
1851 call and restore it afterwards as gcc may not keep the stack
1852 pointer aligned if it doesn't realise calls are being made
1853 to other functions. */
/* ppc32: save r1 (sp) in r28, clear its low 4 bits for 16-byte
   alignment; restore puts the saved sp back.
   NOTE(review): restored the "mr 28,1" / "mr 1,28" lines missing
   from this extract. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
1861 /* These CALL_FN_ macros assume that on ppc32-linux,
1862 sizeof(unsigned long) == 4. */
/* ppc32: call orig fn with no args; target address goes via r11,
   result comes back in r3.
   NOTE(review): restored the do/while and asm wrapper plus the
   "mr 11,%1" and "mr %0,3" lines lost in extraction. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32: call orig fn with 1 arg (r3). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32: call orig fn with 2 args (r3, r4).
   NOTE(review): restored the missing "lwz 4,8(11)" line. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32: call orig fn with 3 args (r3-r5). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32: call orig fn with 4 args (r3-r6). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32-linux: call orig fn with 5 word args (arg1->r3 ... arg5->r7),
   result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
1984 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1986 volatile OrigFn _orig = (orig); \
1987 volatile unsigned long _argvec[6]; \
1988 volatile unsigned long _res; \
1989 _argvec[0] = (unsigned long)_orig.nraddr; \
1990 _argvec[1] = (unsigned long)arg1; \
1991 _argvec[2] = (unsigned long)arg2; \
1992 _argvec[3] = (unsigned long)arg3; \
1993 _argvec[4] = (unsigned long)arg4; \
1994 _argvec[5] = (unsigned long)arg5; \
1996 VALGRIND_ALIGN_STACK \
1998 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2000 "lwz 5,12(11)\n\t" \
2001 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2002 "lwz 7,20(11)\n\t" \
2003 "lwz 11,0(11)\n\t" /* target->r11 */ \
2004 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2005 VALGRIND_RESTORE_STACK \
2007 : /*out*/ "=r" (_res) \
2008 : /*in*/ "r" (&_argvec[0]) \
2009 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2011 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 6 word args (arg1->r3 ... arg6->r8),
   result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2014 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2016 volatile OrigFn _orig = (orig); \
2017 volatile unsigned long _argvec[7]; \
2018 volatile unsigned long _res; \
2019 _argvec[0] = (unsigned long)_orig.nraddr; \
2020 _argvec[1] = (unsigned long)arg1; \
2021 _argvec[2] = (unsigned long)arg2; \
2022 _argvec[3] = (unsigned long)arg3; \
2023 _argvec[4] = (unsigned long)arg4; \
2024 _argvec[5] = (unsigned long)arg5; \
2025 _argvec[6] = (unsigned long)arg6; \
2027 VALGRIND_ALIGN_STACK \
2029 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2031 "lwz 5,12(11)\n\t" \
2032 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2033 "lwz 7,20(11)\n\t" \
2034 "lwz 8,24(11)\n\t" \
2035 "lwz 11,0(11)\n\t" /* target->r11 */ \
2036 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2037 VALGRIND_RESTORE_STACK \
2039 : /*out*/ "=r" (_res) \
2040 : /*in*/ "r" (&_argvec[0]) \
2041 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2043 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 7 word args (arg1->r3 ... arg7->r9),
   result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided (incl. the `arg7)` parameter-list continuation);
   restore from pristine valgrind.h. */
2046 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2049 volatile OrigFn _orig = (orig); \
2050 volatile unsigned long _argvec[8]; \
2051 volatile unsigned long _res; \
2052 _argvec[0] = (unsigned long)_orig.nraddr; \
2053 _argvec[1] = (unsigned long)arg1; \
2054 _argvec[2] = (unsigned long)arg2; \
2055 _argvec[3] = (unsigned long)arg3; \
2056 _argvec[4] = (unsigned long)arg4; \
2057 _argvec[5] = (unsigned long)arg5; \
2058 _argvec[6] = (unsigned long)arg6; \
2059 _argvec[7] = (unsigned long)arg7; \
2061 VALGRIND_ALIGN_STACK \
2063 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2065 "lwz 5,12(11)\n\t" \
2066 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2067 "lwz 7,20(11)\n\t" \
2068 "lwz 8,24(11)\n\t" \
2069 "lwz 9,28(11)\n\t" \
2070 "lwz 11,0(11)\n\t" /* target->r11 */ \
2071 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2072 VALGRIND_RESTORE_STACK \
2074 : /*out*/ "=r" (_res) \
2075 : /*in*/ "r" (&_argvec[0]) \
2076 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2078 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 8 word args (arg1->r3 ... arg8->r10),
   result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2081 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2084 volatile OrigFn _orig = (orig); \
2085 volatile unsigned long _argvec[9]; \
2086 volatile unsigned long _res; \
2087 _argvec[0] = (unsigned long)_orig.nraddr; \
2088 _argvec[1] = (unsigned long)arg1; \
2089 _argvec[2] = (unsigned long)arg2; \
2090 _argvec[3] = (unsigned long)arg3; \
2091 _argvec[4] = (unsigned long)arg4; \
2092 _argvec[5] = (unsigned long)arg5; \
2093 _argvec[6] = (unsigned long)arg6; \
2094 _argvec[7] = (unsigned long)arg7; \
2095 _argvec[8] = (unsigned long)arg8; \
2097 VALGRIND_ALIGN_STACK \
2099 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2101 "lwz 5,12(11)\n\t" \
2102 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2103 "lwz 7,20(11)\n\t" \
2104 "lwz 8,24(11)\n\t" \
2105 "lwz 9,28(11)\n\t" \
2106 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2107 "lwz 11,0(11)\n\t" /* target->r11 */ \
2108 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2109 VALGRIND_RESTORE_STACK \
2111 : /*out*/ "=r" (_res) \
2112 : /*in*/ "r" (&_argvec[0]) \
2113 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2115 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 9 word args; args 1-8 -> r3..r10, the
   frame is grown by 16 and arg9 is apparently spilled to the stack (its
   store instruction is not visible here); result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2118 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2121 volatile OrigFn _orig = (orig); \
2122 volatile unsigned long _argvec[10]; \
2123 volatile unsigned long _res; \
2124 _argvec[0] = (unsigned long)_orig.nraddr; \
2125 _argvec[1] = (unsigned long)arg1; \
2126 _argvec[2] = (unsigned long)arg2; \
2127 _argvec[3] = (unsigned long)arg3; \
2128 _argvec[4] = (unsigned long)arg4; \
2129 _argvec[5] = (unsigned long)arg5; \
2130 _argvec[6] = (unsigned long)arg6; \
2131 _argvec[7] = (unsigned long)arg7; \
2132 _argvec[8] = (unsigned long)arg8; \
2133 _argvec[9] = (unsigned long)arg9; \
2135 VALGRIND_ALIGN_STACK \
2137 "addi 1,1,-16\n\t" \
2139 "lwz 3,36(11)\n\t" \
2142 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2144 "lwz 5,12(11)\n\t" \
2145 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2146 "lwz 7,20(11)\n\t" \
2147 "lwz 8,24(11)\n\t" \
2148 "lwz 9,28(11)\n\t" \
2149 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2150 "lwz 11,0(11)\n\t" /* target->r11 */ \
2151 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2152 VALGRIND_RESTORE_STACK \
2154 : /*out*/ "=r" (_res) \
2155 : /*in*/ "r" (&_argvec[0]) \
2156 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2158 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 10 word args; args 1-8 -> r3..r10,
   frame grown by 16 and args 9-10 apparently spilled to the stack
   (stores not visible here); result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2161 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2162 arg7,arg8,arg9,arg10) \
2164 volatile OrigFn _orig = (orig); \
2165 volatile unsigned long _argvec[11]; \
2166 volatile unsigned long _res; \
2167 _argvec[0] = (unsigned long)_orig.nraddr; \
2168 _argvec[1] = (unsigned long)arg1; \
2169 _argvec[2] = (unsigned long)arg2; \
2170 _argvec[3] = (unsigned long)arg3; \
2171 _argvec[4] = (unsigned long)arg4; \
2172 _argvec[5] = (unsigned long)arg5; \
2173 _argvec[6] = (unsigned long)arg6; \
2174 _argvec[7] = (unsigned long)arg7; \
2175 _argvec[8] = (unsigned long)arg8; \
2176 _argvec[9] = (unsigned long)arg9; \
2177 _argvec[10] = (unsigned long)arg10; \
2179 VALGRIND_ALIGN_STACK \
2181 "addi 1,1,-16\n\t" \
2183 "lwz 3,40(11)\n\t" \
2186 "lwz 3,36(11)\n\t" \
2189 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2191 "lwz 5,12(11)\n\t" \
2192 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2193 "lwz 7,20(11)\n\t" \
2194 "lwz 8,24(11)\n\t" \
2195 "lwz 9,28(11)\n\t" \
2196 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2197 "lwz 11,0(11)\n\t" /* target->r11 */ \
2198 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2199 VALGRIND_RESTORE_STACK \
2201 : /*out*/ "=r" (_res) \
2202 : /*in*/ "r" (&_argvec[0]) \
2203 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2205 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 11 word args; args 1-8 -> r3..r10,
   frame grown by 32 and args 9-11 apparently spilled to the stack
   (stores not visible here); result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2208 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2209 arg7,arg8,arg9,arg10,arg11) \
2211 volatile OrigFn _orig = (orig); \
2212 volatile unsigned long _argvec[12]; \
2213 volatile unsigned long _res; \
2214 _argvec[0] = (unsigned long)_orig.nraddr; \
2215 _argvec[1] = (unsigned long)arg1; \
2216 _argvec[2] = (unsigned long)arg2; \
2217 _argvec[3] = (unsigned long)arg3; \
2218 _argvec[4] = (unsigned long)arg4; \
2219 _argvec[5] = (unsigned long)arg5; \
2220 _argvec[6] = (unsigned long)arg6; \
2221 _argvec[7] = (unsigned long)arg7; \
2222 _argvec[8] = (unsigned long)arg8; \
2223 _argvec[9] = (unsigned long)arg9; \
2224 _argvec[10] = (unsigned long)arg10; \
2225 _argvec[11] = (unsigned long)arg11; \
2227 VALGRIND_ALIGN_STACK \
2229 "addi 1,1,-32\n\t" \
2231 "lwz 3,44(11)\n\t" \
2234 "lwz 3,40(11)\n\t" \
2237 "lwz 3,36(11)\n\t" \
2240 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2242 "lwz 5,12(11)\n\t" \
2243 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2244 "lwz 7,20(11)\n\t" \
2245 "lwz 8,24(11)\n\t" \
2246 "lwz 9,28(11)\n\t" \
2247 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2248 "lwz 11,0(11)\n\t" /* target->r11 */ \
2249 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2250 VALGRIND_RESTORE_STACK \
2252 : /*out*/ "=r" (_res) \
2253 : /*in*/ "r" (&_argvec[0]) \
2254 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2256 lval = (__typeof__(lval)) _res; \
/* ppc32-linux: call orig fn with 12 word args; args 1-8 -> r3..r10,
   frame grown by 32 and args 9-12 apparently spilled to the stack
   (stores not visible here); result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2259 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2260 arg7,arg8,arg9,arg10,arg11,arg12) \
2262 volatile OrigFn _orig = (orig); \
2263 volatile unsigned long _argvec[13]; \
2264 volatile unsigned long _res; \
2265 _argvec[0] = (unsigned long)_orig.nraddr; \
2266 _argvec[1] = (unsigned long)arg1; \
2267 _argvec[2] = (unsigned long)arg2; \
2268 _argvec[3] = (unsigned long)arg3; \
2269 _argvec[4] = (unsigned long)arg4; \
2270 _argvec[5] = (unsigned long)arg5; \
2271 _argvec[6] = (unsigned long)arg6; \
2272 _argvec[7] = (unsigned long)arg7; \
2273 _argvec[8] = (unsigned long)arg8; \
2274 _argvec[9] = (unsigned long)arg9; \
2275 _argvec[10] = (unsigned long)arg10; \
2276 _argvec[11] = (unsigned long)arg11; \
2277 _argvec[12] = (unsigned long)arg12; \
2279 VALGRIND_ALIGN_STACK \
2281 "addi 1,1,-32\n\t" \
2283 "lwz 3,48(11)\n\t" \
2286 "lwz 3,44(11)\n\t" \
2289 "lwz 3,40(11)\n\t" \
2292 "lwz 3,36(11)\n\t" \
2295 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2297 "lwz 5,12(11)\n\t" \
2298 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2299 "lwz 7,20(11)\n\t" \
2300 "lwz 8,24(11)\n\t" \
2301 "lwz 9,28(11)\n\t" \
2302 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2303 "lwz 11,0(11)\n\t" /* target->r11 */ \
2304 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2305 VALGRIND_RESTORE_STACK \
2307 : /*out*/ "=r" (_res) \
2308 : /*in*/ "r" (&_argvec[0]) \
2309 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2311 lval = (__typeof__(lval)) _res; \
2314 #endif /* PLAT_ppc32_linux */
2316 /* ------------------------ ppc64-linux ------------------------ */
2318 #if defined(PLAT_ppc64_linux)
2320 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2322 /* These regs are trashed by the hidden call. */
/* ppc64-linux: registers clobbered by the hidden call, for asm clobber
   lists. NOTE(review): last line still ends in `\` — the continuation
   (presumably "r11","r12","r13") appears elided by extraction. */
2323 #define __CALLER_SAVED_REGS \
2324 "lr", "ctr", "xer", \
2325 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2326 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2329 /* Macros to save and align the stack before making a function
2330 call and restore it afterwards as gcc may not keep the stack
2331 pointer aligned if it doesn't realise calls are being made
2332 to other functions. */
/* ppc64-linux: round r1 (SP) down to a 16-byte boundary (rldicr clears
   the low 4 bits). NOTE(review): the save-original-SP line appears
   elided — the clobber lists below name "r28", presumably the save
   register; confirm against pristine valgrind.h. */
2334 #define VALGRIND_ALIGN_STACK \
2336 "rldicr 1,1,0,59\n\t"
/* ppc64-linux: undo VALGRIND_ALIGN_STACK's adjustment of r1 after the
   call. NOTE(review): the macro body appears entirely elided by
   extraction — restore from pristine valgrind.h. */
2337 #define VALGRIND_RESTORE_STACK
2340 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
/* ppc64-linux: call orig fn with no args. _argvec[1] holds the current
   TOC ptr (r2), _argvec[2] the target; r2 is saved at -16(11), the
   callee's TOC loaded from -8(11), and r2 restored after the call.
   Result -> lval. Note the asm "in" operand is &_argvec[2].
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2343 #define CALL_FN_W_v(lval, orig) \
2345 volatile OrigFn _orig = (orig); \
2346 volatile unsigned long _argvec[3+0]; \
2347 volatile unsigned long _res; \
2348 /* _argvec[0] holds current r2 across the call */ \
2349 _argvec[1] = (unsigned long)_orig.r2; \
2350 _argvec[2] = (unsigned long)_orig.nraddr; \
2352 VALGRIND_ALIGN_STACK \
2354 "std 2,-16(11)\n\t" /* save tocptr */ \
2355 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2356 "ld 11, 0(11)\n\t" /* target->r11 */ \
2357 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2360 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2361 VALGRIND_RESTORE_STACK \
2362 : /*out*/ "=r" (_res) \
2363 : /*in*/ "r" (&_argvec[2]) \
2364 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2366 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 1 dword arg (arg1->r3); TOC ptr (r2)
   saved/swapped/restored around the call; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2369 #define CALL_FN_W_W(lval, orig, arg1) \
2371 volatile OrigFn _orig = (orig); \
2372 volatile unsigned long _argvec[3+1]; \
2373 volatile unsigned long _res; \
2374 /* _argvec[0] holds current r2 across the call */ \
2375 _argvec[1] = (unsigned long)_orig.r2; \
2376 _argvec[2] = (unsigned long)_orig.nraddr; \
2377 _argvec[2+1] = (unsigned long)arg1; \
2379 VALGRIND_ALIGN_STACK \
2381 "std 2,-16(11)\n\t" /* save tocptr */ \
2382 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2383 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2384 "ld 11, 0(11)\n\t" /* target->r11 */ \
2385 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2388 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2389 VALGRIND_RESTORE_STACK \
2390 : /*out*/ "=r" (_res) \
2391 : /*in*/ "r" (&_argvec[2]) \
2392 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2394 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 2 dword args (arg1->r3, arg2->r4);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2397 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2399 volatile OrigFn _orig = (orig); \
2400 volatile unsigned long _argvec[3+2]; \
2401 volatile unsigned long _res; \
2402 /* _argvec[0] holds current r2 across the call */ \
2403 _argvec[1] = (unsigned long)_orig.r2; \
2404 _argvec[2] = (unsigned long)_orig.nraddr; \
2405 _argvec[2+1] = (unsigned long)arg1; \
2406 _argvec[2+2] = (unsigned long)arg2; \
2408 VALGRIND_ALIGN_STACK \
2410 "std 2,-16(11)\n\t" /* save tocptr */ \
2411 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2412 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2413 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2414 "ld 11, 0(11)\n\t" /* target->r11 */ \
2415 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2418 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2419 VALGRIND_RESTORE_STACK \
2420 : /*out*/ "=r" (_res) \
2421 : /*in*/ "r" (&_argvec[2]) \
2422 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2424 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 3 dword args (arg1->r3 ... arg3->r5);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2427 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2429 volatile OrigFn _orig = (orig); \
2430 volatile unsigned long _argvec[3+3]; \
2431 volatile unsigned long _res; \
2432 /* _argvec[0] holds current r2 across the call */ \
2433 _argvec[1] = (unsigned long)_orig.r2; \
2434 _argvec[2] = (unsigned long)_orig.nraddr; \
2435 _argvec[2+1] = (unsigned long)arg1; \
2436 _argvec[2+2] = (unsigned long)arg2; \
2437 _argvec[2+3] = (unsigned long)arg3; \
2439 VALGRIND_ALIGN_STACK \
2441 "std 2,-16(11)\n\t" /* save tocptr */ \
2442 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2443 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2444 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2445 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2446 "ld 11, 0(11)\n\t" /* target->r11 */ \
2447 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2450 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2451 VALGRIND_RESTORE_STACK \
2452 : /*out*/ "=r" (_res) \
2453 : /*in*/ "r" (&_argvec[2]) \
2454 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2456 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 4 dword args (arg1->r3 ... arg4->r6);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2459 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2461 volatile OrigFn _orig = (orig); \
2462 volatile unsigned long _argvec[3+4]; \
2463 volatile unsigned long _res; \
2464 /* _argvec[0] holds current r2 across the call */ \
2465 _argvec[1] = (unsigned long)_orig.r2; \
2466 _argvec[2] = (unsigned long)_orig.nraddr; \
2467 _argvec[2+1] = (unsigned long)arg1; \
2468 _argvec[2+2] = (unsigned long)arg2; \
2469 _argvec[2+3] = (unsigned long)arg3; \
2470 _argvec[2+4] = (unsigned long)arg4; \
2472 VALGRIND_ALIGN_STACK \
2474 "std 2,-16(11)\n\t" /* save tocptr */ \
2475 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2476 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2477 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2478 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2479 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2480 "ld 11, 0(11)\n\t" /* target->r11 */ \
2481 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2484 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2485 VALGRIND_RESTORE_STACK \
2486 : /*out*/ "=r" (_res) \
2487 : /*in*/ "r" (&_argvec[2]) \
2488 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2490 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 5 dword args (arg1->r3 ... arg5->r7);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2493 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2495 volatile OrigFn _orig = (orig); \
2496 volatile unsigned long _argvec[3+5]; \
2497 volatile unsigned long _res; \
2498 /* _argvec[0] holds current r2 across the call */ \
2499 _argvec[1] = (unsigned long)_orig.r2; \
2500 _argvec[2] = (unsigned long)_orig.nraddr; \
2501 _argvec[2+1] = (unsigned long)arg1; \
2502 _argvec[2+2] = (unsigned long)arg2; \
2503 _argvec[2+3] = (unsigned long)arg3; \
2504 _argvec[2+4] = (unsigned long)arg4; \
2505 _argvec[2+5] = (unsigned long)arg5; \
2507 VALGRIND_ALIGN_STACK \
2509 "std 2,-16(11)\n\t" /* save tocptr */ \
2510 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2511 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2512 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2513 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2514 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2515 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2516 "ld 11, 0(11)\n\t" /* target->r11 */ \
2517 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2520 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2521 VALGRIND_RESTORE_STACK \
2522 : /*out*/ "=r" (_res) \
2523 : /*in*/ "r" (&_argvec[2]) \
2524 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2526 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 6 dword args (arg1->r3 ... arg6->r8);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2529 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2531 volatile OrigFn _orig = (orig); \
2532 volatile unsigned long _argvec[3+6]; \
2533 volatile unsigned long _res; \
2534 /* _argvec[0] holds current r2 across the call */ \
2535 _argvec[1] = (unsigned long)_orig.r2; \
2536 _argvec[2] = (unsigned long)_orig.nraddr; \
2537 _argvec[2+1] = (unsigned long)arg1; \
2538 _argvec[2+2] = (unsigned long)arg2; \
2539 _argvec[2+3] = (unsigned long)arg3; \
2540 _argvec[2+4] = (unsigned long)arg4; \
2541 _argvec[2+5] = (unsigned long)arg5; \
2542 _argvec[2+6] = (unsigned long)arg6; \
2544 VALGRIND_ALIGN_STACK \
2546 "std 2,-16(11)\n\t" /* save tocptr */ \
2547 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2548 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2549 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2550 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2551 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2552 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2553 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2554 "ld 11, 0(11)\n\t" /* target->r11 */ \
2555 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2558 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2559 VALGRIND_RESTORE_STACK \
2560 : /*out*/ "=r" (_res) \
2561 : /*in*/ "r" (&_argvec[2]) \
2562 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2564 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 7 dword args (arg1->r3 ... arg7->r9);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided (incl. the `arg7)` parameter-list continuation);
   restore from pristine valgrind.h. */
2567 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2570 volatile OrigFn _orig = (orig); \
2571 volatile unsigned long _argvec[3+7]; \
2572 volatile unsigned long _res; \
2573 /* _argvec[0] holds current r2 across the call */ \
2574 _argvec[1] = (unsigned long)_orig.r2; \
2575 _argvec[2] = (unsigned long)_orig.nraddr; \
2576 _argvec[2+1] = (unsigned long)arg1; \
2577 _argvec[2+2] = (unsigned long)arg2; \
2578 _argvec[2+3] = (unsigned long)arg3; \
2579 _argvec[2+4] = (unsigned long)arg4; \
2580 _argvec[2+5] = (unsigned long)arg5; \
2581 _argvec[2+6] = (unsigned long)arg6; \
2582 _argvec[2+7] = (unsigned long)arg7; \
2584 VALGRIND_ALIGN_STACK \
2586 "std 2,-16(11)\n\t" /* save tocptr */ \
2587 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2588 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2589 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2590 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2591 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2592 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2593 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2594 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2595 "ld 11, 0(11)\n\t" /* target->r11 */ \
2596 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2599 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2600 VALGRIND_RESTORE_STACK \
2601 : /*out*/ "=r" (_res) \
2602 : /*in*/ "r" (&_argvec[2]) \
2603 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2605 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 8 dword args (arg1->r3 ... arg8->r10);
   TOC ptr saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2608 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2611 volatile OrigFn _orig = (orig); \
2612 volatile unsigned long _argvec[3+8]; \
2613 volatile unsigned long _res; \
2614 /* _argvec[0] holds current r2 across the call */ \
2615 _argvec[1] = (unsigned long)_orig.r2; \
2616 _argvec[2] = (unsigned long)_orig.nraddr; \
2617 _argvec[2+1] = (unsigned long)arg1; \
2618 _argvec[2+2] = (unsigned long)arg2; \
2619 _argvec[2+3] = (unsigned long)arg3; \
2620 _argvec[2+4] = (unsigned long)arg4; \
2621 _argvec[2+5] = (unsigned long)arg5; \
2622 _argvec[2+6] = (unsigned long)arg6; \
2623 _argvec[2+7] = (unsigned long)arg7; \
2624 _argvec[2+8] = (unsigned long)arg8; \
2626 VALGRIND_ALIGN_STACK \
2628 "std 2,-16(11)\n\t" /* save tocptr */ \
2629 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2630 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2631 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2632 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2633 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2634 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2635 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2636 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2637 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2638 "ld 11, 0(11)\n\t" /* target->r11 */ \
2639 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2642 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2643 VALGRIND_RESTORE_STACK \
2644 : /*out*/ "=r" (_res) \
2645 : /*in*/ "r" (&_argvec[2]) \
2646 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2648 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 9 dword args; args 1-8 -> r3..r10,
   frame expanded by 128 and arg9 stored at 112(1); TOC ptr
   saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2651 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2654 volatile OrigFn _orig = (orig); \
2655 volatile unsigned long _argvec[3+9]; \
2656 volatile unsigned long _res; \
2657 /* _argvec[0] holds current r2 across the call */ \
2658 _argvec[1] = (unsigned long)_orig.r2; \
2659 _argvec[2] = (unsigned long)_orig.nraddr; \
2660 _argvec[2+1] = (unsigned long)arg1; \
2661 _argvec[2+2] = (unsigned long)arg2; \
2662 _argvec[2+3] = (unsigned long)arg3; \
2663 _argvec[2+4] = (unsigned long)arg4; \
2664 _argvec[2+5] = (unsigned long)arg5; \
2665 _argvec[2+6] = (unsigned long)arg6; \
2666 _argvec[2+7] = (unsigned long)arg7; \
2667 _argvec[2+8] = (unsigned long)arg8; \
2668 _argvec[2+9] = (unsigned long)arg9; \
2670 VALGRIND_ALIGN_STACK \
2672 "std 2,-16(11)\n\t" /* save tocptr */ \
2673 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2674 "addi 1,1,-128\n\t" /* expand stack frame */ \
2677 "std 3,112(1)\n\t" \
2679 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2680 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2681 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2682 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2683 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2684 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2685 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2686 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2687 "ld 11, 0(11)\n\t" /* target->r11 */ \
2688 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2691 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2692 VALGRIND_RESTORE_STACK \
2693 : /*out*/ "=r" (_res) \
2694 : /*in*/ "r" (&_argvec[2]) \
2695 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2697 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 10 dword args; args 1-8 -> r3..r10,
   frame expanded by 128, args 9-10 stored at 112(1)/120(1); TOC ptr
   saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2700 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2701 arg7,arg8,arg9,arg10) \
2703 volatile OrigFn _orig = (orig); \
2704 volatile unsigned long _argvec[3+10]; \
2705 volatile unsigned long _res; \
2706 /* _argvec[0] holds current r2 across the call */ \
2707 _argvec[1] = (unsigned long)_orig.r2; \
2708 _argvec[2] = (unsigned long)_orig.nraddr; \
2709 _argvec[2+1] = (unsigned long)arg1; \
2710 _argvec[2+2] = (unsigned long)arg2; \
2711 _argvec[2+3] = (unsigned long)arg3; \
2712 _argvec[2+4] = (unsigned long)arg4; \
2713 _argvec[2+5] = (unsigned long)arg5; \
2714 _argvec[2+6] = (unsigned long)arg6; \
2715 _argvec[2+7] = (unsigned long)arg7; \
2716 _argvec[2+8] = (unsigned long)arg8; \
2717 _argvec[2+9] = (unsigned long)arg9; \
2718 _argvec[2+10] = (unsigned long)arg10; \
2720 VALGRIND_ALIGN_STACK \
2722 "std 2,-16(11)\n\t" /* save tocptr */ \
2723 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2724 "addi 1,1,-128\n\t" /* expand stack frame */ \
2727 "std 3,120(1)\n\t" \
2730 "std 3,112(1)\n\t" \
2732 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2733 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2734 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2735 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2736 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2737 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2738 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2739 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2740 "ld 11, 0(11)\n\t" /* target->r11 */ \
2741 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2744 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2745 VALGRIND_RESTORE_STACK \
2746 : /*out*/ "=r" (_res) \
2747 : /*in*/ "r" (&_argvec[2]) \
2748 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2750 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 11 dword args; args 1-8 -> r3..r10,
   frame expanded by 144, args 9-11 stored at 112/120/128(1); TOC ptr
   saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2753 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2754 arg7,arg8,arg9,arg10,arg11) \
2756 volatile OrigFn _orig = (orig); \
2757 volatile unsigned long _argvec[3+11]; \
2758 volatile unsigned long _res; \
2759 /* _argvec[0] holds current r2 across the call */ \
2760 _argvec[1] = (unsigned long)_orig.r2; \
2761 _argvec[2] = (unsigned long)_orig.nraddr; \
2762 _argvec[2+1] = (unsigned long)arg1; \
2763 _argvec[2+2] = (unsigned long)arg2; \
2764 _argvec[2+3] = (unsigned long)arg3; \
2765 _argvec[2+4] = (unsigned long)arg4; \
2766 _argvec[2+5] = (unsigned long)arg5; \
2767 _argvec[2+6] = (unsigned long)arg6; \
2768 _argvec[2+7] = (unsigned long)arg7; \
2769 _argvec[2+8] = (unsigned long)arg8; \
2770 _argvec[2+9] = (unsigned long)arg9; \
2771 _argvec[2+10] = (unsigned long)arg10; \
2772 _argvec[2+11] = (unsigned long)arg11; \
2774 VALGRIND_ALIGN_STACK \
2776 "std 2,-16(11)\n\t" /* save tocptr */ \
2777 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2778 "addi 1,1,-144\n\t" /* expand stack frame */ \
2781 "std 3,128(1)\n\t" \
2784 "std 3,120(1)\n\t" \
2787 "std 3,112(1)\n\t" \
2789 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2790 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2791 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2792 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2793 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2794 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2795 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2796 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2797 "ld 11, 0(11)\n\t" /* target->r11 */ \
2798 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2801 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2802 VALGRIND_RESTORE_STACK \
2803 : /*out*/ "=r" (_res) \
2804 : /*in*/ "r" (&_argvec[2]) \
2805 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2807 lval = (__typeof__(lval)) _res; \
/* ppc64-linux: call orig fn with 12 dword args; args 1-8 -> r3..r10,
   frame expanded by 144, args 9-12 stored at 112..136(1); TOC ptr
   saved/swapped/restored; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2810 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2811 arg7,arg8,arg9,arg10,arg11,arg12) \
2813 volatile OrigFn _orig = (orig); \
2814 volatile unsigned long _argvec[3+12]; \
2815 volatile unsigned long _res; \
2816 /* _argvec[0] holds current r2 across the call */ \
2817 _argvec[1] = (unsigned long)_orig.r2; \
2818 _argvec[2] = (unsigned long)_orig.nraddr; \
2819 _argvec[2+1] = (unsigned long)arg1; \
2820 _argvec[2+2] = (unsigned long)arg2; \
2821 _argvec[2+3] = (unsigned long)arg3; \
2822 _argvec[2+4] = (unsigned long)arg4; \
2823 _argvec[2+5] = (unsigned long)arg5; \
2824 _argvec[2+6] = (unsigned long)arg6; \
2825 _argvec[2+7] = (unsigned long)arg7; \
2826 _argvec[2+8] = (unsigned long)arg8; \
2827 _argvec[2+9] = (unsigned long)arg9; \
2828 _argvec[2+10] = (unsigned long)arg10; \
2829 _argvec[2+11] = (unsigned long)arg11; \
2830 _argvec[2+12] = (unsigned long)arg12; \
2832 VALGRIND_ALIGN_STACK \
2834 "std 2,-16(11)\n\t" /* save tocptr */ \
2835 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2836 "addi 1,1,-144\n\t" /* expand stack frame */ \
2839 "std 3,136(1)\n\t" \
2842 "std 3,128(1)\n\t" \
2845 "std 3,120(1)\n\t" \
2848 "std 3,112(1)\n\t" \
2850 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2851 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2852 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2853 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2854 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2855 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2856 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2857 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2858 "ld 11, 0(11)\n\t" /* target->r11 */ \
2859 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2862 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2863 VALGRIND_RESTORE_STACK \
2864 : /*out*/ "=r" (_res) \
2865 : /*in*/ "r" (&_argvec[2]) \
2866 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2868 lval = (__typeof__(lval)) _res; \
2871 #endif /* PLAT_ppc64_linux */
2873 /* ------------------------- arm-linux ------------------------- */
2875 #if defined(PLAT_arm_linux)
2877 /* These regs are trashed by the hidden call. */
/* arm-linux: registers clobbered by the hidden call (r0-r4 plus r14/lr),
   for asm clobber lists. */
2878 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
2880 /* Macros to save and align the stack before making a function
2881 call and restore it afterwards as gcc may not keep the stack
2882 pointer aligned if it doesn't realise calls are being made
2883 to other functions. */
2885 /* This is a bit tricky. We store the original stack pointer in r10
2886 as it is callee-saves. gcc doesn't allow the use of r11 for some
2887 reason. Also, we can't directly "bic" the stack pointer in thumb
2888 mode since r13 isn't an allowed register number in that context.
2889 So use r4 as a temporary, since that is about to get trashed
2890 anyway, just after each use of this macro. Side effect is we need
2891 to be very careful about any future changes, since
2892 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* arm-linux: align sp down to 8 bytes using r4 as scratch (see the
   comment block above: original sp is kept in callee-saved r10, since
   sp can't be bic'd directly in thumb mode).
   NOTE(review): the mov-into-r10/r4 and mov-back-to-sp lines appear
   elided by extraction — only the "bic" remains; restore from pristine
   valgrind.h. */
2893 #define VALGRIND_ALIGN_STACK \
2896 "bic r4, r4, #7\n\t" \
/* arm-linux: restore sp after the call (presumably from r10, where
   VALGRIND_ALIGN_STACK saved it). NOTE(review): the macro body appears
   elided by extraction — restore from pristine valgrind.h. */
2898 #define VALGRIND_RESTORE_STACK \
2901 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
/* arm-linux: call orig fn with no args; target loaded into r4, branch
   via VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4, result -> lval. The "0"
   input constraint ties &_argvec[0] to the output register.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2904 #define CALL_FN_W_v(lval, orig) \
2906 volatile OrigFn _orig = (orig); \
2907 volatile unsigned long _argvec[1]; \
2908 volatile unsigned long _res; \
2909 _argvec[0] = (unsigned long)_orig.nraddr; \
2911 VALGRIND_ALIGN_STACK \
2912 "ldr r4, [%1] \n\t" /* target->r4 */ \
2913 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2914 VALGRIND_RESTORE_STACK \
2916 : /*out*/ "=r" (_res) \
2917 : /*in*/ "0" (&_argvec[0]) \
2918 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
2920 lval = (__typeof__(lval)) _res; \
/* arm-linux: call orig fn with 1 word arg (arg1->r0); target -> r4;
   result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2923 #define CALL_FN_W_W(lval, orig, arg1) \
2925 volatile OrigFn _orig = (orig); \
2926 volatile unsigned long _argvec[2]; \
2927 volatile unsigned long _res; \
2928 _argvec[0] = (unsigned long)_orig.nraddr; \
2929 _argvec[1] = (unsigned long)(arg1); \
2931 VALGRIND_ALIGN_STACK \
2932 "ldr r0, [%1, #4] \n\t" \
2933 "ldr r4, [%1] \n\t" /* target->r4 */ \
2934 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2935 VALGRIND_RESTORE_STACK \
2937 : /*out*/ "=r" (_res) \
2938 : /*in*/ "0" (&_argvec[0]) \
2939 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
2941 lval = (__typeof__(lval)) _res; \
/* arm-linux: call orig fn with 2 word args (arg1->r0, arg2->r1);
   target -> r4; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2944 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2946 volatile OrigFn _orig = (orig); \
2947 volatile unsigned long _argvec[3]; \
2948 volatile unsigned long _res; \
2949 _argvec[0] = (unsigned long)_orig.nraddr; \
2950 _argvec[1] = (unsigned long)(arg1); \
2951 _argvec[2] = (unsigned long)(arg2); \
2953 VALGRIND_ALIGN_STACK \
2954 "ldr r0, [%1, #4] \n\t" \
2955 "ldr r1, [%1, #8] \n\t" \
2956 "ldr r4, [%1] \n\t" /* target->r4 */ \
2957 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2958 VALGRIND_RESTORE_STACK \
2960 : /*out*/ "=r" (_res) \
2961 : /*in*/ "0" (&_argvec[0]) \
2962 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
2964 lval = (__typeof__(lval)) _res; \
/* arm-linux: call orig fn with 3 word args (arg1->r0 ... arg3->r2);
   target -> r4; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2967 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2969 volatile OrigFn _orig = (orig); \
2970 volatile unsigned long _argvec[4]; \
2971 volatile unsigned long _res; \
2972 _argvec[0] = (unsigned long)_orig.nraddr; \
2973 _argvec[1] = (unsigned long)(arg1); \
2974 _argvec[2] = (unsigned long)(arg2); \
2975 _argvec[3] = (unsigned long)(arg3); \
2977 VALGRIND_ALIGN_STACK \
2978 "ldr r0, [%1, #4] \n\t" \
2979 "ldr r1, [%1, #8] \n\t" \
2980 "ldr r2, [%1, #12] \n\t" \
2981 "ldr r4, [%1] \n\t" /* target->r4 */ \
2982 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2983 VALGRIND_RESTORE_STACK \
2985 : /*out*/ "=r" (_res) \
2986 : /*in*/ "0" (&_argvec[0]) \
2987 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
2989 lval = (__typeof__(lval)) _res; \
/* arm-linux: call orig fn with 4 word args (arg1->r0 ... arg4->r3);
   target -> r4; result -> lval.
   NOTE(review): garbled extract — stray line numbers fused in and some
   macro lines elided; restore from pristine valgrind.h. */
2992 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2994 volatile OrigFn _orig = (orig); \
2995 volatile unsigned long _argvec[5]; \
2996 volatile unsigned long _res; \
2997 _argvec[0] = (unsigned long)_orig.nraddr; \
2998 _argvec[1] = (unsigned long)(arg1); \
2999 _argvec[2] = (unsigned long)(arg2); \
3000 _argvec[3] = (unsigned long)(arg3); \
3001 _argvec[4] = (unsigned long)(arg4); \
3003 VALGRIND_ALIGN_STACK \
3004 "ldr r0, [%1, #4] \n\t" \
3005 "ldr r1, [%1, #8] \n\t" \
3006 "ldr r2, [%1, #12] \n\t" \
3007 "ldr r3, [%1, #16] \n\t" \
3008 "ldr r4, [%1] \n\t" /* target->r4 */ \
3009 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3010 VALGRIND_RESTORE_STACK \
3012 : /*out*/ "=r" (_res) \
3013 : /*in*/ "0" (&_argvec[0]) \
3014 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3016 lval = (__typeof__(lval)) _res; \
3019 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3021 volatile OrigFn _orig = (orig); \
3022 volatile unsigned long _argvec[6]; \
3023 volatile unsigned long _res; \
3024 _argvec[0] = (unsigned long)_orig.nraddr; \
3025 _argvec[1] = (unsigned long)(arg1); \
3026 _argvec[2] = (unsigned long)(arg2); \
3027 _argvec[3] = (unsigned long)(arg3); \
3028 _argvec[4] = (unsigned long)(arg4); \
3029 _argvec[5] = (unsigned long)(arg5); \
3031 VALGRIND_ALIGN_STACK \
3032 "sub sp, sp, #4 \n\t" \
3033 "ldr r0, [%1, #20] \n\t" \
3035 "ldr r0, [%1, #4] \n\t" \
3036 "ldr r1, [%1, #8] \n\t" \
3037 "ldr r2, [%1, #12] \n\t" \
3038 "ldr r3, [%1, #16] \n\t" \
3039 "ldr r4, [%1] \n\t" /* target->r4 */ \
3040 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3041 VALGRIND_RESTORE_STACK \
3043 : /*out*/ "=r" (_res) \
3044 : /*in*/ "0" (&_argvec[0]) \
3045 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3047 lval = (__typeof__(lval)) _res; \
3050 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3052 volatile OrigFn _orig = (orig); \
3053 volatile unsigned long _argvec[7]; \
3054 volatile unsigned long _res; \
3055 _argvec[0] = (unsigned long)_orig.nraddr; \
3056 _argvec[1] = (unsigned long)(arg1); \
3057 _argvec[2] = (unsigned long)(arg2); \
3058 _argvec[3] = (unsigned long)(arg3); \
3059 _argvec[4] = (unsigned long)(arg4); \
3060 _argvec[5] = (unsigned long)(arg5); \
3061 _argvec[6] = (unsigned long)(arg6); \
3063 VALGRIND_ALIGN_STACK \
3064 "ldr r0, [%1, #20] \n\t" \
3065 "ldr r1, [%1, #24] \n\t" \
3066 "push {r0, r1} \n\t" \
3067 "ldr r0, [%1, #4] \n\t" \
3068 "ldr r1, [%1, #8] \n\t" \
3069 "ldr r2, [%1, #12] \n\t" \
3070 "ldr r3, [%1, #16] \n\t" \
3071 "ldr r4, [%1] \n\t" /* target->r4 */ \
3072 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3073 VALGRIND_RESTORE_STACK \
3075 : /*out*/ "=r" (_res) \
3076 : /*in*/ "0" (&_argvec[0]) \
3077 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3079 lval = (__typeof__(lval)) _res; \
3082 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3085 volatile OrigFn _orig = (orig); \
3086 volatile unsigned long _argvec[8]; \
3087 volatile unsigned long _res; \
3088 _argvec[0] = (unsigned long)_orig.nraddr; \
3089 _argvec[1] = (unsigned long)(arg1); \
3090 _argvec[2] = (unsigned long)(arg2); \
3091 _argvec[3] = (unsigned long)(arg3); \
3092 _argvec[4] = (unsigned long)(arg4); \
3093 _argvec[5] = (unsigned long)(arg5); \
3094 _argvec[6] = (unsigned long)(arg6); \
3095 _argvec[7] = (unsigned long)(arg7); \
3097 VALGRIND_ALIGN_STACK \
3098 "sub sp, sp, #4 \n\t" \
3099 "ldr r0, [%1, #20] \n\t" \
3100 "ldr r1, [%1, #24] \n\t" \
3101 "ldr r2, [%1, #28] \n\t" \
3102 "push {r0, r1, r2} \n\t" \
3103 "ldr r0, [%1, #4] \n\t" \
3104 "ldr r1, [%1, #8] \n\t" \
3105 "ldr r2, [%1, #12] \n\t" \
3106 "ldr r3, [%1, #16] \n\t" \
3107 "ldr r4, [%1] \n\t" /* target->r4 */ \
3108 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3109 VALGRIND_RESTORE_STACK \
3111 : /*out*/ "=r" (_res) \
3112 : /*in*/ "0" (&_argvec[0]) \
3113 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3115 lval = (__typeof__(lval)) _res; \
3118 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3121 volatile OrigFn _orig = (orig); \
3122 volatile unsigned long _argvec[9]; \
3123 volatile unsigned long _res; \
3124 _argvec[0] = (unsigned long)_orig.nraddr; \
3125 _argvec[1] = (unsigned long)(arg1); \
3126 _argvec[2] = (unsigned long)(arg2); \
3127 _argvec[3] = (unsigned long)(arg3); \
3128 _argvec[4] = (unsigned long)(arg4); \
3129 _argvec[5] = (unsigned long)(arg5); \
3130 _argvec[6] = (unsigned long)(arg6); \
3131 _argvec[7] = (unsigned long)(arg7); \
3132 _argvec[8] = (unsigned long)(arg8); \
3134 VALGRIND_ALIGN_STACK \
3135 "ldr r0, [%1, #20] \n\t" \
3136 "ldr r1, [%1, #24] \n\t" \
3137 "ldr r2, [%1, #28] \n\t" \
3138 "ldr r3, [%1, #32] \n\t" \
3139 "push {r0, r1, r2, r3} \n\t" \
3140 "ldr r0, [%1, #4] \n\t" \
3141 "ldr r1, [%1, #8] \n\t" \
3142 "ldr r2, [%1, #12] \n\t" \
3143 "ldr r3, [%1, #16] \n\t" \
3144 "ldr r4, [%1] \n\t" /* target->r4 */ \
3145 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3146 VALGRIND_RESTORE_STACK \
3148 : /*out*/ "=r" (_res) \
3149 : /*in*/ "0" (&_argvec[0]) \
3150 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3152 lval = (__typeof__(lval)) _res; \
3155 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3158 volatile OrigFn _orig = (orig); \
3159 volatile unsigned long _argvec[10]; \
3160 volatile unsigned long _res; \
3161 _argvec[0] = (unsigned long)_orig.nraddr; \
3162 _argvec[1] = (unsigned long)(arg1); \
3163 _argvec[2] = (unsigned long)(arg2); \
3164 _argvec[3] = (unsigned long)(arg3); \
3165 _argvec[4] = (unsigned long)(arg4); \
3166 _argvec[5] = (unsigned long)(arg5); \
3167 _argvec[6] = (unsigned long)(arg6); \
3168 _argvec[7] = (unsigned long)(arg7); \
3169 _argvec[8] = (unsigned long)(arg8); \
3170 _argvec[9] = (unsigned long)(arg9); \
3172 VALGRIND_ALIGN_STACK \
3173 "sub sp, sp, #4 \n\t" \
3174 "ldr r0, [%1, #20] \n\t" \
3175 "ldr r1, [%1, #24] \n\t" \
3176 "ldr r2, [%1, #28] \n\t" \
3177 "ldr r3, [%1, #32] \n\t" \
3178 "ldr r4, [%1, #36] \n\t" \
3179 "push {r0, r1, r2, r3, r4} \n\t" \
3180 "ldr r0, [%1, #4] \n\t" \
3181 "ldr r1, [%1, #8] \n\t" \
3182 "ldr r2, [%1, #12] \n\t" \
3183 "ldr r3, [%1, #16] \n\t" \
3184 "ldr r4, [%1] \n\t" /* target->r4 */ \
3185 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3186 VALGRIND_RESTORE_STACK \
3188 : /*out*/ "=r" (_res) \
3189 : /*in*/ "0" (&_argvec[0]) \
3190 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3192 lval = (__typeof__(lval)) _res; \
3195 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3196 arg7,arg8,arg9,arg10) \
3198 volatile OrigFn _orig = (orig); \
3199 volatile unsigned long _argvec[11]; \
3200 volatile unsigned long _res; \
3201 _argvec[0] = (unsigned long)_orig.nraddr; \
3202 _argvec[1] = (unsigned long)(arg1); \
3203 _argvec[2] = (unsigned long)(arg2); \
3204 _argvec[3] = (unsigned long)(arg3); \
3205 _argvec[4] = (unsigned long)(arg4); \
3206 _argvec[5] = (unsigned long)(arg5); \
3207 _argvec[6] = (unsigned long)(arg6); \
3208 _argvec[7] = (unsigned long)(arg7); \
3209 _argvec[8] = (unsigned long)(arg8); \
3210 _argvec[9] = (unsigned long)(arg9); \
3211 _argvec[10] = (unsigned long)(arg10); \
3213 VALGRIND_ALIGN_STACK \
3214 "ldr r0, [%1, #40] \n\t" \
3216 "ldr r0, [%1, #20] \n\t" \
3217 "ldr r1, [%1, #24] \n\t" \
3218 "ldr r2, [%1, #28] \n\t" \
3219 "ldr r3, [%1, #32] \n\t" \
3220 "ldr r4, [%1, #36] \n\t" \
3221 "push {r0, r1, r2, r3, r4} \n\t" \
3222 "ldr r0, [%1, #4] \n\t" \
3223 "ldr r1, [%1, #8] \n\t" \
3224 "ldr r2, [%1, #12] \n\t" \
3225 "ldr r3, [%1, #16] \n\t" \
3226 "ldr r4, [%1] \n\t" /* target->r4 */ \
3227 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3228 VALGRIND_RESTORE_STACK \
3230 : /*out*/ "=r" (_res) \
3231 : /*in*/ "0" (&_argvec[0]) \
3232 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3234 lval = (__typeof__(lval)) _res; \
3237 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3238 arg6,arg7,arg8,arg9,arg10, \
3241 volatile OrigFn _orig = (orig); \
3242 volatile unsigned long _argvec[12]; \
3243 volatile unsigned long _res; \
3244 _argvec[0] = (unsigned long)_orig.nraddr; \
3245 _argvec[1] = (unsigned long)(arg1); \
3246 _argvec[2] = (unsigned long)(arg2); \
3247 _argvec[3] = (unsigned long)(arg3); \
3248 _argvec[4] = (unsigned long)(arg4); \
3249 _argvec[5] = (unsigned long)(arg5); \
3250 _argvec[6] = (unsigned long)(arg6); \
3251 _argvec[7] = (unsigned long)(arg7); \
3252 _argvec[8] = (unsigned long)(arg8); \
3253 _argvec[9] = (unsigned long)(arg9); \
3254 _argvec[10] = (unsigned long)(arg10); \
3255 _argvec[11] = (unsigned long)(arg11); \
3257 VALGRIND_ALIGN_STACK \
3258 "sub sp, sp, #4 \n\t" \
3259 "ldr r0, [%1, #40] \n\t" \
3260 "ldr r1, [%1, #44] \n\t" \
3261 "push {r0, r1} \n\t" \
3262 "ldr r0, [%1, #20] \n\t" \
3263 "ldr r1, [%1, #24] \n\t" \
3264 "ldr r2, [%1, #28] \n\t" \
3265 "ldr r3, [%1, #32] \n\t" \
3266 "ldr r4, [%1, #36] \n\t" \
3267 "push {r0, r1, r2, r3, r4} \n\t" \
3268 "ldr r0, [%1, #4] \n\t" \
3269 "ldr r1, [%1, #8] \n\t" \
3270 "ldr r2, [%1, #12] \n\t" \
3271 "ldr r3, [%1, #16] \n\t" \
3272 "ldr r4, [%1] \n\t" /* target->r4 */ \
3273 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3274 VALGRIND_RESTORE_STACK \
3276 : /*out*/ "=r" (_res) \
3277 : /*in*/ "0" (&_argvec[0]) \
3278 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3280 lval = (__typeof__(lval)) _res; \
3283 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3284 arg6,arg7,arg8,arg9,arg10, \
3287 volatile OrigFn _orig = (orig); \
3288 volatile unsigned long _argvec[13]; \
3289 volatile unsigned long _res; \
3290 _argvec[0] = (unsigned long)_orig.nraddr; \
3291 _argvec[1] = (unsigned long)(arg1); \
3292 _argvec[2] = (unsigned long)(arg2); \
3293 _argvec[3] = (unsigned long)(arg3); \
3294 _argvec[4] = (unsigned long)(arg4); \
3295 _argvec[5] = (unsigned long)(arg5); \
3296 _argvec[6] = (unsigned long)(arg6); \
3297 _argvec[7] = (unsigned long)(arg7); \
3298 _argvec[8] = (unsigned long)(arg8); \
3299 _argvec[9] = (unsigned long)(arg9); \
3300 _argvec[10] = (unsigned long)(arg10); \
3301 _argvec[11] = (unsigned long)(arg11); \
3302 _argvec[12] = (unsigned long)(arg12); \
3304 VALGRIND_ALIGN_STACK \
3305 "ldr r0, [%1, #40] \n\t" \
3306 "ldr r1, [%1, #44] \n\t" \
3307 "ldr r2, [%1, #48] \n\t" \
3308 "push {r0, r1, r2} \n\t" \
3309 "ldr r0, [%1, #20] \n\t" \
3310 "ldr r1, [%1, #24] \n\t" \
3311 "ldr r2, [%1, #28] \n\t" \
3312 "ldr r3, [%1, #32] \n\t" \
3313 "ldr r4, [%1, #36] \n\t" \
3314 "push {r0, r1, r2, r3, r4} \n\t" \
3315 "ldr r0, [%1, #4] \n\t" \
3316 "ldr r1, [%1, #8] \n\t" \
3317 "ldr r2, [%1, #12] \n\t" \
3318 "ldr r3, [%1, #16] \n\t" \
3319 "ldr r4, [%1] \n\t" /* target->r4 */ \
3320 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3321 VALGRIND_RESTORE_STACK \
3323 : /*out*/ "=r" (_res) \
3324 : /*in*/ "0" (&_argvec[0]) \
3325 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3327 lval = (__typeof__(lval)) _res; \
3330 #endif /* PLAT_arm_linux */
/* ------------------------- s390x-linux ------------------------- */

#if defined(PLAT_s390x_linux)

/* Similar workaround as amd64 (see above), but we use r11 as frame
   pointer and save the old r11 in r7. r11 might be used for
   argvec, therefore we copy argvec in r1 since r1 is clobbered
   after the call anyway.  */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      ".cfi_remember_state\n\t"                                   \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */          \
      "lgr 7,11\n\t"                                              \
      "lgr 11,%2\n\t"                                             \
      ".cfi_def_cfa r11, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "lgr 11, 7\n\t"                                             \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE                                   \
      "lgr 1,%1\n\t"
#  define VALGRIND_CFI_EPILOGUE
#endif

/* Nb: On s390 the stack pointer is properly aligned *at all times*
   according to the s390 GCC maintainer. (The ABI specification is not
   precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
   VALGRIND_RESTORE_STACK are not defined here. */

/* These regs are trashed by the hidden call. Note that we overwrite
   r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
   function a proper return address. All others are ABI defined call
   clobbers. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                           "f0","f1","f2","f3","f4","f5","f6","f7"

/* Nb: Although r11 is modified in the asm snippets below (inside
   VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
   two reasons:
   (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
       modified
   (2) GCC will complain that r11 cannot appear inside a clobber section,
       when compiled with -O -fno-omit-frame-pointer
*/

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 1, 0(1)\n\t"  /* target->r1 */                       \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "d" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* The call abi has the arguments in r2-r6 and stack */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1, arg2)                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)               \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"      \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-160\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,160\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-168\n\t"  /* 160-byte save area + 1 stack arg */ \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,168\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7)                                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-176\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,176\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7 ,arg8)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-184\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,184\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,    \
                     arg6, arg7 ,arg8, arg9)                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-192\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,192\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10)              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-200\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,200\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-208\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "mvc 200(8,15), 88(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,208\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         "aghi 15,-216\n\t"                                       \
         "lg 2, 8(1)\n\t"                                         \
         "lg 3,16(1)\n\t"                                         \
         "lg 4,24(1)\n\t"                                         \
         "lg 5,32(1)\n\t"                                         \
         "lg 6,40(1)\n\t"                                         \
         "mvc 160(8,15), 48(1)\n\t"                               \
         "mvc 168(8,15), 56(1)\n\t"                               \
         "mvc 176(8,15), 64(1)\n\t"                               \
         "mvc 184(8,15), 72(1)\n\t"                               \
         "mvc 192(8,15), 80(1)\n\t"                               \
         "mvc 200(8,15), 88(1)\n\t"                               \
         "mvc 208(8,15), 96(1)\n\t"                               \
         "lg 1, 0(1)\n\t"                                         \
         VALGRIND_CALL_NOREDIR_R1                                 \
         "lgr %0, 2\n\t"                                          \
         "aghi 15,216\n\t"                                        \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=d" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7"  \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#endif /* PLAT_s390x_linux */
3820 /* ------------------------- mips-linux ------------------------- */
3822 #if defined(PLAT_mips32_linux)
3824 /* These regs are trashed by the hidden call. */
3825 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
3826 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
3829 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
3832 #define CALL_FN_W_v(lval, orig) \
3834 volatile OrigFn _orig = (orig); \
3835 volatile unsigned long _argvec[1]; \
3836 volatile unsigned long _res; \
3837 _argvec[0] = (unsigned long)_orig.nraddr; \
3839 "subu $29, $29, 8 \n\t" \
3840 "sw $gp, 0($sp) \n\t" \
3841 "sw $ra, 4($sp) \n\t" \
3842 "subu $29, $29, 16 \n\t" \
3843 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3844 VALGRIND_CALL_NOREDIR_T9 \
3845 "addu $29, $29, 16\n\t" \
3846 "lw $gp, 0($sp) \n\t" \
3847 "lw $ra, 4($sp) \n\t" \
3848 "addu $29, $29, 8 \n\t" \
3850 : /*out*/ "=r" (_res) \
3851 : /*in*/ "0" (&_argvec[0]) \
3852 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3854 lval = (__typeof__(lval)) _res; \
3857 #define CALL_FN_W_W(lval, orig, arg1) \
3859 volatile OrigFn _orig = (orig); \
3860 volatile unsigned long _argvec[2]; \
3861 volatile unsigned long _res; \
3862 _argvec[0] = (unsigned long)_orig.nraddr; \
3863 _argvec[1] = (unsigned long)(arg1); \
3865 "subu $29, $29, 8 \n\t" \
3866 "sw $gp, 0($sp) \n\t" \
3867 "sw $ra, 4($sp) \n\t" \
3868 "subu $29, $29, 16 \n\t" \
3869 "lw $a0, 4(%1) \n\t" /* arg1*/ \
3870 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3871 VALGRIND_CALL_NOREDIR_T9 \
3872 "addu $29, $29, 16 \n\t" \
3873 "lw $gp, 0($sp) \n\t" \
3874 "lw $ra, 4($sp) \n\t" \
3875 "addu $29, $29, 8 \n\t" \
3877 : /*out*/ "=r" (_res) \
3878 : /*in*/ "0" (&_argvec[0]) \
3879 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3881 lval = (__typeof__(lval)) _res; \
3884 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3886 volatile OrigFn _orig = (orig); \
3887 volatile unsigned long _argvec[3]; \
3888 volatile unsigned long _res; \
3889 _argvec[0] = (unsigned long)_orig.nraddr; \
3890 _argvec[1] = (unsigned long)(arg1); \
3891 _argvec[2] = (unsigned long)(arg2); \
3893 "subu $29, $29, 8 \n\t" \
3894 "sw $gp, 0($sp) \n\t" \
3895 "sw $ra, 4($sp) \n\t" \
3896 "subu $29, $29, 16 \n\t" \
3897 "lw $a0, 4(%1) \n\t" \
3898 "lw $a1, 8(%1) \n\t" \
3899 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3900 VALGRIND_CALL_NOREDIR_T9 \
3901 "addu $29, $29, 16 \n\t" \
3902 "lw $gp, 0($sp) \n\t" \
3903 "lw $ra, 4($sp) \n\t" \
3904 "addu $29, $29, 8 \n\t" \
3906 : /*out*/ "=r" (_res) \
3907 : /*in*/ "0" (&_argvec[0]) \
3908 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3910 lval = (__typeof__(lval)) _res; \
3913 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3915 volatile OrigFn _orig = (orig); \
3916 volatile unsigned long _argvec[4]; \
3917 volatile unsigned long _res; \
3918 _argvec[0] = (unsigned long)_orig.nraddr; \
3919 _argvec[1] = (unsigned long)(arg1); \
3920 _argvec[2] = (unsigned long)(arg2); \
3921 _argvec[3] = (unsigned long)(arg3); \
3923 "subu $29, $29, 8 \n\t" \
3924 "sw $gp, 0($sp) \n\t" \
3925 "sw $ra, 4($sp) \n\t" \
3926 "subu $29, $29, 16 \n\t" \
3927 "lw $a0, 4(%1) \n\t" \
3928 "lw $a1, 8(%1) \n\t" \
3929 "lw $a2, 12(%1) \n\t" \
3930 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3931 VALGRIND_CALL_NOREDIR_T9 \
3932 "addu $29, $29, 16 \n\t" \
3933 "lw $gp, 0($sp) \n\t" \
3934 "lw $ra, 4($sp) \n\t" \
3935 "addu $29, $29, 8 \n\t" \
3937 : /*out*/ "=r" (_res) \
3938 : /*in*/ "0" (&_argvec[0]) \
3939 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3941 lval = (__typeof__(lval)) _res; \
3944 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3946 volatile OrigFn _orig = (orig); \
3947 volatile unsigned long _argvec[5]; \
3948 volatile unsigned long _res; \
3949 _argvec[0] = (unsigned long)_orig.nraddr; \
3950 _argvec[1] = (unsigned long)(arg1); \
3951 _argvec[2] = (unsigned long)(arg2); \
3952 _argvec[3] = (unsigned long)(arg3); \
3953 _argvec[4] = (unsigned long)(arg4); \
3955 "subu $29, $29, 8 \n\t" \
3956 "sw $gp, 0($sp) \n\t" \
3957 "sw $ra, 4($sp) \n\t" \
3958 "subu $29, $29, 16 \n\t" \
3959 "lw $a0, 4(%1) \n\t" \
3960 "lw $a1, 8(%1) \n\t" \
3961 "lw $a2, 12(%1) \n\t" \
3962 "lw $a3, 16(%1) \n\t" \
3963 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3964 VALGRIND_CALL_NOREDIR_T9 \
3965 "addu $29, $29, 16 \n\t" \
3966 "lw $gp, 0($sp) \n\t" \
3967 "lw $ra, 4($sp) \n\t" \
3968 "addu $29, $29, 8 \n\t" \
3970 : /*out*/ "=r" (_res) \
3971 : /*in*/ "0" (&_argvec[0]) \
3972 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3974 lval = (__typeof__(lval)) _res; \
3977 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3979 volatile OrigFn _orig = (orig); \
3980 volatile unsigned long _argvec[6]; \
3981 volatile unsigned long _res; \
3982 _argvec[0] = (unsigned long)_orig.nraddr; \
3983 _argvec[1] = (unsigned long)(arg1); \
3984 _argvec[2] = (unsigned long)(arg2); \
3985 _argvec[3] = (unsigned long)(arg3); \
3986 _argvec[4] = (unsigned long)(arg4); \
3987 _argvec[5] = (unsigned long)(arg5); \
3989 "subu $29, $29, 8 \n\t" \
3990 "sw $gp, 0($sp) \n\t" \
3991 "sw $ra, 4($sp) \n\t" \
3992 "lw $a0, 20(%1) \n\t" \
3993 "subu $sp, $sp, 24\n\t" \
3994 "sw $a0, 16($sp) \n\t" \
3995 "lw $a0, 4(%1) \n\t" \
3996 "lw $a1, 8(%1) \n\t" \
3997 "lw $a2, 12(%1) \n\t" \
3998 "lw $a3, 16(%1) \n\t" \
3999 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4000 VALGRIND_CALL_NOREDIR_T9 \
4001 "addu $29, $29, 24 \n\t" \
4002 "lw $gp, 0($sp) \n\t" \
4003 "lw $ra, 4($sp) \n\t" \
4004 "addu $sp, $sp, 8 \n\t" \
4006 : /*out*/ "=r" (_res) \
4007 : /*in*/ "0" (&_argvec[0]) \
4008 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4010 lval = (__typeof__(lval)) _res; \
4012 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4014 volatile OrigFn _orig = (orig); \
4015 volatile unsigned long _argvec[7]; \
4016 volatile unsigned long _res; \
4017 _argvec[0] = (unsigned long)_orig.nraddr; \
4018 _argvec[1] = (unsigned long)(arg1); \
4019 _argvec[2] = (unsigned long)(arg2); \
4020 _argvec[3] = (unsigned long)(arg3); \
4021 _argvec[4] = (unsigned long)(arg4); \
4022 _argvec[5] = (unsigned long)(arg5); \
4023 _argvec[6] = (unsigned long)(arg6); \
4025 "subu $29, $29, 8 \n\t" \
4026 "sw $gp, 0($sp) \n\t" \
4027 "sw $ra, 4($sp) \n\t" \
4028 "lw $a0, 20(%1) \n\t" \
4029 "subu $sp, $sp, 32\n\t" \
4030 "sw $a0, 16($sp) \n\t" \
4031 "lw $a0, 24(%1) \n\t" \
4033 "sw $a0, 20($sp) \n\t" \
4034 "lw $a0, 4(%1) \n\t" \
4035 "lw $a1, 8(%1) \n\t" \
4036 "lw $a2, 12(%1) \n\t" \
4037 "lw $a3, 16(%1) \n\t" \
4038 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4039 VALGRIND_CALL_NOREDIR_T9 \
4040 "addu $sp, $sp, 32 \n\t" \
4041 "lw $gp, 0($sp) \n\t" \
4042 "lw $ra, 4($sp) \n\t" \
4043 "addu $sp, $sp, 8 \n\t" \
4045 : /*out*/ "=r" (_res) \
4046 : /*in*/ "0" (&_argvec[0]) \
4047 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4049 lval = (__typeof__(lval)) _res; \
4052 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4055 volatile OrigFn _orig = (orig); \
4056 volatile unsigned long _argvec[8]; \
4057 volatile unsigned long _res; \
4058 _argvec[0] = (unsigned long)_orig.nraddr; \
4059 _argvec[1] = (unsigned long)(arg1); \
4060 _argvec[2] = (unsigned long)(arg2); \
4061 _argvec[3] = (unsigned long)(arg3); \
4062 _argvec[4] = (unsigned long)(arg4); \
4063 _argvec[5] = (unsigned long)(arg5); \
4064 _argvec[6] = (unsigned long)(arg6); \
4065 _argvec[7] = (unsigned long)(arg7); \
4067 "subu $29, $29, 8 \n\t" \
4068 "sw $gp, 0($sp) \n\t" \
4069 "sw $ra, 4($sp) \n\t" \
4070 "lw $a0, 20(%1) \n\t" \
4071 "subu $sp, $sp, 32\n\t" \
4072 "sw $a0, 16($sp) \n\t" \
4073 "lw $a0, 24(%1) \n\t" \
4074 "sw $a0, 20($sp) \n\t" \
4075 "lw $a0, 28(%1) \n\t" \
4076 "sw $a0, 24($sp) \n\t" \
4077 "lw $a0, 4(%1) \n\t" \
4078 "lw $a1, 8(%1) \n\t" \
4079 "lw $a2, 12(%1) \n\t" \
4080 "lw $a3, 16(%1) \n\t" \
4081 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4082 VALGRIND_CALL_NOREDIR_T9 \
4083 "addu $sp, $sp, 32 \n\t" \
4084 "lw $gp, 0($sp) \n\t" \
4085 "lw $ra, 4($sp) \n\t" \
4086 "addu $sp, $sp, 8 \n\t" \
4088 : /*out*/ "=r" (_res) \
4089 : /*in*/ "0" (&_argvec[0]) \
4090 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4092 lval = (__typeof__(lval)) _res; \
/* Call an 8-arg word function via the no-redirect mechanism (mips32/o32).
   Args 1..4 go in $a0..$a3; args 5..8 go to 16($sp)..28($sp). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 40\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 40 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg word function via the no-redirect mechanism (mips32/o32).
   Args 1..4 go in $a0..$a3; args 5..9 go to 16($sp)..32($sp). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 40\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 40 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg word function via the no-redirect mechanism (mips32/o32).
   Args 1..4 go in $a0..$a3; args 5..10 go to 16($sp)..36($sp). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 48\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 40(%1) \n\t"                                    \
         "sw $a0, 36($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 48 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg word function via the no-redirect mechanism (mips32/o32).
   Args 1..4 go in $a0..$a3; args 5..11 go to 16($sp)..40($sp). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 48\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 40(%1) \n\t"                                    \
         "sw $a0, 36($sp) \n\t"                                   \
         "lw $a0, 44(%1) \n\t"                                    \
         "sw $a0, 40($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 48 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4298 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4299 arg6,arg7,arg8,arg9,arg10, \
4302 volatile OrigFn _orig = (orig); \
4303 volatile unsigned long _argvec[13]; \
4304 volatile unsigned long _res; \
4305 _argvec[0] = (unsigned long)_orig.nraddr; \
4306 _argvec[1] = (unsigned long)(arg1); \
4307 _argvec[2] = (unsigned long)(arg2); \
4308 _argvec[3] = (unsigned long)(arg3); \
4309 _argvec[4] = (unsigned long)(arg4); \
4310 _argvec[5] = (unsigned long)(arg5); \
4311 _argvec[6] = (unsigned long)(arg6); \
4312 _argvec[7] = (unsigned long)(arg7); \
4313 _argvec[8] = (unsigned long)(arg8); \
4314 _argvec[9] = (unsigned long)(arg9); \
4315 _argvec[10] = (unsigned long)(arg10); \
4316 _argvec[11] = (unsigned long)(arg11); \
4317 _argvec[12] = (unsigned long)(arg12); \
4319 "subu $29, $29, 8 \n\t" \
4320 "sw $gp, 0($sp) \n\t" \
4321 "sw $ra, 4($sp) \n\t" \
4322 "lw $a0, 20(%1) \n\t" \
4323 "subu $sp, $sp, 56\n\t" \
4324 "sw $a0, 16($sp) \n\t" \
4325 "lw $a0, 24(%1) \n\t" \
4326 "sw $a0, 20($sp) \n\t" \
4327 "lw $a0, 28(%1) \n\t" \
4328 "sw $a0, 24($sp) \n\t" \
4329 "lw $a0, 32(%1) \n\t" \
4330 "sw $a0, 28($sp) \n\t" \
4331 "lw $a0, 36(%1) \n\t" \
4332 "sw $a0, 32($sp) \n\t" \
4333 "lw $a0, 40(%1) \n\t" \
4334 "sw $a0, 36($sp) \n\t" \
4335 "lw $a0, 44(%1) \n\t" \
4336 "sw $a0, 40($sp) \n\t" \
4337 "lw $a0, 48(%1) \n\t" \
4338 "sw $a0, 44($sp) \n\t" \
4339 "lw $a0, 4(%1) \n\t" \
4340 "lw $a1, 8(%1) \n\t" \
4341 "lw $a2, 12(%1) \n\t" \
4342 "lw $a3, 16(%1) \n\t" \
4343 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4344 VALGRIND_CALL_NOREDIR_T9 \
4345 "addu $sp, $sp, 56 \n\t" \
4346 "lw $gp, 0($sp) \n\t" \
4347 "lw $ra, 4($sp) \n\t" \
4348 "addu $sp, $sp, 8 \n\t" \
4350 : /*out*/ "=r" (_res) \
4351 : /*in*/ "0" (&_argvec[0]) \
4352 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4354 lval = (__typeof__(lval)) _res; \
4357 #endif /* PLAT_mips32_linux */
/* ------------------------------------------------------------------ */
/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS.               */
/*                                                                    */
/* ------------------------------------------------------------------ */

/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs.
   VG_USERREQ_TOOL_BASE builds a tool-specific request-code base from a
   two-character tool tag (e.g. 'M','C' for Memcheck); the two bytes
   occupy the top 16 bits of the request word.
   VG_IS_TOOL_USERREQ tests whether request code 'v' belongs to the
   tool identified by tag (a,b), by comparing only those top 16 bits. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows a string (gdb monitor command) to be passed to the tool.
             Used for interaction with vgdb/gdb. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF           = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801
   } Vg_ClientRequest;
/* Non-GCC compilers don't know GCC's __extension__ keyword; define it
   away so the client-request expressions below still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
4478 /* These requests are for getting Valgrind itself to print something.
4479 Possibly with a backtrace. This is a really ugly hack. The return value
4480 is the number of characters printed, excluding the "**<pid>** " part at the
4481 start and the backtrace (if present). */
4483 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
4484 /* Modern GCC will optimize the static routine out if unused,
4485 and unused attribute will shut down warnings about it. */
4486 static int VALGRIND_PRINTF(const char *format, ...)
4487 __attribute__((format(__printf__, 1, 2), __unused__));
4490 #if defined(_MSC_VER)
4493 VALGRIND_PRINTF(const char *format, ...)
4495 #if defined(NVALGRIND)
4497 #else /* NVALGRIND */
4498 #if defined(_MSC_VER)
4501 unsigned long _qzz_res;
4504 va_start(vargs, format);
4505 #if defined(_MSC_VER)
4506 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4507 VG_USERREQ__PRINTF_VALIST_BY_REF,
4512 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4513 VG_USERREQ__PRINTF_VALIST_BY_REF,
4514 (unsigned long)format,
4515 (unsigned long)&vargs,
4519 return (int)_qzz_res;
4520 #endif /* NVALGRIND */
4523 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
4524 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
4525 __attribute__((format(__printf__, 1, 2), __unused__));
4528 #if defined(_MSC_VER)
4531 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
4533 #if defined(NVALGRIND)
4535 #else /* NVALGRIND */
4536 #if defined(_MSC_VER)
4539 unsigned long _qzz_res;
4542 va_start(vargs, format);
4543 #if defined(_MSC_VER)
4544 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4545 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
4550 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4551 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
4552 (unsigned long)format,
4553 (unsigned long)&vargs,
4557 return (int)_qzz_res;
4558 #endif /* NVALGRIND */
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
   when heap blocks are allocated in order to give accurate results.  This
   happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocates chunks of the superblocks, all
   Valgrind's observations will be at the mmap() level and it won't know that
   the chunks should be considered separate entities.  In Memcheck's case,
   that means you probably won't get heap block overrun detection (because
   there won't be redzones marked as unaddressable) and you definitely won't
   get any leak detection.

   The following client requests allow a custom allocator to be annotated so
   that it can be handled accurately by Valgrind.

   VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
   by a malloc()-like function.  For Memcheck (an illustrative case), this
   does two things:

   - It records that the block has been allocated.  This means any addresses
     within the block mentioned in error messages will be
     identified as belonging to the block.  It also means that if the block
     isn't freed it will be detected by the leak checker.

   - It marks the block as being addressable and undefined (if 'is_zeroed' is
     not set), or addressable and defined (if 'is_zeroed' is set).  This
     controls how accesses to the block by the program are handled.

   'addr' is the start of the usable block (ie. after any
   redzone), 'sizeB' is its size.  'rzB' is the redzone size if the allocator
   can apply redzones -- these are blocks of padding at the start and end of
   each block.  Adding redzones is recommended as it makes it much more likely
   Valgrind will spot block overruns.  `is_zeroed' indicates if the memory is
   zeroed (or filled with another predictable value), as is the case for
   calloc().

   VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
   heap block -- that will be used by the client program -- is allocated.
   It's best to put it at the outermost level of the allocator if possible;
   for example, if you have a function my_alloc() which calls
   internal_alloc(), and the client request is put inside internal_alloc(),
   stack traces relating to the heap block will contain entries for both
   my_alloc() and internal_alloc(), which is probably not what you want.

   For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
   custom blocks from within a heap block, B, that has been allocated with
   malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
   -- the custom blocks will take precedence.

   VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK.  For
   Memcheck, it does two things:

   - It records that the block has been deallocated.  This assumes that the
     block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - It marks the block as being unaddressable.

   VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
   heap block is deallocated.

   VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation.  For
   Memcheck, it does four things:

   - It records that the size of a block has been changed.  This assumes that
     the block was annotated as having been allocated via
     VALGRIND_MALLOCLIKE_BLOCK.  Otherwise, an error will be issued.

   - If the block shrunk, it marks the freed memory as being unaddressable.

   - If the block grew, it marks the new area as undefined and defines a red
     zone past the end of the new block.

   - The V-bits of the overlap between the old and the new block are preserved.

   VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
   and before deallocation of the old block.

   In many cases, these three client requests will not be enough to get your
   allocator working well with Memcheck.  More specifically, if your allocator
   writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
   will be necessary to mark the memory as addressable just before the zeroing
   occurs, otherwise you'll get a lot of invalid write errors.  For example,
   you'll need to do this if your allocator recycles freed blocks, but it
   zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
   Alternatively, if your allocator reuses freed blocks for allocator-internal
   data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.

   Really, what's happening is a blurring of the lines between the client
   program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
   memory should be considered unaddressable to the client program, but the
   allocator knows more than the rest of the client program and so may be able
   to safely access it.  Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   program.

   Ignored if addr == 0.
*/
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
/* Create a memory pool. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
/* Mark a piece of memory as being a stack.  Returns a stack id. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
4829 #undef PLAT_x86_darwin
4830 #undef PLAT_amd64_darwin
4831 #undef PLAT_x86_win32
4832 #undef PLAT_x86_linux
4833 #undef PLAT_amd64_linux
4834 #undef PLAT_ppc32_linux
4835 #undef PLAT_ppc64_linux
4836 #undef PLAT_arm_linux
4837 #undef PLAT_s390x_linux
4838 #undef PLAT_mips32_linux
4840 #endif /* __VALGRIND_H */