2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
15 Copyright (C) 2000-2013 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (e.g. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 10
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_amd64_win64
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64_linux
119 #undef PLAT_arm_linux
120 #undef PLAT_arm64_linux
121 #undef PLAT_s390x_linux
122 #undef PLAT_mips32_linux
123 #undef PLAT_mips64_linux
126 #if defined(__APPLE__) && defined(__i386__)
127 # define PLAT_x86_darwin 1
128 #elif defined(__APPLE__) && defined(__x86_64__)
129 # define PLAT_amd64_darwin 1
130 #elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
131 || defined(__CYGWIN32__) \
132 || (defined(_WIN32) && defined(_M_IX86))
133 # define PLAT_x86_win32 1
134 #elif defined(__MINGW64__) \
135 || (defined(_WIN64) && defined(_M_X64))
136 # define PLAT_amd64_win64 1
137 #elif defined(__linux__) && defined(__i386__)
138 # define PLAT_x86_linux 1
139 #elif defined(__linux__) && defined(__x86_64__)
140 # define PLAT_amd64_linux 1
141 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
142 # define PLAT_ppc32_linux 1
143 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
144 # define PLAT_ppc64_linux 1
145 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
146 # define PLAT_arm_linux 1
147 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
148 # define PLAT_arm64_linux 1
149 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
150 # define PLAT_s390x_linux 1
151 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
152 # define PLAT_mips64_linux 1
153 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
154 # define PLAT_mips32_linux 1
156 /* If we're not compiling for our target platform, don't generate
158 # if !defined(NVALGRIND)
164 /* ------------------------------------------------------------------ */
165 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
166 /* in here of use to end-users -- skip to the next section. */
167 /* ------------------------------------------------------------------ */
170 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
171 * request. Accepts both pointers and integers as arguments.
173 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
174 * client request that does not return a value.
176 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
177 * client request and whose value equals the client request result. Accepts
178 * both pointers and integers as arguments. Note that such calls are not
179 * necessarily pure functions -- they may have side effects.
/* Statement form of a client request: evaluates the request via
   VALGRIND_DO_CLIENT_REQUEST_EXPR and assigns the result (which is
   _zzq_default when not running under Valgrind) to _zzq_rlval.
   Wrapped in do/while(0) so the expansion is a single statement. */
182 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
183 _zzq_request, _zzq_arg1, _zzq_arg2, \
184 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
185 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
186 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
187 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
/* Statement form for requests whose result is not wanted: the default
   is fixed at 0 and the expression's value is explicitly discarded. */
189 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
190 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
191 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
192 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
193 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
195 #if defined(NVALGRIND)
197 /* Define NVALGRIND to completely remove the Valgrind magic sequence
198 from the compiled code (analogous to NDEBUG's effects on
200 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
201 _zzq_default, _zzq_request, \
202 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
205 #else /* ! NVALGRIND */
207 /* The following defines the magic code sequences which the JITter
208 spots and handles magically. Don't look too closely at them as
209 they will rot your brain.
211 The assembly code sequences for all architectures is in this one
212 file. This is because this file must be stand-alone, and we don't
213 want to have multiple files.
215 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
216 value gets put in the return slot, so that everything works when
217 this is executed not under Valgrind. Args are passed in a memory
218 block, and so there's no intrinsic limit to the number that could
219 be passed, but it's currently five.
222 _zzq_rlval result lvalue
223 _zzq_default default value (result returned when running on real CPU)
224 _zzq_request request code
225 _zzq_arg1..5 request params
227 The other two macros are used to support function wrapping, and are
228 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
229 guest's NRADDR pseudo-register and whatever other information is
230 needed to safely call the original function from the wrapper: on
231 ppc64-linux, the R2 value at the divert point is also needed. This
232 information is abstracted into a user-visible type, OrigFn.
234 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
235 guest, but guarantees that the branch instruction will not be
236 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
237 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
238 complete inline asm, since it needs to be combined with more magic
239 inline asm stuff to be useful.
242 /* ------------------------- x86-{linux,darwin} ---------------- */
244 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
245 || (defined(PLAT_x86_win32) && defined(__GNUC__))
249 unsigned int nraddr; /* where's the code? */
/* Magic marker sequence the Valgrind JIT spots (see the "magic code
   sequences" note above).  The rotate amounts 3+13+29+19 = 64, a
   multiple of 32, so %edi is unchanged: an architectural no-op. */
253 #define __SPECIAL_INSTRUCTION_PREAMBLE \
254 "roll $3, %%edi ; roll $13, %%edi\n\t" \
255 "roll $29, %%edi ; roll $19, %%edi\n\t"
257 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
258 _zzq_default, _zzq_request, \
259 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
261 ({volatile unsigned int _zzq_args[6]; \
262 volatile unsigned int _zzq_result; \
263 _zzq_args[0] = (unsigned int)(_zzq_request); \
264 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
265 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
266 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
267 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
268 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
269 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
270 /* %EDX = client_request ( %EAX ) */ \
271 "xchgl %%ebx,%%ebx" \
272 : "=d" (_zzq_result) \
273 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
279 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
280 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
281 volatile unsigned int __addr; \
282 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
283 /* %EAX = guest_NRADDR */ \
284 "xchgl %%ecx,%%ecx" \
289 _zzq_orig->nraddr = __addr; \
/* Bare asm text fragment, not a complete asm statement (see the
   VALGRIND_CALL_NOREDIR_* note above): the preamble followed by the
   'xchgl %edx,%edx' marker, which Valgrind interprets as
   "call *%eax without redirection".  No-op on a real CPU. */
292 #define VALGRIND_CALL_NOREDIR_EAX \
293 __SPECIAL_INSTRUCTION_PREAMBLE \
294 /* call-noredir *%EAX */ \
295 "xchgl %%edx,%%edx\n\t"
297 #define VALGRIND_VEX_INJECT_IR() \
299 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
300 "xchgl %%edi,%%edi\n\t" \
301 : : : "cc", "memory" \
305 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
307 /* ------------------------- x86-Win32 ------------------------- */
309 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
313 unsigned int nraddr; /* where's the code? */
317 #if defined(_MSC_VER)
/* MSVC inline-asm form of the x86 magic marker: same rotations as the
   GCC variant (3+13+29+19 = 64, a multiple of 32), so edi is
   unchanged — an architectural no-op the JIT recognises. */
319 #define __SPECIAL_INSTRUCTION_PREAMBLE \
320 __asm rol edi, 3 __asm rol edi, 13 \
321 __asm rol edi, 29 __asm rol edi, 19
323 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
324 _zzq_default, _zzq_request, \
325 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
326 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
327 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
328 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
329 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
331 static __inline uintptr_t
332 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
333 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
334 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
337 volatile uintptr_t _zzq_args[6];
338 volatile unsigned int _zzq_result;
339 _zzq_args[0] = (uintptr_t)(_zzq_request);
340 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
341 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
342 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
343 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
344 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
345 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
346 __SPECIAL_INSTRUCTION_PREAMBLE
347 /* %EDX = client_request ( %EAX ) */
349 __asm mov _zzq_result, edx
354 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
355 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
356 volatile unsigned int __addr; \
357 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
358 /* %EAX = guest_NRADDR */ \
360 __asm mov __addr, eax \
362 _zzq_orig->nraddr = __addr; \
/* Not supported under MSVC: deliberately expands to the bare token
   ERROR so that any attempted use fails to compile. */
365 #define VALGRIND_CALL_NOREDIR_EAX ERROR
367 #define VALGRIND_VEX_INJECT_IR() \
369 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
375 #error Unsupported compiler.
378 #endif /* PLAT_x86_win32 */
380 /* ------------------------ amd64-{linux,darwin} --------------- */
382 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
383 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
387 unsigned long long int nraddr; /* where's the code? */
/* amd64 magic marker sequence: rotate amounts 3+13+61+51 = 128, a
   multiple of 64, so %rdi is unchanged — an architectural no-op that
   the Valgrind JIT recognises. */
391 #define __SPECIAL_INSTRUCTION_PREAMBLE \
392 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
393 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
395 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
396 _zzq_default, _zzq_request, \
397 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
399 ({ volatile unsigned long long int _zzq_args[6]; \
400 volatile unsigned long long int _zzq_result; \
401 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
402 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
403 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
404 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
405 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
406 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
407 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
408 /* %RDX = client_request ( %RAX ) */ \
409 "xchgq %%rbx,%%rbx" \
410 : "=d" (_zzq_result) \
411 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
417 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
418 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
419 volatile unsigned long long int __addr; \
420 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
421 /* %RAX = guest_NRADDR */ \
422 "xchgq %%rcx,%%rcx" \
427 _zzq_orig->nraddr = __addr; \
/* Bare asm text fragment (not a complete asm statement): preamble plus
   the 'xchgq %rdx,%rdx' marker meaning "call *%rax without
   redirection".  No-op on a real CPU. */
430 #define VALGRIND_CALL_NOREDIR_RAX \
431 __SPECIAL_INSTRUCTION_PREAMBLE \
432 /* call-noredir *%RAX */ \
433 "xchgq %%rdx,%%rdx\n\t"
435 #define VALGRIND_VEX_INJECT_IR() \
437 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
438 "xchgq %%rdi,%%rdi\n\t" \
439 : : : "cc", "memory" \
443 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
445 /* ------------------------- amd64-Win64 ------------------------- */
447 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
449 #error Unsupported compiler.
451 #endif /* PLAT_amd64_win64 */
453 /* ------------------------ ppc32-linux ------------------------ */
455 #if defined(PLAT_ppc32_linux)
459 unsigned int nraddr; /* where's the code? */
/* ppc32 magic marker: each rlwinm rotates r0 left with a full mask;
   the amounts 3+13+29+19 = 64 are a multiple of 32, so r0 ends up
   unchanged — an architectural no-op the JIT recognises. */
463 #define __SPECIAL_INSTRUCTION_PREAMBLE \
464 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
465 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
467 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
468 _zzq_default, _zzq_request, \
469 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
472 ({ unsigned int _zzq_args[6]; \
473 unsigned int _zzq_result; \
474 unsigned int* _zzq_ptr; \
475 _zzq_args[0] = (unsigned int)(_zzq_request); \
476 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
477 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
478 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
479 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
480 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
481 _zzq_ptr = _zzq_args; \
482 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
483 "mr 4,%2\n\t" /*ptr*/ \
484 __SPECIAL_INSTRUCTION_PREAMBLE \
485 /* %R3 = client_request ( %R4 ) */ \
487 "mr %0,3" /*result*/ \
488 : "=b" (_zzq_result) \
489 : "b" (_zzq_default), "b" (_zzq_ptr) \
490 : "cc", "memory", "r3", "r4"); \
494 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
495 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
496 unsigned int __addr; \
497 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
498 /* %R3 = guest_NRADDR */ \
503 : "cc", "memory", "r3" \
505 _zzq_orig->nraddr = __addr; \
508 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
509 __SPECIAL_INSTRUCTION_PREAMBLE \
510 /* branch-and-link-to-noredir *%R11 */ \
513 #define VALGRIND_VEX_INJECT_IR() \
515 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
520 #endif /* PLAT_ppc32_linux */
522 /* ------------------------ ppc64-linux ------------------------ */
524 #if defined(PLAT_ppc64_linux)
528 unsigned long long int nraddr; /* where's the code? */
529 unsigned long long int r2; /* what tocptr do we need? */
/* ppc64 magic marker: rotldi amounts 3+13+61+51 = 128, a multiple of
   64, so r0 is unchanged — an architectural no-op the JIT recognises. */
533 #define __SPECIAL_INSTRUCTION_PREAMBLE \
534 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
535 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
537 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
538 _zzq_default, _zzq_request, \
539 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
542 ({ unsigned long long int _zzq_args[6]; \
543 unsigned long long int _zzq_result; \
544 unsigned long long int* _zzq_ptr; \
545 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
546 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
547 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
548 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
549 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
550 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
551 _zzq_ptr = _zzq_args; \
552 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
553 "mr 4,%2\n\t" /*ptr*/ \
554 __SPECIAL_INSTRUCTION_PREAMBLE \
555 /* %R3 = client_request ( %R4 ) */ \
557 "mr %0,3" /*result*/ \
558 : "=b" (_zzq_result) \
559 : "b" (_zzq_default), "b" (_zzq_ptr) \
560 : "cc", "memory", "r3", "r4"); \
564 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
565 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
566 unsigned long long int __addr; \
567 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
568 /* %R3 = guest_NRADDR */ \
573 : "cc", "memory", "r3" \
575 _zzq_orig->nraddr = __addr; \
576 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
577 /* %R3 = guest_NRADDR_GPR2 */ \
582 : "cc", "memory", "r3" \
584 _zzq_orig->r2 = __addr; \
587 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
588 __SPECIAL_INSTRUCTION_PREAMBLE \
589 /* branch-and-link-to-noredir *%R11 */ \
592 #define VALGRIND_VEX_INJECT_IR() \
594 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
599 #endif /* PLAT_ppc64_linux */
601 /* ------------------------- arm-linux ------------------------- */
603 #if defined(PLAT_arm_linux)
607 unsigned int nraddr; /* where's the code? */
/* arm magic marker: right-rotations of r12 by 3+13+29+19 = 64, a
   multiple of 32, leave r12 unchanged — an architectural no-op the
   JIT recognises. */
611 #define __SPECIAL_INSTRUCTION_PREAMBLE \
612 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
613 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
615 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
616 _zzq_default, _zzq_request, \
617 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
620 ({volatile unsigned int _zzq_args[6]; \
621 volatile unsigned int _zzq_result; \
622 _zzq_args[0] = (unsigned int)(_zzq_request); \
623 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
624 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
625 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
626 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
627 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
628 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
629 "mov r4, %2\n\t" /*ptr*/ \
630 __SPECIAL_INSTRUCTION_PREAMBLE \
631 /* R3 = client_request ( R4 ) */ \
632 "orr r10, r10, r10\n\t" \
633 "mov %0, r3" /*result*/ \
634 : "=r" (_zzq_result) \
635 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
636 : "cc","memory", "r3", "r4"); \
640 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
641 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
642 unsigned int __addr; \
643 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
644 /* R3 = guest_NRADDR */ \
645 "orr r11, r11, r11\n\t" \
649 : "cc", "memory", "r3" \
651 _zzq_orig->nraddr = __addr; \
/* Bare asm text fragment: preamble plus the 'orr r12,r12,r12' marker
   meaning "branch-and-link to the address in r4 without redirection".
   No-op on a real CPU. */
654 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
655 __SPECIAL_INSTRUCTION_PREAMBLE \
656 /* branch-and-link-to-noredir *%R4 */ \
657 "orr r12, r12, r12\n\t"
659 #define VALGRIND_VEX_INJECT_IR() \
661 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
662 "orr r9, r9, r9\n\t" \
663 : : : "cc", "memory" \
667 #endif /* PLAT_arm_linux */
669 /* ------------------------ arm64-linux ------------------------- */
671 #if defined(PLAT_arm64_linux)
675 unsigned long long int nraddr; /* where's the code? */
/* arm64 magic marker: rotations of x12 by 3+13+51+61 = 128, a
   multiple of 64, leave x12 unchanged — an architectural no-op the
   JIT recognises. */
679 #define __SPECIAL_INSTRUCTION_PREAMBLE \
680 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
681 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
683 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
684 _zzq_default, _zzq_request, \
685 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
688 ({volatile unsigned long long int _zzq_args[6]; \
689 volatile unsigned long long int _zzq_result; \
690 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
691 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
692 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
693 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
694 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
695 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
696 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
697 "mov x4, %2\n\t" /*ptr*/ \
698 __SPECIAL_INSTRUCTION_PREAMBLE \
699 /* X3 = client_request ( X4 ) */ \
700 "orr x10, x10, x10\n\t" \
701 "mov %0, x3" /*result*/ \
702 : "=r" (_zzq_result) \
703 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
704 : "cc","memory", "x3", "x4"); \
708 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
709 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
710 unsigned long long int __addr; \
711 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
712 /* X3 = guest_NRADDR */ \
713 "orr x11, x11, x11\n\t" \
717 : "cc", "memory", "x3" \
719 _zzq_orig->nraddr = __addr; \
/* Bare asm text fragment: preamble plus the 'orr x12,x12,x12' marker
   meaning "branch-and-link to the address in X8 without redirection".
   No-op on a real CPU. */
722 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
723 __SPECIAL_INSTRUCTION_PREAMBLE \
724 /* branch-and-link-to-noredir X8 */ \
725 "orr x12, x12, x12\n\t"
727 #define VALGRIND_VEX_INJECT_IR() \
729 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
730 "orr x9, x9, x9\n\t" \
731 : : : "cc", "memory" \
735 #endif /* PLAT_arm64_linux */
737 /* ------------------------ s390x-linux ------------------------ */
739 #if defined(PLAT_s390x_linux)
743 unsigned long long int nraddr; /* where's the code? */
747 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
748 * code. This detection is implemented in platform specific toIR.c
749 * (e.g. VEX/priv/guest_s390_decoder.c).
751 #define __SPECIAL_INSTRUCTION_PREAMBLE \
/* Distinguishing instruction emitted after the preamble to select the
   request kind (see the comment above about platform-specific
   detection).  Each 'lr r,r' copies a register onto itself — a no-op
   on a real CPU; the register number encodes the request type. */
757 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
758 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
759 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
760 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
762 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
763 _zzq_default, _zzq_request, \
764 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
766 ({volatile unsigned long long int _zzq_args[6]; \
767 volatile unsigned long long int _zzq_result; \
768 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
769 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
770 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
771 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
772 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
773 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
774 __asm__ volatile(/* r2 = args */ \
778 __SPECIAL_INSTRUCTION_PREAMBLE \
779 __CLIENT_REQUEST_CODE \
782 : "=d" (_zzq_result) \
783 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
784 : "cc", "2", "3", "memory" \
789 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
790 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
791 volatile unsigned long long int __addr; \
792 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
793 __GET_NR_CONTEXT_CODE \
797 : "cc", "3", "memory" \
799 _zzq_orig->nraddr = __addr; \
802 #define VALGRIND_CALL_NOREDIR_R1 \
803 __SPECIAL_INSTRUCTION_PREAMBLE \
806 #define VALGRIND_VEX_INJECT_IR() \
808 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
809 __VEX_INJECT_IR_CODE); \
812 #endif /* PLAT_s390x_linux */
814 /* ------------------------- mips32-linux ---------------- */
816 #if defined(PLAT_mips32_linux)
820 unsigned int nraddr; /* where's the code? */
828 #define __SPECIAL_INSTRUCTION_PREAMBLE \
829 "srl $0, $0, 13\n\t" \
830 "srl $0, $0, 29\n\t" \
831 "srl $0, $0, 3\n\t" \
834 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
835 _zzq_default, _zzq_request, \
836 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
838 ({ volatile unsigned int _zzq_args[6]; \
839 volatile unsigned int _zzq_result; \
840 _zzq_args[0] = (unsigned int)(_zzq_request); \
841 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
842 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
843 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
844 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
845 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
846 __asm__ volatile("move $11, %1\n\t" /*default*/ \
847 "move $12, %2\n\t" /*ptr*/ \
848 __SPECIAL_INSTRUCTION_PREAMBLE \
849 /* T3 = client_request ( T4 ) */ \
850 "or $13, $13, $13\n\t" \
851 "move %0, $11\n\t" /*result*/ \
852 : "=r" (_zzq_result) \
853 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
858 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
859 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
860 volatile unsigned int __addr; \
861 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
862 /* %t9 = guest_NRADDR */ \
863 "or $14, $14, $14\n\t" \
864 "move %0, $11" /*result*/ \
869 _zzq_orig->nraddr = __addr; \
/* Bare asm text fragment: preamble plus the 'or $15,$15,$15' marker
   meaning "call *%t9 without redirection".  No-op on a real CPU. */
872 #define VALGRIND_CALL_NOREDIR_T9 \
873 __SPECIAL_INSTRUCTION_PREAMBLE \
874 /* call-noredir *%t9 */ \
875 "or $15, $15, $15\n\t"
877 #define VALGRIND_VEX_INJECT_IR() \
879 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
880 "or $11, $11, $11\n\t" \
885 #endif /* PLAT_mips32_linux */
887 /* ------------------------- mips64-linux ---------------- */
889 #if defined(PLAT_mips64_linux)
893 unsigned long long nraddr; /* where's the code? */
/* mips64 magic marker: dsll shifts of $0, the hardwired-zero register
   (writes to $0 are discarded), so the sequence is an architectural
   no-op that the JIT recognises. */
901 #define __SPECIAL_INSTRUCTION_PREAMBLE \
902 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
903 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
905 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
906 _zzq_default, _zzq_request, \
907 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
909 ({ volatile unsigned long long int _zzq_args[6]; \
910 volatile unsigned long long int _zzq_result; \
911 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
912 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
913 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
914 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
915 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
916 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
917 __asm__ volatile("move $11, %1\n\t" /*default*/ \
918 "move $12, %2\n\t" /*ptr*/ \
919 __SPECIAL_INSTRUCTION_PREAMBLE \
920 /* $11 = client_request ( $12 ) */ \
921 "or $13, $13, $13\n\t" \
922 "move %0, $11\n\t" /*result*/ \
923 : "=r" (_zzq_result) \
924 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
929 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
930 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
931 volatile unsigned long long int __addr; \
932 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
933 /* $11 = guest_NRADDR */ \
934 "or $14, $14, $14\n\t" \
935 "move %0, $11" /*result*/ \
939 _zzq_orig->nraddr = __addr; \
/* Bare asm text fragment: preamble plus the 'or $15,$15,$15' marker
   meaning "call via $25 ($t9) without redirection".  No-op on a real
   CPU. */
942 #define VALGRIND_CALL_NOREDIR_T9 \
943 __SPECIAL_INSTRUCTION_PREAMBLE \
944 /* call-noredir $25 */ \
945 "or $15, $15, $15\n\t"
947 #define VALGRIND_VEX_INJECT_IR() \
949 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
950 "or $11, $11, $11\n\t" \
954 #endif /* PLAT_mips64_linux */
956 /* Insert assembly code for other platforms here... */
958 #endif /* NVALGRIND */
961 /* ------------------------------------------------------------------ */
962 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
963 /* ugly. It's the least-worst tradeoff I can think of. */
964 /* ------------------------------------------------------------------ */
966 /* This section defines magic (a.k.a appalling-hack) macros for doing
967 guaranteed-no-redirection macros, so as to get from function
968 wrappers to the functions they are wrapping. The whole point is to
969 construct standard call sequences, but to do the call itself with a
970 special no-redirect call pseudo-instruction that the JIT
971 understands and handles specially. This section is long and
972 repetitious, and I can't see a way to make it shorter.
974 The naming scheme is as follows:
976 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
978 'W' stands for "word" and 'v' for "void". Hence there are
979 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
980 and for each, the possibility of returning a word-typed result, or
984 /* Use these to write the name of your wrapper. NOTE: duplicates
985 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
986 the default behaviour equivalence class tag "00000" into the name.
987 See pub_tool_redir.h for details -- normally you don't need to
988 think about this, though. */
990 /* Use an extra level of macroisation so as to ensure the soname/fnname
991 args are fully macro-expanded before pasting them together. */
/* Token-paste four fragments; the extra macro level (see note above)
   ensures arguments are fully expanded before pasting. */
992 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
/* Build a wrapper-function name: the _vgw prefix plus equivalence tag
   "00000", then ZU (Z-encoded soname, unencoded fnname) or ZZ (both
   Z-encoded), then the soname and function name. */
994 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
995 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
997 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
998 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1000 /* Use this macro from within a wrapper function to collect the
1001 context (address and possibly other info) of the original function.
1002 Once you have that you can then use it in one of the CALL_FN_
1003 macros. The type of the argument _lval is OrigFn. */
/* Fill _lval (of type OrigFn) with the context of the function being
   wrapped; simply an alias for VALGRIND_GET_NR_CONTEXT. */
1004 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1006 /* Also provide end-user facilities for function replacement, rather
1007 than wrapping. A replacement function differs from a wrapper in
1008 that it has no way to get hold of the original function being
1009 called, and hence no way to call onwards to it. In a replacement
1010 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Build a replacement-function name: same scheme as the I_WRAP_*
   macros above but with the _vgr prefix (replacement rather than
   wrapper; see the comment above for the difference). */
1012 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1013 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1015 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1016 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1018 /* Derivatives of the main macros below, for calling functions
/* Void-returning call wrappers: each delegates to the corresponding
   word-returning CALL_FN_W_* macro and discards the result into a
   volatile temporary. */
1021 #define CALL_FN_v_v(fnptr) \
1022 do { volatile unsigned long _junk; \
1023 CALL_FN_W_v(_junk,fnptr); } while (0)
1025 #define CALL_FN_v_W(fnptr, arg1) \
1026 do { volatile unsigned long _junk; \
1027 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1029 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1030 do { volatile unsigned long _junk; \
1031 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1033 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1034 do { volatile unsigned long _junk; \
1035 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1037 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1038 do { volatile unsigned long _junk; \
1039 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1041 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1042 do { volatile unsigned long _junk; \
1043 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1045 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1046 do { volatile unsigned long _junk; \
1047 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1049 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1050 do { volatile unsigned long _junk; \
1051 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1053 /* ------------------------- x86-{linux,darwin} ---------------- */
1055 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1057 /* These regs are trashed by the hidden call. No need to mention eax
1058 as gcc can already see that, plus causes gcc to bomb. */
1059 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1061 /* Macros to save and align the stack before making a function
1062 call and restore it afterwards as gcc may not keep the stack
1063 pointer aligned if it doesn't realise calls are being made
1064 to other functions. */
1066 #define VALGRIND_ALIGN_STACK \
1067 "movl %%esp,%%edi\n\t" \
1068 "andl $0xfffffff0,%%esp\n\t"
1069 #define VALGRIND_RESTORE_STACK \
1070 "movl %%edi,%%esp\n\t"
1072 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
long) == 4. */
1075 #define CALL_FN_W_v(lval, orig) \
1077 volatile OrigFn _orig = (orig); \
1078 volatile unsigned long _argvec[1]; \
1079 volatile unsigned long _res; \
1080 _argvec[0] = (unsigned long)_orig.nraddr; \
1082 VALGRIND_ALIGN_STACK \
1083 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1084 VALGRIND_CALL_NOREDIR_EAX \
1085 VALGRIND_RESTORE_STACK \
1086 : /*out*/ "=a" (_res) \
1087 : /*in*/ "a" (&_argvec[0]) \
1088 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1090 lval = (__typeof__(lval)) _res; \
1093 #define CALL_FN_W_W(lval, orig, arg1) \
1095 volatile OrigFn _orig = (orig); \
1096 volatile unsigned long _argvec[2]; \
1097 volatile unsigned long _res; \
1098 _argvec[0] = (unsigned long)_orig.nraddr; \
1099 _argvec[1] = (unsigned long)(arg1); \
1101 VALGRIND_ALIGN_STACK \
1102 "subl $12, %%esp\n\t" \
1103 "pushl 4(%%eax)\n\t" \
1104 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1105 VALGRIND_CALL_NOREDIR_EAX \
1106 VALGRIND_RESTORE_STACK \
1107 : /*out*/ "=a" (_res) \
1108 : /*in*/ "a" (&_argvec[0]) \
1109 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1111 lval = (__typeof__(lval)) _res; \
1114 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1116 volatile OrigFn _orig = (orig); \
1117 volatile unsigned long _argvec[3]; \
1118 volatile unsigned long _res; \
1119 _argvec[0] = (unsigned long)_orig.nraddr; \
1120 _argvec[1] = (unsigned long)(arg1); \
1121 _argvec[2] = (unsigned long)(arg2); \
1123 VALGRIND_ALIGN_STACK \
1124 "subl $8, %%esp\n\t" \
1125 "pushl 8(%%eax)\n\t" \
1126 "pushl 4(%%eax)\n\t" \
1127 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1128 VALGRIND_CALL_NOREDIR_EAX \
1129 VALGRIND_RESTORE_STACK \
1130 : /*out*/ "=a" (_res) \
1131 : /*in*/ "a" (&_argvec[0]) \
1132 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1134 lval = (__typeof__(lval)) _res; \
1137 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1139 volatile OrigFn _orig = (orig); \
1140 volatile unsigned long _argvec[4]; \
1141 volatile unsigned long _res; \
1142 _argvec[0] = (unsigned long)_orig.nraddr; \
1143 _argvec[1] = (unsigned long)(arg1); \
1144 _argvec[2] = (unsigned long)(arg2); \
1145 _argvec[3] = (unsigned long)(arg3); \
1147 VALGRIND_ALIGN_STACK \
1148 "subl $4, %%esp\n\t" \
1149 "pushl 12(%%eax)\n\t" \
1150 "pushl 8(%%eax)\n\t" \
1151 "pushl 4(%%eax)\n\t" \
1152 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1153 VALGRIND_CALL_NOREDIR_EAX \
1154 VALGRIND_RESTORE_STACK \
1155 : /*out*/ "=a" (_res) \
1156 : /*in*/ "a" (&_argvec[0]) \
1157 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1159 lval = (__typeof__(lval)) _res; \
1162 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1164 volatile OrigFn _orig = (orig); \
1165 volatile unsigned long _argvec[5]; \
1166 volatile unsigned long _res; \
1167 _argvec[0] = (unsigned long)_orig.nraddr; \
1168 _argvec[1] = (unsigned long)(arg1); \
1169 _argvec[2] = (unsigned long)(arg2); \
1170 _argvec[3] = (unsigned long)(arg3); \
1171 _argvec[4] = (unsigned long)(arg4); \
1173 VALGRIND_ALIGN_STACK \
1174 "pushl 16(%%eax)\n\t" \
1175 "pushl 12(%%eax)\n\t" \
1176 "pushl 8(%%eax)\n\t" \
1177 "pushl 4(%%eax)\n\t" \
1178 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1179 VALGRIND_CALL_NOREDIR_EAX \
1180 VALGRIND_RESTORE_STACK \
1181 : /*out*/ "=a" (_res) \
1182 : /*in*/ "a" (&_argvec[0]) \
1183 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1185 lval = (__typeof__(lval)) _res; \
1188 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1190 volatile OrigFn _orig = (orig); \
1191 volatile unsigned long _argvec[6]; \
1192 volatile unsigned long _res; \
1193 _argvec[0] = (unsigned long)_orig.nraddr; \
1194 _argvec[1] = (unsigned long)(arg1); \
1195 _argvec[2] = (unsigned long)(arg2); \
1196 _argvec[3] = (unsigned long)(arg3); \
1197 _argvec[4] = (unsigned long)(arg4); \
1198 _argvec[5] = (unsigned long)(arg5); \
1200 VALGRIND_ALIGN_STACK \
1201 "subl $12, %%esp\n\t" \
1202 "pushl 20(%%eax)\n\t" \
1203 "pushl 16(%%eax)\n\t" \
1204 "pushl 12(%%eax)\n\t" \
1205 "pushl 8(%%eax)\n\t" \
1206 "pushl 4(%%eax)\n\t" \
1207 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1208 VALGRIND_CALL_NOREDIR_EAX \
1209 VALGRIND_RESTORE_STACK \
1210 : /*out*/ "=a" (_res) \
1211 : /*in*/ "a" (&_argvec[0]) \
1212 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1214 lval = (__typeof__(lval)) _res; \
1217 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1219 volatile OrigFn _orig = (orig); \
1220 volatile unsigned long _argvec[7]; \
1221 volatile unsigned long _res; \
1222 _argvec[0] = (unsigned long)_orig.nraddr; \
1223 _argvec[1] = (unsigned long)(arg1); \
1224 _argvec[2] = (unsigned long)(arg2); \
1225 _argvec[3] = (unsigned long)(arg3); \
1226 _argvec[4] = (unsigned long)(arg4); \
1227 _argvec[5] = (unsigned long)(arg5); \
1228 _argvec[6] = (unsigned long)(arg6); \
1230 VALGRIND_ALIGN_STACK \
1231 "subl $8, %%esp\n\t" \
1232 "pushl 24(%%eax)\n\t" \
1233 "pushl 20(%%eax)\n\t" \
1234 "pushl 16(%%eax)\n\t" \
1235 "pushl 12(%%eax)\n\t" \
1236 "pushl 8(%%eax)\n\t" \
1237 "pushl 4(%%eax)\n\t" \
1238 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1239 VALGRIND_CALL_NOREDIR_EAX \
1240 VALGRIND_RESTORE_STACK \
1241 : /*out*/ "=a" (_res) \
1242 : /*in*/ "a" (&_argvec[0]) \
1243 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1245 lval = (__typeof__(lval)) _res; \
1248 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1251 volatile OrigFn _orig = (orig); \
1252 volatile unsigned long _argvec[8]; \
1253 volatile unsigned long _res; \
1254 _argvec[0] = (unsigned long)_orig.nraddr; \
1255 _argvec[1] = (unsigned long)(arg1); \
1256 _argvec[2] = (unsigned long)(arg2); \
1257 _argvec[3] = (unsigned long)(arg3); \
1258 _argvec[4] = (unsigned long)(arg4); \
1259 _argvec[5] = (unsigned long)(arg5); \
1260 _argvec[6] = (unsigned long)(arg6); \
1261 _argvec[7] = (unsigned long)(arg7); \
1263 VALGRIND_ALIGN_STACK \
1264 "subl $4, %%esp\n\t" \
1265 "pushl 28(%%eax)\n\t" \
1266 "pushl 24(%%eax)\n\t" \
1267 "pushl 20(%%eax)\n\t" \
1268 "pushl 16(%%eax)\n\t" \
1269 "pushl 12(%%eax)\n\t" \
1270 "pushl 8(%%eax)\n\t" \
1271 "pushl 4(%%eax)\n\t" \
1272 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1273 VALGRIND_CALL_NOREDIR_EAX \
1274 VALGRIND_RESTORE_STACK \
1275 : /*out*/ "=a" (_res) \
1276 : /*in*/ "a" (&_argvec[0]) \
1277 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1279 lval = (__typeof__(lval)) _res; \
1282 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1285 volatile OrigFn _orig = (orig); \
1286 volatile unsigned long _argvec[9]; \
1287 volatile unsigned long _res; \
1288 _argvec[0] = (unsigned long)_orig.nraddr; \
1289 _argvec[1] = (unsigned long)(arg1); \
1290 _argvec[2] = (unsigned long)(arg2); \
1291 _argvec[3] = (unsigned long)(arg3); \
1292 _argvec[4] = (unsigned long)(arg4); \
1293 _argvec[5] = (unsigned long)(arg5); \
1294 _argvec[6] = (unsigned long)(arg6); \
1295 _argvec[7] = (unsigned long)(arg7); \
1296 _argvec[8] = (unsigned long)(arg8); \
1298 VALGRIND_ALIGN_STACK \
1299 "pushl 32(%%eax)\n\t" \
1300 "pushl 28(%%eax)\n\t" \
1301 "pushl 24(%%eax)\n\t" \
1302 "pushl 20(%%eax)\n\t" \
1303 "pushl 16(%%eax)\n\t" \
1304 "pushl 12(%%eax)\n\t" \
1305 "pushl 8(%%eax)\n\t" \
1306 "pushl 4(%%eax)\n\t" \
1307 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1308 VALGRIND_CALL_NOREDIR_EAX \
1309 VALGRIND_RESTORE_STACK \
1310 : /*out*/ "=a" (_res) \
1311 : /*in*/ "a" (&_argvec[0]) \
1312 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1314 lval = (__typeof__(lval)) _res; \
1317 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1320 volatile OrigFn _orig = (orig); \
1321 volatile unsigned long _argvec[10]; \
1322 volatile unsigned long _res; \
1323 _argvec[0] = (unsigned long)_orig.nraddr; \
1324 _argvec[1] = (unsigned long)(arg1); \
1325 _argvec[2] = (unsigned long)(arg2); \
1326 _argvec[3] = (unsigned long)(arg3); \
1327 _argvec[4] = (unsigned long)(arg4); \
1328 _argvec[5] = (unsigned long)(arg5); \
1329 _argvec[6] = (unsigned long)(arg6); \
1330 _argvec[7] = (unsigned long)(arg7); \
1331 _argvec[8] = (unsigned long)(arg8); \
1332 _argvec[9] = (unsigned long)(arg9); \
1334 VALGRIND_ALIGN_STACK \
1335 "subl $12, %%esp\n\t" \
1336 "pushl 36(%%eax)\n\t" \
1337 "pushl 32(%%eax)\n\t" \
1338 "pushl 28(%%eax)\n\t" \
1339 "pushl 24(%%eax)\n\t" \
1340 "pushl 20(%%eax)\n\t" \
1341 "pushl 16(%%eax)\n\t" \
1342 "pushl 12(%%eax)\n\t" \
1343 "pushl 8(%%eax)\n\t" \
1344 "pushl 4(%%eax)\n\t" \
1345 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1346 VALGRIND_CALL_NOREDIR_EAX \
1347 VALGRIND_RESTORE_STACK \
1348 : /*out*/ "=a" (_res) \
1349 : /*in*/ "a" (&_argvec[0]) \
1350 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1352 lval = (__typeof__(lval)) _res; \
1355 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1356 arg7,arg8,arg9,arg10) \
1358 volatile OrigFn _orig = (orig); \
1359 volatile unsigned long _argvec[11]; \
1360 volatile unsigned long _res; \
1361 _argvec[0] = (unsigned long)_orig.nraddr; \
1362 _argvec[1] = (unsigned long)(arg1); \
1363 _argvec[2] = (unsigned long)(arg2); \
1364 _argvec[3] = (unsigned long)(arg3); \
1365 _argvec[4] = (unsigned long)(arg4); \
1366 _argvec[5] = (unsigned long)(arg5); \
1367 _argvec[6] = (unsigned long)(arg6); \
1368 _argvec[7] = (unsigned long)(arg7); \
1369 _argvec[8] = (unsigned long)(arg8); \
1370 _argvec[9] = (unsigned long)(arg9); \
1371 _argvec[10] = (unsigned long)(arg10); \
1373 VALGRIND_ALIGN_STACK \
1374 "subl $8, %%esp\n\t" \
1375 "pushl 40(%%eax)\n\t" \
1376 "pushl 36(%%eax)\n\t" \
1377 "pushl 32(%%eax)\n\t" \
1378 "pushl 28(%%eax)\n\t" \
1379 "pushl 24(%%eax)\n\t" \
1380 "pushl 20(%%eax)\n\t" \
1381 "pushl 16(%%eax)\n\t" \
1382 "pushl 12(%%eax)\n\t" \
1383 "pushl 8(%%eax)\n\t" \
1384 "pushl 4(%%eax)\n\t" \
1385 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1386 VALGRIND_CALL_NOREDIR_EAX \
1387 VALGRIND_RESTORE_STACK \
1388 : /*out*/ "=a" (_res) \
1389 : /*in*/ "a" (&_argvec[0]) \
1390 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1392 lval = (__typeof__(lval)) _res; \
1395 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1396 arg6,arg7,arg8,arg9,arg10, \
1399 volatile OrigFn _orig = (orig); \
1400 volatile unsigned long _argvec[12]; \
1401 volatile unsigned long _res; \
1402 _argvec[0] = (unsigned long)_orig.nraddr; \
1403 _argvec[1] = (unsigned long)(arg1); \
1404 _argvec[2] = (unsigned long)(arg2); \
1405 _argvec[3] = (unsigned long)(arg3); \
1406 _argvec[4] = (unsigned long)(arg4); \
1407 _argvec[5] = (unsigned long)(arg5); \
1408 _argvec[6] = (unsigned long)(arg6); \
1409 _argvec[7] = (unsigned long)(arg7); \
1410 _argvec[8] = (unsigned long)(arg8); \
1411 _argvec[9] = (unsigned long)(arg9); \
1412 _argvec[10] = (unsigned long)(arg10); \
1413 _argvec[11] = (unsigned long)(arg11); \
1415 VALGRIND_ALIGN_STACK \
1416 "subl $4, %%esp\n\t" \
1417 "pushl 44(%%eax)\n\t" \
1418 "pushl 40(%%eax)\n\t" \
1419 "pushl 36(%%eax)\n\t" \
1420 "pushl 32(%%eax)\n\t" \
1421 "pushl 28(%%eax)\n\t" \
1422 "pushl 24(%%eax)\n\t" \
1423 "pushl 20(%%eax)\n\t" \
1424 "pushl 16(%%eax)\n\t" \
1425 "pushl 12(%%eax)\n\t" \
1426 "pushl 8(%%eax)\n\t" \
1427 "pushl 4(%%eax)\n\t" \
1428 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1429 VALGRIND_CALL_NOREDIR_EAX \
1430 VALGRIND_RESTORE_STACK \
1431 : /*out*/ "=a" (_res) \
1432 : /*in*/ "a" (&_argvec[0]) \
1433 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1435 lval = (__typeof__(lval)) _res; \
1438 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1439 arg6,arg7,arg8,arg9,arg10, \
1442 volatile OrigFn _orig = (orig); \
1443 volatile unsigned long _argvec[13]; \
1444 volatile unsigned long _res; \
1445 _argvec[0] = (unsigned long)_orig.nraddr; \
1446 _argvec[1] = (unsigned long)(arg1); \
1447 _argvec[2] = (unsigned long)(arg2); \
1448 _argvec[3] = (unsigned long)(arg3); \
1449 _argvec[4] = (unsigned long)(arg4); \
1450 _argvec[5] = (unsigned long)(arg5); \
1451 _argvec[6] = (unsigned long)(arg6); \
1452 _argvec[7] = (unsigned long)(arg7); \
1453 _argvec[8] = (unsigned long)(arg8); \
1454 _argvec[9] = (unsigned long)(arg9); \
1455 _argvec[10] = (unsigned long)(arg10); \
1456 _argvec[11] = (unsigned long)(arg11); \
1457 _argvec[12] = (unsigned long)(arg12); \
1459 VALGRIND_ALIGN_STACK \
1460 "pushl 48(%%eax)\n\t" \
1461 "pushl 44(%%eax)\n\t" \
1462 "pushl 40(%%eax)\n\t" \
1463 "pushl 36(%%eax)\n\t" \
1464 "pushl 32(%%eax)\n\t" \
1465 "pushl 28(%%eax)\n\t" \
1466 "pushl 24(%%eax)\n\t" \
1467 "pushl 20(%%eax)\n\t" \
1468 "pushl 16(%%eax)\n\t" \
1469 "pushl 12(%%eax)\n\t" \
1470 "pushl 8(%%eax)\n\t" \
1471 "pushl 4(%%eax)\n\t" \
1472 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1473 VALGRIND_CALL_NOREDIR_EAX \
1474 VALGRIND_RESTORE_STACK \
1475 : /*out*/ "=a" (_res) \
1476 : /*in*/ "a" (&_argvec[0]) \
1477 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1479 lval = (__typeof__(lval)) _res; \
1482 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1484 /* ------------------------ amd64-{linux,darwin} --------------- */
1486 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1488 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1490 /* These regs are trashed by the hidden call. */
1491 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1492 "rdi", "r8", "r9", "r10", "r11"
1494 /* This is all pretty complex. It's so as to make stack unwinding
1495 work reliably. See bug 243270. The basic problem is the sub and
1496 add of 128 of %rsp in all of the following macros. If gcc believes
1497 the CFA is in %rsp, then unwinding may fail, because what's at the
1498 CFA is not what gcc "expected" when it constructs the CFIs for the
1499 places where the macros are instantiated.
1501 But we can't just add a CFI annotation to increase the CFA offset
1502 by 128, to match the sub of 128 from %rsp, because we don't know
1503 whether gcc has chosen %rsp as the CFA at that point, or whether it
1504 has chosen some other register (eg, %rbp). In the latter case,
1505 adding a CFI annotation to change the CFA offset is simply wrong.
1507 So the solution is to get hold of the CFA using
1508 __builtin_dwarf_cfa(), put it in a known register, and add a
1509 CFI annotation to say what the register is. We choose %rbp for
1510 this (perhaps perversely), because:
1512 (1) %rbp is already subject to unwinding. If a new register was
1513 chosen then the unwinder would have to unwind it in all stack
1514 traces, which is expensive, and
1516 (2) %rbp is already subject to precise exception updates in the
1517 JIT. If a new register was chosen, we'd have to have precise
1518 exceptions for it too, which reduces performance of the
1521 However .. one extra complication. We can't just whack the result
1522 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1523 list of trashed registers at the end of the inline assembly
1524 fragments; gcc won't allow %rbp to appear in that list. Hence
1525 instead we need to stash %rbp in %r15 for the duration of the asm,
1526 and say that %r15 is trashed instead. gcc seems happy to go with
1529 Oh .. and this all needs to be conditionalised so that it is
1530 unchanged from before this commit, when compiled with older gccs
1531 that don't support __builtin_dwarf_cfa. Furthermore, since
1532 this header file is freestanding, it has to be independent of
1533 config.h, and so the following conditionalisation cannot depend on
1534 configure time checks.
1536 Although it's not clear from
1537 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1538 this expression excludes Darwin.
1539 .cfi directives in Darwin assembly appear to be completely
1540 different and I haven't investigated how they work.
1542 For even more entertainment value, note we have to use the
1543 completely undocumented __builtin_dwarf_cfa(), which appears to
1544 really compute the CFA, whereas __builtin_frame_address(0) claims
1545 to but actually doesn't. See
1546 https://bugs.kde.org/show_bug.cgi?id=243270#c47 */
1548 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1549 # define __FRAME_POINTER \
1550 ,"r"(__builtin_dwarf_cfa())
1551 # define VALGRIND_CFI_PROLOGUE \
1552 "movq %%rbp, %%r15\n\t" \
1553 "movq %2, %%rbp\n\t" \
1554 ".cfi_remember_state\n\t" \
1555 ".cfi_def_cfa rbp, 0\n\t"
1556 # define VALGRIND_CFI_EPILOGUE \
1557 "movq %%r15, %%rbp\n\t" \
1558 ".cfi_restore_state\n\t"
1560 # define __FRAME_POINTER
1561 # define VALGRIND_CFI_PROLOGUE
1562 # define VALGRIND_CFI_EPILOGUE
1565 /* Macros to save and align the stack before making a function
1566 call and restore it afterwards as gcc may not keep the stack
1567 pointer aligned if it doesn't realise calls are being made
1568 to other functions. */
1570 #define VALGRIND_ALIGN_STACK \
1571 "movq %%rsp,%%r14\n\t" \
1572 "andq $0xfffffffffffffff0,%%rsp\n\t"
1573 #define VALGRIND_RESTORE_STACK \
1574 "movq %%r14,%%rsp\n\t"
1576 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
long) == 8. */
1579 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1580 macros. In order not to trash the stack redzone, we need to drop
1581 %rsp by 128 before the hidden call, and restore afterwards. The
1582 nastyness is that it is only by luck that the stack still appears
1583 to be unwindable during the hidden call - since then the behaviour
1584 of any routine using this macro does not match what the CFI data
1587 Why is this important? Imagine that a wrapper has a stack
1588 allocated local, and passes to the hidden call, a pointer to it.
1589 Because gcc does not know about the hidden call, it may allocate
1590 that local in the redzone. Unfortunately the hidden call may then
1591 trash it before it comes to use it. So we must step clear of the
1592 redzone, for the duration of the hidden call, to make it safe.
1594 Probably the same problem afflicts the other redzone-style ABIs too
1595 (ppc64-linux); but for those, the stack is
1596 self describing (none of this CFI nonsense) so at least messing
1597 with the stack pointer doesn't give a danger of non-unwindable
stack traces. */
1600 #define CALL_FN_W_v(lval, orig) \
1602 volatile OrigFn _orig = (orig); \
1603 volatile unsigned long _argvec[1]; \
1604 volatile unsigned long _res; \
1605 _argvec[0] = (unsigned long)_orig.nraddr; \
1607 VALGRIND_CFI_PROLOGUE \
1608 VALGRIND_ALIGN_STACK \
1609 "subq $128,%%rsp\n\t" \
1610 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1611 VALGRIND_CALL_NOREDIR_RAX \
1612 VALGRIND_RESTORE_STACK \
1613 VALGRIND_CFI_EPILOGUE \
1614 : /*out*/ "=a" (_res) \
1615 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1616 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1618 lval = (__typeof__(lval)) _res; \
1621 #define CALL_FN_W_W(lval, orig, arg1) \
1623 volatile OrigFn _orig = (orig); \
1624 volatile unsigned long _argvec[2]; \
1625 volatile unsigned long _res; \
1626 _argvec[0] = (unsigned long)_orig.nraddr; \
1627 _argvec[1] = (unsigned long)(arg1); \
1629 VALGRIND_CFI_PROLOGUE \
1630 VALGRIND_ALIGN_STACK \
1631 "subq $128,%%rsp\n\t" \
1632 "movq 8(%%rax), %%rdi\n\t" \
1633 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1634 VALGRIND_CALL_NOREDIR_RAX \
1635 VALGRIND_RESTORE_STACK \
1636 VALGRIND_CFI_EPILOGUE \
1637 : /*out*/ "=a" (_res) \
1638 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1639 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1641 lval = (__typeof__(lval)) _res; \
1644 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1646 volatile OrigFn _orig = (orig); \
1647 volatile unsigned long _argvec[3]; \
1648 volatile unsigned long _res; \
1649 _argvec[0] = (unsigned long)_orig.nraddr; \
1650 _argvec[1] = (unsigned long)(arg1); \
1651 _argvec[2] = (unsigned long)(arg2); \
1653 VALGRIND_CFI_PROLOGUE \
1654 VALGRIND_ALIGN_STACK \
1655 "subq $128,%%rsp\n\t" \
1656 "movq 16(%%rax), %%rsi\n\t" \
1657 "movq 8(%%rax), %%rdi\n\t" \
1658 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1659 VALGRIND_CALL_NOREDIR_RAX \
1660 VALGRIND_RESTORE_STACK \
1661 VALGRIND_CFI_EPILOGUE \
1662 : /*out*/ "=a" (_res) \
1663 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1664 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1666 lval = (__typeof__(lval)) _res; \
1669 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1671 volatile OrigFn _orig = (orig); \
1672 volatile unsigned long _argvec[4]; \
1673 volatile unsigned long _res; \
1674 _argvec[0] = (unsigned long)_orig.nraddr; \
1675 _argvec[1] = (unsigned long)(arg1); \
1676 _argvec[2] = (unsigned long)(arg2); \
1677 _argvec[3] = (unsigned long)(arg3); \
1679 VALGRIND_CFI_PROLOGUE \
1680 VALGRIND_ALIGN_STACK \
1681 "subq $128,%%rsp\n\t" \
1682 "movq 24(%%rax), %%rdx\n\t" \
1683 "movq 16(%%rax), %%rsi\n\t" \
1684 "movq 8(%%rax), %%rdi\n\t" \
1685 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1686 VALGRIND_CALL_NOREDIR_RAX \
1687 VALGRIND_RESTORE_STACK \
1688 VALGRIND_CFI_EPILOGUE \
1689 : /*out*/ "=a" (_res) \
1690 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1691 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1693 lval = (__typeof__(lval)) _res; \
1696 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1698 volatile OrigFn _orig = (orig); \
1699 volatile unsigned long _argvec[5]; \
1700 volatile unsigned long _res; \
1701 _argvec[0] = (unsigned long)_orig.nraddr; \
1702 _argvec[1] = (unsigned long)(arg1); \
1703 _argvec[2] = (unsigned long)(arg2); \
1704 _argvec[3] = (unsigned long)(arg3); \
1705 _argvec[4] = (unsigned long)(arg4); \
1707 VALGRIND_CFI_PROLOGUE \
1708 VALGRIND_ALIGN_STACK \
1709 "subq $128,%%rsp\n\t" \
1710 "movq 32(%%rax), %%rcx\n\t" \
1711 "movq 24(%%rax), %%rdx\n\t" \
1712 "movq 16(%%rax), %%rsi\n\t" \
1713 "movq 8(%%rax), %%rdi\n\t" \
1714 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1715 VALGRIND_CALL_NOREDIR_RAX \
1716 VALGRIND_RESTORE_STACK \
1717 VALGRIND_CFI_EPILOGUE \
1718 : /*out*/ "=a" (_res) \
1719 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1720 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1722 lval = (__typeof__(lval)) _res; \
1725 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1727 volatile OrigFn _orig = (orig); \
1728 volatile unsigned long _argvec[6]; \
1729 volatile unsigned long _res; \
1730 _argvec[0] = (unsigned long)_orig.nraddr; \
1731 _argvec[1] = (unsigned long)(arg1); \
1732 _argvec[2] = (unsigned long)(arg2); \
1733 _argvec[3] = (unsigned long)(arg3); \
1734 _argvec[4] = (unsigned long)(arg4); \
1735 _argvec[5] = (unsigned long)(arg5); \
1737 VALGRIND_CFI_PROLOGUE \
1738 VALGRIND_ALIGN_STACK \
1739 "subq $128,%%rsp\n\t" \
1740 "movq 40(%%rax), %%r8\n\t" \
1741 "movq 32(%%rax), %%rcx\n\t" \
1742 "movq 24(%%rax), %%rdx\n\t" \
1743 "movq 16(%%rax), %%rsi\n\t" \
1744 "movq 8(%%rax), %%rdi\n\t" \
1745 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1746 VALGRIND_CALL_NOREDIR_RAX \
1747 VALGRIND_RESTORE_STACK \
1748 VALGRIND_CFI_EPILOGUE \
1749 : /*out*/ "=a" (_res) \
1750 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1751 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1753 lval = (__typeof__(lval)) _res; \
1756 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1758 volatile OrigFn _orig = (orig); \
1759 volatile unsigned long _argvec[7]; \
1760 volatile unsigned long _res; \
1761 _argvec[0] = (unsigned long)_orig.nraddr; \
1762 _argvec[1] = (unsigned long)(arg1); \
1763 _argvec[2] = (unsigned long)(arg2); \
1764 _argvec[3] = (unsigned long)(arg3); \
1765 _argvec[4] = (unsigned long)(arg4); \
1766 _argvec[5] = (unsigned long)(arg5); \
1767 _argvec[6] = (unsigned long)(arg6); \
1769 VALGRIND_CFI_PROLOGUE \
1770 VALGRIND_ALIGN_STACK \
1771 "subq $128,%%rsp\n\t" \
1772 "movq 48(%%rax), %%r9\n\t" \
1773 "movq 40(%%rax), %%r8\n\t" \
1774 "movq 32(%%rax), %%rcx\n\t" \
1775 "movq 24(%%rax), %%rdx\n\t" \
1776 "movq 16(%%rax), %%rsi\n\t" \
1777 "movq 8(%%rax), %%rdi\n\t" \
1778 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1779 VALGRIND_CALL_NOREDIR_RAX \
1780 VALGRIND_RESTORE_STACK \
1781 VALGRIND_CFI_EPILOGUE \
1782 : /*out*/ "=a" (_res) \
1783 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1784 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1786 lval = (__typeof__(lval)) _res; \
1789 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1792 volatile OrigFn _orig = (orig); \
1793 volatile unsigned long _argvec[8]; \
1794 volatile unsigned long _res; \
1795 _argvec[0] = (unsigned long)_orig.nraddr; \
1796 _argvec[1] = (unsigned long)(arg1); \
1797 _argvec[2] = (unsigned long)(arg2); \
1798 _argvec[3] = (unsigned long)(arg3); \
1799 _argvec[4] = (unsigned long)(arg4); \
1800 _argvec[5] = (unsigned long)(arg5); \
1801 _argvec[6] = (unsigned long)(arg6); \
1802 _argvec[7] = (unsigned long)(arg7); \
1804 VALGRIND_CFI_PROLOGUE \
1805 VALGRIND_ALIGN_STACK \
1806 "subq $136,%%rsp\n\t" \
1807 "pushq 56(%%rax)\n\t" \
1808 "movq 48(%%rax), %%r9\n\t" \
1809 "movq 40(%%rax), %%r8\n\t" \
1810 "movq 32(%%rax), %%rcx\n\t" \
1811 "movq 24(%%rax), %%rdx\n\t" \
1812 "movq 16(%%rax), %%rsi\n\t" \
1813 "movq 8(%%rax), %%rdi\n\t" \
1814 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1815 VALGRIND_CALL_NOREDIR_RAX \
1816 VALGRIND_RESTORE_STACK \
1817 VALGRIND_CFI_EPILOGUE \
1818 : /*out*/ "=a" (_res) \
1819 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1820 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1822 lval = (__typeof__(lval)) _res; \
1825 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1828 volatile OrigFn _orig = (orig); \
1829 volatile unsigned long _argvec[9]; \
1830 volatile unsigned long _res; \
1831 _argvec[0] = (unsigned long)_orig.nraddr; \
1832 _argvec[1] = (unsigned long)(arg1); \
1833 _argvec[2] = (unsigned long)(arg2); \
1834 _argvec[3] = (unsigned long)(arg3); \
1835 _argvec[4] = (unsigned long)(arg4); \
1836 _argvec[5] = (unsigned long)(arg5); \
1837 _argvec[6] = (unsigned long)(arg6); \
1838 _argvec[7] = (unsigned long)(arg7); \
1839 _argvec[8] = (unsigned long)(arg8); \
1841 VALGRIND_CFI_PROLOGUE \
1842 VALGRIND_ALIGN_STACK \
1843 "subq $128,%%rsp\n\t" \
1844 "pushq 64(%%rax)\n\t" \
1845 "pushq 56(%%rax)\n\t" \
1846 "movq 48(%%rax), %%r9\n\t" \
1847 "movq 40(%%rax), %%r8\n\t" \
1848 "movq 32(%%rax), %%rcx\n\t" \
1849 "movq 24(%%rax), %%rdx\n\t" \
1850 "movq 16(%%rax), %%rsi\n\t" \
1851 "movq 8(%%rax), %%rdi\n\t" \
1852 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1853 VALGRIND_CALL_NOREDIR_RAX \
1854 VALGRIND_RESTORE_STACK \
1855 VALGRIND_CFI_EPILOGUE \
1856 : /*out*/ "=a" (_res) \
1857 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1858 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1860 lval = (__typeof__(lval)) _res; \
1863 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1866 volatile OrigFn _orig = (orig); \
1867 volatile unsigned long _argvec[10]; \
1868 volatile unsigned long _res; \
1869 _argvec[0] = (unsigned long)_orig.nraddr; \
1870 _argvec[1] = (unsigned long)(arg1); \
1871 _argvec[2] = (unsigned long)(arg2); \
1872 _argvec[3] = (unsigned long)(arg3); \
1873 _argvec[4] = (unsigned long)(arg4); \
1874 _argvec[5] = (unsigned long)(arg5); \
1875 _argvec[6] = (unsigned long)(arg6); \
1876 _argvec[7] = (unsigned long)(arg7); \
1877 _argvec[8] = (unsigned long)(arg8); \
1878 _argvec[9] = (unsigned long)(arg9); \
1880 VALGRIND_CFI_PROLOGUE \
1881 VALGRIND_ALIGN_STACK \
1882 "subq $136,%%rsp\n\t" \
1883 "pushq 72(%%rax)\n\t" \
1884 "pushq 64(%%rax)\n\t" \
1885 "pushq 56(%%rax)\n\t" \
1886 "movq 48(%%rax), %%r9\n\t" \
1887 "movq 40(%%rax), %%r8\n\t" \
1888 "movq 32(%%rax), %%rcx\n\t" \
1889 "movq 24(%%rax), %%rdx\n\t" \
1890 "movq 16(%%rax), %%rsi\n\t" \
1891 "movq 8(%%rax), %%rdi\n\t" \
1892 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1893 VALGRIND_CALL_NOREDIR_RAX \
1894 VALGRIND_RESTORE_STACK \
1895 VALGRIND_CFI_EPILOGUE \
1896 : /*out*/ "=a" (_res) \
1897 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1898 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1900 lval = (__typeof__(lval)) _res; \
1903 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1904 arg7,arg8,arg9,arg10) \
1906 volatile OrigFn _orig = (orig); \
1907 volatile unsigned long _argvec[11]; \
1908 volatile unsigned long _res; \
1909 _argvec[0] = (unsigned long)_orig.nraddr; \
1910 _argvec[1] = (unsigned long)(arg1); \
1911 _argvec[2] = (unsigned long)(arg2); \
1912 _argvec[3] = (unsigned long)(arg3); \
1913 _argvec[4] = (unsigned long)(arg4); \
1914 _argvec[5] = (unsigned long)(arg5); \
1915 _argvec[6] = (unsigned long)(arg6); \
1916 _argvec[7] = (unsigned long)(arg7); \
1917 _argvec[8] = (unsigned long)(arg8); \
1918 _argvec[9] = (unsigned long)(arg9); \
1919 _argvec[10] = (unsigned long)(arg10); \
1921 VALGRIND_CFI_PROLOGUE \
1922 VALGRIND_ALIGN_STACK \
1923 "subq $128,%%rsp\n\t" \
1924 "pushq 80(%%rax)\n\t" \
1925 "pushq 72(%%rax)\n\t" \
1926 "pushq 64(%%rax)\n\t" \
1927 "pushq 56(%%rax)\n\t" \
1928 "movq 48(%%rax), %%r9\n\t" \
1929 "movq 40(%%rax), %%r8\n\t" \
1930 "movq 32(%%rax), %%rcx\n\t" \
1931 "movq 24(%%rax), %%rdx\n\t" \
1932 "movq 16(%%rax), %%rsi\n\t" \
1933 "movq 8(%%rax), %%rdi\n\t" \
1934 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1935 VALGRIND_CALL_NOREDIR_RAX \
1936 VALGRIND_RESTORE_STACK \
1937 VALGRIND_CFI_EPILOGUE \
1938 : /*out*/ "=a" (_res) \
1939 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1940 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1942 lval = (__typeof__(lval)) _res; \
1945 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1946 arg7,arg8,arg9,arg10,arg11) \
1948 volatile OrigFn _orig = (orig); \
1949 volatile unsigned long _argvec[12]; \
1950 volatile unsigned long _res; \
1951 _argvec[0] = (unsigned long)_orig.nraddr; \
1952 _argvec[1] = (unsigned long)(arg1); \
1953 _argvec[2] = (unsigned long)(arg2); \
1954 _argvec[3] = (unsigned long)(arg3); \
1955 _argvec[4] = (unsigned long)(arg4); \
1956 _argvec[5] = (unsigned long)(arg5); \
1957 _argvec[6] = (unsigned long)(arg6); \
1958 _argvec[7] = (unsigned long)(arg7); \
1959 _argvec[8] = (unsigned long)(arg8); \
1960 _argvec[9] = (unsigned long)(arg9); \
1961 _argvec[10] = (unsigned long)(arg10); \
1962 _argvec[11] = (unsigned long)(arg11); \
1964 VALGRIND_CFI_PROLOGUE \
1965 VALGRIND_ALIGN_STACK \
1966 "subq $136,%%rsp\n\t" \
1967 "pushq 88(%%rax)\n\t" \
1968 "pushq 80(%%rax)\n\t" \
1969 "pushq 72(%%rax)\n\t" \
1970 "pushq 64(%%rax)\n\t" \
1971 "pushq 56(%%rax)\n\t" \
1972 "movq 48(%%rax), %%r9\n\t" \
1973 "movq 40(%%rax), %%r8\n\t" \
1974 "movq 32(%%rax), %%rcx\n\t" \
1975 "movq 24(%%rax), %%rdx\n\t" \
1976 "movq 16(%%rax), %%rsi\n\t" \
1977 "movq 8(%%rax), %%rdi\n\t" \
1978 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1979 VALGRIND_CALL_NOREDIR_RAX \
1980 VALGRIND_RESTORE_STACK \
1981 VALGRIND_CFI_EPILOGUE \
1982 : /*out*/ "=a" (_res) \
1983 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1984 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1986 lval = (__typeof__(lval)) _res; \
1989 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1990 arg7,arg8,arg9,arg10,arg11,arg12) /* call a 12-word-arg fn without redirection; result -> lval */ \
1992 volatile OrigFn _orig = (orig); \
1993 volatile unsigned long _argvec[13]; /* [0]=target, [1..12]=args */ \
1994 volatile unsigned long _res; \
1995 _argvec[0] = (unsigned long)_orig.nraddr; \
1996 _argvec[1] = (unsigned long)(arg1); \
1997 _argvec[2] = (unsigned long)(arg2); \
1998 _argvec[3] = (unsigned long)(arg3); \
1999 _argvec[4] = (unsigned long)(arg4); \
2000 _argvec[5] = (unsigned long)(arg5); \
2001 _argvec[6] = (unsigned long)(arg6); \
2002 _argvec[7] = (unsigned long)(arg7); \
2003 _argvec[8] = (unsigned long)(arg8); \
2004 _argvec[9] = (unsigned long)(arg9); \
2005 _argvec[10] = (unsigned long)(arg10); \
2006 _argvec[11] = (unsigned long)(arg11); \
2007 _argvec[12] = (unsigned long)(arg12); \
2009 VALGRIND_CFI_PROLOGUE \
2010 VALGRIND_ALIGN_STACK \
2011 "subq $128,%%rsp\n\t" /* 128 + 6 pushes*8 = 176: keeps %rsp 16-aligned at the call */ \
2012 "pushq 96(%%rax)\n\t" /* arg12 (stack args pushed right-to-left) */ \
2013 "pushq 88(%%rax)\n\t" \
2014 "pushq 80(%%rax)\n\t" \
2015 "pushq 72(%%rax)\n\t" \
2016 "pushq 64(%%rax)\n\t" \
2017 "pushq 56(%%rax)\n\t" /* arg7 */ \
2018 "movq 48(%%rax), %%r9\n\t" /* arg6 */ \
2019 "movq 40(%%rax), %%r8\n\t" /* arg5 */ \
2020 "movq 32(%%rax), %%rcx\n\t" /* arg4 */ \
2021 "movq 24(%%rax), %%rdx\n\t" /* arg3 */ \
2022 "movq 16(%%rax), %%rsi\n\t" /* arg2 */ \
2023 "movq 8(%%rax), %%rdi\n\t" /* arg1 */ \
2024 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2025 VALGRIND_CALL_NOREDIR_RAX \
2026 VALGRIND_RESTORE_STACK \
2027 VALGRIND_CFI_EPILOGUE \
2028 : /*out*/ "=a" (_res) \
2029 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2030 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2032 lval = (__typeof__(lval)) _res; \
2035 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2037 /* ------------------------ ppc32-linux ------------------------ */
2039 #if defined(PLAT_ppc32_linux)
2041 /* This is useful for finding out about the on-stack stuff:
2043 extern int f9 ( int,int,int,int,int,int,int,int,int );
2044 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2045 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2046 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2049 return f9(11,22,33,44,55,66,77,88,99);
2052 return f10(11,22,33,44,55,66,77,88,99,110);
2055 return f11(11,22,33,44,55,66,77,88,99,110,121);
2058 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2062 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2064 /* These regs are trashed by the hidden call. */
2065 #define __CALLER_SAVED_REGS /* volatile (caller-saved) regs per the ppc32 ELF ABI */ \
2066 "lr", "ctr", "xer", \
2067 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2068 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", /* NOTE(review): trailing "r11","r12","r13" line appears dropped by extraction */ \
2071 /* Macros to save and align the stack before making a function
2072 call and restore it afterwards as gcc may not keep the stack
2073 pointer aligned if it doesn't realise calls are being made
2074 to other functions. */
2076 #define VALGRIND_ALIGN_STACK /* NOTE(review): "mr 28,1" (save sp in r28) appears dropped by extraction */ \
2078 "rlwinm 1,1,0,0,27\n\t" /* clear low 4 bits of r1 -> 16-byte align */
2079 #define VALGRIND_RESTORE_STACK /* NOTE(review): "mr 1,28" body appears dropped by extraction */
2082 /* These CALL_FN_ macros assume that on ppc32-linux,
2083 sizeof(unsigned long) == 4. */
2085 #define CALL_FN_W_v(lval, orig) /* zero-arg hidden call; result -> lval. NOTE(review): do{} wrapper, "__asm__ volatile(" and "mr 11,%1" lines appear dropped by extraction throughout this section */ \
2087 volatile OrigFn _orig = (orig); \
2088 volatile unsigned long _argvec[1]; /* [0]=target addr */ \
2089 volatile unsigned long _res; \
2090 _argvec[0] = (unsigned long)_orig.nraddr; \
2092 VALGRIND_ALIGN_STACK \
2094 "lwz 11,0(11)\n\t" /* target->r11 */ \
2095 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2096 VALGRIND_RESTORE_STACK \
2098 : /*out*/ "=r" (_res) \
2099 : /*in*/ "r" (&_argvec[0]) \
2100 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2102 lval = (__typeof__(lval)) _res; \
2105 #define CALL_FN_W_W(lval, orig, arg1) /* 1-arg hidden call; result -> lval */ \
2107 volatile OrigFn _orig = (orig); \
2108 volatile unsigned long _argvec[2]; /* [0]=target, [1]=arg1 */ \
2109 volatile unsigned long _res; \
2110 _argvec[0] = (unsigned long)_orig.nraddr; \
2111 _argvec[1] = (unsigned long)arg1; \
2113 VALGRIND_ALIGN_STACK \
2115 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2116 "lwz 11,0(11)\n\t" /* target->r11 */ \
2117 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2118 VALGRIND_RESTORE_STACK \
2120 : /*out*/ "=r" (_res) \
2121 : /*in*/ "r" (&_argvec[0]) \
2122 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2124 lval = (__typeof__(lval)) _res; \
2127 #define CALL_FN_W_WW(lval, orig, arg1,arg2) /* 2-arg hidden call; result -> lval */ \
2129 volatile OrigFn _orig = (orig); \
2130 volatile unsigned long _argvec[3]; \
2131 volatile unsigned long _res; \
2132 _argvec[0] = (unsigned long)_orig.nraddr; \
2133 _argvec[1] = (unsigned long)arg1; \
2134 _argvec[2] = (unsigned long)arg2; \
2136 VALGRIND_ALIGN_STACK \
2138 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2140 "lwz 11,0(11)\n\t" /* target->r11 */ /* NOTE(review): arg2->r4 load (lwz 4,8(11)) appears dropped by extraction */ \
2141 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2142 VALGRIND_RESTORE_STACK \
2144 : /*out*/ "=r" (_res) \
2145 : /*in*/ "r" (&_argvec[0]) \
2146 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2148 lval = (__typeof__(lval)) _res; \
2151 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) /* 3-arg hidden call; result -> lval */ \
2153 volatile OrigFn _orig = (orig); \
2154 volatile unsigned long _argvec[4]; \
2155 volatile unsigned long _res; \
2156 _argvec[0] = (unsigned long)_orig.nraddr; \
2157 _argvec[1] = (unsigned long)arg1; \
2158 _argvec[2] = (unsigned long)arg2; \
2159 _argvec[3] = (unsigned long)arg3; \
2161 VALGRIND_ALIGN_STACK \
2163 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2165 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2166 "lwz 11,0(11)\n\t" /* target->r11 */ \
2167 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2168 VALGRIND_RESTORE_STACK \
2170 : /*out*/ "=r" (_res) \
2171 : /*in*/ "r" (&_argvec[0]) \
2172 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2174 lval = (__typeof__(lval)) _res; \
2177 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) /* 4-arg hidden call; result -> lval */ \
2179 volatile OrigFn _orig = (orig); \
2180 volatile unsigned long _argvec[5]; \
2181 volatile unsigned long _res; \
2182 _argvec[0] = (unsigned long)_orig.nraddr; \
2183 _argvec[1] = (unsigned long)arg1; \
2184 _argvec[2] = (unsigned long)arg2; \
2185 _argvec[3] = (unsigned long)arg3; \
2186 _argvec[4] = (unsigned long)arg4; \
2188 VALGRIND_ALIGN_STACK \
2190 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2192 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2193 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2194 "lwz 11,0(11)\n\t" /* target->r11 */ \
2195 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2196 VALGRIND_RESTORE_STACK \
2198 : /*out*/ "=r" (_res) \
2199 : /*in*/ "r" (&_argvec[0]) \
2200 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2202 lval = (__typeof__(lval)) _res; \
2205 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) /* 5-arg hidden call; result -> lval */ \
2207 volatile OrigFn _orig = (orig); \
2208 volatile unsigned long _argvec[6]; \
2209 volatile unsigned long _res; \
2210 _argvec[0] = (unsigned long)_orig.nraddr; \
2211 _argvec[1] = (unsigned long)arg1; \
2212 _argvec[2] = (unsigned long)arg2; \
2213 _argvec[3] = (unsigned long)arg3; \
2214 _argvec[4] = (unsigned long)arg4; \
2215 _argvec[5] = (unsigned long)arg5; \
2217 VALGRIND_ALIGN_STACK \
2219 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2221 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2222 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2223 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2224 "lwz 11,0(11)\n\t" /* target->r11 */ \
2225 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2226 VALGRIND_RESTORE_STACK \
2228 : /*out*/ "=r" (_res) \
2229 : /*in*/ "r" (&_argvec[0]) \
2230 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2232 lval = (__typeof__(lval)) _res; \
2235 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) /* 6-arg hidden call; result -> lval */ \
2237 volatile OrigFn _orig = (orig); \
2238 volatile unsigned long _argvec[7]; \
2239 volatile unsigned long _res; \
2240 _argvec[0] = (unsigned long)_orig.nraddr; \
2241 _argvec[1] = (unsigned long)arg1; \
2242 _argvec[2] = (unsigned long)arg2; \
2243 _argvec[3] = (unsigned long)arg3; \
2244 _argvec[4] = (unsigned long)arg4; \
2245 _argvec[5] = (unsigned long)arg5; \
2246 _argvec[6] = (unsigned long)arg6; \
2248 VALGRIND_ALIGN_STACK \
2250 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2252 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2253 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2254 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2255 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2256 "lwz 11,0(11)\n\t" /* target->r11 */ \
2257 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2258 VALGRIND_RESTORE_STACK \
2260 : /*out*/ "=r" (_res) \
2261 : /*in*/ "r" (&_argvec[0]) \
2262 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2264 lval = (__typeof__(lval)) _res; \
2267 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2270 volatile OrigFn _orig = (orig); /* 7-arg hidden call; NOTE(review): the "arg7)" parameter-list continuation line appears dropped by extraction */ \
2271 volatile unsigned long _argvec[8]; \
2272 volatile unsigned long _res; \
2273 _argvec[0] = (unsigned long)_orig.nraddr; \
2274 _argvec[1] = (unsigned long)arg1; \
2275 _argvec[2] = (unsigned long)arg2; \
2276 _argvec[3] = (unsigned long)arg3; \
2277 _argvec[4] = (unsigned long)arg4; \
2278 _argvec[5] = (unsigned long)arg5; \
2279 _argvec[6] = (unsigned long)arg6; \
2280 _argvec[7] = (unsigned long)arg7; \
2282 VALGRIND_ALIGN_STACK \
2284 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2286 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2287 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2288 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2289 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2290 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2291 "lwz 11,0(11)\n\t" /* target->r11 */ \
2292 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2293 VALGRIND_RESTORE_STACK \
2295 : /*out*/ "=r" (_res) \
2296 : /*in*/ "r" (&_argvec[0]) \
2297 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2299 lval = (__typeof__(lval)) _res; \
2302 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2305 volatile OrigFn _orig = (orig); /* 8-arg hidden call; all args fit in r3..r10 */ \
2306 volatile unsigned long _argvec[9]; \
2307 volatile unsigned long _res; \
2308 _argvec[0] = (unsigned long)_orig.nraddr; \
2309 _argvec[1] = (unsigned long)arg1; \
2310 _argvec[2] = (unsigned long)arg2; \
2311 _argvec[3] = (unsigned long)arg3; \
2312 _argvec[4] = (unsigned long)arg4; \
2313 _argvec[5] = (unsigned long)arg5; \
2314 _argvec[6] = (unsigned long)arg6; \
2315 _argvec[7] = (unsigned long)arg7; \
2316 _argvec[8] = (unsigned long)arg8; \
2318 VALGRIND_ALIGN_STACK \
2320 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2322 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2323 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2324 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2325 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2326 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2327 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2328 "lwz 11,0(11)\n\t" /* target->r11 */ \
2329 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2330 VALGRIND_RESTORE_STACK \
2332 : /*out*/ "=r" (_res) \
2333 : /*in*/ "r" (&_argvec[0]) \
2334 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2336 lval = (__typeof__(lval)) _res; \
2339 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2342 volatile OrigFn _orig = (orig); /* 9-arg hidden call; arg9 overflows to the stack */ \
2343 volatile unsigned long _argvec[10]; \
2344 volatile unsigned long _res; \
2345 _argvec[0] = (unsigned long)_orig.nraddr; \
2346 _argvec[1] = (unsigned long)arg1; \
2347 _argvec[2] = (unsigned long)arg2; \
2348 _argvec[3] = (unsigned long)arg3; \
2349 _argvec[4] = (unsigned long)arg4; \
2350 _argvec[5] = (unsigned long)arg5; \
2351 _argvec[6] = (unsigned long)arg6; \
2352 _argvec[7] = (unsigned long)arg7; \
2353 _argvec[8] = (unsigned long)arg8; \
2354 _argvec[9] = (unsigned long)arg9; \
2356 VALGRIND_ALIGN_STACK \
2358 "addi 1,1,-16\n\t" /* grow outgoing-arg area, keeps 16-byte alignment */ \
2360 "lwz 3,36(11)\n\t" /* arg9 staged via r3; NOTE(review): matching stw-to-stack line appears dropped by extraction */ \
2363 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2365 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2366 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2367 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2368 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2369 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2370 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2371 "lwz 11,0(11)\n\t" /* target->r11 */ \
2372 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2373 VALGRIND_RESTORE_STACK \
2375 : /*out*/ "=r" (_res) \
2376 : /*in*/ "r" (&_argvec[0]) \
2377 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2379 lval = (__typeof__(lval)) _res; \
2382 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2383 arg7,arg8,arg9,arg10) /* 10-arg hidden call; arg9/arg10 overflow to the stack */ \
2385 volatile OrigFn _orig = (orig); \
2386 volatile unsigned long _argvec[11]; \
2387 volatile unsigned long _res; \
2388 _argvec[0] = (unsigned long)_orig.nraddr; \
2389 _argvec[1] = (unsigned long)arg1; \
2390 _argvec[2] = (unsigned long)arg2; \
2391 _argvec[3] = (unsigned long)arg3; \
2392 _argvec[4] = (unsigned long)arg4; \
2393 _argvec[5] = (unsigned long)arg5; \
2394 _argvec[6] = (unsigned long)arg6; \
2395 _argvec[7] = (unsigned long)arg7; \
2396 _argvec[8] = (unsigned long)arg8; \
2397 _argvec[9] = (unsigned long)arg9; \
2398 _argvec[10] = (unsigned long)arg10; \
2400 VALGRIND_ALIGN_STACK \
2402 "addi 1,1,-16\n\t" /* grow outgoing-arg area */ \
2404 "lwz 3,40(11)\n\t" /* arg10 staged via r3 */ \
2407 "lwz 3,36(11)\n\t" /* arg9 staged via r3 */ \
2410 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2412 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2413 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2414 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2415 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2416 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2417 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2418 "lwz 11,0(11)\n\t" /* target->r11 */ \
2419 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2420 VALGRIND_RESTORE_STACK \
2422 : /*out*/ "=r" (_res) \
2423 : /*in*/ "r" (&_argvec[0]) \
2424 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2426 lval = (__typeof__(lval)) _res; \
2429 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2430 arg7,arg8,arg9,arg10,arg11) /* 11-arg hidden call; args 9-11 overflow to the stack */ \
2432 volatile OrigFn _orig = (orig); \
2433 volatile unsigned long _argvec[12]; \
2434 volatile unsigned long _res; \
2435 _argvec[0] = (unsigned long)_orig.nraddr; \
2436 _argvec[1] = (unsigned long)arg1; \
2437 _argvec[2] = (unsigned long)arg2; \
2438 _argvec[3] = (unsigned long)arg3; \
2439 _argvec[4] = (unsigned long)arg4; \
2440 _argvec[5] = (unsigned long)arg5; \
2441 _argvec[6] = (unsigned long)arg6; \
2442 _argvec[7] = (unsigned long)arg7; \
2443 _argvec[8] = (unsigned long)arg8; \
2444 _argvec[9] = (unsigned long)arg9; \
2445 _argvec[10] = (unsigned long)arg10; \
2446 _argvec[11] = (unsigned long)arg11; \
2448 VALGRIND_ALIGN_STACK \
2450 "addi 1,1,-32\n\t" /* grow outgoing-arg area */ \
2452 "lwz 3,44(11)\n\t" /* arg11 staged via r3 */ \
2455 "lwz 3,40(11)\n\t" /* arg10 staged via r3 */ \
2458 "lwz 3,36(11)\n\t" /* arg9 staged via r3 */ \
2461 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2463 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2464 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2465 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2466 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2467 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2468 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2469 "lwz 11,0(11)\n\t" /* target->r11 */ \
2470 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2471 VALGRIND_RESTORE_STACK \
2473 : /*out*/ "=r" (_res) \
2474 : /*in*/ "r" (&_argvec[0]) \
2475 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2477 lval = (__typeof__(lval)) _res; \
2480 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2481 arg7,arg8,arg9,arg10,arg11,arg12) /* 12-arg hidden call; args 9-12 overflow to the stack */ \
2483 volatile OrigFn _orig = (orig); \
2484 volatile unsigned long _argvec[13]; \
2485 volatile unsigned long _res; \
2486 _argvec[0] = (unsigned long)_orig.nraddr; \
2487 _argvec[1] = (unsigned long)arg1; \
2488 _argvec[2] = (unsigned long)arg2; \
2489 _argvec[3] = (unsigned long)arg3; \
2490 _argvec[4] = (unsigned long)arg4; \
2491 _argvec[5] = (unsigned long)arg5; \
2492 _argvec[6] = (unsigned long)arg6; \
2493 _argvec[7] = (unsigned long)arg7; \
2494 _argvec[8] = (unsigned long)arg8; \
2495 _argvec[9] = (unsigned long)arg9; \
2496 _argvec[10] = (unsigned long)arg10; \
2497 _argvec[11] = (unsigned long)arg11; \
2498 _argvec[12] = (unsigned long)arg12; \
2500 VALGRIND_ALIGN_STACK \
2502 "addi 1,1,-32\n\t" /* grow outgoing-arg area */ \
2504 "lwz 3,48(11)\n\t" /* arg12 staged via r3 */ \
2507 "lwz 3,44(11)\n\t" /* arg11 staged via r3 */ \
2510 "lwz 3,40(11)\n\t" /* arg10 staged via r3 */ \
2513 "lwz 3,36(11)\n\t" /* arg9 staged via r3 */ \
2516 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2518 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2519 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2520 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2521 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2522 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2523 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2524 "lwz 11,0(11)\n\t" /* target->r11 */ \
2525 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2526 VALGRIND_RESTORE_STACK \
2528 : /*out*/ "=r" (_res) \
2529 : /*in*/ "r" (&_argvec[0]) \
2530 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2532 lval = (__typeof__(lval)) _res; \
2535 #endif /* PLAT_ppc32_linux */
2537 /* ------------------------ ppc64-linux ------------------------ */
2539 #if defined(PLAT_ppc64_linux)
2541 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2543 /* These regs are trashed by the hidden call. */
2544 #define __CALLER_SAVED_REGS /* volatile (caller-saved) regs per the ppc64 ELF ABI */ \
2545 "lr", "ctr", "xer", \
2546 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2547 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", /* NOTE(review): trailing "r11","r12","r13" line appears dropped by extraction */ \
2550 /* Macros to save and align the stack before making a function
2551 call and restore it afterwards as gcc may not keep the stack
2552 pointer aligned if it doesn't realise calls are being made
2553 to other functions. */
2555 #define VALGRIND_ALIGN_STACK /* NOTE(review): "mr 28,1" (save sp in r28) appears dropped by extraction */ \
2557 "rldicr 1,1,0,59\n\t" /* clear low 4 bits of r1 -> 16-byte align */
2558 #define VALGRIND_RESTORE_STACK /* NOTE(review): "mr 1,28" body appears dropped by extraction */ \
2561 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2564 #define CALL_FN_W_v(lval, orig) /* zero-arg hidden call via TOC-aware descriptor; result -> lval */ \
2566 volatile OrigFn _orig = (orig); \
2567 volatile unsigned long _argvec[3+0]; /* [1]=saved r2, [2]=target */ \
2568 volatile unsigned long _res; \
2569 /* _argvec[0] holds current r2 across the call */ \
2570 _argvec[1] = (unsigned long)_orig.r2; \
2571 _argvec[2] = (unsigned long)_orig.nraddr; \
2573 VALGRIND_ALIGN_STACK \
2575 "std 2,-16(11)\n\t" /* save tocptr */ \
2576 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2577 "ld 11, 0(11)\n\t" /* target->r11 */ \
2578 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2581 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2582 VALGRIND_RESTORE_STACK \
2583 : /*out*/ "=r" (_res) \
2584 : /*in*/ "r" (&_argvec[2]) \
2585 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2587 lval = (__typeof__(lval)) _res; \
2590 #define CALL_FN_W_W(lval, orig, arg1) /* 1-arg hidden call; result -> lval */ \
2592 volatile OrigFn _orig = (orig); \
2593 volatile unsigned long _argvec[3+1]; \
2594 volatile unsigned long _res; \
2595 /* _argvec[0] holds current r2 across the call */ \
2596 _argvec[1] = (unsigned long)_orig.r2; \
2597 _argvec[2] = (unsigned long)_orig.nraddr; \
2598 _argvec[2+1] = (unsigned long)arg1; \
2600 VALGRIND_ALIGN_STACK \
2602 "std 2,-16(11)\n\t" /* save tocptr */ \
2603 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2604 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2605 "ld 11, 0(11)\n\t" /* target->r11 */ \
2606 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2609 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2610 VALGRIND_RESTORE_STACK \
2611 : /*out*/ "=r" (_res) \
2612 : /*in*/ "r" (&_argvec[2]) \
2613 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2615 lval = (__typeof__(lval)) _res; \
2618 #define CALL_FN_W_WW(lval, orig, arg1,arg2) /* 2-arg hidden call; result -> lval */ \
2620 volatile OrigFn _orig = (orig); \
2621 volatile unsigned long _argvec[3+2]; \
2622 volatile unsigned long _res; \
2623 /* _argvec[0] holds current r2 across the call */ \
2624 _argvec[1] = (unsigned long)_orig.r2; \
2625 _argvec[2] = (unsigned long)_orig.nraddr; \
2626 _argvec[2+1] = (unsigned long)arg1; \
2627 _argvec[2+2] = (unsigned long)arg2; \
2629 VALGRIND_ALIGN_STACK \
2631 "std 2,-16(11)\n\t" /* save tocptr */ \
2632 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2633 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2634 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2635 "ld 11, 0(11)\n\t" /* target->r11 */ \
2636 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2639 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2640 VALGRIND_RESTORE_STACK \
2641 : /*out*/ "=r" (_res) \
2642 : /*in*/ "r" (&_argvec[2]) \
2643 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2645 lval = (__typeof__(lval)) _res; \
2648 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) /* 3-arg hidden call; result -> lval */ \
2650 volatile OrigFn _orig = (orig); \
2651 volatile unsigned long _argvec[3+3]; \
2652 volatile unsigned long _res; \
2653 /* _argvec[0] holds current r2 across the call */ \
2654 _argvec[1] = (unsigned long)_orig.r2; \
2655 _argvec[2] = (unsigned long)_orig.nraddr; \
2656 _argvec[2+1] = (unsigned long)arg1; \
2657 _argvec[2+2] = (unsigned long)arg2; \
2658 _argvec[2+3] = (unsigned long)arg3; \
2660 VALGRIND_ALIGN_STACK \
2662 "std 2,-16(11)\n\t" /* save tocptr */ \
2663 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2664 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2665 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2666 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2667 "ld 11, 0(11)\n\t" /* target->r11 */ \
2668 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2671 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2672 VALGRIND_RESTORE_STACK \
2673 : /*out*/ "=r" (_res) \
2674 : /*in*/ "r" (&_argvec[2]) \
2675 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2677 lval = (__typeof__(lval)) _res; \
2680 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) /* 4-arg hidden call; result -> lval */ \
2682 volatile OrigFn _orig = (orig); \
2683 volatile unsigned long _argvec[3+4]; \
2684 volatile unsigned long _res; \
2685 /* _argvec[0] holds current r2 across the call */ \
2686 _argvec[1] = (unsigned long)_orig.r2; \
2687 _argvec[2] = (unsigned long)_orig.nraddr; \
2688 _argvec[2+1] = (unsigned long)arg1; \
2689 _argvec[2+2] = (unsigned long)arg2; \
2690 _argvec[2+3] = (unsigned long)arg3; \
2691 _argvec[2+4] = (unsigned long)arg4; \
2693 VALGRIND_ALIGN_STACK \
2695 "std 2,-16(11)\n\t" /* save tocptr */ \
2696 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2697 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2698 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2699 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2700 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2701 "ld 11, 0(11)\n\t" /* target->r11 */ \
2702 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2705 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2706 VALGRIND_RESTORE_STACK \
2707 : /*out*/ "=r" (_res) \
2708 : /*in*/ "r" (&_argvec[2]) \
2709 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2711 lval = (__typeof__(lval)) _res; \
2714 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) /* 5-arg hidden call; result -> lval */ \
2716 volatile OrigFn _orig = (orig); \
2717 volatile unsigned long _argvec[3+5]; \
2718 volatile unsigned long _res; \
2719 /* _argvec[0] holds current r2 across the call */ \
2720 _argvec[1] = (unsigned long)_orig.r2; \
2721 _argvec[2] = (unsigned long)_orig.nraddr; \
2722 _argvec[2+1] = (unsigned long)arg1; \
2723 _argvec[2+2] = (unsigned long)arg2; \
2724 _argvec[2+3] = (unsigned long)arg3; \
2725 _argvec[2+4] = (unsigned long)arg4; \
2726 _argvec[2+5] = (unsigned long)arg5; \
2728 VALGRIND_ALIGN_STACK \
2730 "std 2,-16(11)\n\t" /* save tocptr */ \
2731 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2732 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2733 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2734 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2735 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2736 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2737 "ld 11, 0(11)\n\t" /* target->r11 */ \
2738 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2741 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2742 VALGRIND_RESTORE_STACK \
2743 : /*out*/ "=r" (_res) \
2744 : /*in*/ "r" (&_argvec[2]) \
2745 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2747 lval = (__typeof__(lval)) _res; \
2750 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) /* 6-arg hidden call; result -> lval */ \
2752 volatile OrigFn _orig = (orig); \
2753 volatile unsigned long _argvec[3+6]; \
2754 volatile unsigned long _res; \
2755 /* _argvec[0] holds current r2 across the call */ \
2756 _argvec[1] = (unsigned long)_orig.r2; \
2757 _argvec[2] = (unsigned long)_orig.nraddr; \
2758 _argvec[2+1] = (unsigned long)arg1; \
2759 _argvec[2+2] = (unsigned long)arg2; \
2760 _argvec[2+3] = (unsigned long)arg3; \
2761 _argvec[2+4] = (unsigned long)arg4; \
2762 _argvec[2+5] = (unsigned long)arg5; \
2763 _argvec[2+6] = (unsigned long)arg6; \
2765 VALGRIND_ALIGN_STACK \
2767 "std 2,-16(11)\n\t" /* save tocptr */ \
2768 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2769 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2770 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2771 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2772 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2773 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2774 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2775 "ld 11, 0(11)\n\t" /* target->r11 */ \
2776 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2779 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2780 VALGRIND_RESTORE_STACK \
2781 : /*out*/ "=r" (_res) \
2782 : /*in*/ "r" (&_argvec[2]) \
2783 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2785 lval = (__typeof__(lval)) _res; \
2788 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2791 volatile OrigFn _orig = (orig); /* 7-arg hidden call; NOTE(review): the "arg7)" parameter-list continuation line appears dropped by extraction */ \
2792 volatile unsigned long _argvec[3+7]; \
2793 volatile unsigned long _res; \
2794 /* _argvec[0] holds current r2 across the call */ \
2795 _argvec[1] = (unsigned long)_orig.r2; \
2796 _argvec[2] = (unsigned long)_orig.nraddr; \
2797 _argvec[2+1] = (unsigned long)arg1; \
2798 _argvec[2+2] = (unsigned long)arg2; \
2799 _argvec[2+3] = (unsigned long)arg3; \
2800 _argvec[2+4] = (unsigned long)arg4; \
2801 _argvec[2+5] = (unsigned long)arg5; \
2802 _argvec[2+6] = (unsigned long)arg6; \
2803 _argvec[2+7] = (unsigned long)arg7; \
2805 VALGRIND_ALIGN_STACK \
2807 "std 2,-16(11)\n\t" /* save tocptr */ \
2808 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2809 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2810 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2811 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2812 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2813 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2814 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2815 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2816 "ld 11, 0(11)\n\t" /* target->r11 */ \
2817 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2820 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2821 VALGRIND_RESTORE_STACK \
2822 : /*out*/ "=r" (_res) \
2823 : /*in*/ "r" (&_argvec[2]) \
2824 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2826 lval = (__typeof__(lval)) _res; \
2829 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2832 volatile OrigFn _orig = (orig); /* 8-arg hidden call; all args fit in r3..r10 */ \
2833 volatile unsigned long _argvec[3+8]; \
2834 volatile unsigned long _res; \
2835 /* _argvec[0] holds current r2 across the call */ \
2836 _argvec[1] = (unsigned long)_orig.r2; \
2837 _argvec[2] = (unsigned long)_orig.nraddr; \
2838 _argvec[2+1] = (unsigned long)arg1; \
2839 _argvec[2+2] = (unsigned long)arg2; \
2840 _argvec[2+3] = (unsigned long)arg3; \
2841 _argvec[2+4] = (unsigned long)arg4; \
2842 _argvec[2+5] = (unsigned long)arg5; \
2843 _argvec[2+6] = (unsigned long)arg6; \
2844 _argvec[2+7] = (unsigned long)arg7; \
2845 _argvec[2+8] = (unsigned long)arg8; \
2847 VALGRIND_ALIGN_STACK \
2849 "std 2,-16(11)\n\t" /* save tocptr */ \
2850 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2851 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2852 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2853 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2854 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2855 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2856 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2857 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2858 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2859 "ld 11, 0(11)\n\t" /* target->r11 */ \
2860 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2863 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2864 VALGRIND_RESTORE_STACK \
2865 : /*out*/ "=r" (_res) \
2866 : /*in*/ "r" (&_argvec[2]) \
2867 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2869 lval = (__typeof__(lval)) _res; \
2872 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2875 volatile OrigFn _orig = (orig); /* 9-arg hidden call; arg9 overflows to the stack */ \
2876 volatile unsigned long _argvec[3+9]; \
2877 volatile unsigned long _res; \
2878 /* _argvec[0] holds current r2 across the call */ \
2879 _argvec[1] = (unsigned long)_orig.r2; \
2880 _argvec[2] = (unsigned long)_orig.nraddr; \
2881 _argvec[2+1] = (unsigned long)arg1; \
2882 _argvec[2+2] = (unsigned long)arg2; \
2883 _argvec[2+3] = (unsigned long)arg3; \
2884 _argvec[2+4] = (unsigned long)arg4; \
2885 _argvec[2+5] = (unsigned long)arg5; \
2886 _argvec[2+6] = (unsigned long)arg6; \
2887 _argvec[2+7] = (unsigned long)arg7; \
2888 _argvec[2+8] = (unsigned long)arg8; \
2889 _argvec[2+9] = (unsigned long)arg9; \
2891 VALGRIND_ALIGN_STACK \
2893 "std 2,-16(11)\n\t" /* save tocptr */ \
2894 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2895 "addi 1,1,-128\n\t" /* expand stack frame */ \
2898 "std 3,112(1)\n\t" /* arg9 -> stack+112; NOTE(review): its ld from _argvec appears dropped by extraction */ \
2900 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2901 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2902 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2903 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2904 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2905 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2906 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2907 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2908 "ld 11, 0(11)\n\t" /* target->r11 */ \
2909 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2912 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2913 VALGRIND_RESTORE_STACK \
2914 : /*out*/ "=r" (_res) \
2915 : /*in*/ "r" (&_argvec[2]) \
2916 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2918 lval = (__typeof__(lval)) _res; \
2921 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2922 arg7,arg8,arg9,arg10) /* 10-arg hidden call; arg9/arg10 overflow to the stack */ \
2924 volatile OrigFn _orig = (orig); \
2925 volatile unsigned long _argvec[3+10]; \
2926 volatile unsigned long _res; \
2927 /* _argvec[0] holds current r2 across the call */ \
2928 _argvec[1] = (unsigned long)_orig.r2; \
2929 _argvec[2] = (unsigned long)_orig.nraddr; \
2930 _argvec[2+1] = (unsigned long)arg1; \
2931 _argvec[2+2] = (unsigned long)arg2; \
2932 _argvec[2+3] = (unsigned long)arg3; \
2933 _argvec[2+4] = (unsigned long)arg4; \
2934 _argvec[2+5] = (unsigned long)arg5; \
2935 _argvec[2+6] = (unsigned long)arg6; \
2936 _argvec[2+7] = (unsigned long)arg7; \
2937 _argvec[2+8] = (unsigned long)arg8; \
2938 _argvec[2+9] = (unsigned long)arg9; \
2939 _argvec[2+10] = (unsigned long)arg10; \
2941 VALGRIND_ALIGN_STACK \
2943 "std 2,-16(11)\n\t" /* save tocptr */ \
2944 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2945 "addi 1,1,-128\n\t" /* expand stack frame */ \
2948 "std 3,120(1)\n\t" /* arg10 -> stack+120 */ \
2951 "std 3,112(1)\n\t" /* arg9 -> stack+112 */ \
2953 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2954 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2955 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2956 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2957 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2958 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2959 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2960 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2961 "ld 11, 0(11)\n\t" /* target->r11 */ \
2962 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2965 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2966 VALGRIND_RESTORE_STACK \
2967 : /*out*/ "=r" (_res) \
2968 : /*in*/ "r" (&_argvec[2]) \
2969 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2971 lval = (__typeof__(lval)) _res; \
2974 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2975 arg7,arg8,arg9,arg10,arg11) /* 11-arg hidden call; args 9-11 overflow to the stack */ \
2977 volatile OrigFn _orig = (orig); \
2978 volatile unsigned long _argvec[3+11]; \
2979 volatile unsigned long _res; \
2980 /* _argvec[0] holds current r2 across the call */ \
2981 _argvec[1] = (unsigned long)_orig.r2; \
2982 _argvec[2] = (unsigned long)_orig.nraddr; \
2983 _argvec[2+1] = (unsigned long)arg1; \
2984 _argvec[2+2] = (unsigned long)arg2; \
2985 _argvec[2+3] = (unsigned long)arg3; \
2986 _argvec[2+4] = (unsigned long)arg4; \
2987 _argvec[2+5] = (unsigned long)arg5; \
2988 _argvec[2+6] = (unsigned long)arg6; \
2989 _argvec[2+7] = (unsigned long)arg7; \
2990 _argvec[2+8] = (unsigned long)arg8; \
2991 _argvec[2+9] = (unsigned long)arg9; \
2992 _argvec[2+10] = (unsigned long)arg10; \
2993 _argvec[2+11] = (unsigned long)arg11; \
2995 VALGRIND_ALIGN_STACK \
2997 "std 2,-16(11)\n\t" /* save tocptr */ \
2998 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2999 "addi 1,1,-144\n\t" /* expand stack frame */ \
3002 "std 3,128(1)\n\t" /* arg11 -> stack+128 */ \
3005 "std 3,120(1)\n\t" /* arg10 -> stack+120 */ \
3008 "std 3,112(1)\n\t" /* arg9 -> stack+112 */ \
3010 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3011 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3012 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3013 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3014 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3015 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3016 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3017 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3018 "ld 11, 0(11)\n\t" /* target->r11 */ \
3019 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3022 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3023 VALGRIND_RESTORE_STACK \
3024 : /*out*/ "=r" (_res) \
3025 : /*in*/ "r" (&_argvec[2]) \
3026 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3028 lval = (__typeof__(lval)) _res; \
3031 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3032 arg7,arg8,arg9,arg10,arg11,arg12) /* 12-arg hidden call; args 9-12 overflow to the stack */ \
3034 volatile OrigFn _orig = (orig); \
3035 volatile unsigned long _argvec[3+12]; \
3036 volatile unsigned long _res; \
3037 /* _argvec[0] holds current r2 across the call */ \
3038 _argvec[1] = (unsigned long)_orig.r2; \
3039 _argvec[2] = (unsigned long)_orig.nraddr; \
3040 _argvec[2+1] = (unsigned long)arg1; \
3041 _argvec[2+2] = (unsigned long)arg2; \
3042 _argvec[2+3] = (unsigned long)arg3; \
3043 _argvec[2+4] = (unsigned long)arg4; \
3044 _argvec[2+5] = (unsigned long)arg5; \
3045 _argvec[2+6] = (unsigned long)arg6; \
3046 _argvec[2+7] = (unsigned long)arg7; \
3047 _argvec[2+8] = (unsigned long)arg8; \
3048 _argvec[2+9] = (unsigned long)arg9; \
3049 _argvec[2+10] = (unsigned long)arg10; \
3050 _argvec[2+11] = (unsigned long)arg11; \
3051 _argvec[2+12] = (unsigned long)arg12; \
3053 VALGRIND_ALIGN_STACK \
3055 "std 2,-16(11)\n\t" /* save tocptr */ \
3056 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3057 "addi 1,1,-144\n\t" /* expand stack frame */ \
3060 "std 3,136(1)\n\t" /* arg12 -> stack+136 */ \
3063 "std 3,128(1)\n\t" /* arg11 -> stack+128 */ \
3066 "std 3,120(1)\n\t" /* arg10 -> stack+120 */ \
3069 "std 3,112(1)\n\t" /* arg9 -> stack+112 */ \
3071 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3072 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3073 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3074 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3075 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3076 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3077 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3078 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3079 "ld 11, 0(11)\n\t" /* target->r11 */ \
3080 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3083 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3084 VALGRIND_RESTORE_STACK \
3085 : /*out*/ "=r" (_res) \
3086 : /*in*/ "r" (&_argvec[2]) \
3087 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3089 lval = (__typeof__(lval)) _res; \
3092 #endif /* PLAT_ppc64_linux */
3094 /* ------------------------- arm-linux ------------------------- */
3096 #if defined(PLAT_arm_linux)
3098 /* These regs are trashed by the hidden call. */
/* r0-r3 carry the call arguments (see the "ldr r0..r3" loads in the
   CALL_FN_* macros below), r4 is loaded with the call target, and r14
   (lr) is overwritten by the branch-and-link, so the hidden call
   trashes all of them. */
3099 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3101 /* Macros to save and align the stack before making a function
3102 call and restore it afterwards as gcc may not keep the stack
3103 pointer aligned if it doesn't realise calls are being made
3104 to other functions. */
3106 /* This is a bit tricky. We store the original stack pointer in r10
3107 as it is callee-saves. gcc doesn't allow the use of r11 for some
3108 reason. Also, we can't directly "bic" the stack pointer in thumb
3109 mode since r13 isn't an allowed register number in that context.
3110 So use r4 as a temporary, since that is about to get trashed
3111 anyway, just after each use of this macro. Side effect is we need
3112 to be very careful about any future changes, since
3113 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
3114 #define VALGRIND_ALIGN_STACK \
3117 "bic r4, r4, #7\n\t" \
3119 #define VALGRIND_RESTORE_STACK \
3122 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned long) == 4. */
3125 #define CALL_FN_W_v(lval, orig) \
3127 volatile OrigFn _orig = (orig); \
3128 volatile unsigned long _argvec[1]; \
3129 volatile unsigned long _res; \
3130 _argvec[0] = (unsigned long)_orig.nraddr; \
3132 VALGRIND_ALIGN_STACK \
3133 "ldr r4, [%1] \n\t" /* target->r4 */ \
3134 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3135 VALGRIND_RESTORE_STACK \
3137 : /*out*/ "=r" (_res) \
3138 : /*in*/ "0" (&_argvec[0]) \
3139 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3141 lval = (__typeof__(lval)) _res; \
3144 #define CALL_FN_W_W(lval, orig, arg1) \
3146 volatile OrigFn _orig = (orig); \
3147 volatile unsigned long _argvec[2]; \
3148 volatile unsigned long _res; \
3149 _argvec[0] = (unsigned long)_orig.nraddr; \
3150 _argvec[1] = (unsigned long)(arg1); \
3152 VALGRIND_ALIGN_STACK \
3153 "ldr r0, [%1, #4] \n\t" \
3154 "ldr r4, [%1] \n\t" /* target->r4 */ \
3155 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3156 VALGRIND_RESTORE_STACK \
3158 : /*out*/ "=r" (_res) \
3159 : /*in*/ "0" (&_argvec[0]) \
3160 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3162 lval = (__typeof__(lval)) _res; \
3165 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3167 volatile OrigFn _orig = (orig); \
3168 volatile unsigned long _argvec[3]; \
3169 volatile unsigned long _res; \
3170 _argvec[0] = (unsigned long)_orig.nraddr; \
3171 _argvec[1] = (unsigned long)(arg1); \
3172 _argvec[2] = (unsigned long)(arg2); \
3174 VALGRIND_ALIGN_STACK \
3175 "ldr r0, [%1, #4] \n\t" \
3176 "ldr r1, [%1, #8] \n\t" \
3177 "ldr r4, [%1] \n\t" /* target->r4 */ \
3178 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3179 VALGRIND_RESTORE_STACK \
3181 : /*out*/ "=r" (_res) \
3182 : /*in*/ "0" (&_argvec[0]) \
3183 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3185 lval = (__typeof__(lval)) _res; \
3188 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3190 volatile OrigFn _orig = (orig); \
3191 volatile unsigned long _argvec[4]; \
3192 volatile unsigned long _res; \
3193 _argvec[0] = (unsigned long)_orig.nraddr; \
3194 _argvec[1] = (unsigned long)(arg1); \
3195 _argvec[2] = (unsigned long)(arg2); \
3196 _argvec[3] = (unsigned long)(arg3); \
3198 VALGRIND_ALIGN_STACK \
3199 "ldr r0, [%1, #4] \n\t" \
3200 "ldr r1, [%1, #8] \n\t" \
3201 "ldr r2, [%1, #12] \n\t" \
3202 "ldr r4, [%1] \n\t" /* target->r4 */ \
3203 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3204 VALGRIND_RESTORE_STACK \
3206 : /*out*/ "=r" (_res) \
3207 : /*in*/ "0" (&_argvec[0]) \
3208 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3210 lval = (__typeof__(lval)) _res; \
3213 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3215 volatile OrigFn _orig = (orig); \
3216 volatile unsigned long _argvec[5]; \
3217 volatile unsigned long _res; \
3218 _argvec[0] = (unsigned long)_orig.nraddr; \
3219 _argvec[1] = (unsigned long)(arg1); \
3220 _argvec[2] = (unsigned long)(arg2); \
3221 _argvec[3] = (unsigned long)(arg3); \
3222 _argvec[4] = (unsigned long)(arg4); \
3224 VALGRIND_ALIGN_STACK \
3225 "ldr r0, [%1, #4] \n\t" \
3226 "ldr r1, [%1, #8] \n\t" \
3227 "ldr r2, [%1, #12] \n\t" \
3228 "ldr r3, [%1, #16] \n\t" \
3229 "ldr r4, [%1] \n\t" /* target->r4 */ \
3230 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3231 VALGRIND_RESTORE_STACK \
3233 : /*out*/ "=r" (_res) \
3234 : /*in*/ "0" (&_argvec[0]) \
3235 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3237 lval = (__typeof__(lval)) _res; \
3240 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3242 volatile OrigFn _orig = (orig); \
3243 volatile unsigned long _argvec[6]; \
3244 volatile unsigned long _res; \
3245 _argvec[0] = (unsigned long)_orig.nraddr; \
3246 _argvec[1] = (unsigned long)(arg1); \
3247 _argvec[2] = (unsigned long)(arg2); \
3248 _argvec[3] = (unsigned long)(arg3); \
3249 _argvec[4] = (unsigned long)(arg4); \
3250 _argvec[5] = (unsigned long)(arg5); \
3252 VALGRIND_ALIGN_STACK \
3253 "sub sp, sp, #4 \n\t" \
3254 "ldr r0, [%1, #20] \n\t" \
3256 "ldr r0, [%1, #4] \n\t" \
3257 "ldr r1, [%1, #8] \n\t" \
3258 "ldr r2, [%1, #12] \n\t" \
3259 "ldr r3, [%1, #16] \n\t" \
3260 "ldr r4, [%1] \n\t" /* target->r4 */ \
3261 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3262 VALGRIND_RESTORE_STACK \
3264 : /*out*/ "=r" (_res) \
3265 : /*in*/ "0" (&_argvec[0]) \
3266 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3268 lval = (__typeof__(lval)) _res; \
3271 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3273 volatile OrigFn _orig = (orig); \
3274 volatile unsigned long _argvec[7]; \
3275 volatile unsigned long _res; \
3276 _argvec[0] = (unsigned long)_orig.nraddr; \
3277 _argvec[1] = (unsigned long)(arg1); \
3278 _argvec[2] = (unsigned long)(arg2); \
3279 _argvec[3] = (unsigned long)(arg3); \
3280 _argvec[4] = (unsigned long)(arg4); \
3281 _argvec[5] = (unsigned long)(arg5); \
3282 _argvec[6] = (unsigned long)(arg6); \
3284 VALGRIND_ALIGN_STACK \
3285 "ldr r0, [%1, #20] \n\t" \
3286 "ldr r1, [%1, #24] \n\t" \
3287 "push {r0, r1} \n\t" \
3288 "ldr r0, [%1, #4] \n\t" \
3289 "ldr r1, [%1, #8] \n\t" \
3290 "ldr r2, [%1, #12] \n\t" \
3291 "ldr r3, [%1, #16] \n\t" \
3292 "ldr r4, [%1] \n\t" /* target->r4 */ \
3293 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3294 VALGRIND_RESTORE_STACK \
3296 : /*out*/ "=r" (_res) \
3297 : /*in*/ "0" (&_argvec[0]) \
3298 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3300 lval = (__typeof__(lval)) _res; \
3303 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3306 volatile OrigFn _orig = (orig); \
3307 volatile unsigned long _argvec[8]; \
3308 volatile unsigned long _res; \
3309 _argvec[0] = (unsigned long)_orig.nraddr; \
3310 _argvec[1] = (unsigned long)(arg1); \
3311 _argvec[2] = (unsigned long)(arg2); \
3312 _argvec[3] = (unsigned long)(arg3); \
3313 _argvec[4] = (unsigned long)(arg4); \
3314 _argvec[5] = (unsigned long)(arg5); \
3315 _argvec[6] = (unsigned long)(arg6); \
3316 _argvec[7] = (unsigned long)(arg7); \
3318 VALGRIND_ALIGN_STACK \
3319 "sub sp, sp, #4 \n\t" \
3320 "ldr r0, [%1, #20] \n\t" \
3321 "ldr r1, [%1, #24] \n\t" \
3322 "ldr r2, [%1, #28] \n\t" \
3323 "push {r0, r1, r2} \n\t" \
3324 "ldr r0, [%1, #4] \n\t" \
3325 "ldr r1, [%1, #8] \n\t" \
3326 "ldr r2, [%1, #12] \n\t" \
3327 "ldr r3, [%1, #16] \n\t" \
3328 "ldr r4, [%1] \n\t" /* target->r4 */ \
3329 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3330 VALGRIND_RESTORE_STACK \
3332 : /*out*/ "=r" (_res) \
3333 : /*in*/ "0" (&_argvec[0]) \
3334 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3336 lval = (__typeof__(lval)) _res; \
3339 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3342 volatile OrigFn _orig = (orig); \
3343 volatile unsigned long _argvec[9]; \
3344 volatile unsigned long _res; \
3345 _argvec[0] = (unsigned long)_orig.nraddr; \
3346 _argvec[1] = (unsigned long)(arg1); \
3347 _argvec[2] = (unsigned long)(arg2); \
3348 _argvec[3] = (unsigned long)(arg3); \
3349 _argvec[4] = (unsigned long)(arg4); \
3350 _argvec[5] = (unsigned long)(arg5); \
3351 _argvec[6] = (unsigned long)(arg6); \
3352 _argvec[7] = (unsigned long)(arg7); \
3353 _argvec[8] = (unsigned long)(arg8); \
3355 VALGRIND_ALIGN_STACK \
3356 "ldr r0, [%1, #20] \n\t" \
3357 "ldr r1, [%1, #24] \n\t" \
3358 "ldr r2, [%1, #28] \n\t" \
3359 "ldr r3, [%1, #32] \n\t" \
3360 "push {r0, r1, r2, r3} \n\t" \
3361 "ldr r0, [%1, #4] \n\t" \
3362 "ldr r1, [%1, #8] \n\t" \
3363 "ldr r2, [%1, #12] \n\t" \
3364 "ldr r3, [%1, #16] \n\t" \
3365 "ldr r4, [%1] \n\t" /* target->r4 */ \
3366 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3367 VALGRIND_RESTORE_STACK \
3369 : /*out*/ "=r" (_res) \
3370 : /*in*/ "0" (&_argvec[0]) \
3371 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3373 lval = (__typeof__(lval)) _res; \
3376 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3379 volatile OrigFn _orig = (orig); \
3380 volatile unsigned long _argvec[10]; \
3381 volatile unsigned long _res; \
3382 _argvec[0] = (unsigned long)_orig.nraddr; \
3383 _argvec[1] = (unsigned long)(arg1); \
3384 _argvec[2] = (unsigned long)(arg2); \
3385 _argvec[3] = (unsigned long)(arg3); \
3386 _argvec[4] = (unsigned long)(arg4); \
3387 _argvec[5] = (unsigned long)(arg5); \
3388 _argvec[6] = (unsigned long)(arg6); \
3389 _argvec[7] = (unsigned long)(arg7); \
3390 _argvec[8] = (unsigned long)(arg8); \
3391 _argvec[9] = (unsigned long)(arg9); \
3393 VALGRIND_ALIGN_STACK \
3394 "sub sp, sp, #4 \n\t" \
3395 "ldr r0, [%1, #20] \n\t" \
3396 "ldr r1, [%1, #24] \n\t" \
3397 "ldr r2, [%1, #28] \n\t" \
3398 "ldr r3, [%1, #32] \n\t" \
3399 "ldr r4, [%1, #36] \n\t" \
3400 "push {r0, r1, r2, r3, r4} \n\t" \
3401 "ldr r0, [%1, #4] \n\t" \
3402 "ldr r1, [%1, #8] \n\t" \
3403 "ldr r2, [%1, #12] \n\t" \
3404 "ldr r3, [%1, #16] \n\t" \
3405 "ldr r4, [%1] \n\t" /* target->r4 */ \
3406 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3407 VALGRIND_RESTORE_STACK \
3409 : /*out*/ "=r" (_res) \
3410 : /*in*/ "0" (&_argvec[0]) \
3411 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3413 lval = (__typeof__(lval)) _res; \
3416 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3417 arg7,arg8,arg9,arg10) \
3419 volatile OrigFn _orig = (orig); \
3420 volatile unsigned long _argvec[11]; \
3421 volatile unsigned long _res; \
3422 _argvec[0] = (unsigned long)_orig.nraddr; \
3423 _argvec[1] = (unsigned long)(arg1); \
3424 _argvec[2] = (unsigned long)(arg2); \
3425 _argvec[3] = (unsigned long)(arg3); \
3426 _argvec[4] = (unsigned long)(arg4); \
3427 _argvec[5] = (unsigned long)(arg5); \
3428 _argvec[6] = (unsigned long)(arg6); \
3429 _argvec[7] = (unsigned long)(arg7); \
3430 _argvec[8] = (unsigned long)(arg8); \
3431 _argvec[9] = (unsigned long)(arg9); \
3432 _argvec[10] = (unsigned long)(arg10); \
3434 VALGRIND_ALIGN_STACK \
3435 "ldr r0, [%1, #40] \n\t" \
3437 "ldr r0, [%1, #20] \n\t" \
3438 "ldr r1, [%1, #24] \n\t" \
3439 "ldr r2, [%1, #28] \n\t" \
3440 "ldr r3, [%1, #32] \n\t" \
3441 "ldr r4, [%1, #36] \n\t" \
3442 "push {r0, r1, r2, r3, r4} \n\t" \
3443 "ldr r0, [%1, #4] \n\t" \
3444 "ldr r1, [%1, #8] \n\t" \
3445 "ldr r2, [%1, #12] \n\t" \
3446 "ldr r3, [%1, #16] \n\t" \
3447 "ldr r4, [%1] \n\t" /* target->r4 */ \
3448 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3449 VALGRIND_RESTORE_STACK \
3451 : /*out*/ "=r" (_res) \
3452 : /*in*/ "0" (&_argvec[0]) \
3453 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3455 lval = (__typeof__(lval)) _res; \
3458 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3459 arg6,arg7,arg8,arg9,arg10, \
3462 volatile OrigFn _orig = (orig); \
3463 volatile unsigned long _argvec[12]; \
3464 volatile unsigned long _res; \
3465 _argvec[0] = (unsigned long)_orig.nraddr; \
3466 _argvec[1] = (unsigned long)(arg1); \
3467 _argvec[2] = (unsigned long)(arg2); \
3468 _argvec[3] = (unsigned long)(arg3); \
3469 _argvec[4] = (unsigned long)(arg4); \
3470 _argvec[5] = (unsigned long)(arg5); \
3471 _argvec[6] = (unsigned long)(arg6); \
3472 _argvec[7] = (unsigned long)(arg7); \
3473 _argvec[8] = (unsigned long)(arg8); \
3474 _argvec[9] = (unsigned long)(arg9); \
3475 _argvec[10] = (unsigned long)(arg10); \
3476 _argvec[11] = (unsigned long)(arg11); \
3478 VALGRIND_ALIGN_STACK \
3479 "sub sp, sp, #4 \n\t" \
3480 "ldr r0, [%1, #40] \n\t" \
3481 "ldr r1, [%1, #44] \n\t" \
3482 "push {r0, r1} \n\t" \
3483 "ldr r0, [%1, #20] \n\t" \
3484 "ldr r1, [%1, #24] \n\t" \
3485 "ldr r2, [%1, #28] \n\t" \
3486 "ldr r3, [%1, #32] \n\t" \
3487 "ldr r4, [%1, #36] \n\t" \
3488 "push {r0, r1, r2, r3, r4} \n\t" \
3489 "ldr r0, [%1, #4] \n\t" \
3490 "ldr r1, [%1, #8] \n\t" \
3491 "ldr r2, [%1, #12] \n\t" \
3492 "ldr r3, [%1, #16] \n\t" \
3493 "ldr r4, [%1] \n\t" /* target->r4 */ \
3494 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3495 VALGRIND_RESTORE_STACK \
3497 : /*out*/ "=r" (_res) \
3498 : /*in*/ "0" (&_argvec[0]) \
3499 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3501 lval = (__typeof__(lval)) _res; \
3504 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3505 arg6,arg7,arg8,arg9,arg10, \
3508 volatile OrigFn _orig = (orig); \
3509 volatile unsigned long _argvec[13]; \
3510 volatile unsigned long _res; \
3511 _argvec[0] = (unsigned long)_orig.nraddr; \
3512 _argvec[1] = (unsigned long)(arg1); \
3513 _argvec[2] = (unsigned long)(arg2); \
3514 _argvec[3] = (unsigned long)(arg3); \
3515 _argvec[4] = (unsigned long)(arg4); \
3516 _argvec[5] = (unsigned long)(arg5); \
3517 _argvec[6] = (unsigned long)(arg6); \
3518 _argvec[7] = (unsigned long)(arg7); \
3519 _argvec[8] = (unsigned long)(arg8); \
3520 _argvec[9] = (unsigned long)(arg9); \
3521 _argvec[10] = (unsigned long)(arg10); \
3522 _argvec[11] = (unsigned long)(arg11); \
3523 _argvec[12] = (unsigned long)(arg12); \
3525 VALGRIND_ALIGN_STACK \
3526 "ldr r0, [%1, #40] \n\t" \
3527 "ldr r1, [%1, #44] \n\t" \
3528 "ldr r2, [%1, #48] \n\t" \
3529 "push {r0, r1, r2} \n\t" \
3530 "ldr r0, [%1, #20] \n\t" \
3531 "ldr r1, [%1, #24] \n\t" \
3532 "ldr r2, [%1, #28] \n\t" \
3533 "ldr r3, [%1, #32] \n\t" \
3534 "ldr r4, [%1, #36] \n\t" \
3535 "push {r0, r1, r2, r3, r4} \n\t" \
3536 "ldr r0, [%1, #4] \n\t" \
3537 "ldr r1, [%1, #8] \n\t" \
3538 "ldr r2, [%1, #12] \n\t" \
3539 "ldr r3, [%1, #16] \n\t" \
3540 "ldr r4, [%1] \n\t" /* target->r4 */ \
3541 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3542 VALGRIND_RESTORE_STACK \
3544 : /*out*/ "=r" (_res) \
3545 : /*in*/ "0" (&_argvec[0]) \
3546 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3548 lval = (__typeof__(lval)) _res; \
3551 #endif /* PLAT_arm_linux */
3553 /* ------------------------ arm64-linux ------------------------ */
3555 #if defined(PLAT_arm64_linux)
3557 /* These regs are trashed by the hidden call. */
/* Everything the hidden call may clobber: integer regs x0-x20 and
   x30 (lr), plus the full SIMD/FP bank v0-v31.  x8 doubles as the
   call-target register in the CALL_FN_* macros below ("ldr x8, [%1]").
   NOTE(review): x19/x20 are normally callee-saved yet are listed here;
   presumably deliberate -- confirm against the VEX dispatcher. */
3558 #define __CALLER_SAVED_REGS \
3559 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
3560 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
3561 "x18", "x19", "x20", "x30", \
3562 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
3563 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
3564 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
3565 "v26", "v27", "v28", "v29", "v30", "v31"
3567 /* x21 is callee-saved, so we can use it to save and restore SP around the hidden call. */
3569 #define VALGRIND_ALIGN_STACK \
3571 "bic sp, x21, #15\n\t"
3572 #define VALGRIND_RESTORE_STACK \
3575 /* These CALL_FN_ macros assume that on arm64-linux,
3576 sizeof(unsigned long) == 8. */
3578 #define CALL_FN_W_v(lval, orig) \
3580 volatile OrigFn _orig = (orig); \
3581 volatile unsigned long _argvec[1]; \
3582 volatile unsigned long _res; \
3583 _argvec[0] = (unsigned long)_orig.nraddr; \
3585 VALGRIND_ALIGN_STACK \
3586 "ldr x8, [%1] \n\t" /* target->x8 */ \
3587 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3588 VALGRIND_RESTORE_STACK \
3590 : /*out*/ "=r" (_res) \
3591 : /*in*/ "0" (&_argvec[0]) \
3592 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3594 lval = (__typeof__(lval)) _res; \
3597 #define CALL_FN_W_W(lval, orig, arg1) \
3599 volatile OrigFn _orig = (orig); \
3600 volatile unsigned long _argvec[2]; \
3601 volatile unsigned long _res; \
3602 _argvec[0] = (unsigned long)_orig.nraddr; \
3603 _argvec[1] = (unsigned long)(arg1); \
3605 VALGRIND_ALIGN_STACK \
3606 "ldr x0, [%1, #8] \n\t" \
3607 "ldr x8, [%1] \n\t" /* target->x8 */ \
3608 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3609 VALGRIND_RESTORE_STACK \
3611 : /*out*/ "=r" (_res) \
3612 : /*in*/ "0" (&_argvec[0]) \
3613 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3615 lval = (__typeof__(lval)) _res; \
3618 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3620 volatile OrigFn _orig = (orig); \
3621 volatile unsigned long _argvec[3]; \
3622 volatile unsigned long _res; \
3623 _argvec[0] = (unsigned long)_orig.nraddr; \
3624 _argvec[1] = (unsigned long)(arg1); \
3625 _argvec[2] = (unsigned long)(arg2); \
3627 VALGRIND_ALIGN_STACK \
3628 "ldr x0, [%1, #8] \n\t" \
3629 "ldr x1, [%1, #16] \n\t" \
3630 "ldr x8, [%1] \n\t" /* target->x8 */ \
3631 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3632 VALGRIND_RESTORE_STACK \
3634 : /*out*/ "=r" (_res) \
3635 : /*in*/ "0" (&_argvec[0]) \
3636 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3638 lval = (__typeof__(lval)) _res; \
3641 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3643 volatile OrigFn _orig = (orig); \
3644 volatile unsigned long _argvec[4]; \
3645 volatile unsigned long _res; \
3646 _argvec[0] = (unsigned long)_orig.nraddr; \
3647 _argvec[1] = (unsigned long)(arg1); \
3648 _argvec[2] = (unsigned long)(arg2); \
3649 _argvec[3] = (unsigned long)(arg3); \
3651 VALGRIND_ALIGN_STACK \
3652 "ldr x0, [%1, #8] \n\t" \
3653 "ldr x1, [%1, #16] \n\t" \
3654 "ldr x2, [%1, #24] \n\t" \
3655 "ldr x8, [%1] \n\t" /* target->x8 */ \
3656 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3657 VALGRIND_RESTORE_STACK \
3659 : /*out*/ "=r" (_res) \
3660 : /*in*/ "0" (&_argvec[0]) \
3661 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3663 lval = (__typeof__(lval)) _res; \
3666 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3668 volatile OrigFn _orig = (orig); \
3669 volatile unsigned long _argvec[5]; \
3670 volatile unsigned long _res; \
3671 _argvec[0] = (unsigned long)_orig.nraddr; \
3672 _argvec[1] = (unsigned long)(arg1); \
3673 _argvec[2] = (unsigned long)(arg2); \
3674 _argvec[3] = (unsigned long)(arg3); \
3675 _argvec[4] = (unsigned long)(arg4); \
3677 VALGRIND_ALIGN_STACK \
3678 "ldr x0, [%1, #8] \n\t" \
3679 "ldr x1, [%1, #16] \n\t" \
3680 "ldr x2, [%1, #24] \n\t" \
3681 "ldr x3, [%1, #32] \n\t" \
3682 "ldr x8, [%1] \n\t" /* target->x8 */ \
3683 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3684 VALGRIND_RESTORE_STACK \
3686 : /*out*/ "=r" (_res) \
3687 : /*in*/ "0" (&_argvec[0]) \
3688 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3690 lval = (__typeof__(lval)) _res; \
3693 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3695 volatile OrigFn _orig = (orig); \
3696 volatile unsigned long _argvec[6]; \
3697 volatile unsigned long _res; \
3698 _argvec[0] = (unsigned long)_orig.nraddr; \
3699 _argvec[1] = (unsigned long)(arg1); \
3700 _argvec[2] = (unsigned long)(arg2); \
3701 _argvec[3] = (unsigned long)(arg3); \
3702 _argvec[4] = (unsigned long)(arg4); \
3703 _argvec[5] = (unsigned long)(arg5); \
3705 VALGRIND_ALIGN_STACK \
3706 "ldr x0, [%1, #8] \n\t" \
3707 "ldr x1, [%1, #16] \n\t" \
3708 "ldr x2, [%1, #24] \n\t" \
3709 "ldr x3, [%1, #32] \n\t" \
3710 "ldr x4, [%1, #40] \n\t" \
3711 "ldr x8, [%1] \n\t" /* target->x8 */ \
3712 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3713 VALGRIND_RESTORE_STACK \
3715 : /*out*/ "=r" (_res) \
3716 : /*in*/ "0" (&_argvec[0]) \
3717 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3719 lval = (__typeof__(lval)) _res; \
3722 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3724 volatile OrigFn _orig = (orig); \
3725 volatile unsigned long _argvec[7]; \
3726 volatile unsigned long _res; \
3727 _argvec[0] = (unsigned long)_orig.nraddr; \
3728 _argvec[1] = (unsigned long)(arg1); \
3729 _argvec[2] = (unsigned long)(arg2); \
3730 _argvec[3] = (unsigned long)(arg3); \
3731 _argvec[4] = (unsigned long)(arg4); \
3732 _argvec[5] = (unsigned long)(arg5); \
3733 _argvec[6] = (unsigned long)(arg6); \
3735 VALGRIND_ALIGN_STACK \
3736 "ldr x0, [%1, #8] \n\t" \
3737 "ldr x1, [%1, #16] \n\t" \
3738 "ldr x2, [%1, #24] \n\t" \
3739 "ldr x3, [%1, #32] \n\t" \
3740 "ldr x4, [%1, #40] \n\t" \
3741 "ldr x5, [%1, #48] \n\t" \
3742 "ldr x8, [%1] \n\t" /* target->x8 */ \
3743 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3744 VALGRIND_RESTORE_STACK \
3746 : /*out*/ "=r" (_res) \
3747 : /*in*/ "0" (&_argvec[0]) \
3748 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3750 lval = (__typeof__(lval)) _res; \
3753 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3756 volatile OrigFn _orig = (orig); \
3757 volatile unsigned long _argvec[8]; \
3758 volatile unsigned long _res; \
3759 _argvec[0] = (unsigned long)_orig.nraddr; \
3760 _argvec[1] = (unsigned long)(arg1); \
3761 _argvec[2] = (unsigned long)(arg2); \
3762 _argvec[3] = (unsigned long)(arg3); \
3763 _argvec[4] = (unsigned long)(arg4); \
3764 _argvec[5] = (unsigned long)(arg5); \
3765 _argvec[6] = (unsigned long)(arg6); \
3766 _argvec[7] = (unsigned long)(arg7); \
3768 VALGRIND_ALIGN_STACK \
3769 "ldr x0, [%1, #8] \n\t" \
3770 "ldr x1, [%1, #16] \n\t" \
3771 "ldr x2, [%1, #24] \n\t" \
3772 "ldr x3, [%1, #32] \n\t" \
3773 "ldr x4, [%1, #40] \n\t" \
3774 "ldr x5, [%1, #48] \n\t" \
3775 "ldr x6, [%1, #56] \n\t" \
3776 "ldr x8, [%1] \n\t" /* target->x8 */ \
3777 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3778 VALGRIND_RESTORE_STACK \
3780 : /*out*/ "=r" (_res) \
3781 : /*in*/ "0" (&_argvec[0]) \
3782 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3784 lval = (__typeof__(lval)) _res; \
3787 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3790 volatile OrigFn _orig = (orig); \
3791 volatile unsigned long _argvec[9]; \
3792 volatile unsigned long _res; \
3793 _argvec[0] = (unsigned long)_orig.nraddr; \
3794 _argvec[1] = (unsigned long)(arg1); \
3795 _argvec[2] = (unsigned long)(arg2); \
3796 _argvec[3] = (unsigned long)(arg3); \
3797 _argvec[4] = (unsigned long)(arg4); \
3798 _argvec[5] = (unsigned long)(arg5); \
3799 _argvec[6] = (unsigned long)(arg6); \
3800 _argvec[7] = (unsigned long)(arg7); \
3801 _argvec[8] = (unsigned long)(arg8); \
3803 VALGRIND_ALIGN_STACK \
3804 "ldr x0, [%1, #8] \n\t" \
3805 "ldr x1, [%1, #16] \n\t" \
3806 "ldr x2, [%1, #24] \n\t" \
3807 "ldr x3, [%1, #32] \n\t" \
3808 "ldr x4, [%1, #40] \n\t" \
3809 "ldr x5, [%1, #48] \n\t" \
3810 "ldr x6, [%1, #56] \n\t" \
3811 "ldr x7, [%1, #64] \n\t" \
3812 "ldr x8, [%1] \n\t" /* target->x8 */ \
3813 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3814 VALGRIND_RESTORE_STACK \
3816 : /*out*/ "=r" (_res) \
3817 : /*in*/ "0" (&_argvec[0]) \
3818 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3820 lval = (__typeof__(lval)) _res; \
3823 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3826 volatile OrigFn _orig = (orig); \
3827 volatile unsigned long _argvec[10]; \
3828 volatile unsigned long _res; \
3829 _argvec[0] = (unsigned long)_orig.nraddr; \
3830 _argvec[1] = (unsigned long)(arg1); \
3831 _argvec[2] = (unsigned long)(arg2); \
3832 _argvec[3] = (unsigned long)(arg3); \
3833 _argvec[4] = (unsigned long)(arg4); \
3834 _argvec[5] = (unsigned long)(arg5); \
3835 _argvec[6] = (unsigned long)(arg6); \
3836 _argvec[7] = (unsigned long)(arg7); \
3837 _argvec[8] = (unsigned long)(arg8); \
3838 _argvec[9] = (unsigned long)(arg9); \
3840 VALGRIND_ALIGN_STACK \
3841 "sub sp, sp, #0x20 \n\t" \
3842 "ldr x0, [%1, #8] \n\t" \
3843 "ldr x1, [%1, #16] \n\t" \
3844 "ldr x2, [%1, #24] \n\t" \
3845 "ldr x3, [%1, #32] \n\t" \
3846 "ldr x4, [%1, #40] \n\t" \
3847 "ldr x5, [%1, #48] \n\t" \
3848 "ldr x6, [%1, #56] \n\t" \
3849 "ldr x7, [%1, #64] \n\t" \
3850 "ldr x8, [%1, #72] \n\t" \
3851 "str x8, [sp, #0] \n\t" \
3852 "ldr x8, [%1] \n\t" /* target->x8 */ \
3853 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3854 VALGRIND_RESTORE_STACK \
3856 : /*out*/ "=r" (_res) \
3857 : /*in*/ "0" (&_argvec[0]) \
3858 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3860 lval = (__typeof__(lval)) _res; \
3863 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3864 arg7,arg8,arg9,arg10) \
3866 volatile OrigFn _orig = (orig); \
3867 volatile unsigned long _argvec[11]; \
3868 volatile unsigned long _res; \
3869 _argvec[0] = (unsigned long)_orig.nraddr; \
3870 _argvec[1] = (unsigned long)(arg1); \
3871 _argvec[2] = (unsigned long)(arg2); \
3872 _argvec[3] = (unsigned long)(arg3); \
3873 _argvec[4] = (unsigned long)(arg4); \
3874 _argvec[5] = (unsigned long)(arg5); \
3875 _argvec[6] = (unsigned long)(arg6); \
3876 _argvec[7] = (unsigned long)(arg7); \
3877 _argvec[8] = (unsigned long)(arg8); \
3878 _argvec[9] = (unsigned long)(arg9); \
3879 _argvec[10] = (unsigned long)(arg10); \
3881 VALGRIND_ALIGN_STACK \
3882 "sub sp, sp, #0x20 \n\t" \
3883 "ldr x0, [%1, #8] \n\t" \
3884 "ldr x1, [%1, #16] \n\t" \
3885 "ldr x2, [%1, #24] \n\t" \
3886 "ldr x3, [%1, #32] \n\t" \
3887 "ldr x4, [%1, #40] \n\t" \
3888 "ldr x5, [%1, #48] \n\t" \
3889 "ldr x6, [%1, #56] \n\t" \
3890 "ldr x7, [%1, #64] \n\t" \
3891 "ldr x8, [%1, #72] \n\t" \
3892 "str x8, [sp, #0] \n\t" \
3893 "ldr x8, [%1, #80] \n\t" \
3894 "str x8, [sp, #8] \n\t" \
3895 "ldr x8, [%1] \n\t" /* target->x8 */ \
3896 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3897 VALGRIND_RESTORE_STACK \
3899 : /*out*/ "=r" (_res) \
3900 : /*in*/ "0" (&_argvec[0]) \
3901 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3903 lval = (__typeof__(lval)) _res; \
3906 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3907 arg7,arg8,arg9,arg10,arg11) \
3909 volatile OrigFn _orig = (orig); \
3910 volatile unsigned long _argvec[12]; \
3911 volatile unsigned long _res; \
3912 _argvec[0] = (unsigned long)_orig.nraddr; \
3913 _argvec[1] = (unsigned long)(arg1); \
3914 _argvec[2] = (unsigned long)(arg2); \
3915 _argvec[3] = (unsigned long)(arg3); \
3916 _argvec[4] = (unsigned long)(arg4); \
3917 _argvec[5] = (unsigned long)(arg5); \
3918 _argvec[6] = (unsigned long)(arg6); \
3919 _argvec[7] = (unsigned long)(arg7); \
3920 _argvec[8] = (unsigned long)(arg8); \
3921 _argvec[9] = (unsigned long)(arg9); \
3922 _argvec[10] = (unsigned long)(arg10); \
3923 _argvec[11] = (unsigned long)(arg11); \
3925 VALGRIND_ALIGN_STACK \
3926 "sub sp, sp, #0x30 \n\t" \
3927 "ldr x0, [%1, #8] \n\t" \
3928 "ldr x1, [%1, #16] \n\t" \
3929 "ldr x2, [%1, #24] \n\t" \
3930 "ldr x3, [%1, #32] \n\t" \
3931 "ldr x4, [%1, #40] \n\t" \
3932 "ldr x5, [%1, #48] \n\t" \
3933 "ldr x6, [%1, #56] \n\t" \
3934 "ldr x7, [%1, #64] \n\t" \
3935 "ldr x8, [%1, #72] \n\t" \
3936 "str x8, [sp, #0] \n\t" \
3937 "ldr x8, [%1, #80] \n\t" \
3938 "str x8, [sp, #8] \n\t" \
3939 "ldr x8, [%1, #88] \n\t" \
3940 "str x8, [sp, #16] \n\t" \
3941 "ldr x8, [%1] \n\t" /* target->x8 */ \
3942 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3943 VALGRIND_RESTORE_STACK \
3945 : /*out*/ "=r" (_res) \
3946 : /*in*/ "0" (&_argvec[0]) \
3947 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3949 lval = (__typeof__(lval)) _res; \
3952 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3953 arg7,arg8,arg9,arg10,arg11, \
3956 volatile OrigFn _orig = (orig); \
3957 volatile unsigned long _argvec[13]; \
3958 volatile unsigned long _res; \
3959 _argvec[0] = (unsigned long)_orig.nraddr; \
3960 _argvec[1] = (unsigned long)(arg1); \
3961 _argvec[2] = (unsigned long)(arg2); \
3962 _argvec[3] = (unsigned long)(arg3); \
3963 _argvec[4] = (unsigned long)(arg4); \
3964 _argvec[5] = (unsigned long)(arg5); \
3965 _argvec[6] = (unsigned long)(arg6); \
3966 _argvec[7] = (unsigned long)(arg7); \
3967 _argvec[8] = (unsigned long)(arg8); \
3968 _argvec[9] = (unsigned long)(arg9); \
3969 _argvec[10] = (unsigned long)(arg10); \
3970 _argvec[11] = (unsigned long)(arg11); \
3971 _argvec[12] = (unsigned long)(arg12); \
3973 VALGRIND_ALIGN_STACK \
3974 "sub sp, sp, #0x30 \n\t" \
3975 "ldr x0, [%1, #8] \n\t" \
3976 "ldr x1, [%1, #16] \n\t" \
3977 "ldr x2, [%1, #24] \n\t" \
3978 "ldr x3, [%1, #32] \n\t" \
3979 "ldr x4, [%1, #40] \n\t" \
3980 "ldr x5, [%1, #48] \n\t" \
3981 "ldr x6, [%1, #56] \n\t" \
3982 "ldr x7, [%1, #64] \n\t" \
3983 "ldr x8, [%1, #72] \n\t" \
3984 "str x8, [sp, #0] \n\t" \
3985 "ldr x8, [%1, #80] \n\t" \
3986 "str x8, [sp, #8] \n\t" \
3987 "ldr x8, [%1, #88] \n\t" \
3988 "str x8, [sp, #16] \n\t" \
3989 "ldr x8, [%1, #96] \n\t" \
3990 "str x8, [sp, #24] \n\t" \
3991 "ldr x8, [%1] \n\t" /* target->x8 */ \
3992 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3993 VALGRIND_RESTORE_STACK \
3995 : /*out*/ "=r" (_res) \
3996 : /*in*/ "0" (&_argvec[0]) \
3997 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3999 lval = (__typeof__(lval)) _res; \
4002 #endif /* PLAT_arm64_linux */
4004 /* ------------------------- s390x-linux ------------------------- */
4006 #if defined(PLAT_s390x_linux)
4008 /* Similar workaround as amd64 (see above), but we use r11 as frame
4009 pointer and save the old r11 in r7. r11 might be used for
4010 argvec, therefore we copy argvec in r1 since r1 is clobbered
4011 after the call anyway. */
4012 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra inline-asm input operand: the canonical frame address from
   __builtin_dwarf_cfa(), placed in a general register ("d" constraint).
   The leading comma splices it onto each asm's input-operand list --
   see the "__FRAME_POINTER" uses after the "&_argvec[0]" inputs in the
   CALL_FN_* macros below. */
4013 # define __FRAME_POINTER \
4014 ,"d"(__builtin_dwarf_cfa())
4015 # define VALGRIND_CFI_PROLOGUE \
4016 ".cfi_remember_state\n\t" \
4017 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4020 ".cfi_def_cfa r11, 0\n\t"
4021 # define VALGRIND_CFI_EPILOGUE \
4023 ".cfi_restore_state\n\t"
4025 # define __FRAME_POINTER
4026 # define VALGRIND_CFI_PROLOGUE \
4028 # define VALGRIND_CFI_EPILOGUE
4031 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4032 according to the s390 GCC maintainer. (The ABI specification is not
4033 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4034 VALGRIND_RESTORE_STACK are not defined here. */
4036 /* These regs are trashed by the hidden call. Note that we overwrite
4037 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4038 function a proper return address. All others are ABI defined call clobbers. */
4040 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4041 "f0","f1","f2","f3","f4","f5","f6","f7"
4043 /* Nb: Although r11 is modified in the asm snippets below (inside
4044 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4046 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4048 (2) GCC will complain that r11 cannot appear inside a clobber section,
4049 when compiled with -O -fno-omit-frame-pointer
4052 #define CALL_FN_W_v(lval, orig) \
4054 volatile OrigFn _orig = (orig); \
4055 volatile unsigned long _argvec[1]; \
4056 volatile unsigned long _res; \
4057 _argvec[0] = (unsigned long)_orig.nraddr; \
4059 VALGRIND_CFI_PROLOGUE \
4060 "aghi 15,-160\n\t" \
4061 "lg 1, 0(1)\n\t" /* target->r1 */ \
4062 VALGRIND_CALL_NOREDIR_R1 \
4065 VALGRIND_CFI_EPILOGUE \
4066 : /*out*/ "=d" (_res) \
4067 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4068 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4070 lval = (__typeof__(lval)) _res; \
4073 /* The call abi has the arguments in r2-r6 and stack */
4074 #define CALL_FN_W_W(lval, orig, arg1) \
4076 volatile OrigFn _orig = (orig); \
4077 volatile unsigned long _argvec[2]; \
4078 volatile unsigned long _res; \
4079 _argvec[0] = (unsigned long)_orig.nraddr; \
4080 _argvec[1] = (unsigned long)arg1; \
4082 VALGRIND_CFI_PROLOGUE \
4083 "aghi 15,-160\n\t" \
4086 VALGRIND_CALL_NOREDIR_R1 \
4089 VALGRIND_CFI_EPILOGUE \
4090 : /*out*/ "=d" (_res) \
4091 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4092 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4094 lval = (__typeof__(lval)) _res; \
4097 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4099 volatile OrigFn _orig = (orig); \
4100 volatile unsigned long _argvec[3]; \
4101 volatile unsigned long _res; \
4102 _argvec[0] = (unsigned long)_orig.nraddr; \
4103 _argvec[1] = (unsigned long)arg1; \
4104 _argvec[2] = (unsigned long)arg2; \
4106 VALGRIND_CFI_PROLOGUE \
4107 "aghi 15,-160\n\t" \
4111 VALGRIND_CALL_NOREDIR_R1 \
4114 VALGRIND_CFI_EPILOGUE \
4115 : /*out*/ "=d" (_res) \
4116 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4117 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4119 lval = (__typeof__(lval)) _res; \
4122 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4124 volatile OrigFn _orig = (orig); \
4125 volatile unsigned long _argvec[4]; \
4126 volatile unsigned long _res; \
4127 _argvec[0] = (unsigned long)_orig.nraddr; \
4128 _argvec[1] = (unsigned long)arg1; \
4129 _argvec[2] = (unsigned long)arg2; \
4130 _argvec[3] = (unsigned long)arg3; \
4132 VALGRIND_CFI_PROLOGUE \
4133 "aghi 15,-160\n\t" \
4138 VALGRIND_CALL_NOREDIR_R1 \
4141 VALGRIND_CFI_EPILOGUE \
4142 : /*out*/ "=d" (_res) \
4143 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4144 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4146 lval = (__typeof__(lval)) _res; \
4149 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4151 volatile OrigFn _orig = (orig); \
4152 volatile unsigned long _argvec[5]; \
4153 volatile unsigned long _res; \
4154 _argvec[0] = (unsigned long)_orig.nraddr; \
4155 _argvec[1] = (unsigned long)arg1; \
4156 _argvec[2] = (unsigned long)arg2; \
4157 _argvec[3] = (unsigned long)arg3; \
4158 _argvec[4] = (unsigned long)arg4; \
4160 VALGRIND_CFI_PROLOGUE \
4161 "aghi 15,-160\n\t" \
4167 VALGRIND_CALL_NOREDIR_R1 \
4170 VALGRIND_CFI_EPILOGUE \
4171 : /*out*/ "=d" (_res) \
4172 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4173 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4175 lval = (__typeof__(lval)) _res; \
4178 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4180 volatile OrigFn _orig = (orig); \
4181 volatile unsigned long _argvec[6]; \
4182 volatile unsigned long _res; \
4183 _argvec[0] = (unsigned long)_orig.nraddr; \
4184 _argvec[1] = (unsigned long)arg1; \
4185 _argvec[2] = (unsigned long)arg2; \
4186 _argvec[3] = (unsigned long)arg3; \
4187 _argvec[4] = (unsigned long)arg4; \
4188 _argvec[5] = (unsigned long)arg5; \
4190 VALGRIND_CFI_PROLOGUE \
4191 "aghi 15,-160\n\t" \
4198 VALGRIND_CALL_NOREDIR_R1 \
4201 VALGRIND_CFI_EPILOGUE \
4202 : /*out*/ "=d" (_res) \
4203 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4204 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4206 lval = (__typeof__(lval)) _res; \
4209 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4212 volatile OrigFn _orig = (orig); \
4213 volatile unsigned long _argvec[7]; \
4214 volatile unsigned long _res; \
4215 _argvec[0] = (unsigned long)_orig.nraddr; \
4216 _argvec[1] = (unsigned long)arg1; \
4217 _argvec[2] = (unsigned long)arg2; \
4218 _argvec[3] = (unsigned long)arg3; \
4219 _argvec[4] = (unsigned long)arg4; \
4220 _argvec[5] = (unsigned long)arg5; \
4221 _argvec[6] = (unsigned long)arg6; \
4223 VALGRIND_CFI_PROLOGUE \
4224 "aghi 15,-168\n\t" \
4230 "mvc 160(8,15), 48(1)\n\t" \
4232 VALGRIND_CALL_NOREDIR_R1 \
4235 VALGRIND_CFI_EPILOGUE \
4236 : /*out*/ "=d" (_res) \
4237 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4238 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4240 lval = (__typeof__(lval)) _res; \
4243 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4246 volatile OrigFn _orig = (orig); \
4247 volatile unsigned long _argvec[8]; \
4248 volatile unsigned long _res; \
4249 _argvec[0] = (unsigned long)_orig.nraddr; \
4250 _argvec[1] = (unsigned long)arg1; \
4251 _argvec[2] = (unsigned long)arg2; \
4252 _argvec[3] = (unsigned long)arg3; \
4253 _argvec[4] = (unsigned long)arg4; \
4254 _argvec[5] = (unsigned long)arg5; \
4255 _argvec[6] = (unsigned long)arg6; \
4256 _argvec[7] = (unsigned long)arg7; \
4258 VALGRIND_CFI_PROLOGUE \
4259 "aghi 15,-176\n\t" \
4265 "mvc 160(8,15), 48(1)\n\t" \
4266 "mvc 168(8,15), 56(1)\n\t" \
4268 VALGRIND_CALL_NOREDIR_R1 \
4271 VALGRIND_CFI_EPILOGUE \
4272 : /*out*/ "=d" (_res) \
4273 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4274 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4276 lval = (__typeof__(lval)) _res; \
4279 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4282 volatile OrigFn _orig = (orig); \
4283 volatile unsigned long _argvec[9]; \
4284 volatile unsigned long _res; \
4285 _argvec[0] = (unsigned long)_orig.nraddr; \
4286 _argvec[1] = (unsigned long)arg1; \
4287 _argvec[2] = (unsigned long)arg2; \
4288 _argvec[3] = (unsigned long)arg3; \
4289 _argvec[4] = (unsigned long)arg4; \
4290 _argvec[5] = (unsigned long)arg5; \
4291 _argvec[6] = (unsigned long)arg6; \
4292 _argvec[7] = (unsigned long)arg7; \
4293 _argvec[8] = (unsigned long)arg8; \
4295 VALGRIND_CFI_PROLOGUE \
4296 "aghi 15,-184\n\t" \
4302 "mvc 160(8,15), 48(1)\n\t" \
4303 "mvc 168(8,15), 56(1)\n\t" \
4304 "mvc 176(8,15), 64(1)\n\t" \
4306 VALGRIND_CALL_NOREDIR_R1 \
4309 VALGRIND_CFI_EPILOGUE \
4310 : /*out*/ "=d" (_res) \
4311 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4312 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4314 lval = (__typeof__(lval)) _res; \
4317 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4318 arg6, arg7 ,arg8, arg9) \
4320 volatile OrigFn _orig = (orig); \
4321 volatile unsigned long _argvec[10]; \
4322 volatile unsigned long _res; \
4323 _argvec[0] = (unsigned long)_orig.nraddr; \
4324 _argvec[1] = (unsigned long)arg1; \
4325 _argvec[2] = (unsigned long)arg2; \
4326 _argvec[3] = (unsigned long)arg3; \
4327 _argvec[4] = (unsigned long)arg4; \
4328 _argvec[5] = (unsigned long)arg5; \
4329 _argvec[6] = (unsigned long)arg6; \
4330 _argvec[7] = (unsigned long)arg7; \
4331 _argvec[8] = (unsigned long)arg8; \
4332 _argvec[9] = (unsigned long)arg9; \
4334 VALGRIND_CFI_PROLOGUE \
4335 "aghi 15,-192\n\t" \
4341 "mvc 160(8,15), 48(1)\n\t" \
4342 "mvc 168(8,15), 56(1)\n\t" \
4343 "mvc 176(8,15), 64(1)\n\t" \
4344 "mvc 184(8,15), 72(1)\n\t" \
4346 VALGRIND_CALL_NOREDIR_R1 \
4349 VALGRIND_CFI_EPILOGUE \
4350 : /*out*/ "=d" (_res) \
4351 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4352 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4354 lval = (__typeof__(lval)) _res; \
4357 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4358 arg6, arg7 ,arg8, arg9, arg10) \
4360 volatile OrigFn _orig = (orig); \
4361 volatile unsigned long _argvec[11]; \
4362 volatile unsigned long _res; \
4363 _argvec[0] = (unsigned long)_orig.nraddr; \
4364 _argvec[1] = (unsigned long)arg1; \
4365 _argvec[2] = (unsigned long)arg2; \
4366 _argvec[3] = (unsigned long)arg3; \
4367 _argvec[4] = (unsigned long)arg4; \
4368 _argvec[5] = (unsigned long)arg5; \
4369 _argvec[6] = (unsigned long)arg6; \
4370 _argvec[7] = (unsigned long)arg7; \
4371 _argvec[8] = (unsigned long)arg8; \
4372 _argvec[9] = (unsigned long)arg9; \
4373 _argvec[10] = (unsigned long)arg10; \
4375 VALGRIND_CFI_PROLOGUE \
4376 "aghi 15,-200\n\t" \
4382 "mvc 160(8,15), 48(1)\n\t" \
4383 "mvc 168(8,15), 56(1)\n\t" \
4384 "mvc 176(8,15), 64(1)\n\t" \
4385 "mvc 184(8,15), 72(1)\n\t" \
4386 "mvc 192(8,15), 80(1)\n\t" \
4388 VALGRIND_CALL_NOREDIR_R1 \
4391 VALGRIND_CFI_EPILOGUE \
4392 : /*out*/ "=d" (_res) \
4393 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4394 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4396 lval = (__typeof__(lval)) _res; \
4399 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4400 arg6, arg7 ,arg8, arg9, arg10, arg11) \
4402 volatile OrigFn _orig = (orig); \
4403 volatile unsigned long _argvec[12]; \
4404 volatile unsigned long _res; \
4405 _argvec[0] = (unsigned long)_orig.nraddr; \
4406 _argvec[1] = (unsigned long)arg1; \
4407 _argvec[2] = (unsigned long)arg2; \
4408 _argvec[3] = (unsigned long)arg3; \
4409 _argvec[4] = (unsigned long)arg4; \
4410 _argvec[5] = (unsigned long)arg5; \
4411 _argvec[6] = (unsigned long)arg6; \
4412 _argvec[7] = (unsigned long)arg7; \
4413 _argvec[8] = (unsigned long)arg8; \
4414 _argvec[9] = (unsigned long)arg9; \
4415 _argvec[10] = (unsigned long)arg10; \
4416 _argvec[11] = (unsigned long)arg11; \
4418 VALGRIND_CFI_PROLOGUE \
4419 "aghi 15,-208\n\t" \
4425 "mvc 160(8,15), 48(1)\n\t" \
4426 "mvc 168(8,15), 56(1)\n\t" \
4427 "mvc 176(8,15), 64(1)\n\t" \
4428 "mvc 184(8,15), 72(1)\n\t" \
4429 "mvc 192(8,15), 80(1)\n\t" \
4430 "mvc 200(8,15), 88(1)\n\t" \
4432 VALGRIND_CALL_NOREDIR_R1 \
4435 VALGRIND_CFI_EPILOGUE \
4436 : /*out*/ "=d" (_res) \
4437 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4438 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4440 lval = (__typeof__(lval)) _res; \
4443 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4444 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
4446 volatile OrigFn _orig = (orig); \
4447 volatile unsigned long _argvec[13]; \
4448 volatile unsigned long _res; \
4449 _argvec[0] = (unsigned long)_orig.nraddr; \
4450 _argvec[1] = (unsigned long)arg1; \
4451 _argvec[2] = (unsigned long)arg2; \
4452 _argvec[3] = (unsigned long)arg3; \
4453 _argvec[4] = (unsigned long)arg4; \
4454 _argvec[5] = (unsigned long)arg5; \
4455 _argvec[6] = (unsigned long)arg6; \
4456 _argvec[7] = (unsigned long)arg7; \
4457 _argvec[8] = (unsigned long)arg8; \
4458 _argvec[9] = (unsigned long)arg9; \
4459 _argvec[10] = (unsigned long)arg10; \
4460 _argvec[11] = (unsigned long)arg11; \
4461 _argvec[12] = (unsigned long)arg12; \
4463 VALGRIND_CFI_PROLOGUE \
4464 "aghi 15,-216\n\t" \
4470 "mvc 160(8,15), 48(1)\n\t" \
4471 "mvc 168(8,15), 56(1)\n\t" \
4472 "mvc 176(8,15), 64(1)\n\t" \
4473 "mvc 184(8,15), 72(1)\n\t" \
4474 "mvc 192(8,15), 80(1)\n\t" \
4475 "mvc 200(8,15), 88(1)\n\t" \
4476 "mvc 208(8,15), 96(1)\n\t" \
4478 VALGRIND_CALL_NOREDIR_R1 \
4481 VALGRIND_CFI_EPILOGUE \
4482 : /*out*/ "=d" (_res) \
4483 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4484 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4486 lval = (__typeof__(lval)) _res; \
4490 #endif /* PLAT_s390x_linux */
4492 /* ------------------------- mips32-linux ----------------------- */
4494 #if defined(PLAT_mips32_linux)
/* NOTE(review): as with the other platform sections in this chunk, interior
   lines of each macro (the "do {" / "} while (0)" wrappers, the
   "__asm__ volatile(" openers, the "move $25, %1"-style lines and the
   closing ");") appear to have been elided by the extraction.  Verify
   against the upstream valgrind.h. */
4496 /* These regs are trashed by the hidden call. */
4497 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4498 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4501 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
/* o32 pattern used throughout this section: save $28 (gp) and $31 (ra)
   in an 8-byte slot on the stack, reserve a 16-byte outgoing-argument
   area, load up to four arguments into $4-$7, load the call target into
   $25 (t9) and call via the no-redirect sequence, then restore gp/ra. */
4504 #define CALL_FN_W_v(lval, orig) \
4506 volatile OrigFn _orig = (orig); \
4507 volatile unsigned long _argvec[1]; \
4508 volatile unsigned long _res; \
4509 _argvec[0] = (unsigned long)_orig.nraddr; \
4511 "subu $29, $29, 8 \n\t" \
4512 "sw $28, 0($29) \n\t" \
4513 "sw $31, 4($29) \n\t" \
4514 "subu $29, $29, 16 \n\t" \
4515 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4516 VALGRIND_CALL_NOREDIR_T9 \
4517 "addu $29, $29, 16\n\t" \
4518 "lw $28, 0($29) \n\t" \
4519 "lw $31, 4($29) \n\t" \
4520 "addu $29, $29, 8 \n\t" \
4522 : /*out*/ "=r" (_res) \
4523 : /*in*/ "0" (&_argvec[0]) \
4524 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4526 lval = (__typeof__(lval)) _res; \
4529 #define CALL_FN_W_W(lval, orig, arg1) \
4531 volatile OrigFn _orig = (orig); \
4532 volatile unsigned long _argvec[2]; \
4533 volatile unsigned long _res; \
4534 _argvec[0] = (unsigned long)_orig.nraddr; \
4535 _argvec[1] = (unsigned long)(arg1); \
4537 "subu $29, $29, 8 \n\t" \
4538 "sw $28, 0($29) \n\t" \
4539 "sw $31, 4($29) \n\t" \
4540 "subu $29, $29, 16 \n\t" \
4541 "lw $4, 4(%1) \n\t" /* arg1*/ \
4542 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4543 VALGRIND_CALL_NOREDIR_T9 \
4544 "addu $29, $29, 16 \n\t" \
4545 "lw $28, 0($29) \n\t" \
4546 "lw $31, 4($29) \n\t" \
4547 "addu $29, $29, 8 \n\t" \
4549 : /*out*/ "=r" (_res) \
4550 : /*in*/ "0" (&_argvec[0]) \
4551 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4553 lval = (__typeof__(lval)) _res; \
4556 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4558 volatile OrigFn _orig = (orig); \
4559 volatile unsigned long _argvec[3]; \
4560 volatile unsigned long _res; \
4561 _argvec[0] = (unsigned long)_orig.nraddr; \
4562 _argvec[1] = (unsigned long)(arg1); \
4563 _argvec[2] = (unsigned long)(arg2); \
4565 "subu $29, $29, 8 \n\t" \
4566 "sw $28, 0($29) \n\t" \
4567 "sw $31, 4($29) \n\t" \
4568 "subu $29, $29, 16 \n\t" \
4569 "lw $4, 4(%1) \n\t" \
4570 "lw $5, 8(%1) \n\t" \
4571 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4572 VALGRIND_CALL_NOREDIR_T9 \
4573 "addu $29, $29, 16 \n\t" \
4574 "lw $28, 0($29) \n\t" \
4575 "lw $31, 4($29) \n\t" \
4576 "addu $29, $29, 8 \n\t" \
4578 : /*out*/ "=r" (_res) \
4579 : /*in*/ "0" (&_argvec[0]) \
4580 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4582 lval = (__typeof__(lval)) _res; \
4585 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4587 volatile OrigFn _orig = (orig); \
4588 volatile unsigned long _argvec[4]; \
4589 volatile unsigned long _res; \
4590 _argvec[0] = (unsigned long)_orig.nraddr; \
4591 _argvec[1] = (unsigned long)(arg1); \
4592 _argvec[2] = (unsigned long)(arg2); \
4593 _argvec[3] = (unsigned long)(arg3); \
4595 "subu $29, $29, 8 \n\t" \
4596 "sw $28, 0($29) \n\t" \
4597 "sw $31, 4($29) \n\t" \
4598 "subu $29, $29, 16 \n\t" \
4599 "lw $4, 4(%1) \n\t" \
4600 "lw $5, 8(%1) \n\t" \
4601 "lw $6, 12(%1) \n\t" \
4602 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4603 VALGRIND_CALL_NOREDIR_T9 \
4604 "addu $29, $29, 16 \n\t" \
4605 "lw $28, 0($29) \n\t" \
4606 "lw $31, 4($29) \n\t" \
4607 "addu $29, $29, 8 \n\t" \
4609 : /*out*/ "=r" (_res) \
4610 : /*in*/ "0" (&_argvec[0]) \
4611 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4613 lval = (__typeof__(lval)) _res; \
4616 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4618 volatile OrigFn _orig = (orig); \
4619 volatile unsigned long _argvec[5]; \
4620 volatile unsigned long _res; \
4621 _argvec[0] = (unsigned long)_orig.nraddr; \
4622 _argvec[1] = (unsigned long)(arg1); \
4623 _argvec[2] = (unsigned long)(arg2); \
4624 _argvec[3] = (unsigned long)(arg3); \
4625 _argvec[4] = (unsigned long)(arg4); \
4627 "subu $29, $29, 8 \n\t" \
4628 "sw $28, 0($29) \n\t" \
4629 "sw $31, 4($29) \n\t" \
4630 "subu $29, $29, 16 \n\t" \
4631 "lw $4, 4(%1) \n\t" \
4632 "lw $5, 8(%1) \n\t" \
4633 "lw $6, 12(%1) \n\t" \
4634 "lw $7, 16(%1) \n\t" \
4635 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4636 VALGRIND_CALL_NOREDIR_T9 \
4637 "addu $29, $29, 16 \n\t" \
4638 "lw $28, 0($29) \n\t" \
4639 "lw $31, 4($29) \n\t" \
4640 "addu $29, $29, 8 \n\t" \
4642 : /*out*/ "=r" (_res) \
4643 : /*in*/ "0" (&_argvec[0]) \
4644 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4646 lval = (__typeof__(lval)) _res; \
/* With more than four arguments the surplus ones are spilled to the
   outgoing-argument area on the stack; $4 is reused as a scratch
   register for those copies before the real $4-$7 arguments are
   loaded.  The reserved area grows (24, 32, 40, ... bytes). */
4649 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4651 volatile OrigFn _orig = (orig); \
4652 volatile unsigned long _argvec[6]; \
4653 volatile unsigned long _res; \
4654 _argvec[0] = (unsigned long)_orig.nraddr; \
4655 _argvec[1] = (unsigned long)(arg1); \
4656 _argvec[2] = (unsigned long)(arg2); \
4657 _argvec[3] = (unsigned long)(arg3); \
4658 _argvec[4] = (unsigned long)(arg4); \
4659 _argvec[5] = (unsigned long)(arg5); \
4661 "subu $29, $29, 8 \n\t" \
4662 "sw $28, 0($29) \n\t" \
4663 "sw $31, 4($29) \n\t" \
4664 "lw $4, 20(%1) \n\t" \
4665 "subu $29, $29, 24\n\t" \
4666 "sw $4, 16($29) \n\t" \
4667 "lw $4, 4(%1) \n\t" \
4668 "lw $5, 8(%1) \n\t" \
4669 "lw $6, 12(%1) \n\t" \
4670 "lw $7, 16(%1) \n\t" \
4671 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4672 VALGRIND_CALL_NOREDIR_T9 \
4673 "addu $29, $29, 24 \n\t" \
4674 "lw $28, 0($29) \n\t" \
4675 "lw $31, 4($29) \n\t" \
4676 "addu $29, $29, 8 \n\t" \
4678 : /*out*/ "=r" (_res) \
4679 : /*in*/ "0" (&_argvec[0]) \
4680 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4682 lval = (__typeof__(lval)) _res; \
4684 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4686 volatile OrigFn _orig = (orig); \
4687 volatile unsigned long _argvec[7]; \
4688 volatile unsigned long _res; \
4689 _argvec[0] = (unsigned long)_orig.nraddr; \
4690 _argvec[1] = (unsigned long)(arg1); \
4691 _argvec[2] = (unsigned long)(arg2); \
4692 _argvec[3] = (unsigned long)(arg3); \
4693 _argvec[4] = (unsigned long)(arg4); \
4694 _argvec[5] = (unsigned long)(arg5); \
4695 _argvec[6] = (unsigned long)(arg6); \
4697 "subu $29, $29, 8 \n\t" \
4698 "sw $28, 0($29) \n\t" \
4699 "sw $31, 4($29) \n\t" \
4700 "lw $4, 20(%1) \n\t" \
4701 "subu $29, $29, 32\n\t" \
4702 "sw $4, 16($29) \n\t" \
4703 "lw $4, 24(%1) \n\t" \
4705 "sw $4, 20($29) \n\t" \
4706 "lw $4, 4(%1) \n\t" \
4707 "lw $5, 8(%1) \n\t" \
4708 "lw $6, 12(%1) \n\t" \
4709 "lw $7, 16(%1) \n\t" \
4710 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4711 VALGRIND_CALL_NOREDIR_T9 \
4712 "addu $29, $29, 32 \n\t" \
4713 "lw $28, 0($29) \n\t" \
4714 "lw $31, 4($29) \n\t" \
4715 "addu $29, $29, 8 \n\t" \
4717 : /*out*/ "=r" (_res) \
4718 : /*in*/ "0" (&_argvec[0]) \
4719 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4721 lval = (__typeof__(lval)) _res; \
4724 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4727 volatile OrigFn _orig = (orig); \
4728 volatile unsigned long _argvec[8]; \
4729 volatile unsigned long _res; \
4730 _argvec[0] = (unsigned long)_orig.nraddr; \
4731 _argvec[1] = (unsigned long)(arg1); \
4732 _argvec[2] = (unsigned long)(arg2); \
4733 _argvec[3] = (unsigned long)(arg3); \
4734 _argvec[4] = (unsigned long)(arg4); \
4735 _argvec[5] = (unsigned long)(arg5); \
4736 _argvec[6] = (unsigned long)(arg6); \
4737 _argvec[7] = (unsigned long)(arg7); \
4739 "subu $29, $29, 8 \n\t" \
4740 "sw $28, 0($29) \n\t" \
4741 "sw $31, 4($29) \n\t" \
4742 "lw $4, 20(%1) \n\t" \
4743 "subu $29, $29, 32\n\t" \
4744 "sw $4, 16($29) \n\t" \
4745 "lw $4, 24(%1) \n\t" \
4746 "sw $4, 20($29) \n\t" \
4747 "lw $4, 28(%1) \n\t" \
4748 "sw $4, 24($29) \n\t" \
4749 "lw $4, 4(%1) \n\t" \
4750 "lw $5, 8(%1) \n\t" \
4751 "lw $6, 12(%1) \n\t" \
4752 "lw $7, 16(%1) \n\t" \
4753 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4754 VALGRIND_CALL_NOREDIR_T9 \
4755 "addu $29, $29, 32 \n\t" \
4756 "lw $28, 0($29) \n\t" \
4757 "lw $31, 4($29) \n\t" \
4758 "addu $29, $29, 8 \n\t" \
4760 : /*out*/ "=r" (_res) \
4761 : /*in*/ "0" (&_argvec[0]) \
4762 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4764 lval = (__typeof__(lval)) _res; \
4767 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4770 volatile OrigFn _orig = (orig); \
4771 volatile unsigned long _argvec[9]; \
4772 volatile unsigned long _res; \
4773 _argvec[0] = (unsigned long)_orig.nraddr; \
4774 _argvec[1] = (unsigned long)(arg1); \
4775 _argvec[2] = (unsigned long)(arg2); \
4776 _argvec[3] = (unsigned long)(arg3); \
4777 _argvec[4] = (unsigned long)(arg4); \
4778 _argvec[5] = (unsigned long)(arg5); \
4779 _argvec[6] = (unsigned long)(arg6); \
4780 _argvec[7] = (unsigned long)(arg7); \
4781 _argvec[8] = (unsigned long)(arg8); \
4783 "subu $29, $29, 8 \n\t" \
4784 "sw $28, 0($29) \n\t" \
4785 "sw $31, 4($29) \n\t" \
4786 "lw $4, 20(%1) \n\t" \
4787 "subu $29, $29, 40\n\t" \
4788 "sw $4, 16($29) \n\t" \
4789 "lw $4, 24(%1) \n\t" \
4790 "sw $4, 20($29) \n\t" \
4791 "lw $4, 28(%1) \n\t" \
4792 "sw $4, 24($29) \n\t" \
4793 "lw $4, 32(%1) \n\t" \
4794 "sw $4, 28($29) \n\t" \
4795 "lw $4, 4(%1) \n\t" \
4796 "lw $5, 8(%1) \n\t" \
4797 "lw $6, 12(%1) \n\t" \
4798 "lw $7, 16(%1) \n\t" \
4799 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4800 VALGRIND_CALL_NOREDIR_T9 \
4801 "addu $29, $29, 40 \n\t" \
4802 "lw $28, 0($29) \n\t" \
4803 "lw $31, 4($29) \n\t" \
4804 "addu $29, $29, 8 \n\t" \
4806 : /*out*/ "=r" (_res) \
4807 : /*in*/ "0" (&_argvec[0]) \
4808 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4810 lval = (__typeof__(lval)) _res; \
4813 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4816 volatile OrigFn _orig = (orig); \
4817 volatile unsigned long _argvec[10]; \
4818 volatile unsigned long _res; \
4819 _argvec[0] = (unsigned long)_orig.nraddr; \
4820 _argvec[1] = (unsigned long)(arg1); \
4821 _argvec[2] = (unsigned long)(arg2); \
4822 _argvec[3] = (unsigned long)(arg3); \
4823 _argvec[4] = (unsigned long)(arg4); \
4824 _argvec[5] = (unsigned long)(arg5); \
4825 _argvec[6] = (unsigned long)(arg6); \
4826 _argvec[7] = (unsigned long)(arg7); \
4827 _argvec[8] = (unsigned long)(arg8); \
4828 _argvec[9] = (unsigned long)(arg9); \
4830 "subu $29, $29, 8 \n\t" \
4831 "sw $28, 0($29) \n\t" \
4832 "sw $31, 4($29) \n\t" \
4833 "lw $4, 20(%1) \n\t" \
4834 "subu $29, $29, 40\n\t" \
4835 "sw $4, 16($29) \n\t" \
4836 "lw $4, 24(%1) \n\t" \
4837 "sw $4, 20($29) \n\t" \
4838 "lw $4, 28(%1) \n\t" \
4839 "sw $4, 24($29) \n\t" \
4840 "lw $4, 32(%1) \n\t" \
4841 "sw $4, 28($29) \n\t" \
4842 "lw $4, 36(%1) \n\t" \
4843 "sw $4, 32($29) \n\t" \
4844 "lw $4, 4(%1) \n\t" \
4845 "lw $5, 8(%1) \n\t" \
4846 "lw $6, 12(%1) \n\t" \
4847 "lw $7, 16(%1) \n\t" \
4848 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4849 VALGRIND_CALL_NOREDIR_T9 \
4850 "addu $29, $29, 40 \n\t" \
4851 "lw $28, 0($29) \n\t" \
4852 "lw $31, 4($29) \n\t" \
4853 "addu $29, $29, 8 \n\t" \
4855 : /*out*/ "=r" (_res) \
4856 : /*in*/ "0" (&_argvec[0]) \
4857 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4859 lval = (__typeof__(lval)) _res; \
4862 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4863 arg7,arg8,arg9,arg10) \
4865 volatile OrigFn _orig = (orig); \
4866 volatile unsigned long _argvec[11]; \
4867 volatile unsigned long _res; \
4868 _argvec[0] = (unsigned long)_orig.nraddr; \
4869 _argvec[1] = (unsigned long)(arg1); \
4870 _argvec[2] = (unsigned long)(arg2); \
4871 _argvec[3] = (unsigned long)(arg3); \
4872 _argvec[4] = (unsigned long)(arg4); \
4873 _argvec[5] = (unsigned long)(arg5); \
4874 _argvec[6] = (unsigned long)(arg6); \
4875 _argvec[7] = (unsigned long)(arg7); \
4876 _argvec[8] = (unsigned long)(arg8); \
4877 _argvec[9] = (unsigned long)(arg9); \
4878 _argvec[10] = (unsigned long)(arg10); \
4880 "subu $29, $29, 8 \n\t" \
4881 "sw $28, 0($29) \n\t" \
4882 "sw $31, 4($29) \n\t" \
4883 "lw $4, 20(%1) \n\t" \
4884 "subu $29, $29, 48\n\t" \
4885 "sw $4, 16($29) \n\t" \
4886 "lw $4, 24(%1) \n\t" \
4887 "sw $4, 20($29) \n\t" \
4888 "lw $4, 28(%1) \n\t" \
4889 "sw $4, 24($29) \n\t" \
4890 "lw $4, 32(%1) \n\t" \
4891 "sw $4, 28($29) \n\t" \
4892 "lw $4, 36(%1) \n\t" \
4893 "sw $4, 32($29) \n\t" \
4894 "lw $4, 40(%1) \n\t" \
4895 "sw $4, 36($29) \n\t" \
4896 "lw $4, 4(%1) \n\t" \
4897 "lw $5, 8(%1) \n\t" \
4898 "lw $6, 12(%1) \n\t" \
4899 "lw $7, 16(%1) \n\t" \
4900 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4901 VALGRIND_CALL_NOREDIR_T9 \
4902 "addu $29, $29, 48 \n\t" \
4903 "lw $28, 0($29) \n\t" \
4904 "lw $31, 4($29) \n\t" \
4905 "addu $29, $29, 8 \n\t" \
4907 : /*out*/ "=r" (_res) \
4908 : /*in*/ "0" (&_argvec[0]) \
4909 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4911 lval = (__typeof__(lval)) _res; \
4914 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4915 arg6,arg7,arg8,arg9,arg10, \
4918 volatile OrigFn _orig = (orig); \
4919 volatile unsigned long _argvec[12]; \
4920 volatile unsigned long _res; \
4921 _argvec[0] = (unsigned long)_orig.nraddr; \
4922 _argvec[1] = (unsigned long)(arg1); \
4923 _argvec[2] = (unsigned long)(arg2); \
4924 _argvec[3] = (unsigned long)(arg3); \
4925 _argvec[4] = (unsigned long)(arg4); \
4926 _argvec[5] = (unsigned long)(arg5); \
4927 _argvec[6] = (unsigned long)(arg6); \
4928 _argvec[7] = (unsigned long)(arg7); \
4929 _argvec[8] = (unsigned long)(arg8); \
4930 _argvec[9] = (unsigned long)(arg9); \
4931 _argvec[10] = (unsigned long)(arg10); \
4932 _argvec[11] = (unsigned long)(arg11); \
4934 "subu $29, $29, 8 \n\t" \
4935 "sw $28, 0($29) \n\t" \
4936 "sw $31, 4($29) \n\t" \
4937 "lw $4, 20(%1) \n\t" \
4938 "subu $29, $29, 48\n\t" \
4939 "sw $4, 16($29) \n\t" \
4940 "lw $4, 24(%1) \n\t" \
4941 "sw $4, 20($29) \n\t" \
4942 "lw $4, 28(%1) \n\t" \
4943 "sw $4, 24($29) \n\t" \
4944 "lw $4, 32(%1) \n\t" \
4945 "sw $4, 28($29) \n\t" \
4946 "lw $4, 36(%1) \n\t" \
4947 "sw $4, 32($29) \n\t" \
4948 "lw $4, 40(%1) \n\t" \
4949 "sw $4, 36($29) \n\t" \
4950 "lw $4, 44(%1) \n\t" \
4951 "sw $4, 40($29) \n\t" \
4952 "lw $4, 4(%1) \n\t" \
4953 "lw $5, 8(%1) \n\t" \
4954 "lw $6, 12(%1) \n\t" \
4955 "lw $7, 16(%1) \n\t" \
4956 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4957 VALGRIND_CALL_NOREDIR_T9 \
4958 "addu $29, $29, 48 \n\t" \
4959 "lw $28, 0($29) \n\t" \
4960 "lw $31, 4($29) \n\t" \
4961 "addu $29, $29, 8 \n\t" \
4963 : /*out*/ "=r" (_res) \
4964 : /*in*/ "0" (&_argvec[0]) \
4965 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4967 lval = (__typeof__(lval)) _res; \
/* NOTE(review): the input constraint below (line 5023) is "r" while every
   other mips32 macro in this section uses "0" (tie to output operand).
   Presumably intentional upstream, but worth confirming against the
   original valgrind.h. */
4970 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4971 arg6,arg7,arg8,arg9,arg10, \
4974 volatile OrigFn _orig = (orig); \
4975 volatile unsigned long _argvec[13]; \
4976 volatile unsigned long _res; \
4977 _argvec[0] = (unsigned long)_orig.nraddr; \
4978 _argvec[1] = (unsigned long)(arg1); \
4979 _argvec[2] = (unsigned long)(arg2); \
4980 _argvec[3] = (unsigned long)(arg3); \
4981 _argvec[4] = (unsigned long)(arg4); \
4982 _argvec[5] = (unsigned long)(arg5); \
4983 _argvec[6] = (unsigned long)(arg6); \
4984 _argvec[7] = (unsigned long)(arg7); \
4985 _argvec[8] = (unsigned long)(arg8); \
4986 _argvec[9] = (unsigned long)(arg9); \
4987 _argvec[10] = (unsigned long)(arg10); \
4988 _argvec[11] = (unsigned long)(arg11); \
4989 _argvec[12] = (unsigned long)(arg12); \
4991 "subu $29, $29, 8 \n\t" \
4992 "sw $28, 0($29) \n\t" \
4993 "sw $31, 4($29) \n\t" \
4994 "lw $4, 20(%1) \n\t" \
4995 "subu $29, $29, 56\n\t" \
4996 "sw $4, 16($29) \n\t" \
4997 "lw $4, 24(%1) \n\t" \
4998 "sw $4, 20($29) \n\t" \
4999 "lw $4, 28(%1) \n\t" \
5000 "sw $4, 24($29) \n\t" \
5001 "lw $4, 32(%1) \n\t" \
5002 "sw $4, 28($29) \n\t" \
5003 "lw $4, 36(%1) \n\t" \
5004 "sw $4, 32($29) \n\t" \
5005 "lw $4, 40(%1) \n\t" \
5006 "sw $4, 36($29) \n\t" \
5007 "lw $4, 44(%1) \n\t" \
5008 "sw $4, 40($29) \n\t" \
5009 "lw $4, 48(%1) \n\t" \
5010 "sw $4, 44($29) \n\t" \
5011 "lw $4, 4(%1) \n\t" \
5012 "lw $5, 8(%1) \n\t" \
5013 "lw $6, 12(%1) \n\t" \
5014 "lw $7, 16(%1) \n\t" \
5015 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5016 VALGRIND_CALL_NOREDIR_T9 \
5017 "addu $29, $29, 56 \n\t" \
5018 "lw $28, 0($29) \n\t" \
5019 "lw $31, 4($29) \n\t" \
5020 "addu $29, $29, 8 \n\t" \
5022 : /*out*/ "=r" (_res) \
5023 : /*in*/ "r" (&_argvec[0]) \
5024 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5026 lval = (__typeof__(lval)) _res; \
5029 #endif /* PLAT_mips32_linux */
5031 /* ------------------------- mips64-linux ------------------------- */
5033 #if defined(PLAT_mips64_linux)
5035 /* These regs are trashed by the hidden call. */
5036 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5037 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5040 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5043 #define CALL_FN_W_v(lval, orig) \
5045 volatile OrigFn _orig = (orig); \
5046 volatile unsigned long _argvec[1]; \
5047 volatile unsigned long _res; \
5048 _argvec[0] = (unsigned long)_orig.nraddr; \
5050 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5051 VALGRIND_CALL_NOREDIR_T9 \
5053 : /*out*/ "=r" (_res) \
5054 : /*in*/ "0" (&_argvec[0]) \
5055 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5057 lval = (__typeof__(lval)) _res; \
5060 #define CALL_FN_W_W(lval, orig, arg1) \
5062 volatile OrigFn _orig = (orig); \
5063 volatile unsigned long _argvec[2]; \
5064 volatile unsigned long _res; \
5065 _argvec[0] = (unsigned long)_orig.nraddr; \
5066 _argvec[1] = (unsigned long)(arg1); \
5068 "ld $4, 8(%1)\n\t" /* arg1*/ \
5069 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5070 VALGRIND_CALL_NOREDIR_T9 \
5072 : /*out*/ "=r" (_res) \
5073 : /*in*/ "r" (&_argvec[0]) \
5074 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5076 lval = (__typeof__(lval)) _res; \
5079 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5081 volatile OrigFn _orig = (orig); \
5082 volatile unsigned long _argvec[3]; \
5083 volatile unsigned long _res; \
5084 _argvec[0] = (unsigned long)_orig.nraddr; \
5085 _argvec[1] = (unsigned long)(arg1); \
5086 _argvec[2] = (unsigned long)(arg2); \
5088 "ld $4, 8(%1)\n\t" \
5089 "ld $5, 16(%1)\n\t" \
5090 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5091 VALGRIND_CALL_NOREDIR_T9 \
5093 : /*out*/ "=r" (_res) \
5094 : /*in*/ "r" (&_argvec[0]) \
5095 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5097 lval = (__typeof__(lval)) _res; \
5100 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5102 volatile OrigFn _orig = (orig); \
5103 volatile unsigned long _argvec[4]; \
5104 volatile unsigned long _res; \
5105 _argvec[0] = (unsigned long)_orig.nraddr; \
5106 _argvec[1] = (unsigned long)(arg1); \
5107 _argvec[2] = (unsigned long)(arg2); \
5108 _argvec[3] = (unsigned long)(arg3); \
5110 "ld $4, 8(%1)\n\t" \
5111 "ld $5, 16(%1)\n\t" \
5112 "ld $6, 24(%1)\n\t" \
5113 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5114 VALGRIND_CALL_NOREDIR_T9 \
5116 : /*out*/ "=r" (_res) \
5117 : /*in*/ "r" (&_argvec[0]) \
5118 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5120 lval = (__typeof__(lval)) _res; \
5123 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5125 volatile OrigFn _orig = (orig); \
5126 volatile unsigned long _argvec[5]; \
5127 volatile unsigned long _res; \
5128 _argvec[0] = (unsigned long)_orig.nraddr; \
5129 _argvec[1] = (unsigned long)(arg1); \
5130 _argvec[2] = (unsigned long)(arg2); \
5131 _argvec[3] = (unsigned long)(arg3); \
5132 _argvec[4] = (unsigned long)(arg4); \
5134 "ld $4, 8(%1)\n\t" \
5135 "ld $5, 16(%1)\n\t" \
5136 "ld $6, 24(%1)\n\t" \
5137 "ld $7, 32(%1)\n\t" \
5138 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5139 VALGRIND_CALL_NOREDIR_T9 \
5141 : /*out*/ "=r" (_res) \
5142 : /*in*/ "r" (&_argvec[0]) \
5143 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5145 lval = (__typeof__(lval)) _res; \
5148 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5150 volatile OrigFn _orig = (orig); \
5151 volatile unsigned long _argvec[6]; \
5152 volatile unsigned long _res; \
5153 _argvec[0] = (unsigned long)_orig.nraddr; \
5154 _argvec[1] = (unsigned long)(arg1); \
5155 _argvec[2] = (unsigned long)(arg2); \
5156 _argvec[3] = (unsigned long)(arg3); \
5157 _argvec[4] = (unsigned long)(arg4); \
5158 _argvec[5] = (unsigned long)(arg5); \
5160 "ld $4, 8(%1)\n\t" \
5161 "ld $5, 16(%1)\n\t" \
5162 "ld $6, 24(%1)\n\t" \
5163 "ld $7, 32(%1)\n\t" \
5164 "ld $8, 40(%1)\n\t" \
5165 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5166 VALGRIND_CALL_NOREDIR_T9 \
5168 : /*out*/ "=r" (_res) \
5169 : /*in*/ "r" (&_argvec[0]) \
5170 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5172 lval = (__typeof__(lval)) _res; \
/* Call a 6 word-arg function: args in $4..$9, target in $25 (t9).
   NOTE(review): fragmentary extract -- asm-statement wrapper lines missing;
   restore from upstream valgrind.h. */
5175 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5177 volatile OrigFn _orig = (orig); \
5178 volatile unsigned long _argvec[7]; \
5179 volatile unsigned long _res; \
5180 _argvec[0] = (unsigned long)_orig.nraddr; \
5181 _argvec[1] = (unsigned long)(arg1); \
5182 _argvec[2] = (unsigned long)(arg2); \
5183 _argvec[3] = (unsigned long)(arg3); \
5184 _argvec[4] = (unsigned long)(arg4); \
5185 _argvec[5] = (unsigned long)(arg5); \
5186 _argvec[6] = (unsigned long)(arg6); \
5188 "ld $4, 8(%1)\n\t" \
5189 "ld $5, 16(%1)\n\t" \
5190 "ld $6, 24(%1)\n\t" \
5191 "ld $7, 32(%1)\n\t" \
5192 "ld $8, 40(%1)\n\t" \
5193 "ld $9, 48(%1)\n\t" \
5194 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5195 VALGRIND_CALL_NOREDIR_T9 \
5197 : /*out*/ "=r" (_res) \
5198 : /*in*/ "r" (&_argvec[0]) \
5199 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5201 lval = (__typeof__(lval)) _res; \
/* Call a 7 word-arg function: args in $4..$10, target in $25 (t9).
   NOTE(review): fragmentary extract -- the "arg7)" continuation of the
   parameter list and the asm wrapper lines are missing; restore from
   upstream valgrind.h. */
5204 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5207 volatile OrigFn _orig = (orig); \
5208 volatile unsigned long _argvec[8]; \
5209 volatile unsigned long _res; \
5210 _argvec[0] = (unsigned long)_orig.nraddr; \
5211 _argvec[1] = (unsigned long)(arg1); \
5212 _argvec[2] = (unsigned long)(arg2); \
5213 _argvec[3] = (unsigned long)(arg3); \
5214 _argvec[4] = (unsigned long)(arg4); \
5215 _argvec[5] = (unsigned long)(arg5); \
5216 _argvec[6] = (unsigned long)(arg6); \
5217 _argvec[7] = (unsigned long)(arg7); \
5219 "ld $4, 8(%1)\n\t" \
5220 "ld $5, 16(%1)\n\t" \
5221 "ld $6, 24(%1)\n\t" \
5222 "ld $7, 32(%1)\n\t" \
5223 "ld $8, 40(%1)\n\t" \
5224 "ld $9, 48(%1)\n\t" \
5225 "ld $10, 56(%1)\n\t" \
5226 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5227 VALGRIND_CALL_NOREDIR_T9 \
5229 : /*out*/ "=r" (_res) \
5230 : /*in*/ "r" (&_argvec[0]) \
5231 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5233 lval = (__typeof__(lval)) _res; \
/* Call an 8 word-arg function: args fill all eight argument registers
   $4..$11 (a0..a7 under n64); target in $25 (t9).
   NOTE(review): fragmentary extract -- parameter-list continuation and asm
   wrapper lines missing; restore from upstream valgrind.h. */
5236 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5239 volatile OrigFn _orig = (orig); \
5240 volatile unsigned long _argvec[9]; \
5241 volatile unsigned long _res; \
5242 _argvec[0] = (unsigned long)_orig.nraddr; \
5243 _argvec[1] = (unsigned long)(arg1); \
5244 _argvec[2] = (unsigned long)(arg2); \
5245 _argvec[3] = (unsigned long)(arg3); \
5246 _argvec[4] = (unsigned long)(arg4); \
5247 _argvec[5] = (unsigned long)(arg5); \
5248 _argvec[6] = (unsigned long)(arg6); \
5249 _argvec[7] = (unsigned long)(arg7); \
5250 _argvec[8] = (unsigned long)(arg8); \
5252 "ld $4, 8(%1)\n\t" \
5253 "ld $5, 16(%1)\n\t" \
5254 "ld $6, 24(%1)\n\t" \
5255 "ld $7, 32(%1)\n\t" \
5256 "ld $8, 40(%1)\n\t" \
5257 "ld $9, 48(%1)\n\t" \
5258 "ld $10, 56(%1)\n\t" \
5259 "ld $11, 64(%1)\n\t" \
5260 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5261 VALGRIND_CALL_NOREDIR_T9 \
5263 : /*out*/ "=r" (_res) \
5264 : /*in*/ "r" (&_argvec[0]) \
5265 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5267 lval = (__typeof__(lval)) _res; \
/* Call a 9 word-arg function: first 8 args in $4..$11, the 9th spilled to
   the stack ($29 is the MIPS stack pointer; dsubu/daddu make room for one
   8-byte slot around the call). $4 is reused as a scratch register for the
   spill before being loaded with arg1.
   NOTE(review): fragmentary extract -- parameter-list continuation and asm
   wrapper lines missing; restore from upstream valgrind.h. */
5270 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5273 volatile OrigFn _orig = (orig); \
5274 volatile unsigned long _argvec[10]; \
5275 volatile unsigned long _res; \
5276 _argvec[0] = (unsigned long)_orig.nraddr; \
5277 _argvec[1] = (unsigned long)(arg1); \
5278 _argvec[2] = (unsigned long)(arg2); \
5279 _argvec[3] = (unsigned long)(arg3); \
5280 _argvec[4] = (unsigned long)(arg4); \
5281 _argvec[5] = (unsigned long)(arg5); \
5282 _argvec[6] = (unsigned long)(arg6); \
5283 _argvec[7] = (unsigned long)(arg7); \
5284 _argvec[8] = (unsigned long)(arg8); \
5285 _argvec[9] = (unsigned long)(arg9); \
5287 "dsubu $29, $29, 8\n\t" \
5288 "ld $4, 72(%1)\n\t" \
5289 "sd $4, 0($29)\n\t" \
5290 "ld $4, 8(%1)\n\t" \
5291 "ld $5, 16(%1)\n\t" \
5292 "ld $6, 24(%1)\n\t" \
5293 "ld $7, 32(%1)\n\t" \
5294 "ld $8, 40(%1)\n\t" \
5295 "ld $9, 48(%1)\n\t" \
5296 "ld $10, 56(%1)\n\t" \
5297 "ld $11, 64(%1)\n\t" \
5298 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5299 VALGRIND_CALL_NOREDIR_T9 \
5300 "daddu $29, $29, 8\n\t" \
5302 : /*out*/ "=r" (_res) \
5303 : /*in*/ "r" (&_argvec[0]) \
5304 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5306 lval = (__typeof__(lval)) _res; \
/* Call a 10 word-arg function: first 8 args in $4..$11, args 9-10 spilled
   to two 8-byte stack slots (sp adjusted by 16 around the call).
   NOTE(review): fragmentary extract -- asm wrapper lines missing; restore
   from upstream valgrind.h. */
5309 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5310 arg7,arg8,arg9,arg10) \
5312 volatile OrigFn _orig = (orig); \
5313 volatile unsigned long _argvec[11]; \
5314 volatile unsigned long _res; \
5315 _argvec[0] = (unsigned long)_orig.nraddr; \
5316 _argvec[1] = (unsigned long)(arg1); \
5317 _argvec[2] = (unsigned long)(arg2); \
5318 _argvec[3] = (unsigned long)(arg3); \
5319 _argvec[4] = (unsigned long)(arg4); \
5320 _argvec[5] = (unsigned long)(arg5); \
5321 _argvec[6] = (unsigned long)(arg6); \
5322 _argvec[7] = (unsigned long)(arg7); \
5323 _argvec[8] = (unsigned long)(arg8); \
5324 _argvec[9] = (unsigned long)(arg9); \
5325 _argvec[10] = (unsigned long)(arg10); \
5327 "dsubu $29, $29, 16\n\t" \
5328 "ld $4, 72(%1)\n\t" \
5329 "sd $4, 0($29)\n\t" \
5330 "ld $4, 80(%1)\n\t" \
5331 "sd $4, 8($29)\n\t" \
5332 "ld $4, 8(%1)\n\t" \
5333 "ld $5, 16(%1)\n\t" \
5334 "ld $6, 24(%1)\n\t" \
5335 "ld $7, 32(%1)\n\t" \
5336 "ld $8, 40(%1)\n\t" \
5337 "ld $9, 48(%1)\n\t" \
5338 "ld $10, 56(%1)\n\t" \
5339 "ld $11, 64(%1)\n\t" \
5340 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5341 VALGRIND_CALL_NOREDIR_T9 \
5342 "daddu $29, $29, 16\n\t" \
5344 : /*out*/ "=r" (_res) \
5345 : /*in*/ "r" (&_argvec[0]) \
5346 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5348 lval = (__typeof__(lval)) _res; \
/* Call an 11 word-arg function: first 8 args in $4..$11, args 9-11 spilled
   to three 8-byte stack slots (sp adjusted by 24 around the call).
   NOTE(review): fragmentary extract -- the "arg11)" continuation of the
   parameter list and the asm wrapper lines are missing; restore from
   upstream valgrind.h. */
5351 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5352 arg6,arg7,arg8,arg9,arg10, \
5355 volatile OrigFn _orig = (orig); \
5356 volatile unsigned long _argvec[12]; \
5357 volatile unsigned long _res; \
5358 _argvec[0] = (unsigned long)_orig.nraddr; \
5359 _argvec[1] = (unsigned long)(arg1); \
5360 _argvec[2] = (unsigned long)(arg2); \
5361 _argvec[3] = (unsigned long)(arg3); \
5362 _argvec[4] = (unsigned long)(arg4); \
5363 _argvec[5] = (unsigned long)(arg5); \
5364 _argvec[6] = (unsigned long)(arg6); \
5365 _argvec[7] = (unsigned long)(arg7); \
5366 _argvec[8] = (unsigned long)(arg8); \
5367 _argvec[9] = (unsigned long)(arg9); \
5368 _argvec[10] = (unsigned long)(arg10); \
5369 _argvec[11] = (unsigned long)(arg11); \
5371 "dsubu $29, $29, 24\n\t" \
5372 "ld $4, 72(%1)\n\t" \
5373 "sd $4, 0($29)\n\t" \
5374 "ld $4, 80(%1)\n\t" \
5375 "sd $4, 8($29)\n\t" \
5376 "ld $4, 88(%1)\n\t" \
5377 "sd $4, 16($29)\n\t" \
5378 "ld $4, 8(%1)\n\t" \
5379 "ld $5, 16(%1)\n\t" \
5380 "ld $6, 24(%1)\n\t" \
5381 "ld $7, 32(%1)\n\t" \
5382 "ld $8, 40(%1)\n\t" \
5383 "ld $9, 48(%1)\n\t" \
5384 "ld $10, 56(%1)\n\t" \
5385 "ld $11, 64(%1)\n\t" \
5386 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5387 VALGRIND_CALL_NOREDIR_T9 \
5388 "daddu $29, $29, 24\n\t" \
5390 : /*out*/ "=r" (_res) \
5391 : /*in*/ "r" (&_argvec[0]) \
5392 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5394 lval = (__typeof__(lval)) _res; \
/* Call a 12 word-arg function: first 8 args in $4..$11, args 9-12 spilled
   to four 8-byte stack slots (sp adjusted by 32 around the call).
   NOTE(review): fragmentary extract -- the "arg11,arg12)" continuation of
   the parameter list and the asm wrapper lines are missing; restore from
   upstream valgrind.h. */
5397 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5398 arg6,arg7,arg8,arg9,arg10, \
5401 volatile OrigFn _orig = (orig); \
5402 volatile unsigned long _argvec[13]; \
5403 volatile unsigned long _res; \
5404 _argvec[0] = (unsigned long)_orig.nraddr; \
5405 _argvec[1] = (unsigned long)(arg1); \
5406 _argvec[2] = (unsigned long)(arg2); \
5407 _argvec[3] = (unsigned long)(arg3); \
5408 _argvec[4] = (unsigned long)(arg4); \
5409 _argvec[5] = (unsigned long)(arg5); \
5410 _argvec[6] = (unsigned long)(arg6); \
5411 _argvec[7] = (unsigned long)(arg7); \
5412 _argvec[8] = (unsigned long)(arg8); \
5413 _argvec[9] = (unsigned long)(arg9); \
5414 _argvec[10] = (unsigned long)(arg10); \
5415 _argvec[11] = (unsigned long)(arg11); \
5416 _argvec[12] = (unsigned long)(arg12); \
5418 "dsubu $29, $29, 32\n\t" \
5419 "ld $4, 72(%1)\n\t" \
5420 "sd $4, 0($29)\n\t" \
5421 "ld $4, 80(%1)\n\t" \
5422 "sd $4, 8($29)\n\t" \
5423 "ld $4, 88(%1)\n\t" \
5424 "sd $4, 16($29)\n\t" \
5425 "ld $4, 96(%1)\n\t" \
5426 "sd $4, 24($29)\n\t" \
5427 "ld $4, 8(%1)\n\t" \
5428 "ld $5, 16(%1)\n\t" \
5429 "ld $6, 24(%1)\n\t" \
5430 "ld $7, 32(%1)\n\t" \
5431 "ld $8, 40(%1)\n\t" \
5432 "ld $9, 48(%1)\n\t" \
5433 "ld $10, 56(%1)\n\t" \
5434 "ld $11, 64(%1)\n\t" \
5435 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5436 VALGRIND_CALL_NOREDIR_T9 \
5437 "daddu $29, $29, 32\n\t" \
5439 : /*out*/ "=r" (_res) \
5440 : /*in*/ "r" (&_argvec[0]) \
5441 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5443 lval = (__typeof__(lval)) _res; \
5446 #endif /* PLAT_mips64_linux */
5449 /* ------------------------------------------------------------------ */
5450 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
5452 /* ------------------------------------------------------------------ */
5454 /* Some request codes. There are many more of these, but most are not
5455 exposed to end-user view. These are the public ones, all of the
5456 form 0x1000 + small_number.
5458 Core ones are in the range 0x00000000--0x0000ffff. The non-public
5459 ones start at 0x2000.
5462 /* These macros are used by tools -- they must be public, but don't
5463 embed them into other programs. */
/* VG_USERREQ_TOOL_BASE packs two characters (a tool's two-letter code)
   into the top 16 bits of a request number; VG_IS_TOOL_USERREQ tests
   whether request value v belongs to the tool identified by (a,b). */
5464 #define VG_USERREQ_TOOL_BASE(a,b) \
5465 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
5466 #define VG_IS_TOOL_USERREQ(a, b, v) \
5467 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
5469 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
5470 This enum comprises an ABI exported by Valgrind to programs
5471 which use client requests. DO NOT CHANGE THE ORDER OF THESE
5472 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* Public client-request codes, grouped by subsystem (0x1000 + n).
   This enum is an exported ABI: entries must never be reordered or
   deleted (see the ABIWARNING above).
   NOTE(review): the enum's closing "};" (upstream line 5544) is not
   present in this extract -- confirm against the upstream header. */
5474 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
5475 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
5477 /* These allow any function to be called from the simulated
5478 CPU but run on the real CPU. Nb: the first arg passed to
5479 the function is always the ThreadId of the running
5480 thread! So CLIENT_CALL0 actually requires a 1 arg
5482 VG_USERREQ__CLIENT_CALL0 = 0x1101,
5483 VG_USERREQ__CLIENT_CALL1 = 0x1102,
5484 VG_USERREQ__CLIENT_CALL2 = 0x1103,
5485 VG_USERREQ__CLIENT_CALL3 = 0x1104,
5487 /* Can be useful in regression testing suites -- eg. can
5488 send Valgrind's output to /dev/null and still count
5490 VG_USERREQ__COUNT_ERRORS = 0x1201,
5492 /* Allows the client program and/or gdbserver to execute a monitor
5494 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
5496 /* These are useful and can be interpreted by any tool that
5497 tracks malloc() et al, by using vg_replace_malloc.c. */
5498 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
5499 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
5500 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
5501 /* Memory pool support. */
5502 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
5503 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
5504 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
5505 VG_USERREQ__MEMPOOL_FREE = 0x1306,
5506 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
5507 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
5508 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
5509 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
5511 /* Allow printfs to valgrind log. */
5512 /* The first two pass the va_list argument by value, which
5513 assumes it is the same size as or smaller than a UWord,
5514 which generally isn't the case. Hence are deprecated.
5515 The second two pass the vargs by reference and so are
5516 immune to this problem. */
5517 /* both :: char* fmt, va_list vargs (DEPRECATED) */
5518 VG_USERREQ__PRINTF = 0x1401,
5519 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
5520 /* both :: char* fmt, va_list* vargs */
5521 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
5522 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
5524 /* Stack support. */
5525 VG_USERREQ__STACK_REGISTER = 0x1501,
5526 VG_USERREQ__STACK_DEREGISTER = 0x1502,
5527 VG_USERREQ__STACK_CHANGE = 0x1503,
5530 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
5532 /* Querying of debug info. */
5533 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
5535 /* Disable/enable error reporting level. Takes a single
5536 Word arg which is the delta to this thread's error
5537 disablement indicator. Hence 1 disables or further
5538 disables errors, and -1 moves back towards enablement.
5539 Other values are not allowed. */
5540 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
5542 /* Initialise IR injection */
5543 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
5546 #if !defined(__GNUC__)
5547 # define __extension__ /* */
5551 /* Returns the number of Valgrinds this code is running under. That
5552 is, 0 if running natively, 1 if running under Valgrind, 2 if
5553 running under Valgrind which is running under another Valgrind,
/* Evaluates to the Valgrind nesting depth (0 when run natively).
   NOTE(review): the request's trailing argument lines and closing paren
   (upstream lines 5558-5560) are missing from this extract. */
5555 #define RUNNING_ON_VALGRIND \
5556 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
5557 VG_USERREQ__RUNNING_ON_VALGRIND, \
5561 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
5562 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
5563 since it provides a way to make sure valgrind will retranslate the
5564 invalidated area. Returns no value. */
/* Ask Valgrind to drop its translations for [_qzz_addr, _qzz_addr +
   _qzz_len - 1] so the range is retranslated on next execution. */
5565 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
5566 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
5567 _qzz_addr, _qzz_len, 0, 0, 0)
5570 /* These requests are for getting Valgrind itself to print something.
5571 Possibly with a backtrace. This is a really ugly hack. The return value
5572 is the number of characters printed, excluding the "**<pid>** " part at the
5573 start and the backtrace (if present). */
/* printf-to-the-Valgrind-log wrapper; returns characters printed (0 when
   not running under Valgrind).
   NOTE(review): in the #if on the next line, && binds tighter than ||, so
   it reads GNUC || (INTEL && !MSC) -- probably wants explicit parens;
   confirm against upstream intent.
   NOTE(review): this function is fragmentary in this extract -- the MSC
   attribute branch, va_list declaration, NVALGRIND return, va_end and the
   function's braces are missing; restore from upstream valgrind.h. */
5575 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5576 /* Modern GCC will optimize the static routine out if unused,
5577 and unused attribute will shut down warnings about it. */
5578 static int VALGRIND_PRINTF(const char *format, ...)
5579 __attribute__((format(__printf__, 1, 2), __unused__));
5582 #if defined(_MSC_VER)
5585 VALGRIND_PRINTF(const char *format, ...)
5587 #if defined(NVALGRIND)
5589 #else /* NVALGRIND */
5590 #if defined(_MSC_VER) || defined(__MINGW64__)
5593 unsigned long _qzz_res;
5596 va_start(vargs, format);
5597 #if defined(_MSC_VER) || defined(__MINGW64__)
5598 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5599 VG_USERREQ__PRINTF_VALIST_BY_REF,
5604 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5605 VG_USERREQ__PRINTF_VALIST_BY_REF,
5606 (unsigned long)format,
5607 (unsigned long)&vargs,
5611 return (int)_qzz_res;
5612 #endif /* NVALGRIND */
/* Like VALGRIND_PRINTF but also emits a backtrace of the calling thread.
   NOTE(review): same && / || precedence question as the #if above, and the
   same fragmentary state (MSC branch, va_list declaration, va_end and the
   function braces missing) -- restore from upstream valgrind.h. */
5615 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5616 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5617 __attribute__((format(__printf__, 1, 2), __unused__));
5620 #if defined(_MSC_VER)
5623 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5625 #if defined(NVALGRIND)
5627 #else /* NVALGRIND */
5628 #if defined(_MSC_VER) || defined(__MINGW64__)
5631 unsigned long _qzz_res;
5634 va_start(vargs, format);
5635 #if defined(_MSC_VER) || defined(__MINGW64__)
5636 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5637 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5642 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5643 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5644 (unsigned long)format,
5645 (unsigned long)&vargs,
5649 return (int)_qzz_res;
5650 #endif /* NVALGRIND */
5654 /* These requests allow control to move from the simulated CPU to the
5655 real CPU, calling an arbitrary function.
5657 Note that the current ThreadId is inserted as the first argument.
5660 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5662 requires f to have this signature:
5664 Word f(Word tid, Word arg1, Word arg2)
5666 where "Word" is a word-sized type.
5668 Note that these client requests are not entirely reliable. For example,
5669 if you call a function with them that subsequently calls printf(),
5670 there's a high chance Valgrind will crash. Generally, your prospects of
5671 these working are made higher if the called function does not refer to
5672 any global variables, and does not refer to any libc or other functions
5673 (printf et al). Any kind of entanglement with libc or dynamic linking is
5674 likely to have a bad outcome, for tricky reasons which we've grappled
5675 with a lot in the past.
/* Run _qyy_fn on the real CPU with 0..3 extra word args; Valgrind always
   prepends the current ThreadId as the function's first argument.
   NOTE(review): the continuation lines carrying the _qyy_fn argument
   (upstream 5680-5681, 5686-5687, 5692, 5698) and CALL3's final arg line
   are missing from this extract -- restore from upstream valgrind.h. */
5677 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
5678 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5679 VG_USERREQ__CLIENT_CALL0, \
5683 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
5684 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5685 VG_USERREQ__CLIENT_CALL1, \
5689 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
5690 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5691 VG_USERREQ__CLIENT_CALL2, \
5693 _qyy_arg1, _qyy_arg2, 0, 0)
5695 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
5696 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5697 VG_USERREQ__CLIENT_CALL3, \
5699 _qyy_arg1, _qyy_arg2, \
5703 /* Counts the number of errors that have been recorded by a tool. Nb:
5704 the tool must record the errors with VG_(maybe_record_error)() or
5705 VG_(unique_error)() for them to be counted. */
/* Evaluates to the number of errors the tool has recorded so far.
   NOTE(review): the request's trailing zero-argument lines and closing
   paren (upstream 5710) are missing from this extract. */
5706 #define VALGRIND_COUNT_ERRORS \
5707 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
5708 0 /* default return */, \
5709 VG_USERREQ__COUNT_ERRORS, \
5712 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5713 when heap blocks are allocated in order to give accurate results. This
5714 happens automatically for the standard allocator functions such as
5715 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5718 But if your program uses a custom allocator, this doesn't automatically
5719 happen, and Valgrind will not do as well. For example, if you allocate
5720 superblocks with mmap() and then allocate chunks of the superblocks, all
5721 Valgrind's observations will be at the mmap() level and it won't know that
5722 the chunks should be considered separate entities. In Memcheck's case,
5723 that means you probably won't get heap block overrun detection (because
5724 there won't be redzones marked as unaddressable) and you definitely won't
5725 get any leak detection.
5727 The following client requests allow a custom allocator to be annotated so
5728 that it can be handled accurately by Valgrind.
5730 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5731 by a malloc()-like function. For Memcheck (an illustrative case), this
5734 - It records that the block has been allocated. This means any addresses
5735 within the block mentioned in error messages will be
5736 identified as belonging to the block. It also means that if the block
5737 isn't freed it will be detected by the leak checker.
5739 - It marks the block as being addressable and undefined (if 'is_zeroed' is
5740 not set), or addressable and defined (if 'is_zeroed' is set). This
5741 controls how accesses to the block by the program are handled.
5743 'addr' is the start of the usable block (ie. after any
5744 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5745 can apply redzones -- these are blocks of padding at the start and end of
5746 each block. Adding redzones is recommended as it makes it much more likely
5747 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5748 zeroed (or filled with another predictable value), as is the case for
5751 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5752 heap block -- that will be used by the client program -- is allocated.
5753 It's best to put it at the outermost level of the allocator if possible;
5754 for example, if you have a function my_alloc() which calls
5755 internal_alloc(), and the client request is put inside internal_alloc(),
5756 stack traces relating to the heap block will contain entries for both
5757 my_alloc() and internal_alloc(), which is probably not what you want.
5759 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5760 custom blocks from within a heap block, B, that has been allocated with
5761 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5762 -- the custom blocks will take precedence.
5764 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5765 Memcheck, it does two things:
5767 - It records that the block has been deallocated. This assumes that the
5768 block was annotated as having been allocated via
5769 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5771 - It marks the block as being unaddressable.
5773 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5774 heap block is deallocated.
5776 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5777 Memcheck, it does four things:
5779 - It records that the size of a block has been changed. This assumes that
5780 the block was annotated as having been allocated via
5781 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5783 - If the block shrunk, it marks the freed memory as being unaddressable.
5785 - If the block grew, it marks the new area as undefined and defines a red
5786 zone past the end of the new block.
5788 - The V-bits of the overlap between the old and the new block are preserved.
5790 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5791 and before deallocation of the old block.
5793 In many cases, these three client requests will not be enough to get your
5794 allocator working well with Memcheck. More specifically, if your allocator
5795 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5796 will be necessary to mark the memory as addressable just before the zeroing
5797 occurs, otherwise you'll get a lot of invalid write errors. For example,
5798 you'll need to do this if your allocator recycles freed blocks, but it
5799 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5800 Alternatively, if your allocator reuses freed blocks for allocator-internal
5801 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5803 Really, what's happening is a blurring of the lines between the client
5804 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5805 memory should be considered unaddressable to the client program, but the
5806 allocator knows more than the rest of the client program and so may be able
5807 to safely access it. Extra client requests are necessary for Valgrind to
5808 understand the distinction between the allocator and the rest of the
5811 Ignored if addr == 0.
/* Custom-allocator annotation requests (see the long comment above).
   NOTE(review): several macros here are missing their trailing argument
   lines in this extract (FREELIKE_BLOCK's "addr, rzB, 0, 0, 0)",
   DESTROY_MEMPOOL's "pool, 0, 0, 0, 0)", MEMPOOL_EXISTS's final args) --
   restore from upstream valgrind.h. */
5813 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
5814 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
5815 addr, sizeB, rzB, is_zeroed, 0)
5817 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5818 Ignored if addr == 0.
5820 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
5821 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
5822 addr, oldSizeB, newSizeB, rzB, 0)
5824 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5825 Ignored if addr == 0.
5827 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
5828 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
5831 /* Create a memory pool. */
5832 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
5833 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
5834 pool, rzB, is_zeroed, 0, 0)
5836 /* Destroy a memory pool. */
5837 #define VALGRIND_DESTROY_MEMPOOL(pool) \
5838 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
5841 /* Associate a piece of memory with a memory pool. */
5842 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
5843 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
5844 pool, addr, size, 0, 0)
5846 /* Disassociate a piece of memory from a memory pool. */
5847 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
5848 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
5849 pool, addr, 0, 0, 0)
5851 /* Disassociate any pieces outside a particular range. */
5852 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
5853 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
5854 pool, addr, size, 0, 0)
5856 /* Move a mempool: reassociate everything tracked for poolA with poolB. */
5857 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
5858 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
5859 poolA, poolB, 0, 0, 0)
5861 /* Resize and/or move a piece associated with a memory pool. */
5862 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
5863 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
5864 pool, addrA, addrB, size, 0)
5866 /* Return 1 if a mempool exists, else 0. */
5867 #define VALGRIND_MEMPOOL_EXISTS(pool) \
5868 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5869 VG_USERREQ__MEMPOOL_EXISTS, \
/* Stack registration, Wine PDB debug-info and IP-to-source queries.
   NOTE(review): STACK_DEREGISTER is missing its trailing argument line
   (upstream "id, 0, 0, 0, 0)") in this extract -- restore from upstream
   valgrind.h. */
5872 /* Mark a piece of memory as being a stack. Returns a stack id. */
5873 #define VALGRIND_STACK_REGISTER(start, end) \
5874 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5875 VG_USERREQ__STACK_REGISTER, \
5876 start, end, 0, 0, 0)
5878 /* Unmark the piece of memory associated with a stack id as being a
5880 #define VALGRIND_STACK_DEREGISTER(id) \
5881 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
5884 /* Change the start and end address of the stack id. */
5885 #define VALGRIND_STACK_CHANGE(id, start, end) \
5886 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
5887 id, start, end, 0, 0)
5889 /* Load PDB debug info for Wine PE image_map. */
5890 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
5891 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
5892 fd, ptr, total_size, delta, 0)
5894 /* Map a code address to a source file name and line number. buf64
5895 must point to a 64-byte buffer in the caller's address space. The
5896 result will be dumped in there and is guaranteed to be zero
5897 terminated. If no info is found, the first byte is set to zero. */
5898 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
5899 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5900 VG_USERREQ__MAP_IP_TO_SRCLOC, \
5901 addr, buf64, 0, 0, 0)
5903 /* Disable error reporting for this thread. Behaves in a stack like
5904 way, so you can safely call this multiple times provided that
5905 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
5906 to re-enable reporting. The first call of this macro disables
5907 reporting. Subsequent calls have no effect except to increase the
5908 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
5909 reporting. Child threads do not inherit this setting from their
5910 parents -- they are always created with reporting enabled. */
/* Per-thread error-reporting toggles (stack-like, per the comment above)
   and the gdbserver monitor-command request.
   NOTE(review): the delta argument lines for DISABLE (+1) and ENABLE (-1)
   (upstream 5913 and 5919) are missing from this extract -- restore from
   upstream valgrind.h. */
5911 #define VALGRIND_DISABLE_ERROR_REPORTING \
5912 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5915 /* Re-enable error reporting, as per comments on
5916 VALGRIND_DISABLE_ERROR_REPORTING. */
5917 #define VALGRIND_ENABLE_ERROR_REPORTING \
5918 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5921 /* Execute a monitor command from the client program.
5922 If a connection is opened with GDB, the output will be sent
5923 according to the output mode set for vgdb.
5924 If no connection is opened, output will go to the log output.
5925 Returns 1 if command not recognised, 0 otherwise. */
5926 #define VALGRIND_MONITOR_COMMAND(command) \
5927 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
5928 command, 0, 0, 0, 0)
/* Tear down the internal PLAT_* platform selectors so this header leaves
   no macro residue in including translation units. */
5931 #undef PLAT_x86_darwin
5932 #undef PLAT_amd64_darwin
5933 #undef PLAT_x86_win32
5934 #undef PLAT_amd64_win64
5935 #undef PLAT_x86_linux
5936 #undef PLAT_amd64_linux
5937 #undef PLAT_ppc32_linux
5938 #undef PLAT_ppc64_linux
5939 #undef PLAT_arm_linux
5940 #undef PLAT_s390x_linux
5941 #undef PLAT_mips32_linux
5942 #undef PLAT_mips64_linux
5944 #endif /* __VALGRIND_H */