2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
15 Copyright (C) 2000-2013 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 10
100 /* Nb: this file might be included in a file compiled with -ansi. So
101 we can't use C++ style "//" comments nor the "asm" keyword (instead
104 /* Derive some tags indicating what the target platform is. Note
105 that in this file we're using the compiler's CPP symbols for
106 identifying architectures, which are different to the ones we use
107 within the rest of Valgrind. Note, __powerpc__ is active for both
108 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
109 latter (on Linux, that is).
111 Misc note: how to find out what's predefined in gcc by default:
112 gcc -Wp,-dM somefile.c
114 #undef PLAT_x86_darwin
115 #undef PLAT_amd64_darwin
116 #undef PLAT_x86_win32
117 #undef PLAT_amd64_win64
118 #undef PLAT_x86_linux
119 #undef PLAT_amd64_linux
120 #undef PLAT_ppc32_linux
121 #undef PLAT_ppc64_linux
122 #undef PLAT_arm_linux
123 #undef PLAT_arm64_linux
124 #undef PLAT_s390x_linux
125 #undef PLAT_mips32_linux
126 #undef PLAT_mips64_linux
129 #if defined(__APPLE__) && defined(__i386__)
130 # define PLAT_x86_darwin 1
131 #elif defined(__APPLE__) && defined(__x86_64__)
132 # define PLAT_amd64_darwin 1
133 #elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
134 || defined(__CYGWIN32__) \
135 || (defined(_WIN32) && defined(_M_IX86))
136 # define PLAT_x86_win32 1
137 #elif defined(__MINGW64__) \
138 || (defined(_WIN64) && defined(_M_X64))
139 # define PLAT_amd64_win64 1
140 #elif defined(__linux__) && defined(__i386__)
141 # define PLAT_x86_linux 1
142 #elif defined(__linux__) && defined(__x86_64__)
143 # define PLAT_amd64_linux 1
144 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
145 # define PLAT_ppc32_linux 1
146 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
147 # define PLAT_ppc64_linux 1
148 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
149 # define PLAT_arm_linux 1
150 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
151 # define PLAT_arm64_linux 1
152 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
153 # define PLAT_s390x_linux 1
154 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
155 # define PLAT_mips64_linux 1
156 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
157 # define PLAT_mips32_linux 1
159 /* If we're not compiling for our target platform, don't generate
161 # if !defined(NVALGRIND)
/* XXX: Unfortunately x64 Visual C++ does not support inline asms,
167 * so disable the use of valgrind's inline asm's for x64 Visual C++
168 * builds, so that x64 Visual C++ builds of GLib can be maintained
170 #if defined (PLAT_amd64_win64) && defined (_MSC_VER)
171 # if !defined(NVALGRIND)
177 /* ------------------------------------------------------------------ */
178 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
179 /* in here of use to end-users -- skip to the next section. */
180 /* ------------------------------------------------------------------ */
183 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
184 * request. Accepts both pointers and integers as arguments.
186 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
187 * client request that does not return a value.
189 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
190 * client request and whose value equals the client request result. Accepts
191 * both pointers and integers as arguments. Note that such calls are not
192 * necessarily pure functions -- they may have side effects.
/* Invoke Valgrind client request _zzq_request with five arguments and
   assign the result to the lvalue _zzq_rlval.  Statement wrapper around
   VALGRIND_DO_CLIENT_REQUEST_EXPR; _zzq_default is the value produced
   when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
/* Invoke a client request whose result is deliberately discarded (cast
   to void); 0 is passed as the default since no value is returned.
   The do/while(0) makes the macro usable as a single statement. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
208 #if defined(NVALGRIND)
210 /* Define NVALGRIND to completely remove the Valgrind magic sequence
211 from the compiled code (analogous to NDEBUG's effects on
213 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
214 _zzq_default, _zzq_request, \
215 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
218 #else /* ! NVALGRIND */
220 /* The following defines the magic code sequences which the JITter
221 spots and handles magically. Don't look too closely at them as
222 they will rot your brain.
224 The assembly code sequences for all architectures is in this one
225 file. This is because this file must be stand-alone, and we don't
226 want to have multiple files.
228 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
229 value gets put in the return slot, so that everything works when
230 this is executed not under Valgrind. Args are passed in a memory
231 block, and so there's no intrinsic limit to the number that could
232 be passed, but it's currently five.
235 _zzq_rlval result lvalue
236 _zzq_default default value (result returned when running on real CPU)
237 _zzq_request request code
238 _zzq_arg1..5 request params
240 The other two macros are used to support function wrapping, and are
241 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
242 guest's NRADDR pseudo-register and whatever other information is
243 needed to safely run the call original from the wrapper: on
244 ppc64-linux, the R2 value at the divert point is also needed. This
245 information is abstracted into a user-visible type, OrigFn.
247 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
248 guest, but guarantees that the branch instruction will not be
249 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
250 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
251 complete inline asm, since it needs to be combined with more magic
252 inline asm stuff to be useful.
255 /* ------------------------- x86-{linux,darwin} ---------------- */
257 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
258 || (defined(PLAT_x86_win32) && defined(__GNUC__))
262 unsigned int nraddr; /* where's the code? */
/* Magic marker sequence that Valgrind's JIT spots on x86.  The four
   rotate amounts sum to 64 (a multiple of 32), so %edi is left
   unchanged and the sequence is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                          \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"     \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"
270 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
271 _zzq_default, _zzq_request, \
272 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
274 ({volatile unsigned int _zzq_args[6]; \
275 volatile unsigned int _zzq_result; \
276 _zzq_args[0] = (unsigned int)(_zzq_request); \
277 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
278 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
279 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
280 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
281 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
282 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
283 /* %EDX = client_request ( %EAX ) */ \
284 "xchgl %%ebx,%%ebx" \
285 : "=d" (_zzq_result) \
286 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
292 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
293 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
294 volatile unsigned int __addr; \
295 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
296 /* %EAX = guest_NRADDR */ \
297 "xchgl %%ecx,%%ecx" \
302 _zzq_orig->nraddr = __addr; \
/* Asm text (not a complete inline asm -- it is pasted into the
   CALL_FN_* sequences below) for a call through %EAX that Valgrind
   must not redirect; the marker is the otherwise meaningless
   "xchgl %edx,%edx" following the preamble. */
#define VALGRIND_CALL_NOREDIR_EAX                               \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* call-noredir *%EAX */                   \
                     "xchgl %%edx,%%edx\n\t"
310 #define VALGRIND_VEX_INJECT_IR() \
312 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
313 "xchgl %%edi,%%edi\n\t" \
314 : : : "cc", "memory" \
318 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
320 /* ------------------------- x86-Win32 ------------------------- */
322 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
326 unsigned int nraddr; /* where's the code? */
330 #if defined(_MSC_VER)
332 #define __SPECIAL_INSTRUCTION_PREAMBLE \
333 __asm rol edi, 3 __asm rol edi, 13 \
334 __asm rol edi, 29 __asm rol edi, 19
336 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
337 _zzq_default, _zzq_request, \
338 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
339 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
340 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
341 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
342 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
344 static __inline uintptr_t
345 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
346 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
347 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
350 volatile uintptr_t _zzq_args[6];
351 volatile unsigned int _zzq_result;
352 _zzq_args[0] = (uintptr_t)(_zzq_request);
353 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
354 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
355 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
356 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
357 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
358 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
359 __SPECIAL_INSTRUCTION_PREAMBLE
360 /* %EDX = client_request ( %EAX ) */
362 __asm mov _zzq_result, edx
367 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
368 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
369 volatile unsigned int __addr; \
370 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
371 /* %EAX = guest_NRADDR */ \
373 __asm mov __addr, eax \
375 _zzq_orig->nraddr = __addr; \
378 #define VALGRIND_CALL_NOREDIR_EAX ERROR
380 #define VALGRIND_VEX_INJECT_IR() \
382 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
388 #error Unsupported compiler.
391 #endif /* PLAT_x86_win32 */
393 /* ------------------------ amd64-{linux,darwin} --------------- */
395 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
396 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
400 unsigned long long int nraddr; /* where's the code? */
/* Magic marker sequence that Valgrind's JIT spots on amd64.  The four
   rotate amounts sum to 128 (a multiple of 64), so %rdi is left
   unchanged and the sequence is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                          \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"     \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
408 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
409 _zzq_default, _zzq_request, \
410 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
412 ({ volatile unsigned long long int _zzq_args[6]; \
413 volatile unsigned long long int _zzq_result; \
414 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
415 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
416 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
417 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
418 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
419 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
420 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
421 /* %RDX = client_request ( %RAX ) */ \
422 "xchgq %%rbx,%%rbx" \
423 : "=d" (_zzq_result) \
424 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
430 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
431 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
432 volatile unsigned long long int __addr; \
433 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
434 /* %RAX = guest_NRADDR */ \
435 "xchgq %%rcx,%%rcx" \
440 _zzq_orig->nraddr = __addr; \
/* Asm text (pasted into the CALL_FN_* sequences) for a call through
   %RAX that Valgrind must not redirect; marked by the otherwise
   meaningless "xchgq %rdx,%rdx" after the preamble. */
#define VALGRIND_CALL_NOREDIR_RAX                               \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* call-noredir *%RAX */                   \
                     "xchgq %%rdx,%%rdx\n\t"
448 #define VALGRIND_VEX_INJECT_IR() \
450 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
451 "xchgq %%rdi,%%rdi\n\t" \
452 : : : "cc", "memory" \
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || (PLAT_amd64_win64 && __GNUC__) */
458 /* ------------------------- amd64-Win64 ------------------------- */
460 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
462 #error Unsupported compiler.
464 #endif /* PLAT_amd64_win64 */
466 /* ------------------------ ppc32-linux ------------------------ */
468 #if defined(PLAT_ppc32_linux)
472 unsigned int nraddr; /* where's the code? */
/* Magic marker sequence for ppc32: four rotations of r0 whose amounts
   sum to 64 (a multiple of 32), so r0 is unchanged and the sequence is
   a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                          \
                     "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
                     "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
480 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
481 _zzq_default, _zzq_request, \
482 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
485 ({ unsigned int _zzq_args[6]; \
486 unsigned int _zzq_result; \
487 unsigned int* _zzq_ptr; \
488 _zzq_args[0] = (unsigned int)(_zzq_request); \
489 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
490 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
491 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
492 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
493 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
494 _zzq_ptr = _zzq_args; \
495 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
496 "mr 4,%2\n\t" /*ptr*/ \
497 __SPECIAL_INSTRUCTION_PREAMBLE \
498 /* %R3 = client_request ( %R4 ) */ \
500 "mr %0,3" /*result*/ \
501 : "=b" (_zzq_result) \
502 : "b" (_zzq_default), "b" (_zzq_ptr) \
503 : "cc", "memory", "r3", "r4"); \
507 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
508 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
509 unsigned int __addr; \
510 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
511 /* %R3 = guest_NRADDR */ \
516 : "cc", "memory", "r3" \
518 _zzq_orig->nraddr = __addr; \
521 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
522 __SPECIAL_INSTRUCTION_PREAMBLE \
523 /* branch-and-link-to-noredir *%R11 */ \
526 #define VALGRIND_VEX_INJECT_IR() \
528 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
533 #endif /* PLAT_ppc32_linux */
535 /* ------------------------ ppc64-linux ------------------------ */
537 #if defined(PLAT_ppc64_linux)
541 unsigned long long int nraddr; /* where's the code? */
542 unsigned long long int r2; /* what tocptr do we need? */
/* Magic marker sequence for ppc64: four rotations of r0 whose amounts
   sum to 128 (a multiple of 64), so r0 is unchanged and the sequence
   is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                          \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"         \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
550 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
551 _zzq_default, _zzq_request, \
552 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
555 ({ unsigned long long int _zzq_args[6]; \
556 unsigned long long int _zzq_result; \
557 unsigned long long int* _zzq_ptr; \
558 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
559 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
560 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
561 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
562 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
563 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
564 _zzq_ptr = _zzq_args; \
565 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
566 "mr 4,%2\n\t" /*ptr*/ \
567 __SPECIAL_INSTRUCTION_PREAMBLE \
568 /* %R3 = client_request ( %R4 ) */ \
570 "mr %0,3" /*result*/ \
571 : "=b" (_zzq_result) \
572 : "b" (_zzq_default), "b" (_zzq_ptr) \
573 : "cc", "memory", "r3", "r4"); \
577 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
578 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
579 unsigned long long int __addr; \
580 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
581 /* %R3 = guest_NRADDR */ \
586 : "cc", "memory", "r3" \
588 _zzq_orig->nraddr = __addr; \
589 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
590 /* %R3 = guest_NRADDR_GPR2 */ \
595 : "cc", "memory", "r3" \
597 _zzq_orig->r2 = __addr; \
600 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
601 __SPECIAL_INSTRUCTION_PREAMBLE \
602 /* branch-and-link-to-noredir *%R11 */ \
605 #define VALGRIND_VEX_INJECT_IR() \
607 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
612 #endif /* PLAT_ppc64_linux */
614 /* ------------------------- arm-linux ------------------------- */
616 #if defined(PLAT_arm_linux)
620 unsigned int nraddr; /* where's the code? */
/* Magic marker sequence for arm: four rotations of r12 whose amounts
   sum to 64 (a multiple of 32), so r12 is unchanged and the sequence
   is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                                \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"      \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
628 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
629 _zzq_default, _zzq_request, \
630 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
633 ({volatile unsigned int _zzq_args[6]; \
634 volatile unsigned int _zzq_result; \
635 _zzq_args[0] = (unsigned int)(_zzq_request); \
636 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
637 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
638 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
639 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
640 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
641 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
642 "mov r4, %2\n\t" /*ptr*/ \
643 __SPECIAL_INSTRUCTION_PREAMBLE \
644 /* R3 = client_request ( R4 ) */ \
645 "orr r10, r10, r10\n\t" \
646 "mov %0, r3" /*result*/ \
647 : "=r" (_zzq_result) \
648 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
649 : "cc","memory", "r3", "r4"); \
653 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
654 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
655 unsigned int __addr; \
656 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
657 /* R3 = guest_NRADDR */ \
658 "orr r11, r11, r11\n\t" \
662 : "cc", "memory", "r3" \
664 _zzq_orig->nraddr = __addr; \
/* Asm text for a branch-and-link through R4 that Valgrind must not
   redirect; marked by the self-OR "orr r12,r12,r12" (a no-op on a
   real CPU) following the preamble. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* branch-and-link-to-noredir *%R4 */      \
                     "orr r12, r12, r12\n\t"
672 #define VALGRIND_VEX_INJECT_IR() \
674 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
675 "orr r9, r9, r9\n\t" \
676 : : : "cc", "memory" \
680 #endif /* PLAT_arm_linux */
682 /* ------------------------ arm64-linux ------------------------- */
684 #if defined(PLAT_arm64_linux)
688 unsigned long long int nraddr; /* where's the code? */
/* Magic marker sequence for arm64: four rotations of x12 whose
   amounts sum to 128 (a multiple of 64), so x12 is unchanged and the
   sequence is a no-op on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                          \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"       \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"
696 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
697 _zzq_default, _zzq_request, \
698 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
701 ({volatile unsigned long long int _zzq_args[6]; \
702 volatile unsigned long long int _zzq_result; \
703 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
704 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
705 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
706 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
707 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
708 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
709 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
710 "mov x4, %2\n\t" /*ptr*/ \
711 __SPECIAL_INSTRUCTION_PREAMBLE \
712 /* X3 = client_request ( X4 ) */ \
713 "orr x10, x10, x10\n\t" \
714 "mov %0, x3" /*result*/ \
715 : "=r" (_zzq_result) \
716 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
717 : "cc","memory", "x3", "x4"); \
721 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
722 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
723 unsigned long long int __addr; \
724 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
725 /* X3 = guest_NRADDR */ \
726 "orr x11, x11, x11\n\t" \
730 : "cc", "memory", "x3" \
732 _zzq_orig->nraddr = __addr; \
/* Asm text for a branch-and-link through X8 that Valgrind must not
   redirect; marked by the self-OR "orr x12,x12,x12" (a no-op on a
   real CPU) following the preamble. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* branch-and-link-to-noredir X8 */        \
                     "orr x12, x12, x12\n\t"
740 #define VALGRIND_VEX_INJECT_IR() \
742 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
743 "orr x9, x9, x9\n\t" \
744 : : : "cc", "memory" \
748 #endif /* PLAT_arm64_linux */
750 /* ------------------------ s390x-linux ------------------------ */
752 #if defined(PLAT_s390x_linux)
756 unsigned long long int nraddr; /* where's the code? */
760 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
761 * code. This detection is implemented in platform specific toIR.c
762 * (e.g. VEX/priv/guest_s390_decoder.c).
764 #define __SPECIAL_INSTRUCTION_PREAMBLE \
/* Marker opcodes emitted after __SPECIAL_INSTRUCTION_PREAMBLE on
   s390x.  Each "lr n,n" copies a register onto itself (a no-op on
   real hardware) and selects which pseudo-operation Valgrind's
   decoder performs. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"
775 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
776 _zzq_default, _zzq_request, \
777 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
779 ({volatile unsigned long long int _zzq_args[6]; \
780 volatile unsigned long long int _zzq_result; \
781 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
782 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
783 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
784 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
785 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
786 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
787 __asm__ volatile(/* r2 = args */ \
791 __SPECIAL_INSTRUCTION_PREAMBLE \
792 __CLIENT_REQUEST_CODE \
795 : "=d" (_zzq_result) \
796 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
797 : "cc", "2", "3", "memory" \
802 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
803 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
804 volatile unsigned long long int __addr; \
805 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
806 __GET_NR_CONTEXT_CODE \
810 : "cc", "3", "memory" \
812 _zzq_orig->nraddr = __addr; \
815 #define VALGRIND_CALL_NOREDIR_R1 \
816 __SPECIAL_INSTRUCTION_PREAMBLE \
819 #define VALGRIND_VEX_INJECT_IR() \
821 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
822 __VEX_INJECT_IR_CODE); \
825 #endif /* PLAT_s390x_linux */
827 /* ------------------------- mips32-linux ---------------- */
829 #if defined(PLAT_mips32_linux)
833 unsigned int nraddr; /* where's the code? */
841 #define __SPECIAL_INSTRUCTION_PREAMBLE \
842 "srl $0, $0, 13\n\t" \
843 "srl $0, $0, 29\n\t" \
844 "srl $0, $0, 3\n\t" \
847 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
848 _zzq_default, _zzq_request, \
849 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
851 ({ volatile unsigned int _zzq_args[6]; \
852 volatile unsigned int _zzq_result; \
853 _zzq_args[0] = (unsigned int)(_zzq_request); \
854 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
855 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
856 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
857 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
858 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
859 __asm__ volatile("move $11, %1\n\t" /*default*/ \
860 "move $12, %2\n\t" /*ptr*/ \
861 __SPECIAL_INSTRUCTION_PREAMBLE \
862 /* T3 = client_request ( T4 ) */ \
863 "or $13, $13, $13\n\t" \
864 "move %0, $11\n\t" /*result*/ \
865 : "=r" (_zzq_result) \
866 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
871 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
872 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
873 volatile unsigned int __addr; \
874 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
875 /* %t9 = guest_NRADDR */ \
876 "or $14, $14, $14\n\t" \
877 "move %0, $11" /*result*/ \
882 _zzq_orig->nraddr = __addr; \
/* Asm text for a call through $t9 that Valgrind must not redirect;
   marked by the self-OR "or $15,$15,$15" (a no-op on a real CPU)
   following the preamble. */
#define VALGRIND_CALL_NOREDIR_T9                                \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* call-noredir *%t9 */                    \
                     "or $15, $15, $15\n\t"
890 #define VALGRIND_VEX_INJECT_IR() \
892 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
893 "or $11, $11, $11\n\t" \
898 #endif /* PLAT_mips32_linux */
900 /* ------------------------- mips64-linux ---------------- */
902 #if defined(PLAT_mips64_linux)
906 unsigned long long nraddr; /* where's the code? */
/* Magic marker sequence for mips64: shifts of register $0 -- the
   hardwired zero register -- so the sequence has no effect when run
   on a real CPU. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                          \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"        \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
918 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
919 _zzq_default, _zzq_request, \
920 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
922 ({ volatile unsigned long long int _zzq_args[6]; \
923 volatile unsigned long long int _zzq_result; \
924 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
925 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
926 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
927 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
928 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
929 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
930 __asm__ volatile("move $11, %1\n\t" /*default*/ \
931 "move $12, %2\n\t" /*ptr*/ \
932 __SPECIAL_INSTRUCTION_PREAMBLE \
933 /* $11 = client_request ( $12 ) */ \
934 "or $13, $13, $13\n\t" \
935 "move %0, $11\n\t" /*result*/ \
936 : "=r" (_zzq_result) \
937 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
942 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
943 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
944 volatile unsigned long long int __addr; \
945 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
946 /* $11 = guest_NRADDR */ \
947 "or $14, $14, $14\n\t" \
948 "move %0, $11" /*result*/ \
952 _zzq_orig->nraddr = __addr; \
/* Asm text for a call through $25 ($t9) that Valgrind must not
   redirect; marked by the self-OR "or $15,$15,$15" (a no-op on a
   real CPU) following the preamble. */
#define VALGRIND_CALL_NOREDIR_T9                                \
                     __SPECIAL_INSTRUCTION_PREAMBLE             \
                     /* call-noredir $25 */                     \
                     "or $15, $15, $15\n\t"
960 #define VALGRIND_VEX_INJECT_IR() \
962 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
963 "or $11, $11, $11\n\t" \
967 #endif /* PLAT_mips64_linux */
969 /* Insert assembly code for other platforms here... */
971 #endif /* NVALGRIND */
974 /* ------------------------------------------------------------------ */
975 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
976 /* ugly. It's the least-worst tradeoff I can think of. */
977 /* ------------------------------------------------------------------ */
979 /* This section defines magic (a.k.a appalling-hack) macros for doing
980 guaranteed-no-redirection macros, so as to get from function
981 wrappers to the functions they are wrapping. The whole point is to
982 construct standard call sequences, but to do the call itself with a
983 special no-redirect call pseudo-instruction that the JIT
984 understands and handles specially. This section is long and
985 repetitious, and I can't see a way to make it shorter.
987 The naming scheme is as follows:
989 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
991 'W' stands for "word" and 'v' for "void". Hence there are
992 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
993 and for each, the possibility of returning a word-typed result, or
997 /* Use these to write the name of your wrapper. NOTE: duplicates
998 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1000 See pub_tool_redir.h for details -- normally you don't need to
1001 think about this, though. */
1003 /* Use an extra level of macroisation so as to ensure the soname/fnname
1004 args are fully macro-expanded before pasting them together. */
1005 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
/* Build the mangled symbol name for a wrapper of fnname in the
   library with the given soname.  "00000" is the default behaviour
   equivalence class tag; see pub_tool_redir.h for the ZU/ZZ encoding
   distinction. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
/* ZZ variant of I_WRAP_SONAME_FNNAME_ZU -- differs only in the
   encoding tag in the generated name; see pub_tool_redir.h for what
   ZU vs ZZ means for the soname/fnname encoding. */
#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1013 /* Use this macro from within a wrapper function to collect the
1014 context (address and possibly other info) of the original function.
1015 Once you have that you can then use it in one of the CALL_FN_
1016 macros. The type of the argument _lval is OrigFn. */
1017 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1019 /* Also provide end-user facilities for function replacement, rather
1020 than wrapping. A replacement function differs from a wrapper in
1021 that it has no way to get hold of the original function being
1022 called, and hence no way to call onwards to it. In a replacement
1023 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Build the mangled symbol name for a *replacement* (not a wrapper)
   of fnname in the library with the given soname; note the _vgr
   prefix versus _vgw for wrappers.  In a replacement,
   VALGRIND_GET_ORIG_FN always returns zero. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
/* ZZ variant of I_REPLACE_SONAME_FNNAME_ZU; see pub_tool_redir.h for
   the ZU/ZZ encoding distinction. */
#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1031 /* Derivatives of the main macros below, for calling functions
/* void-returning variants of the per-arity CALL_FN_W_* macros defined
   below: each forwards to the corresponding word-returning macro and
   discards the result into a volatile local (_junk) so the call
   itself cannot be optimised away. */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1066 /* ------------------------- x86-{linux,darwin} ---------------- */
1068 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1070 /* These regs are trashed by the hidden call. No need to mention eax
1071 as gcc can already see that, plus causes gcc to bomb. */
1072 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1074 /* Macros to save and align the stack before making a function
1075 call and restore it afterwards as gcc may not keep the stack
1076 pointer aligned if it doesn't realise calls are being made
1077 to other functions. */
1079 #define VALGRIND_ALIGN_STACK \
1080 "movl %%esp,%%edi\n\t" \
1081 "andl $0xfffffff0,%%esp\n\t"
1082 #define VALGRIND_RESTORE_STACK \
1083 "movl %%edi,%%esp\n\t"
/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */

/* Scheme common to all CALL_FN_ macros below: the target address and
   the word-sized args are copied into the volatile _argvec[] array
   ([0] = target, [1..n] = args); the array's address goes to the asm
   in %eax; args are pushed R-to-L from the array; then a no-redirect
   call is made to the target, whose result comes back in %eax.  An
   extra 'subl' pads the stack where needed so %esp stays 16-aligned
   at the call point. */

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* 5..7 arg variants; same scheme as above, with the alignment pad
   ('subl') chosen so esp is 16-aligned after all the pushes. */

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* 8..10 arg variants; same scheme as above. */

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1408 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1409 arg6,arg7,arg8,arg9,arg10, \
1412 volatile OrigFn _orig = (orig); \
1413 volatile unsigned long _argvec[12]; \
1414 volatile unsigned long _res; \
1415 _argvec[0] = (unsigned long)_orig.nraddr; \
1416 _argvec[1] = (unsigned long)(arg1); \
1417 _argvec[2] = (unsigned long)(arg2); \
1418 _argvec[3] = (unsigned long)(arg3); \
1419 _argvec[4] = (unsigned long)(arg4); \
1420 _argvec[5] = (unsigned long)(arg5); \
1421 _argvec[6] = (unsigned long)(arg6); \
1422 _argvec[7] = (unsigned long)(arg7); \
1423 _argvec[8] = (unsigned long)(arg8); \
1424 _argvec[9] = (unsigned long)(arg9); \
1425 _argvec[10] = (unsigned long)(arg10); \
1426 _argvec[11] = (unsigned long)(arg11); \
1428 VALGRIND_ALIGN_STACK \
1429 "subl $4, %%esp\n\t" \
1430 "pushl 44(%%eax)\n\t" \
1431 "pushl 40(%%eax)\n\t" \
1432 "pushl 36(%%eax)\n\t" \
1433 "pushl 32(%%eax)\n\t" \
1434 "pushl 28(%%eax)\n\t" \
1435 "pushl 24(%%eax)\n\t" \
1436 "pushl 20(%%eax)\n\t" \
1437 "pushl 16(%%eax)\n\t" \
1438 "pushl 12(%%eax)\n\t" \
1439 "pushl 8(%%eax)\n\t" \
1440 "pushl 4(%%eax)\n\t" \
1441 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1442 VALGRIND_CALL_NOREDIR_EAX \
1443 VALGRIND_RESTORE_STACK \
1444 : /*out*/ "=a" (_res) \
1445 : /*in*/ "a" (&_argvec[0]) \
1446 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1448 lval = (__typeof__(lval)) _res; \
1451 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1452 arg6,arg7,arg8,arg9,arg10, \
1455 volatile OrigFn _orig = (orig); \
1456 volatile unsigned long _argvec[13]; \
1457 volatile unsigned long _res; \
1458 _argvec[0] = (unsigned long)_orig.nraddr; \
1459 _argvec[1] = (unsigned long)(arg1); \
1460 _argvec[2] = (unsigned long)(arg2); \
1461 _argvec[3] = (unsigned long)(arg3); \
1462 _argvec[4] = (unsigned long)(arg4); \
1463 _argvec[5] = (unsigned long)(arg5); \
1464 _argvec[6] = (unsigned long)(arg6); \
1465 _argvec[7] = (unsigned long)(arg7); \
1466 _argvec[8] = (unsigned long)(arg8); \
1467 _argvec[9] = (unsigned long)(arg9); \
1468 _argvec[10] = (unsigned long)(arg10); \
1469 _argvec[11] = (unsigned long)(arg11); \
1470 _argvec[12] = (unsigned long)(arg12); \
1472 VALGRIND_ALIGN_STACK \
1473 "pushl 48(%%eax)\n\t" \
1474 "pushl 44(%%eax)\n\t" \
1475 "pushl 40(%%eax)\n\t" \
1476 "pushl 36(%%eax)\n\t" \
1477 "pushl 32(%%eax)\n\t" \
1478 "pushl 28(%%eax)\n\t" \
1479 "pushl 24(%%eax)\n\t" \
1480 "pushl 20(%%eax)\n\t" \
1481 "pushl 16(%%eax)\n\t" \
1482 "pushl 12(%%eax)\n\t" \
1483 "pushl 8(%%eax)\n\t" \
1484 "pushl 4(%%eax)\n\t" \
1485 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1486 VALGRIND_CALL_NOREDIR_EAX \
1487 VALGRIND_RESTORE_STACK \
1488 : /*out*/ "=a" (_res) \
1489 : /*in*/ "a" (&_argvec[0]) \
1490 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1492 lval = (__typeof__(lval)) _res; \
1495 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1497 /* ------------------------ amd64-{linux,darwin} --------------- */
1499 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1501 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1503 /* These regs are trashed by the hidden call. */
1504 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1505 "rdi", "r8", "r9", "r10", "r11"
1507 /* This is all pretty complex. It's so as to make stack unwinding
1508 work reliably. See bug 243270. The basic problem is the sub and
1509 add of 128 of %rsp in all of the following macros. If gcc believes
1510 the CFA is in %rsp, then unwinding may fail, because what's at the
1511 CFA is not what gcc "expected" when it constructs the CFIs for the
1512 places where the macros are instantiated.
1514 But we can't just add a CFI annotation to increase the CFA offset
1515 by 128, to match the sub of 128 from %rsp, because we don't know
1516 whether gcc has chosen %rsp as the CFA at that point, or whether it
1517 has chosen some other register (eg, %rbp). In the latter case,
1518 adding a CFI annotation to change the CFA offset is simply wrong.
1520 So the solution is to get hold of the CFA using
1521 __builtin_dwarf_cfa(), put it in a known register, and add a
1522 CFI annotation to say what the register is. We choose %rbp for
1523 this (perhaps perversely), because:
1525 (1) %rbp is already subject to unwinding. If a new register was
1526 chosen then the unwinder would have to unwind it in all stack
1527 traces, which is expensive, and
1529 (2) %rbp is already subject to precise exception updates in the
1530 JIT. If a new register was chosen, we'd have to have precise
1531 exceptions for it too, which reduces performance of the
1534 However .. one extra complication. We can't just whack the result
1535 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1536 list of trashed registers at the end of the inline assembly
1537 fragments; gcc won't allow %rbp to appear in that list. Hence
1538 instead we need to stash %rbp in %r15 for the duration of the asm,
1539 and say that %r15 is trashed instead. gcc seems happy to go with
1542 Oh .. and this all needs to be conditionalised so that it is
1543 unchanged from before this commit, when compiled with older gccs
1544 that don't support __builtin_dwarf_cfa. Furthermore, since
1545 this header file is freestanding, it has to be independent of
1546 config.h, and so the following conditionalisation cannot depend on
1547 configure time checks.
1549 Although it's not clear from
1550 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1551 this expression excludes Darwin.
1552 .cfi directives in Darwin assembly appear to be completely
1553 different and I haven't investigated how they work.
1555 For even more entertainment value, note we have to use the
1556 completely undocumented __builtin_dwarf_cfa(), which appears to
1557 really compute the CFA, whereas __builtin_frame_address(0) claims
1558 to but actually doesn't. See
1559 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1561 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1562 # define __FRAME_POINTER \
1563 ,"r"(__builtin_dwarf_cfa())
1564 # define VALGRIND_CFI_PROLOGUE \
1565 "movq %%rbp, %%r15\n\t" \
1566 "movq %2, %%rbp\n\t" \
1567 ".cfi_remember_state\n\t" \
1568 ".cfi_def_cfa rbp, 0\n\t"
1569 # define VALGRIND_CFI_EPILOGUE \
1570 "movq %%r15, %%rbp\n\t" \
1571 ".cfi_restore_state\n\t"
1573 # define __FRAME_POINTER
1574 # define VALGRIND_CFI_PROLOGUE
1575 # define VALGRIND_CFI_EPILOGUE
1578 /* Macros to save and align the stack before making a function
1579 call and restore it afterwards as gcc may not keep the stack
1580 pointer aligned if it doesn't realise calls are being made
1581 to other functions. */
1583 #define VALGRIND_ALIGN_STACK \
1584 "movq %%rsp,%%r14\n\t" \
1585 "andq $0xfffffffffffffff0,%%rsp\n\t"
1586 #define VALGRIND_RESTORE_STACK \
1587 "movq %%r14,%%rsp\n\t"
/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
   long) == 8. */

/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
   macros.  In order not to trash the stack redzone, we need to drop
   %rsp by 128 before the hidden call, and restore afterwards.  The
   nastyness is that it is only by luck that the stack still appears
   to be unwindable during the hidden call - since then the behaviour
   of any routine using this macro does not match what the CFI data
   says.  Sigh.

   Why is this important?  Imagine that a wrapper has a stack
   allocated local, and passes to the hidden call, a pointer to it.
   Because gcc does not know about the hidden call, it may allocate
   that local in the redzone.  Unfortunately the hidden call may then
   trash it before it comes to use it.  So we must step clear of the
   redzone, for the duration of the hidden call, to make it safe.

   Probably the same problem afflicts the other redzone-style ABIs too
   (ppc64-linux); but for those, the stack is
   self describing (none of this CFI nonsense) so at least messing
   with the stack pointer doesn't give a danger of non-unwindable
   stack. */

/* Scheme: the target address and args go into the volatile
   _argvec[] array ([0] = target, [1..n] = args), whose address is
   handed to the asm in %rax.  The first six args are loaded into
   rdi/rsi/rdx/rcx/r8/r9 from the array, then a no-redirect call is
   made; the result comes back in %rax. */

#define CALL_FN_W_v(lval, orig)                                        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[1];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                                  \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[2];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                            \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[3];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                      \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[4];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)                \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[5];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
/* 5..7 arg variants.  From 7 args on, the excess args are pushed on
   the stack R-to-L; the redzone skip is $136 rather than $128 when
   an odd number of pushes follows, keeping rsp 16-aligned. */

#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)             \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[6];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[7];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,        \
                                 arg7)                                 \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[8];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      _argvec[7] = (unsigned long)(arg7);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $136,%%rsp\n\t"                                         \
         "pushq 56(%%rax)\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
/* 8..10 arg variants; same scheme as above. */

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,        \
                                 arg7,arg8)                            \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[9];                               \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      _argvec[7] = (unsigned long)(arg7);                              \
      _argvec[8] = (unsigned long)(arg8);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "pushq 64(%%rax)\n\t"                                         \
         "pushq 56(%%rax)\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,        \
                                 arg7,arg8,arg9)                       \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[10];                              \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      _argvec[7] = (unsigned long)(arg7);                              \
      _argvec[8] = (unsigned long)(arg8);                              \
      _argvec[9] = (unsigned long)(arg9);                              \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $136,%%rsp\n\t"                                         \
         "pushq 72(%%rax)\n\t"                                         \
         "pushq 64(%%rax)\n\t"                                         \
         "pushq 56(%%rax)\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,       \
                                  arg7,arg8,arg9,arg10)                \
   do {                                                                \
      volatile OrigFn        _orig = (orig);                           \
      volatile unsigned long _argvec[11];                              \
      volatile unsigned long _res;                                     \
      _argvec[0] = (unsigned long)_orig.nraddr;                        \
      _argvec[1] = (unsigned long)(arg1);                              \
      _argvec[2] = (unsigned long)(arg2);                              \
      _argvec[3] = (unsigned long)(arg3);                              \
      _argvec[4] = (unsigned long)(arg4);                              \
      _argvec[5] = (unsigned long)(arg5);                              \
      _argvec[6] = (unsigned long)(arg6);                              \
      _argvec[7] = (unsigned long)(arg7);                              \
      _argvec[8] = (unsigned long)(arg8);                              \
      _argvec[9] = (unsigned long)(arg9);                              \
      _argvec[10] = (unsigned long)(arg10);                            \
      __asm__ volatile(                                                \
         VALGRIND_CFI_PROLOGUE                                         \
         VALGRIND_ALIGN_STACK                                          \
         "subq $128,%%rsp\n\t"                                         \
         "pushq 80(%%rax)\n\t"                                         \
         "pushq 72(%%rax)\n\t"                                         \
         "pushq 64(%%rax)\n\t"                                         \
         "pushq 56(%%rax)\n\t"                                         \
         "movq 48(%%rax), %%r9\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                                   \
         "movq 24(%%rax), %%rdx\n\t"                                   \
         "movq 16(%%rax), %%rsi\n\t"                                   \
         "movq 8(%%rax), %%rdi\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */                 \
         VALGRIND_CALL_NOREDIR_RAX                                     \
         VALGRIND_RESTORE_STACK                                        \
         VALGRIND_CFI_EPILOGUE                                         \
         : /*out*/   "=a" (_res)                                       \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER                 \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                               \
      lval = (__typeof__(lval)) _res;                                  \
   } while (0)
/* Call an 11-arg original on amd64: 6 args in rdi/rsi/rdx/rcx/r8/r9,
   args 7-11 pushed on the stack (subq $136 + 5 pushes keeps %rsp
   16-aligned after VALGRIND_ALIGN_STACK). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2002 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2003 arg7,arg8,arg9,arg10,arg11,arg12) \
2005 volatile OrigFn _orig = (orig); \
2006 volatile unsigned long _argvec[13]; \
2007 volatile unsigned long _res; \
2008 _argvec[0] = (unsigned long)_orig.nraddr; \
2009 _argvec[1] = (unsigned long)(arg1); \
2010 _argvec[2] = (unsigned long)(arg2); \
2011 _argvec[3] = (unsigned long)(arg3); \
2012 _argvec[4] = (unsigned long)(arg4); \
2013 _argvec[5] = (unsigned long)(arg5); \
2014 _argvec[6] = (unsigned long)(arg6); \
2015 _argvec[7] = (unsigned long)(arg7); \
2016 _argvec[8] = (unsigned long)(arg8); \
2017 _argvec[9] = (unsigned long)(arg9); \
2018 _argvec[10] = (unsigned long)(arg10); \
2019 _argvec[11] = (unsigned long)(arg11); \
2020 _argvec[12] = (unsigned long)(arg12); \
2022 VALGRIND_CFI_PROLOGUE \
2023 VALGRIND_ALIGN_STACK \
2024 "subq $128,%%rsp\n\t" \
2025 "pushq 96(%%rax)\n\t" \
2026 "pushq 88(%%rax)\n\t" \
2027 "pushq 80(%%rax)\n\t" \
2028 "pushq 72(%%rax)\n\t" \
2029 "pushq 64(%%rax)\n\t" \
2030 "pushq 56(%%rax)\n\t" \
2031 "movq 48(%%rax), %%r9\n\t" \
2032 "movq 40(%%rax), %%r8\n\t" \
2033 "movq 32(%%rax), %%rcx\n\t" \
2034 "movq 24(%%rax), %%rdx\n\t" \
2035 "movq 16(%%rax), %%rsi\n\t" \
2036 "movq 8(%%rax), %%rdi\n\t" \
2037 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2038 VALGRIND_CALL_NOREDIR_RAX \
2039 VALGRIND_RESTORE_STACK \
2040 VALGRIND_CFI_EPILOGUE \
2041 : /*out*/ "=a" (_res) \
2042 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2043 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2045 lval = (__typeof__(lval)) _res; \
2048 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2050 /* ------------------------ ppc32-linux ------------------------ */
2052 #if defined(PLAT_ppc32_linux)
2054 /* This is useful for finding out about the on-stack stuff:
2056 extern int f9 ( int,int,int,int,int,int,int,int,int );
2057 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2058 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2059 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2062 return f9(11,22,33,44,55,66,77,88,99);
2065 return f10(11,22,33,44,55,66,77,88,99,110);
2068 return f11(11,22,33,44,55,66,77,88,99,110,121);
2071 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2075 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2077 /* These regs are trashed by the hidden call. */
2078 #define __CALLER_SAVED_REGS \
2079 "lr", "ctr", "xer", \
2080 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2081 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2084 /* Macros to save and align the stack before making a function
2085 call and restore it afterwards as gcc may not keep the stack
2086 pointer aligned if it doesn't realise calls are being made
2087 to other functions. */
2089 #define VALGRIND_ALIGN_STACK \
2091 "rlwinm 1,1,0,0,27\n\t"
2092 #define VALGRIND_RESTORE_STACK \
/* These CALL_FN_ macros assume that on ppc32-linux,
   sizeof(unsigned long) == 4. */

/* Call a 0-arg original: load target into r11 and branch-and-link
   without redirection; result comes back in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 1-arg original: arg1 in r3, target via r11, result in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 2-arg original: args in r3/r4, target via r11, result in r3. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 3-arg original: args in r3..r5, target via r11, result in r3. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 4-arg original: args in r3..r6, target via r11, result in r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 5-arg original: args in r3..r7, target via r11, result in r3. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-arg original: args in r3..r8, target via r11, result in r3. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-arg original: args in r3..r9, target via r11, result in r3. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-arg original: args fill all of r3..r10, target via r11. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg original: args 1-8 in r3..r10, arg9 spilled to the
   stack at 8(r1) after expanding the frame by 16 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg original: args 1-8 in r3..r10, args 9-10 spilled to
   8(r1)/12(r1) after expanding the frame by 16 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg original: args 1-8 in r3..r10, args 9-11 spilled to
   8/12/16(r1) after expanding the frame by 32 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2493 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2494 arg7,arg8,arg9,arg10,arg11,arg12) \
2496 volatile OrigFn _orig = (orig); \
2497 volatile unsigned long _argvec[13]; \
2498 volatile unsigned long _res; \
2499 _argvec[0] = (unsigned long)_orig.nraddr; \
2500 _argvec[1] = (unsigned long)arg1; \
2501 _argvec[2] = (unsigned long)arg2; \
2502 _argvec[3] = (unsigned long)arg3; \
2503 _argvec[4] = (unsigned long)arg4; \
2504 _argvec[5] = (unsigned long)arg5; \
2505 _argvec[6] = (unsigned long)arg6; \
2506 _argvec[7] = (unsigned long)arg7; \
2507 _argvec[8] = (unsigned long)arg8; \
2508 _argvec[9] = (unsigned long)arg9; \
2509 _argvec[10] = (unsigned long)arg10; \
2510 _argvec[11] = (unsigned long)arg11; \
2511 _argvec[12] = (unsigned long)arg12; \
2513 VALGRIND_ALIGN_STACK \
2515 "addi 1,1,-32\n\t" \
2517 "lwz 3,48(11)\n\t" \
2520 "lwz 3,44(11)\n\t" \
2523 "lwz 3,40(11)\n\t" \
2526 "lwz 3,36(11)\n\t" \
2529 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2531 "lwz 5,12(11)\n\t" \
2532 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2533 "lwz 7,20(11)\n\t" \
2534 "lwz 8,24(11)\n\t" \
2535 "lwz 9,28(11)\n\t" \
2536 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2537 "lwz 11,0(11)\n\t" /* target->r11 */ \
2538 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2539 VALGRIND_RESTORE_STACK \
2541 : /*out*/ "=r" (_res) \
2542 : /*in*/ "r" (&_argvec[0]) \
2543 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2545 lval = (__typeof__(lval)) _res; \
2548 #endif /* PLAT_ppc32_linux */
2550 /* ------------------------ ppc64-linux ------------------------ */
2552 #if defined(PLAT_ppc64_linux)
2554 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2556 /* These regs are trashed by the hidden call. */
2557 #define __CALLER_SAVED_REGS \
2558 "lr", "ctr", "xer", \
2559 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2560 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2563 /* Macros to save and align the stack before making a function
2564 call and restore it afterwards as gcc may not keep the stack
2565 pointer aligned if it doesn't realise calls are being made
2566 to other functions. */
2568 #define VALGRIND_ALIGN_STACK \
2570 "rldicr 1,1,0,59\n\t"
2571 #define VALGRIND_RESTORE_STACK \
/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
   long) == 8. */

/* Call a 0-arg original.  _argvec[1] carries nraddr's TOC pointer;
   the caller's r2 is saved below sp at -16(r11) and restored after
   the call (the asm passes &_argvec[2], so r2/TOC sit at -8/-16). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 1-arg original: arg1 in r3; TOC saved/swapped around the call. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 2-arg original: args in r3/r4; TOC saved/swapped around the call. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 3-arg original: args in r3..r5; TOC saved/swapped around the call. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 4-arg original: args in r3..r6; TOC saved/swapped around the call. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 5-arg original: args in r3..r7; TOC saved/swapped around the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-arg original: args in r3..r8; TOC saved/swapped around the call. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-arg original: args in r3..r9; TOC saved/swapped around the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-arg original: args fill all of r3..r10; TOC saved/swapped. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg original: args 1-8 in r3..r10, arg9 stored at 112(r1)
   after expanding the stack frame by 128 bytes; TOC saved/swapped. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg original: args 1-8 in r3..r10, args 9-10 stored at
   112/120(r1) after expanding the frame by 128 bytes; TOC saved/swapped. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg original: args 1-8 in r3..r10, args 9-11 stored at
   112/120/128(r1) after expanding the frame by 144 bytes; TOC saved/swapped. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]    = (unsigned long)_orig.r2;                    \
      _argvec[2]    = (unsigned long)_orig.nraddr;                \
      _argvec[2+1]  = (unsigned long)arg1;                        \
      _argvec[2+2]  = (unsigned long)arg2;                        \
      _argvec[2+3]  = (unsigned long)arg3;                        \
      _argvec[2+4]  = (unsigned long)arg4;                        \
      _argvec[2+5]  = (unsigned long)arg5;                        \
      _argvec[2+6]  = (unsigned long)arg6;                        \
      _argvec[2+7]  = (unsigned long)arg7;                        \
      _argvec[2+8]  = (unsigned long)arg8;                        \
      _argvec[2+9]  = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64-linux: as CALL_FN_W_11W but for 12 arguments; the fourth
   stack-passed arg goes to 136(r1).
   NOTE(review): some continuation lines are missing from this dump —
   verify against upstream valgrind.h. */
3044 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3045 arg7,arg8,arg9,arg10,arg11,arg12) \
3047 volatile OrigFn _orig = (orig); \
3048 volatile unsigned long _argvec[3+12]; \
3049 volatile unsigned long _res; \
3050 /* _argvec[0] holds current r2 across the call */ \
3051 _argvec[1] = (unsigned long)_orig.r2; \
3052 _argvec[2] = (unsigned long)_orig.nraddr; \
3053 _argvec[2+1] = (unsigned long)arg1; \
3054 _argvec[2+2] = (unsigned long)arg2; \
3055 _argvec[2+3] = (unsigned long)arg3; \
3056 _argvec[2+4] = (unsigned long)arg4; \
3057 _argvec[2+5] = (unsigned long)arg5; \
3058 _argvec[2+6] = (unsigned long)arg6; \
3059 _argvec[2+7] = (unsigned long)arg7; \
3060 _argvec[2+8] = (unsigned long)arg8; \
3061 _argvec[2+9] = (unsigned long)arg9; \
3062 _argvec[2+10] = (unsigned long)arg10; \
3063 _argvec[2+11] = (unsigned long)arg11; \
3064 _argvec[2+12] = (unsigned long)arg12; \
3066 VALGRIND_ALIGN_STACK \
3068 "std 2,-16(11)\n\t" /* save tocptr */ \
3069 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3070 "addi 1,1,-144\n\t" /* expand stack frame */ \
3073 "std 3,136(1)\n\t" \
3076 "std 3,128(1)\n\t" \
3079 "std 3,120(1)\n\t" \
3082 "std 3,112(1)\n\t" \
3084 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3085 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3086 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3087 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3088 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3089 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3090 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3091 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3092 "ld 11, 0(11)\n\t" /* target->r11 */ \
3093 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3096 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3097 VALGRIND_RESTORE_STACK \
3098 : /*out*/ "=r" (_res) \
3099 : /*in*/ "r" (&_argvec[2]) \
3100 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3102 lval = (__typeof__(lval)) _res; \
3105 #endif /* PLAT_ppc64_linux */
3107 /* ------------------------- arm-linux ------------------------- */
3109 #if defined(PLAT_arm_linux)
3111 /* These regs are trashed by the hidden call. */
/* arm-linux clobber list: argument regs r0-r3, scratch r4, and lr (r14). */
3112 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3114 /* Macros to save and align the stack before making a function
3115 call and restore it afterwards as gcc may not keep the stack
3116 pointer aligned if it doesn't realise calls are being made
3117 to other functions. */
3119 /* This is a bit tricky. We store the original stack pointer in r10
3120 as it is callee-saves. gcc doesn't allow the use of r11 for some
3121 reason. Also, we can't directly "bic" the stack pointer in thumb
3122 mode since r13 isn't an allowed register number in that context.
3123 So use r4 as a temporary, since that is about to get trashed
3124 anyway, just after each use of this macro. Side effect is we need
3125 to be very careful about any future changes, since
3126 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Align SP to 8 bytes via r4 (see comment above); RESTORE undoes it.
   NOTE(review): both macro bodies are visibly truncated in this dump
   (the save-to-r10 / restore-from-r10 lines are missing) — compare
   against upstream valgrind.h. */
3127 #define VALGRIND_ALIGN_STACK \
3130 "bic r4, r4, #7\n\t" \
3132 #define VALGRIND_RESTORE_STACK \
3135 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
/* arm-linux: call a 0-argument function; target address is loaded into
   r4 and branched to via the no-redirect sequence; result in _res. */
3138 #define CALL_FN_W_v(lval, orig) \
3140 volatile OrigFn _orig = (orig); \
3141 volatile unsigned long _argvec[1]; \
3142 volatile unsigned long _res; \
3143 _argvec[0] = (unsigned long)_orig.nraddr; \
3145 VALGRIND_ALIGN_STACK \
3146 "ldr r4, [%1] \n\t" /* target->r4 */ \
3147 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3148 VALGRIND_RESTORE_STACK \
3150 : /*out*/ "=r" (_res) \
3151 : /*in*/ "0" (&_argvec[0]) \
3152 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3154 lval = (__typeof__(lval)) _res; \
/* arm-linux: 1-arg call; arg1 -> r0, target -> r4. */
3157 #define CALL_FN_W_W(lval, orig, arg1) \
3159 volatile OrigFn _orig = (orig); \
3160 volatile unsigned long _argvec[2]; \
3161 volatile unsigned long _res; \
3162 _argvec[0] = (unsigned long)_orig.nraddr; \
3163 _argvec[1] = (unsigned long)(arg1); \
3165 VALGRIND_ALIGN_STACK \
3166 "ldr r0, [%1, #4] \n\t" \
3167 "ldr r4, [%1] \n\t" /* target->r4 */ \
3168 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3169 VALGRIND_RESTORE_STACK \
3171 : /*out*/ "=r" (_res) \
3172 : /*in*/ "0" (&_argvec[0]) \
3173 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3175 lval = (__typeof__(lval)) _res; \
/* arm-linux: 2-arg call; args -> r0,r1, target -> r4. */
3178 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3180 volatile OrigFn _orig = (orig); \
3181 volatile unsigned long _argvec[3]; \
3182 volatile unsigned long _res; \
3183 _argvec[0] = (unsigned long)_orig.nraddr; \
3184 _argvec[1] = (unsigned long)(arg1); \
3185 _argvec[2] = (unsigned long)(arg2); \
3187 VALGRIND_ALIGN_STACK \
3188 "ldr r0, [%1, #4] \n\t" \
3189 "ldr r1, [%1, #8] \n\t" \
3190 "ldr r4, [%1] \n\t" /* target->r4 */ \
3191 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3192 VALGRIND_RESTORE_STACK \
3194 : /*out*/ "=r" (_res) \
3195 : /*in*/ "0" (&_argvec[0]) \
3196 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3198 lval = (__typeof__(lval)) _res; \
/* arm-linux: 3-arg call; args -> r0-r2, target -> r4. */
3201 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3203 volatile OrigFn _orig = (orig); \
3204 volatile unsigned long _argvec[4]; \
3205 volatile unsigned long _res; \
3206 _argvec[0] = (unsigned long)_orig.nraddr; \
3207 _argvec[1] = (unsigned long)(arg1); \
3208 _argvec[2] = (unsigned long)(arg2); \
3209 _argvec[3] = (unsigned long)(arg3); \
3211 VALGRIND_ALIGN_STACK \
3212 "ldr r0, [%1, #4] \n\t" \
3213 "ldr r1, [%1, #8] \n\t" \
3214 "ldr r2, [%1, #12] \n\t" \
3215 "ldr r4, [%1] \n\t" /* target->r4 */ \
3216 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3217 VALGRIND_RESTORE_STACK \
3219 : /*out*/ "=r" (_res) \
3220 : /*in*/ "0" (&_argvec[0]) \
3221 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3223 lval = (__typeof__(lval)) _res; \
/* arm-linux: 4-arg call; args -> r0-r3 (all register-passed), target -> r4. */
3226 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3228 volatile OrigFn _orig = (orig); \
3229 volatile unsigned long _argvec[5]; \
3230 volatile unsigned long _res; \
3231 _argvec[0] = (unsigned long)_orig.nraddr; \
3232 _argvec[1] = (unsigned long)(arg1); \
3233 _argvec[2] = (unsigned long)(arg2); \
3234 _argvec[3] = (unsigned long)(arg3); \
3235 _argvec[4] = (unsigned long)(arg4); \
3237 VALGRIND_ALIGN_STACK \
3238 "ldr r0, [%1, #4] \n\t" \
3239 "ldr r1, [%1, #8] \n\t" \
3240 "ldr r2, [%1, #12] \n\t" \
3241 "ldr r3, [%1, #16] \n\t" \
3242 "ldr r4, [%1] \n\t" /* target->r4 */ \
3243 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3244 VALGRIND_RESTORE_STACK \
3246 : /*out*/ "=r" (_res) \
3247 : /*in*/ "0" (&_argvec[0]) \
3248 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3250 lval = (__typeof__(lval)) _res; \
/* arm-linux: 5-arg call; args 1-4 -> r0-r3, arg5 goes on the stack
   (sub #4 keeps 8-byte SP alignment after the push).
   NOTE(review): the "push {r0}" line after loading arg5 appears to be
   missing from this dump — verify against upstream valgrind.h. */
3253 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3255 volatile OrigFn _orig = (orig); \
3256 volatile unsigned long _argvec[6]; \
3257 volatile unsigned long _res; \
3258 _argvec[0] = (unsigned long)_orig.nraddr; \
3259 _argvec[1] = (unsigned long)(arg1); \
3260 _argvec[2] = (unsigned long)(arg2); \
3261 _argvec[3] = (unsigned long)(arg3); \
3262 _argvec[4] = (unsigned long)(arg4); \
3263 _argvec[5] = (unsigned long)(arg5); \
3265 VALGRIND_ALIGN_STACK \
3266 "sub sp, sp, #4 \n\t" \
3267 "ldr r0, [%1, #20] \n\t" \
3269 "ldr r0, [%1, #4] \n\t" \
3270 "ldr r1, [%1, #8] \n\t" \
3271 "ldr r2, [%1, #12] \n\t" \
3272 "ldr r3, [%1, #16] \n\t" \
3273 "ldr r4, [%1] \n\t" /* target->r4 */ \
3274 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3275 VALGRIND_RESTORE_STACK \
3277 : /*out*/ "=r" (_res) \
3278 : /*in*/ "0" (&_argvec[0]) \
3279 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3281 lval = (__typeof__(lval)) _res; \
/* arm-linux: 6-arg call; args 1-4 -> r0-r3, args 5-6 pushed on the
   stack (8 bytes, keeping SP 8-byte aligned). */
3284 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3286 volatile OrigFn _orig = (orig); \
3287 volatile unsigned long _argvec[7]; \
3288 volatile unsigned long _res; \
3289 _argvec[0] = (unsigned long)_orig.nraddr; \
3290 _argvec[1] = (unsigned long)(arg1); \
3291 _argvec[2] = (unsigned long)(arg2); \
3292 _argvec[3] = (unsigned long)(arg3); \
3293 _argvec[4] = (unsigned long)(arg4); \
3294 _argvec[5] = (unsigned long)(arg5); \
3295 _argvec[6] = (unsigned long)(arg6); \
3297 VALGRIND_ALIGN_STACK \
3298 "ldr r0, [%1, #20] \n\t" \
3299 "ldr r1, [%1, #24] \n\t" \
3300 "push {r0, r1} \n\t" \
3301 "ldr r0, [%1, #4] \n\t" \
3302 "ldr r1, [%1, #8] \n\t" \
3303 "ldr r2, [%1, #12] \n\t" \
3304 "ldr r3, [%1, #16] \n\t" \
3305 "ldr r4, [%1] \n\t" /* target->r4 */ \
3306 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3307 VALGRIND_RESTORE_STACK \
3309 : /*out*/ "=r" (_res) \
3310 : /*in*/ "0" (&_argvec[0]) \
3311 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3313 lval = (__typeof__(lval)) _res; \
/* arm-linux: 7-arg call; args 1-4 -> r0-r3, args 5-7 pushed (sub #4
   first so the total stack adjustment stays 8-byte aligned). */
3316 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3319 volatile OrigFn _orig = (orig); \
3320 volatile unsigned long _argvec[8]; \
3321 volatile unsigned long _res; \
3322 _argvec[0] = (unsigned long)_orig.nraddr; \
3323 _argvec[1] = (unsigned long)(arg1); \
3324 _argvec[2] = (unsigned long)(arg2); \
3325 _argvec[3] = (unsigned long)(arg3); \
3326 _argvec[4] = (unsigned long)(arg4); \
3327 _argvec[5] = (unsigned long)(arg5); \
3328 _argvec[6] = (unsigned long)(arg6); \
3329 _argvec[7] = (unsigned long)(arg7); \
3331 VALGRIND_ALIGN_STACK \
3332 "sub sp, sp, #4 \n\t" \
3333 "ldr r0, [%1, #20] \n\t" \
3334 "ldr r1, [%1, #24] \n\t" \
3335 "ldr r2, [%1, #28] \n\t" \
3336 "push {r0, r1, r2} \n\t" \
3337 "ldr r0, [%1, #4] \n\t" \
3338 "ldr r1, [%1, #8] \n\t" \
3339 "ldr r2, [%1, #12] \n\t" \
3340 "ldr r3, [%1, #16] \n\t" \
3341 "ldr r4, [%1] \n\t" /* target->r4 */ \
3342 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3343 VALGRIND_RESTORE_STACK \
3345 : /*out*/ "=r" (_res) \
3346 : /*in*/ "0" (&_argvec[0]) \
3347 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3349 lval = (__typeof__(lval)) _res; \
/* arm-linux: 8-arg call; args 1-4 -> r0-r3, args 5-8 pushed (16 bytes,
   already 8-byte aligned so no extra sub needed). */
3352 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3355 volatile OrigFn _orig = (orig); \
3356 volatile unsigned long _argvec[9]; \
3357 volatile unsigned long _res; \
3358 _argvec[0] = (unsigned long)_orig.nraddr; \
3359 _argvec[1] = (unsigned long)(arg1); \
3360 _argvec[2] = (unsigned long)(arg2); \
3361 _argvec[3] = (unsigned long)(arg3); \
3362 _argvec[4] = (unsigned long)(arg4); \
3363 _argvec[5] = (unsigned long)(arg5); \
3364 _argvec[6] = (unsigned long)(arg6); \
3365 _argvec[7] = (unsigned long)(arg7); \
3366 _argvec[8] = (unsigned long)(arg8); \
3368 VALGRIND_ALIGN_STACK \
3369 "ldr r0, [%1, #20] \n\t" \
3370 "ldr r1, [%1, #24] \n\t" \
3371 "ldr r2, [%1, #28] \n\t" \
3372 "ldr r3, [%1, #32] \n\t" \
3373 "push {r0, r1, r2, r3} \n\t" \
3374 "ldr r0, [%1, #4] \n\t" \
3375 "ldr r1, [%1, #8] \n\t" \
3376 "ldr r2, [%1, #12] \n\t" \
3377 "ldr r3, [%1, #16] \n\t" \
3378 "ldr r4, [%1] \n\t" /* target->r4 */ \
3379 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3380 VALGRIND_RESTORE_STACK \
3382 : /*out*/ "=r" (_res) \
3383 : /*in*/ "0" (&_argvec[0]) \
3384 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3386 lval = (__typeof__(lval)) _res; \
/* arm-linux: 9-arg call; args 1-4 -> r0-r3, args 5-9 pushed (sub #4
   plus a 20-byte push keeps SP 8-byte aligned). */
3389 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3392 volatile OrigFn _orig = (orig); \
3393 volatile unsigned long _argvec[10]; \
3394 volatile unsigned long _res; \
3395 _argvec[0] = (unsigned long)_orig.nraddr; \
3396 _argvec[1] = (unsigned long)(arg1); \
3397 _argvec[2] = (unsigned long)(arg2); \
3398 _argvec[3] = (unsigned long)(arg3); \
3399 _argvec[4] = (unsigned long)(arg4); \
3400 _argvec[5] = (unsigned long)(arg5); \
3401 _argvec[6] = (unsigned long)(arg6); \
3402 _argvec[7] = (unsigned long)(arg7); \
3403 _argvec[8] = (unsigned long)(arg8); \
3404 _argvec[9] = (unsigned long)(arg9); \
3406 VALGRIND_ALIGN_STACK \
3407 "sub sp, sp, #4 \n\t" \
3408 "ldr r0, [%1, #20] \n\t" \
3409 "ldr r1, [%1, #24] \n\t" \
3410 "ldr r2, [%1, #28] \n\t" \
3411 "ldr r3, [%1, #32] \n\t" \
3412 "ldr r4, [%1, #36] \n\t" \
3413 "push {r0, r1, r2, r3, r4} \n\t" \
3414 "ldr r0, [%1, #4] \n\t" \
3415 "ldr r1, [%1, #8] \n\t" \
3416 "ldr r2, [%1, #12] \n\t" \
3417 "ldr r3, [%1, #16] \n\t" \
3418 "ldr r4, [%1] \n\t" /* target->r4 */ \
3419 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3420 VALGRIND_RESTORE_STACK \
3422 : /*out*/ "=r" (_res) \
3423 : /*in*/ "0" (&_argvec[0]) \
3424 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3426 lval = (__typeof__(lval)) _res; \
/* arm-linux: 10-arg call; args 1-4 -> r0-r3, args 5-10 pushed.
   NOTE(review): the "push {r0}" after loading arg10 at [%1, #40]
   appears to be missing from this dump — verify against upstream. */
3429 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3430 arg7,arg8,arg9,arg10) \
3432 volatile OrigFn _orig = (orig); \
3433 volatile unsigned long _argvec[11]; \
3434 volatile unsigned long _res; \
3435 _argvec[0] = (unsigned long)_orig.nraddr; \
3436 _argvec[1] = (unsigned long)(arg1); \
3437 _argvec[2] = (unsigned long)(arg2); \
3438 _argvec[3] = (unsigned long)(arg3); \
3439 _argvec[4] = (unsigned long)(arg4); \
3440 _argvec[5] = (unsigned long)(arg5); \
3441 _argvec[6] = (unsigned long)(arg6); \
3442 _argvec[7] = (unsigned long)(arg7); \
3443 _argvec[8] = (unsigned long)(arg8); \
3444 _argvec[9] = (unsigned long)(arg9); \
3445 _argvec[10] = (unsigned long)(arg10); \
3447 VALGRIND_ALIGN_STACK \
3448 "ldr r0, [%1, #40] \n\t" \
3450 "ldr r0, [%1, #20] \n\t" \
3451 "ldr r1, [%1, #24] \n\t" \
3452 "ldr r2, [%1, #28] \n\t" \
3453 "ldr r3, [%1, #32] \n\t" \
3454 "ldr r4, [%1, #36] \n\t" \
3455 "push {r0, r1, r2, r3, r4} \n\t" \
3456 "ldr r0, [%1, #4] \n\t" \
3457 "ldr r1, [%1, #8] \n\t" \
3458 "ldr r2, [%1, #12] \n\t" \
3459 "ldr r3, [%1, #16] \n\t" \
3460 "ldr r4, [%1] \n\t" /* target->r4 */ \
3461 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3462 VALGRIND_RESTORE_STACK \
3464 : /*out*/ "=r" (_res) \
3465 : /*in*/ "0" (&_argvec[0]) \
3466 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3468 lval = (__typeof__(lval)) _res; \
/* arm-linux: 11-arg call; args 1-4 -> r0-r3, args 5-11 pushed in two
   batches (sub #4 first so 4+8+20 bytes keeps SP 8-byte aligned). */
3471 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3472 arg6,arg7,arg8,arg9,arg10, \
3475 volatile OrigFn _orig = (orig); \
3476 volatile unsigned long _argvec[12]; \
3477 volatile unsigned long _res; \
3478 _argvec[0] = (unsigned long)_orig.nraddr; \
3479 _argvec[1] = (unsigned long)(arg1); \
3480 _argvec[2] = (unsigned long)(arg2); \
3481 _argvec[3] = (unsigned long)(arg3); \
3482 _argvec[4] = (unsigned long)(arg4); \
3483 _argvec[5] = (unsigned long)(arg5); \
3484 _argvec[6] = (unsigned long)(arg6); \
3485 _argvec[7] = (unsigned long)(arg7); \
3486 _argvec[8] = (unsigned long)(arg8); \
3487 _argvec[9] = (unsigned long)(arg9); \
3488 _argvec[10] = (unsigned long)(arg10); \
3489 _argvec[11] = (unsigned long)(arg11); \
3491 VALGRIND_ALIGN_STACK \
3492 "sub sp, sp, #4 \n\t" \
3493 "ldr r0, [%1, #40] \n\t" \
3494 "ldr r1, [%1, #44] \n\t" \
3495 "push {r0, r1} \n\t" \
3496 "ldr r0, [%1, #20] \n\t" \
3497 "ldr r1, [%1, #24] \n\t" \
3498 "ldr r2, [%1, #28] \n\t" \
3499 "ldr r3, [%1, #32] \n\t" \
3500 "ldr r4, [%1, #36] \n\t" \
3501 "push {r0, r1, r2, r3, r4} \n\t" \
3502 "ldr r0, [%1, #4] \n\t" \
3503 "ldr r1, [%1, #8] \n\t" \
3504 "ldr r2, [%1, #12] \n\t" \
3505 "ldr r3, [%1, #16] \n\t" \
3506 "ldr r4, [%1] \n\t" /* target->r4 */ \
3507 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3508 VALGRIND_RESTORE_STACK \
3510 : /*out*/ "=r" (_res) \
3511 : /*in*/ "0" (&_argvec[0]) \
3512 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3514 lval = (__typeof__(lval)) _res; \
/* arm-linux: 12-arg call; args 1-4 -> r0-r3, args 5-12 pushed in two
   batches (12 + 20 bytes = 32, SP stays 8-byte aligned). */
3517 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3518 arg6,arg7,arg8,arg9,arg10, \
3521 volatile OrigFn _orig = (orig); \
3522 volatile unsigned long _argvec[13]; \
3523 volatile unsigned long _res; \
3524 _argvec[0] = (unsigned long)_orig.nraddr; \
3525 _argvec[1] = (unsigned long)(arg1); \
3526 _argvec[2] = (unsigned long)(arg2); \
3527 _argvec[3] = (unsigned long)(arg3); \
3528 _argvec[4] = (unsigned long)(arg4); \
3529 _argvec[5] = (unsigned long)(arg5); \
3530 _argvec[6] = (unsigned long)(arg6); \
3531 _argvec[7] = (unsigned long)(arg7); \
3532 _argvec[8] = (unsigned long)(arg8); \
3533 _argvec[9] = (unsigned long)(arg9); \
3534 _argvec[10] = (unsigned long)(arg10); \
3535 _argvec[11] = (unsigned long)(arg11); \
3536 _argvec[12] = (unsigned long)(arg12); \
3538 VALGRIND_ALIGN_STACK \
3539 "ldr r0, [%1, #40] \n\t" \
3540 "ldr r1, [%1, #44] \n\t" \
3541 "ldr r2, [%1, #48] \n\t" \
3542 "push {r0, r1, r2} \n\t" \
3543 "ldr r0, [%1, #20] \n\t" \
3544 "ldr r1, [%1, #24] \n\t" \
3545 "ldr r2, [%1, #28] \n\t" \
3546 "ldr r3, [%1, #32] \n\t" \
3547 "ldr r4, [%1, #36] \n\t" \
3548 "push {r0, r1, r2, r3, r4} \n\t" \
3549 "ldr r0, [%1, #4] \n\t" \
3550 "ldr r1, [%1, #8] \n\t" \
3551 "ldr r2, [%1, #12] \n\t" \
3552 "ldr r3, [%1, #16] \n\t" \
3553 "ldr r4, [%1] \n\t" /* target->r4 */ \
3554 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3555 VALGRIND_RESTORE_STACK \
3557 : /*out*/ "=r" (_res) \
3558 : /*in*/ "0" (&_argvec[0]) \
3559 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3561 lval = (__typeof__(lval)) _res; \
3564 #endif /* PLAT_arm_linux */
3566 /* ------------------------ arm64-linux ------------------------ */
3568 #if defined(PLAT_arm64_linux)
3570 /* These regs are trashed by the hidden call. */
/* arm64-linux clobber list: integer regs x0-x20 and x30 (lr), plus all
   SIMD/FP regs v0-v31. */
3571 #define __CALLER_SAVED_REGS \
3572 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
3573 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
3574 "x18", "x19", "x20", "x30", \
3575 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
3576 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
3577 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
3578 "v26", "v27", "v28", "v29", "v30", "v31"
3580 /* x21 is callee-saved, so we can use it to save and restore SP around
/* Align SP to 16 bytes via x21; RESTORE undoes it.
   NOTE(review): both bodies are visibly truncated in this dump (the
   save-SP-to-x21 / restore lines are missing) — verify upstream. */
3582 #define VALGRIND_ALIGN_STACK \
3584 "bic sp, x21, #15\n\t"
3585 #define VALGRIND_RESTORE_STACK \
3588 /* These CALL_FN_ macros assume that on arm64-linux,
3589 sizeof(unsigned long) == 8. */
/* arm64-linux: 0-arg call; target address loaded into x8 and branched
   to via the no-redirect sequence. */
3591 #define CALL_FN_W_v(lval, orig) \
3593 volatile OrigFn _orig = (orig); \
3594 volatile unsigned long _argvec[1]; \
3595 volatile unsigned long _res; \
3596 _argvec[0] = (unsigned long)_orig.nraddr; \
3598 VALGRIND_ALIGN_STACK \
3599 "ldr x8, [%1] \n\t" /* target->x8 */ \
3600 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3601 VALGRIND_RESTORE_STACK \
3603 : /*out*/ "=r" (_res) \
3604 : /*in*/ "0" (&_argvec[0]) \
3605 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3607 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 1-arg call; arg1 -> x0, target -> x8. */
3610 #define CALL_FN_W_W(lval, orig, arg1) \
3612 volatile OrigFn _orig = (orig); \
3613 volatile unsigned long _argvec[2]; \
3614 volatile unsigned long _res; \
3615 _argvec[0] = (unsigned long)_orig.nraddr; \
3616 _argvec[1] = (unsigned long)(arg1); \
3618 VALGRIND_ALIGN_STACK \
3619 "ldr x0, [%1, #8] \n\t" \
3620 "ldr x8, [%1] \n\t" /* target->x8 */ \
3621 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3622 VALGRIND_RESTORE_STACK \
3624 : /*out*/ "=r" (_res) \
3625 : /*in*/ "0" (&_argvec[0]) \
3626 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3628 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 2-arg call; args -> x0,x1, target -> x8. */
3631 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3633 volatile OrigFn _orig = (orig); \
3634 volatile unsigned long _argvec[3]; \
3635 volatile unsigned long _res; \
3636 _argvec[0] = (unsigned long)_orig.nraddr; \
3637 _argvec[1] = (unsigned long)(arg1); \
3638 _argvec[2] = (unsigned long)(arg2); \
3640 VALGRIND_ALIGN_STACK \
3641 "ldr x0, [%1, #8] \n\t" \
3642 "ldr x1, [%1, #16] \n\t" \
3643 "ldr x8, [%1] \n\t" /* target->x8 */ \
3644 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3645 VALGRIND_RESTORE_STACK \
3647 : /*out*/ "=r" (_res) \
3648 : /*in*/ "0" (&_argvec[0]) \
3649 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3651 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 3-arg call; args -> x0-x2, target -> x8. */
3654 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3656 volatile OrigFn _orig = (orig); \
3657 volatile unsigned long _argvec[4]; \
3658 volatile unsigned long _res; \
3659 _argvec[0] = (unsigned long)_orig.nraddr; \
3660 _argvec[1] = (unsigned long)(arg1); \
3661 _argvec[2] = (unsigned long)(arg2); \
3662 _argvec[3] = (unsigned long)(arg3); \
3664 VALGRIND_ALIGN_STACK \
3665 "ldr x0, [%1, #8] \n\t" \
3666 "ldr x1, [%1, #16] \n\t" \
3667 "ldr x2, [%1, #24] \n\t" \
3668 "ldr x8, [%1] \n\t" /* target->x8 */ \
3669 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3670 VALGRIND_RESTORE_STACK \
3672 : /*out*/ "=r" (_res) \
3673 : /*in*/ "0" (&_argvec[0]) \
3674 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3676 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 4-arg call; args -> x0-x3, target -> x8. */
3679 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3681 volatile OrigFn _orig = (orig); \
3682 volatile unsigned long _argvec[5]; \
3683 volatile unsigned long _res; \
3684 _argvec[0] = (unsigned long)_orig.nraddr; \
3685 _argvec[1] = (unsigned long)(arg1); \
3686 _argvec[2] = (unsigned long)(arg2); \
3687 _argvec[3] = (unsigned long)(arg3); \
3688 _argvec[4] = (unsigned long)(arg4); \
3690 VALGRIND_ALIGN_STACK \
3691 "ldr x0, [%1, #8] \n\t" \
3692 "ldr x1, [%1, #16] \n\t" \
3693 "ldr x2, [%1, #24] \n\t" \
3694 "ldr x3, [%1, #32] \n\t" \
3695 "ldr x8, [%1] \n\t" /* target->x8 */ \
3696 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3697 VALGRIND_RESTORE_STACK \
3699 : /*out*/ "=r" (_res) \
3700 : /*in*/ "0" (&_argvec[0]) \
3701 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3703 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 5-arg call; args -> x0-x4 (all register-passed). */
3706 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3708 volatile OrigFn _orig = (orig); \
3709 volatile unsigned long _argvec[6]; \
3710 volatile unsigned long _res; \
3711 _argvec[0] = (unsigned long)_orig.nraddr; \
3712 _argvec[1] = (unsigned long)(arg1); \
3713 _argvec[2] = (unsigned long)(arg2); \
3714 _argvec[3] = (unsigned long)(arg3); \
3715 _argvec[4] = (unsigned long)(arg4); \
3716 _argvec[5] = (unsigned long)(arg5); \
3718 VALGRIND_ALIGN_STACK \
3719 "ldr x0, [%1, #8] \n\t" \
3720 "ldr x1, [%1, #16] \n\t" \
3721 "ldr x2, [%1, #24] \n\t" \
3722 "ldr x3, [%1, #32] \n\t" \
3723 "ldr x4, [%1, #40] \n\t" \
3724 "ldr x8, [%1] \n\t" /* target->x8 */ \
3725 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3726 VALGRIND_RESTORE_STACK \
3728 : /*out*/ "=r" (_res) \
3729 : /*in*/ "0" (&_argvec[0]) \
3730 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3732 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 6-arg call; args -> x0-x5. */
3735 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3737 volatile OrigFn _orig = (orig); \
3738 volatile unsigned long _argvec[7]; \
3739 volatile unsigned long _res; \
3740 _argvec[0] = (unsigned long)_orig.nraddr; \
3741 _argvec[1] = (unsigned long)(arg1); \
3742 _argvec[2] = (unsigned long)(arg2); \
3743 _argvec[3] = (unsigned long)(arg3); \
3744 _argvec[4] = (unsigned long)(arg4); \
3745 _argvec[5] = (unsigned long)(arg5); \
3746 _argvec[6] = (unsigned long)(arg6); \
3748 VALGRIND_ALIGN_STACK \
3749 "ldr x0, [%1, #8] \n\t" \
3750 "ldr x1, [%1, #16] \n\t" \
3751 "ldr x2, [%1, #24] \n\t" \
3752 "ldr x3, [%1, #32] \n\t" \
3753 "ldr x4, [%1, #40] \n\t" \
3754 "ldr x5, [%1, #48] \n\t" \
3755 "ldr x8, [%1] \n\t" /* target->x8 */ \
3756 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3757 VALGRIND_RESTORE_STACK \
3759 : /*out*/ "=r" (_res) \
3760 : /*in*/ "0" (&_argvec[0]) \
3761 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3763 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 7-arg call; args -> x0-x6. */
3766 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3769 volatile OrigFn _orig = (orig); \
3770 volatile unsigned long _argvec[8]; \
3771 volatile unsigned long _res; \
3772 _argvec[0] = (unsigned long)_orig.nraddr; \
3773 _argvec[1] = (unsigned long)(arg1); \
3774 _argvec[2] = (unsigned long)(arg2); \
3775 _argvec[3] = (unsigned long)(arg3); \
3776 _argvec[4] = (unsigned long)(arg4); \
3777 _argvec[5] = (unsigned long)(arg5); \
3778 _argvec[6] = (unsigned long)(arg6); \
3779 _argvec[7] = (unsigned long)(arg7); \
3781 VALGRIND_ALIGN_STACK \
3782 "ldr x0, [%1, #8] \n\t" \
3783 "ldr x1, [%1, #16] \n\t" \
3784 "ldr x2, [%1, #24] \n\t" \
3785 "ldr x3, [%1, #32] \n\t" \
3786 "ldr x4, [%1, #40] \n\t" \
3787 "ldr x5, [%1, #48] \n\t" \
3788 "ldr x6, [%1, #56] \n\t" \
3789 "ldr x8, [%1] \n\t" /* target->x8 */ \
3790 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3791 VALGRIND_RESTORE_STACK \
3793 : /*out*/ "=r" (_res) \
3794 : /*in*/ "0" (&_argvec[0]) \
3795 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3797 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 8-arg call; args -> x0-x7 (all register-passed). */
3800 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3803 volatile OrigFn _orig = (orig); \
3804 volatile unsigned long _argvec[9]; \
3805 volatile unsigned long _res; \
3806 _argvec[0] = (unsigned long)_orig.nraddr; \
3807 _argvec[1] = (unsigned long)(arg1); \
3808 _argvec[2] = (unsigned long)(arg2); \
3809 _argvec[3] = (unsigned long)(arg3); \
3810 _argvec[4] = (unsigned long)(arg4); \
3811 _argvec[5] = (unsigned long)(arg5); \
3812 _argvec[6] = (unsigned long)(arg6); \
3813 _argvec[7] = (unsigned long)(arg7); \
3814 _argvec[8] = (unsigned long)(arg8); \
3816 VALGRIND_ALIGN_STACK \
3817 "ldr x0, [%1, #8] \n\t" \
3818 "ldr x1, [%1, #16] \n\t" \
3819 "ldr x2, [%1, #24] \n\t" \
3820 "ldr x3, [%1, #32] \n\t" \
3821 "ldr x4, [%1, #40] \n\t" \
3822 "ldr x5, [%1, #48] \n\t" \
3823 "ldr x6, [%1, #56] \n\t" \
3824 "ldr x7, [%1, #64] \n\t" \
3825 "ldr x8, [%1] \n\t" /* target->x8 */ \
3826 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3827 VALGRIND_RESTORE_STACK \
3829 : /*out*/ "=r" (_res) \
3830 : /*in*/ "0" (&_argvec[0]) \
3831 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3833 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 9-arg call; args 1-8 -> x0-x7, arg9 stored at [sp, #0]
   after reserving 0x20 bytes (keeps SP 16-byte aligned); x8 is reused
   as a scratch before finally receiving the target address. */
3836 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3839 volatile OrigFn _orig = (orig); \
3840 volatile unsigned long _argvec[10]; \
3841 volatile unsigned long _res; \
3842 _argvec[0] = (unsigned long)_orig.nraddr; \
3843 _argvec[1] = (unsigned long)(arg1); \
3844 _argvec[2] = (unsigned long)(arg2); \
3845 _argvec[3] = (unsigned long)(arg3); \
3846 _argvec[4] = (unsigned long)(arg4); \
3847 _argvec[5] = (unsigned long)(arg5); \
3848 _argvec[6] = (unsigned long)(arg6); \
3849 _argvec[7] = (unsigned long)(arg7); \
3850 _argvec[8] = (unsigned long)(arg8); \
3851 _argvec[9] = (unsigned long)(arg9); \
3853 VALGRIND_ALIGN_STACK \
3854 "sub sp, sp, #0x20 \n\t" \
3855 "ldr x0, [%1, #8] \n\t" \
3856 "ldr x1, [%1, #16] \n\t" \
3857 "ldr x2, [%1, #24] \n\t" \
3858 "ldr x3, [%1, #32] \n\t" \
3859 "ldr x4, [%1, #40] \n\t" \
3860 "ldr x5, [%1, #48] \n\t" \
3861 "ldr x6, [%1, #56] \n\t" \
3862 "ldr x7, [%1, #64] \n\t" \
3863 "ldr x8, [%1, #72] \n\t" \
3864 "str x8, [sp, #0] \n\t" \
3865 "ldr x8, [%1] \n\t" /* target->x8 */ \
3866 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3867 VALGRIND_RESTORE_STACK \
3869 : /*out*/ "=r" (_res) \
3870 : /*in*/ "0" (&_argvec[0]) \
3871 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3873 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 10-arg call; args 1-8 -> x0-x7, args 9-10 stored at
   [sp, #0] and [sp, #8] in the reserved 0x20-byte area. */
3876 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3877 arg7,arg8,arg9,arg10) \
3879 volatile OrigFn _orig = (orig); \
3880 volatile unsigned long _argvec[11]; \
3881 volatile unsigned long _res; \
3882 _argvec[0] = (unsigned long)_orig.nraddr; \
3883 _argvec[1] = (unsigned long)(arg1); \
3884 _argvec[2] = (unsigned long)(arg2); \
3885 _argvec[3] = (unsigned long)(arg3); \
3886 _argvec[4] = (unsigned long)(arg4); \
3887 _argvec[5] = (unsigned long)(arg5); \
3888 _argvec[6] = (unsigned long)(arg6); \
3889 _argvec[7] = (unsigned long)(arg7); \
3890 _argvec[8] = (unsigned long)(arg8); \
3891 _argvec[9] = (unsigned long)(arg9); \
3892 _argvec[10] = (unsigned long)(arg10); \
3894 VALGRIND_ALIGN_STACK \
3895 "sub sp, sp, #0x20 \n\t" \
3896 "ldr x0, [%1, #8] \n\t" \
3897 "ldr x1, [%1, #16] \n\t" \
3898 "ldr x2, [%1, #24] \n\t" \
3899 "ldr x3, [%1, #32] \n\t" \
3900 "ldr x4, [%1, #40] \n\t" \
3901 "ldr x5, [%1, #48] \n\t" \
3902 "ldr x6, [%1, #56] \n\t" \
3903 "ldr x7, [%1, #64] \n\t" \
3904 "ldr x8, [%1, #72] \n\t" \
3905 "str x8, [sp, #0] \n\t" \
3906 "ldr x8, [%1, #80] \n\t" \
3907 "str x8, [sp, #8] \n\t" \
3908 "ldr x8, [%1] \n\t" /* target->x8 */ \
3909 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3910 VALGRIND_RESTORE_STACK \
3912 : /*out*/ "=r" (_res) \
3913 : /*in*/ "0" (&_argvec[0]) \
3914 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3916 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 11-arg call; args 1-8 -> x0-x7, args 9-11 stored at
   [sp, #0/#8/#16] in a reserved 0x30-byte area. */
3919 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3920 arg7,arg8,arg9,arg10,arg11) \
3922 volatile OrigFn _orig = (orig); \
3923 volatile unsigned long _argvec[12]; \
3924 volatile unsigned long _res; \
3925 _argvec[0] = (unsigned long)_orig.nraddr; \
3926 _argvec[1] = (unsigned long)(arg1); \
3927 _argvec[2] = (unsigned long)(arg2); \
3928 _argvec[3] = (unsigned long)(arg3); \
3929 _argvec[4] = (unsigned long)(arg4); \
3930 _argvec[5] = (unsigned long)(arg5); \
3931 _argvec[6] = (unsigned long)(arg6); \
3932 _argvec[7] = (unsigned long)(arg7); \
3933 _argvec[8] = (unsigned long)(arg8); \
3934 _argvec[9] = (unsigned long)(arg9); \
3935 _argvec[10] = (unsigned long)(arg10); \
3936 _argvec[11] = (unsigned long)(arg11); \
3938 VALGRIND_ALIGN_STACK \
3939 "sub sp, sp, #0x30 \n\t" \
3940 "ldr x0, [%1, #8] \n\t" \
3941 "ldr x1, [%1, #16] \n\t" \
3942 "ldr x2, [%1, #24] \n\t" \
3943 "ldr x3, [%1, #32] \n\t" \
3944 "ldr x4, [%1, #40] \n\t" \
3945 "ldr x5, [%1, #48] \n\t" \
3946 "ldr x6, [%1, #56] \n\t" \
3947 "ldr x7, [%1, #64] \n\t" \
3948 "ldr x8, [%1, #72] \n\t" \
3949 "str x8, [sp, #0] \n\t" \
3950 "ldr x8, [%1, #80] \n\t" \
3951 "str x8, [sp, #8] \n\t" \
3952 "ldr x8, [%1, #88] \n\t" \
3953 "str x8, [sp, #16] \n\t" \
3954 "ldr x8, [%1] \n\t" /* target->x8 */ \
3955 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3956 VALGRIND_RESTORE_STACK \
3958 : /*out*/ "=r" (_res) \
3959 : /*in*/ "0" (&_argvec[0]) \
3960 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3962 lval = (__typeof__(lval)) _res; \
/* arm64-linux: 12-arg call; args 1-8 -> x0-x7, args 9-12 stored at
   [sp, #0/#8/#16/#24] in a reserved 0x30-byte area. */
3965 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3966 arg7,arg8,arg9,arg10,arg11, \
3969 volatile OrigFn _orig = (orig); \
3970 volatile unsigned long _argvec[13]; \
3971 volatile unsigned long _res; \
3972 _argvec[0] = (unsigned long)_orig.nraddr; \
3973 _argvec[1] = (unsigned long)(arg1); \
3974 _argvec[2] = (unsigned long)(arg2); \
3975 _argvec[3] = (unsigned long)(arg3); \
3976 _argvec[4] = (unsigned long)(arg4); \
3977 _argvec[5] = (unsigned long)(arg5); \
3978 _argvec[6] = (unsigned long)(arg6); \
3979 _argvec[7] = (unsigned long)(arg7); \
3980 _argvec[8] = (unsigned long)(arg8); \
3981 _argvec[9] = (unsigned long)(arg9); \
3982 _argvec[10] = (unsigned long)(arg10); \
3983 _argvec[11] = (unsigned long)(arg11); \
3984 _argvec[12] = (unsigned long)(arg12); \
3986 VALGRIND_ALIGN_STACK \
3987 "sub sp, sp, #0x30 \n\t" \
3988 "ldr x0, [%1, #8] \n\t" \
3989 "ldr x1, [%1, #16] \n\t" \
3990 "ldr x2, [%1, #24] \n\t" \
3991 "ldr x3, [%1, #32] \n\t" \
3992 "ldr x4, [%1, #40] \n\t" \
3993 "ldr x5, [%1, #48] \n\t" \
3994 "ldr x6, [%1, #56] \n\t" \
3995 "ldr x7, [%1, #64] \n\t" \
3996 "ldr x8, [%1, #72] \n\t" \
3997 "str x8, [sp, #0] \n\t" \
3998 "ldr x8, [%1, #80] \n\t" \
3999 "str x8, [sp, #8] \n\t" \
4000 "ldr x8, [%1, #88] \n\t" \
4001 "str x8, [sp, #16] \n\t" \
4002 "ldr x8, [%1, #96] \n\t" \
4003 "str x8, [sp, #24] \n\t" \
4004 "ldr x8, [%1] \n\t" /* target->x8 */ \
4005 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4006 VALGRIND_RESTORE_STACK \
4008 : /*out*/ "=r" (_res) \
4009 : /*in*/ "0" (&_argvec[0]) \
4010 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4012 lval = (__typeof__(lval)) _res; \
4015 #endif /* PLAT_arm64_linux */
4017 /* ------------------------- s390x-linux ------------------------- */
4019 #if defined(PLAT_s390x_linux)
4021 /* Similar workaround as amd64 (see above), but we use r11 as frame
4022 pointer and save the old r11 in r7. r11 might be used for
4023 argvec, therefore we copy argvec in r1 since r1 is clobbered
4024 after the call anyway. */
/* s390x: emit DWARF CFI directives so unwinding works across the
   hidden call when the compiler supports CFI in asm; otherwise the
   macros collapse to (near-)empty.
   NOTE(review): several continuation lines of the prologue/epilogue
   bodies are missing from this dump — verify against upstream. */
4025 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4026 # define __FRAME_POINTER \
4027 ,"d"(__builtin_dwarf_cfa())
4028 # define VALGRIND_CFI_PROLOGUE \
4029 ".cfi_remember_state\n\t" \
4030 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4033 ".cfi_def_cfa r11, 0\n\t"
4034 # define VALGRIND_CFI_EPILOGUE \
4036 ".cfi_restore_state\n\t"
4038 # define __FRAME_POINTER
4039 # define VALGRIND_CFI_PROLOGUE \
4041 # define VALGRIND_CFI_EPILOGUE
4044 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4045 according to the s390 GCC maintainer. (The ABI specification is not
4046 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4047 VALGRIND_RESTORE_STACK are not defined here. */
4049 /* These regs are trashed by the hidden call. Note that we overwrite
4050 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4051 function a proper return address. All others are ABI defined call
/* s390x clobber list: GPRs 0-5 and 14, plus FPRs f0-f7. */
4053 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4054 "f0","f1","f2","f3","f4","f5","f6","f7"
4056 /* Nb: Although r11 is modified in the asm snippets below (inside
4057 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4059 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4061 (2) GCC will complain that r11 cannot appear inside a clobber section,
4062 when compiled with -O -fno-omit-frame-pointer
4065 #define CALL_FN_W_v(lval, orig) \
4067 volatile OrigFn _orig = (orig); \
4068 volatile unsigned long _argvec[1]; \
4069 volatile unsigned long _res; \
4070 _argvec[0] = (unsigned long)_orig.nraddr; \
4072 VALGRIND_CFI_PROLOGUE \
4073 "aghi 15,-160\n\t" \
4074 "lg 1, 0(1)\n\t" /* target->r1 */ \
4075 VALGRIND_CALL_NOREDIR_R1 \
4078 VALGRIND_CFI_EPILOGUE \
4079 : /*out*/ "=d" (_res) \
4080 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4081 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4083 lval = (__typeof__(lval)) _res; \
4086 /* The call abi has the arguments in r2-r6 and stack */
4087 #define CALL_FN_W_W(lval, orig, arg1) \
4089 volatile OrigFn _orig = (orig); \
4090 volatile unsigned long _argvec[2]; \
4091 volatile unsigned long _res; \
4092 _argvec[0] = (unsigned long)_orig.nraddr; \
4093 _argvec[1] = (unsigned long)arg1; \
4095 VALGRIND_CFI_PROLOGUE \
4096 "aghi 15,-160\n\t" \
4099 VALGRIND_CALL_NOREDIR_R1 \
4102 VALGRIND_CFI_EPILOGUE \
4103 : /*out*/ "=d" (_res) \
4104 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4105 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4107 lval = (__typeof__(lval)) _res; \
4110 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4112 volatile OrigFn _orig = (orig); \
4113 volatile unsigned long _argvec[3]; \
4114 volatile unsigned long _res; \
4115 _argvec[0] = (unsigned long)_orig.nraddr; \
4116 _argvec[1] = (unsigned long)arg1; \
4117 _argvec[2] = (unsigned long)arg2; \
4119 VALGRIND_CFI_PROLOGUE \
4120 "aghi 15,-160\n\t" \
4124 VALGRIND_CALL_NOREDIR_R1 \
4127 VALGRIND_CFI_EPILOGUE \
4128 : /*out*/ "=d" (_res) \
4129 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4130 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4132 lval = (__typeof__(lval)) _res; \
4135 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4137 volatile OrigFn _orig = (orig); \
4138 volatile unsigned long _argvec[4]; \
4139 volatile unsigned long _res; \
4140 _argvec[0] = (unsigned long)_orig.nraddr; \
4141 _argvec[1] = (unsigned long)arg1; \
4142 _argvec[2] = (unsigned long)arg2; \
4143 _argvec[3] = (unsigned long)arg3; \
4145 VALGRIND_CFI_PROLOGUE \
4146 "aghi 15,-160\n\t" \
4151 VALGRIND_CALL_NOREDIR_R1 \
4154 VALGRIND_CFI_EPILOGUE \
4155 : /*out*/ "=d" (_res) \
4156 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4157 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4159 lval = (__typeof__(lval)) _res; \
4162 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4164 volatile OrigFn _orig = (orig); \
4165 volatile unsigned long _argvec[5]; \
4166 volatile unsigned long _res; \
4167 _argvec[0] = (unsigned long)_orig.nraddr; \
4168 _argvec[1] = (unsigned long)arg1; \
4169 _argvec[2] = (unsigned long)arg2; \
4170 _argvec[3] = (unsigned long)arg3; \
4171 _argvec[4] = (unsigned long)arg4; \
4173 VALGRIND_CFI_PROLOGUE \
4174 "aghi 15,-160\n\t" \
4180 VALGRIND_CALL_NOREDIR_R1 \
4183 VALGRIND_CFI_EPILOGUE \
4184 : /*out*/ "=d" (_res) \
4185 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4186 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4188 lval = (__typeof__(lval)) _res; \
4191 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4193 volatile OrigFn _orig = (orig); \
4194 volatile unsigned long _argvec[6]; \
4195 volatile unsigned long _res; \
4196 _argvec[0] = (unsigned long)_orig.nraddr; \
4197 _argvec[1] = (unsigned long)arg1; \
4198 _argvec[2] = (unsigned long)arg2; \
4199 _argvec[3] = (unsigned long)arg3; \
4200 _argvec[4] = (unsigned long)arg4; \
4201 _argvec[5] = (unsigned long)arg5; \
4203 VALGRIND_CFI_PROLOGUE \
4204 "aghi 15,-160\n\t" \
4211 VALGRIND_CALL_NOREDIR_R1 \
4214 VALGRIND_CFI_EPILOGUE \
4215 : /*out*/ "=d" (_res) \
4216 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4217 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4219 lval = (__typeof__(lval)) _res; \
4222 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4225 volatile OrigFn _orig = (orig); \
4226 volatile unsigned long _argvec[7]; \
4227 volatile unsigned long _res; \
4228 _argvec[0] = (unsigned long)_orig.nraddr; \
4229 _argvec[1] = (unsigned long)arg1; \
4230 _argvec[2] = (unsigned long)arg2; \
4231 _argvec[3] = (unsigned long)arg3; \
4232 _argvec[4] = (unsigned long)arg4; \
4233 _argvec[5] = (unsigned long)arg5; \
4234 _argvec[6] = (unsigned long)arg6; \
4236 VALGRIND_CFI_PROLOGUE \
4237 "aghi 15,-168\n\t" \
4243 "mvc 160(8,15), 48(1)\n\t" \
4245 VALGRIND_CALL_NOREDIR_R1 \
4248 VALGRIND_CFI_EPILOGUE \
4249 : /*out*/ "=d" (_res) \
4250 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4251 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4253 lval = (__typeof__(lval)) _res; \
4256 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4259 volatile OrigFn _orig = (orig); \
4260 volatile unsigned long _argvec[8]; \
4261 volatile unsigned long _res; \
4262 _argvec[0] = (unsigned long)_orig.nraddr; \
4263 _argvec[1] = (unsigned long)arg1; \
4264 _argvec[2] = (unsigned long)arg2; \
4265 _argvec[3] = (unsigned long)arg3; \
4266 _argvec[4] = (unsigned long)arg4; \
4267 _argvec[5] = (unsigned long)arg5; \
4268 _argvec[6] = (unsigned long)arg6; \
4269 _argvec[7] = (unsigned long)arg7; \
4271 VALGRIND_CFI_PROLOGUE \
4272 "aghi 15,-176\n\t" \
4278 "mvc 160(8,15), 48(1)\n\t" \
4279 "mvc 168(8,15), 56(1)\n\t" \
4281 VALGRIND_CALL_NOREDIR_R1 \
4284 VALGRIND_CFI_EPILOGUE \
4285 : /*out*/ "=d" (_res) \
4286 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4287 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4289 lval = (__typeof__(lval)) _res; \
4292 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4295 volatile OrigFn _orig = (orig); \
4296 volatile unsigned long _argvec[9]; \
4297 volatile unsigned long _res; \
4298 _argvec[0] = (unsigned long)_orig.nraddr; \
4299 _argvec[1] = (unsigned long)arg1; \
4300 _argvec[2] = (unsigned long)arg2; \
4301 _argvec[3] = (unsigned long)arg3; \
4302 _argvec[4] = (unsigned long)arg4; \
4303 _argvec[5] = (unsigned long)arg5; \
4304 _argvec[6] = (unsigned long)arg6; \
4305 _argvec[7] = (unsigned long)arg7; \
4306 _argvec[8] = (unsigned long)arg8; \
4308 VALGRIND_CFI_PROLOGUE \
4309 "aghi 15,-184\n\t" \
4315 "mvc 160(8,15), 48(1)\n\t" \
4316 "mvc 168(8,15), 56(1)\n\t" \
4317 "mvc 176(8,15), 64(1)\n\t" \
4319 VALGRIND_CALL_NOREDIR_R1 \
4322 VALGRIND_CFI_EPILOGUE \
4323 : /*out*/ "=d" (_res) \
4324 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4325 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4327 lval = (__typeof__(lval)) _res; \
4330 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4331 arg6, arg7 ,arg8, arg9) \
4333 volatile OrigFn _orig = (orig); \
4334 volatile unsigned long _argvec[10]; \
4335 volatile unsigned long _res; \
4336 _argvec[0] = (unsigned long)_orig.nraddr; \
4337 _argvec[1] = (unsigned long)arg1; \
4338 _argvec[2] = (unsigned long)arg2; \
4339 _argvec[3] = (unsigned long)arg3; \
4340 _argvec[4] = (unsigned long)arg4; \
4341 _argvec[5] = (unsigned long)arg5; \
4342 _argvec[6] = (unsigned long)arg6; \
4343 _argvec[7] = (unsigned long)arg7; \
4344 _argvec[8] = (unsigned long)arg8; \
4345 _argvec[9] = (unsigned long)arg9; \
4347 VALGRIND_CFI_PROLOGUE \
4348 "aghi 15,-192\n\t" \
4354 "mvc 160(8,15), 48(1)\n\t" \
4355 "mvc 168(8,15), 56(1)\n\t" \
4356 "mvc 176(8,15), 64(1)\n\t" \
4357 "mvc 184(8,15), 72(1)\n\t" \
4359 VALGRIND_CALL_NOREDIR_R1 \
4362 VALGRIND_CFI_EPILOGUE \
4363 : /*out*/ "=d" (_res) \
4364 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4365 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4367 lval = (__typeof__(lval)) _res; \
4370 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4371 arg6, arg7 ,arg8, arg9, arg10) \
4373 volatile OrigFn _orig = (orig); \
4374 volatile unsigned long _argvec[11]; \
4375 volatile unsigned long _res; \
4376 _argvec[0] = (unsigned long)_orig.nraddr; \
4377 _argvec[1] = (unsigned long)arg1; \
4378 _argvec[2] = (unsigned long)arg2; \
4379 _argvec[3] = (unsigned long)arg3; \
4380 _argvec[4] = (unsigned long)arg4; \
4381 _argvec[5] = (unsigned long)arg5; \
4382 _argvec[6] = (unsigned long)arg6; \
4383 _argvec[7] = (unsigned long)arg7; \
4384 _argvec[8] = (unsigned long)arg8; \
4385 _argvec[9] = (unsigned long)arg9; \
4386 _argvec[10] = (unsigned long)arg10; \
4388 VALGRIND_CFI_PROLOGUE \
4389 "aghi 15,-200\n\t" \
4395 "mvc 160(8,15), 48(1)\n\t" \
4396 "mvc 168(8,15), 56(1)\n\t" \
4397 "mvc 176(8,15), 64(1)\n\t" \
4398 "mvc 184(8,15), 72(1)\n\t" \
4399 "mvc 192(8,15), 80(1)\n\t" \
4401 VALGRIND_CALL_NOREDIR_R1 \
4404 VALGRIND_CFI_EPILOGUE \
4405 : /*out*/ "=d" (_res) \
4406 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4407 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4409 lval = (__typeof__(lval)) _res; \
4412 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4413 arg6, arg7 ,arg8, arg9, arg10, arg11) \
4415 volatile OrigFn _orig = (orig); \
4416 volatile unsigned long _argvec[12]; \
4417 volatile unsigned long _res; \
4418 _argvec[0] = (unsigned long)_orig.nraddr; \
4419 _argvec[1] = (unsigned long)arg1; \
4420 _argvec[2] = (unsigned long)arg2; \
4421 _argvec[3] = (unsigned long)arg3; \
4422 _argvec[4] = (unsigned long)arg4; \
4423 _argvec[5] = (unsigned long)arg5; \
4424 _argvec[6] = (unsigned long)arg6; \
4425 _argvec[7] = (unsigned long)arg7; \
4426 _argvec[8] = (unsigned long)arg8; \
4427 _argvec[9] = (unsigned long)arg9; \
4428 _argvec[10] = (unsigned long)arg10; \
4429 _argvec[11] = (unsigned long)arg11; \
4431 VALGRIND_CFI_PROLOGUE \
4432 "aghi 15,-208\n\t" \
4438 "mvc 160(8,15), 48(1)\n\t" \
4439 "mvc 168(8,15), 56(1)\n\t" \
4440 "mvc 176(8,15), 64(1)\n\t" \
4441 "mvc 184(8,15), 72(1)\n\t" \
4442 "mvc 192(8,15), 80(1)\n\t" \
4443 "mvc 200(8,15), 88(1)\n\t" \
4445 VALGRIND_CALL_NOREDIR_R1 \
4448 VALGRIND_CFI_EPILOGUE \
4449 : /*out*/ "=d" (_res) \
4450 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4451 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4453 lval = (__typeof__(lval)) _res; \
4456 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4457 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
4459 volatile OrigFn _orig = (orig); \
4460 volatile unsigned long _argvec[13]; \
4461 volatile unsigned long _res; \
4462 _argvec[0] = (unsigned long)_orig.nraddr; \
4463 _argvec[1] = (unsigned long)arg1; \
4464 _argvec[2] = (unsigned long)arg2; \
4465 _argvec[3] = (unsigned long)arg3; \
4466 _argvec[4] = (unsigned long)arg4; \
4467 _argvec[5] = (unsigned long)arg5; \
4468 _argvec[6] = (unsigned long)arg6; \
4469 _argvec[7] = (unsigned long)arg7; \
4470 _argvec[8] = (unsigned long)arg8; \
4471 _argvec[9] = (unsigned long)arg9; \
4472 _argvec[10] = (unsigned long)arg10; \
4473 _argvec[11] = (unsigned long)arg11; \
4474 _argvec[12] = (unsigned long)arg12; \
4476 VALGRIND_CFI_PROLOGUE \
4477 "aghi 15,-216\n\t" \
4483 "mvc 160(8,15), 48(1)\n\t" \
4484 "mvc 168(8,15), 56(1)\n\t" \
4485 "mvc 176(8,15), 64(1)\n\t" \
4486 "mvc 184(8,15), 72(1)\n\t" \
4487 "mvc 192(8,15), 80(1)\n\t" \
4488 "mvc 200(8,15), 88(1)\n\t" \
4489 "mvc 208(8,15), 96(1)\n\t" \
4491 VALGRIND_CALL_NOREDIR_R1 \
4494 VALGRIND_CFI_EPILOGUE \
4495 : /*out*/ "=d" (_res) \
4496 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4497 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4499 lval = (__typeof__(lval)) _res; \
4503 #endif /* PLAT_s390x_linux */
4505 /* ------------------------- mips32-linux ----------------------- */
4507 #if defined(PLAT_mips32_linux)
4509 /* These regs are trashed by the hidden call. */
4510 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4511 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4514 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
4517 #define CALL_FN_W_v(lval, orig) \
4519 volatile OrigFn _orig = (orig); \
4520 volatile unsigned long _argvec[1]; \
4521 volatile unsigned long _res; \
4522 _argvec[0] = (unsigned long)_orig.nraddr; \
4524 "subu $29, $29, 8 \n\t" \
4525 "sw $28, 0($29) \n\t" \
4526 "sw $31, 4($29) \n\t" \
4527 "subu $29, $29, 16 \n\t" \
4528 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4529 VALGRIND_CALL_NOREDIR_T9 \
4530 "addu $29, $29, 16\n\t" \
4531 "lw $28, 0($29) \n\t" \
4532 "lw $31, 4($29) \n\t" \
4533 "addu $29, $29, 8 \n\t" \
4535 : /*out*/ "=r" (_res) \
4536 : /*in*/ "0" (&_argvec[0]) \
4537 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4539 lval = (__typeof__(lval)) _res; \
4542 #define CALL_FN_W_W(lval, orig, arg1) \
4544 volatile OrigFn _orig = (orig); \
4545 volatile unsigned long _argvec[2]; \
4546 volatile unsigned long _res; \
4547 _argvec[0] = (unsigned long)_orig.nraddr; \
4548 _argvec[1] = (unsigned long)(arg1); \
4550 "subu $29, $29, 8 \n\t" \
4551 "sw $28, 0($29) \n\t" \
4552 "sw $31, 4($29) \n\t" \
4553 "subu $29, $29, 16 \n\t" \
4554 "lw $4, 4(%1) \n\t" /* arg1*/ \
4555 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4556 VALGRIND_CALL_NOREDIR_T9 \
4557 "addu $29, $29, 16 \n\t" \
4558 "lw $28, 0($29) \n\t" \
4559 "lw $31, 4($29) \n\t" \
4560 "addu $29, $29, 8 \n\t" \
4562 : /*out*/ "=r" (_res) \
4563 : /*in*/ "0" (&_argvec[0]) \
4564 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4566 lval = (__typeof__(lval)) _res; \
4569 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4571 volatile OrigFn _orig = (orig); \
4572 volatile unsigned long _argvec[3]; \
4573 volatile unsigned long _res; \
4574 _argvec[0] = (unsigned long)_orig.nraddr; \
4575 _argvec[1] = (unsigned long)(arg1); \
4576 _argvec[2] = (unsigned long)(arg2); \
4578 "subu $29, $29, 8 \n\t" \
4579 "sw $28, 0($29) \n\t" \
4580 "sw $31, 4($29) \n\t" \
4581 "subu $29, $29, 16 \n\t" \
4582 "lw $4, 4(%1) \n\t" \
4583 "lw $5, 8(%1) \n\t" \
4584 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4585 VALGRIND_CALL_NOREDIR_T9 \
4586 "addu $29, $29, 16 \n\t" \
4587 "lw $28, 0($29) \n\t" \
4588 "lw $31, 4($29) \n\t" \
4589 "addu $29, $29, 8 \n\t" \
4591 : /*out*/ "=r" (_res) \
4592 : /*in*/ "0" (&_argvec[0]) \
4593 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4595 lval = (__typeof__(lval)) _res; \
4598 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4600 volatile OrigFn _orig = (orig); \
4601 volatile unsigned long _argvec[4]; \
4602 volatile unsigned long _res; \
4603 _argvec[0] = (unsigned long)_orig.nraddr; \
4604 _argvec[1] = (unsigned long)(arg1); \
4605 _argvec[2] = (unsigned long)(arg2); \
4606 _argvec[3] = (unsigned long)(arg3); \
4608 "subu $29, $29, 8 \n\t" \
4609 "sw $28, 0($29) \n\t" \
4610 "sw $31, 4($29) \n\t" \
4611 "subu $29, $29, 16 \n\t" \
4612 "lw $4, 4(%1) \n\t" \
4613 "lw $5, 8(%1) \n\t" \
4614 "lw $6, 12(%1) \n\t" \
4615 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4616 VALGRIND_CALL_NOREDIR_T9 \
4617 "addu $29, $29, 16 \n\t" \
4618 "lw $28, 0($29) \n\t" \
4619 "lw $31, 4($29) \n\t" \
4620 "addu $29, $29, 8 \n\t" \
4622 : /*out*/ "=r" (_res) \
4623 : /*in*/ "0" (&_argvec[0]) \
4624 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4626 lval = (__typeof__(lval)) _res; \
4629 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4631 volatile OrigFn _orig = (orig); \
4632 volatile unsigned long _argvec[5]; \
4633 volatile unsigned long _res; \
4634 _argvec[0] = (unsigned long)_orig.nraddr; \
4635 _argvec[1] = (unsigned long)(arg1); \
4636 _argvec[2] = (unsigned long)(arg2); \
4637 _argvec[3] = (unsigned long)(arg3); \
4638 _argvec[4] = (unsigned long)(arg4); \
4640 "subu $29, $29, 8 \n\t" \
4641 "sw $28, 0($29) \n\t" \
4642 "sw $31, 4($29) \n\t" \
4643 "subu $29, $29, 16 \n\t" \
4644 "lw $4, 4(%1) \n\t" \
4645 "lw $5, 8(%1) \n\t" \
4646 "lw $6, 12(%1) \n\t" \
4647 "lw $7, 16(%1) \n\t" \
4648 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4649 VALGRIND_CALL_NOREDIR_T9 \
4650 "addu $29, $29, 16 \n\t" \
4651 "lw $28, 0($29) \n\t" \
4652 "lw $31, 4($29) \n\t" \
4653 "addu $29, $29, 8 \n\t" \
4655 : /*out*/ "=r" (_res) \
4656 : /*in*/ "0" (&_argvec[0]) \
4657 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4659 lval = (__typeof__(lval)) _res; \
4662 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4664 volatile OrigFn _orig = (orig); \
4665 volatile unsigned long _argvec[6]; \
4666 volatile unsigned long _res; \
4667 _argvec[0] = (unsigned long)_orig.nraddr; \
4668 _argvec[1] = (unsigned long)(arg1); \
4669 _argvec[2] = (unsigned long)(arg2); \
4670 _argvec[3] = (unsigned long)(arg3); \
4671 _argvec[4] = (unsigned long)(arg4); \
4672 _argvec[5] = (unsigned long)(arg5); \
4674 "subu $29, $29, 8 \n\t" \
4675 "sw $28, 0($29) \n\t" \
4676 "sw $31, 4($29) \n\t" \
4677 "lw $4, 20(%1) \n\t" \
4678 "subu $29, $29, 24\n\t" \
4679 "sw $4, 16($29) \n\t" \
4680 "lw $4, 4(%1) \n\t" \
4681 "lw $5, 8(%1) \n\t" \
4682 "lw $6, 12(%1) \n\t" \
4683 "lw $7, 16(%1) \n\t" \
4684 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4685 VALGRIND_CALL_NOREDIR_T9 \
4686 "addu $29, $29, 24 \n\t" \
4687 "lw $28, 0($29) \n\t" \
4688 "lw $31, 4($29) \n\t" \
4689 "addu $29, $29, 8 \n\t" \
4691 : /*out*/ "=r" (_res) \
4692 : /*in*/ "0" (&_argvec[0]) \
4693 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4695 lval = (__typeof__(lval)) _res; \
4697 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4699 volatile OrigFn _orig = (orig); \
4700 volatile unsigned long _argvec[7]; \
4701 volatile unsigned long _res; \
4702 _argvec[0] = (unsigned long)_orig.nraddr; \
4703 _argvec[1] = (unsigned long)(arg1); \
4704 _argvec[2] = (unsigned long)(arg2); \
4705 _argvec[3] = (unsigned long)(arg3); \
4706 _argvec[4] = (unsigned long)(arg4); \
4707 _argvec[5] = (unsigned long)(arg5); \
4708 _argvec[6] = (unsigned long)(arg6); \
4710 "subu $29, $29, 8 \n\t" \
4711 "sw $28, 0($29) \n\t" \
4712 "sw $31, 4($29) \n\t" \
4713 "lw $4, 20(%1) \n\t" \
4714 "subu $29, $29, 32\n\t" \
4715 "sw $4, 16($29) \n\t" \
4716 "lw $4, 24(%1) \n\t" \
4718 "sw $4, 20($29) \n\t" \
4719 "lw $4, 4(%1) \n\t" \
4720 "lw $5, 8(%1) \n\t" \
4721 "lw $6, 12(%1) \n\t" \
4722 "lw $7, 16(%1) \n\t" \
4723 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4724 VALGRIND_CALL_NOREDIR_T9 \
4725 "addu $29, $29, 32 \n\t" \
4726 "lw $28, 0($29) \n\t" \
4727 "lw $31, 4($29) \n\t" \
4728 "addu $29, $29, 8 \n\t" \
4730 : /*out*/ "=r" (_res) \
4731 : /*in*/ "0" (&_argvec[0]) \
4732 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4734 lval = (__typeof__(lval)) _res; \
4737 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4740 volatile OrigFn _orig = (orig); \
4741 volatile unsigned long _argvec[8]; \
4742 volatile unsigned long _res; \
4743 _argvec[0] = (unsigned long)_orig.nraddr; \
4744 _argvec[1] = (unsigned long)(arg1); \
4745 _argvec[2] = (unsigned long)(arg2); \
4746 _argvec[3] = (unsigned long)(arg3); \
4747 _argvec[4] = (unsigned long)(arg4); \
4748 _argvec[5] = (unsigned long)(arg5); \
4749 _argvec[6] = (unsigned long)(arg6); \
4750 _argvec[7] = (unsigned long)(arg7); \
4752 "subu $29, $29, 8 \n\t" \
4753 "sw $28, 0($29) \n\t" \
4754 "sw $31, 4($29) \n\t" \
4755 "lw $4, 20(%1) \n\t" \
4756 "subu $29, $29, 32\n\t" \
4757 "sw $4, 16($29) \n\t" \
4758 "lw $4, 24(%1) \n\t" \
4759 "sw $4, 20($29) \n\t" \
4760 "lw $4, 28(%1) \n\t" \
4761 "sw $4, 24($29) \n\t" \
4762 "lw $4, 4(%1) \n\t" \
4763 "lw $5, 8(%1) \n\t" \
4764 "lw $6, 12(%1) \n\t" \
4765 "lw $7, 16(%1) \n\t" \
4766 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4767 VALGRIND_CALL_NOREDIR_T9 \
4768 "addu $29, $29, 32 \n\t" \
4769 "lw $28, 0($29) \n\t" \
4770 "lw $31, 4($29) \n\t" \
4771 "addu $29, $29, 8 \n\t" \
4773 : /*out*/ "=r" (_res) \
4774 : /*in*/ "0" (&_argvec[0]) \
4775 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4777 lval = (__typeof__(lval)) _res; \
4780 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4783 volatile OrigFn _orig = (orig); \
4784 volatile unsigned long _argvec[9]; \
4785 volatile unsigned long _res; \
4786 _argvec[0] = (unsigned long)_orig.nraddr; \
4787 _argvec[1] = (unsigned long)(arg1); \
4788 _argvec[2] = (unsigned long)(arg2); \
4789 _argvec[3] = (unsigned long)(arg3); \
4790 _argvec[4] = (unsigned long)(arg4); \
4791 _argvec[5] = (unsigned long)(arg5); \
4792 _argvec[6] = (unsigned long)(arg6); \
4793 _argvec[7] = (unsigned long)(arg7); \
4794 _argvec[8] = (unsigned long)(arg8); \
4796 "subu $29, $29, 8 \n\t" \
4797 "sw $28, 0($29) \n\t" \
4798 "sw $31, 4($29) \n\t" \
4799 "lw $4, 20(%1) \n\t" \
4800 "subu $29, $29, 40\n\t" \
4801 "sw $4, 16($29) \n\t" \
4802 "lw $4, 24(%1) \n\t" \
4803 "sw $4, 20($29) \n\t" \
4804 "lw $4, 28(%1) \n\t" \
4805 "sw $4, 24($29) \n\t" \
4806 "lw $4, 32(%1) \n\t" \
4807 "sw $4, 28($29) \n\t" \
4808 "lw $4, 4(%1) \n\t" \
4809 "lw $5, 8(%1) \n\t" \
4810 "lw $6, 12(%1) \n\t" \
4811 "lw $7, 16(%1) \n\t" \
4812 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4813 VALGRIND_CALL_NOREDIR_T9 \
4814 "addu $29, $29, 40 \n\t" \
4815 "lw $28, 0($29) \n\t" \
4816 "lw $31, 4($29) \n\t" \
4817 "addu $29, $29, 8 \n\t" \
4819 : /*out*/ "=r" (_res) \
4820 : /*in*/ "0" (&_argvec[0]) \
4821 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4823 lval = (__typeof__(lval)) _res; \
4826 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4829 volatile OrigFn _orig = (orig); \
4830 volatile unsigned long _argvec[10]; \
4831 volatile unsigned long _res; \
4832 _argvec[0] = (unsigned long)_orig.nraddr; \
4833 _argvec[1] = (unsigned long)(arg1); \
4834 _argvec[2] = (unsigned long)(arg2); \
4835 _argvec[3] = (unsigned long)(arg3); \
4836 _argvec[4] = (unsigned long)(arg4); \
4837 _argvec[5] = (unsigned long)(arg5); \
4838 _argvec[6] = (unsigned long)(arg6); \
4839 _argvec[7] = (unsigned long)(arg7); \
4840 _argvec[8] = (unsigned long)(arg8); \
4841 _argvec[9] = (unsigned long)(arg9); \
4843 "subu $29, $29, 8 \n\t" \
4844 "sw $28, 0($29) \n\t" \
4845 "sw $31, 4($29) \n\t" \
4846 "lw $4, 20(%1) \n\t" \
4847 "subu $29, $29, 40\n\t" \
4848 "sw $4, 16($29) \n\t" \
4849 "lw $4, 24(%1) \n\t" \
4850 "sw $4, 20($29) \n\t" \
4851 "lw $4, 28(%1) \n\t" \
4852 "sw $4, 24($29) \n\t" \
4853 "lw $4, 32(%1) \n\t" \
4854 "sw $4, 28($29) \n\t" \
4855 "lw $4, 36(%1) \n\t" \
4856 "sw $4, 32($29) \n\t" \
4857 "lw $4, 4(%1) \n\t" \
4858 "lw $5, 8(%1) \n\t" \
4859 "lw $6, 12(%1) \n\t" \
4860 "lw $7, 16(%1) \n\t" \
4861 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4862 VALGRIND_CALL_NOREDIR_T9 \
4863 "addu $29, $29, 40 \n\t" \
4864 "lw $28, 0($29) \n\t" \
4865 "lw $31, 4($29) \n\t" \
4866 "addu $29, $29, 8 \n\t" \
4868 : /*out*/ "=r" (_res) \
4869 : /*in*/ "0" (&_argvec[0]) \
4870 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4872 lval = (__typeof__(lval)) _res; \
4875 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4876 arg7,arg8,arg9,arg10) \
4878 volatile OrigFn _orig = (orig); \
4879 volatile unsigned long _argvec[11]; \
4880 volatile unsigned long _res; \
4881 _argvec[0] = (unsigned long)_orig.nraddr; \
4882 _argvec[1] = (unsigned long)(arg1); \
4883 _argvec[2] = (unsigned long)(arg2); \
4884 _argvec[3] = (unsigned long)(arg3); \
4885 _argvec[4] = (unsigned long)(arg4); \
4886 _argvec[5] = (unsigned long)(arg5); \
4887 _argvec[6] = (unsigned long)(arg6); \
4888 _argvec[7] = (unsigned long)(arg7); \
4889 _argvec[8] = (unsigned long)(arg8); \
4890 _argvec[9] = (unsigned long)(arg9); \
4891 _argvec[10] = (unsigned long)(arg10); \
4893 "subu $29, $29, 8 \n\t" \
4894 "sw $28, 0($29) \n\t" \
4895 "sw $31, 4($29) \n\t" \
4896 "lw $4, 20(%1) \n\t" \
4897 "subu $29, $29, 48\n\t" \
4898 "sw $4, 16($29) \n\t" \
4899 "lw $4, 24(%1) \n\t" \
4900 "sw $4, 20($29) \n\t" \
4901 "lw $4, 28(%1) \n\t" \
4902 "sw $4, 24($29) \n\t" \
4903 "lw $4, 32(%1) \n\t" \
4904 "sw $4, 28($29) \n\t" \
4905 "lw $4, 36(%1) \n\t" \
4906 "sw $4, 32($29) \n\t" \
4907 "lw $4, 40(%1) \n\t" \
4908 "sw $4, 36($29) \n\t" \
4909 "lw $4, 4(%1) \n\t" \
4910 "lw $5, 8(%1) \n\t" \
4911 "lw $6, 12(%1) \n\t" \
4912 "lw $7, 16(%1) \n\t" \
4913 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4914 VALGRIND_CALL_NOREDIR_T9 \
4915 "addu $29, $29, 48 \n\t" \
4916 "lw $28, 0($29) \n\t" \
4917 "lw $31, 4($29) \n\t" \
4918 "addu $29, $29, 8 \n\t" \
4920 : /*out*/ "=r" (_res) \
4921 : /*in*/ "0" (&_argvec[0]) \
4922 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4924 lval = (__typeof__(lval)) _res; \
4927 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4928 arg6,arg7,arg8,arg9,arg10, \
4931 volatile OrigFn _orig = (orig); \
4932 volatile unsigned long _argvec[12]; \
4933 volatile unsigned long _res; \
4934 _argvec[0] = (unsigned long)_orig.nraddr; \
4935 _argvec[1] = (unsigned long)(arg1); \
4936 _argvec[2] = (unsigned long)(arg2); \
4937 _argvec[3] = (unsigned long)(arg3); \
4938 _argvec[4] = (unsigned long)(arg4); \
4939 _argvec[5] = (unsigned long)(arg5); \
4940 _argvec[6] = (unsigned long)(arg6); \
4941 _argvec[7] = (unsigned long)(arg7); \
4942 _argvec[8] = (unsigned long)(arg8); \
4943 _argvec[9] = (unsigned long)(arg9); \
4944 _argvec[10] = (unsigned long)(arg10); \
4945 _argvec[11] = (unsigned long)(arg11); \
4947 "subu $29, $29, 8 \n\t" \
4948 "sw $28, 0($29) \n\t" \
4949 "sw $31, 4($29) \n\t" \
4950 "lw $4, 20(%1) \n\t" \
4951 "subu $29, $29, 48\n\t" \
4952 "sw $4, 16($29) \n\t" \
4953 "lw $4, 24(%1) \n\t" \
4954 "sw $4, 20($29) \n\t" \
4955 "lw $4, 28(%1) \n\t" \
4956 "sw $4, 24($29) \n\t" \
4957 "lw $4, 32(%1) \n\t" \
4958 "sw $4, 28($29) \n\t" \
4959 "lw $4, 36(%1) \n\t" \
4960 "sw $4, 32($29) \n\t" \
4961 "lw $4, 40(%1) \n\t" \
4962 "sw $4, 36($29) \n\t" \
4963 "lw $4, 44(%1) \n\t" \
4964 "sw $4, 40($29) \n\t" \
4965 "lw $4, 4(%1) \n\t" \
4966 "lw $5, 8(%1) \n\t" \
4967 "lw $6, 12(%1) \n\t" \
4968 "lw $7, 16(%1) \n\t" \
4969 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4970 VALGRIND_CALL_NOREDIR_T9 \
4971 "addu $29, $29, 48 \n\t" \
4972 "lw $28, 0($29) \n\t" \
4973 "lw $31, 4($29) \n\t" \
4974 "addu $29, $29, 8 \n\t" \
4976 : /*out*/ "=r" (_res) \
4977 : /*in*/ "0" (&_argvec[0]) \
4978 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4980 lval = (__typeof__(lval)) _res; \
4983 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4984 arg6,arg7,arg8,arg9,arg10, \
4987 volatile OrigFn _orig = (orig); \
4988 volatile unsigned long _argvec[13]; \
4989 volatile unsigned long _res; \
4990 _argvec[0] = (unsigned long)_orig.nraddr; \
4991 _argvec[1] = (unsigned long)(arg1); \
4992 _argvec[2] = (unsigned long)(arg2); \
4993 _argvec[3] = (unsigned long)(arg3); \
4994 _argvec[4] = (unsigned long)(arg4); \
4995 _argvec[5] = (unsigned long)(arg5); \
4996 _argvec[6] = (unsigned long)(arg6); \
4997 _argvec[7] = (unsigned long)(arg7); \
4998 _argvec[8] = (unsigned long)(arg8); \
4999 _argvec[9] = (unsigned long)(arg9); \
5000 _argvec[10] = (unsigned long)(arg10); \
5001 _argvec[11] = (unsigned long)(arg11); \
5002 _argvec[12] = (unsigned long)(arg12); \
5004 "subu $29, $29, 8 \n\t" \
5005 "sw $28, 0($29) \n\t" \
5006 "sw $31, 4($29) \n\t" \
5007 "lw $4, 20(%1) \n\t" \
5008 "subu $29, $29, 56\n\t" \
5009 "sw $4, 16($29) \n\t" \
5010 "lw $4, 24(%1) \n\t" \
5011 "sw $4, 20($29) \n\t" \
5012 "lw $4, 28(%1) \n\t" \
5013 "sw $4, 24($29) \n\t" \
5014 "lw $4, 32(%1) \n\t" \
5015 "sw $4, 28($29) \n\t" \
5016 "lw $4, 36(%1) \n\t" \
5017 "sw $4, 32($29) \n\t" \
5018 "lw $4, 40(%1) \n\t" \
5019 "sw $4, 36($29) \n\t" \
5020 "lw $4, 44(%1) \n\t" \
5021 "sw $4, 40($29) \n\t" \
5022 "lw $4, 48(%1) \n\t" \
5023 "sw $4, 44($29) \n\t" \
5024 "lw $4, 4(%1) \n\t" \
5025 "lw $5, 8(%1) \n\t" \
5026 "lw $6, 12(%1) \n\t" \
5027 "lw $7, 16(%1) \n\t" \
5028 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5029 VALGRIND_CALL_NOREDIR_T9 \
5030 "addu $29, $29, 56 \n\t" \
5031 "lw $28, 0($29) \n\t" \
5032 "lw $31, 4($29) \n\t" \
5033 "addu $29, $29, 8 \n\t" \
5035 : /*out*/ "=r" (_res) \
5036 : /*in*/ "r" (&_argvec[0]) \
5037 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5039 lval = (__typeof__(lval)) _res; \
5042 #endif /* PLAT_mips32_linux */
5044 /* ------------------------- mips64-linux ------------------------- */
5046 #if defined(PLAT_mips64_linux)
5048 /* These regs are trashed by the hidden call. */
5049 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5050 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5053 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5056 #define CALL_FN_W_v(lval, orig) \
5058 volatile OrigFn _orig = (orig); \
5059 volatile unsigned long _argvec[1]; \
5060 volatile unsigned long _res; \
5061 _argvec[0] = (unsigned long)_orig.nraddr; \
5063 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5064 VALGRIND_CALL_NOREDIR_T9 \
5066 : /*out*/ "=r" (_res) \
5067 : /*in*/ "0" (&_argvec[0]) \
5068 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5070 lval = (__typeof__(lval)) _res; \
5073 #define CALL_FN_W_W(lval, orig, arg1) \
5075 volatile OrigFn _orig = (orig); \
5076 volatile unsigned long _argvec[2]; \
5077 volatile unsigned long _res; \
5078 _argvec[0] = (unsigned long)_orig.nraddr; \
5079 _argvec[1] = (unsigned long)(arg1); \
5081 "ld $4, 8(%1)\n\t" /* arg1*/ \
5082 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5083 VALGRIND_CALL_NOREDIR_T9 \
5085 : /*out*/ "=r" (_res) \
5086 : /*in*/ "r" (&_argvec[0]) \
5087 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5089 lval = (__typeof__(lval)) _res; \
5092 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5094 volatile OrigFn _orig = (orig); \
5095 volatile unsigned long _argvec[3]; \
5096 volatile unsigned long _res; \
5097 _argvec[0] = (unsigned long)_orig.nraddr; \
5098 _argvec[1] = (unsigned long)(arg1); \
5099 _argvec[2] = (unsigned long)(arg2); \
5101 "ld $4, 8(%1)\n\t" \
5102 "ld $5, 16(%1)\n\t" \
5103 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5104 VALGRIND_CALL_NOREDIR_T9 \
5106 : /*out*/ "=r" (_res) \
5107 : /*in*/ "r" (&_argvec[0]) \
5108 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5110 lval = (__typeof__(lval)) _res; \
5113 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5115 volatile OrigFn _orig = (orig); \
5116 volatile unsigned long _argvec[4]; \
5117 volatile unsigned long _res; \
5118 _argvec[0] = (unsigned long)_orig.nraddr; \
5119 _argvec[1] = (unsigned long)(arg1); \
5120 _argvec[2] = (unsigned long)(arg2); \
5121 _argvec[3] = (unsigned long)(arg3); \
5123 "ld $4, 8(%1)\n\t" \
5124 "ld $5, 16(%1)\n\t" \
5125 "ld $6, 24(%1)\n\t" \
5126 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5127 VALGRIND_CALL_NOREDIR_T9 \
5129 : /*out*/ "=r" (_res) \
5130 : /*in*/ "r" (&_argvec[0]) \
5131 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5133 lval = (__typeof__(lval)) _res; \
5136 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5138 volatile OrigFn _orig = (orig); \
5139 volatile unsigned long _argvec[5]; \
5140 volatile unsigned long _res; \
5141 _argvec[0] = (unsigned long)_orig.nraddr; \
5142 _argvec[1] = (unsigned long)(arg1); \
5143 _argvec[2] = (unsigned long)(arg2); \
5144 _argvec[3] = (unsigned long)(arg3); \
5145 _argvec[4] = (unsigned long)(arg4); \
5147 "ld $4, 8(%1)\n\t" \
5148 "ld $5, 16(%1)\n\t" \
5149 "ld $6, 24(%1)\n\t" \
5150 "ld $7, 32(%1)\n\t" \
5151 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5152 VALGRIND_CALL_NOREDIR_T9 \
5154 : /*out*/ "=r" (_res) \
5155 : /*in*/ "r" (&_argvec[0]) \
5156 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5158 lval = (__typeof__(lval)) _res; \
5161 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5163 volatile OrigFn _orig = (orig); \
5164 volatile unsigned long _argvec[6]; \
5165 volatile unsigned long _res; \
5166 _argvec[0] = (unsigned long)_orig.nraddr; \
5167 _argvec[1] = (unsigned long)(arg1); \
5168 _argvec[2] = (unsigned long)(arg2); \
5169 _argvec[3] = (unsigned long)(arg3); \
5170 _argvec[4] = (unsigned long)(arg4); \
5171 _argvec[5] = (unsigned long)(arg5); \
5173 "ld $4, 8(%1)\n\t" \
5174 "ld $5, 16(%1)\n\t" \
5175 "ld $6, 24(%1)\n\t" \
5176 "ld $7, 32(%1)\n\t" \
5177 "ld $8, 40(%1)\n\t" \
5178 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5179 VALGRIND_CALL_NOREDIR_T9 \
5181 : /*out*/ "=r" (_res) \
5182 : /*in*/ "r" (&_argvec[0]) \
5183 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5185 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 6-arg call wrapper -- args go in $4..$9
   (a0..a5), callee address in $25 (t9). Listing appears truncated
   (asm/do-while framing lines missing); verify against upstream
   valgrind.h. */
5188 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5190 volatile OrigFn _orig = (orig); \
5191 volatile unsigned long _argvec[7]; \
5192 volatile unsigned long _res; \
5193 _argvec[0] = (unsigned long)_orig.nraddr; \
5194 _argvec[1] = (unsigned long)(arg1); \
5195 _argvec[2] = (unsigned long)(arg2); \
5196 _argvec[3] = (unsigned long)(arg3); \
5197 _argvec[4] = (unsigned long)(arg4); \
5198 _argvec[5] = (unsigned long)(arg5); \
5199 _argvec[6] = (unsigned long)(arg6); \
5201 "ld $4, 8(%1)\n\t" \
5202 "ld $5, 16(%1)\n\t" \
5203 "ld $6, 24(%1)\n\t" \
5204 "ld $7, 32(%1)\n\t" \
5205 "ld $8, 40(%1)\n\t" \
5206 "ld $9, 48(%1)\n\t" \
5207 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5208 VALGRIND_CALL_NOREDIR_T9 \
5210 : /*out*/ "=r" (_res) \
5211 : /*in*/ "r" (&_argvec[0]) \
5212 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5214 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 7-arg call wrapper -- args in $4..$10 (a0..a6).
   The macro's parameter-list continuation line (", arg7)") is not
   visible in this listing, and `"ld $25, 0(%1) \n\t"` carries a stray
   space before \n unlike the sibling macros (harmless to the
   assembler, but inconsistent). Verify against upstream valgrind.h. */
5217 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5220 volatile OrigFn _orig = (orig); \
5221 volatile unsigned long _argvec[8]; \
5222 volatile unsigned long _res; \
5223 _argvec[0] = (unsigned long)_orig.nraddr; \
5224 _argvec[1] = (unsigned long)(arg1); \
5225 _argvec[2] = (unsigned long)(arg2); \
5226 _argvec[3] = (unsigned long)(arg3); \
5227 _argvec[4] = (unsigned long)(arg4); \
5228 _argvec[5] = (unsigned long)(arg5); \
5229 _argvec[6] = (unsigned long)(arg6); \
5230 _argvec[7] = (unsigned long)(arg7); \
5232 "ld $4, 8(%1)\n\t" \
5233 "ld $5, 16(%1)\n\t" \
5234 "ld $6, 24(%1)\n\t" \
5235 "ld $7, 32(%1)\n\t" \
5236 "ld $8, 40(%1)\n\t" \
5237 "ld $9, 48(%1)\n\t" \
5238 "ld $10, 56(%1)\n\t" \
5239 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5240 VALGRIND_CALL_NOREDIR_T9 \
5242 : /*out*/ "=r" (_res) \
5243 : /*in*/ "r" (&_argvec[0]) \
5244 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5246 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 8-arg call wrapper -- args fill all eight
   integer argument registers $4..$11 (a0..a7). Parameter-list
   continuation line (", arg7,arg8)") not visible in this listing;
   verify against upstream valgrind.h. */
5249 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5252 volatile OrigFn _orig = (orig); \
5253 volatile unsigned long _argvec[9]; \
5254 volatile unsigned long _res; \
5255 _argvec[0] = (unsigned long)_orig.nraddr; \
5256 _argvec[1] = (unsigned long)(arg1); \
5257 _argvec[2] = (unsigned long)(arg2); \
5258 _argvec[3] = (unsigned long)(arg3); \
5259 _argvec[4] = (unsigned long)(arg4); \
5260 _argvec[5] = (unsigned long)(arg5); \
5261 _argvec[6] = (unsigned long)(arg6); \
5262 _argvec[7] = (unsigned long)(arg7); \
5263 _argvec[8] = (unsigned long)(arg8); \
5265 "ld $4, 8(%1)\n\t" \
5266 "ld $5, 16(%1)\n\t" \
5267 "ld $6, 24(%1)\n\t" \
5268 "ld $7, 32(%1)\n\t" \
5269 "ld $8, 40(%1)\n\t" \
5270 "ld $9, 48(%1)\n\t" \
5271 "ld $10, 56(%1)\n\t" \
5272 "ld $11, 64(%1)\n\t" \
5273 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5274 VALGRIND_CALL_NOREDIR_T9 \
5276 : /*out*/ "=r" (_res) \
5277 : /*in*/ "r" (&_argvec[0]) \
5278 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5280 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 9-arg call wrapper. The first eight args go in
   registers $4..$11; arg9 overflows to the stack: `dsubu $29,$29,8`
   carves 8 bytes below sp ($29), arg9 is staged through $4 and stored
   at 0($29), and `daddu $29,$29,8` restores sp after the call. $4 is
   reloaded with arg1 afterwards, so the clobber is safe. Listing
   appears truncated (asm/do-while framing missing). */
5283 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5286 volatile OrigFn _orig = (orig); \
5287 volatile unsigned long _argvec[10]; \
5288 volatile unsigned long _res; \
5289 _argvec[0] = (unsigned long)_orig.nraddr; \
5290 _argvec[1] = (unsigned long)(arg1); \
5291 _argvec[2] = (unsigned long)(arg2); \
5292 _argvec[3] = (unsigned long)(arg3); \
5293 _argvec[4] = (unsigned long)(arg4); \
5294 _argvec[5] = (unsigned long)(arg5); \
5295 _argvec[6] = (unsigned long)(arg6); \
5296 _argvec[7] = (unsigned long)(arg7); \
5297 _argvec[8] = (unsigned long)(arg8); \
5298 _argvec[9] = (unsigned long)(arg9); \
5300 "dsubu $29, $29, 8\n\t" \
5301 "ld $4, 72(%1)\n\t" \
5302 "sd $4, 0($29)\n\t" \
5303 "ld $4, 8(%1)\n\t" \
5304 "ld $5, 16(%1)\n\t" \
5305 "ld $6, 24(%1)\n\t" \
5306 "ld $7, 32(%1)\n\t" \
5307 "ld $8, 40(%1)\n\t" \
5308 "ld $9, 48(%1)\n\t" \
5309 "ld $10, 56(%1)\n\t" \
5310 "ld $11, 64(%1)\n\t" \
5311 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5312 VALGRIND_CALL_NOREDIR_T9 \
5313 "daddu $29, $29, 8\n\t" \
5315 : /*out*/ "=r" (_res) \
5316 : /*in*/ "r" (&_argvec[0]) \
5317 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5319 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 10-arg call wrapper. Args 1-8 in $4..$11;
   args 9-10 spilled to a 16-byte stack frame carved from $29 (sp) at
   offsets 0 and 8, staged through $4 before its final reload. sp is
   restored with `daddu $29,$29,16` after the call. Listing appears
   truncated (asm/do-while framing missing). */
5322 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5323 arg7,arg8,arg9,arg10) \
5325 volatile OrigFn _orig = (orig); \
5326 volatile unsigned long _argvec[11]; \
5327 volatile unsigned long _res; \
5328 _argvec[0] = (unsigned long)_orig.nraddr; \
5329 _argvec[1] = (unsigned long)(arg1); \
5330 _argvec[2] = (unsigned long)(arg2); \
5331 _argvec[3] = (unsigned long)(arg3); \
5332 _argvec[4] = (unsigned long)(arg4); \
5333 _argvec[5] = (unsigned long)(arg5); \
5334 _argvec[6] = (unsigned long)(arg6); \
5335 _argvec[7] = (unsigned long)(arg7); \
5336 _argvec[8] = (unsigned long)(arg8); \
5337 _argvec[9] = (unsigned long)(arg9); \
5338 _argvec[10] = (unsigned long)(arg10); \
5340 "dsubu $29, $29, 16\n\t" \
5341 "ld $4, 72(%1)\n\t" \
5342 "sd $4, 0($29)\n\t" \
5343 "ld $4, 80(%1)\n\t" \
5344 "sd $4, 8($29)\n\t" \
5345 "ld $4, 8(%1)\n\t" \
5346 "ld $5, 16(%1)\n\t" \
5347 "ld $6, 24(%1)\n\t" \
5348 "ld $7, 32(%1)\n\t" \
5349 "ld $8, 40(%1)\n\t" \
5350 "ld $9, 48(%1)\n\t" \
5351 "ld $10, 56(%1)\n\t" \
5352 "ld $11, 64(%1)\n\t" \
5353 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5354 VALGRIND_CALL_NOREDIR_T9 \
5355 "daddu $29, $29, 16\n\t" \
5357 : /*out*/ "=r" (_res) \
5358 : /*in*/ "r" (&_argvec[0]) \
5359 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5361 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 11-arg call wrapper. Args 1-8 in $4..$11;
   args 9-11 spilled to a 24-byte stack frame at 0/8/16($29), sp
   restored after the call. The parameter-list continuation naming
   arg11 is not visible in this listing -- verify against upstream
   valgrind.h. */
5364 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5365 arg6,arg7,arg8,arg9,arg10, \
5368 volatile OrigFn _orig = (orig); \
5369 volatile unsigned long _argvec[12]; \
5370 volatile unsigned long _res; \
5371 _argvec[0] = (unsigned long)_orig.nraddr; \
5372 _argvec[1] = (unsigned long)(arg1); \
5373 _argvec[2] = (unsigned long)(arg2); \
5374 _argvec[3] = (unsigned long)(arg3); \
5375 _argvec[4] = (unsigned long)(arg4); \
5376 _argvec[5] = (unsigned long)(arg5); \
5377 _argvec[6] = (unsigned long)(arg6); \
5378 _argvec[7] = (unsigned long)(arg7); \
5379 _argvec[8] = (unsigned long)(arg8); \
5380 _argvec[9] = (unsigned long)(arg9); \
5381 _argvec[10] = (unsigned long)(arg10); \
5382 _argvec[11] = (unsigned long)(arg11); \
5384 "dsubu $29, $29, 24\n\t" \
5385 "ld $4, 72(%1)\n\t" \
5386 "sd $4, 0($29)\n\t" \
5387 "ld $4, 80(%1)\n\t" \
5388 "sd $4, 8($29)\n\t" \
5389 "ld $4, 88(%1)\n\t" \
5390 "sd $4, 16($29)\n\t" \
5391 "ld $4, 8(%1)\n\t" \
5392 "ld $5, 16(%1)\n\t" \
5393 "ld $6, 24(%1)\n\t" \
5394 "ld $7, 32(%1)\n\t" \
5395 "ld $8, 40(%1)\n\t" \
5396 "ld $9, 48(%1)\n\t" \
5397 "ld $10, 56(%1)\n\t" \
5398 "ld $11, 64(%1)\n\t" \
5399 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5400 VALGRIND_CALL_NOREDIR_T9 \
5401 "daddu $29, $29, 24\n\t" \
5403 : /*out*/ "=r" (_res) \
5404 : /*in*/ "r" (&_argvec[0]) \
5405 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5407 lval = (__typeof__(lval)) _res; \
/* NOTE(review): mips64 12-arg call wrapper. Args 1-8 in $4..$11;
   args 9-12 spilled to a 32-byte stack frame at 0/8/16/24($29), sp
   restored after the call. The parameter-list continuation naming
   arg11/arg12 is not visible in this listing -- verify against
   upstream valgrind.h. */
5410 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5411 arg6,arg7,arg8,arg9,arg10, \
5414 volatile OrigFn _orig = (orig); \
5415 volatile unsigned long _argvec[13]; \
5416 volatile unsigned long _res; \
5417 _argvec[0] = (unsigned long)_orig.nraddr; \
5418 _argvec[1] = (unsigned long)(arg1); \
5419 _argvec[2] = (unsigned long)(arg2); \
5420 _argvec[3] = (unsigned long)(arg3); \
5421 _argvec[4] = (unsigned long)(arg4); \
5422 _argvec[5] = (unsigned long)(arg5); \
5423 _argvec[6] = (unsigned long)(arg6); \
5424 _argvec[7] = (unsigned long)(arg7); \
5425 _argvec[8] = (unsigned long)(arg8); \
5426 _argvec[9] = (unsigned long)(arg9); \
5427 _argvec[10] = (unsigned long)(arg10); \
5428 _argvec[11] = (unsigned long)(arg11); \
5429 _argvec[12] = (unsigned long)(arg12); \
5431 "dsubu $29, $29, 32\n\t" \
5432 "ld $4, 72(%1)\n\t" \
5433 "sd $4, 0($29)\n\t" \
5434 "ld $4, 80(%1)\n\t" \
5435 "sd $4, 8($29)\n\t" \
5436 "ld $4, 88(%1)\n\t" \
5437 "sd $4, 16($29)\n\t" \
5438 "ld $4, 96(%1)\n\t" \
5439 "sd $4, 24($29)\n\t" \
5440 "ld $4, 8(%1)\n\t" \
5441 "ld $5, 16(%1)\n\t" \
5442 "ld $6, 24(%1)\n\t" \
5443 "ld $7, 32(%1)\n\t" \
5444 "ld $8, 40(%1)\n\t" \
5445 "ld $9, 48(%1)\n\t" \
5446 "ld $10, 56(%1)\n\t" \
5447 "ld $11, 64(%1)\n\t" \
5448 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5449 VALGRIND_CALL_NOREDIR_T9 \
5450 "daddu $29, $29, 32\n\t" \
5452 : /*out*/ "=r" (_res) \
5453 : /*in*/ "r" (&_argvec[0]) \
5454 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5456 lval = (__typeof__(lval)) _res; \
5459 #endif /* PLAT_mips64_linux */
5462 /* ------------------------------------------------------------------ */
5463 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
5465 /* ------------------------------------------------------------------ */
5467 /* Some request codes. There are many more of these, but most are not
5468 exposed to end-user view. These are the public ones, all of the
5469 form 0x1000 + small_number.
5471 Core ones are in the range 0x00000000--0x0000ffff. The non-public
5472 ones start at 0x2000.
5475 /* These macros are used by tools -- they must be public, but don't
5476 embed them into other programs. */
/* Encode a two-character tool code ('a','b') into the top 16 bits of a
   client-request number; VG_IS_TOOL_USERREQ tests whether request `v`
   belongs to that tool by masking off the low 16 bits. */
5477 #define VG_USERREQ_TOOL_BASE(a,b) \
5478 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
5479 #define VG_IS_TOOL_USERREQ(a, b, v) \
5480 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
5482 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
5483 This enum comprises an ABI exported by Valgrind to programs
5484 which use client requests. DO NOT CHANGE THE ORDER OF THESE
5485 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* Client-request codes exported as an ABI -- order is frozen; new
   entries may only be appended (see the ABIWARNING comment above).
   NOTE(review): the enum's closing "};" is not visible in this
   listing -- verify against upstream valgrind.h. */
5487 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
5488 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
5490 /* These allow any function to be called from the simulated
5491 CPU but run on the real CPU. Nb: the first arg passed to
5492 the function is always the ThreadId of the running
5493 thread! So CLIENT_CALL0 actually requires a 1 arg
5495 VG_USERREQ__CLIENT_CALL0 = 0x1101,
5496 VG_USERREQ__CLIENT_CALL1 = 0x1102,
5497 VG_USERREQ__CLIENT_CALL2 = 0x1103,
5498 VG_USERREQ__CLIENT_CALL3 = 0x1104,
5500 /* Can be useful in regression testing suites -- eg. can
5501 send Valgrind's output to /dev/null and still count
5503 VG_USERREQ__COUNT_ERRORS = 0x1201,
5505 /* Allows the client program and/or gdbserver to execute a monitor
5507 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
5509 /* These are useful and can be interpreted by any tool that
5510 tracks malloc() et al, by using vg_replace_malloc.c. */
5511 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
5512 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
5513 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
5514 /* Memory pool support. */
5515 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
5516 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
5517 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
5518 VG_USERREQ__MEMPOOL_FREE = 0x1306,
5519 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
5520 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
5521 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
5522 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
5524 /* Allow printfs to valgrind log. */
5525 /* The first two pass the va_list argument by value, which
5526 assumes it is the same size as or smaller than a UWord,
5527 which generally isn't the case. Hence are deprecated.
5528 The second two pass the vargs by reference and so are
5529 immune to this problem. */
5530 /* both :: char* fmt, va_list vargs (DEPRECATED) */
5531 VG_USERREQ__PRINTF = 0x1401,
5532 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
5533 /* both :: char* fmt, va_list* vargs */
5534 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
5535 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
5537 /* Stack support. */
5538 VG_USERREQ__STACK_REGISTER = 0x1501,
5539 VG_USERREQ__STACK_DEREGISTER = 0x1502,
5540 VG_USERREQ__STACK_CHANGE = 0x1503,
5543 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
5545 /* Querying of debug info. */
5546 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
5548 /* Disable/enable error reporting level. Takes a single
5549 Word arg which is the delta to this thread's error
5550 disablement indicator. Hence 1 disables or further
5551 disables errors, and -1 moves back towards enablement.
5552 Other values are not allowed. */
5553 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
5555 /* Initialise IR injection */
5556 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
/* Non-GCC compilers: neutralise GCC's __extension__ keyword.
   NOTE(review): the matching #endif is not visible in this listing. */
5559 #if !defined(__GNUC__)
5560 # define __extension__ /* */
5564 /* Returns the number of Valgrinds this code is running under. That
5565 is, 0 if running natively, 1 if running under Valgrind, 2 if
5566 running under Valgrind which is running under another Valgrind,
/* RUNNING_ON_VALGRIND: nesting depth of Valgrinds (0 = native run);
   the default-return 0 makes it safe when not under Valgrind.
   VALGRIND_DISCARD_TRANSLATIONS: invalidate cached translations for
   [_qzz_addr, _qzz_addr+_qzz_len-1] so modified code is retranslated.
   NOTE(review): argument-list lines of the first macro appear
   truncated in this listing. */
5568 #define RUNNING_ON_VALGRIND \
5569 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
5570 VG_USERREQ__RUNNING_ON_VALGRIND, \
5574 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
5575 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
5576 since it provides a way to make sure valgrind will retranslate the
5577 invalidated area. Returns no value. */
5578 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
5579 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
5580 _qzz_addr, _qzz_len, 0, 0, 0)
5583 /* These requests are for getting Valgrind itself to print something.
5584 Possibly with a backtrace. This is a really ugly hack. The return value
5585 is the number of characters printed, excluding the "**<pid>** " part at the
5586 start and the backtrace (if present). */
/* VALGRIND_PRINTF: printf into the Valgrind log via the
   PRINTF_VALIST_BY_REF request; returns the character count.
   NOTE(review): in the guard below, && binds tighter than ||, so it
   reads as __GNUC__ || (__INTEL_COMPILER && !_MSC_VER) -- this matches
   upstream valgrind.h but looks unintended; confirm before changing.
   Several body lines (function opener, va_list decl, va_end, #endif
   pairs) are missing from this listing -- do not edit in place. */
5588 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5589 /* Modern GCC will optimize the static routine out if unused,
5590 and unused attribute will shut down warnings about it. */
5591 static int VALGRIND_PRINTF(const char *format, ...)
5592 __attribute__((format(__printf__, 1, 2), __unused__));
5595 #if defined(_MSC_VER)
5598 VALGRIND_PRINTF(const char *format, ...)
5600 #if defined(NVALGRIND)
5602 #else /* NVALGRIND */
5603 #if defined(_MSC_VER) || defined(__MINGW64__)
5606 unsigned long _qzz_res;
5609 va_start(vargs, format);
5610 #if defined(_MSC_VER) || defined(__MINGW64__)
5611 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5612 VG_USERREQ__PRINTF_VALIST_BY_REF,
5617 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5618 VG_USERREQ__PRINTF_VALIST_BY_REF,
5619 (unsigned long)format,
5620 (unsigned long)&vargs,
5624 return (int)_qzz_res;
5625 #endif /* NVALGRIND */
/* VALGRIND_PRINTF_BACKTRACE: like VALGRIND_PRINTF but also asks
   Valgrind to append a stack backtrace (PRINTF_BACKTRACE_VALIST_BY_REF
   request). Same precedence quirk in the guard and same missing body
   lines as VALGRIND_PRINTF above -- verify against upstream
   valgrind.h before editing. */
5628 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5629 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5630 __attribute__((format(__printf__, 1, 2), __unused__));
5633 #if defined(_MSC_VER)
5636 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5638 #if defined(NVALGRIND)
5640 #else /* NVALGRIND */
5641 #if defined(_MSC_VER) || defined(__MINGW64__)
5644 unsigned long _qzz_res;
5647 va_start(vargs, format);
5648 #if defined(_MSC_VER) || defined(__MINGW64__)
5649 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5650 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5655 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5656 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5657 (unsigned long)format,
5658 (unsigned long)&vargs,
5662 return (int)_qzz_res;
5663 #endif /* NVALGRIND */
5667 /* These requests allow control to move from the simulated CPU to the
5668 real CPU, calling an arbitary function.
5670 Note that the current ThreadId is inserted as the first argument.
5673 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5675 requires f to have this signature:
5677 Word f(Word tid, Word arg1, Word arg2)
5679 where "Word" is a word-sized type.
5681 Note that these client requests are not entirely reliable. For example,
5682 if you call a function with them that subsequently calls printf(),
5683 there's a high chance Valgrind will crash. Generally, your prospects of
5684 these working are made higher if the called function does not refer to
5685 any global variables, and does not refer to any libc or other functions
5686 (printf et al). Any kind of entanglement with libc or dynamic linking is
5687 likely to have a bad outcome, for tricky reasons which we've grappled
5688 with a lot in the past.
/* Run _qyy_fn on the real CPU instead of the simulated one. Per the
   comment block above, Valgrind prepends the current ThreadId as the
   function's first argument, so an N-arg request calls an (N+1)-arg
   function. NOTE(review): some argument lines of these macros are
   missing in this listing. */
5690 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
5691 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5692 VG_USERREQ__CLIENT_CALL0, \
5696 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
5697 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5698 VG_USERREQ__CLIENT_CALL1, \
5702 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
5703 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5704 VG_USERREQ__CLIENT_CALL2, \
5706 _qyy_arg1, _qyy_arg2, 0, 0)
5708 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
5709 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5710 VG_USERREQ__CLIENT_CALL3, \
5712 _qyy_arg1, _qyy_arg2, \
5716 /* Counts the number of errors that have been recorded by a tool. Nb:
5717 the tool must record the errors with VG_(maybe_record_error)() or
5718 VG_(unique_error)() for them to be counted. */
/* Returns the tool's recorded error count (0 when not under
   Valgrind). NOTE(review): the trailing zero-argument line of this
   macro is not visible in this listing. */
5719 #define VALGRIND_COUNT_ERRORS \
5720 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
5721 0 /* default return */, \
5722 VG_USERREQ__COUNT_ERRORS, \
5725 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5726 when heap blocks are allocated in order to give accurate results. This
5727 happens automatically for the standard allocator functions such as
5728 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5731 But if your program uses a custom allocator, this doesn't automatically
5732 happen, and Valgrind will not do as well. For example, if you allocate
5733 superblocks with mmap() and then allocates chunks of the superblocks, all
5734 Valgrind's observations will be at the mmap() level and it won't know that
5735 the chunks should be considered separate entities. In Memcheck's case,
5736 that means you probably won't get heap block overrun detection (because
5737 there won't be redzones marked as unaddressable) and you definitely won't
5738 get any leak detection.
5740 The following client requests allow a custom allocator to be annotated so
5741 that it can be handled accurately by Valgrind.
5743 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5744 by a malloc()-like function. For Memcheck (an illustrative case), this
5747 - It records that the block has been allocated. This means any addresses
5748 within the block mentioned in error messages will be
5749 identified as belonging to the block. It also means that if the block
5750 isn't freed it will be detected by the leak checker.
5752 - It marks the block as being addressable and undefined (if 'is_zeroed' is
5753 not set), or addressable and defined (if 'is_zeroed' is set). This
5754 controls how accesses to the block by the program are handled.
5756 'addr' is the start of the usable block (ie. after any
5757 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5758 can apply redzones -- these are blocks of padding at the start and end of
5759 each block. Adding redzones is recommended as it makes it much more likely
5760 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5761 zeroed (or filled with another predictable value), as is the case for
5764 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5765 heap block -- that will be used by the client program -- is allocated.
5766 It's best to put it at the outermost level of the allocator if possible;
5767 for example, if you have a function my_alloc() which calls
5768 internal_alloc(), and the client request is put inside internal_alloc(),
5769 stack traces relating to the heap block will contain entries for both
5770 my_alloc() and internal_alloc(), which is probably not what you want.
5772 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5773 custom blocks from within a heap block, B, that has been allocated with
5774 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5775 -- the custom blocks will take precedence.
5777 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5778 Memcheck, it does two things:
5780 - It records that the block has been deallocated. This assumes that the
5781 block was annotated as having been allocated via
5782 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5784 - It marks the block as being unaddressable.
5786 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5787 heap block is deallocated.
5789 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5790 Memcheck, it does four things:
5792 - It records that the size of a block has been changed. This assumes that
5793 the block was annotated as having been allocated via
5794 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5796 - If the block shrunk, it marks the freed memory as being unaddressable.
5798 - If the block grew, it marks the new area as undefined and defines a red
5799 zone past the end of the new block.
5801 - The V-bits of the overlap between the old and the new block are preserved.
5803 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5804 and before deallocation of the old block.
5806 In many cases, these three client requests will not be enough to get your
5807 allocator working well with Memcheck. More specifically, if your allocator
5808 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5809 will be necessary to mark the memory as addressable just before the zeroing
5810 occurs, otherwise you'll get a lot of invalid write errors. For example,
5811 you'll need to do this if your allocator recycles freed blocks, but it
5812 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5813 Alternatively, if your allocator reuses freed blocks for allocator-internal
5814 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5816 Really, what's happening is a blurring of the lines between the client
5817 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5818 memory should be considered unaddressable to the client program, but the
5819 allocator knows more than the rest of the client program and so may be able
5820 to safely access it. Extra client requests are necessary for Valgrind to
5821 understand the distinction between the allocator and the rest of the
5824 Ignored if addr == 0.
/* Custom-allocator annotations; full semantics are in the long comment
   block above. All three are no-ops (ignored) when addr == 0.
   NOTE(review): FREELIKE_BLOCK's argument line appears truncated in
   this listing. */
5826 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
5827 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
5828 addr, sizeB, rzB, is_zeroed, 0)
5830 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5831 Ignored if addr == 0.
5833 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
5834 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
5835 addr, oldSizeB, newSizeB, rzB, 0)
5837 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5838 Ignored if addr == 0.
5840 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
5841 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
5844 /* Create a memory pool. */
/* Memory-pool annotation macros -- each issues the corresponding
   VG_USERREQ__*MEMPOOL* client request. NOTE(review): the argument
   lines of DESTROY_MEMPOOL and MEMPOOL_EXISTS appear truncated in
   this listing. */
5845 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
5846 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
5847 pool, rzB, is_zeroed, 0, 0)
5849 /* Destroy a memory pool. */
5850 #define VALGRIND_DESTROY_MEMPOOL(pool) \
5851 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
5854 /* Associate a piece of memory with a memory pool. */
5855 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
5856 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
5857 pool, addr, size, 0, 0)
5859 /* Disassociate a piece of memory from a memory pool. */
5860 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
5861 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
5862 pool, addr, 0, 0, 0)
5864 /* Disassociate any pieces outside a particular range. */
5865 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
5866 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
5867 pool, addr, size, 0, 0)
5869 /* Move a memory pool: the pool anchored at poolA is re-anchored at
5870 poolB. NOTE(review): upstream's comment here duplicated
5870b MEMPOOL_CHANGE's "Resize and/or move a piece" text. */
5870 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
5871 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
5872 poolA, poolB, 0, 0, 0)
5874 /* Resize and/or move a piece associated with a memory pool. */
5875 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
5876 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
5877 pool, addrA, addrB, size, 0)
5879 /* Return 1 if a mempool exists, else 0. */
5880 #define VALGRIND_MEMPOOL_EXISTS(pool) \
5881 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5882 VG_USERREQ__MEMPOOL_EXISTS, \
5885 /* Mark a piece of memory as being a stack. Returns a stack id. */
/* Stack registration, Wine PDB debug-info loading, and IP-to-source
   mapping requests. STACK_REGISTER returns an id consumed by
   STACK_DEREGISTER / STACK_CHANGE. NOTE(review): the argument lines
   of STACK_DEREGISTER appear truncated in this listing. */
5886 #define VALGRIND_STACK_REGISTER(start, end) \
5887 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5888 VG_USERREQ__STACK_REGISTER, \
5889 start, end, 0, 0, 0)
5891 /* Unmark the piece of memory associated with a stack id as being a
5893 #define VALGRIND_STACK_DEREGISTER(id) \
5894 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
5897 /* Change the start and end address of the stack id. */
5898 #define VALGRIND_STACK_CHANGE(id, start, end) \
5899 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
5900 id, start, end, 0, 0)
5902 /* Load PDB debug info for Wine PE image_map. */
5903 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
5904 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
5905 fd, ptr, total_size, delta, 0)
5907 /* Map a code address to a source file name and line number. buf64
5908 must point to a 64-byte buffer in the caller's address space. The
5909 result will be dumped in there and is guaranteed to be zero
5910 terminated. If no info is found, the first byte is set to zero. */
5911 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
5912 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5913 VG_USERREQ__MAP_IP_TO_SRCLOC, \
5914 addr, buf64, 0, 0, 0)
5916 /* Disable error reporting for this thread. Behaves in a stack like
5917 way, so you can safely call this multiple times provided that
5918 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
5919 to re-enable reporting. The first call of this macro disables
5920 reporting. Subsequent calls have no effect except to increase the
5921 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
5922 reporting. Child threads do not inherit this setting from their
5923 parents -- they are always created with reporting enabled. */
/* Per-thread, stack-like error-reporting toggles: DISABLE passes a
   delta of 1, ENABLE a delta of -1, via CHANGE_ERR_DISABLEMENT (see
   comment above). NOTE(review): the argument lines carrying those
   deltas are not visible in this listing -- verify against upstream.
   MONITOR_COMMAND forwards a gdbserver monitor command; returns 1 if
   the command was not recognised, 0 otherwise. */
5924 #define VALGRIND_DISABLE_ERROR_REPORTING \
5925 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5928 /* Re-enable error reporting, as per comments on
5929 VALGRIND_DISABLE_ERROR_REPORTING. */
5930 #define VALGRIND_ENABLE_ERROR_REPORTING \
5931 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5934 /* Execute a monitor command from the client program.
5935 If a connection is opened with GDB, the output will be sent
5936 according to the output mode set for vgdb.
5937 If no connection is opened, output will go to the log output.
5938 Returns 1 if command not recognised, 0 otherwise. */
5939 #define VALGRIND_MONITOR_COMMAND(command) \
5940 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
5941 command, 0, 0, 0, 0)
/* Tear down the internal per-platform PLAT_* markers so they do not
   leak into code that includes this header. */
5944 #undef PLAT_x86_darwin
5945 #undef PLAT_amd64_darwin
5946 #undef PLAT_x86_win32
5947 #undef PLAT_amd64_win64
5948 #undef PLAT_x86_linux
5949 #undef PLAT_amd64_linux
5950 #undef PLAT_ppc32_linux
5951 #undef PLAT_ppc64_linux
5952 #undef PLAT_arm_linux
5953 #undef PLAT_s390x_linux
5954 #undef PLAT_mips32_linux
5955 #undef PLAT_mips64_linux
5957 #endif /* __VALGRIND_H */