2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
15 Copyright (C) 2000-2013 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 10
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
/* Reset every platform tag before detection, so exactly the tags
   chosen below are defined and stale definitions cannot leak in. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux

/* At most one PLAT_* tag gets defined, selected purely from the
   compiler's predefined CPP symbols (see the note above about how
   these differ from Valgrind's internal arch names). */
#if defined(__APPLE__) && defined(__i386__)
# define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
# define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
|| defined(__CYGWIN32__) \
|| (defined(_WIN32) && defined(_M_IX86))
# define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
|| (defined(_WIN64) && defined(_M_X64))
# define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
# define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
# define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
# define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
# define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
# define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
# define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
# define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
# define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
# define PLAT_mips32_linux 1
156 /* If we're not compiling for our target platform, don't generate
158 # if !defined(NVALGRIND)
/* XXX: Unfortunately x64 Visual C++ does not support inline asms,
164 * so disable the use of valgrind's inline asm's for x64 Visual C++
165 * builds, so that x64 Visual C++ builds of GLib can be maintained
167 #if defined (PLAT_amd64_win64) && defined (_MSC_VER)
168 # if !defined(NVALGRIND)
174 /* ------------------------------------------------------------------ */
175 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
176 /* in here of use to end-users -- skip to the next section. */
177 /* ------------------------------------------------------------------ */
180 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
181 * request. Accepts both pointers and integers as arguments.
183 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
184 * client request that does not return a value.
186 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
187 * client request and whose value equals the client request result. Accepts
188 * both pointers and integers as arguments. Note that such calls are not
189 * necessarily pure functions -- they may have side effects.
/* Statement form of a client request: evaluates the request and
   stores the result into the lvalue _zzq_rlval.  When not running
   under Valgrind, _zzq_default is stored instead (that fallback is
   implemented by VALGRIND_DO_CLIENT_REQUEST_EXPR). */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, _zzq_request,  \
                                   _zzq_arg1, _zzq_arg2, _zzq_arg3,         \
                                   _zzq_arg4, _zzq_arg5)                    \
   do {                                                                     \
      (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR(                       \
                        (_zzq_default), (_zzq_request),                     \
                        (_zzq_arg1), (_zzq_arg2), (_zzq_arg3),              \
                        (_zzq_arg4), (_zzq_arg5));                          \
   } while (0)
/* Fire-and-forget client request: same as VALGRIND_DO_CLIENT_REQUEST
   but the result (and hence any default) is discarded; a default of 0
   is passed through to VALGRIND_DO_CLIENT_REQUEST_EXPR. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,            \
                                        _zzq_arg2, _zzq_arg3,               \
                                        _zzq_arg4, _zzq_arg5)               \
   do {                                                                     \
      (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(                               \
                0, (_zzq_request), (_zzq_arg1), (_zzq_arg2),                \
                (_zzq_arg3), (_zzq_arg4), (_zzq_arg5));                     \
   } while (0)
205 #if defined(NVALGRIND)
207 /* Define NVALGRIND to completely remove the Valgrind magic sequence
208 from the compiled code (analogous to NDEBUG's effects on
210 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
211 _zzq_default, _zzq_request, \
212 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
215 #else /* ! NVALGRIND */
217 /* The following defines the magic code sequences which the JITter
218 spots and handles magically. Don't look too closely at them as
219 they will rot your brain.
221 The assembly code sequences for all architectures is in this one
222 file. This is because this file must be stand-alone, and we don't
223 want to have multiple files.
225 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
226 value gets put in the return slot, so that everything works when
227 this is executed not under Valgrind. Args are passed in a memory
228 block, and so there's no intrinsic limit to the number that could
229 be passed, but it's currently five.
232 _zzq_rlval result lvalue
233 _zzq_default default value (result returned when running on real CPU)
234 _zzq_request request code
235 _zzq_arg1..5 request params
237 The other two macros are used to support function wrapping, and are
238 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
239 guest's NRADDR pseudo-register and whatever other information is
240 needed to safely run the call original from the wrapper: on
241 ppc64-linux, the R2 value at the divert point is also needed. This
242 information is abstracted into a user-visible type, OrigFn.
244 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
245 guest, but guarantees that the branch instruction will not be
246 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
247 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
248 complete inline asm, since it needs to be combined with more magic
249 inline asm stuff to be useful.
252 /* ------------------------- x86-{linux,darwin} ---------------- */
254 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
255 || (defined(PLAT_x86_win32) && defined(__GNUC__))
259 unsigned int nraddr; /* where's the code? */
263 #define __SPECIAL_INSTRUCTION_PREAMBLE \
264 "roll $3, %%edi ; roll $13, %%edi\n\t" \
265 "roll $29, %%edi ; roll $19, %%edi\n\t"
267 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
268 _zzq_default, _zzq_request, \
269 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
271 ({volatile unsigned int _zzq_args[6]; \
272 volatile unsigned int _zzq_result; \
273 _zzq_args[0] = (unsigned int)(_zzq_request); \
274 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
275 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
276 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
277 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
278 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
279 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
280 /* %EDX = client_request ( %EAX ) */ \
281 "xchgl %%ebx,%%ebx" \
282 : "=d" (_zzq_result) \
283 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
289 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
290 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
291 volatile unsigned int __addr; \
292 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
293 /* %EAX = guest_NRADDR */ \
294 "xchgl %%ecx,%%ecx" \
299 _zzq_orig->nraddr = __addr; \
302 #define VALGRIND_CALL_NOREDIR_EAX \
303 __SPECIAL_INSTRUCTION_PREAMBLE \
304 /* call-noredir *%EAX */ \
305 "xchgl %%edx,%%edx\n\t"
307 #define VALGRIND_VEX_INJECT_IR() \
309 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
310 "xchgl %%edi,%%edi\n\t" \
311 : : : "cc", "memory" \
315 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
317 /* ------------------------- x86-Win32 ------------------------- */
319 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
323 unsigned int nraddr; /* where's the code? */
327 #if defined(_MSC_VER)
329 #define __SPECIAL_INSTRUCTION_PREAMBLE \
330 __asm rol edi, 3 __asm rol edi, 13 \
331 __asm rol edi, 29 __asm rol edi, 19
333 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
334 _zzq_default, _zzq_request, \
335 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
336 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
337 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
338 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
339 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
341 static __inline uintptr_t
342 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
343 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
344 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
347 volatile uintptr_t _zzq_args[6];
348 volatile unsigned int _zzq_result;
349 _zzq_args[0] = (uintptr_t)(_zzq_request);
350 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
351 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
352 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
353 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
354 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
355 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
356 __SPECIAL_INSTRUCTION_PREAMBLE
357 /* %EDX = client_request ( %EAX ) */
359 __asm mov _zzq_result, edx
364 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
365 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
366 volatile unsigned int __addr; \
367 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
368 /* %EAX = guest_NRADDR */ \
370 __asm mov __addr, eax \
372 _zzq_orig->nraddr = __addr; \
375 #define VALGRIND_CALL_NOREDIR_EAX ERROR
377 #define VALGRIND_VEX_INJECT_IR() \
379 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
385 #error Unsupported compiler.
388 #endif /* PLAT_x86_win32 */
390 /* ------------------------ amd64-{linux,darwin} --------------- */
392 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
393 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
397 unsigned long long int nraddr; /* where's the code? */
401 #define __SPECIAL_INSTRUCTION_PREAMBLE \
402 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
403 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
405 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
406 _zzq_default, _zzq_request, \
407 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
409 ({ volatile unsigned long long int _zzq_args[6]; \
410 volatile unsigned long long int _zzq_result; \
411 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
412 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
413 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
414 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
415 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
416 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
417 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
418 /* %RDX = client_request ( %RAX ) */ \
419 "xchgq %%rbx,%%rbx" \
420 : "=d" (_zzq_result) \
421 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
427 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
428 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
429 volatile unsigned long long int __addr; \
430 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
431 /* %RAX = guest_NRADDR */ \
432 "xchgq %%rcx,%%rcx" \
437 _zzq_orig->nraddr = __addr; \
440 #define VALGRIND_CALL_NOREDIR_RAX \
441 __SPECIAL_INSTRUCTION_PREAMBLE \
442 /* call-noredir *%RAX */ \
443 "xchgq %%rdx,%%rdx\n\t"
445 #define VALGRIND_VEX_INJECT_IR() \
447 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
448 "xchgq %%rdi,%%rdi\n\t" \
449 : : : "cc", "memory" \
#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || (PLAT_amd64_win64 && __GNUC__) */
455 /* ------------------------- amd64-Win64 ------------------------- */
457 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
459 #error Unsupported compiler.
461 #endif /* PLAT_amd64_win64 */
463 /* ------------------------ ppc32-linux ------------------------ */
465 #if defined(PLAT_ppc32_linux)
469 unsigned int nraddr; /* where's the code? */
473 #define __SPECIAL_INSTRUCTION_PREAMBLE \
474 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
475 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
477 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
478 _zzq_default, _zzq_request, \
479 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
482 ({ unsigned int _zzq_args[6]; \
483 unsigned int _zzq_result; \
484 unsigned int* _zzq_ptr; \
485 _zzq_args[0] = (unsigned int)(_zzq_request); \
486 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
487 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
488 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
489 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
490 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
491 _zzq_ptr = _zzq_args; \
492 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
493 "mr 4,%2\n\t" /*ptr*/ \
494 __SPECIAL_INSTRUCTION_PREAMBLE \
495 /* %R3 = client_request ( %R4 ) */ \
497 "mr %0,3" /*result*/ \
498 : "=b" (_zzq_result) \
499 : "b" (_zzq_default), "b" (_zzq_ptr) \
500 : "cc", "memory", "r3", "r4"); \
504 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
505 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
506 unsigned int __addr; \
507 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
508 /* %R3 = guest_NRADDR */ \
513 : "cc", "memory", "r3" \
515 _zzq_orig->nraddr = __addr; \
518 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
519 __SPECIAL_INSTRUCTION_PREAMBLE \
520 /* branch-and-link-to-noredir *%R11 */ \
523 #define VALGRIND_VEX_INJECT_IR() \
525 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
530 #endif /* PLAT_ppc32_linux */
532 /* ------------------------ ppc64-linux ------------------------ */
534 #if defined(PLAT_ppc64_linux)
538 unsigned long long int nraddr; /* where's the code? */
539 unsigned long long int r2; /* what tocptr do we need? */
543 #define __SPECIAL_INSTRUCTION_PREAMBLE \
544 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
545 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
547 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
548 _zzq_default, _zzq_request, \
549 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
552 ({ unsigned long long int _zzq_args[6]; \
553 unsigned long long int _zzq_result; \
554 unsigned long long int* _zzq_ptr; \
555 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
556 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
557 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
558 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
559 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
560 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
561 _zzq_ptr = _zzq_args; \
562 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
563 "mr 4,%2\n\t" /*ptr*/ \
564 __SPECIAL_INSTRUCTION_PREAMBLE \
565 /* %R3 = client_request ( %R4 ) */ \
567 "mr %0,3" /*result*/ \
568 : "=b" (_zzq_result) \
569 : "b" (_zzq_default), "b" (_zzq_ptr) \
570 : "cc", "memory", "r3", "r4"); \
574 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
575 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
576 unsigned long long int __addr; \
577 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
578 /* %R3 = guest_NRADDR */ \
583 : "cc", "memory", "r3" \
585 _zzq_orig->nraddr = __addr; \
586 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
587 /* %R3 = guest_NRADDR_GPR2 */ \
592 : "cc", "memory", "r3" \
594 _zzq_orig->r2 = __addr; \
597 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
598 __SPECIAL_INSTRUCTION_PREAMBLE \
599 /* branch-and-link-to-noredir *%R11 */ \
602 #define VALGRIND_VEX_INJECT_IR() \
604 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
609 #endif /* PLAT_ppc64_linux */
611 /* ------------------------- arm-linux ------------------------- */
613 #if defined(PLAT_arm_linux)
617 unsigned int nraddr; /* where's the code? */
621 #define __SPECIAL_INSTRUCTION_PREAMBLE \
622 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
623 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
625 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
626 _zzq_default, _zzq_request, \
627 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
630 ({volatile unsigned int _zzq_args[6]; \
631 volatile unsigned int _zzq_result; \
632 _zzq_args[0] = (unsigned int)(_zzq_request); \
633 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
634 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
635 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
636 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
637 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
638 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
639 "mov r4, %2\n\t" /*ptr*/ \
640 __SPECIAL_INSTRUCTION_PREAMBLE \
641 /* R3 = client_request ( R4 ) */ \
642 "orr r10, r10, r10\n\t" \
643 "mov %0, r3" /*result*/ \
644 : "=r" (_zzq_result) \
645 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
646 : "cc","memory", "r3", "r4"); \
650 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
651 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
652 unsigned int __addr; \
653 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
654 /* R3 = guest_NRADDR */ \
655 "orr r11, r11, r11\n\t" \
659 : "cc", "memory", "r3" \
661 _zzq_orig->nraddr = __addr; \
664 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
665 __SPECIAL_INSTRUCTION_PREAMBLE \
666 /* branch-and-link-to-noredir *%R4 */ \
667 "orr r12, r12, r12\n\t"
669 #define VALGRIND_VEX_INJECT_IR() \
671 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
672 "orr r9, r9, r9\n\t" \
673 : : : "cc", "memory" \
677 #endif /* PLAT_arm_linux */
679 /* ------------------------ arm64-linux ------------------------- */
681 #if defined(PLAT_arm64_linux)
685 unsigned long long int nraddr; /* where's the code? */
689 #define __SPECIAL_INSTRUCTION_PREAMBLE \
690 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
691 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
693 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
694 _zzq_default, _zzq_request, \
695 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
698 ({volatile unsigned long long int _zzq_args[6]; \
699 volatile unsigned long long int _zzq_result; \
700 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
701 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
702 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
703 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
704 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
705 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
706 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
707 "mov x4, %2\n\t" /*ptr*/ \
708 __SPECIAL_INSTRUCTION_PREAMBLE \
709 /* X3 = client_request ( X4 ) */ \
710 "orr x10, x10, x10\n\t" \
711 "mov %0, x3" /*result*/ \
712 : "=r" (_zzq_result) \
713 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
714 : "cc","memory", "x3", "x4"); \
718 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
719 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
720 unsigned long long int __addr; \
721 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
722 /* X3 = guest_NRADDR */ \
723 "orr x11, x11, x11\n\t" \
727 : "cc", "memory", "x3" \
729 _zzq_orig->nraddr = __addr; \
732 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
733 __SPECIAL_INSTRUCTION_PREAMBLE \
734 /* branch-and-link-to-noredir X8 */ \
735 "orr x12, x12, x12\n\t"
737 #define VALGRIND_VEX_INJECT_IR() \
739 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
740 "orr x9, x9, x9\n\t" \
741 : : : "cc", "memory" \
745 #endif /* PLAT_arm64_linux */
747 /* ------------------------ s390x-linux ------------------------ */
749 #if defined(PLAT_s390x_linux)
753 unsigned long long int nraddr; /* where's the code? */
757 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
758 * code. This detection is implemented in platform specific toIR.c
759 * (e.g. VEX/priv/guest_s390_decoder.c).
761 #define __SPECIAL_INSTRUCTION_PREAMBLE \
/* After __SPECIAL_INSTRUCTION_PREAMBLE, one of these "lr rX,rX"
   register-to-self moves selects which kind of request follows.  On a
   real CPU they are harmless no-ops; under Valgrind they are decoded
   specially (see the note above about the platform-specific toIR.c). */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
772 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
773 _zzq_default, _zzq_request, \
774 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
776 ({volatile unsigned long long int _zzq_args[6]; \
777 volatile unsigned long long int _zzq_result; \
778 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
779 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
780 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
781 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
782 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
783 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
784 __asm__ volatile(/* r2 = args */ \
788 __SPECIAL_INSTRUCTION_PREAMBLE \
789 __CLIENT_REQUEST_CODE \
792 : "=d" (_zzq_result) \
793 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
794 : "cc", "2", "3", "memory" \
799 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
800 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
801 volatile unsigned long long int __addr; \
802 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
803 __GET_NR_CONTEXT_CODE \
807 : "cc", "3", "memory" \
809 _zzq_orig->nraddr = __addr; \
812 #define VALGRIND_CALL_NOREDIR_R1 \
813 __SPECIAL_INSTRUCTION_PREAMBLE \
816 #define VALGRIND_VEX_INJECT_IR() \
818 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
819 __VEX_INJECT_IR_CODE); \
822 #endif /* PLAT_s390x_linux */
824 /* ------------------------- mips32-linux ---------------- */
826 #if defined(PLAT_mips32_linux)
830 unsigned int nraddr; /* where's the code? */
838 #define __SPECIAL_INSTRUCTION_PREAMBLE \
839 "srl $0, $0, 13\n\t" \
840 "srl $0, $0, 29\n\t" \
841 "srl $0, $0, 3\n\t" \
844 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
845 _zzq_default, _zzq_request, \
846 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
848 ({ volatile unsigned int _zzq_args[6]; \
849 volatile unsigned int _zzq_result; \
850 _zzq_args[0] = (unsigned int)(_zzq_request); \
851 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
852 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
853 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
854 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
855 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
856 __asm__ volatile("move $11, %1\n\t" /*default*/ \
857 "move $12, %2\n\t" /*ptr*/ \
858 __SPECIAL_INSTRUCTION_PREAMBLE \
859 /* T3 = client_request ( T4 ) */ \
860 "or $13, $13, $13\n\t" \
861 "move %0, $11\n\t" /*result*/ \
862 : "=r" (_zzq_result) \
863 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
868 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
869 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
870 volatile unsigned int __addr; \
871 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
872 /* %t9 = guest_NRADDR */ \
873 "or $14, $14, $14\n\t" \
874 "move %0, $11" /*result*/ \
879 _zzq_orig->nraddr = __addr; \
882 #define VALGRIND_CALL_NOREDIR_T9 \
883 __SPECIAL_INSTRUCTION_PREAMBLE \
884 /* call-noredir *%t9 */ \
885 "or $15, $15, $15\n\t"
887 #define VALGRIND_VEX_INJECT_IR() \
889 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
890 "or $11, $11, $11\n\t" \
895 #endif /* PLAT_mips32_linux */
897 /* ------------------------- mips64-linux ---------------- */
899 #if defined(PLAT_mips64_linux)
903 unsigned long long nraddr; /* where's the code? */
911 #define __SPECIAL_INSTRUCTION_PREAMBLE \
912 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
913 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
915 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
916 _zzq_default, _zzq_request, \
917 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
919 ({ volatile unsigned long long int _zzq_args[6]; \
920 volatile unsigned long long int _zzq_result; \
921 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
922 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
923 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
924 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
925 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
926 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
927 __asm__ volatile("move $11, %1\n\t" /*default*/ \
928 "move $12, %2\n\t" /*ptr*/ \
929 __SPECIAL_INSTRUCTION_PREAMBLE \
930 /* $11 = client_request ( $12 ) */ \
931 "or $13, $13, $13\n\t" \
932 "move %0, $11\n\t" /*result*/ \
933 : "=r" (_zzq_result) \
934 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
939 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
940 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
941 volatile unsigned long long int __addr; \
942 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
943 /* $11 = guest_NRADDR */ \
944 "or $14, $14, $14\n\t" \
945 "move %0, $11" /*result*/ \
949 _zzq_orig->nraddr = __addr; \
952 #define VALGRIND_CALL_NOREDIR_T9 \
953 __SPECIAL_INSTRUCTION_PREAMBLE \
954 /* call-noredir $25 */ \
955 "or $15, $15, $15\n\t"
957 #define VALGRIND_VEX_INJECT_IR() \
959 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
960 "or $11, $11, $11\n\t" \
964 #endif /* PLAT_mips64_linux */
966 /* Insert assembly code for other platforms here... */
968 #endif /* NVALGRIND */
971 /* ------------------------------------------------------------------ */
972 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
973 /* ugly. It's the least-worst tradeoff I can think of. */
974 /* ------------------------------------------------------------------ */
976 /* This section defines magic (a.k.a appalling-hack) macros for doing
977 guaranteed-no-redirection macros, so as to get from function
978 wrappers to the functions they are wrapping. The whole point is to
979 construct standard call sequences, but to do the call itself with a
980 special no-redirect call pseudo-instruction that the JIT
981 understands and handles specially. This section is long and
982 repetitious, and I can't see a way to make it shorter.
984 The naming scheme is as follows:
986 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
988 'W' stands for "word" and 'v' for "void". Hence there are
989 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
990 and for each, the possibility of returning a word-typed result, or
994 /* Use these to write the name of your wrapper. NOTE: duplicates
995 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
the default behaviour equivalence class tag "0000" into the name.
997 See pub_tool_redir.h for details -- normally you don't need to
998 think about this, though. */
1000 /* Use an extra level of macroisation so as to ensure the soname/fnname
1001 args are fully macro-expanded before pasting them together. */
/* Paste four tokens into one identifier.  Kept as a separate level of
   macro so that the soname/fnname arguments of the I_WRAP_* macros
   below are fully macro-expanded before pasting. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the mangled symbol name for a wrapper of 'fnname' in library
   'soname'; the "00000" part is the default behaviour-equivalence tag.
   ZU vs ZZ select the name-encoding variant -- see
   VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h for the exact rules. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1010 /* Use this macro from within a wrapper function to collect the
1011 context (address and possibly other info) of the original function.
1012 Once you have that you can then use it in one of the CALL_FN_
1013 macros. The type of the argument _lval is OrigFn. */
1014 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1016 /* Also provide end-user facilities for function replacement, rather
1017 than wrapping. A replacement function differs from a wrapper in
1018 that it has no way to get hold of the original function being
1019 called, and hence no way to call onwards to it. In a replacement
1020 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Same naming scheme as I_WRAP_SONAME_FNNAME_Z{U,Z}, but with the
   '_vgr' (replace) prefix instead of '_vgw' (wrap). */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1028 /* Derivatives of the main macros below, for calling functions
#define CALL_FN_v_v(fnptr)                                                  \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_v(_discard,fnptr);                                          \
   } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                            \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_W(_discard,fnptr,arg1);                                     \
   } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                                      \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_WW(_discard,fnptr,arg1,arg2);                               \
   } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                                \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3);                         \
   } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                          \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4);                   \
   } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)                       \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5);                \
   } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)                  \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6);           \
   } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)             \
   do {                                                                     \
      volatile unsigned long _discard;                                      \
      CALL_FN_W_7W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7);      \
   } while (0)
1063 /* ------------------------- x86-{linux,darwin} ---------------- */
1065 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1067 /* These regs are trashed by the hidden call. No need to mention eax
1068 as gcc can already see that, plus causes gcc to bomb. */
1069 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* %edi holds the original %esp across the call; it is therefore
   listed as trashed in the CALL_FN_ macros below. */
#define VALGRIND_ALIGN_STACK                                      \
   "movl %%esp,%%edi\n\t"                                         \
   "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "movl %%edi,%%esp\n\t"
/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */
1085 #define CALL_FN_W_v(lval, orig) \
1087 volatile OrigFn _orig = (orig); \
1088 volatile unsigned long _argvec[1]; \
1089 volatile unsigned long _res; \
1090 _argvec[0] = (unsigned long)_orig.nraddr; \
1092 VALGRIND_ALIGN_STACK \
1093 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1094 VALGRIND_CALL_NOREDIR_EAX \
1095 VALGRIND_RESTORE_STACK \
1096 : /*out*/ "=a" (_res) \
1097 : /*in*/ "a" (&_argvec[0]) \
1098 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1100 lval = (__typeof__(lval)) _res; \
1103 #define CALL_FN_W_W(lval, orig, arg1) \
1105 volatile OrigFn _orig = (orig); \
1106 volatile unsigned long _argvec[2]; \
1107 volatile unsigned long _res; \
1108 _argvec[0] = (unsigned long)_orig.nraddr; \
1109 _argvec[1] = (unsigned long)(arg1); \
1111 VALGRIND_ALIGN_STACK \
1112 "subl $12, %%esp\n\t" \
1113 "pushl 4(%%eax)\n\t" \
1114 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1115 VALGRIND_CALL_NOREDIR_EAX \
1116 VALGRIND_RESTORE_STACK \
1117 : /*out*/ "=a" (_res) \
1118 : /*in*/ "a" (&_argvec[0]) \
1119 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1121 lval = (__typeof__(lval)) _res; \
1124 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1126 volatile OrigFn _orig = (orig); \
1127 volatile unsigned long _argvec[3]; \
1128 volatile unsigned long _res; \
1129 _argvec[0] = (unsigned long)_orig.nraddr; \
1130 _argvec[1] = (unsigned long)(arg1); \
1131 _argvec[2] = (unsigned long)(arg2); \
1133 VALGRIND_ALIGN_STACK \
1134 "subl $8, %%esp\n\t" \
1135 "pushl 8(%%eax)\n\t" \
1136 "pushl 4(%%eax)\n\t" \
1137 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1138 VALGRIND_CALL_NOREDIR_EAX \
1139 VALGRIND_RESTORE_STACK \
1140 : /*out*/ "=a" (_res) \
1141 : /*in*/ "a" (&_argvec[0]) \
1142 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1144 lval = (__typeof__(lval)) _res; \
1147 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1149 volatile OrigFn _orig = (orig); \
1150 volatile unsigned long _argvec[4]; \
1151 volatile unsigned long _res; \
1152 _argvec[0] = (unsigned long)_orig.nraddr; \
1153 _argvec[1] = (unsigned long)(arg1); \
1154 _argvec[2] = (unsigned long)(arg2); \
1155 _argvec[3] = (unsigned long)(arg3); \
1157 VALGRIND_ALIGN_STACK \
1158 "subl $4, %%esp\n\t" \
1159 "pushl 12(%%eax)\n\t" \
1160 "pushl 8(%%eax)\n\t" \
1161 "pushl 4(%%eax)\n\t" \
1162 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1163 VALGRIND_CALL_NOREDIR_EAX \
1164 VALGRIND_RESTORE_STACK \
1165 : /*out*/ "=a" (_res) \
1166 : /*in*/ "a" (&_argvec[0]) \
1167 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1169 lval = (__typeof__(lval)) _res; \
1172 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1174 volatile OrigFn _orig = (orig); \
1175 volatile unsigned long _argvec[5]; \
1176 volatile unsigned long _res; \
1177 _argvec[0] = (unsigned long)_orig.nraddr; \
1178 _argvec[1] = (unsigned long)(arg1); \
1179 _argvec[2] = (unsigned long)(arg2); \
1180 _argvec[3] = (unsigned long)(arg3); \
1181 _argvec[4] = (unsigned long)(arg4); \
1183 VALGRIND_ALIGN_STACK \
1184 "pushl 16(%%eax)\n\t" \
1185 "pushl 12(%%eax)\n\t" \
1186 "pushl 8(%%eax)\n\t" \
1187 "pushl 4(%%eax)\n\t" \
1188 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1189 VALGRIND_CALL_NOREDIR_EAX \
1190 VALGRIND_RESTORE_STACK \
1191 : /*out*/ "=a" (_res) \
1192 : /*in*/ "a" (&_argvec[0]) \
1193 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1195 lval = (__typeof__(lval)) _res; \
1198 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1200 volatile OrigFn _orig = (orig); \
1201 volatile unsigned long _argvec[6]; \
1202 volatile unsigned long _res; \
1203 _argvec[0] = (unsigned long)_orig.nraddr; \
1204 _argvec[1] = (unsigned long)(arg1); \
1205 _argvec[2] = (unsigned long)(arg2); \
1206 _argvec[3] = (unsigned long)(arg3); \
1207 _argvec[4] = (unsigned long)(arg4); \
1208 _argvec[5] = (unsigned long)(arg5); \
1210 VALGRIND_ALIGN_STACK \
1211 "subl $12, %%esp\n\t" \
1212 "pushl 20(%%eax)\n\t" \
1213 "pushl 16(%%eax)\n\t" \
1214 "pushl 12(%%eax)\n\t" \
1215 "pushl 8(%%eax)\n\t" \
1216 "pushl 4(%%eax)\n\t" \
1217 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1218 VALGRIND_CALL_NOREDIR_EAX \
1219 VALGRIND_RESTORE_STACK \
1220 : /*out*/ "=a" (_res) \
1221 : /*in*/ "a" (&_argvec[0]) \
1222 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1224 lval = (__typeof__(lval)) _res; \
1227 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1229 volatile OrigFn _orig = (orig); \
1230 volatile unsigned long _argvec[7]; \
1231 volatile unsigned long _res; \
1232 _argvec[0] = (unsigned long)_orig.nraddr; \
1233 _argvec[1] = (unsigned long)(arg1); \
1234 _argvec[2] = (unsigned long)(arg2); \
1235 _argvec[3] = (unsigned long)(arg3); \
1236 _argvec[4] = (unsigned long)(arg4); \
1237 _argvec[5] = (unsigned long)(arg5); \
1238 _argvec[6] = (unsigned long)(arg6); \
1240 VALGRIND_ALIGN_STACK \
1241 "subl $8, %%esp\n\t" \
1242 "pushl 24(%%eax)\n\t" \
1243 "pushl 20(%%eax)\n\t" \
1244 "pushl 16(%%eax)\n\t" \
1245 "pushl 12(%%eax)\n\t" \
1246 "pushl 8(%%eax)\n\t" \
1247 "pushl 4(%%eax)\n\t" \
1248 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1249 VALGRIND_CALL_NOREDIR_EAX \
1250 VALGRIND_RESTORE_STACK \
1251 : /*out*/ "=a" (_res) \
1252 : /*in*/ "a" (&_argvec[0]) \
1253 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1255 lval = (__typeof__(lval)) _res; \
1258 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1261 volatile OrigFn _orig = (orig); \
1262 volatile unsigned long _argvec[8]; \
1263 volatile unsigned long _res; \
1264 _argvec[0] = (unsigned long)_orig.nraddr; \
1265 _argvec[1] = (unsigned long)(arg1); \
1266 _argvec[2] = (unsigned long)(arg2); \
1267 _argvec[3] = (unsigned long)(arg3); \
1268 _argvec[4] = (unsigned long)(arg4); \
1269 _argvec[5] = (unsigned long)(arg5); \
1270 _argvec[6] = (unsigned long)(arg6); \
1271 _argvec[7] = (unsigned long)(arg7); \
1273 VALGRIND_ALIGN_STACK \
1274 "subl $4, %%esp\n\t" \
1275 "pushl 28(%%eax)\n\t" \
1276 "pushl 24(%%eax)\n\t" \
1277 "pushl 20(%%eax)\n\t" \
1278 "pushl 16(%%eax)\n\t" \
1279 "pushl 12(%%eax)\n\t" \
1280 "pushl 8(%%eax)\n\t" \
1281 "pushl 4(%%eax)\n\t" \
1282 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1283 VALGRIND_CALL_NOREDIR_EAX \
1284 VALGRIND_RESTORE_STACK \
1285 : /*out*/ "=a" (_res) \
1286 : /*in*/ "a" (&_argvec[0]) \
1287 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1289 lval = (__typeof__(lval)) _res; \
1292 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1295 volatile OrigFn _orig = (orig); \
1296 volatile unsigned long _argvec[9]; \
1297 volatile unsigned long _res; \
1298 _argvec[0] = (unsigned long)_orig.nraddr; \
1299 _argvec[1] = (unsigned long)(arg1); \
1300 _argvec[2] = (unsigned long)(arg2); \
1301 _argvec[3] = (unsigned long)(arg3); \
1302 _argvec[4] = (unsigned long)(arg4); \
1303 _argvec[5] = (unsigned long)(arg5); \
1304 _argvec[6] = (unsigned long)(arg6); \
1305 _argvec[7] = (unsigned long)(arg7); \
1306 _argvec[8] = (unsigned long)(arg8); \
1308 VALGRIND_ALIGN_STACK \
1309 "pushl 32(%%eax)\n\t" \
1310 "pushl 28(%%eax)\n\t" \
1311 "pushl 24(%%eax)\n\t" \
1312 "pushl 20(%%eax)\n\t" \
1313 "pushl 16(%%eax)\n\t" \
1314 "pushl 12(%%eax)\n\t" \
1315 "pushl 8(%%eax)\n\t" \
1316 "pushl 4(%%eax)\n\t" \
1317 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1318 VALGRIND_CALL_NOREDIR_EAX \
1319 VALGRIND_RESTORE_STACK \
1320 : /*out*/ "=a" (_res) \
1321 : /*in*/ "a" (&_argvec[0]) \
1322 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1324 lval = (__typeof__(lval)) _res; \
1327 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1330 volatile OrigFn _orig = (orig); \
1331 volatile unsigned long _argvec[10]; \
1332 volatile unsigned long _res; \
1333 _argvec[0] = (unsigned long)_orig.nraddr; \
1334 _argvec[1] = (unsigned long)(arg1); \
1335 _argvec[2] = (unsigned long)(arg2); \
1336 _argvec[3] = (unsigned long)(arg3); \
1337 _argvec[4] = (unsigned long)(arg4); \
1338 _argvec[5] = (unsigned long)(arg5); \
1339 _argvec[6] = (unsigned long)(arg6); \
1340 _argvec[7] = (unsigned long)(arg7); \
1341 _argvec[8] = (unsigned long)(arg8); \
1342 _argvec[9] = (unsigned long)(arg9); \
1344 VALGRIND_ALIGN_STACK \
1345 "subl $12, %%esp\n\t" \
1346 "pushl 36(%%eax)\n\t" \
1347 "pushl 32(%%eax)\n\t" \
1348 "pushl 28(%%eax)\n\t" \
1349 "pushl 24(%%eax)\n\t" \
1350 "pushl 20(%%eax)\n\t" \
1351 "pushl 16(%%eax)\n\t" \
1352 "pushl 12(%%eax)\n\t" \
1353 "pushl 8(%%eax)\n\t" \
1354 "pushl 4(%%eax)\n\t" \
1355 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1356 VALGRIND_CALL_NOREDIR_EAX \
1357 VALGRIND_RESTORE_STACK \
1358 : /*out*/ "=a" (_res) \
1359 : /*in*/ "a" (&_argvec[0]) \
1360 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1362 lval = (__typeof__(lval)) _res; \
1365 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1366 arg7,arg8,arg9,arg10) \
1368 volatile OrigFn _orig = (orig); \
1369 volatile unsigned long _argvec[11]; \
1370 volatile unsigned long _res; \
1371 _argvec[0] = (unsigned long)_orig.nraddr; \
1372 _argvec[1] = (unsigned long)(arg1); \
1373 _argvec[2] = (unsigned long)(arg2); \
1374 _argvec[3] = (unsigned long)(arg3); \
1375 _argvec[4] = (unsigned long)(arg4); \
1376 _argvec[5] = (unsigned long)(arg5); \
1377 _argvec[6] = (unsigned long)(arg6); \
1378 _argvec[7] = (unsigned long)(arg7); \
1379 _argvec[8] = (unsigned long)(arg8); \
1380 _argvec[9] = (unsigned long)(arg9); \
1381 _argvec[10] = (unsigned long)(arg10); \
1383 VALGRIND_ALIGN_STACK \
1384 "subl $8, %%esp\n\t" \
1385 "pushl 40(%%eax)\n\t" \
1386 "pushl 36(%%eax)\n\t" \
1387 "pushl 32(%%eax)\n\t" \
1388 "pushl 28(%%eax)\n\t" \
1389 "pushl 24(%%eax)\n\t" \
1390 "pushl 20(%%eax)\n\t" \
1391 "pushl 16(%%eax)\n\t" \
1392 "pushl 12(%%eax)\n\t" \
1393 "pushl 8(%%eax)\n\t" \
1394 "pushl 4(%%eax)\n\t" \
1395 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1396 VALGRIND_CALL_NOREDIR_EAX \
1397 VALGRIND_RESTORE_STACK \
1398 : /*out*/ "=a" (_res) \
1399 : /*in*/ "a" (&_argvec[0]) \
1400 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1402 lval = (__typeof__(lval)) _res; \
1405 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1406 arg6,arg7,arg8,arg9,arg10, \
1409 volatile OrigFn _orig = (orig); \
1410 volatile unsigned long _argvec[12]; \
1411 volatile unsigned long _res; \
1412 _argvec[0] = (unsigned long)_orig.nraddr; \
1413 _argvec[1] = (unsigned long)(arg1); \
1414 _argvec[2] = (unsigned long)(arg2); \
1415 _argvec[3] = (unsigned long)(arg3); \
1416 _argvec[4] = (unsigned long)(arg4); \
1417 _argvec[5] = (unsigned long)(arg5); \
1418 _argvec[6] = (unsigned long)(arg6); \
1419 _argvec[7] = (unsigned long)(arg7); \
1420 _argvec[8] = (unsigned long)(arg8); \
1421 _argvec[9] = (unsigned long)(arg9); \
1422 _argvec[10] = (unsigned long)(arg10); \
1423 _argvec[11] = (unsigned long)(arg11); \
1425 VALGRIND_ALIGN_STACK \
1426 "subl $4, %%esp\n\t" \
1427 "pushl 44(%%eax)\n\t" \
1428 "pushl 40(%%eax)\n\t" \
1429 "pushl 36(%%eax)\n\t" \
1430 "pushl 32(%%eax)\n\t" \
1431 "pushl 28(%%eax)\n\t" \
1432 "pushl 24(%%eax)\n\t" \
1433 "pushl 20(%%eax)\n\t" \
1434 "pushl 16(%%eax)\n\t" \
1435 "pushl 12(%%eax)\n\t" \
1436 "pushl 8(%%eax)\n\t" \
1437 "pushl 4(%%eax)\n\t" \
1438 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1439 VALGRIND_CALL_NOREDIR_EAX \
1440 VALGRIND_RESTORE_STACK \
1441 : /*out*/ "=a" (_res) \
1442 : /*in*/ "a" (&_argvec[0]) \
1443 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1445 lval = (__typeof__(lval)) _res; \
1448 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1449 arg6,arg7,arg8,arg9,arg10, \
1452 volatile OrigFn _orig = (orig); \
1453 volatile unsigned long _argvec[13]; \
1454 volatile unsigned long _res; \
1455 _argvec[0] = (unsigned long)_orig.nraddr; \
1456 _argvec[1] = (unsigned long)(arg1); \
1457 _argvec[2] = (unsigned long)(arg2); \
1458 _argvec[3] = (unsigned long)(arg3); \
1459 _argvec[4] = (unsigned long)(arg4); \
1460 _argvec[5] = (unsigned long)(arg5); \
1461 _argvec[6] = (unsigned long)(arg6); \
1462 _argvec[7] = (unsigned long)(arg7); \
1463 _argvec[8] = (unsigned long)(arg8); \
1464 _argvec[9] = (unsigned long)(arg9); \
1465 _argvec[10] = (unsigned long)(arg10); \
1466 _argvec[11] = (unsigned long)(arg11); \
1467 _argvec[12] = (unsigned long)(arg12); \
1469 VALGRIND_ALIGN_STACK \
1470 "pushl 48(%%eax)\n\t" \
1471 "pushl 44(%%eax)\n\t" \
1472 "pushl 40(%%eax)\n\t" \
1473 "pushl 36(%%eax)\n\t" \
1474 "pushl 32(%%eax)\n\t" \
1475 "pushl 28(%%eax)\n\t" \
1476 "pushl 24(%%eax)\n\t" \
1477 "pushl 20(%%eax)\n\t" \
1478 "pushl 16(%%eax)\n\t" \
1479 "pushl 12(%%eax)\n\t" \
1480 "pushl 8(%%eax)\n\t" \
1481 "pushl 4(%%eax)\n\t" \
1482 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1483 VALGRIND_CALL_NOREDIR_EAX \
1484 VALGRIND_RESTORE_STACK \
1485 : /*out*/ "=a" (_res) \
1486 : /*in*/ "a" (&_argvec[0]) \
1487 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1489 lval = (__typeof__(lval)) _res; \
1492 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1494 /* ------------------------ amd64-{linux,darwin} --------------- */
1496 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1498 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1500 /* These regs are trashed by the hidden call. */
1501 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1502 "rdi", "r8", "r9", "r10", "r11"
/* This is all pretty complex.  It's so as to make stack unwinding
   work reliably.  See bug 243270.  The basic problem is the sub and
   add of 128 of %rsp in all of the following macros.  If gcc believes
   the CFA is in %rsp, then unwinding may fail, because what's at the
   CFA is not what gcc "expected" when it constructs the CFIs for the
   places where the macros are instantiated.

   But we can't just add a CFI annotation to increase the CFA offset
   by 128, to match the sub of 128 from %rsp, because we don't know
   whether gcc has chosen %rsp as the CFA at that point, or whether it
   has chosen some other register (eg, %rbp).  In the latter case,
   adding a CFI annotation to change the CFA offset is simply wrong.

   So the solution is to get hold of the CFA using
   __builtin_dwarf_cfa(), put it in a known register, and add a
   CFI annotation to say what the register is.  We choose %rbp for
   this (perhaps perversely), because:

   (1) %rbp is already subject to unwinding.  If a new register was
       chosen then the unwinder would have to unwind it in all stack
       traces, which is expensive, and

   (2) %rbp is already subject to precise exception updates in the
       JIT.  If a new register was chosen, we'd have to have precise
       exceptions for it too, which reduces performance of the
       generated code.

   However .. one extra complication.  We can't just whack the result
   of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
   list of trashed registers at the end of the inline assembly
   fragments; gcc won't allow %rbp to appear in that list.  Hence
   instead we need to stash %rbp in %r15 for the duration of the asm,
   and say that %r15 is trashed instead.  gcc seems happy to go with
   that.

   Oh .. and this all needs to be conditionalised so that it is
   unchanged from before this commit, when compiled with older gccs
   that don't support __builtin_dwarf_cfa.  Furthermore, since
   this header file is freestanding, it has to be independent of
   config.h, and so the following conditionalisation cannot depend on
   configure time checks.

   Although it's not clear from
   'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
   this expression excludes Darwin.
   .cfi directives in Darwin assembly appear to be completely
   different and I haven't investigated how they work.

   For even more entertainment value, note we have to use the
   completely undocumented __builtin_dwarf_cfa(), which appears to
   really compute the CFA, whereas __builtin_frame_address(0) claims
   to but actually doesn't.  See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47 */
/* When the toolchain supports CFI assembly, publish the CFA in %rbp
   (stashing the caller's %rbp in %r15) so unwinding through the
   hidden call keeps working; otherwise these expand to nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* %r14 holds the original %rsp across the call; it is therefore
   listed as trashed in the CALL_FN_ macros below. */
#define VALGRIND_ALIGN_STACK                                      \
   "movq %%rsp,%%r14\n\t"                                         \
   "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "movq %%r14,%%rsp\n\t"
/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
   long) == 8. */
/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
   macros.  In order not to trash the stack redzone, we need to drop
   %rsp by 128 before the hidden call, and restore afterwards.  The
   nastyness is that it is only by luck that the stack still appears
   to be unwindable during the hidden call - since then the behaviour
   of any routine using this macro does not match what the CFI data
   says.  Sigh.

   Why is this important?  Imagine that a wrapper has a stack
   allocated local, and passes to the hidden call, a pointer to it.
   Because gcc does not know about the hidden call, it may allocate
   that local in the redzone.  Unfortunately the hidden call may then
   trash it before it comes to use it.  So we must step clear of the
   redzone, for the duration of the hidden call, to make it safe.

   Probably the same problem afflicts the other redzone-style ABIs too
   (ppc64-linux); but for those, the stack is
   self describing (none of this CFI nonsense) so at least messing
   with the stack pointer doesn't give a danger of non-unwindable
   stack traces. */
1610 #define CALL_FN_W_v(lval, orig) \
1612 volatile OrigFn _orig = (orig); \
1613 volatile unsigned long _argvec[1]; \
1614 volatile unsigned long _res; \
1615 _argvec[0] = (unsigned long)_orig.nraddr; \
1617 VALGRIND_CFI_PROLOGUE \
1618 VALGRIND_ALIGN_STACK \
1619 "subq $128,%%rsp\n\t" \
1620 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1621 VALGRIND_CALL_NOREDIR_RAX \
1622 VALGRIND_RESTORE_STACK \
1623 VALGRIND_CFI_EPILOGUE \
1624 : /*out*/ "=a" (_res) \
1625 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1626 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1628 lval = (__typeof__(lval)) _res; \
1631 #define CALL_FN_W_W(lval, orig, arg1) \
1633 volatile OrigFn _orig = (orig); \
1634 volatile unsigned long _argvec[2]; \
1635 volatile unsigned long _res; \
1636 _argvec[0] = (unsigned long)_orig.nraddr; \
1637 _argvec[1] = (unsigned long)(arg1); \
1639 VALGRIND_CFI_PROLOGUE \
1640 VALGRIND_ALIGN_STACK \
1641 "subq $128,%%rsp\n\t" \
1642 "movq 8(%%rax), %%rdi\n\t" \
1643 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1644 VALGRIND_CALL_NOREDIR_RAX \
1645 VALGRIND_RESTORE_STACK \
1646 VALGRIND_CFI_EPILOGUE \
1647 : /*out*/ "=a" (_res) \
1648 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1649 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1651 lval = (__typeof__(lval)) _res; \
1654 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1656 volatile OrigFn _orig = (orig); \
1657 volatile unsigned long _argvec[3]; \
1658 volatile unsigned long _res; \
1659 _argvec[0] = (unsigned long)_orig.nraddr; \
1660 _argvec[1] = (unsigned long)(arg1); \
1661 _argvec[2] = (unsigned long)(arg2); \
1663 VALGRIND_CFI_PROLOGUE \
1664 VALGRIND_ALIGN_STACK \
1665 "subq $128,%%rsp\n\t" \
1666 "movq 16(%%rax), %%rsi\n\t" \
1667 "movq 8(%%rax), %%rdi\n\t" \
1668 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1669 VALGRIND_CALL_NOREDIR_RAX \
1670 VALGRIND_RESTORE_STACK \
1671 VALGRIND_CFI_EPILOGUE \
1672 : /*out*/ "=a" (_res) \
1673 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1674 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1676 lval = (__typeof__(lval)) _res; \
1679 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1681 volatile OrigFn _orig = (orig); \
1682 volatile unsigned long _argvec[4]; \
1683 volatile unsigned long _res; \
1684 _argvec[0] = (unsigned long)_orig.nraddr; \
1685 _argvec[1] = (unsigned long)(arg1); \
1686 _argvec[2] = (unsigned long)(arg2); \
1687 _argvec[3] = (unsigned long)(arg3); \
1689 VALGRIND_CFI_PROLOGUE \
1690 VALGRIND_ALIGN_STACK \
1691 "subq $128,%%rsp\n\t" \
1692 "movq 24(%%rax), %%rdx\n\t" \
1693 "movq 16(%%rax), %%rsi\n\t" \
1694 "movq 8(%%rax), %%rdi\n\t" \
1695 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1696 VALGRIND_CALL_NOREDIR_RAX \
1697 VALGRIND_RESTORE_STACK \
1698 VALGRIND_CFI_EPILOGUE \
1699 : /*out*/ "=a" (_res) \
1700 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1701 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1703 lval = (__typeof__(lval)) _res; \
1706 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1708 volatile OrigFn _orig = (orig); \
1709 volatile unsigned long _argvec[5]; \
1710 volatile unsigned long _res; \
1711 _argvec[0] = (unsigned long)_orig.nraddr; \
1712 _argvec[1] = (unsigned long)(arg1); \
1713 _argvec[2] = (unsigned long)(arg2); \
1714 _argvec[3] = (unsigned long)(arg3); \
1715 _argvec[4] = (unsigned long)(arg4); \
1717 VALGRIND_CFI_PROLOGUE \
1718 VALGRIND_ALIGN_STACK \
1719 "subq $128,%%rsp\n\t" \
1720 "movq 32(%%rax), %%rcx\n\t" \
1721 "movq 24(%%rax), %%rdx\n\t" \
1722 "movq 16(%%rax), %%rsi\n\t" \
1723 "movq 8(%%rax), %%rdi\n\t" \
1724 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1725 VALGRIND_CALL_NOREDIR_RAX \
1726 VALGRIND_RESTORE_STACK \
1727 VALGRIND_CFI_EPILOGUE \
1728 : /*out*/ "=a" (_res) \
1729 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1730 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1732 lval = (__typeof__(lval)) _res; \
1735 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1737 volatile OrigFn _orig = (orig); \
1738 volatile unsigned long _argvec[6]; \
1739 volatile unsigned long _res; \
1740 _argvec[0] = (unsigned long)_orig.nraddr; \
1741 _argvec[1] = (unsigned long)(arg1); \
1742 _argvec[2] = (unsigned long)(arg2); \
1743 _argvec[3] = (unsigned long)(arg3); \
1744 _argvec[4] = (unsigned long)(arg4); \
1745 _argvec[5] = (unsigned long)(arg5); \
1747 VALGRIND_CFI_PROLOGUE \
1748 VALGRIND_ALIGN_STACK \
1749 "subq $128,%%rsp\n\t" \
1750 "movq 40(%%rax), %%r8\n\t" \
1751 "movq 32(%%rax), %%rcx\n\t" \
1752 "movq 24(%%rax), %%rdx\n\t" \
1753 "movq 16(%%rax), %%rsi\n\t" \
1754 "movq 8(%%rax), %%rdi\n\t" \
1755 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1756 VALGRIND_CALL_NOREDIR_RAX \
1757 VALGRIND_RESTORE_STACK \
1758 VALGRIND_CFI_EPILOGUE \
1759 : /*out*/ "=a" (_res) \
1760 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1761 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1763 lval = (__typeof__(lval)) _res; \
1766 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1768 volatile OrigFn _orig = (orig); \
1769 volatile unsigned long _argvec[7]; \
1770 volatile unsigned long _res; \
1771 _argvec[0] = (unsigned long)_orig.nraddr; \
1772 _argvec[1] = (unsigned long)(arg1); \
1773 _argvec[2] = (unsigned long)(arg2); \
1774 _argvec[3] = (unsigned long)(arg3); \
1775 _argvec[4] = (unsigned long)(arg4); \
1776 _argvec[5] = (unsigned long)(arg5); \
1777 _argvec[6] = (unsigned long)(arg6); \
1779 VALGRIND_CFI_PROLOGUE \
1780 VALGRIND_ALIGN_STACK \
1781 "subq $128,%%rsp\n\t" \
1782 "movq 48(%%rax), %%r9\n\t" \
1783 "movq 40(%%rax), %%r8\n\t" \
1784 "movq 32(%%rax), %%rcx\n\t" \
1785 "movq 24(%%rax), %%rdx\n\t" \
1786 "movq 16(%%rax), %%rsi\n\t" \
1787 "movq 8(%%rax), %%rdi\n\t" \
1788 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1789 VALGRIND_CALL_NOREDIR_RAX \
1790 VALGRIND_RESTORE_STACK \
1791 VALGRIND_CFI_EPILOGUE \
1792 : /*out*/ "=a" (_res) \
1793 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1794 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1796 lval = (__typeof__(lval)) _res; \
1799 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1802 volatile OrigFn _orig = (orig); \
1803 volatile unsigned long _argvec[8]; \
1804 volatile unsigned long _res; \
1805 _argvec[0] = (unsigned long)_orig.nraddr; \
1806 _argvec[1] = (unsigned long)(arg1); \
1807 _argvec[2] = (unsigned long)(arg2); \
1808 _argvec[3] = (unsigned long)(arg3); \
1809 _argvec[4] = (unsigned long)(arg4); \
1810 _argvec[5] = (unsigned long)(arg5); \
1811 _argvec[6] = (unsigned long)(arg6); \
1812 _argvec[7] = (unsigned long)(arg7); \
1814 VALGRIND_CFI_PROLOGUE \
1815 VALGRIND_ALIGN_STACK \
1816 "subq $136,%%rsp\n\t" \
1817 "pushq 56(%%rax)\n\t" \
1818 "movq 48(%%rax), %%r9\n\t" \
1819 "movq 40(%%rax), %%r8\n\t" \
1820 "movq 32(%%rax), %%rcx\n\t" \
1821 "movq 24(%%rax), %%rdx\n\t" \
1822 "movq 16(%%rax), %%rsi\n\t" \
1823 "movq 8(%%rax), %%rdi\n\t" \
1824 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1825 VALGRIND_CALL_NOREDIR_RAX \
1826 VALGRIND_RESTORE_STACK \
1827 VALGRIND_CFI_EPILOGUE \
1828 : /*out*/ "=a" (_res) \
1829 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1830 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1832 lval = (__typeof__(lval)) _res; \
1835 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1838 volatile OrigFn _orig = (orig); \
1839 volatile unsigned long _argvec[9]; \
1840 volatile unsigned long _res; \
1841 _argvec[0] = (unsigned long)_orig.nraddr; \
1842 _argvec[1] = (unsigned long)(arg1); \
1843 _argvec[2] = (unsigned long)(arg2); \
1844 _argvec[3] = (unsigned long)(arg3); \
1845 _argvec[4] = (unsigned long)(arg4); \
1846 _argvec[5] = (unsigned long)(arg5); \
1847 _argvec[6] = (unsigned long)(arg6); \
1848 _argvec[7] = (unsigned long)(arg7); \
1849 _argvec[8] = (unsigned long)(arg8); \
1851 VALGRIND_CFI_PROLOGUE \
1852 VALGRIND_ALIGN_STACK \
1853 "subq $128,%%rsp\n\t" \
1854 "pushq 64(%%rax)\n\t" \
1855 "pushq 56(%%rax)\n\t" \
1856 "movq 48(%%rax), %%r9\n\t" \
1857 "movq 40(%%rax), %%r8\n\t" \
1858 "movq 32(%%rax), %%rcx\n\t" \
1859 "movq 24(%%rax), %%rdx\n\t" \
1860 "movq 16(%%rax), %%rsi\n\t" \
1861 "movq 8(%%rax), %%rdi\n\t" \
1862 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1863 VALGRIND_CALL_NOREDIR_RAX \
1864 VALGRIND_RESTORE_STACK \
1865 VALGRIND_CFI_EPILOGUE \
1866 : /*out*/ "=a" (_res) \
1867 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1868 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1870 lval = (__typeof__(lval)) _res; \
1873 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1876 volatile OrigFn _orig = (orig); \
1877 volatile unsigned long _argvec[10]; \
1878 volatile unsigned long _res; \
1879 _argvec[0] = (unsigned long)_orig.nraddr; \
1880 _argvec[1] = (unsigned long)(arg1); \
1881 _argvec[2] = (unsigned long)(arg2); \
1882 _argvec[3] = (unsigned long)(arg3); \
1883 _argvec[4] = (unsigned long)(arg4); \
1884 _argvec[5] = (unsigned long)(arg5); \
1885 _argvec[6] = (unsigned long)(arg6); \
1886 _argvec[7] = (unsigned long)(arg7); \
1887 _argvec[8] = (unsigned long)(arg8); \
1888 _argvec[9] = (unsigned long)(arg9); \
1890 VALGRIND_CFI_PROLOGUE \
1891 VALGRIND_ALIGN_STACK \
1892 "subq $136,%%rsp\n\t" \
1893 "pushq 72(%%rax)\n\t" \
1894 "pushq 64(%%rax)\n\t" \
1895 "pushq 56(%%rax)\n\t" \
1896 "movq 48(%%rax), %%r9\n\t" \
1897 "movq 40(%%rax), %%r8\n\t" \
1898 "movq 32(%%rax), %%rcx\n\t" \
1899 "movq 24(%%rax), %%rdx\n\t" \
1900 "movq 16(%%rax), %%rsi\n\t" \
1901 "movq 8(%%rax), %%rdi\n\t" \
1902 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1903 VALGRIND_CALL_NOREDIR_RAX \
1904 VALGRIND_RESTORE_STACK \
1905 VALGRIND_CFI_EPILOGUE \
1906 : /*out*/ "=a" (_res) \
1907 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1908 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1910 lval = (__typeof__(lval)) _res; \
1913 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1914 arg7,arg8,arg9,arg10) \
1916 volatile OrigFn _orig = (orig); \
1917 volatile unsigned long _argvec[11]; \
1918 volatile unsigned long _res; \
1919 _argvec[0] = (unsigned long)_orig.nraddr; \
1920 _argvec[1] = (unsigned long)(arg1); \
1921 _argvec[2] = (unsigned long)(arg2); \
1922 _argvec[3] = (unsigned long)(arg3); \
1923 _argvec[4] = (unsigned long)(arg4); \
1924 _argvec[5] = (unsigned long)(arg5); \
1925 _argvec[6] = (unsigned long)(arg6); \
1926 _argvec[7] = (unsigned long)(arg7); \
1927 _argvec[8] = (unsigned long)(arg8); \
1928 _argvec[9] = (unsigned long)(arg9); \
1929 _argvec[10] = (unsigned long)(arg10); \
1931 VALGRIND_CFI_PROLOGUE \
1932 VALGRIND_ALIGN_STACK \
1933 "subq $128,%%rsp\n\t" \
1934 "pushq 80(%%rax)\n\t" \
1935 "pushq 72(%%rax)\n\t" \
1936 "pushq 64(%%rax)\n\t" \
1937 "pushq 56(%%rax)\n\t" \
1938 "movq 48(%%rax), %%r9\n\t" \
1939 "movq 40(%%rax), %%r8\n\t" \
1940 "movq 32(%%rax), %%rcx\n\t" \
1941 "movq 24(%%rax), %%rdx\n\t" \
1942 "movq 16(%%rax), %%rsi\n\t" \
1943 "movq 8(%%rax), %%rdi\n\t" \
1944 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1945 VALGRIND_CALL_NOREDIR_RAX \
1946 VALGRIND_RESTORE_STACK \
1947 VALGRIND_CFI_EPILOGUE \
1948 : /*out*/ "=a" (_res) \
1949 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1950 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1952 lval = (__typeof__(lval)) _res; \
1955 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1956 arg7,arg8,arg9,arg10,arg11) \
1958 volatile OrigFn _orig = (orig); \
1959 volatile unsigned long _argvec[12]; \
1960 volatile unsigned long _res; \
1961 _argvec[0] = (unsigned long)_orig.nraddr; \
1962 _argvec[1] = (unsigned long)(arg1); \
1963 _argvec[2] = (unsigned long)(arg2); \
1964 _argvec[3] = (unsigned long)(arg3); \
1965 _argvec[4] = (unsigned long)(arg4); \
1966 _argvec[5] = (unsigned long)(arg5); \
1967 _argvec[6] = (unsigned long)(arg6); \
1968 _argvec[7] = (unsigned long)(arg7); \
1969 _argvec[8] = (unsigned long)(arg8); \
1970 _argvec[9] = (unsigned long)(arg9); \
1971 _argvec[10] = (unsigned long)(arg10); \
1972 _argvec[11] = (unsigned long)(arg11); \
1974 VALGRIND_CFI_PROLOGUE \
1975 VALGRIND_ALIGN_STACK \
1976 "subq $136,%%rsp\n\t" \
1977 "pushq 88(%%rax)\n\t" \
1978 "pushq 80(%%rax)\n\t" \
1979 "pushq 72(%%rax)\n\t" \
1980 "pushq 64(%%rax)\n\t" \
1981 "pushq 56(%%rax)\n\t" \
1982 "movq 48(%%rax), %%r9\n\t" \
1983 "movq 40(%%rax), %%r8\n\t" \
1984 "movq 32(%%rax), %%rcx\n\t" \
1985 "movq 24(%%rax), %%rdx\n\t" \
1986 "movq 16(%%rax), %%rsi\n\t" \
1987 "movq 8(%%rax), %%rdi\n\t" \
1988 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1989 VALGRIND_CALL_NOREDIR_RAX \
1990 VALGRIND_RESTORE_STACK \
1991 VALGRIND_CFI_EPILOGUE \
1992 : /*out*/ "=a" (_res) \
1993 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1994 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1996 lval = (__typeof__(lval)) _res; \
/* amd64: call a 12-word-arg function with redirection suppressed.
   Args 1-6 in rdi/rsi/rdx/rcx/r8/r9; args 7-12 pushed on the stack
   ("subq $128" + 6 pushq's = 176 bytes keeps 16-byte alignment).
   Target address is _argvec[0] in %rax; result returned in %rax.
   NOTE(review): extraction appears to have dropped the do{...}
   __asm__ volatile wrappers present upstream - verify. */
1999 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2000 arg7,arg8,arg9,arg10,arg11,arg12) \
2002 volatile OrigFn _orig = (orig); \
2003 volatile unsigned long _argvec[13]; \
2004 volatile unsigned long _res; \
2005 _argvec[0] = (unsigned long)_orig.nraddr; \
2006 _argvec[1] = (unsigned long)(arg1); \
2007 _argvec[2] = (unsigned long)(arg2); \
2008 _argvec[3] = (unsigned long)(arg3); \
2009 _argvec[4] = (unsigned long)(arg4); \
2010 _argvec[5] = (unsigned long)(arg5); \
2011 _argvec[6] = (unsigned long)(arg6); \
2012 _argvec[7] = (unsigned long)(arg7); \
2013 _argvec[8] = (unsigned long)(arg8); \
2014 _argvec[9] = (unsigned long)(arg9); \
2015 _argvec[10] = (unsigned long)(arg10); \
2016 _argvec[11] = (unsigned long)(arg11); \
2017 _argvec[12] = (unsigned long)(arg12); \
2019 VALGRIND_CFI_PROLOGUE \
2020 VALGRIND_ALIGN_STACK \
2021 "subq $128,%%rsp\n\t" \
2022 "pushq 96(%%rax)\n\t" \
2023 "pushq 88(%%rax)\n\t" \
2024 "pushq 80(%%rax)\n\t" \
2025 "pushq 72(%%rax)\n\t" \
2026 "pushq 64(%%rax)\n\t" \
2027 "pushq 56(%%rax)\n\t" \
2028 "movq 48(%%rax), %%r9\n\t" \
2029 "movq 40(%%rax), %%r8\n\t" \
2030 "movq 32(%%rax), %%rcx\n\t" \
2031 "movq 24(%%rax), %%rdx\n\t" \
2032 "movq 16(%%rax), %%rsi\n\t" \
2033 "movq 8(%%rax), %%rdi\n\t" \
2034 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2035 VALGRIND_CALL_NOREDIR_RAX \
2036 VALGRIND_RESTORE_STACK \
2037 VALGRIND_CFI_EPILOGUE \
2038 : /*out*/ "=a" (_res) \
2039 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2040 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2042 lval = (__typeof__(lval)) _res; \
2045 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2047 /* ------------------------ ppc32-linux ------------------------ */
2049 #if defined(PLAT_ppc32_linux)
2051 /* This is useful for finding out about the on-stack stuff:
2053 extern int f9 ( int,int,int,int,int,int,int,int,int );
2054 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2055 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2056 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2059 return f9(11,22,33,44,55,66,77,88,99);
2062 return f10(11,22,33,44,55,66,77,88,99,110);
2065 return f11(11,22,33,44,55,66,77,88,99,110,121);
2068 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2072 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2074 /* These regs are trashed by the hidden call. */
/* ppc32: registers clobbered by the hidden call, used in the asm
   clobber lists below.
   NOTE(review): the list seems truncated in this copy (it ends with a
   dangling continuation after "r10"); upstream also names r11-r13 -
   confirm against the original header. */
2075 #define __CALLER_SAVED_REGS \
2076 "lr", "ctr", "xer", \
2077 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2078 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2081 /* Macros to save and align the stack before making a function
2082 call and restore it afterwards as gcc may not keep the stack
2083 pointer aligned if it doesn't realise calls are being made
2084 to other functions. */
/* Align r1 (the stack pointer) down to a 16-byte boundary.
   NOTE(review): upstream first saves r1 into r28 ("mr 28,1") so
   RESTORE can undo this; that line and the RESTORE body ("mr 1,28")
   appear to be missing from this copy - verify. */
2086 #define VALGRIND_ALIGN_STACK \
2088 "rlwinm 1,1,0,0,27\n\t"
2089 #define VALGRIND_RESTORE_STACK \
2092 /* These CALL_FN_ macros assume that on ppc32-linux,
2093 sizeof(unsigned long) == 4. */
/* ppc32 hidden-call macros, 0-4 word args.  Pattern: copy target
   address and args into _argvec[], load args into r3..r10 from
   offsets 4(11), 8(11), ... (r11 points at _argvec), load the target
   into r11, then branch via the no-redirect mechanism; the result
   comes back in r3 (_res).  Clobbers __CALLER_SAVED_REGS and r28.
   NOTE(review): several lines present upstream (do{ }while(0),
   __asm__ volatile wrappers, and some "lwz 4,8(11)" loads) are
   missing from this copy - verify before use. */
2095 #define CALL_FN_W_v(lval, orig) \
2097 volatile OrigFn _orig = (orig); \
2098 volatile unsigned long _argvec[1]; \
2099 volatile unsigned long _res; \
2100 _argvec[0] = (unsigned long)_orig.nraddr; \
2102 VALGRIND_ALIGN_STACK \
2104 "lwz 11,0(11)\n\t" /* target->r11 */ \
2105 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2106 VALGRIND_RESTORE_STACK \
2108 : /*out*/ "=r" (_res) \
2109 : /*in*/ "r" (&_argvec[0]) \
2110 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2112 lval = (__typeof__(lval)) _res; \
/* One word arg: arg1 -> r3. */
2115 #define CALL_FN_W_W(lval, orig, arg1) \
2117 volatile OrigFn _orig = (orig); \
2118 volatile unsigned long _argvec[2]; \
2119 volatile unsigned long _res; \
2120 _argvec[0] = (unsigned long)_orig.nraddr; \
2121 _argvec[1] = (unsigned long)arg1; \
2123 VALGRIND_ALIGN_STACK \
2125 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2126 "lwz 11,0(11)\n\t" /* target->r11 */ \
2127 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2128 VALGRIND_RESTORE_STACK \
2130 : /*out*/ "=r" (_res) \
2131 : /*in*/ "r" (&_argvec[0]) \
2132 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2134 lval = (__typeof__(lval)) _res; \
/* Two word args: args -> r3,r4.  NOTE(review): the "lwz 4,8(11)"
   load is missing from this copy - verify upstream. */
2137 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2139 volatile OrigFn _orig = (orig); \
2140 volatile unsigned long _argvec[3]; \
2141 volatile unsigned long _res; \
2142 _argvec[0] = (unsigned long)_orig.nraddr; \
2143 _argvec[1] = (unsigned long)arg1; \
2144 _argvec[2] = (unsigned long)arg2; \
2146 VALGRIND_ALIGN_STACK \
2148 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2150 "lwz 11,0(11)\n\t" /* target->r11 */ \
2151 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2152 VALGRIND_RESTORE_STACK \
2154 : /*out*/ "=r" (_res) \
2155 : /*in*/ "r" (&_argvec[0]) \
2156 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2158 lval = (__typeof__(lval)) _res; \
/* Three word args: args -> r3,r4,r5. */
2161 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2163 volatile OrigFn _orig = (orig); \
2164 volatile unsigned long _argvec[4]; \
2165 volatile unsigned long _res; \
2166 _argvec[0] = (unsigned long)_orig.nraddr; \
2167 _argvec[1] = (unsigned long)arg1; \
2168 _argvec[2] = (unsigned long)arg2; \
2169 _argvec[3] = (unsigned long)arg3; \
2171 VALGRIND_ALIGN_STACK \
2173 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2175 "lwz 5,12(11)\n\t" \
2176 "lwz 11,0(11)\n\t" /* target->r11 */ \
2177 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2178 VALGRIND_RESTORE_STACK \
2180 : /*out*/ "=r" (_res) \
2181 : /*in*/ "r" (&_argvec[0]) \
2182 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2184 lval = (__typeof__(lval)) _res; \
/* Four word args: args -> r3..r6. */
2187 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2189 volatile OrigFn _orig = (orig); \
2190 volatile unsigned long _argvec[5]; \
2191 volatile unsigned long _res; \
2192 _argvec[0] = (unsigned long)_orig.nraddr; \
2193 _argvec[1] = (unsigned long)arg1; \
2194 _argvec[2] = (unsigned long)arg2; \
2195 _argvec[3] = (unsigned long)arg3; \
2196 _argvec[4] = (unsigned long)arg4; \
2198 VALGRIND_ALIGN_STACK \
2200 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2202 "lwz 5,12(11)\n\t" \
2203 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2204 "lwz 11,0(11)\n\t" /* target->r11 */ \
2205 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2206 VALGRIND_RESTORE_STACK \
2208 : /*out*/ "=r" (_res) \
2209 : /*in*/ "r" (&_argvec[0]) \
2210 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2212 lval = (__typeof__(lval)) _res; \
/* ppc32 hidden-call macros, 5-8 word args: all args still fit in the
   argument registers r3..r10 (offsets 4..32 off r11 = &_argvec[0]).
   Same call/clobber pattern as the smaller variants above. */
2215 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2217 volatile OrigFn _orig = (orig); \
2218 volatile unsigned long _argvec[6]; \
2219 volatile unsigned long _res; \
2220 _argvec[0] = (unsigned long)_orig.nraddr; \
2221 _argvec[1] = (unsigned long)arg1; \
2222 _argvec[2] = (unsigned long)arg2; \
2223 _argvec[3] = (unsigned long)arg3; \
2224 _argvec[4] = (unsigned long)arg4; \
2225 _argvec[5] = (unsigned long)arg5; \
2227 VALGRIND_ALIGN_STACK \
2229 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2231 "lwz 5,12(11)\n\t" \
2232 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2233 "lwz 7,20(11)\n\t" \
2234 "lwz 11,0(11)\n\t" /* target->r11 */ \
2235 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2236 VALGRIND_RESTORE_STACK \
2238 : /*out*/ "=r" (_res) \
2239 : /*in*/ "r" (&_argvec[0]) \
2240 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2242 lval = (__typeof__(lval)) _res; \
/* Six word args: args -> r3..r8. */
2245 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2247 volatile OrigFn _orig = (orig); \
2248 volatile unsigned long _argvec[7]; \
2249 volatile unsigned long _res; \
2250 _argvec[0] = (unsigned long)_orig.nraddr; \
2251 _argvec[1] = (unsigned long)arg1; \
2252 _argvec[2] = (unsigned long)arg2; \
2253 _argvec[3] = (unsigned long)arg3; \
2254 _argvec[4] = (unsigned long)arg4; \
2255 _argvec[5] = (unsigned long)arg5; \
2256 _argvec[6] = (unsigned long)arg6; \
2258 VALGRIND_ALIGN_STACK \
2260 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2262 "lwz 5,12(11)\n\t" \
2263 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2264 "lwz 7,20(11)\n\t" \
2265 "lwz 8,24(11)\n\t" \
2266 "lwz 11,0(11)\n\t" /* target->r11 */ \
2267 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2268 VALGRIND_RESTORE_STACK \
2270 : /*out*/ "=r" (_res) \
2271 : /*in*/ "r" (&_argvec[0]) \
2272 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2274 lval = (__typeof__(lval)) _res; \
/* Seven word args: args -> r3..r9. */
2277 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2280 volatile OrigFn _orig = (orig); \
2281 volatile unsigned long _argvec[8]; \
2282 volatile unsigned long _res; \
2283 _argvec[0] = (unsigned long)_orig.nraddr; \
2284 _argvec[1] = (unsigned long)arg1; \
2285 _argvec[2] = (unsigned long)arg2; \
2286 _argvec[3] = (unsigned long)arg3; \
2287 _argvec[4] = (unsigned long)arg4; \
2288 _argvec[5] = (unsigned long)arg5; \
2289 _argvec[6] = (unsigned long)arg6; \
2290 _argvec[7] = (unsigned long)arg7; \
2292 VALGRIND_ALIGN_STACK \
2294 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2296 "lwz 5,12(11)\n\t" \
2297 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2298 "lwz 7,20(11)\n\t" \
2299 "lwz 8,24(11)\n\t" \
2300 "lwz 9,28(11)\n\t" \
2301 "lwz 11,0(11)\n\t" /* target->r11 */ \
2302 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2303 VALGRIND_RESTORE_STACK \
2305 : /*out*/ "=r" (_res) \
2306 : /*in*/ "r" (&_argvec[0]) \
2307 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2309 lval = (__typeof__(lval)) _res; \
/* Eight word args: args -> r3..r10 (last register arg). */
2312 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2315 volatile OrigFn _orig = (orig); \
2316 volatile unsigned long _argvec[9]; \
2317 volatile unsigned long _res; \
2318 _argvec[0] = (unsigned long)_orig.nraddr; \
2319 _argvec[1] = (unsigned long)arg1; \
2320 _argvec[2] = (unsigned long)arg2; \
2321 _argvec[3] = (unsigned long)arg3; \
2322 _argvec[4] = (unsigned long)arg4; \
2323 _argvec[5] = (unsigned long)arg5; \
2324 _argvec[6] = (unsigned long)arg6; \
2325 _argvec[7] = (unsigned long)arg7; \
2326 _argvec[8] = (unsigned long)arg8; \
2328 VALGRIND_ALIGN_STACK \
2330 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2332 "lwz 5,12(11)\n\t" \
2333 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2334 "lwz 7,20(11)\n\t" \
2335 "lwz 8,24(11)\n\t" \
2336 "lwz 9,28(11)\n\t" \
2337 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2338 "lwz 11,0(11)\n\t" /* target->r11 */ \
2339 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2340 VALGRIND_RESTORE_STACK \
2342 : /*out*/ "=r" (_res) \
2343 : /*in*/ "r" (&_argvec[0]) \
2344 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2346 lval = (__typeof__(lval)) _res; \
/* ppc32 hidden-call macros, 9-12 word args: args 1-8 go in r3..r10;
   the extra args must be placed on the stack, so the frame is grown
   first ("addi 1,1,-N").
   NOTE(review): the "stw" stores that upstream uses to copy the
   loaded extra args (e.g. the value loaded by "lwz 3,36(11)") onto
   the stack appear to be missing from this copy, as do the do{...}
   __asm__ volatile wrappers - verify against upstream valgrind.h. */
2349 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2352 volatile OrigFn _orig = (orig); \
2353 volatile unsigned long _argvec[10]; \
2354 volatile unsigned long _res; \
2355 _argvec[0] = (unsigned long)_orig.nraddr; \
2356 _argvec[1] = (unsigned long)arg1; \
2357 _argvec[2] = (unsigned long)arg2; \
2358 _argvec[3] = (unsigned long)arg3; \
2359 _argvec[4] = (unsigned long)arg4; \
2360 _argvec[5] = (unsigned long)arg5; \
2361 _argvec[6] = (unsigned long)arg6; \
2362 _argvec[7] = (unsigned long)arg7; \
2363 _argvec[8] = (unsigned long)arg8; \
2364 _argvec[9] = (unsigned long)arg9; \
2366 VALGRIND_ALIGN_STACK \
2368 "addi 1,1,-16\n\t" \
2370 "lwz 3,36(11)\n\t" \
2373 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2375 "lwz 5,12(11)\n\t" \
2376 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2377 "lwz 7,20(11)\n\t" \
2378 "lwz 8,24(11)\n\t" \
2379 "lwz 9,28(11)\n\t" \
2380 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2381 "lwz 11,0(11)\n\t" /* target->r11 */ \
2382 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2383 VALGRIND_RESTORE_STACK \
2385 : /*out*/ "=r" (_res) \
2386 : /*in*/ "r" (&_argvec[0]) \
2387 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2389 lval = (__typeof__(lval)) _res; \
/* Ten word args: args 9-10 go via the stack. */
2392 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2393 arg7,arg8,arg9,arg10) \
2395 volatile OrigFn _orig = (orig); \
2396 volatile unsigned long _argvec[11]; \
2397 volatile unsigned long _res; \
2398 _argvec[0] = (unsigned long)_orig.nraddr; \
2399 _argvec[1] = (unsigned long)arg1; \
2400 _argvec[2] = (unsigned long)arg2; \
2401 _argvec[3] = (unsigned long)arg3; \
2402 _argvec[4] = (unsigned long)arg4; \
2403 _argvec[5] = (unsigned long)arg5; \
2404 _argvec[6] = (unsigned long)arg6; \
2405 _argvec[7] = (unsigned long)arg7; \
2406 _argvec[8] = (unsigned long)arg8; \
2407 _argvec[9] = (unsigned long)arg9; \
2408 _argvec[10] = (unsigned long)arg10; \
2410 VALGRIND_ALIGN_STACK \
2412 "addi 1,1,-16\n\t" \
2414 "lwz 3,40(11)\n\t" \
2417 "lwz 3,36(11)\n\t" \
2420 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2422 "lwz 5,12(11)\n\t" \
2423 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2424 "lwz 7,20(11)\n\t" \
2425 "lwz 8,24(11)\n\t" \
2426 "lwz 9,28(11)\n\t" \
2427 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2428 "lwz 11,0(11)\n\t" /* target->r11 */ \
2429 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2430 VALGRIND_RESTORE_STACK \
2432 : /*out*/ "=r" (_res) \
2433 : /*in*/ "r" (&_argvec[0]) \
2434 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2436 lval = (__typeof__(lval)) _res; \
/* Eleven word args: args 9-11 via the stack (frame grown by 32). */
2439 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2440 arg7,arg8,arg9,arg10,arg11) \
2442 volatile OrigFn _orig = (orig); \
2443 volatile unsigned long _argvec[12]; \
2444 volatile unsigned long _res; \
2445 _argvec[0] = (unsigned long)_orig.nraddr; \
2446 _argvec[1] = (unsigned long)arg1; \
2447 _argvec[2] = (unsigned long)arg2; \
2448 _argvec[3] = (unsigned long)arg3; \
2449 _argvec[4] = (unsigned long)arg4; \
2450 _argvec[5] = (unsigned long)arg5; \
2451 _argvec[6] = (unsigned long)arg6; \
2452 _argvec[7] = (unsigned long)arg7; \
2453 _argvec[8] = (unsigned long)arg8; \
2454 _argvec[9] = (unsigned long)arg9; \
2455 _argvec[10] = (unsigned long)arg10; \
2456 _argvec[11] = (unsigned long)arg11; \
2458 VALGRIND_ALIGN_STACK \
2460 "addi 1,1,-32\n\t" \
2462 "lwz 3,44(11)\n\t" \
2465 "lwz 3,40(11)\n\t" \
2468 "lwz 3,36(11)\n\t" \
2471 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2473 "lwz 5,12(11)\n\t" \
2474 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2475 "lwz 7,20(11)\n\t" \
2476 "lwz 8,24(11)\n\t" \
2477 "lwz 9,28(11)\n\t" \
2478 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2479 "lwz 11,0(11)\n\t" /* target->r11 */ \
2480 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2481 VALGRIND_RESTORE_STACK \
2483 : /*out*/ "=r" (_res) \
2484 : /*in*/ "r" (&_argvec[0]) \
2485 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2487 lval = (__typeof__(lval)) _res; \
/* Twelve word args: args 9-12 via the stack (frame grown by 32). */
2490 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2491 arg7,arg8,arg9,arg10,arg11,arg12) \
2493 volatile OrigFn _orig = (orig); \
2494 volatile unsigned long _argvec[13]; \
2495 volatile unsigned long _res; \
2496 _argvec[0] = (unsigned long)_orig.nraddr; \
2497 _argvec[1] = (unsigned long)arg1; \
2498 _argvec[2] = (unsigned long)arg2; \
2499 _argvec[3] = (unsigned long)arg3; \
2500 _argvec[4] = (unsigned long)arg4; \
2501 _argvec[5] = (unsigned long)arg5; \
2502 _argvec[6] = (unsigned long)arg6; \
2503 _argvec[7] = (unsigned long)arg7; \
2504 _argvec[8] = (unsigned long)arg8; \
2505 _argvec[9] = (unsigned long)arg9; \
2506 _argvec[10] = (unsigned long)arg10; \
2507 _argvec[11] = (unsigned long)arg11; \
2508 _argvec[12] = (unsigned long)arg12; \
2510 VALGRIND_ALIGN_STACK \
2512 "addi 1,1,-32\n\t" \
2514 "lwz 3,48(11)\n\t" \
2517 "lwz 3,44(11)\n\t" \
2520 "lwz 3,40(11)\n\t" \
2523 "lwz 3,36(11)\n\t" \
2526 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2528 "lwz 5,12(11)\n\t" \
2529 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2530 "lwz 7,20(11)\n\t" \
2531 "lwz 8,24(11)\n\t" \
2532 "lwz 9,28(11)\n\t" \
2533 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2534 "lwz 11,0(11)\n\t" /* target->r11 */ \
2535 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2536 VALGRIND_RESTORE_STACK \
2538 : /*out*/ "=r" (_res) \
2539 : /*in*/ "r" (&_argvec[0]) \
2540 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2542 lval = (__typeof__(lval)) _res; \
2545 #endif /* PLAT_ppc32_linux */
2547 /* ------------------------ ppc64-linux ------------------------ */
2549 #if defined(PLAT_ppc64_linux)
2551 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2553 /* These regs are trashed by the hidden call. */
/* ppc64: registers clobbered by the hidden call.
   NOTE(review): list appears truncated here (dangling continuation
   after "r10"); upstream also names r11-r13 - confirm. */
2554 #define __CALLER_SAVED_REGS \
2555 "lr", "ctr", "xer", \
2556 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2557 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2560 /* Macros to save and align the stack before making a function
2561 call and restore it afterwards as gcc may not keep the stack
2562 pointer aligned if it doesn't realise calls are being made
2563 to other functions. */
/* Align r1 down to a 16-byte boundary (rldicr clears the low bits).
   NOTE(review): upstream saves r1 to r28 first and RESTORE_STACK
   moves it back; those lines seem absent from this copy - verify. */
2565 #define VALGRIND_ALIGN_STACK \
2567 "rldicr 1,1,0,59\n\t"
2568 #define VALGRIND_RESTORE_STACK \
2571 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
/* ppc64 hidden-call macros, 0-4 word args.  The in-operand points at
   _argvec[2]; the current TOC pointer (r2) is saved at -16(r11) and
   the callee's TOC (from _argvec[1], i.e. -8(r11)) installed before
   the call, then restored afterwards.  Args are 8-byte slots loaded
   with ld from 8(11), 16(11), ...; result comes back in r3 (_res).
   NOTE(review): this copy is missing lines present upstream
   (do{ }while(0), __asm__ volatile wrappers, the "mr 11,%1" reloads
   after the call) - verify against upstream valgrind.h. */
2574 #define CALL_FN_W_v(lval, orig) \
2576 volatile OrigFn _orig = (orig); \
2577 volatile unsigned long _argvec[3+0]; \
2578 volatile unsigned long _res; \
2579 /* _argvec[0] holds current r2 across the call */ \
2580 _argvec[1] = (unsigned long)_orig.r2; \
2581 _argvec[2] = (unsigned long)_orig.nraddr; \
2583 VALGRIND_ALIGN_STACK \
2585 "std 2,-16(11)\n\t" /* save tocptr */ \
2586 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2587 "ld 11, 0(11)\n\t" /* target->r11 */ \
2588 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2591 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2592 VALGRIND_RESTORE_STACK \
2593 : /*out*/ "=r" (_res) \
2594 : /*in*/ "r" (&_argvec[2]) \
2595 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2597 lval = (__typeof__(lval)) _res; \
/* One word arg: arg1 -> r3. */
2600 #define CALL_FN_W_W(lval, orig, arg1) \
2602 volatile OrigFn _orig = (orig); \
2603 volatile unsigned long _argvec[3+1]; \
2604 volatile unsigned long _res; \
2605 /* _argvec[0] holds current r2 across the call */ \
2606 _argvec[1] = (unsigned long)_orig.r2; \
2607 _argvec[2] = (unsigned long)_orig.nraddr; \
2608 _argvec[2+1] = (unsigned long)arg1; \
2610 VALGRIND_ALIGN_STACK \
2612 "std 2,-16(11)\n\t" /* save tocptr */ \
2613 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2614 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2615 "ld 11, 0(11)\n\t" /* target->r11 */ \
2616 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2619 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2620 VALGRIND_RESTORE_STACK \
2621 : /*out*/ "=r" (_res) \
2622 : /*in*/ "r" (&_argvec[2]) \
2623 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2625 lval = (__typeof__(lval)) _res; \
/* Two word args: args -> r3,r4. */
2628 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2630 volatile OrigFn _orig = (orig); \
2631 volatile unsigned long _argvec[3+2]; \
2632 volatile unsigned long _res; \
2633 /* _argvec[0] holds current r2 across the call */ \
2634 _argvec[1] = (unsigned long)_orig.r2; \
2635 _argvec[2] = (unsigned long)_orig.nraddr; \
2636 _argvec[2+1] = (unsigned long)arg1; \
2637 _argvec[2+2] = (unsigned long)arg2; \
2639 VALGRIND_ALIGN_STACK \
2641 "std 2,-16(11)\n\t" /* save tocptr */ \
2642 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2643 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2644 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2645 "ld 11, 0(11)\n\t" /* target->r11 */ \
2646 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2649 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2650 VALGRIND_RESTORE_STACK \
2651 : /*out*/ "=r" (_res) \
2652 : /*in*/ "r" (&_argvec[2]) \
2653 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2655 lval = (__typeof__(lval)) _res; \
/* Three word args: args -> r3..r5. */
2658 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2660 volatile OrigFn _orig = (orig); \
2661 volatile unsigned long _argvec[3+3]; \
2662 volatile unsigned long _res; \
2663 /* _argvec[0] holds current r2 across the call */ \
2664 _argvec[1] = (unsigned long)_orig.r2; \
2665 _argvec[2] = (unsigned long)_orig.nraddr; \
2666 _argvec[2+1] = (unsigned long)arg1; \
2667 _argvec[2+2] = (unsigned long)arg2; \
2668 _argvec[2+3] = (unsigned long)arg3; \
2670 VALGRIND_ALIGN_STACK \
2672 "std 2,-16(11)\n\t" /* save tocptr */ \
2673 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2674 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2675 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2676 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2677 "ld 11, 0(11)\n\t" /* target->r11 */ \
2678 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2681 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2682 VALGRIND_RESTORE_STACK \
2683 : /*out*/ "=r" (_res) \
2684 : /*in*/ "r" (&_argvec[2]) \
2685 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2687 lval = (__typeof__(lval)) _res; \
/* Four word args: args -> r3..r6. */
2690 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2692 volatile OrigFn _orig = (orig); \
2693 volatile unsigned long _argvec[3+4]; \
2694 volatile unsigned long _res; \
2695 /* _argvec[0] holds current r2 across the call */ \
2696 _argvec[1] = (unsigned long)_orig.r2; \
2697 _argvec[2] = (unsigned long)_orig.nraddr; \
2698 _argvec[2+1] = (unsigned long)arg1; \
2699 _argvec[2+2] = (unsigned long)arg2; \
2700 _argvec[2+3] = (unsigned long)arg3; \
2701 _argvec[2+4] = (unsigned long)arg4; \
2703 VALGRIND_ALIGN_STACK \
2705 "std 2,-16(11)\n\t" /* save tocptr */ \
2706 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2707 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2708 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2709 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2710 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2711 "ld 11, 0(11)\n\t" /* target->r11 */ \
2712 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2715 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2716 VALGRIND_RESTORE_STACK \
2717 : /*out*/ "=r" (_res) \
2718 : /*in*/ "r" (&_argvec[2]) \
2719 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2721 lval = (__typeof__(lval)) _res; \
/* ppc64 hidden-call macros, 5-8 word args: all args still fit in the
   argument registers r3..r10 (8-byte slots at 8..64 off r11).  Same
   TOC save/swap/restore pattern as the smaller variants above. */
2724 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2726 volatile OrigFn _orig = (orig); \
2727 volatile unsigned long _argvec[3+5]; \
2728 volatile unsigned long _res; \
2729 /* _argvec[0] holds current r2 across the call */ \
2730 _argvec[1] = (unsigned long)_orig.r2; \
2731 _argvec[2] = (unsigned long)_orig.nraddr; \
2732 _argvec[2+1] = (unsigned long)arg1; \
2733 _argvec[2+2] = (unsigned long)arg2; \
2734 _argvec[2+3] = (unsigned long)arg3; \
2735 _argvec[2+4] = (unsigned long)arg4; \
2736 _argvec[2+5] = (unsigned long)arg5; \
2738 VALGRIND_ALIGN_STACK \
2740 "std 2,-16(11)\n\t" /* save tocptr */ \
2741 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2742 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2743 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2744 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2745 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2746 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2747 "ld 11, 0(11)\n\t" /* target->r11 */ \
2748 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2751 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2752 VALGRIND_RESTORE_STACK \
2753 : /*out*/ "=r" (_res) \
2754 : /*in*/ "r" (&_argvec[2]) \
2755 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2757 lval = (__typeof__(lval)) _res; \
/* Six word args: args -> r3..r8. */
2760 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2762 volatile OrigFn _orig = (orig); \
2763 volatile unsigned long _argvec[3+6]; \
2764 volatile unsigned long _res; \
2765 /* _argvec[0] holds current r2 across the call */ \
2766 _argvec[1] = (unsigned long)_orig.r2; \
2767 _argvec[2] = (unsigned long)_orig.nraddr; \
2768 _argvec[2+1] = (unsigned long)arg1; \
2769 _argvec[2+2] = (unsigned long)arg2; \
2770 _argvec[2+3] = (unsigned long)arg3; \
2771 _argvec[2+4] = (unsigned long)arg4; \
2772 _argvec[2+5] = (unsigned long)arg5; \
2773 _argvec[2+6] = (unsigned long)arg6; \
2775 VALGRIND_ALIGN_STACK \
2777 "std 2,-16(11)\n\t" /* save tocptr */ \
2778 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2779 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2780 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2781 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2782 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2783 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2784 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2785 "ld 11, 0(11)\n\t" /* target->r11 */ \
2786 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2789 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2790 VALGRIND_RESTORE_STACK \
2791 : /*out*/ "=r" (_res) \
2792 : /*in*/ "r" (&_argvec[2]) \
2793 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2795 lval = (__typeof__(lval)) _res; \
/* Seven word args: args -> r3..r9. */
2798 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2801 volatile OrigFn _orig = (orig); \
2802 volatile unsigned long _argvec[3+7]; \
2803 volatile unsigned long _res; \
2804 /* _argvec[0] holds current r2 across the call */ \
2805 _argvec[1] = (unsigned long)_orig.r2; \
2806 _argvec[2] = (unsigned long)_orig.nraddr; \
2807 _argvec[2+1] = (unsigned long)arg1; \
2808 _argvec[2+2] = (unsigned long)arg2; \
2809 _argvec[2+3] = (unsigned long)arg3; \
2810 _argvec[2+4] = (unsigned long)arg4; \
2811 _argvec[2+5] = (unsigned long)arg5; \
2812 _argvec[2+6] = (unsigned long)arg6; \
2813 _argvec[2+7] = (unsigned long)arg7; \
2815 VALGRIND_ALIGN_STACK \
2817 "std 2,-16(11)\n\t" /* save tocptr */ \
2818 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2819 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2820 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2821 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2822 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2823 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2824 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2825 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2826 "ld 11, 0(11)\n\t" /* target->r11 */ \
2827 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2830 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2831 VALGRIND_RESTORE_STACK \
2832 : /*out*/ "=r" (_res) \
2833 : /*in*/ "r" (&_argvec[2]) \
2834 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2836 lval = (__typeof__(lval)) _res; \
/* Eight word args: args -> r3..r10 (last register arg). */
2839 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2842 volatile OrigFn _orig = (orig); \
2843 volatile unsigned long _argvec[3+8]; \
2844 volatile unsigned long _res; \
2845 /* _argvec[0] holds current r2 across the call */ \
2846 _argvec[1] = (unsigned long)_orig.r2; \
2847 _argvec[2] = (unsigned long)_orig.nraddr; \
2848 _argvec[2+1] = (unsigned long)arg1; \
2849 _argvec[2+2] = (unsigned long)arg2; \
2850 _argvec[2+3] = (unsigned long)arg3; \
2851 _argvec[2+4] = (unsigned long)arg4; \
2852 _argvec[2+5] = (unsigned long)arg5; \
2853 _argvec[2+6] = (unsigned long)arg6; \
2854 _argvec[2+7] = (unsigned long)arg7; \
2855 _argvec[2+8] = (unsigned long)arg8; \
2857 VALGRIND_ALIGN_STACK \
2859 "std 2,-16(11)\n\t" /* save tocptr */ \
2860 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2861 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2862 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2863 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2864 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2865 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2866 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2867 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2868 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2869 "ld 11, 0(11)\n\t" /* target->r11 */ \
2870 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2873 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2874 VALGRIND_RESTORE_STACK \
2875 : /*out*/ "=r" (_res) \
2876 : /*in*/ "r" (&_argvec[2]) \
2877 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2879 lval = (__typeof__(lval)) _res; \
/* ppc64 hidden-call macros, 9-12 word args: args 1-8 go in r3..r10;
   the frame is grown ("addi 1,1,-128/-144") and the extra args are
   stored into the parameter-save area at 112(1) upward.
   NOTE(review): the "ld 3,72(11)"-style loads that upstream pairs
   with each "std 3,...(1)" store, and the do{...} __asm__ volatile
   wrappers, appear to be missing from this copy - verify against
   upstream valgrind.h. */
2882 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2885 volatile OrigFn _orig = (orig); \
2886 volatile unsigned long _argvec[3+9]; \
2887 volatile unsigned long _res; \
2888 /* _argvec[0] holds current r2 across the call */ \
2889 _argvec[1] = (unsigned long)_orig.r2; \
2890 _argvec[2] = (unsigned long)_orig.nraddr; \
2891 _argvec[2+1] = (unsigned long)arg1; \
2892 _argvec[2+2] = (unsigned long)arg2; \
2893 _argvec[2+3] = (unsigned long)arg3; \
2894 _argvec[2+4] = (unsigned long)arg4; \
2895 _argvec[2+5] = (unsigned long)arg5; \
2896 _argvec[2+6] = (unsigned long)arg6; \
2897 _argvec[2+7] = (unsigned long)arg7; \
2898 _argvec[2+8] = (unsigned long)arg8; \
2899 _argvec[2+9] = (unsigned long)arg9; \
2901 VALGRIND_ALIGN_STACK \
2903 "std 2,-16(11)\n\t" /* save tocptr */ \
2904 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2905 "addi 1,1,-128\n\t" /* expand stack frame */ \
2908 "std 3,112(1)\n\t" \
2910 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2911 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2912 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2913 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2914 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2915 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2916 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2917 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2918 "ld 11, 0(11)\n\t" /* target->r11 */ \
2919 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2922 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2923 VALGRIND_RESTORE_STACK \
2924 : /*out*/ "=r" (_res) \
2925 : /*in*/ "r" (&_argvec[2]) \
2926 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2928 lval = (__typeof__(lval)) _res; \
/* Ten word args: args 9-10 go to the stack at 112(1)/120(1). */
2931 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2932 arg7,arg8,arg9,arg10) \
2934 volatile OrigFn _orig = (orig); \
2935 volatile unsigned long _argvec[3+10]; \
2936 volatile unsigned long _res; \
2937 /* _argvec[0] holds current r2 across the call */ \
2938 _argvec[1] = (unsigned long)_orig.r2; \
2939 _argvec[2] = (unsigned long)_orig.nraddr; \
2940 _argvec[2+1] = (unsigned long)arg1; \
2941 _argvec[2+2] = (unsigned long)arg2; \
2942 _argvec[2+3] = (unsigned long)arg3; \
2943 _argvec[2+4] = (unsigned long)arg4; \
2944 _argvec[2+5] = (unsigned long)arg5; \
2945 _argvec[2+6] = (unsigned long)arg6; \
2946 _argvec[2+7] = (unsigned long)arg7; \
2947 _argvec[2+8] = (unsigned long)arg8; \
2948 _argvec[2+9] = (unsigned long)arg9; \
2949 _argvec[2+10] = (unsigned long)arg10; \
2951 VALGRIND_ALIGN_STACK \
2953 "std 2,-16(11)\n\t" /* save tocptr */ \
2954 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2955 "addi 1,1,-128\n\t" /* expand stack frame */ \
2958 "std 3,120(1)\n\t" \
2961 "std 3,112(1)\n\t" \
2963 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2964 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2965 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2966 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2967 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2968 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2969 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2970 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2971 "ld 11, 0(11)\n\t" /* target->r11 */ \
2972 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2975 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2976 VALGRIND_RESTORE_STACK \
2977 : /*out*/ "=r" (_res) \
2978 : /*in*/ "r" (&_argvec[2]) \
2979 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2981 lval = (__typeof__(lval)) _res; \
/* Eleven word args: frame grown by 144; args 9-11 stored at
   112..128(1). */
2984 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2985 arg7,arg8,arg9,arg10,arg11) \
2987 volatile OrigFn _orig = (orig); \
2988 volatile unsigned long _argvec[3+11]; \
2989 volatile unsigned long _res; \
2990 /* _argvec[0] holds current r2 across the call */ \
2991 _argvec[1] = (unsigned long)_orig.r2; \
2992 _argvec[2] = (unsigned long)_orig.nraddr; \
2993 _argvec[2+1] = (unsigned long)arg1; \
2994 _argvec[2+2] = (unsigned long)arg2; \
2995 _argvec[2+3] = (unsigned long)arg3; \
2996 _argvec[2+4] = (unsigned long)arg4; \
2997 _argvec[2+5] = (unsigned long)arg5; \
2998 _argvec[2+6] = (unsigned long)arg6; \
2999 _argvec[2+7] = (unsigned long)arg7; \
3000 _argvec[2+8] = (unsigned long)arg8; \
3001 _argvec[2+9] = (unsigned long)arg9; \
3002 _argvec[2+10] = (unsigned long)arg10; \
3003 _argvec[2+11] = (unsigned long)arg11; \
3005 VALGRIND_ALIGN_STACK \
3007 "std 2,-16(11)\n\t" /* save tocptr */ \
3008 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3009 "addi 1,1,-144\n\t" /* expand stack frame */ \
3012 "std 3,128(1)\n\t" \
3015 "std 3,120(1)\n\t" \
3018 "std 3,112(1)\n\t" \
3020 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3021 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3022 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3023 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3024 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3025 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3026 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3027 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3028 "ld 11, 0(11)\n\t" /* target->r11 */ \
3029 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3032 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3033 VALGRIND_RESTORE_STACK \
3034 : /*out*/ "=r" (_res) \
3035 : /*in*/ "r" (&_argvec[2]) \
3036 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3038 lval = (__typeof__(lval)) _res; \
/* Twelve word args: frame grown by 144; args 9-12 stored at
   112..136(1). */
3041 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3042 arg7,arg8,arg9,arg10,arg11,arg12) \
3044 volatile OrigFn _orig = (orig); \
3045 volatile unsigned long _argvec[3+12]; \
3046 volatile unsigned long _res; \
3047 /* _argvec[0] holds current r2 across the call */ \
3048 _argvec[1] = (unsigned long)_orig.r2; \
3049 _argvec[2] = (unsigned long)_orig.nraddr; \
3050 _argvec[2+1] = (unsigned long)arg1; \
3051 _argvec[2+2] = (unsigned long)arg2; \
3052 _argvec[2+3] = (unsigned long)arg3; \
3053 _argvec[2+4] = (unsigned long)arg4; \
3054 _argvec[2+5] = (unsigned long)arg5; \
3055 _argvec[2+6] = (unsigned long)arg6; \
3056 _argvec[2+7] = (unsigned long)arg7; \
3057 _argvec[2+8] = (unsigned long)arg8; \
3058 _argvec[2+9] = (unsigned long)arg9; \
3059 _argvec[2+10] = (unsigned long)arg10; \
3060 _argvec[2+11] = (unsigned long)arg11; \
3061 _argvec[2+12] = (unsigned long)arg12; \
3063 VALGRIND_ALIGN_STACK \
3065 "std 2,-16(11)\n\t" /* save tocptr */ \
3066 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3067 "addi 1,1,-144\n\t" /* expand stack frame */ \
3070 "std 3,136(1)\n\t" \
3073 "std 3,128(1)\n\t" \
3076 "std 3,120(1)\n\t" \
3079 "std 3,112(1)\n\t" \
3081 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3082 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3083 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3084 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3085 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3086 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3087 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3088 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3089 "ld 11, 0(11)\n\t" /* target->r11 */ \
3090 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3093 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3094 VALGRIND_RESTORE_STACK \
3095 : /*out*/ "=r" (_res) \
3096 : /*in*/ "r" (&_argvec[2]) \
3097 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3099 lval = (__typeof__(lval)) _res; \
3102 #endif /* PLAT_ppc64_linux */
3104 /* ------------------------- arm-linux ------------------------- */
3106 #if defined(PLAT_arm_linux)
3108 /* These regs are trashed by the hidden call. */
/* AAPCS argument/scratch registers r0-r3 and the link register r14 (lr)
   may all be overwritten by the hidden call; r4 is included because the
   CALL_FN_ macros below use it as the branch-target scratch register
   (see "ldr r4, [%1]" / VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4). */
3109 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3111 /* Macros to save and align the stack before making a function
3112 call and restore it afterwards as gcc may not keep the stack
3113 pointer aligned if it doesn't realise calls are being made
3114 to other functions. */
3116 /* This is a bit tricky. We store the original stack pointer in r10
3117 as it is callee-saves. gcc doesn't allow the use of r11 for some
3118 reason. Also, we can't directly "bic" the stack pointer in thumb
3119 mode since r13 isn't an allowed register number in that context.
3120 So use r4 as a temporary, since that is about to get trashed
3121 anyway, just after each use of this macro. Side effect is we need
3122 to be very careful about any future changes, since
3123 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
3124 #define VALGRIND_ALIGN_STACK \
3127 "bic r4, r4, #7\n\t" \
3129 #define VALGRIND_RESTORE_STACK \
3132 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
        long) == 4. */
3135 #define CALL_FN_W_v(lval, orig) \
3137 volatile OrigFn _orig = (orig); \
3138 volatile unsigned long _argvec[1]; \
3139 volatile unsigned long _res; \
3140 _argvec[0] = (unsigned long)_orig.nraddr; \
3142 VALGRIND_ALIGN_STACK \
3143 "ldr r4, [%1] \n\t" /* target->r4 */ \
3144 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3145 VALGRIND_RESTORE_STACK \
3147 : /*out*/ "=r" (_res) \
3148 : /*in*/ "0" (&_argvec[0]) \
3149 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3151 lval = (__typeof__(lval)) _res; \
3154 #define CALL_FN_W_W(lval, orig, arg1) \
3156 volatile OrigFn _orig = (orig); \
3157 volatile unsigned long _argvec[2]; \
3158 volatile unsigned long _res; \
3159 _argvec[0] = (unsigned long)_orig.nraddr; \
3160 _argvec[1] = (unsigned long)(arg1); \
3162 VALGRIND_ALIGN_STACK \
3163 "ldr r0, [%1, #4] \n\t" \
3164 "ldr r4, [%1] \n\t" /* target->r4 */ \
3165 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3166 VALGRIND_RESTORE_STACK \
3168 : /*out*/ "=r" (_res) \
3169 : /*in*/ "0" (&_argvec[0]) \
3170 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3172 lval = (__typeof__(lval)) _res; \
3175 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3177 volatile OrigFn _orig = (orig); \
3178 volatile unsigned long _argvec[3]; \
3179 volatile unsigned long _res; \
3180 _argvec[0] = (unsigned long)_orig.nraddr; \
3181 _argvec[1] = (unsigned long)(arg1); \
3182 _argvec[2] = (unsigned long)(arg2); \
3184 VALGRIND_ALIGN_STACK \
3185 "ldr r0, [%1, #4] \n\t" \
3186 "ldr r1, [%1, #8] \n\t" \
3187 "ldr r4, [%1] \n\t" /* target->r4 */ \
3188 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3189 VALGRIND_RESTORE_STACK \
3191 : /*out*/ "=r" (_res) \
3192 : /*in*/ "0" (&_argvec[0]) \
3193 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3195 lval = (__typeof__(lval)) _res; \
3198 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3200 volatile OrigFn _orig = (orig); \
3201 volatile unsigned long _argvec[4]; \
3202 volatile unsigned long _res; \
3203 _argvec[0] = (unsigned long)_orig.nraddr; \
3204 _argvec[1] = (unsigned long)(arg1); \
3205 _argvec[2] = (unsigned long)(arg2); \
3206 _argvec[3] = (unsigned long)(arg3); \
3208 VALGRIND_ALIGN_STACK \
3209 "ldr r0, [%1, #4] \n\t" \
3210 "ldr r1, [%1, #8] \n\t" \
3211 "ldr r2, [%1, #12] \n\t" \
3212 "ldr r4, [%1] \n\t" /* target->r4 */ \
3213 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3214 VALGRIND_RESTORE_STACK \
3216 : /*out*/ "=r" (_res) \
3217 : /*in*/ "0" (&_argvec[0]) \
3218 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3220 lval = (__typeof__(lval)) _res; \
3223 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3225 volatile OrigFn _orig = (orig); \
3226 volatile unsigned long _argvec[5]; \
3227 volatile unsigned long _res; \
3228 _argvec[0] = (unsigned long)_orig.nraddr; \
3229 _argvec[1] = (unsigned long)(arg1); \
3230 _argvec[2] = (unsigned long)(arg2); \
3231 _argvec[3] = (unsigned long)(arg3); \
3232 _argvec[4] = (unsigned long)(arg4); \
3234 VALGRIND_ALIGN_STACK \
3235 "ldr r0, [%1, #4] \n\t" \
3236 "ldr r1, [%1, #8] \n\t" \
3237 "ldr r2, [%1, #12] \n\t" \
3238 "ldr r3, [%1, #16] \n\t" \
3239 "ldr r4, [%1] \n\t" /* target->r4 */ \
3240 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3241 VALGRIND_RESTORE_STACK \
3243 : /*out*/ "=r" (_res) \
3244 : /*in*/ "0" (&_argvec[0]) \
3245 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3247 lval = (__typeof__(lval)) _res; \
3250 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3252 volatile OrigFn _orig = (orig); \
3253 volatile unsigned long _argvec[6]; \
3254 volatile unsigned long _res; \
3255 _argvec[0] = (unsigned long)_orig.nraddr; \
3256 _argvec[1] = (unsigned long)(arg1); \
3257 _argvec[2] = (unsigned long)(arg2); \
3258 _argvec[3] = (unsigned long)(arg3); \
3259 _argvec[4] = (unsigned long)(arg4); \
3260 _argvec[5] = (unsigned long)(arg5); \
3262 VALGRIND_ALIGN_STACK \
3263 "sub sp, sp, #4 \n\t" \
3264 "ldr r0, [%1, #20] \n\t" \
3266 "ldr r0, [%1, #4] \n\t" \
3267 "ldr r1, [%1, #8] \n\t" \
3268 "ldr r2, [%1, #12] \n\t" \
3269 "ldr r3, [%1, #16] \n\t" \
3270 "ldr r4, [%1] \n\t" /* target->r4 */ \
3271 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3272 VALGRIND_RESTORE_STACK \
3274 : /*out*/ "=r" (_res) \
3275 : /*in*/ "0" (&_argvec[0]) \
3276 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3278 lval = (__typeof__(lval)) _res; \
3281 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3283 volatile OrigFn _orig = (orig); \
3284 volatile unsigned long _argvec[7]; \
3285 volatile unsigned long _res; \
3286 _argvec[0] = (unsigned long)_orig.nraddr; \
3287 _argvec[1] = (unsigned long)(arg1); \
3288 _argvec[2] = (unsigned long)(arg2); \
3289 _argvec[3] = (unsigned long)(arg3); \
3290 _argvec[4] = (unsigned long)(arg4); \
3291 _argvec[5] = (unsigned long)(arg5); \
3292 _argvec[6] = (unsigned long)(arg6); \
3294 VALGRIND_ALIGN_STACK \
3295 "ldr r0, [%1, #20] \n\t" \
3296 "ldr r1, [%1, #24] \n\t" \
3297 "push {r0, r1} \n\t" \
3298 "ldr r0, [%1, #4] \n\t" \
3299 "ldr r1, [%1, #8] \n\t" \
3300 "ldr r2, [%1, #12] \n\t" \
3301 "ldr r3, [%1, #16] \n\t" \
3302 "ldr r4, [%1] \n\t" /* target->r4 */ \
3303 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3304 VALGRIND_RESTORE_STACK \
3306 : /*out*/ "=r" (_res) \
3307 : /*in*/ "0" (&_argvec[0]) \
3308 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3310 lval = (__typeof__(lval)) _res; \
3313 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3316 volatile OrigFn _orig = (orig); \
3317 volatile unsigned long _argvec[8]; \
3318 volatile unsigned long _res; \
3319 _argvec[0] = (unsigned long)_orig.nraddr; \
3320 _argvec[1] = (unsigned long)(arg1); \
3321 _argvec[2] = (unsigned long)(arg2); \
3322 _argvec[3] = (unsigned long)(arg3); \
3323 _argvec[4] = (unsigned long)(arg4); \
3324 _argvec[5] = (unsigned long)(arg5); \
3325 _argvec[6] = (unsigned long)(arg6); \
3326 _argvec[7] = (unsigned long)(arg7); \
3328 VALGRIND_ALIGN_STACK \
3329 "sub sp, sp, #4 \n\t" \
3330 "ldr r0, [%1, #20] \n\t" \
3331 "ldr r1, [%1, #24] \n\t" \
3332 "ldr r2, [%1, #28] \n\t" \
3333 "push {r0, r1, r2} \n\t" \
3334 "ldr r0, [%1, #4] \n\t" \
3335 "ldr r1, [%1, #8] \n\t" \
3336 "ldr r2, [%1, #12] \n\t" \
3337 "ldr r3, [%1, #16] \n\t" \
3338 "ldr r4, [%1] \n\t" /* target->r4 */ \
3339 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3340 VALGRIND_RESTORE_STACK \
3342 : /*out*/ "=r" (_res) \
3343 : /*in*/ "0" (&_argvec[0]) \
3344 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3346 lval = (__typeof__(lval)) _res; \
3349 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3352 volatile OrigFn _orig = (orig); \
3353 volatile unsigned long _argvec[9]; \
3354 volatile unsigned long _res; \
3355 _argvec[0] = (unsigned long)_orig.nraddr; \
3356 _argvec[1] = (unsigned long)(arg1); \
3357 _argvec[2] = (unsigned long)(arg2); \
3358 _argvec[3] = (unsigned long)(arg3); \
3359 _argvec[4] = (unsigned long)(arg4); \
3360 _argvec[5] = (unsigned long)(arg5); \
3361 _argvec[6] = (unsigned long)(arg6); \
3362 _argvec[7] = (unsigned long)(arg7); \
3363 _argvec[8] = (unsigned long)(arg8); \
3365 VALGRIND_ALIGN_STACK \
3366 "ldr r0, [%1, #20] \n\t" \
3367 "ldr r1, [%1, #24] \n\t" \
3368 "ldr r2, [%1, #28] \n\t" \
3369 "ldr r3, [%1, #32] \n\t" \
3370 "push {r0, r1, r2, r3} \n\t" \
3371 "ldr r0, [%1, #4] \n\t" \
3372 "ldr r1, [%1, #8] \n\t" \
3373 "ldr r2, [%1, #12] \n\t" \
3374 "ldr r3, [%1, #16] \n\t" \
3375 "ldr r4, [%1] \n\t" /* target->r4 */ \
3376 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3377 VALGRIND_RESTORE_STACK \
3379 : /*out*/ "=r" (_res) \
3380 : /*in*/ "0" (&_argvec[0]) \
3381 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3383 lval = (__typeof__(lval)) _res; \
3386 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3389 volatile OrigFn _orig = (orig); \
3390 volatile unsigned long _argvec[10]; \
3391 volatile unsigned long _res; \
3392 _argvec[0] = (unsigned long)_orig.nraddr; \
3393 _argvec[1] = (unsigned long)(arg1); \
3394 _argvec[2] = (unsigned long)(arg2); \
3395 _argvec[3] = (unsigned long)(arg3); \
3396 _argvec[4] = (unsigned long)(arg4); \
3397 _argvec[5] = (unsigned long)(arg5); \
3398 _argvec[6] = (unsigned long)(arg6); \
3399 _argvec[7] = (unsigned long)(arg7); \
3400 _argvec[8] = (unsigned long)(arg8); \
3401 _argvec[9] = (unsigned long)(arg9); \
3403 VALGRIND_ALIGN_STACK \
3404 "sub sp, sp, #4 \n\t" \
3405 "ldr r0, [%1, #20] \n\t" \
3406 "ldr r1, [%1, #24] \n\t" \
3407 "ldr r2, [%1, #28] \n\t" \
3408 "ldr r3, [%1, #32] \n\t" \
3409 "ldr r4, [%1, #36] \n\t" \
3410 "push {r0, r1, r2, r3, r4} \n\t" \
3411 "ldr r0, [%1, #4] \n\t" \
3412 "ldr r1, [%1, #8] \n\t" \
3413 "ldr r2, [%1, #12] \n\t" \
3414 "ldr r3, [%1, #16] \n\t" \
3415 "ldr r4, [%1] \n\t" /* target->r4 */ \
3416 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3417 VALGRIND_RESTORE_STACK \
3419 : /*out*/ "=r" (_res) \
3420 : /*in*/ "0" (&_argvec[0]) \
3421 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3423 lval = (__typeof__(lval)) _res; \
3426 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3427 arg7,arg8,arg9,arg10) \
3429 volatile OrigFn _orig = (orig); \
3430 volatile unsigned long _argvec[11]; \
3431 volatile unsigned long _res; \
3432 _argvec[0] = (unsigned long)_orig.nraddr; \
3433 _argvec[1] = (unsigned long)(arg1); \
3434 _argvec[2] = (unsigned long)(arg2); \
3435 _argvec[3] = (unsigned long)(arg3); \
3436 _argvec[4] = (unsigned long)(arg4); \
3437 _argvec[5] = (unsigned long)(arg5); \
3438 _argvec[6] = (unsigned long)(arg6); \
3439 _argvec[7] = (unsigned long)(arg7); \
3440 _argvec[8] = (unsigned long)(arg8); \
3441 _argvec[9] = (unsigned long)(arg9); \
3442 _argvec[10] = (unsigned long)(arg10); \
3444 VALGRIND_ALIGN_STACK \
3445 "ldr r0, [%1, #40] \n\t" \
3447 "ldr r0, [%1, #20] \n\t" \
3448 "ldr r1, [%1, #24] \n\t" \
3449 "ldr r2, [%1, #28] \n\t" \
3450 "ldr r3, [%1, #32] \n\t" \
3451 "ldr r4, [%1, #36] \n\t" \
3452 "push {r0, r1, r2, r3, r4} \n\t" \
3453 "ldr r0, [%1, #4] \n\t" \
3454 "ldr r1, [%1, #8] \n\t" \
3455 "ldr r2, [%1, #12] \n\t" \
3456 "ldr r3, [%1, #16] \n\t" \
3457 "ldr r4, [%1] \n\t" /* target->r4 */ \
3458 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3459 VALGRIND_RESTORE_STACK \
3461 : /*out*/ "=r" (_res) \
3462 : /*in*/ "0" (&_argvec[0]) \
3463 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3465 lval = (__typeof__(lval)) _res; \
3468 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3469 arg6,arg7,arg8,arg9,arg10, \
3472 volatile OrigFn _orig = (orig); \
3473 volatile unsigned long _argvec[12]; \
3474 volatile unsigned long _res; \
3475 _argvec[0] = (unsigned long)_orig.nraddr; \
3476 _argvec[1] = (unsigned long)(arg1); \
3477 _argvec[2] = (unsigned long)(arg2); \
3478 _argvec[3] = (unsigned long)(arg3); \
3479 _argvec[4] = (unsigned long)(arg4); \
3480 _argvec[5] = (unsigned long)(arg5); \
3481 _argvec[6] = (unsigned long)(arg6); \
3482 _argvec[7] = (unsigned long)(arg7); \
3483 _argvec[8] = (unsigned long)(arg8); \
3484 _argvec[9] = (unsigned long)(arg9); \
3485 _argvec[10] = (unsigned long)(arg10); \
3486 _argvec[11] = (unsigned long)(arg11); \
3488 VALGRIND_ALIGN_STACK \
3489 "sub sp, sp, #4 \n\t" \
3490 "ldr r0, [%1, #40] \n\t" \
3491 "ldr r1, [%1, #44] \n\t" \
3492 "push {r0, r1} \n\t" \
3493 "ldr r0, [%1, #20] \n\t" \
3494 "ldr r1, [%1, #24] \n\t" \
3495 "ldr r2, [%1, #28] \n\t" \
3496 "ldr r3, [%1, #32] \n\t" \
3497 "ldr r4, [%1, #36] \n\t" \
3498 "push {r0, r1, r2, r3, r4} \n\t" \
3499 "ldr r0, [%1, #4] \n\t" \
3500 "ldr r1, [%1, #8] \n\t" \
3501 "ldr r2, [%1, #12] \n\t" \
3502 "ldr r3, [%1, #16] \n\t" \
3503 "ldr r4, [%1] \n\t" /* target->r4 */ \
3504 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3505 VALGRIND_RESTORE_STACK \
3507 : /*out*/ "=r" (_res) \
3508 : /*in*/ "0" (&_argvec[0]) \
3509 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3511 lval = (__typeof__(lval)) _res; \
3514 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3515 arg6,arg7,arg8,arg9,arg10, \
3518 volatile OrigFn _orig = (orig); \
3519 volatile unsigned long _argvec[13]; \
3520 volatile unsigned long _res; \
3521 _argvec[0] = (unsigned long)_orig.nraddr; \
3522 _argvec[1] = (unsigned long)(arg1); \
3523 _argvec[2] = (unsigned long)(arg2); \
3524 _argvec[3] = (unsigned long)(arg3); \
3525 _argvec[4] = (unsigned long)(arg4); \
3526 _argvec[5] = (unsigned long)(arg5); \
3527 _argvec[6] = (unsigned long)(arg6); \
3528 _argvec[7] = (unsigned long)(arg7); \
3529 _argvec[8] = (unsigned long)(arg8); \
3530 _argvec[9] = (unsigned long)(arg9); \
3531 _argvec[10] = (unsigned long)(arg10); \
3532 _argvec[11] = (unsigned long)(arg11); \
3533 _argvec[12] = (unsigned long)(arg12); \
3535 VALGRIND_ALIGN_STACK \
3536 "ldr r0, [%1, #40] \n\t" \
3537 "ldr r1, [%1, #44] \n\t" \
3538 "ldr r2, [%1, #48] \n\t" \
3539 "push {r0, r1, r2} \n\t" \
3540 "ldr r0, [%1, #20] \n\t" \
3541 "ldr r1, [%1, #24] \n\t" \
3542 "ldr r2, [%1, #28] \n\t" \
3543 "ldr r3, [%1, #32] \n\t" \
3544 "ldr r4, [%1, #36] \n\t" \
3545 "push {r0, r1, r2, r3, r4} \n\t" \
3546 "ldr r0, [%1, #4] \n\t" \
3547 "ldr r1, [%1, #8] \n\t" \
3548 "ldr r2, [%1, #12] \n\t" \
3549 "ldr r3, [%1, #16] \n\t" \
3550 "ldr r4, [%1] \n\t" /* target->r4 */ \
3551 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3552 VALGRIND_RESTORE_STACK \
3554 : /*out*/ "=r" (_res) \
3555 : /*in*/ "0" (&_argvec[0]) \
3556 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3558 lval = (__typeof__(lval)) _res; \
3561 #endif /* PLAT_arm_linux */
3563 /* ------------------------ arm64-linux ------------------------ */
3565 #if defined(PLAT_arm64_linux)
3567 /* These regs are trashed by the hidden call. */
/* Integer registers x0-x18 plus x30 (lr), and the entire SIMD/FP file
   v0-v31, are treated as trashed by the hidden call.
   NOTE(review): x19/x20 are normally callee-saved under AAPCS64; they are
   presumably clobbered by the redirection machinery itself -- confirm
   before assuming they survive a hidden call. */
3568 #define __CALLER_SAVED_REGS \
3569      "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
3570      "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
3571      "x18", "x19", "x20", "x30", \
3572      "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
3573      "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
3574      "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
3575      "v26", "v27", "v28", "v29", "v30", "v31"
3577 /* x21 is callee-saved, so we can use it to save and restore SP around
        the hidden call. */
3579 #define VALGRIND_ALIGN_STACK \
3581 "bic sp, x21, #15\n\t"
3582 #define VALGRIND_RESTORE_STACK \
3585 /* These CALL_FN_ macros assume that on arm64-linux,
3586 sizeof(unsigned long) == 8. */
3588 #define CALL_FN_W_v(lval, orig) \
3590 volatile OrigFn _orig = (orig); \
3591 volatile unsigned long _argvec[1]; \
3592 volatile unsigned long _res; \
3593 _argvec[0] = (unsigned long)_orig.nraddr; \
3595 VALGRIND_ALIGN_STACK \
3596 "ldr x8, [%1] \n\t" /* target->x8 */ \
3597 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3598 VALGRIND_RESTORE_STACK \
3600 : /*out*/ "=r" (_res) \
3601 : /*in*/ "0" (&_argvec[0]) \
3602 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3604 lval = (__typeof__(lval)) _res; \
3607 #define CALL_FN_W_W(lval, orig, arg1) \
3609 volatile OrigFn _orig = (orig); \
3610 volatile unsigned long _argvec[2]; \
3611 volatile unsigned long _res; \
3612 _argvec[0] = (unsigned long)_orig.nraddr; \
3613 _argvec[1] = (unsigned long)(arg1); \
3615 VALGRIND_ALIGN_STACK \
3616 "ldr x0, [%1, #8] \n\t" \
3617 "ldr x8, [%1] \n\t" /* target->x8 */ \
3618 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3619 VALGRIND_RESTORE_STACK \
3621 : /*out*/ "=r" (_res) \
3622 : /*in*/ "0" (&_argvec[0]) \
3623 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3625 lval = (__typeof__(lval)) _res; \
3628 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3630 volatile OrigFn _orig = (orig); \
3631 volatile unsigned long _argvec[3]; \
3632 volatile unsigned long _res; \
3633 _argvec[0] = (unsigned long)_orig.nraddr; \
3634 _argvec[1] = (unsigned long)(arg1); \
3635 _argvec[2] = (unsigned long)(arg2); \
3637 VALGRIND_ALIGN_STACK \
3638 "ldr x0, [%1, #8] \n\t" \
3639 "ldr x1, [%1, #16] \n\t" \
3640 "ldr x8, [%1] \n\t" /* target->x8 */ \
3641 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3642 VALGRIND_RESTORE_STACK \
3644 : /*out*/ "=r" (_res) \
3645 : /*in*/ "0" (&_argvec[0]) \
3646 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3648 lval = (__typeof__(lval)) _res; \
3651 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3653 volatile OrigFn _orig = (orig); \
3654 volatile unsigned long _argvec[4]; \
3655 volatile unsigned long _res; \
3656 _argvec[0] = (unsigned long)_orig.nraddr; \
3657 _argvec[1] = (unsigned long)(arg1); \
3658 _argvec[2] = (unsigned long)(arg2); \
3659 _argvec[3] = (unsigned long)(arg3); \
3661 VALGRIND_ALIGN_STACK \
3662 "ldr x0, [%1, #8] \n\t" \
3663 "ldr x1, [%1, #16] \n\t" \
3664 "ldr x2, [%1, #24] \n\t" \
3665 "ldr x8, [%1] \n\t" /* target->x8 */ \
3666 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3667 VALGRIND_RESTORE_STACK \
3669 : /*out*/ "=r" (_res) \
3670 : /*in*/ "0" (&_argvec[0]) \
3671 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3673 lval = (__typeof__(lval)) _res; \
3676 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3678 volatile OrigFn _orig = (orig); \
3679 volatile unsigned long _argvec[5]; \
3680 volatile unsigned long _res; \
3681 _argvec[0] = (unsigned long)_orig.nraddr; \
3682 _argvec[1] = (unsigned long)(arg1); \
3683 _argvec[2] = (unsigned long)(arg2); \
3684 _argvec[3] = (unsigned long)(arg3); \
3685 _argvec[4] = (unsigned long)(arg4); \
3687 VALGRIND_ALIGN_STACK \
3688 "ldr x0, [%1, #8] \n\t" \
3689 "ldr x1, [%1, #16] \n\t" \
3690 "ldr x2, [%1, #24] \n\t" \
3691 "ldr x3, [%1, #32] \n\t" \
3692 "ldr x8, [%1] \n\t" /* target->x8 */ \
3693 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3694 VALGRIND_RESTORE_STACK \
3696 : /*out*/ "=r" (_res) \
3697 : /*in*/ "0" (&_argvec[0]) \
3698 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3700 lval = (__typeof__(lval)) _res; \
3703 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3705 volatile OrigFn _orig = (orig); \
3706 volatile unsigned long _argvec[6]; \
3707 volatile unsigned long _res; \
3708 _argvec[0] = (unsigned long)_orig.nraddr; \
3709 _argvec[1] = (unsigned long)(arg1); \
3710 _argvec[2] = (unsigned long)(arg2); \
3711 _argvec[3] = (unsigned long)(arg3); \
3712 _argvec[4] = (unsigned long)(arg4); \
3713 _argvec[5] = (unsigned long)(arg5); \
3715 VALGRIND_ALIGN_STACK \
3716 "ldr x0, [%1, #8] \n\t" \
3717 "ldr x1, [%1, #16] \n\t" \
3718 "ldr x2, [%1, #24] \n\t" \
3719 "ldr x3, [%1, #32] \n\t" \
3720 "ldr x4, [%1, #40] \n\t" \
3721 "ldr x8, [%1] \n\t" /* target->x8 */ \
3722 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3723 VALGRIND_RESTORE_STACK \
3725 : /*out*/ "=r" (_res) \
3726 : /*in*/ "0" (&_argvec[0]) \
3727 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3729 lval = (__typeof__(lval)) _res; \
3732 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3734 volatile OrigFn _orig = (orig); \
3735 volatile unsigned long _argvec[7]; \
3736 volatile unsigned long _res; \
3737 _argvec[0] = (unsigned long)_orig.nraddr; \
3738 _argvec[1] = (unsigned long)(arg1); \
3739 _argvec[2] = (unsigned long)(arg2); \
3740 _argvec[3] = (unsigned long)(arg3); \
3741 _argvec[4] = (unsigned long)(arg4); \
3742 _argvec[5] = (unsigned long)(arg5); \
3743 _argvec[6] = (unsigned long)(arg6); \
3745 VALGRIND_ALIGN_STACK \
3746 "ldr x0, [%1, #8] \n\t" \
3747 "ldr x1, [%1, #16] \n\t" \
3748 "ldr x2, [%1, #24] \n\t" \
3749 "ldr x3, [%1, #32] \n\t" \
3750 "ldr x4, [%1, #40] \n\t" \
3751 "ldr x5, [%1, #48] \n\t" \
3752 "ldr x8, [%1] \n\t" /* target->x8 */ \
3753 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3754 VALGRIND_RESTORE_STACK \
3756 : /*out*/ "=r" (_res) \
3757 : /*in*/ "0" (&_argvec[0]) \
3758 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3760 lval = (__typeof__(lval)) _res; \
3763 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3766 volatile OrigFn _orig = (orig); \
3767 volatile unsigned long _argvec[8]; \
3768 volatile unsigned long _res; \
3769 _argvec[0] = (unsigned long)_orig.nraddr; \
3770 _argvec[1] = (unsigned long)(arg1); \
3771 _argvec[2] = (unsigned long)(arg2); \
3772 _argvec[3] = (unsigned long)(arg3); \
3773 _argvec[4] = (unsigned long)(arg4); \
3774 _argvec[5] = (unsigned long)(arg5); \
3775 _argvec[6] = (unsigned long)(arg6); \
3776 _argvec[7] = (unsigned long)(arg7); \
3778 VALGRIND_ALIGN_STACK \
3779 "ldr x0, [%1, #8] \n\t" \
3780 "ldr x1, [%1, #16] \n\t" \
3781 "ldr x2, [%1, #24] \n\t" \
3782 "ldr x3, [%1, #32] \n\t" \
3783 "ldr x4, [%1, #40] \n\t" \
3784 "ldr x5, [%1, #48] \n\t" \
3785 "ldr x6, [%1, #56] \n\t" \
3786 "ldr x8, [%1] \n\t" /* target->x8 */ \
3787 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3788 VALGRIND_RESTORE_STACK \
3790 : /*out*/ "=r" (_res) \
3791 : /*in*/ "0" (&_argvec[0]) \
3792 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3794 lval = (__typeof__(lval)) _res; \
3797 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3800 volatile OrigFn _orig = (orig); \
3801 volatile unsigned long _argvec[9]; \
3802 volatile unsigned long _res; \
3803 _argvec[0] = (unsigned long)_orig.nraddr; \
3804 _argvec[1] = (unsigned long)(arg1); \
3805 _argvec[2] = (unsigned long)(arg2); \
3806 _argvec[3] = (unsigned long)(arg3); \
3807 _argvec[4] = (unsigned long)(arg4); \
3808 _argvec[5] = (unsigned long)(arg5); \
3809 _argvec[6] = (unsigned long)(arg6); \
3810 _argvec[7] = (unsigned long)(arg7); \
3811 _argvec[8] = (unsigned long)(arg8); \
3813 VALGRIND_ALIGN_STACK \
3814 "ldr x0, [%1, #8] \n\t" \
3815 "ldr x1, [%1, #16] \n\t" \
3816 "ldr x2, [%1, #24] \n\t" \
3817 "ldr x3, [%1, #32] \n\t" \
3818 "ldr x4, [%1, #40] \n\t" \
3819 "ldr x5, [%1, #48] \n\t" \
3820 "ldr x6, [%1, #56] \n\t" \
3821 "ldr x7, [%1, #64] \n\t" \
3822 "ldr x8, [%1] \n\t" /* target->x8 */ \
3823 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3824 VALGRIND_RESTORE_STACK \
3826 : /*out*/ "=r" (_res) \
3827 : /*in*/ "0" (&_argvec[0]) \
3828 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3830 lval = (__typeof__(lval)) _res; \
3833 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3836 volatile OrigFn _orig = (orig); \
3837 volatile unsigned long _argvec[10]; \
3838 volatile unsigned long _res; \
3839 _argvec[0] = (unsigned long)_orig.nraddr; \
3840 _argvec[1] = (unsigned long)(arg1); \
3841 _argvec[2] = (unsigned long)(arg2); \
3842 _argvec[3] = (unsigned long)(arg3); \
3843 _argvec[4] = (unsigned long)(arg4); \
3844 _argvec[5] = (unsigned long)(arg5); \
3845 _argvec[6] = (unsigned long)(arg6); \
3846 _argvec[7] = (unsigned long)(arg7); \
3847 _argvec[8] = (unsigned long)(arg8); \
3848 _argvec[9] = (unsigned long)(arg9); \
3850 VALGRIND_ALIGN_STACK \
3851 "sub sp, sp, #0x20 \n\t" \
3852 "ldr x0, [%1, #8] \n\t" \
3853 "ldr x1, [%1, #16] \n\t" \
3854 "ldr x2, [%1, #24] \n\t" \
3855 "ldr x3, [%1, #32] \n\t" \
3856 "ldr x4, [%1, #40] \n\t" \
3857 "ldr x5, [%1, #48] \n\t" \
3858 "ldr x6, [%1, #56] \n\t" \
3859 "ldr x7, [%1, #64] \n\t" \
3860 "ldr x8, [%1, #72] \n\t" \
3861 "str x8, [sp, #0] \n\t" \
3862 "ldr x8, [%1] \n\t" /* target->x8 */ \
3863 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3864 VALGRIND_RESTORE_STACK \
3866 : /*out*/ "=r" (_res) \
3867 : /*in*/ "0" (&_argvec[0]) \
3868 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3870 lval = (__typeof__(lval)) _res; \
3873 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3874 arg7,arg8,arg9,arg10) \
3876 volatile OrigFn _orig = (orig); \
3877 volatile unsigned long _argvec[11]; \
3878 volatile unsigned long _res; \
3879 _argvec[0] = (unsigned long)_orig.nraddr; \
3880 _argvec[1] = (unsigned long)(arg1); \
3881 _argvec[2] = (unsigned long)(arg2); \
3882 _argvec[3] = (unsigned long)(arg3); \
3883 _argvec[4] = (unsigned long)(arg4); \
3884 _argvec[5] = (unsigned long)(arg5); \
3885 _argvec[6] = (unsigned long)(arg6); \
3886 _argvec[7] = (unsigned long)(arg7); \
3887 _argvec[8] = (unsigned long)(arg8); \
3888 _argvec[9] = (unsigned long)(arg9); \
3889 _argvec[10] = (unsigned long)(arg10); \
3891 VALGRIND_ALIGN_STACK \
3892 "sub sp, sp, #0x20 \n\t" \
3893 "ldr x0, [%1, #8] \n\t" \
3894 "ldr x1, [%1, #16] \n\t" \
3895 "ldr x2, [%1, #24] \n\t" \
3896 "ldr x3, [%1, #32] \n\t" \
3897 "ldr x4, [%1, #40] \n\t" \
3898 "ldr x5, [%1, #48] \n\t" \
3899 "ldr x6, [%1, #56] \n\t" \
3900 "ldr x7, [%1, #64] \n\t" \
3901 "ldr x8, [%1, #72] \n\t" \
3902 "str x8, [sp, #0] \n\t" \
3903 "ldr x8, [%1, #80] \n\t" \
3904 "str x8, [sp, #8] \n\t" \
3905 "ldr x8, [%1] \n\t" /* target->x8 */ \
3906 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3907 VALGRIND_RESTORE_STACK \
3909 : /*out*/ "=r" (_res) \
3910 : /*in*/ "0" (&_argvec[0]) \
3911 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3913 lval = (__typeof__(lval)) _res; \
3916 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3917 arg7,arg8,arg9,arg10,arg11) \
3919 volatile OrigFn _orig = (orig); \
3920 volatile unsigned long _argvec[12]; \
3921 volatile unsigned long _res; \
3922 _argvec[0] = (unsigned long)_orig.nraddr; \
3923 _argvec[1] = (unsigned long)(arg1); \
3924 _argvec[2] = (unsigned long)(arg2); \
3925 _argvec[3] = (unsigned long)(arg3); \
3926 _argvec[4] = (unsigned long)(arg4); \
3927 _argvec[5] = (unsigned long)(arg5); \
3928 _argvec[6] = (unsigned long)(arg6); \
3929 _argvec[7] = (unsigned long)(arg7); \
3930 _argvec[8] = (unsigned long)(arg8); \
3931 _argvec[9] = (unsigned long)(arg9); \
3932 _argvec[10] = (unsigned long)(arg10); \
3933 _argvec[11] = (unsigned long)(arg11); \
3935 VALGRIND_ALIGN_STACK \
3936 "sub sp, sp, #0x30 \n\t" \
3937 "ldr x0, [%1, #8] \n\t" \
3938 "ldr x1, [%1, #16] \n\t" \
3939 "ldr x2, [%1, #24] \n\t" \
3940 "ldr x3, [%1, #32] \n\t" \
3941 "ldr x4, [%1, #40] \n\t" \
3942 "ldr x5, [%1, #48] \n\t" \
3943 "ldr x6, [%1, #56] \n\t" \
3944 "ldr x7, [%1, #64] \n\t" \
3945 "ldr x8, [%1, #72] \n\t" \
3946 "str x8, [sp, #0] \n\t" \
3947 "ldr x8, [%1, #80] \n\t" \
3948 "str x8, [sp, #8] \n\t" \
3949 "ldr x8, [%1, #88] \n\t" \
3950 "str x8, [sp, #16] \n\t" \
3951 "ldr x8, [%1] \n\t" /* target->x8 */ \
3952 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3953 VALGRIND_RESTORE_STACK \
3955 : /*out*/ "=r" (_res) \
3956 : /*in*/ "0" (&_argvec[0]) \
3957 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3959 lval = (__typeof__(lval)) _res; \
3962 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3963 arg7,arg8,arg9,arg10,arg11, \
3966 volatile OrigFn _orig = (orig); \
3967 volatile unsigned long _argvec[13]; \
3968 volatile unsigned long _res; \
3969 _argvec[0] = (unsigned long)_orig.nraddr; \
3970 _argvec[1] = (unsigned long)(arg1); \
3971 _argvec[2] = (unsigned long)(arg2); \
3972 _argvec[3] = (unsigned long)(arg3); \
3973 _argvec[4] = (unsigned long)(arg4); \
3974 _argvec[5] = (unsigned long)(arg5); \
3975 _argvec[6] = (unsigned long)(arg6); \
3976 _argvec[7] = (unsigned long)(arg7); \
3977 _argvec[8] = (unsigned long)(arg8); \
3978 _argvec[9] = (unsigned long)(arg9); \
3979 _argvec[10] = (unsigned long)(arg10); \
3980 _argvec[11] = (unsigned long)(arg11); \
3981 _argvec[12] = (unsigned long)(arg12); \
3983 VALGRIND_ALIGN_STACK \
3984 "sub sp, sp, #0x30 \n\t" \
3985 "ldr x0, [%1, #8] \n\t" \
3986 "ldr x1, [%1, #16] \n\t" \
3987 "ldr x2, [%1, #24] \n\t" \
3988 "ldr x3, [%1, #32] \n\t" \
3989 "ldr x4, [%1, #40] \n\t" \
3990 "ldr x5, [%1, #48] \n\t" \
3991 "ldr x6, [%1, #56] \n\t" \
3992 "ldr x7, [%1, #64] \n\t" \
3993 "ldr x8, [%1, #72] \n\t" \
3994 "str x8, [sp, #0] \n\t" \
3995 "ldr x8, [%1, #80] \n\t" \
3996 "str x8, [sp, #8] \n\t" \
3997 "ldr x8, [%1, #88] \n\t" \
3998 "str x8, [sp, #16] \n\t" \
3999 "ldr x8, [%1, #96] \n\t" \
4000 "str x8, [sp, #24] \n\t" \
4001 "ldr x8, [%1] \n\t" /* target->x8 */ \
4002 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4003 VALGRIND_RESTORE_STACK \
4005 : /*out*/ "=r" (_res) \
4006 : /*in*/ "0" (&_argvec[0]) \
4007 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4009 lval = (__typeof__(lval)) _res; \
4012 #endif /* PLAT_arm64_linux */
4014 /* ------------------------- s390x-linux ------------------------- */
4016 #if defined(PLAT_s390x_linux)
4018 /* Similar workaround as amd64 (see above), but we use r11 as frame
4019 pointer and save the old r11 in r7. r11 might be used for
4020 argvec, therefore we copy argvec in r1 since r1 is clobbered
4021 after the call anyway. */
4022 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
4023 # define __FRAME_POINTER \
4024 ,"d"(__builtin_dwarf_cfa())
4025 # define VALGRIND_CFI_PROLOGUE \
4026 ".cfi_remember_state\n\t" \
4027 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
4030 ".cfi_def_cfa r11, 0\n\t"
4031 # define VALGRIND_CFI_EPILOGUE \
4033 ".cfi_restore_state\n\t"
4035 # define __FRAME_POINTER
4036 # define VALGRIND_CFI_PROLOGUE \
4038 # define VALGRIND_CFI_EPILOGUE
4041 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4042 according to the s390 GCC maintainer. (The ABI specification is not
4043 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4044 VALGRIND_RESTORE_STACK are not defined here. */
4046 /* These regs are trashed by the hidden call. Note that we overwrite
4047 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4048 function a proper return address. All others are ABI defined call
     clobbered registers. */
/* GPRs r0-r5, r14 (overwritten with the return address by
   s390_irgen_noredir -- see the comment above) and FPRs f0-f7 are
   treated as trashed by the hidden call. */
4050 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4051                            "f0","f1","f2","f3","f4","f5","f6","f7"
4053 /* Nb: Although r11 is modified in the asm snippets below (inside
4054 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4056 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4058 (2) GCC will complain that r11 cannot appear inside a clobber section,
4059 when compiled with -O -fno-omit-frame-pointer */
/* s390x: call a no-argument original (non-redirected) function and put
   its word result in lval.  _argvec[0] holds the target address; the asm
   loads it into r1 and branches via the NOREDIR sequence.  r7 is listed
   as trashed because it holds the saved r11 (see comment above).
   NOTE(review): interior lines of this macro ("do {", "__asm__
   volatile(", the result move and frame pop, ");", "} while (0)") were
   lost in extraction -- restore from upstream valgrind.h. */
4062 #define CALL_FN_W_v(lval, orig) \
4064 volatile OrigFn _orig = (orig); \
4065 volatile unsigned long _argvec[1]; \
4066 volatile unsigned long _res; \
4067 _argvec[0] = (unsigned long)_orig.nraddr; \
4069 VALGRIND_CFI_PROLOGUE \
4070 "aghi 15,-160\n\t" /* drop the stack pointer (r15) by 160 for the callee's frame */ \
4071 "lg 1, 0(1)\n\t" /* target->r1 */ \
4072 VALGRIND_CALL_NOREDIR_R1 \
4075 VALGRIND_CFI_EPILOGUE \
4076 : /*out*/ "=d" (_res) \
4077 : /*in*/ "0" (&_argvec[0]) \
4078 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4080 lval = (__typeof__(lval)) _res; \
4083 /* The call abi has the arguments in r2-r6 and stack */
/* s390x: call a 1-argument original function.  The call ABI puts
   arguments in r2-r6 (see comment above); arg1 travels via _argvec[1].
   NOTE(review): the argument/target loads ("lg 2, 8(1)", "lg 1, 0(1)")
   and the macro's do/while + asm scaffolding are missing from this
   extraction -- restore from upstream valgrind.h. */
4084 #define CALL_FN_W_W(lval, orig, arg1) \
4086 volatile OrigFn _orig = (orig); \
4087 volatile unsigned long _argvec[2]; \
4088 volatile unsigned long _res; \
4089 _argvec[0] = (unsigned long)_orig.nraddr; \
4090 _argvec[1] = (unsigned long)arg1; \
4092 VALGRIND_CFI_PROLOGUE \
4093 "aghi 15,-160\n\t" \
4096 VALGRIND_CALL_NOREDIR_R1 \
4099 VALGRIND_CFI_EPILOGUE \
4100 : /*out*/ "=d" (_res) \
4101 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4102 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4104 lval = (__typeof__(lval)) _res; \
4107 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4109 volatile OrigFn _orig = (orig); \
4110 volatile unsigned long _argvec[3]; \
4111 volatile unsigned long _res; \
4112 _argvec[0] = (unsigned long)_orig.nraddr; \
4113 _argvec[1] = (unsigned long)arg1; \
4114 _argvec[2] = (unsigned long)arg2; \
4116 VALGRIND_CFI_PROLOGUE \
4117 "aghi 15,-160\n\t" \
4121 VALGRIND_CALL_NOREDIR_R1 \
4124 VALGRIND_CFI_EPILOGUE \
4125 : /*out*/ "=d" (_res) \
4126 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4127 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4129 lval = (__typeof__(lval)) _res; \
4132 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4134 volatile OrigFn _orig = (orig); \
4135 volatile unsigned long _argvec[4]; \
4136 volatile unsigned long _res; \
4137 _argvec[0] = (unsigned long)_orig.nraddr; \
4138 _argvec[1] = (unsigned long)arg1; \
4139 _argvec[2] = (unsigned long)arg2; \
4140 _argvec[3] = (unsigned long)arg3; \
4142 VALGRIND_CFI_PROLOGUE \
4143 "aghi 15,-160\n\t" \
4148 VALGRIND_CALL_NOREDIR_R1 \
4151 VALGRIND_CFI_EPILOGUE \
4152 : /*out*/ "=d" (_res) \
4153 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4154 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4156 lval = (__typeof__(lval)) _res; \
4159 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4161 volatile OrigFn _orig = (orig); \
4162 volatile unsigned long _argvec[5]; \
4163 volatile unsigned long _res; \
4164 _argvec[0] = (unsigned long)_orig.nraddr; \
4165 _argvec[1] = (unsigned long)arg1; \
4166 _argvec[2] = (unsigned long)arg2; \
4167 _argvec[3] = (unsigned long)arg3; \
4168 _argvec[4] = (unsigned long)arg4; \
4170 VALGRIND_CFI_PROLOGUE \
4171 "aghi 15,-160\n\t" \
4177 VALGRIND_CALL_NOREDIR_R1 \
4180 VALGRIND_CFI_EPILOGUE \
4181 : /*out*/ "=d" (_res) \
4182 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4183 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4185 lval = (__typeof__(lval)) _res; \
4188 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4190 volatile OrigFn _orig = (orig); \
4191 volatile unsigned long _argvec[6]; \
4192 volatile unsigned long _res; \
4193 _argvec[0] = (unsigned long)_orig.nraddr; \
4194 _argvec[1] = (unsigned long)arg1; \
4195 _argvec[2] = (unsigned long)arg2; \
4196 _argvec[3] = (unsigned long)arg3; \
4197 _argvec[4] = (unsigned long)arg4; \
4198 _argvec[5] = (unsigned long)arg5; \
4200 VALGRIND_CFI_PROLOGUE \
4201 "aghi 15,-160\n\t" \
4208 VALGRIND_CALL_NOREDIR_R1 \
4211 VALGRIND_CFI_EPILOGUE \
4212 : /*out*/ "=d" (_res) \
4213 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4214 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4216 lval = (__typeof__(lval)) _res; \
/* s390x: call a 6-argument original function.  Args 1-5 go in registers
   (r2-r6, hence "6" joins the clobber list from this arity up); the 6th
   arg is copied by mvc from _argvec[6] (byte offset 48) into the first
   stack parameter slot at 160(r15), which is why the frame grows to 168
   instead of 160.  NOTE(review): the register-load lines and do/while +
   asm scaffolding are missing from this extraction -- restore from
   upstream valgrind.h. */
4219 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4222 volatile OrigFn _orig = (orig); \
4223 volatile unsigned long _argvec[7]; \
4224 volatile unsigned long _res; \
4225 _argvec[0] = (unsigned long)_orig.nraddr; \
4226 _argvec[1] = (unsigned long)arg1; \
4227 _argvec[2] = (unsigned long)arg2; \
4228 _argvec[3] = (unsigned long)arg3; \
4229 _argvec[4] = (unsigned long)arg4; \
4230 _argvec[5] = (unsigned long)arg5; \
4231 _argvec[6] = (unsigned long)arg6; \
4233 VALGRIND_CFI_PROLOGUE \
4234 "aghi 15,-168\n\t" /* 160-byte frame + 8 bytes for the one stack arg */ \
4240 "mvc 160(8,15), 48(1)\n\t" /* _argvec[6] -> stack parameter slot */ \
4242 VALGRIND_CALL_NOREDIR_R1 \
4245 VALGRIND_CFI_EPILOGUE \
4246 : /*out*/ "=d" (_res) \
4247 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4248 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4250 lval = (__typeof__(lval)) _res; \
4253 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4256 volatile OrigFn _orig = (orig); \
4257 volatile unsigned long _argvec[8]; \
4258 volatile unsigned long _res; \
4259 _argvec[0] = (unsigned long)_orig.nraddr; \
4260 _argvec[1] = (unsigned long)arg1; \
4261 _argvec[2] = (unsigned long)arg2; \
4262 _argvec[3] = (unsigned long)arg3; \
4263 _argvec[4] = (unsigned long)arg4; \
4264 _argvec[5] = (unsigned long)arg5; \
4265 _argvec[6] = (unsigned long)arg6; \
4266 _argvec[7] = (unsigned long)arg7; \
4268 VALGRIND_CFI_PROLOGUE \
4269 "aghi 15,-176\n\t" \
4275 "mvc 160(8,15), 48(1)\n\t" \
4276 "mvc 168(8,15), 56(1)\n\t" \
4278 VALGRIND_CALL_NOREDIR_R1 \
4281 VALGRIND_CFI_EPILOGUE \
4282 : /*out*/ "=d" (_res) \
4283 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4284 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4286 lval = (__typeof__(lval)) _res; \
4289 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4292 volatile OrigFn _orig = (orig); \
4293 volatile unsigned long _argvec[9]; \
4294 volatile unsigned long _res; \
4295 _argvec[0] = (unsigned long)_orig.nraddr; \
4296 _argvec[1] = (unsigned long)arg1; \
4297 _argvec[2] = (unsigned long)arg2; \
4298 _argvec[3] = (unsigned long)arg3; \
4299 _argvec[4] = (unsigned long)arg4; \
4300 _argvec[5] = (unsigned long)arg5; \
4301 _argvec[6] = (unsigned long)arg6; \
4302 _argvec[7] = (unsigned long)arg7; \
4303 _argvec[8] = (unsigned long)arg8; \
4305 VALGRIND_CFI_PROLOGUE \
4306 "aghi 15,-184\n\t" \
4312 "mvc 160(8,15), 48(1)\n\t" \
4313 "mvc 168(8,15), 56(1)\n\t" \
4314 "mvc 176(8,15), 64(1)\n\t" \
4316 VALGRIND_CALL_NOREDIR_R1 \
4319 VALGRIND_CFI_EPILOGUE \
4320 : /*out*/ "=d" (_res) \
4321 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4322 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4324 lval = (__typeof__(lval)) _res; \
4327 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4328 arg6, arg7 ,arg8, arg9) \
4330 volatile OrigFn _orig = (orig); \
4331 volatile unsigned long _argvec[10]; \
4332 volatile unsigned long _res; \
4333 _argvec[0] = (unsigned long)_orig.nraddr; \
4334 _argvec[1] = (unsigned long)arg1; \
4335 _argvec[2] = (unsigned long)arg2; \
4336 _argvec[3] = (unsigned long)arg3; \
4337 _argvec[4] = (unsigned long)arg4; \
4338 _argvec[5] = (unsigned long)arg5; \
4339 _argvec[6] = (unsigned long)arg6; \
4340 _argvec[7] = (unsigned long)arg7; \
4341 _argvec[8] = (unsigned long)arg8; \
4342 _argvec[9] = (unsigned long)arg9; \
4344 VALGRIND_CFI_PROLOGUE \
4345 "aghi 15,-192\n\t" \
4351 "mvc 160(8,15), 48(1)\n\t" \
4352 "mvc 168(8,15), 56(1)\n\t" \
4353 "mvc 176(8,15), 64(1)\n\t" \
4354 "mvc 184(8,15), 72(1)\n\t" \
4356 VALGRIND_CALL_NOREDIR_R1 \
4359 VALGRIND_CFI_EPILOGUE \
4360 : /*out*/ "=d" (_res) \
4361 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4362 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4364 lval = (__typeof__(lval)) _res; \
4367 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4368 arg6, arg7 ,arg8, arg9, arg10) \
4370 volatile OrigFn _orig = (orig); \
4371 volatile unsigned long _argvec[11]; \
4372 volatile unsigned long _res; \
4373 _argvec[0] = (unsigned long)_orig.nraddr; \
4374 _argvec[1] = (unsigned long)arg1; \
4375 _argvec[2] = (unsigned long)arg2; \
4376 _argvec[3] = (unsigned long)arg3; \
4377 _argvec[4] = (unsigned long)arg4; \
4378 _argvec[5] = (unsigned long)arg5; \
4379 _argvec[6] = (unsigned long)arg6; \
4380 _argvec[7] = (unsigned long)arg7; \
4381 _argvec[8] = (unsigned long)arg8; \
4382 _argvec[9] = (unsigned long)arg9; \
4383 _argvec[10] = (unsigned long)arg10; \
4385 VALGRIND_CFI_PROLOGUE \
4386 "aghi 15,-200\n\t" \
4392 "mvc 160(8,15), 48(1)\n\t" \
4393 "mvc 168(8,15), 56(1)\n\t" \
4394 "mvc 176(8,15), 64(1)\n\t" \
4395 "mvc 184(8,15), 72(1)\n\t" \
4396 "mvc 192(8,15), 80(1)\n\t" \
4398 VALGRIND_CALL_NOREDIR_R1 \
4401 VALGRIND_CFI_EPILOGUE \
4402 : /*out*/ "=d" (_res) \
4403 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4404 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4406 lval = (__typeof__(lval)) _res; \
4409 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4410 arg6, arg7 ,arg8, arg9, arg10, arg11) \
4412 volatile OrigFn _orig = (orig); \
4413 volatile unsigned long _argvec[12]; \
4414 volatile unsigned long _res; \
4415 _argvec[0] = (unsigned long)_orig.nraddr; \
4416 _argvec[1] = (unsigned long)arg1; \
4417 _argvec[2] = (unsigned long)arg2; \
4418 _argvec[3] = (unsigned long)arg3; \
4419 _argvec[4] = (unsigned long)arg4; \
4420 _argvec[5] = (unsigned long)arg5; \
4421 _argvec[6] = (unsigned long)arg6; \
4422 _argvec[7] = (unsigned long)arg7; \
4423 _argvec[8] = (unsigned long)arg8; \
4424 _argvec[9] = (unsigned long)arg9; \
4425 _argvec[10] = (unsigned long)arg10; \
4426 _argvec[11] = (unsigned long)arg11; \
4428 VALGRIND_CFI_PROLOGUE \
4429 "aghi 15,-208\n\t" \
4435 "mvc 160(8,15), 48(1)\n\t" \
4436 "mvc 168(8,15), 56(1)\n\t" \
4437 "mvc 176(8,15), 64(1)\n\t" \
4438 "mvc 184(8,15), 72(1)\n\t" \
4439 "mvc 192(8,15), 80(1)\n\t" \
4440 "mvc 200(8,15), 88(1)\n\t" \
4442 VALGRIND_CALL_NOREDIR_R1 \
4445 VALGRIND_CFI_EPILOGUE \
4446 : /*out*/ "=d" (_res) \
4447 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4448 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4450 lval = (__typeof__(lval)) _res; \
4453 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4454 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
4456 volatile OrigFn _orig = (orig); \
4457 volatile unsigned long _argvec[13]; \
4458 volatile unsigned long _res; \
4459 _argvec[0] = (unsigned long)_orig.nraddr; \
4460 _argvec[1] = (unsigned long)arg1; \
4461 _argvec[2] = (unsigned long)arg2; \
4462 _argvec[3] = (unsigned long)arg3; \
4463 _argvec[4] = (unsigned long)arg4; \
4464 _argvec[5] = (unsigned long)arg5; \
4465 _argvec[6] = (unsigned long)arg6; \
4466 _argvec[7] = (unsigned long)arg7; \
4467 _argvec[8] = (unsigned long)arg8; \
4468 _argvec[9] = (unsigned long)arg9; \
4469 _argvec[10] = (unsigned long)arg10; \
4470 _argvec[11] = (unsigned long)arg11; \
4471 _argvec[12] = (unsigned long)arg12; \
4473 VALGRIND_CFI_PROLOGUE \
4474 "aghi 15,-216\n\t" \
4480 "mvc 160(8,15), 48(1)\n\t" \
4481 "mvc 168(8,15), 56(1)\n\t" \
4482 "mvc 176(8,15), 64(1)\n\t" \
4483 "mvc 184(8,15), 72(1)\n\t" \
4484 "mvc 192(8,15), 80(1)\n\t" \
4485 "mvc 200(8,15), 88(1)\n\t" \
4486 "mvc 208(8,15), 96(1)\n\t" \
4488 VALGRIND_CALL_NOREDIR_R1 \
4491 VALGRIND_CFI_EPILOGUE \
4492 : /*out*/ "=d" (_res) \
4493 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4494 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4496 lval = (__typeof__(lval)) _res; \
4500 #endif /* PLAT_s390x_linux */
4502 /* ------------------------- mips32-linux ----------------------- */
4504 #if defined(PLAT_mips32_linux)
4506 /* These regs are trashed by the hidden call. */
4507 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4508 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4511 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned long) == 4. */
/* mips32: call a no-argument original function.  The asm saves $28 (gp)
   and $31 (ra) on the stack, reserves the o32 16-byte outgoing-argument
   area, loads the target into $25 (t9, per the MIPS PIC convention noted
   in the inline comment), makes the NOREDIR call, then unwinds and
   restores gp/ra.  NOTE(review): the "do {", "__asm__ volatile(",
   result-move, ");" and "} while (0)" lines are missing from this
   extraction -- restore from upstream valgrind.h. */
4514 #define CALL_FN_W_v(lval, orig) \
4516 volatile OrigFn _orig = (orig); \
4517 volatile unsigned long _argvec[1]; \
4518 volatile unsigned long _res; \
4519 _argvec[0] = (unsigned long)_orig.nraddr; \
4521 "subu $29, $29, 8 \n\t" \
4522 "sw $28, 0($29) \n\t" /* save gp */ \
4523 "sw $31, 4($29) \n\t" /* save ra */ \
4524 "subu $29, $29, 16 \n\t" \
4525 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4526 VALGRIND_CALL_NOREDIR_T9 \
4527 "addu $29, $29, 16\n\t" \
4528 "lw $28, 0($29) \n\t" /* restore gp */ \
4529 "lw $31, 4($29) \n\t" /* restore ra */ \
4530 "addu $29, $29, 8 \n\t" \
4532 : /*out*/ "=r" (_res) \
4533 : /*in*/ "0" (&_argvec[0]) \
4534 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4536 lval = (__typeof__(lval)) _res; \
4539 #define CALL_FN_W_W(lval, orig, arg1) \
4541 volatile OrigFn _orig = (orig); \
4542 volatile unsigned long _argvec[2]; \
4543 volatile unsigned long _res; \
4544 _argvec[0] = (unsigned long)_orig.nraddr; \
4545 _argvec[1] = (unsigned long)(arg1); \
4547 "subu $29, $29, 8 \n\t" \
4548 "sw $28, 0($29) \n\t" \
4549 "sw $31, 4($29) \n\t" \
4550 "subu $29, $29, 16 \n\t" \
4551 "lw $4, 4(%1) \n\t" /* arg1*/ \
4552 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4553 VALGRIND_CALL_NOREDIR_T9 \
4554 "addu $29, $29, 16 \n\t" \
4555 "lw $28, 0($29) \n\t" \
4556 "lw $31, 4($29) \n\t" \
4557 "addu $29, $29, 8 \n\t" \
4559 : /*out*/ "=r" (_res) \
4560 : /*in*/ "0" (&_argvec[0]) \
4561 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4563 lval = (__typeof__(lval)) _res; \
4566 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4568 volatile OrigFn _orig = (orig); \
4569 volatile unsigned long _argvec[3]; \
4570 volatile unsigned long _res; \
4571 _argvec[0] = (unsigned long)_orig.nraddr; \
4572 _argvec[1] = (unsigned long)(arg1); \
4573 _argvec[2] = (unsigned long)(arg2); \
4575 "subu $29, $29, 8 \n\t" \
4576 "sw $28, 0($29) \n\t" \
4577 "sw $31, 4($29) \n\t" \
4578 "subu $29, $29, 16 \n\t" \
4579 "lw $4, 4(%1) \n\t" \
4580 "lw $5, 8(%1) \n\t" \
4581 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4582 VALGRIND_CALL_NOREDIR_T9 \
4583 "addu $29, $29, 16 \n\t" \
4584 "lw $28, 0($29) \n\t" \
4585 "lw $31, 4($29) \n\t" \
4586 "addu $29, $29, 8 \n\t" \
4588 : /*out*/ "=r" (_res) \
4589 : /*in*/ "0" (&_argvec[0]) \
4590 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4592 lval = (__typeof__(lval)) _res; \
4595 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4597 volatile OrigFn _orig = (orig); \
4598 volatile unsigned long _argvec[4]; \
4599 volatile unsigned long _res; \
4600 _argvec[0] = (unsigned long)_orig.nraddr; \
4601 _argvec[1] = (unsigned long)(arg1); \
4602 _argvec[2] = (unsigned long)(arg2); \
4603 _argvec[3] = (unsigned long)(arg3); \
4605 "subu $29, $29, 8 \n\t" \
4606 "sw $28, 0($29) \n\t" \
4607 "sw $31, 4($29) \n\t" \
4608 "subu $29, $29, 16 \n\t" \
4609 "lw $4, 4(%1) \n\t" \
4610 "lw $5, 8(%1) \n\t" \
4611 "lw $6, 12(%1) \n\t" \
4612 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4613 VALGRIND_CALL_NOREDIR_T9 \
4614 "addu $29, $29, 16 \n\t" \
4615 "lw $28, 0($29) \n\t" \
4616 "lw $31, 4($29) \n\t" \
4617 "addu $29, $29, 8 \n\t" \
4619 : /*out*/ "=r" (_res) \
4620 : /*in*/ "0" (&_argvec[0]) \
4621 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4623 lval = (__typeof__(lval)) _res; \
4626 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4628 volatile OrigFn _orig = (orig); \
4629 volatile unsigned long _argvec[5]; \
4630 volatile unsigned long _res; \
4631 _argvec[0] = (unsigned long)_orig.nraddr; \
4632 _argvec[1] = (unsigned long)(arg1); \
4633 _argvec[2] = (unsigned long)(arg2); \
4634 _argvec[3] = (unsigned long)(arg3); \
4635 _argvec[4] = (unsigned long)(arg4); \
4637 "subu $29, $29, 8 \n\t" \
4638 "sw $28, 0($29) \n\t" \
4639 "sw $31, 4($29) \n\t" \
4640 "subu $29, $29, 16 \n\t" \
4641 "lw $4, 4(%1) \n\t" \
4642 "lw $5, 8(%1) \n\t" \
4643 "lw $6, 12(%1) \n\t" \
4644 "lw $7, 16(%1) \n\t" \
4645 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4646 VALGRIND_CALL_NOREDIR_T9 \
4647 "addu $29, $29, 16 \n\t" \
4648 "lw $28, 0($29) \n\t" \
4649 "lw $31, 4($29) \n\t" \
4650 "addu $29, $29, 8 \n\t" \
4652 : /*out*/ "=r" (_res) \
4653 : /*in*/ "0" (&_argvec[0]) \
4654 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4656 lval = (__typeof__(lval)) _res; \
/* mips32: call a 5-argument original function.  o32 passes args 1-4 in
   $4-$7 (a0-a3); the 5th arg is staged through $4 into the stack slot at
   sp+16, which is why the frame grows to 24 here and $4 is reloaded with
   arg1 afterwards.  gp/ra are saved and restored around the call as in
   the other mips32 variants.  NOTE(review): the "do {" / "__asm__
   volatile(" / result-move / "} while (0)" scaffolding is missing from
   this extraction -- restore from upstream valgrind.h. */
4659 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4661 volatile OrigFn _orig = (orig); \
4662 volatile unsigned long _argvec[6]; \
4663 volatile unsigned long _res; \
4664 _argvec[0] = (unsigned long)_orig.nraddr; \
4665 _argvec[1] = (unsigned long)(arg1); \
4666 _argvec[2] = (unsigned long)(arg2); \
4667 _argvec[3] = (unsigned long)(arg3); \
4668 _argvec[4] = (unsigned long)(arg4); \
4669 _argvec[5] = (unsigned long)(arg5); \
4671 "subu $29, $29, 8 \n\t" \
4672 "sw $28, 0($29) \n\t" /* save gp */ \
4673 "sw $31, 4($29) \n\t" /* save ra */ \
4674 "lw $4, 20(%1) \n\t" /* arg5, staged via $4 */ \
4675 "subu $29, $29, 24\n\t" \
4676 "sw $4, 16($29) \n\t" /* arg5 -> stack slot */ \
4677 "lw $4, 4(%1) \n\t" \
4678 "lw $5, 8(%1) \n\t" \
4679 "lw $6, 12(%1) \n\t" \
4680 "lw $7, 16(%1) \n\t" \
4681 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4682 VALGRIND_CALL_NOREDIR_T9 \
4683 "addu $29, $29, 24 \n\t" \
4684 "lw $28, 0($29) \n\t" /* restore gp */ \
4685 "lw $31, 4($29) \n\t" /* restore ra */ \
4686 "addu $29, $29, 8 \n\t" \
4688 : /*out*/ "=r" (_res) \
4689 : /*in*/ "0" (&_argvec[0]) \
4690 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4692 lval = (__typeof__(lval)) _res; \
4694 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4696 volatile OrigFn _orig = (orig); \
4697 volatile unsigned long _argvec[7]; \
4698 volatile unsigned long _res; \
4699 _argvec[0] = (unsigned long)_orig.nraddr; \
4700 _argvec[1] = (unsigned long)(arg1); \
4701 _argvec[2] = (unsigned long)(arg2); \
4702 _argvec[3] = (unsigned long)(arg3); \
4703 _argvec[4] = (unsigned long)(arg4); \
4704 _argvec[5] = (unsigned long)(arg5); \
4705 _argvec[6] = (unsigned long)(arg6); \
4707 "subu $29, $29, 8 \n\t" \
4708 "sw $28, 0($29) \n\t" \
4709 "sw $31, 4($29) \n\t" \
4710 "lw $4, 20(%1) \n\t" \
4711 "subu $29, $29, 32\n\t" \
4712 "sw $4, 16($29) \n\t" \
4713 "lw $4, 24(%1) \n\t" \
4715 "sw $4, 20($29) \n\t" \
4716 "lw $4, 4(%1) \n\t" \
4717 "lw $5, 8(%1) \n\t" \
4718 "lw $6, 12(%1) \n\t" \
4719 "lw $7, 16(%1) \n\t" \
4720 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4721 VALGRIND_CALL_NOREDIR_T9 \
4722 "addu $29, $29, 32 \n\t" \
4723 "lw $28, 0($29) \n\t" \
4724 "lw $31, 4($29) \n\t" \
4725 "addu $29, $29, 8 \n\t" \
4727 : /*out*/ "=r" (_res) \
4728 : /*in*/ "0" (&_argvec[0]) \
4729 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4731 lval = (__typeof__(lval)) _res; \
4734 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4737 volatile OrigFn _orig = (orig); \
4738 volatile unsigned long _argvec[8]; \
4739 volatile unsigned long _res; \
4740 _argvec[0] = (unsigned long)_orig.nraddr; \
4741 _argvec[1] = (unsigned long)(arg1); \
4742 _argvec[2] = (unsigned long)(arg2); \
4743 _argvec[3] = (unsigned long)(arg3); \
4744 _argvec[4] = (unsigned long)(arg4); \
4745 _argvec[5] = (unsigned long)(arg5); \
4746 _argvec[6] = (unsigned long)(arg6); \
4747 _argvec[7] = (unsigned long)(arg7); \
4749 "subu $29, $29, 8 \n\t" \
4750 "sw $28, 0($29) \n\t" \
4751 "sw $31, 4($29) \n\t" \
4752 "lw $4, 20(%1) \n\t" \
4753 "subu $29, $29, 32\n\t" \
4754 "sw $4, 16($29) \n\t" \
4755 "lw $4, 24(%1) \n\t" \
4756 "sw $4, 20($29) \n\t" \
4757 "lw $4, 28(%1) \n\t" \
4758 "sw $4, 24($29) \n\t" \
4759 "lw $4, 4(%1) \n\t" \
4760 "lw $5, 8(%1) \n\t" \
4761 "lw $6, 12(%1) \n\t" \
4762 "lw $7, 16(%1) \n\t" \
4763 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4764 VALGRIND_CALL_NOREDIR_T9 \
4765 "addu $29, $29, 32 \n\t" \
4766 "lw $28, 0($29) \n\t" \
4767 "lw $31, 4($29) \n\t" \
4768 "addu $29, $29, 8 \n\t" \
4770 : /*out*/ "=r" (_res) \
4771 : /*in*/ "0" (&_argvec[0]) \
4772 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4774 lval = (__typeof__(lval)) _res; \
4777 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4780 volatile OrigFn _orig = (orig); \
4781 volatile unsigned long _argvec[9]; \
4782 volatile unsigned long _res; \
4783 _argvec[0] = (unsigned long)_orig.nraddr; \
4784 _argvec[1] = (unsigned long)(arg1); \
4785 _argvec[2] = (unsigned long)(arg2); \
4786 _argvec[3] = (unsigned long)(arg3); \
4787 _argvec[4] = (unsigned long)(arg4); \
4788 _argvec[5] = (unsigned long)(arg5); \
4789 _argvec[6] = (unsigned long)(arg6); \
4790 _argvec[7] = (unsigned long)(arg7); \
4791 _argvec[8] = (unsigned long)(arg8); \
4793 "subu $29, $29, 8 \n\t" \
4794 "sw $28, 0($29) \n\t" \
4795 "sw $31, 4($29) \n\t" \
4796 "lw $4, 20(%1) \n\t" \
4797 "subu $29, $29, 40\n\t" \
4798 "sw $4, 16($29) \n\t" \
4799 "lw $4, 24(%1) \n\t" \
4800 "sw $4, 20($29) \n\t" \
4801 "lw $4, 28(%1) \n\t" \
4802 "sw $4, 24($29) \n\t" \
4803 "lw $4, 32(%1) \n\t" \
4804 "sw $4, 28($29) \n\t" \
4805 "lw $4, 4(%1) \n\t" \
4806 "lw $5, 8(%1) \n\t" \
4807 "lw $6, 12(%1) \n\t" \
4808 "lw $7, 16(%1) \n\t" \
4809 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4810 VALGRIND_CALL_NOREDIR_T9 \
4811 "addu $29, $29, 40 \n\t" \
4812 "lw $28, 0($29) \n\t" \
4813 "lw $31, 4($29) \n\t" \
4814 "addu $29, $29, 8 \n\t" \
4816 : /*out*/ "=r" (_res) \
4817 : /*in*/ "0" (&_argvec[0]) \
4818 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4820 lval = (__typeof__(lval)) _res; \
4823 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4826 volatile OrigFn _orig = (orig); \
4827 volatile unsigned long _argvec[10]; \
4828 volatile unsigned long _res; \
4829 _argvec[0] = (unsigned long)_orig.nraddr; \
4830 _argvec[1] = (unsigned long)(arg1); \
4831 _argvec[2] = (unsigned long)(arg2); \
4832 _argvec[3] = (unsigned long)(arg3); \
4833 _argvec[4] = (unsigned long)(arg4); \
4834 _argvec[5] = (unsigned long)(arg5); \
4835 _argvec[6] = (unsigned long)(arg6); \
4836 _argvec[7] = (unsigned long)(arg7); \
4837 _argvec[8] = (unsigned long)(arg8); \
4838 _argvec[9] = (unsigned long)(arg9); \
4840 "subu $29, $29, 8 \n\t" \
4841 "sw $28, 0($29) \n\t" \
4842 "sw $31, 4($29) \n\t" \
4843 "lw $4, 20(%1) \n\t" \
4844 "subu $29, $29, 40\n\t" \
4845 "sw $4, 16($29) \n\t" \
4846 "lw $4, 24(%1) \n\t" \
4847 "sw $4, 20($29) \n\t" \
4848 "lw $4, 28(%1) \n\t" \
4849 "sw $4, 24($29) \n\t" \
4850 "lw $4, 32(%1) \n\t" \
4851 "sw $4, 28($29) \n\t" \
4852 "lw $4, 36(%1) \n\t" \
4853 "sw $4, 32($29) \n\t" \
4854 "lw $4, 4(%1) \n\t" \
4855 "lw $5, 8(%1) \n\t" \
4856 "lw $6, 12(%1) \n\t" \
4857 "lw $7, 16(%1) \n\t" \
4858 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4859 VALGRIND_CALL_NOREDIR_T9 \
4860 "addu $29, $29, 40 \n\t" \
4861 "lw $28, 0($29) \n\t" \
4862 "lw $31, 4($29) \n\t" \
4863 "addu $29, $29, 8 \n\t" \
4865 : /*out*/ "=r" (_res) \
4866 : /*in*/ "0" (&_argvec[0]) \
4867 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4869 lval = (__typeof__(lval)) _res; \
4872 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4873 arg7,arg8,arg9,arg10) \
4875 volatile OrigFn _orig = (orig); \
4876 volatile unsigned long _argvec[11]; \
4877 volatile unsigned long _res; \
4878 _argvec[0] = (unsigned long)_orig.nraddr; \
4879 _argvec[1] = (unsigned long)(arg1); \
4880 _argvec[2] = (unsigned long)(arg2); \
4881 _argvec[3] = (unsigned long)(arg3); \
4882 _argvec[4] = (unsigned long)(arg4); \
4883 _argvec[5] = (unsigned long)(arg5); \
4884 _argvec[6] = (unsigned long)(arg6); \
4885 _argvec[7] = (unsigned long)(arg7); \
4886 _argvec[8] = (unsigned long)(arg8); \
4887 _argvec[9] = (unsigned long)(arg9); \
4888 _argvec[10] = (unsigned long)(arg10); \
4890 "subu $29, $29, 8 \n\t" \
4891 "sw $28, 0($29) \n\t" \
4892 "sw $31, 4($29) \n\t" \
4893 "lw $4, 20(%1) \n\t" \
4894 "subu $29, $29, 48\n\t" \
4895 "sw $4, 16($29) \n\t" \
4896 "lw $4, 24(%1) \n\t" \
4897 "sw $4, 20($29) \n\t" \
4898 "lw $4, 28(%1) \n\t" \
4899 "sw $4, 24($29) \n\t" \
4900 "lw $4, 32(%1) \n\t" \
4901 "sw $4, 28($29) \n\t" \
4902 "lw $4, 36(%1) \n\t" \
4903 "sw $4, 32($29) \n\t" \
4904 "lw $4, 40(%1) \n\t" \
4905 "sw $4, 36($29) \n\t" \
4906 "lw $4, 4(%1) \n\t" \
4907 "lw $5, 8(%1) \n\t" \
4908 "lw $6, 12(%1) \n\t" \
4909 "lw $7, 16(%1) \n\t" \
4910 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4911 VALGRIND_CALL_NOREDIR_T9 \
4912 "addu $29, $29, 48 \n\t" \
4913 "lw $28, 0($29) \n\t" \
4914 "lw $31, 4($29) \n\t" \
4915 "addu $29, $29, 8 \n\t" \
4917 : /*out*/ "=r" (_res) \
4918 : /*in*/ "0" (&_argvec[0]) \
4919 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4921 lval = (__typeof__(lval)) _res; \
4924 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4925 arg6,arg7,arg8,arg9,arg10, \
4928 volatile OrigFn _orig = (orig); \
4929 volatile unsigned long _argvec[12]; \
4930 volatile unsigned long _res; \
4931 _argvec[0] = (unsigned long)_orig.nraddr; \
4932 _argvec[1] = (unsigned long)(arg1); \
4933 _argvec[2] = (unsigned long)(arg2); \
4934 _argvec[3] = (unsigned long)(arg3); \
4935 _argvec[4] = (unsigned long)(arg4); \
4936 _argvec[5] = (unsigned long)(arg5); \
4937 _argvec[6] = (unsigned long)(arg6); \
4938 _argvec[7] = (unsigned long)(arg7); \
4939 _argvec[8] = (unsigned long)(arg8); \
4940 _argvec[9] = (unsigned long)(arg9); \
4941 _argvec[10] = (unsigned long)(arg10); \
4942 _argvec[11] = (unsigned long)(arg11); \
4944 "subu $29, $29, 8 \n\t" \
4945 "sw $28, 0($29) \n\t" \
4946 "sw $31, 4($29) \n\t" \
4947 "lw $4, 20(%1) \n\t" \
4948 "subu $29, $29, 48\n\t" \
4949 "sw $4, 16($29) \n\t" \
4950 "lw $4, 24(%1) \n\t" \
4951 "sw $4, 20($29) \n\t" \
4952 "lw $4, 28(%1) \n\t" \
4953 "sw $4, 24($29) \n\t" \
4954 "lw $4, 32(%1) \n\t" \
4955 "sw $4, 28($29) \n\t" \
4956 "lw $4, 36(%1) \n\t" \
4957 "sw $4, 32($29) \n\t" \
4958 "lw $4, 40(%1) \n\t" \
4959 "sw $4, 36($29) \n\t" \
4960 "lw $4, 44(%1) \n\t" \
4961 "sw $4, 40($29) \n\t" \
4962 "lw $4, 4(%1) \n\t" \
4963 "lw $5, 8(%1) \n\t" \
4964 "lw $6, 12(%1) \n\t" \
4965 "lw $7, 16(%1) \n\t" \
4966 "lw $25, 0(%1) \n\t" /* target->t9 */ \
4967 VALGRIND_CALL_NOREDIR_T9 \
4968 "addu $29, $29, 48 \n\t" \
4969 "lw $28, 0($29) \n\t" \
4970 "lw $31, 4($29) \n\t" \
4971 "addu $29, $29, 8 \n\t" \
4973 : /*out*/ "=r" (_res) \
4974 : /*in*/ "0" (&_argvec[0]) \
4975 : /*trash*/ "memory", __CALLER_SAVED_REGS \
4977 lval = (__typeof__(lval)) _res; \
4980 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4981 arg6,arg7,arg8,arg9,arg10, \
4984 volatile OrigFn _orig = (orig); \
4985 volatile unsigned long _argvec[13]; \
4986 volatile unsigned long _res; \
4987 _argvec[0] = (unsigned long)_orig.nraddr; \
4988 _argvec[1] = (unsigned long)(arg1); \
4989 _argvec[2] = (unsigned long)(arg2); \
4990 _argvec[3] = (unsigned long)(arg3); \
4991 _argvec[4] = (unsigned long)(arg4); \
4992 _argvec[5] = (unsigned long)(arg5); \
4993 _argvec[6] = (unsigned long)(arg6); \
4994 _argvec[7] = (unsigned long)(arg7); \
4995 _argvec[8] = (unsigned long)(arg8); \
4996 _argvec[9] = (unsigned long)(arg9); \
4997 _argvec[10] = (unsigned long)(arg10); \
4998 _argvec[11] = (unsigned long)(arg11); \
4999 _argvec[12] = (unsigned long)(arg12); \
5001 "subu $29, $29, 8 \n\t" \
5002 "sw $28, 0($29) \n\t" \
5003 "sw $31, 4($29) \n\t" \
5004 "lw $4, 20(%1) \n\t" \
5005 "subu $29, $29, 56\n\t" \
5006 "sw $4, 16($29) \n\t" \
5007 "lw $4, 24(%1) \n\t" \
5008 "sw $4, 20($29) \n\t" \
5009 "lw $4, 28(%1) \n\t" \
5010 "sw $4, 24($29) \n\t" \
5011 "lw $4, 32(%1) \n\t" \
5012 "sw $4, 28($29) \n\t" \
5013 "lw $4, 36(%1) \n\t" \
5014 "sw $4, 32($29) \n\t" \
5015 "lw $4, 40(%1) \n\t" \
5016 "sw $4, 36($29) \n\t" \
5017 "lw $4, 44(%1) \n\t" \
5018 "sw $4, 40($29) \n\t" \
5019 "lw $4, 48(%1) \n\t" \
5020 "sw $4, 44($29) \n\t" \
5021 "lw $4, 4(%1) \n\t" \
5022 "lw $5, 8(%1) \n\t" \
5023 "lw $6, 12(%1) \n\t" \
5024 "lw $7, 16(%1) \n\t" \
5025 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5026 VALGRIND_CALL_NOREDIR_T9 \
5027 "addu $29, $29, 56 \n\t" \
5028 "lw $28, 0($29) \n\t" \
5029 "lw $31, 4($29) \n\t" \
5030 "addu $29, $29, 8 \n\t" \
5032 : /*out*/ "=r" (_res) \
5033 : /*in*/ "r" (&_argvec[0]) \
5034 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5036 lval = (__typeof__(lval)) _res; \
5039 #endif /* PLAT_mips32_linux */
5041 /* ------------------------- mips64-linux ------------------------- */
5043 #if defined(PLAT_mips64_linux)
5045 /* These regs are trashed by the hidden call. */
5046 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
5047 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
5050 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned long) == 8. */
/* mips64: call a no-argument original function.  Unlike the mips32
   variants there is no explicit gp/ra spill or outgoing-argument area in
   view; the target is loaded into $25 (t9) and called via the NOREDIR
   sequence.  NOTE(review): the "do {" / "__asm__ volatile(" /
   result-move / "} while (0)" lines are missing from this extraction --
   restore from upstream valgrind.h. */
5053 #define CALL_FN_W_v(lval, orig) \
5055 volatile OrigFn _orig = (orig); \
5056 volatile unsigned long _argvec[1]; \
5057 volatile unsigned long _res; \
5058 _argvec[0] = (unsigned long)_orig.nraddr; \
5060 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5061 VALGRIND_CALL_NOREDIR_T9 \
5063 : /*out*/ "=r" (_res) \
5064 : /*in*/ "0" (&_argvec[0]) \
5065 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5067 lval = (__typeof__(lval)) _res; \
5070 #define CALL_FN_W_W(lval, orig, arg1) \
5072 volatile OrigFn _orig = (orig); \
5073 volatile unsigned long _argvec[2]; \
5074 volatile unsigned long _res; \
5075 _argvec[0] = (unsigned long)_orig.nraddr; \
5076 _argvec[1] = (unsigned long)(arg1); \
5078 "ld $4, 8(%1)\n\t" /* arg1*/ \
5079 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5080 VALGRIND_CALL_NOREDIR_T9 \
5082 : /*out*/ "=r" (_res) \
5083 : /*in*/ "r" (&_argvec[0]) \
5084 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5086 lval = (__typeof__(lval)) _res; \
5089 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5091 volatile OrigFn _orig = (orig); \
5092 volatile unsigned long _argvec[3]; \
5093 volatile unsigned long _res; \
5094 _argvec[0] = (unsigned long)_orig.nraddr; \
5095 _argvec[1] = (unsigned long)(arg1); \
5096 _argvec[2] = (unsigned long)(arg2); \
5098 "ld $4, 8(%1)\n\t" \
5099 "ld $5, 16(%1)\n\t" \
5100 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5101 VALGRIND_CALL_NOREDIR_T9 \
5103 : /*out*/ "=r" (_res) \
5104 : /*in*/ "r" (&_argvec[0]) \
5105 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5107 lval = (__typeof__(lval)) _res; \
5110 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5112 volatile OrigFn _orig = (orig); \
5113 volatile unsigned long _argvec[4]; \
5114 volatile unsigned long _res; \
5115 _argvec[0] = (unsigned long)_orig.nraddr; \
5116 _argvec[1] = (unsigned long)(arg1); \
5117 _argvec[2] = (unsigned long)(arg2); \
5118 _argvec[3] = (unsigned long)(arg3); \
5120 "ld $4, 8(%1)\n\t" \
5121 "ld $5, 16(%1)\n\t" \
5122 "ld $6, 24(%1)\n\t" \
5123 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5124 VALGRIND_CALL_NOREDIR_T9 \
5126 : /*out*/ "=r" (_res) \
5127 : /*in*/ "r" (&_argvec[0]) \
5128 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5130 lval = (__typeof__(lval)) _res; \
5133 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5135 volatile OrigFn _orig = (orig); \
5136 volatile unsigned long _argvec[5]; \
5137 volatile unsigned long _res; \
5138 _argvec[0] = (unsigned long)_orig.nraddr; \
5139 _argvec[1] = (unsigned long)(arg1); \
5140 _argvec[2] = (unsigned long)(arg2); \
5141 _argvec[3] = (unsigned long)(arg3); \
5142 _argvec[4] = (unsigned long)(arg4); \
5144 "ld $4, 8(%1)\n\t" \
5145 "ld $5, 16(%1)\n\t" \
5146 "ld $6, 24(%1)\n\t" \
5147 "ld $7, 32(%1)\n\t" \
5148 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5149 VALGRIND_CALL_NOREDIR_T9 \
5151 : /*out*/ "=r" (_res) \
5152 : /*in*/ "r" (&_argvec[0]) \
5153 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5155 lval = (__typeof__(lval)) _res; \
5158 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5160 volatile OrigFn _orig = (orig); \
5161 volatile unsigned long _argvec[6]; \
5162 volatile unsigned long _res; \
5163 _argvec[0] = (unsigned long)_orig.nraddr; \
5164 _argvec[1] = (unsigned long)(arg1); \
5165 _argvec[2] = (unsigned long)(arg2); \
5166 _argvec[3] = (unsigned long)(arg3); \
5167 _argvec[4] = (unsigned long)(arg4); \
5168 _argvec[5] = (unsigned long)(arg5); \
5170 "ld $4, 8(%1)\n\t" \
5171 "ld $5, 16(%1)\n\t" \
5172 "ld $6, 24(%1)\n\t" \
5173 "ld $7, 32(%1)\n\t" \
5174 "ld $8, 40(%1)\n\t" \
5175 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5176 VALGRIND_CALL_NOREDIR_T9 \
5178 : /*out*/ "=r" (_res) \
5179 : /*in*/ "r" (&_argvec[0]) \
5180 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5182 lval = (__typeof__(lval)) _res; \
/* Call a 6-arg function via the non-redirected entry point.
   Args 1..6 in $4..$9; target in $25 (t9); result in $2. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-arg function via the non-redirected entry point.
   Args 1..7 in $4..$10; target in $25 (t9); result in $2. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-arg function via the non-redirected entry point.
   Args 1..8 in $4..$11 (the full n64 register-argument set);
   target in $25 (t9); result in $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg function via the non-redirected entry point.
   Args 1..8 in $4..$11; arg9 is spilled to the stack ($29 is sp),
   which is dropped by 8 bytes around the call; target in $25 (t9). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"  /* arg9 -> stack */                 \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg function via the non-redirected entry point.
   Args 1..8 in $4..$11; args 9..10 spilled to the stack (16 bytes);
   target in $25 (t9). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"  /* arg9 -> stack */                 \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"  /* arg10 -> stack */                \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg function via the non-redirected entry point.
   Args 1..8 in $4..$11; args 9..11 spilled to the stack (24 bytes);
   target in $25 (t9). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"  /* arg9 -> stack */                 \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"  /* arg10 -> stack */                \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t" /* arg11 -> stack */                \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5407 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5408 arg6,arg7,arg8,arg9,arg10, \
5411 volatile OrigFn _orig = (orig); \
5412 volatile unsigned long _argvec[13]; \
5413 volatile unsigned long _res; \
5414 _argvec[0] = (unsigned long)_orig.nraddr; \
5415 _argvec[1] = (unsigned long)(arg1); \
5416 _argvec[2] = (unsigned long)(arg2); \
5417 _argvec[3] = (unsigned long)(arg3); \
5418 _argvec[4] = (unsigned long)(arg4); \
5419 _argvec[5] = (unsigned long)(arg5); \
5420 _argvec[6] = (unsigned long)(arg6); \
5421 _argvec[7] = (unsigned long)(arg7); \
5422 _argvec[8] = (unsigned long)(arg8); \
5423 _argvec[9] = (unsigned long)(arg9); \
5424 _argvec[10] = (unsigned long)(arg10); \
5425 _argvec[11] = (unsigned long)(arg11); \
5426 _argvec[12] = (unsigned long)(arg12); \
5428 "dsubu $29, $29, 32\n\t" \
5429 "ld $4, 72(%1)\n\t" \
5430 "sd $4, 0($29)\n\t" \
5431 "ld $4, 80(%1)\n\t" \
5432 "sd $4, 8($29)\n\t" \
5433 "ld $4, 88(%1)\n\t" \
5434 "sd $4, 16($29)\n\t" \
5435 "ld $4, 96(%1)\n\t" \
5436 "sd $4, 24($29)\n\t" \
5437 "ld $4, 8(%1)\n\t" \
5438 "ld $5, 16(%1)\n\t" \
5439 "ld $6, 24(%1)\n\t" \
5440 "ld $7, 32(%1)\n\t" \
5441 "ld $8, 40(%1)\n\t" \
5442 "ld $9, 48(%1)\n\t" \
5443 "ld $10, 56(%1)\n\t" \
5444 "ld $11, 64(%1)\n\t" \
5445 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5446 VALGRIND_CALL_NOREDIR_T9 \
5447 "daddu $29, $29, 32\n\t" \
5449 : /*out*/ "=r" (_res) \
5450 : /*in*/ "r" (&_argvec[0]) \
5451 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5453 lval = (__typeof__(lval)) _res; \
5456 #endif /* PLAT_mips64_linux */
/* ------------------------------------------------------------------ */
/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS.               */
/* ------------------------------------------------------------------ */

/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000. */
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Build a tool-private request-code base from two identifying chars. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool identified by (a,b). */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF           = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
/* Non-GCC compilers have no __extension__ keyword; make it vanish. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,   \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
5580 /* These requests are for getting Valgrind itself to print something.
5581 Possibly with a backtrace. This is a really ugly hack. The return value
5582 is the number of characters printed, excluding the "**<pid>** " part at the
5583 start and the backtrace (if present). */
5585 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5586 /* Modern GCC will optimize the static routine out if unused,
5587 and unused attribute will shut down warnings about it. */
5588 static int VALGRIND_PRINTF(const char *format, ...)
5589 __attribute__((format(__printf__, 1, 2), __unused__));
5592 #if defined(_MSC_VER)
5595 VALGRIND_PRINTF(const char *format, ...)
5597 #if defined(NVALGRIND)
5599 #else /* NVALGRIND */
5600 #if defined(_MSC_VER) || defined(__MINGW64__)
5603 unsigned long _qzz_res;
5606 va_start(vargs, format);
5607 #if defined(_MSC_VER) || defined(__MINGW64__)
5608 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5609 VG_USERREQ__PRINTF_VALIST_BY_REF,
5614 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5615 VG_USERREQ__PRINTF_VALIST_BY_REF,
5616 (unsigned long)format,
5617 (unsigned long)&vargs,
5621 return (int)_qzz_res;
5622 #endif /* NVALGRIND */
5625 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5626 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5627 __attribute__((format(__printf__, 1, 2), __unused__));
5630 #if defined(_MSC_VER)
5633 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5635 #if defined(NVALGRIND)
5637 #else /* NVALGRIND */
5638 #if defined(_MSC_VER) || defined(__MINGW64__)
5641 unsigned long _qzz_res;
5644 va_start(vargs, format);
5645 #if defined(_MSC_VER) || defined(__MINGW64__)
5646 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5647 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5652 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5653 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5654 (unsigned long)format,
5655 (unsigned long)&vargs,
5659 return (int)_qzz_res;
5660 #endif /* NVALGRIND */
5664 /* These requests allow control to move from the simulated CPU to the
5665 real CPU, calling an arbitary function.
5667 Note that the current ThreadId is inserted as the first argument.
5670 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5672 requires f to have this signature:
5674 Word f(Word tid, Word arg1, Word arg2)
5676 where "Word" is a word-sized type.
5678 Note that these client requests are not entirely reliable. For example,
5679 if you call a function with them that subsequently calls printf(),
5680 there's a high chance Valgrind will crash. Generally, your prospects of
5681 these working are made higher if the called function does not refer to
5682 any global variables, and does not refer to any libc or other functions
5683 (printf et al). Any kind of entanglement with libc or dynamic linking is
5684 likely to have a bad outcome, for tricky reasons which we've grappled
5685 with a lot in the past.
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
5722 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5723 when heap blocks are allocated in order to give accurate results. This
5724 happens automatically for the standard allocator functions such as
5725 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5728 But if your program uses a custom allocator, this doesn't automatically
5729 happen, and Valgrind will not do as well. For example, if you allocate
5730 superblocks with mmap() and then allocates chunks of the superblocks, all
5731 Valgrind's observations will be at the mmap() level and it won't know that
5732 the chunks should be considered separate entities. In Memcheck's case,
5733 that means you probably won't get heap block overrun detection (because
5734 there won't be redzones marked as unaddressable) and you definitely won't
5735 get any leak detection.
5737 The following client requests allow a custom allocator to be annotated so
5738 that it can be handled accurately by Valgrind.
5740 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5741 by a malloc()-like function. For Memcheck (an illustrative case), this
5744 - It records that the block has been allocated. This means any addresses
5745 within the block mentioned in error messages will be
5746 identified as belonging to the block. It also means that if the block
5747 isn't freed it will be detected by the leak checker.
5749 - It marks the block as being addressable and undefined (if 'is_zeroed' is
5750 not set), or addressable and defined (if 'is_zeroed' is set). This
5751 controls how accesses to the block by the program are handled.
5753 'addr' is the start of the usable block (ie. after any
5754 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5755 can apply redzones -- these are blocks of padding at the start and end of
5756 each block. Adding redzones is recommended as it makes it much more likely
5757 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5758 zeroed (or filled with another predictable value), as is the case for
5761 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5762 heap block -- that will be used by the client program -- is allocated.
5763 It's best to put it at the outermost level of the allocator if possible;
5764 for example, if you have a function my_alloc() which calls
5765 internal_alloc(), and the client request is put inside internal_alloc(),
5766 stack traces relating to the heap block will contain entries for both
5767 my_alloc() and internal_alloc(), which is probably not what you want.
5769 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5770 custom blocks from within a heap block, B, that has been allocated with
5771 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5772 -- the custom blocks will take precedence.
5774 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5775 Memcheck, it does two things:
5777 - It records that the block has been deallocated. This assumes that the
5778 block was annotated as having been allocated via
5779 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5781 - It marks the block as being unaddressable.
5783 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5784 heap block is deallocated.
5786 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5787 Memcheck, it does four things:
5789 - It records that the size of a block has been changed. This assumes that
5790 the block was annotated as having been allocated via
5791 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5793 - If the block shrunk, it marks the freed memory as being unaddressable.
5795 - If the block grew, it marks the new area as undefined and defines a red
5796 zone past the end of the new block.
5798 - The V-bits of the overlap between the old and the new block are preserved.
5800 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5801 and before deallocation of the old block.
5803 In many cases, these three client requests will not be enough to get your
5804 allocator working well with Memcheck. More specifically, if your allocator
5805 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5806 will be necessary to mark the memory as addressable just before the zeroing
5807 occurs, otherwise you'll get a lot of invalid write errors. For example,
5808 you'll need to do this if your allocator recycles freed blocks, but it
5809 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5810 Alternatively, if your allocator reuses freed blocks for allocator-internal
5811 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5813 Really, what's happening is a blurring of the lines between the client
5814 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5815 memory should be considered unaddressable to the client program, but the
5816 allocator knows more than the rest of the client program and so may be able
5817 to safely access it. Extra client requests are necessary for Valgrind to
5818 understand the distinction between the allocator and the rest of the
5821 Ignored if addr == 0.
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)
/* Create a memory pool. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)
/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)
/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)
/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)
/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)
/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)
/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)
/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
/* Mark a piece of memory as being a stack. Returns a stack id. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)
/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)
/* Change the start and end address of the stack id. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   1, 0, 0, 0, 0)
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   -1, 0, 0, 0, 0)
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                   command, 0, 0, 0, 0)
5941 #undef PLAT_x86_darwin
5942 #undef PLAT_amd64_darwin
5943 #undef PLAT_x86_win32
5944 #undef PLAT_amd64_win64
5945 #undef PLAT_x86_linux
5946 #undef PLAT_amd64_linux
5947 #undef PLAT_ppc32_linux
5948 #undef PLAT_ppc64_linux
5949 #undef PLAT_arm_linux
5950 #undef PLAT_s390x_linux
5951 #undef PLAT_mips32_linux
5952 #undef PLAT_mips64_linux
5954 #endif /* __VALGRIND_H */