2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
15 Copyright (C) 2000-2012 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (e.g. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 8
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_x86_linux
115 #undef PLAT_amd64_linux
116 #undef PLAT_ppc32_linux
117 #undef PLAT_ppc64_linux
118 #undef PLAT_arm_linux
119 #undef PLAT_s390x_linux
120 #undef PLAT_mips32_linux
123 #if defined(__APPLE__) && defined(__i386__)
124 # define PLAT_x86_darwin 1
125 #elif defined(__APPLE__) && defined(__x86_64__)
126 # define PLAT_amd64_darwin 1
127 #elif defined(__MINGW32__) || defined(__CYGWIN32__) \
128 || (defined(_WIN32) && defined(_M_IX86))
129 # define PLAT_x86_win32 1
130 #elif defined(__linux__) && defined(__i386__)
131 # define PLAT_x86_linux 1
132 #elif defined(__linux__) && defined(__x86_64__)
133 # define PLAT_amd64_linux 1
134 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
135 # define PLAT_ppc32_linux 1
136 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
137 # define PLAT_ppc64_linux 1
138 #elif defined(__linux__) && defined(__arm__)
139 # define PLAT_arm_linux 1
140 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
141 # define PLAT_s390x_linux 1
142 #elif defined(__linux__) && defined(__mips__)
143 # define PLAT_mips32_linux 1
145 /* If we're not compiling for our target platform, don't generate
147 # if !defined(NVALGRIND)
153 /* ------------------------------------------------------------------ */
154 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
155 /* in here of use to end-users -- skip to the next section. */
156 /* ------------------------------------------------------------------ */
159 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
160 * request. Accepts both pointers and integers as arguments.
162 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
163 * client request that does not return a value.
165 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
166 * client request and whose value equals the client request result. Accepts
167 * both pointers and integers as arguments. Note that such calls are not
168 * necessarily pure functions -- they may have side effects.
/* Older statement-style client request: evaluates
   VALGRIND_DO_CLIENT_REQUEST_EXPR and assigns its result to the lvalue
   _zzq_rlval.  Kept for backward compatibility; new code can use the
   _EXPR form directly. */
171 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
172 _zzq_request, _zzq_arg1, _zzq_arg2, \
173 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
174 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
175 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
176 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
/* Statement-style client request that discards the result.  The default
   value passed is 0, since no result is returned to the caller. */
178 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
179 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
180 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
181 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
182 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
184 #if defined(NVALGRIND)
186 /* Define NVALGRIND to completely remove the Valgrind magic sequence
187 from the compiled code (analogous to NDEBUG's effects on
189 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
190 _zzq_default, _zzq_request, \
191 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
194 #else /* ! NVALGRIND */
196 /* The following defines the magic code sequences which the JITter
197 spots and handles magically. Don't look too closely at them as
198 they will rot your brain.
200 The assembly code sequences for all architectures is in this one
201 file. This is because this file must be stand-alone, and we don't
202 want to have multiple files.
204 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
205 value gets put in the return slot, so that everything works when
206 this is executed not under Valgrind. Args are passed in a memory
207 block, and so there's no intrinsic limit to the number that could
208 be passed, but it's currently five.
211 _zzq_rlval result lvalue
212 _zzq_default default value (result returned when running on real CPU)
213 _zzq_request request code
214 _zzq_arg1..5 request params
216 The other two macros are used to support function wrapping, and are
217 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
218 guest's NRADDR pseudo-register and whatever other information is
219 needed to safely run the call original from the wrapper: on
220 ppc64-linux, the R2 value at the divert point is also needed. This
221 information is abstracted into a user-visible type, OrigFn.
223 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
224 guest, but guarantees that the branch instruction will not be
225 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
226 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
227 complete inline asm, since it needs to be combined with more magic
228 inline asm stuff to be useful.
231 /* ------------------------- x86-{linux,darwin} ---------------- */
233 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
234 || (defined(PLAT_x86_win32) && defined(__GNUC__))
238 unsigned int nraddr; /* where's the code? */
/* x86 magic preamble: four rotates of %edi whose counts sum to 64
   (3+13+29+19), so on a real CPU the register ends up unchanged.
   Valgrind's JIT spots this sequence and treats the instruction that
   follows it specially. */
242 #define __SPECIAL_INSTRUCTION_PREAMBLE \
243 "roll $3, %%edi ; roll $13, %%edi\n\t" \
244 "roll $29, %%edi ; roll $19, %%edi\n\t"
246 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
247 _zzq_default, _zzq_request, \
248 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
250 ({volatile unsigned int _zzq_args[6]; \
251 volatile unsigned int _zzq_result; \
252 _zzq_args[0] = (unsigned int)(_zzq_request); \
253 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
254 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
255 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
256 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
257 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
258 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
259 /* %EDX = client_request ( %EAX ) */ \
260 "xchgl %%ebx,%%ebx" \
261 : "=d" (_zzq_result) \
262 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
268 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
269 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
270 volatile unsigned int __addr; \
271 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
272 /* %EAX = guest_NRADDR */ \
273 "xchgl %%ecx,%%ecx" \
278 _zzq_orig->nraddr = __addr; \
/* Magic sequence for a no-redirect call through %EAX.  Note this is
   just asm text, not a complete inline-asm statement; it must be pasted
   into a larger asm block (as the CALL_FN_ macros below do). */
281 #define VALGRIND_CALL_NOREDIR_EAX \
282 __SPECIAL_INSTRUCTION_PREAMBLE \
283 /* call-noredir *%EAX */ \
284 "xchgl %%edx,%%edx\n\t"
286 #define VALGRIND_VEX_INJECT_IR() \
288 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
289 "xchgl %%edi,%%edi\n\t" \
290 : : : "cc", "memory" \
294 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
296 /* ------------------------- x86-Win32 ------------------------- */
298 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
302 unsigned int nraddr; /* where's the code? */
306 #if defined(_MSC_VER)
308 #define __SPECIAL_INSTRUCTION_PREAMBLE \
309 __asm rol edi, 3 __asm rol edi, 13 \
310 __asm rol edi, 29 __asm rol edi, 19
312 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
313 _zzq_default, _zzq_request, \
314 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
315 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
316 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
317 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
318 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
320 static __inline uintptr_t
321 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
322 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
323 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
326 volatile uintptr_t _zzq_args[6];
327 volatile unsigned int _zzq_result;
328 _zzq_args[0] = (uintptr_t)(_zzq_request);
329 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
330 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
331 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
332 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
333 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
334 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
335 __SPECIAL_INSTRUCTION_PREAMBLE
336 /* %EDX = client_request ( %EAX ) */
338 __asm mov _zzq_result, edx
343 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
344 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
345 volatile unsigned int __addr; \
346 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
347 /* %EAX = guest_NRADDR */ \
349 __asm mov __addr, eax \
351 _zzq_orig->nraddr = __addr; \
354 #define VALGRIND_CALL_NOREDIR_EAX ERROR
356 #define VALGRIND_VEX_INJECT_IR() \
358 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
364 #error Unsupported compiler.
367 #endif /* PLAT_x86_win32 */
369 /* ------------------------ amd64-{linux,darwin} --------------- */
371 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
375 unsigned long long int nraddr; /* where's the code? */
/* amd64 magic preamble: four rotates of %rdi whose counts sum to 128
   (3+13+61+51), leaving the register unchanged on a real CPU; the JIT
   recognises the sequence as marking a special instruction. */
379 #define __SPECIAL_INSTRUCTION_PREAMBLE \
380 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
381 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
383 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
384 _zzq_default, _zzq_request, \
385 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
387 ({ volatile unsigned long long int _zzq_args[6]; \
388 volatile unsigned long long int _zzq_result; \
389 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
390 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
391 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
392 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
393 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
394 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
395 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
396 /* %RDX = client_request ( %RAX ) */ \
397 "xchgq %%rbx,%%rbx" \
398 : "=d" (_zzq_result) \
399 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
405 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
406 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
407 volatile unsigned long long int __addr; \
408 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
409 /* %RAX = guest_NRADDR */ \
410 "xchgq %%rcx,%%rcx" \
415 _zzq_orig->nraddr = __addr; \
/* Magic sequence for a no-redirect call through %RAX.  Asm text only,
   to be combined into a larger inline-asm statement. */
418 #define VALGRIND_CALL_NOREDIR_RAX \
419 __SPECIAL_INSTRUCTION_PREAMBLE \
420 /* call-noredir *%RAX */ \
421 "xchgq %%rdx,%%rdx\n\t"
423 #define VALGRIND_VEX_INJECT_IR() \
425 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
426 "xchgq %%rdi,%%rdi\n\t" \
427 : : : "cc", "memory" \
431 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
433 /* ------------------------ ppc32-linux ------------------------ */
435 #if defined(PLAT_ppc32_linux)
439 unsigned int nraddr; /* where's the code? */
/* ppc32 magic preamble: a distinctive sequence of rlwinm operations on
   r0 that the JIT recognises as marking a special instruction. */
443 #define __SPECIAL_INSTRUCTION_PREAMBLE \
444 "rlwinm 0,0,3,0,0 ; rlwinm 0,0,13,0,0\n\t" \
445 "rlwinm 0,0,29,0,0 ; rlwinm 0,0,19,0,0\n\t"
447 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
448 _zzq_default, _zzq_request, \
449 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
452 ({ unsigned int _zzq_args[6]; \
453 unsigned int _zzq_result; \
454 unsigned int* _zzq_ptr; \
455 _zzq_args[0] = (unsigned int)(_zzq_request); \
456 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
457 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
458 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
459 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
460 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
461 _zzq_ptr = _zzq_args; \
462 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
463 "mr 4,%2\n\t" /*ptr*/ \
464 __SPECIAL_INSTRUCTION_PREAMBLE \
465 /* %R3 = client_request ( %R4 ) */ \
467 "mr %0,3" /*result*/ \
468 : "=b" (_zzq_result) \
469 : "b" (_zzq_default), "b" (_zzq_ptr) \
470 : "cc", "memory", "r3", "r4"); \
474 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
475 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
476 unsigned int __addr; \
477 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
478 /* %R3 = guest_NRADDR */ \
483 : "cc", "memory", "r3" \
485 _zzq_orig->nraddr = __addr; \
488 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
489 __SPECIAL_INSTRUCTION_PREAMBLE \
490 /* branch-and-link-to-noredir *%R11 */ \
493 #define VALGRIND_VEX_INJECT_IR() \
495 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
500 #endif /* PLAT_ppc32_linux */
502 /* ------------------------ ppc64-linux ------------------------ */
504 #if defined(PLAT_ppc64_linux)
508 unsigned long long int nraddr; /* where's the code? */
509 unsigned long long int r2; /* what tocptr do we need? */
/* ppc64 magic preamble: four rotldi rotations of r0 whose counts sum
   to 128 (3+13+61+51), leaving r0 unchanged on a real CPU; the JIT
   recognises the sequence as marking a special instruction. */
513 #define __SPECIAL_INSTRUCTION_PREAMBLE \
514 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
515 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
517 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
518 _zzq_default, _zzq_request, \
519 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
522 ({ unsigned long long int _zzq_args[6]; \
523 unsigned long long int _zzq_result; \
524 unsigned long long int* _zzq_ptr; \
525 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
526 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
527 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
528 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
529 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
530 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
531 _zzq_ptr = _zzq_args; \
532 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
533 "mr 4,%2\n\t" /*ptr*/ \
534 __SPECIAL_INSTRUCTION_PREAMBLE \
535 /* %R3 = client_request ( %R4 ) */ \
537 "mr %0,3" /*result*/ \
538 : "=b" (_zzq_result) \
539 : "b" (_zzq_default), "b" (_zzq_ptr) \
540 : "cc", "memory", "r3", "r4"); \
544 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
545 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
546 unsigned long long int __addr; \
547 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
548 /* %R3 = guest_NRADDR */ \
553 : "cc", "memory", "r3" \
555 _zzq_orig->nraddr = __addr; \
556 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
557 /* %R3 = guest_NRADDR_GPR2 */ \
562 : "cc", "memory", "r3" \
564 _zzq_orig->r2 = __addr; \
567 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
568 __SPECIAL_INSTRUCTION_PREAMBLE \
569 /* branch-and-link-to-noredir *%R11 */ \
572 #define VALGRIND_VEX_INJECT_IR() \
574 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
579 #endif /* PLAT_ppc64_linux */
581 /* ------------------------- arm-linux ------------------------- */
583 #if defined(PLAT_arm_linux)
587 unsigned int nraddr; /* where's the code? */
/* arm magic preamble: four rotate-rights of r12 whose counts sum to 64
   (3+13+29+19), leaving r12 unchanged on a real CPU; the JIT recognises
   the sequence as marking a special instruction. */
591 #define __SPECIAL_INSTRUCTION_PREAMBLE \
592 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
593 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
595 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
596 _zzq_default, _zzq_request, \
597 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
600 ({volatile unsigned int _zzq_args[6]; \
601 volatile unsigned int _zzq_result; \
602 _zzq_args[0] = (unsigned int)(_zzq_request); \
603 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
604 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
605 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
606 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
607 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
608 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
609 "mov r4, %2\n\t" /*ptr*/ \
610 __SPECIAL_INSTRUCTION_PREAMBLE \
611 /* R3 = client_request ( R4 ) */ \
612 "orr r10, r10, r10\n\t" \
613 "mov %0, r3" /*result*/ \
614 : "=r" (_zzq_result) \
615 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
616 : "cc","memory", "r3", "r4"); \
620 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
621 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
622 unsigned int __addr; \
623 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
624 /* R3 = guest_NRADDR */ \
625 "orr r11, r11, r11\n\t" \
629 : "cc", "memory", "r3" \
631 _zzq_orig->nraddr = __addr; \
/* Magic sequence for a no-redirect branch-and-link through R4 on ARM.
   Asm text only, to be combined into a larger inline-asm statement. */
634 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
635 __SPECIAL_INSTRUCTION_PREAMBLE \
636 /* branch-and-link-to-noredir *%R4 */ \
637 "orr r12, r12, r12\n\t"
639 #define VALGRIND_VEX_INJECT_IR() \
641 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
642 "orr r9, r9, r9\n\t" \
643 : : : "cc", "memory" \
647 #endif /* PLAT_arm_linux */
649 /* ------------------------ s390x-linux ------------------------ */
651 #if defined(PLAT_s390x_linux)
655 unsigned long long int nraddr; /* where's the code? */
659 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
660 * code. This detection is implemented in platform specific toIR.c
661 * (e.g. VEX/priv/guest_s390_decoder.c).
663 #define __SPECIAL_INSTRUCTION_PREAMBLE \
/* s390x marker instructions appended after the preamble to select the
   kind of request.  Each is a load-register-to-itself ("lr N,N"), a
   no-op on real hardware, distinguished only by the register number. */
669 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
670 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
671 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
672 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
674 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
675 _zzq_default, _zzq_request, \
676 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
678 ({volatile unsigned long long int _zzq_args[6]; \
679 volatile unsigned long long int _zzq_result; \
680 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
681 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
682 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
683 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
684 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
685 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
686 __asm__ volatile(/* r2 = args */ \
690 __SPECIAL_INSTRUCTION_PREAMBLE \
691 __CLIENT_REQUEST_CODE \
694 : "=d" (_zzq_result) \
695 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
696 : "cc", "2", "3", "memory" \
701 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
702 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
703 volatile unsigned long long int __addr; \
704 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
705 __GET_NR_CONTEXT_CODE \
709 : "cc", "3", "memory" \
711 _zzq_orig->nraddr = __addr; \
714 #define VALGRIND_CALL_NOREDIR_R1 \
715 __SPECIAL_INSTRUCTION_PREAMBLE \
718 #define VALGRIND_VEX_INJECT_IR() \
720 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
721 __VEX_INJECT_IR_CODE); \
724 #endif /* PLAT_s390x_linux */
726 /* ------------------------- mips32-linux ---------------- */
728 #if defined(PLAT_mips32_linux)
732 unsigned int nraddr; /* where's the code? */
740 #define __SPECIAL_INSTRUCTION_PREAMBLE \
741 "srl $0, $0, 13\n\t" \
742 "srl $0, $0, 29\n\t" \
743 "srl $0, $0, 3\n\t" \
746 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
747 _zzq_default, _zzq_request, \
748 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
750 ({ volatile unsigned int _zzq_args[6]; \
751 volatile unsigned int _zzq_result; \
752 _zzq_args[0] = (unsigned int)(_zzq_request); \
753 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
754 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
755 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
756 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
757 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
758 __asm__ volatile("move $11, %1\n\t" /*default*/ \
759 "move $12, %2\n\t" /*ptr*/ \
760 __SPECIAL_INSTRUCTION_PREAMBLE \
761 /* T3 = client_request ( T4 ) */ \
762 "or $13, $13, $13\n\t" \
763 "move %0, $11\n\t" /*result*/ \
764 : "=r" (_zzq_result) \
765 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
766 : "cc","memory", "t3", "t4"); \
770 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
771 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
772 volatile unsigned int __addr; \
773 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
774 /* %t9 = guest_NRADDR */ \
775 "or $14, $14, $14\n\t" \
776 "move %0, $11" /*result*/ \
779 : "cc", "memory" , "t3" \
781 _zzq_orig->nraddr = __addr; \
/* Magic sequence for a no-redirect call through $t9 on MIPS.  The
   "or $15, $15, $15" is a no-op on real hardware; asm text only, to be
   combined into a larger inline-asm statement. */
784 #define VALGRIND_CALL_NOREDIR_T9 \
785 __SPECIAL_INSTRUCTION_PREAMBLE \
786 /* call-noredir *%t9 */ \
787 "or $15, $15, $15\n\t"
789 #define VALGRIND_VEX_INJECT_IR() \
791 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
792 "or $11, $11, $11\n\t" \
797 #endif /* PLAT_mips32_linux */
799 /* Insert assembly code for other platforms here... */
801 #endif /* NVALGRIND */
804 /* ------------------------------------------------------------------ */
805 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
806 /* ugly. It's the least-worst tradeoff I can think of. */
807 /* ------------------------------------------------------------------ */
809 /* This section defines magic (a.k.a appalling-hack) macros for doing
810 guaranteed-no-redirection macros, so as to get from function
811 wrappers to the functions they are wrapping. The whole point is to
812 construct standard call sequences, but to do the call itself with a
813 special no-redirect call pseudo-instruction that the JIT
814 understands and handles specially. This section is long and
815 repetitious, and I can't see a way to make it shorter.
817 The naming scheme is as follows:
819 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
821 'W' stands for "word" and 'v' for "void". Hence there are
822 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
823 and for each, the possibility of returning a word-typed result, or
827 /* Use these to write the name of your wrapper. NOTE: duplicates
828 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
829 the default behaviour equivalence class tag "00000" into the name.
830 See pub_tool_redir.h for details -- normally you don't need to
831 think about this, though. */
833 /* Use an extra level of macroisation so as to ensure the soname/fnname
834 args are fully macro-expanded before pasting them together. */
/* Token-paste four already-expanded fragments into one identifier. */
835 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
/* Build the mangled symbol name for a wrapper of soname:fnname.  The
   _vgw prefix marks a wrapper and "00000" is the default behaviour
   equivalence-class tag; the ZU/ZZ variants differ in which name
   components are Z-encoded -- see pub_tool_redir.h for the exact
   convention. */
837 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
838 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
840 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
841 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
843 /* Use this macro from within a wrapper function to collect the
844 context (address and possibly other info) of the original function.
845 Once you have that you can then use it in one of the CALL_FN_
846 macros. The type of the argument _lval is OrigFn. */
/* From within a wrapper, capture the original function's context into
   an OrigFn lvalue.  Simply an alias for VALGRIND_GET_NR_CONTEXT. */
847 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
849 /* Also provide end-user facilities for function replacement, rather
850 than wrapping. A replacement function differs from a wrapper in
851 that it has no way to get hold of the original function being
852 called, and hence no way to call onwards to it. In a replacement
853 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Build the mangled symbol name for a *replacement* of soname:fnname
   (note the _vgr prefix, versus _vgw for wrappers).  ZU/ZZ select the
   Z-encoding convention of the name components -- see
   pub_tool_redir.h. */
855 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
856 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
858 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
859 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
861 /* Derivatives of the main macros below, for calling functions
/* void-returning call variants: each forwards to the corresponding
   word-returning CALL_FN_W_* macro and discards the result into a
   volatile dummy, so the call itself cannot be optimised away. */
864 #define CALL_FN_v_v(fnptr) \
865 do { volatile unsigned long _junk; \
866 CALL_FN_W_v(_junk,fnptr); } while (0)
868 #define CALL_FN_v_W(fnptr, arg1) \
869 do { volatile unsigned long _junk; \
870 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
872 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
873 do { volatile unsigned long _junk; \
874 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
876 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
877 do { volatile unsigned long _junk; \
878 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
880 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
881 do { volatile unsigned long _junk; \
882 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
884 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
885 do { volatile unsigned long _junk; \
886 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
888 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
889 do { volatile unsigned long _junk; \
890 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
892 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
893 do { volatile unsigned long _junk; \
894 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
896 /* ------------------------- x86-{linux,darwin} ---------------- */
898 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
900 /* These regs are trashed by the hidden call. No need to mention eax
901 as gcc can already see that, plus causes gcc to bomb. */
/* x86 caller-saved registers to name in asm clobber lists.  eax is
   deliberately omitted: it is already an input/output operand of the
   asm statements that use this, and gcc rejects clobbering an operand
   register. */
902 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
904 /* Macros to save and align the stack before making a function
905 call and restore it afterwards as gcc may not keep the stack
906 pointer aligned if it doesn't realise calls are being made
907 to other functions. */
/* Save %esp into %edi, then round %esp down to a 16-byte boundary
   before the call; RESTORE puts the saved value back afterwards.
   Any asm using these must therefore list "edi" as clobbered (as the
   CALL_FN_ macros do). */
909 #define VALGRIND_ALIGN_STACK \
910 "movl %%esp,%%edi\n\t" \
911 "andl $0xfffffff0,%%esp\n\t"
912 #define VALGRIND_RESTORE_STACK \
913 "movl %%edi,%%esp\n\t"
915 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
918 #define CALL_FN_W_v(lval, orig) \
920 volatile OrigFn _orig = (orig); \
921 volatile unsigned long _argvec[1]; \
922 volatile unsigned long _res; \
923 _argvec[0] = (unsigned long)_orig.nraddr; \
925 VALGRIND_ALIGN_STACK \
926 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
927 VALGRIND_CALL_NOREDIR_EAX \
928 VALGRIND_RESTORE_STACK \
929 : /*out*/ "=a" (_res) \
930 : /*in*/ "a" (&_argvec[0]) \
931 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
933 lval = (__typeof__(lval)) _res; \
936 #define CALL_FN_W_W(lval, orig, arg1) \
938 volatile OrigFn _orig = (orig); \
939 volatile unsigned long _argvec[2]; \
940 volatile unsigned long _res; \
941 _argvec[0] = (unsigned long)_orig.nraddr; \
942 _argvec[1] = (unsigned long)(arg1); \
944 VALGRIND_ALIGN_STACK \
945 "subl $12, %%esp\n\t" \
946 "pushl 4(%%eax)\n\t" \
947 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
948 VALGRIND_CALL_NOREDIR_EAX \
949 VALGRIND_RESTORE_STACK \
950 : /*out*/ "=a" (_res) \
951 : /*in*/ "a" (&_argvec[0]) \
952 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
954 lval = (__typeof__(lval)) _res; \
957 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
959 volatile OrigFn _orig = (orig); \
960 volatile unsigned long _argvec[3]; \
961 volatile unsigned long _res; \
962 _argvec[0] = (unsigned long)_orig.nraddr; \
963 _argvec[1] = (unsigned long)(arg1); \
964 _argvec[2] = (unsigned long)(arg2); \
966 VALGRIND_ALIGN_STACK \
967 "subl $8, %%esp\n\t" \
968 "pushl 8(%%eax)\n\t" \
969 "pushl 4(%%eax)\n\t" \
970 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
971 VALGRIND_CALL_NOREDIR_EAX \
972 VALGRIND_RESTORE_STACK \
973 : /*out*/ "=a" (_res) \
974 : /*in*/ "a" (&_argvec[0]) \
975 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
977 lval = (__typeof__(lval)) _res; \
980 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
982 volatile OrigFn _orig = (orig); \
983 volatile unsigned long _argvec[4]; \
984 volatile unsigned long _res; \
985 _argvec[0] = (unsigned long)_orig.nraddr; \
986 _argvec[1] = (unsigned long)(arg1); \
987 _argvec[2] = (unsigned long)(arg2); \
988 _argvec[3] = (unsigned long)(arg3); \
990 VALGRIND_ALIGN_STACK \
991 "subl $4, %%esp\n\t" \
992 "pushl 12(%%eax)\n\t" \
993 "pushl 8(%%eax)\n\t" \
994 "pushl 4(%%eax)\n\t" \
995 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
996 VALGRIND_CALL_NOREDIR_EAX \
997 VALGRIND_RESTORE_STACK \
998 : /*out*/ "=a" (_res) \
999 : /*in*/ "a" (&_argvec[0]) \
1000 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1002 lval = (__typeof__(lval)) _res; \
1005 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1007 volatile OrigFn _orig = (orig); \
1008 volatile unsigned long _argvec[5]; \
1009 volatile unsigned long _res; \
1010 _argvec[0] = (unsigned long)_orig.nraddr; \
1011 _argvec[1] = (unsigned long)(arg1); \
1012 _argvec[2] = (unsigned long)(arg2); \
1013 _argvec[3] = (unsigned long)(arg3); \
1014 _argvec[4] = (unsigned long)(arg4); \
1016 VALGRIND_ALIGN_STACK \
1017 "pushl 16(%%eax)\n\t" \
1018 "pushl 12(%%eax)\n\t" \
1019 "pushl 8(%%eax)\n\t" \
1020 "pushl 4(%%eax)\n\t" \
1021 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1022 VALGRIND_CALL_NOREDIR_EAX \
1023 VALGRIND_RESTORE_STACK \
1024 : /*out*/ "=a" (_res) \
1025 : /*in*/ "a" (&_argvec[0]) \
1026 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1028 lval = (__typeof__(lval)) _res; \
/* x86: call the original (non-redirected) 5-arg function; result -> lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call the original (non-redirected) 6-arg function; result -> lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call the original (non-redirected) 7-arg function; result -> lval. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call the original (non-redirected) 8-arg function; result -> lval.
   32 bytes of args keeps the stack 16-aligned, so no pre-adjustment. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call the original (non-redirected) 9-arg function; result -> lval. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call the original (non-redirected) 10-arg function; result -> lval. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* x86: call the original (non-redirected) 11-arg function; result -> lval. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1281 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1282 arg6,arg7,arg8,arg9,arg10, \
1285 volatile OrigFn _orig = (orig); \
1286 volatile unsigned long _argvec[13]; \
1287 volatile unsigned long _res; \
1288 _argvec[0] = (unsigned long)_orig.nraddr; \
1289 _argvec[1] = (unsigned long)(arg1); \
1290 _argvec[2] = (unsigned long)(arg2); \
1291 _argvec[3] = (unsigned long)(arg3); \
1292 _argvec[4] = (unsigned long)(arg4); \
1293 _argvec[5] = (unsigned long)(arg5); \
1294 _argvec[6] = (unsigned long)(arg6); \
1295 _argvec[7] = (unsigned long)(arg7); \
1296 _argvec[8] = (unsigned long)(arg8); \
1297 _argvec[9] = (unsigned long)(arg9); \
1298 _argvec[10] = (unsigned long)(arg10); \
1299 _argvec[11] = (unsigned long)(arg11); \
1300 _argvec[12] = (unsigned long)(arg12); \
1302 VALGRIND_ALIGN_STACK \
1303 "pushl 48(%%eax)\n\t" \
1304 "pushl 44(%%eax)\n\t" \
1305 "pushl 40(%%eax)\n\t" \
1306 "pushl 36(%%eax)\n\t" \
1307 "pushl 32(%%eax)\n\t" \
1308 "pushl 28(%%eax)\n\t" \
1309 "pushl 24(%%eax)\n\t" \
1310 "pushl 20(%%eax)\n\t" \
1311 "pushl 16(%%eax)\n\t" \
1312 "pushl 12(%%eax)\n\t" \
1313 "pushl 8(%%eax)\n\t" \
1314 "pushl 4(%%eax)\n\t" \
1315 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1316 VALGRIND_CALL_NOREDIR_EAX \
1317 VALGRIND_RESTORE_STACK \
1318 : /*out*/ "=a" (_res) \
1319 : /*in*/ "a" (&_argvec[0]) \
1320 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1322 lval = (__typeof__(lval)) _res; \
1325 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1327 /* ------------------------ amd64-{linux,darwin} --------------- */
1329 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1331 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1333 /* These regs are trashed by the hidden call. */
1334 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1335 "rdi", "r8", "r9", "r10", "r11"
/* This is all pretty complex.  It's so as to make stack unwinding
   work reliably.  See bug 243270.  The basic problem is the sub and
   add of 128 of %rsp in all of the following macros.  If gcc believes
   the CFA is in %rsp, then unwinding may fail, because what's at the
   CFA is not what gcc "expected" when it constructs the CFIs for the
   places where the macros are instantiated.

   But we can't just add a CFI annotation to increase the CFA offset
   by 128, to match the sub of 128 from %rsp, because we don't know
   whether gcc has chosen %rsp as the CFA at that point, or whether it
   has chosen some other register (eg, %rbp).  In the latter case,
   adding a CFI annotation to change the CFA offset is simply wrong.

   So the solution is to get hold of the CFA using
   __builtin_dwarf_cfa(), put it in a known register, and add a
   CFI annotation to say what the register is.  We choose %rbp for
   this (perhaps perversely), because:

   (1) %rbp is already subject to unwinding.  If a new register was
       chosen then the unwinder would have to unwind it in all stack
       traces, which is expensive, and

   (2) %rbp is already subject to precise exception updates in the
       JIT.  If a new register was chosen, we'd have to have precise
       exceptions for it too, which reduces performance of the
       generated code.

   However .. one extra complication.  We can't just whack the result
   of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
   list of trashed registers at the end of the inline assembly
   fragments; gcc won't allow %rbp to appear in that list.  Hence
   instead we need to stash %rbp in %r15 for the duration of the asm,
   and say that %r15 is trashed instead.  gcc seems happy to go with
   that.

   Oh .. and this all needs to be conditionalised so that it is
   unchanged from before this commit, when compiled with older gccs
   that don't support __builtin_dwarf_cfa.  Furthermore, since
   this header file is freestanding, it has to be independent of
   config.h, and so the following conditionalisation cannot depend on
   configure time checks.

   Although it's not clear from
   'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
   this expression excludes Darwin.
   .cfi directives in Darwin assembly appear to be completely
   different and I haven't investigated how they work.

   For even more entertainment value, note we have to use the
   completely undocumented __builtin_dwarf_cfa(), which appears to
   really compute the CFA, whereas __builtin_frame_address(0) claims
   to but actually doesn't.  See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  The original %rsp is parked in %r14, which
   every CALL_FN_ asm below declares as clobbered. */

#define VALGRIND_ALIGN_STACK                \
      "movq %%rsp,%%r14\n\t"                \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK              \
      "movq %%r14,%%rsp\n\t"
/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
   long) == 8. */

/* NB 9 Sept 07.  There is a nasty kludge here in all these CALL_FN_
   macros.  In order not to trash the stack redzone, we need to drop
   %rsp by 128 before the hidden call, and restore afterwards.  The
   nastyness is that it is only by luck that the stack still appears
   to be unwindable during the hidden call - since then the behaviour
   of any routine using this macro does not match what the CFI data
   says.  Sigh.

   Why is this important?  Imagine that a wrapper has a stack
   allocated local, and passes to the hidden call, a pointer to it.
   Because gcc does not know about the hidden call, it may allocate
   that local in the redzone.  Unfortunately the hidden call may then
   trash it before it comes to use it.  So we must step clear of the
   redzone, for the duration of the hidden call, to make it safe.

   Probably the same problem afflicts the other redzone-style ABIs too
   (ppc64-linux); but for those, the stack is
   self describing (none of this CFI nonsense) so at least messing
   with the stack pointer doesn't give a danger of non-unwindable
   frames. */

/* amd64: call the original (non-redirected) 0-arg function; result -> lval. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 1-arg function; result -> lval. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 2-arg function; result -> lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 3-arg function; result -> lval. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 4-arg function; result -> lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 5-arg function; result -> lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 6-arg function; result -> lval.
   All six args travel in registers; nothing goes on the stack. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 7-arg function; result -> lval.
   arg7 is pushed; subq $136 (not 128) keeps %rsp 16-aligned after the push. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 8-arg function; result -> lval. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 9-arg function; result -> lval. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 10-arg function; result -> lval. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 11-arg function; result -> lval. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* amd64: call the original (non-redirected) 12-arg function; result -> lval. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1878 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
1880 /* ------------------------ ppc32-linux ------------------------ */
1882 #if defined(PLAT_ppc32_linux)
1884 /* This is useful for finding out about the on-stack stuff:
1886 extern int f9 ( int,int,int,int,int,int,int,int,int );
1887 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
1888 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
1889 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
1892 return f9(11,22,33,44,55,66,77,88,99);
1895 return f10(11,22,33,44,55,66,77,88,99,110);
1898 return f11(11,22,33,44,55,66,77,88,99,110,121);
1901 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
1905 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
1907 /* These regs are trashed by the hidden call. */
1908 #define __CALLER_SAVED_REGS \
1909 "lr", "ctr", "xer", \
1910 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
1911 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  The original r1 is parked in r28, which
   every CALL_FN_ asm below declares as clobbered. */

#define VALGRIND_ALIGN_STACK     \
   "mr 28,1\n\t"                 \
   "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK   \
   "mr 1,28\n\t"

/* These CALL_FN_ macros assume that on ppc32-linux,
   sizeof(unsigned long) == 4. */
/* ppc32: call the original (non-redirected) 0-arg function; result -> lval. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32: call the original (non-redirected) 1-arg function; result -> lval. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc32: call the original (non-redirected) 2-arg function; result -> lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1994 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1996 volatile OrigFn _orig = (orig); \
1997 volatile unsigned long _argvec[4]; \
1998 volatile unsigned long _res; \
1999 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2000 _argvec[1] = (unsigned long)arg1; \
2001 _argvec[2] = (unsigned long)arg2; \
2002 _argvec[3] = (unsigned long)arg3; \
2004 VALGRIND_ALIGN_STACK \
2006 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2008 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2009 "lwz 11,0(11)\n\t" /* target->r11 */ \
2010 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2011 VALGRIND_RESTORE_STACK \
2013 : /*out*/ "=r" (_res) \
2014 : /*in*/ "r" (&_argvec[0]) \
2015 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2017 lval = (__typeof__(lval)) _res; \
2020 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2022 volatile OrigFn _orig = (orig); \
2023 volatile unsigned long _argvec[5]; \
2024 volatile unsigned long _res; \
2025 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2026 _argvec[1] = (unsigned long)arg1; \
2027 _argvec[2] = (unsigned long)arg2; \
2028 _argvec[3] = (unsigned long)arg3; \
2029 _argvec[4] = (unsigned long)arg4; \
2031 VALGRIND_ALIGN_STACK \
2033 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2035 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2036 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2037 "lwz 11,0(11)\n\t" /* target->r11 */ \
2038 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2039 VALGRIND_RESTORE_STACK \
2041 : /*out*/ "=r" (_res) \
2042 : /*in*/ "r" (&_argvec[0]) \
2043 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2045 lval = (__typeof__(lval)) _res; \
2048 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2050 volatile OrigFn _orig = (orig); \
2051 volatile unsigned long _argvec[6]; \
2052 volatile unsigned long _res; \
2053 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2054 _argvec[1] = (unsigned long)arg1; \
2055 _argvec[2] = (unsigned long)arg2; \
2056 _argvec[3] = (unsigned long)arg3; \
2057 _argvec[4] = (unsigned long)arg4; \
2058 _argvec[5] = (unsigned long)arg5; \
2060 VALGRIND_ALIGN_STACK \
2062 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2064 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2065 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2066 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2067 "lwz 11,0(11)\n\t" /* target->r11 */ \
2068 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2069 VALGRIND_RESTORE_STACK \
2071 : /*out*/ "=r" (_res) \
2072 : /*in*/ "r" (&_argvec[0]) \
2073 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2075 lval = (__typeof__(lval)) _res; \
2078 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2080 volatile OrigFn _orig = (orig); \
2081 volatile unsigned long _argvec[7]; \
2082 volatile unsigned long _res; \
2083 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2084 _argvec[1] = (unsigned long)arg1; \
2085 _argvec[2] = (unsigned long)arg2; \
2086 _argvec[3] = (unsigned long)arg3; \
2087 _argvec[4] = (unsigned long)arg4; \
2088 _argvec[5] = (unsigned long)arg5; \
2089 _argvec[6] = (unsigned long)arg6; \
2091 VALGRIND_ALIGN_STACK \
2093 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2095 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2096 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2097 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2098 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2099 "lwz 11,0(11)\n\t" /* target->r11 */ \
2100 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2101 VALGRIND_RESTORE_STACK \
2103 : /*out*/ "=r" (_res) \
2104 : /*in*/ "r" (&_argvec[0]) \
2105 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2107 lval = (__typeof__(lval)) _res; \
2110 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2113 volatile OrigFn _orig = (orig); \
2114 volatile unsigned long _argvec[8]; \
2115 volatile unsigned long _res; \
2116 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2117 _argvec[1] = (unsigned long)arg1; \
2118 _argvec[2] = (unsigned long)arg2; \
2119 _argvec[3] = (unsigned long)arg3; \
2120 _argvec[4] = (unsigned long)arg4; \
2121 _argvec[5] = (unsigned long)arg5; \
2122 _argvec[6] = (unsigned long)arg6; \
2123 _argvec[7] = (unsigned long)arg7; \
2125 VALGRIND_ALIGN_STACK \
2127 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2129 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2130 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2131 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2132 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2133 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2134 "lwz 11,0(11)\n\t" /* target->r11 */ \
2135 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2136 VALGRIND_RESTORE_STACK \
2138 : /*out*/ "=r" (_res) \
2139 : /*in*/ "r" (&_argvec[0]) \
2140 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2142 lval = (__typeof__(lval)) _res; \
2145 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2148 volatile OrigFn _orig = (orig); \
2149 volatile unsigned long _argvec[9]; \
2150 volatile unsigned long _res; \
2151 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2152 _argvec[1] = (unsigned long)arg1; \
2153 _argvec[2] = (unsigned long)arg2; \
2154 _argvec[3] = (unsigned long)arg3; \
2155 _argvec[4] = (unsigned long)arg4; \
2156 _argvec[5] = (unsigned long)arg5; \
2157 _argvec[6] = (unsigned long)arg6; \
2158 _argvec[7] = (unsigned long)arg7; \
2159 _argvec[8] = (unsigned long)arg8; \
2161 VALGRIND_ALIGN_STACK \
2163 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2165 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2166 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2167 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2168 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2169 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2170 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2171 "lwz 11,0(11)\n\t" /* target->r11 */ \
2172 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2173 VALGRIND_RESTORE_STACK \
2175 : /*out*/ "=r" (_res) \
2176 : /*in*/ "r" (&_argvec[0]) \
2177 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2179 lval = (__typeof__(lval)) _res; \
2182 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2185 volatile OrigFn _orig = (orig); \
2186 volatile unsigned long _argvec[10]; \
2187 volatile unsigned long _res; \
2188 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2189 _argvec[1] = (unsigned long)arg1; \
2190 _argvec[2] = (unsigned long)arg2; \
2191 _argvec[3] = (unsigned long)arg3; \
2192 _argvec[4] = (unsigned long)arg4; \
2193 _argvec[5] = (unsigned long)arg5; \
2194 _argvec[6] = (unsigned long)arg6; \
2195 _argvec[7] = (unsigned long)arg7; \
2196 _argvec[8] = (unsigned long)arg8; \
2197 _argvec[9] = (unsigned long)arg9; \
2199 VALGRIND_ALIGN_STACK \
2201 "addi 1,1,-16\n\t" /* expand stack frame */ \
2203 "lwz 3,36(11)\n\t" /* arg9 */ \
2206 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2208 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2209 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2210 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2211 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2212 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2213 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2214 "lwz 11,0(11)\n\t" /* target->r11 */ \
2215 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2216 VALGRIND_RESTORE_STACK \
2218 : /*out*/ "=r" (_res) \
2219 : /*in*/ "r" (&_argvec[0]) \
2220 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2222 lval = (__typeof__(lval)) _res; \
2225 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2226 arg7,arg8,arg9,arg10) \
2228 volatile OrigFn _orig = (orig); \
2229 volatile unsigned long _argvec[11]; \
2230 volatile unsigned long _res; \
2231 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2232 _argvec[1] = (unsigned long)arg1; \
2233 _argvec[2] = (unsigned long)arg2; \
2234 _argvec[3] = (unsigned long)arg3; \
2235 _argvec[4] = (unsigned long)arg4; \
2236 _argvec[5] = (unsigned long)arg5; \
2237 _argvec[6] = (unsigned long)arg6; \
2238 _argvec[7] = (unsigned long)arg7; \
2239 _argvec[8] = (unsigned long)arg8; \
2240 _argvec[9] = (unsigned long)arg9; \
2241 _argvec[10] = (unsigned long)arg10; \
2243 VALGRIND_ALIGN_STACK \
2245 "addi 1,1,-16\n\t" /* expand stack frame */ \
2247 "lwz 3,40(11)\n\t" /* arg10 */ \
2250 "lwz 3,36(11)\n\t" /* arg9 */ \
2253 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2255 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2256 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2257 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2258 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2259 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2260 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2261 "lwz 11,0(11)\n\t" /* target->r11 */ \
2262 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2263 VALGRIND_RESTORE_STACK \
2265 : /*out*/ "=r" (_res) \
2266 : /*in*/ "r" (&_argvec[0]) \
2267 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2269 lval = (__typeof__(lval)) _res; \
2272 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2273 arg7,arg8,arg9,arg10,arg11) \
2275 volatile OrigFn _orig = (orig); \
2276 volatile unsigned long _argvec[12]; \
2277 volatile unsigned long _res; \
2278 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2279 _argvec[1] = (unsigned long)arg1; \
2280 _argvec[2] = (unsigned long)arg2; \
2281 _argvec[3] = (unsigned long)arg3; \
2282 _argvec[4] = (unsigned long)arg4; \
2283 _argvec[5] = (unsigned long)arg5; \
2284 _argvec[6] = (unsigned long)arg6; \
2285 _argvec[7] = (unsigned long)arg7; \
2286 _argvec[8] = (unsigned long)arg8; \
2287 _argvec[9] = (unsigned long)arg9; \
2288 _argvec[10] = (unsigned long)arg10; \
2289 _argvec[11] = (unsigned long)arg11; \
2291 VALGRIND_ALIGN_STACK \
2293 "addi 1,1,-32\n\t" /* expand stack frame */ \
2295 "lwz 3,44(11)\n\t" /* arg11 */ \
2298 "lwz 3,40(11)\n\t" /* arg10 */ \
2301 "lwz 3,36(11)\n\t" /* arg9 */ \
2304 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2306 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2307 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2308 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2309 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2310 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2311 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2312 "lwz 11,0(11)\n\t" /* target->r11 */ \
2313 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2314 VALGRIND_RESTORE_STACK \
2316 : /*out*/ "=r" (_res) \
2317 : /*in*/ "r" (&_argvec[0]) \
2318 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2320 lval = (__typeof__(lval)) _res; \
2323 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2324 arg7,arg8,arg9,arg10,arg11,arg12) \
2326 volatile OrigFn _orig = (orig); \
2327 volatile unsigned long _argvec[13]; \
2328 volatile unsigned long _res; \
2329 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2330 _argvec[1] = (unsigned long)arg1; \
2331 _argvec[2] = (unsigned long)arg2; \
2332 _argvec[3] = (unsigned long)arg3; \
2333 _argvec[4] = (unsigned long)arg4; \
2334 _argvec[5] = (unsigned long)arg5; \
2335 _argvec[6] = (unsigned long)arg6; \
2336 _argvec[7] = (unsigned long)arg7; \
2337 _argvec[8] = (unsigned long)arg8; \
2338 _argvec[9] = (unsigned long)arg9; \
2339 _argvec[10] = (unsigned long)arg10; \
2340 _argvec[11] = (unsigned long)arg11; \
2341 _argvec[12] = (unsigned long)arg12; \
2343 VALGRIND_ALIGN_STACK \
2345 "addi 1,1,-32\n\t" /* expand stack frame */ \
2347 "lwz 3,48(11)\n\t" /* arg12 */ \
2350 "lwz 3,44(11)\n\t" /* arg11 */ \
2353 "lwz 3,40(11)\n\t" /* arg10 */ \
2356 "lwz 3,36(11)\n\t" /* arg9 */ \
2359 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2361 "lwz 5,12(11)\n\t" /* arg3->r5 */ \
2362 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2363 "lwz 7,20(11)\n\t" /* arg5->r7 */ \
2364 "lwz 8,24(11)\n\t" /* arg6->r8 */ \
2365 "lwz 9,28(11)\n\t" /* arg7->r9 */ \
2366 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2367 "lwz 11,0(11)\n\t" /* target->r11 */ \
2368 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2369 VALGRIND_RESTORE_STACK \
2371 : /*out*/ "=r" (_res) \
2372 : /*in*/ "r" (&_argvec[0]) \
2373 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2375 lval = (__typeof__(lval)) _res; \
2378 #endif /* PLAT_ppc32_linux */
2380 /* ------------------------ ppc64-linux ------------------------ */
2382 #if defined(PLAT_ppc64_linux)
2384 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2386 /* These regs are trashed by the hidden call. */
2387 #define __CALLER_SAVED_REGS \
2388 "lr", "ctr", "xer", /* link, count, fixed-point-exception registers */ \
2389 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2390 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2393 /* Macros to save and align the stack before making a function
2394 call and restore it afterwards as gcc may not keep the stack
2395 pointer aligned if it doesn't realise calls are being made
2396 to other functions. */
2398 #define VALGRIND_ALIGN_STACK \
2400 "rldicr 1,1,0,59\n\t" /* clear low 4 bits of r1: round SP down to a 16-byte boundary */
2401 #define VALGRIND_RESTORE_STACK \
2404 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned long) == 8. */
2407 #define CALL_FN_W_v(lval, orig) \
2409 volatile OrigFn _orig = (orig); \
2410 volatile unsigned long _argvec[3+0]; \
2411 volatile unsigned long _res; \
2412 /* _argvec[0] holds current r2 across the call */ \
2413 _argvec[1] = (unsigned long)_orig.r2; \
2414 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2416 VALGRIND_ALIGN_STACK \
2418 "std 2,-16(11)\n\t" /* save tocptr */ \
2419 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2420 "ld 11, 0(11)\n\t" /* target->r11 */ \
2421 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2424 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2425 VALGRIND_RESTORE_STACK \
2426 : /*out*/ "=r" (_res) \
2427 : /*in*/ "r" (&_argvec[2]) \
2428 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2430 lval = (__typeof__(lval)) _res; \
2433 #define CALL_FN_W_W(lval, orig, arg1) \
2435 volatile OrigFn _orig = (orig); \
2436 volatile unsigned long _argvec[3+1]; \
2437 volatile unsigned long _res; \
2438 /* _argvec[0] holds current r2 across the call */ \
2439 _argvec[1] = (unsigned long)_orig.r2; \
2440 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2441 _argvec[2+1] = (unsigned long)arg1; \
2443 VALGRIND_ALIGN_STACK \
2445 "std 2,-16(11)\n\t" /* save tocptr */ \
2446 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2447 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2448 "ld 11, 0(11)\n\t" /* target->r11 */ \
2449 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2452 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2453 VALGRIND_RESTORE_STACK \
2454 : /*out*/ "=r" (_res) \
2455 : /*in*/ "r" (&_argvec[2]) \
2456 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2458 lval = (__typeof__(lval)) _res; \
2461 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2463 volatile OrigFn _orig = (orig); \
2464 volatile unsigned long _argvec[3+2]; \
2465 volatile unsigned long _res; \
2466 /* _argvec[0] holds current r2 across the call */ \
2467 _argvec[1] = (unsigned long)_orig.r2; \
2468 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2469 _argvec[2+1] = (unsigned long)arg1; \
2470 _argvec[2+2] = (unsigned long)arg2; \
2472 VALGRIND_ALIGN_STACK \
2474 "std 2,-16(11)\n\t" /* save tocptr */ \
2475 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2476 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2477 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2478 "ld 11, 0(11)\n\t" /* target->r11 */ \
2479 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2482 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2483 VALGRIND_RESTORE_STACK \
2484 : /*out*/ "=r" (_res) \
2485 : /*in*/ "r" (&_argvec[2]) \
2486 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2488 lval = (__typeof__(lval)) _res; \
2491 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2493 volatile OrigFn _orig = (orig); \
2494 volatile unsigned long _argvec[3+3]; \
2495 volatile unsigned long _res; \
2496 /* _argvec[0] holds current r2 across the call */ \
2497 _argvec[1] = (unsigned long)_orig.r2; \
2498 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2499 _argvec[2+1] = (unsigned long)arg1; \
2500 _argvec[2+2] = (unsigned long)arg2; \
2501 _argvec[2+3] = (unsigned long)arg3; \
2503 VALGRIND_ALIGN_STACK \
2505 "std 2,-16(11)\n\t" /* save tocptr */ \
2506 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2507 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2508 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2509 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2510 "ld 11, 0(11)\n\t" /* target->r11 */ \
2511 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2514 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2515 VALGRIND_RESTORE_STACK \
2516 : /*out*/ "=r" (_res) \
2517 : /*in*/ "r" (&_argvec[2]) \
2518 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2520 lval = (__typeof__(lval)) _res; \
2523 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2525 volatile OrigFn _orig = (orig); \
2526 volatile unsigned long _argvec[3+4]; \
2527 volatile unsigned long _res; \
2528 /* _argvec[0] holds current r2 across the call */ \
2529 _argvec[1] = (unsigned long)_orig.r2; \
2530 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2531 _argvec[2+1] = (unsigned long)arg1; \
2532 _argvec[2+2] = (unsigned long)arg2; \
2533 _argvec[2+3] = (unsigned long)arg3; \
2534 _argvec[2+4] = (unsigned long)arg4; \
2536 VALGRIND_ALIGN_STACK \
2538 "std 2,-16(11)\n\t" /* save tocptr */ \
2539 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2540 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2541 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2542 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2543 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2544 "ld 11, 0(11)\n\t" /* target->r11 */ \
2545 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2548 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2549 VALGRIND_RESTORE_STACK \
2550 : /*out*/ "=r" (_res) \
2551 : /*in*/ "r" (&_argvec[2]) \
2552 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2554 lval = (__typeof__(lval)) _res; \
2557 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2559 volatile OrigFn _orig = (orig); \
2560 volatile unsigned long _argvec[3+5]; \
2561 volatile unsigned long _res; \
2562 /* _argvec[0] holds current r2 across the call */ \
2563 _argvec[1] = (unsigned long)_orig.r2; \
2564 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2565 _argvec[2+1] = (unsigned long)arg1; \
2566 _argvec[2+2] = (unsigned long)arg2; \
2567 _argvec[2+3] = (unsigned long)arg3; \
2568 _argvec[2+4] = (unsigned long)arg4; \
2569 _argvec[2+5] = (unsigned long)arg5; \
2571 VALGRIND_ALIGN_STACK \
2573 "std 2,-16(11)\n\t" /* save tocptr */ \
2574 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2575 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2576 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2577 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2578 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2579 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2580 "ld 11, 0(11)\n\t" /* target->r11 */ \
2581 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2584 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2585 VALGRIND_RESTORE_STACK \
2586 : /*out*/ "=r" (_res) \
2587 : /*in*/ "r" (&_argvec[2]) \
2588 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2590 lval = (__typeof__(lval)) _res; \
2593 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2595 volatile OrigFn _orig = (orig); \
2596 volatile unsigned long _argvec[3+6]; \
2597 volatile unsigned long _res; \
2598 /* _argvec[0] holds current r2 across the call */ \
2599 _argvec[1] = (unsigned long)_orig.r2; \
2600 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2601 _argvec[2+1] = (unsigned long)arg1; \
2602 _argvec[2+2] = (unsigned long)arg2; \
2603 _argvec[2+3] = (unsigned long)arg3; \
2604 _argvec[2+4] = (unsigned long)arg4; \
2605 _argvec[2+5] = (unsigned long)arg5; \
2606 _argvec[2+6] = (unsigned long)arg6; \
2608 VALGRIND_ALIGN_STACK \
2610 "std 2,-16(11)\n\t" /* save tocptr */ \
2611 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2612 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2613 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2614 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2615 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2616 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2617 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2618 "ld 11, 0(11)\n\t" /* target->r11 */ \
2619 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2622 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2623 VALGRIND_RESTORE_STACK \
2624 : /*out*/ "=r" (_res) \
2625 : /*in*/ "r" (&_argvec[2]) \
2626 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2628 lval = (__typeof__(lval)) _res; \
2631 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2634 volatile OrigFn _orig = (orig); \
2635 volatile unsigned long _argvec[3+7]; \
2636 volatile unsigned long _res; \
2637 /* _argvec[0] holds current r2 across the call */ \
2638 _argvec[1] = (unsigned long)_orig.r2; \
2639 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2640 _argvec[2+1] = (unsigned long)arg1; \
2641 _argvec[2+2] = (unsigned long)arg2; \
2642 _argvec[2+3] = (unsigned long)arg3; \
2643 _argvec[2+4] = (unsigned long)arg4; \
2644 _argvec[2+5] = (unsigned long)arg5; \
2645 _argvec[2+6] = (unsigned long)arg6; \
2646 _argvec[2+7] = (unsigned long)arg7; \
2648 VALGRIND_ALIGN_STACK \
2650 "std 2,-16(11)\n\t" /* save tocptr */ \
2651 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2652 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2653 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2654 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2655 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2656 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2657 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2658 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2659 "ld 11, 0(11)\n\t" /* target->r11 */ \
2660 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2663 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2664 VALGRIND_RESTORE_STACK \
2665 : /*out*/ "=r" (_res) \
2666 : /*in*/ "r" (&_argvec[2]) \
2667 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2669 lval = (__typeof__(lval)) _res; \
2672 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2675 volatile OrigFn _orig = (orig); \
2676 volatile unsigned long _argvec[3+8]; \
2677 volatile unsigned long _res; \
2678 /* _argvec[0] holds current r2 across the call */ \
2679 _argvec[1] = (unsigned long)_orig.r2; \
2680 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2681 _argvec[2+1] = (unsigned long)arg1; \
2682 _argvec[2+2] = (unsigned long)arg2; \
2683 _argvec[2+3] = (unsigned long)arg3; \
2684 _argvec[2+4] = (unsigned long)arg4; \
2685 _argvec[2+5] = (unsigned long)arg5; \
2686 _argvec[2+6] = (unsigned long)arg6; \
2687 _argvec[2+7] = (unsigned long)arg7; \
2688 _argvec[2+8] = (unsigned long)arg8; \
2690 VALGRIND_ALIGN_STACK \
2692 "std 2,-16(11)\n\t" /* save tocptr */ \
2693 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2694 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2695 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2696 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2697 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2698 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2699 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2700 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2701 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2702 "ld 11, 0(11)\n\t" /* target->r11 */ \
2703 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2706 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2707 VALGRIND_RESTORE_STACK \
2708 : /*out*/ "=r" (_res) \
2709 : /*in*/ "r" (&_argvec[2]) \
2710 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2712 lval = (__typeof__(lval)) _res; \
2715 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2718 volatile OrigFn _orig = (orig); \
2719 volatile unsigned long _argvec[3+9]; \
2720 volatile unsigned long _res; \
2721 /* _argvec[0] holds current r2 across the call */ \
2722 _argvec[1] = (unsigned long)_orig.r2; \
2723 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2724 _argvec[2+1] = (unsigned long)arg1; \
2725 _argvec[2+2] = (unsigned long)arg2; \
2726 _argvec[2+3] = (unsigned long)arg3; \
2727 _argvec[2+4] = (unsigned long)arg4; \
2728 _argvec[2+5] = (unsigned long)arg5; \
2729 _argvec[2+6] = (unsigned long)arg6; \
2730 _argvec[2+7] = (unsigned long)arg7; \
2731 _argvec[2+8] = (unsigned long)arg8; \
2732 _argvec[2+9] = (unsigned long)arg9; \
2734 VALGRIND_ALIGN_STACK \
2736 "std 2,-16(11)\n\t" /* save tocptr */ \
2737 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2738 "addi 1,1,-128\n\t" /* expand stack frame */ \
2741 "std 3,112(1)\n\t" /* arg9 -> stack */ \
2743 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2744 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2745 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2746 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2747 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2748 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2749 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2750 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2751 "ld 11, 0(11)\n\t" /* target->r11 */ \
2752 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2755 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2756 VALGRIND_RESTORE_STACK \
2757 : /*out*/ "=r" (_res) \
2758 : /*in*/ "r" (&_argvec[2]) \
2759 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2761 lval = (__typeof__(lval)) _res; \
2764 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2765 arg7,arg8,arg9,arg10) \
2767 volatile OrigFn _orig = (orig); \
2768 volatile unsigned long _argvec[3+10]; \
2769 volatile unsigned long _res; \
2770 /* _argvec[0] holds current r2 across the call */ \
2771 _argvec[1] = (unsigned long)_orig.r2; \
2772 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2773 _argvec[2+1] = (unsigned long)arg1; \
2774 _argvec[2+2] = (unsigned long)arg2; \
2775 _argvec[2+3] = (unsigned long)arg3; \
2776 _argvec[2+4] = (unsigned long)arg4; \
2777 _argvec[2+5] = (unsigned long)arg5; \
2778 _argvec[2+6] = (unsigned long)arg6; \
2779 _argvec[2+7] = (unsigned long)arg7; \
2780 _argvec[2+8] = (unsigned long)arg8; \
2781 _argvec[2+9] = (unsigned long)arg9; \
2782 _argvec[2+10] = (unsigned long)arg10; \
2784 VALGRIND_ALIGN_STACK \
2786 "std 2,-16(11)\n\t" /* save tocptr */ \
2787 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2788 "addi 1,1,-128\n\t" /* expand stack frame */ \
2791 "std 3,120(1)\n\t" /* arg10 -> stack */ \
2794 "std 3,112(1)\n\t" /* arg9 -> stack */ \
2796 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2797 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2798 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2799 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2800 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2801 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2802 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2803 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2804 "ld 11, 0(11)\n\t" /* target->r11 */ \
2805 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2808 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2809 VALGRIND_RESTORE_STACK \
2810 : /*out*/ "=r" (_res) \
2811 : /*in*/ "r" (&_argvec[2]) \
2812 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2814 lval = (__typeof__(lval)) _res; \
2817 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2818 arg7,arg8,arg9,arg10,arg11) \
2820 volatile OrigFn _orig = (orig); \
2821 volatile unsigned long _argvec[3+11]; \
2822 volatile unsigned long _res; \
2823 /* _argvec[0] holds current r2 across the call */ \
2824 _argvec[1] = (unsigned long)_orig.r2; \
2825 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2826 _argvec[2+1] = (unsigned long)arg1; \
2827 _argvec[2+2] = (unsigned long)arg2; \
2828 _argvec[2+3] = (unsigned long)arg3; \
2829 _argvec[2+4] = (unsigned long)arg4; \
2830 _argvec[2+5] = (unsigned long)arg5; \
2831 _argvec[2+6] = (unsigned long)arg6; \
2832 _argvec[2+7] = (unsigned long)arg7; \
2833 _argvec[2+8] = (unsigned long)arg8; \
2834 _argvec[2+9] = (unsigned long)arg9; \
2835 _argvec[2+10] = (unsigned long)arg10; \
2836 _argvec[2+11] = (unsigned long)arg11; \
2838 VALGRIND_ALIGN_STACK \
2840 "std 2,-16(11)\n\t" /* save tocptr */ \
2841 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2842 "addi 1,1,-144\n\t" /* expand stack frame */ \
2845 "std 3,128(1)\n\t" /* arg11 -> stack */ \
2848 "std 3,120(1)\n\t" /* arg10 -> stack */ \
2851 "std 3,112(1)\n\t" /* arg9 -> stack */ \
2853 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2854 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2855 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2856 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2857 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2858 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2859 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2860 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2861 "ld 11, 0(11)\n\t" /* target->r11 */ \
2862 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2865 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2866 VALGRIND_RESTORE_STACK \
2867 : /*out*/ "=r" (_res) \
2868 : /*in*/ "r" (&_argvec[2]) \
2869 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2871 lval = (__typeof__(lval)) _res; \
2874 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2875 arg7,arg8,arg9,arg10,arg11,arg12) \
2877 volatile OrigFn _orig = (orig); \
2878 volatile unsigned long _argvec[3+12]; \
2879 volatile unsigned long _res; \
2880 /* _argvec[0] holds current r2 across the call */ \
2881 _argvec[1] = (unsigned long)_orig.r2; \
2882 _argvec[2] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2883 _argvec[2+1] = (unsigned long)arg1; \
2884 _argvec[2+2] = (unsigned long)arg2; \
2885 _argvec[2+3] = (unsigned long)arg3; \
2886 _argvec[2+4] = (unsigned long)arg4; \
2887 _argvec[2+5] = (unsigned long)arg5; \
2888 _argvec[2+6] = (unsigned long)arg6; \
2889 _argvec[2+7] = (unsigned long)arg7; \
2890 _argvec[2+8] = (unsigned long)arg8; \
2891 _argvec[2+9] = (unsigned long)arg9; \
2892 _argvec[2+10] = (unsigned long)arg10; \
2893 _argvec[2+11] = (unsigned long)arg11; \
2894 _argvec[2+12] = (unsigned long)arg12; \
2896 VALGRIND_ALIGN_STACK \
2898 "std 2,-16(11)\n\t" /* save tocptr */ \
2899 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2900 "addi 1,1,-144\n\t" /* expand stack frame */ \
2903 "std 3,136(1)\n\t" /* arg12 -> stack */ \
2906 "std 3,128(1)\n\t" /* arg11 -> stack */ \
2909 "std 3,120(1)\n\t" /* arg10 -> stack */ \
2912 "std 3,112(1)\n\t" /* arg9 -> stack */ \
2914 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2915 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2916 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2917 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2918 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2919 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2920 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2921 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2922 "ld 11, 0(11)\n\t" /* target->r11 */ \
2923 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2926 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2927 VALGRIND_RESTORE_STACK \
2928 : /*out*/ "=r" (_res) \
2929 : /*in*/ "r" (&_argvec[2]) \
2930 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2932 lval = (__typeof__(lval)) _res; \
2935 #endif /* PLAT_ppc64_linux */
2937 /* ------------------------- arm-linux ------------------------- */
2939 #if defined(PLAT_arm_linux)
2941 /* These regs are trashed by the hidden call. */
2942 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
2944 /* Macros to save and align the stack before making a function
2945 call and restore it afterwards as gcc may not keep the stack
2946 pointer aligned if it doesn't realise calls are being made
2947 to other functions. */
2949 /* This is a bit tricky. We store the original stack pointer in r10
2950 as it is callee-saves. gcc doesn't allow the use of r11 for some
2951 reason. Also, we can't directly "bic" the stack pointer in thumb
2952 mode since r13 isn't an allowed register number in that context.
2953 So use r4 as a temporary, since that is about to get trashed
2954 anyway, just after each use of this macro. Side effect is we need
2955 to be very careful about any future changes, since
2956 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
2957 #define VALGRIND_ALIGN_STACK \
2960 "bic r4, r4, #7\n\t" /* clear low 3 bits: 8-byte-align (r4 is scratch, see note above) */ \
2962 #define VALGRIND_RESTORE_STACK \
2965 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned long) == 4. */
2968 #define CALL_FN_W_v(lval, orig) \
2970 volatile OrigFn _orig = (orig); \
2971 volatile unsigned long _argvec[1]; \
2972 volatile unsigned long _res; \
2973 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2975 VALGRIND_ALIGN_STACK \
2976 "ldr r4, [%1] \n\t" /* target->r4 */ \
2977 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2978 VALGRIND_RESTORE_STACK \
2980 : /*out*/ "=r" (_res) \
2981 : /*in*/ "0" (&_argvec[0]) \
2982 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
2984 lval = (__typeof__(lval)) _res; \
2987 #define CALL_FN_W_W(lval, orig, arg1) \
2989 volatile OrigFn _orig = (orig); \
2990 volatile unsigned long _argvec[2]; \
2991 volatile unsigned long _res; \
2992 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
2993 _argvec[1] = (unsigned long)(arg1); \
2995 VALGRIND_ALIGN_STACK \
2996 "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
2997 "ldr r4, [%1] \n\t" /* target->r4 */ \
2998 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
2999 VALGRIND_RESTORE_STACK \
3001 : /*out*/ "=r" (_res) \
3002 : /*in*/ "0" (&_argvec[0]) \
3003 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3005 lval = (__typeof__(lval)) _res; \
3008 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3010 volatile OrigFn _orig = (orig); \
3011 volatile unsigned long _argvec[3]; \
3012 volatile unsigned long _res; \
3013 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
3014 _argvec[1] = (unsigned long)(arg1); \
3015 _argvec[2] = (unsigned long)(arg2); \
3017 VALGRIND_ALIGN_STACK \
3018 "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
3019 "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
3020 "ldr r4, [%1] \n\t" /* target->r4 */ \
3021 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3022 VALGRIND_RESTORE_STACK \
3024 : /*out*/ "=r" (_res) \
3025 : /*in*/ "0" (&_argvec[0]) \
3026 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3028 lval = (__typeof__(lval)) _res; \
3031 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3033 volatile OrigFn _orig = (orig); \
3034 volatile unsigned long _argvec[4]; \
3035 volatile unsigned long _res; \
3036 _argvec[0] = (unsigned long)_orig.nraddr; /* target fn addr */ \
3037 _argvec[1] = (unsigned long)(arg1); \
3038 _argvec[2] = (unsigned long)(arg2); \
3039 _argvec[3] = (unsigned long)(arg3); \
3041 VALGRIND_ALIGN_STACK \
3042 "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
3043 "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
3044 "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
3045 "ldr r4, [%1] \n\t" /* target->r4 */ \
3046 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3047 VALGRIND_RESTORE_STACK \
3049 : /*out*/ "=r" (_res) \
3050 : /*in*/ "0" (&_argvec[0]) \
3051 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3053 lval = (__typeof__(lval)) _res; \
3056 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3058 volatile OrigFn _orig = (orig); \
3059 volatile unsigned long _argvec[5]; \
3060 volatile unsigned long _res; \
3061 _argvec[0] = (unsigned long)_orig.nraddr; \
3062 _argvec[1] = (unsigned long)(arg1); \
3063 _argvec[2] = (unsigned long)(arg2); \
3064 _argvec[3] = (unsigned long)(arg3); \
3065 _argvec[4] = (unsigned long)(arg4); \
3067 VALGRIND_ALIGN_STACK \
3068 "ldr r0, [%1, #4] \n\t" \
3069 "ldr r1, [%1, #8] \n\t" \
3070 "ldr r2, [%1, #12] \n\t" \
3071 "ldr r3, [%1, #16] \n\t" \
3072 "ldr r4, [%1] \n\t" /* target->r4 */ \
3073 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3074 VALGRIND_RESTORE_STACK \
3076 : /*out*/ "=r" (_res) \
3077 : /*in*/ "0" (&_argvec[0]) \
3078 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3080 lval = (__typeof__(lval)) _res; \
3083 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3085 volatile OrigFn _orig = (orig); \
3086 volatile unsigned long _argvec[6]; \
3087 volatile unsigned long _res; \
3088 _argvec[0] = (unsigned long)_orig.nraddr; \
3089 _argvec[1] = (unsigned long)(arg1); \
3090 _argvec[2] = (unsigned long)(arg2); \
3091 _argvec[3] = (unsigned long)(arg3); \
3092 _argvec[4] = (unsigned long)(arg4); \
3093 _argvec[5] = (unsigned long)(arg5); \
3095 VALGRIND_ALIGN_STACK \
3096 "sub sp, sp, #4 \n\t" \
3097 "ldr r0, [%1, #20] \n\t" \
3099 "ldr r0, [%1, #4] \n\t" \
3100 "ldr r1, [%1, #8] \n\t" \
3101 "ldr r2, [%1, #12] \n\t" \
3102 "ldr r3, [%1, #16] \n\t" \
3103 "ldr r4, [%1] \n\t" /* target->r4 */ \
3104 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3105 VALGRIND_RESTORE_STACK \
3107 : /*out*/ "=r" (_res) \
3108 : /*in*/ "0" (&_argvec[0]) \
3109 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3111 lval = (__typeof__(lval)) _res; \
3114 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3116 volatile OrigFn _orig = (orig); \
3117 volatile unsigned long _argvec[7]; \
3118 volatile unsigned long _res; \
3119 _argvec[0] = (unsigned long)_orig.nraddr; \
3120 _argvec[1] = (unsigned long)(arg1); \
3121 _argvec[2] = (unsigned long)(arg2); \
3122 _argvec[3] = (unsigned long)(arg3); \
3123 _argvec[4] = (unsigned long)(arg4); \
3124 _argvec[5] = (unsigned long)(arg5); \
3125 _argvec[6] = (unsigned long)(arg6); \
3127 VALGRIND_ALIGN_STACK \
3128 "ldr r0, [%1, #20] \n\t" \
3129 "ldr r1, [%1, #24] \n\t" \
3130 "push {r0, r1} \n\t" \
3131 "ldr r0, [%1, #4] \n\t" \
3132 "ldr r1, [%1, #8] \n\t" \
3133 "ldr r2, [%1, #12] \n\t" \
3134 "ldr r3, [%1, #16] \n\t" \
3135 "ldr r4, [%1] \n\t" /* target->r4 */ \
3136 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3137 VALGRIND_RESTORE_STACK \
3139 : /*out*/ "=r" (_res) \
3140 : /*in*/ "0" (&_argvec[0]) \
3141 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3143 lval = (__typeof__(lval)) _res; \
3146 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3149 volatile OrigFn _orig = (orig); \
3150 volatile unsigned long _argvec[8]; \
3151 volatile unsigned long _res; \
3152 _argvec[0] = (unsigned long)_orig.nraddr; \
3153 _argvec[1] = (unsigned long)(arg1); \
3154 _argvec[2] = (unsigned long)(arg2); \
3155 _argvec[3] = (unsigned long)(arg3); \
3156 _argvec[4] = (unsigned long)(arg4); \
3157 _argvec[5] = (unsigned long)(arg5); \
3158 _argvec[6] = (unsigned long)(arg6); \
3159 _argvec[7] = (unsigned long)(arg7); \
3161 VALGRIND_ALIGN_STACK \
3162 "sub sp, sp, #4 \n\t" \
3163 "ldr r0, [%1, #20] \n\t" \
3164 "ldr r1, [%1, #24] \n\t" \
3165 "ldr r2, [%1, #28] \n\t" \
3166 "push {r0, r1, r2} \n\t" \
3167 "ldr r0, [%1, #4] \n\t" \
3168 "ldr r1, [%1, #8] \n\t" \
3169 "ldr r2, [%1, #12] \n\t" \
3170 "ldr r3, [%1, #16] \n\t" \
3171 "ldr r4, [%1] \n\t" /* target->r4 */ \
3172 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3173 VALGRIND_RESTORE_STACK \
3175 : /*out*/ "=r" (_res) \
3176 : /*in*/ "0" (&_argvec[0]) \
3177 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3179 lval = (__typeof__(lval)) _res; \
3182 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3185 volatile OrigFn _orig = (orig); \
3186 volatile unsigned long _argvec[9]; \
3187 volatile unsigned long _res; \
3188 _argvec[0] = (unsigned long)_orig.nraddr; \
3189 _argvec[1] = (unsigned long)(arg1); \
3190 _argvec[2] = (unsigned long)(arg2); \
3191 _argvec[3] = (unsigned long)(arg3); \
3192 _argvec[4] = (unsigned long)(arg4); \
3193 _argvec[5] = (unsigned long)(arg5); \
3194 _argvec[6] = (unsigned long)(arg6); \
3195 _argvec[7] = (unsigned long)(arg7); \
3196 _argvec[8] = (unsigned long)(arg8); \
3198 VALGRIND_ALIGN_STACK \
3199 "ldr r0, [%1, #20] \n\t" \
3200 "ldr r1, [%1, #24] \n\t" \
3201 "ldr r2, [%1, #28] \n\t" \
3202 "ldr r3, [%1, #32] \n\t" \
3203 "push {r0, r1, r2, r3} \n\t" \
3204 "ldr r0, [%1, #4] \n\t" \
3205 "ldr r1, [%1, #8] \n\t" \
3206 "ldr r2, [%1, #12] \n\t" \
3207 "ldr r3, [%1, #16] \n\t" \
3208 "ldr r4, [%1] \n\t" /* target->r4 */ \
3209 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3210 VALGRIND_RESTORE_STACK \
3212 : /*out*/ "=r" (_res) \
3213 : /*in*/ "0" (&_argvec[0]) \
3214 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3216 lval = (__typeof__(lval)) _res; \
3219 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3222 volatile OrigFn _orig = (orig); \
3223 volatile unsigned long _argvec[10]; \
3224 volatile unsigned long _res; \
3225 _argvec[0] = (unsigned long)_orig.nraddr; \
3226 _argvec[1] = (unsigned long)(arg1); \
3227 _argvec[2] = (unsigned long)(arg2); \
3228 _argvec[3] = (unsigned long)(arg3); \
3229 _argvec[4] = (unsigned long)(arg4); \
3230 _argvec[5] = (unsigned long)(arg5); \
3231 _argvec[6] = (unsigned long)(arg6); \
3232 _argvec[7] = (unsigned long)(arg7); \
3233 _argvec[8] = (unsigned long)(arg8); \
3234 _argvec[9] = (unsigned long)(arg9); \
3236 VALGRIND_ALIGN_STACK \
3237 "sub sp, sp, #4 \n\t" \
3238 "ldr r0, [%1, #20] \n\t" \
3239 "ldr r1, [%1, #24] \n\t" \
3240 "ldr r2, [%1, #28] \n\t" \
3241 "ldr r3, [%1, #32] \n\t" \
3242 "ldr r4, [%1, #36] \n\t" \
3243 "push {r0, r1, r2, r3, r4} \n\t" \
3244 "ldr r0, [%1, #4] \n\t" \
3245 "ldr r1, [%1, #8] \n\t" \
3246 "ldr r2, [%1, #12] \n\t" \
3247 "ldr r3, [%1, #16] \n\t" \
3248 "ldr r4, [%1] \n\t" /* target->r4 */ \
3249 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3250 VALGRIND_RESTORE_STACK \
3252 : /*out*/ "=r" (_res) \
3253 : /*in*/ "0" (&_argvec[0]) \
3254 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3256 lval = (__typeof__(lval)) _res; \
3259 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3260 arg7,arg8,arg9,arg10) \
3262 volatile OrigFn _orig = (orig); \
3263 volatile unsigned long _argvec[11]; \
3264 volatile unsigned long _res; \
3265 _argvec[0] = (unsigned long)_orig.nraddr; \
3266 _argvec[1] = (unsigned long)(arg1); \
3267 _argvec[2] = (unsigned long)(arg2); \
3268 _argvec[3] = (unsigned long)(arg3); \
3269 _argvec[4] = (unsigned long)(arg4); \
3270 _argvec[5] = (unsigned long)(arg5); \
3271 _argvec[6] = (unsigned long)(arg6); \
3272 _argvec[7] = (unsigned long)(arg7); \
3273 _argvec[8] = (unsigned long)(arg8); \
3274 _argvec[9] = (unsigned long)(arg9); \
3275 _argvec[10] = (unsigned long)(arg10); \
3277 VALGRIND_ALIGN_STACK \
3278 "ldr r0, [%1, #40] \n\t" \
3280 "ldr r0, [%1, #20] \n\t" \
3281 "ldr r1, [%1, #24] \n\t" \
3282 "ldr r2, [%1, #28] \n\t" \
3283 "ldr r3, [%1, #32] \n\t" \
3284 "ldr r4, [%1, #36] \n\t" \
3285 "push {r0, r1, r2, r3, r4} \n\t" \
3286 "ldr r0, [%1, #4] \n\t" \
3287 "ldr r1, [%1, #8] \n\t" \
3288 "ldr r2, [%1, #12] \n\t" \
3289 "ldr r3, [%1, #16] \n\t" \
3290 "ldr r4, [%1] \n\t" /* target->r4 */ \
3291 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3292 VALGRIND_RESTORE_STACK \
3294 : /*out*/ "=r" (_res) \
3295 : /*in*/ "0" (&_argvec[0]) \
3296 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3298 lval = (__typeof__(lval)) _res; \
3301 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3302 arg6,arg7,arg8,arg9,arg10, \
3305 volatile OrigFn _orig = (orig); \
3306 volatile unsigned long _argvec[12]; \
3307 volatile unsigned long _res; \
3308 _argvec[0] = (unsigned long)_orig.nraddr; \
3309 _argvec[1] = (unsigned long)(arg1); \
3310 _argvec[2] = (unsigned long)(arg2); \
3311 _argvec[3] = (unsigned long)(arg3); \
3312 _argvec[4] = (unsigned long)(arg4); \
3313 _argvec[5] = (unsigned long)(arg5); \
3314 _argvec[6] = (unsigned long)(arg6); \
3315 _argvec[7] = (unsigned long)(arg7); \
3316 _argvec[8] = (unsigned long)(arg8); \
3317 _argvec[9] = (unsigned long)(arg9); \
3318 _argvec[10] = (unsigned long)(arg10); \
3319 _argvec[11] = (unsigned long)(arg11); \
3321 VALGRIND_ALIGN_STACK \
3322 "sub sp, sp, #4 \n\t" \
3323 "ldr r0, [%1, #40] \n\t" \
3324 "ldr r1, [%1, #44] \n\t" \
3325 "push {r0, r1} \n\t" \
3326 "ldr r0, [%1, #20] \n\t" \
3327 "ldr r1, [%1, #24] \n\t" \
3328 "ldr r2, [%1, #28] \n\t" \
3329 "ldr r3, [%1, #32] \n\t" \
3330 "ldr r4, [%1, #36] \n\t" \
3331 "push {r0, r1, r2, r3, r4} \n\t" \
3332 "ldr r0, [%1, #4] \n\t" \
3333 "ldr r1, [%1, #8] \n\t" \
3334 "ldr r2, [%1, #12] \n\t" \
3335 "ldr r3, [%1, #16] \n\t" \
3336 "ldr r4, [%1] \n\t" /* target->r4 */ \
3337 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3338 VALGRIND_RESTORE_STACK \
3340 : /*out*/ "=r" (_res) \
3341 : /*in*/ "0" (&_argvec[0]) \
3342 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3344 lval = (__typeof__(lval)) _res; \
3347 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3348 arg6,arg7,arg8,arg9,arg10, \
3351 volatile OrigFn _orig = (orig); \
3352 volatile unsigned long _argvec[13]; \
3353 volatile unsigned long _res; \
3354 _argvec[0] = (unsigned long)_orig.nraddr; \
3355 _argvec[1] = (unsigned long)(arg1); \
3356 _argvec[2] = (unsigned long)(arg2); \
3357 _argvec[3] = (unsigned long)(arg3); \
3358 _argvec[4] = (unsigned long)(arg4); \
3359 _argvec[5] = (unsigned long)(arg5); \
3360 _argvec[6] = (unsigned long)(arg6); \
3361 _argvec[7] = (unsigned long)(arg7); \
3362 _argvec[8] = (unsigned long)(arg8); \
3363 _argvec[9] = (unsigned long)(arg9); \
3364 _argvec[10] = (unsigned long)(arg10); \
3365 _argvec[11] = (unsigned long)(arg11); \
3366 _argvec[12] = (unsigned long)(arg12); \
3368 VALGRIND_ALIGN_STACK \
3369 "ldr r0, [%1, #40] \n\t" \
3370 "ldr r1, [%1, #44] \n\t" \
3371 "ldr r2, [%1, #48] \n\t" \
3372 "push {r0, r1, r2} \n\t" \
3373 "ldr r0, [%1, #20] \n\t" \
3374 "ldr r1, [%1, #24] \n\t" \
3375 "ldr r2, [%1, #28] \n\t" \
3376 "ldr r3, [%1, #32] \n\t" \
3377 "ldr r4, [%1, #36] \n\t" \
3378 "push {r0, r1, r2, r3, r4} \n\t" \
3379 "ldr r0, [%1, #4] \n\t" \
3380 "ldr r1, [%1, #8] \n\t" \
3381 "ldr r2, [%1, #12] \n\t" \
3382 "ldr r3, [%1, #16] \n\t" \
3383 "ldr r4, [%1] \n\t" /* target->r4 */ \
3384 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3385 VALGRIND_RESTORE_STACK \
3387 : /*out*/ "=r" (_res) \
3388 : /*in*/ "0" (&_argvec[0]) \
3389 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3391 lval = (__typeof__(lval)) _res; \
3394 #endif /* PLAT_arm_linux */
3396 /* ------------------------- s390x-linux ------------------------- */
3398 #if defined(PLAT_s390x_linux)
/* NOTE(review): this s390x section shows the same extraction damage as
   the arm section above — missing relative to upstream valgrind.h are
   (at least) the "#else" between the two __FRAME_POINTER /
   VALGRIND_CFI_PROLOGUE definitions, the do/while wrappers, the
   "__asm__ volatile(" openers, the lgr/lmg argument-load instructions,
   the stack-restore "aghi 15,+N" lines, and the terminators of several
   comments.  Visible skeleton of each CALL_FN_W_* macro: copy the
   target address and arguments into _argvec[], lower the stack pointer
   (r15) by the 160-byte register save area plus 8 bytes per
   stack-passed argument ("aghi 15,-160" / -168 / ... / -216), copy the
   excess arguments (beyond the five passed in r2-r6) onto the stack
   with "mvc", then call through r1 via VALGRIND_CALL_NOREDIR_R1.
   Macros with >= 5 args additionally clobber "6" (r6 carries arg5). */
3400 /* Similar workaround as amd64 (see above), but we use r11 as frame
3401 pointer and save the old r11 in r7. r11 might be used for
3402 argvec, therefore we copy argvec in r1 since r1 is clobbered
3403 after the call anyway. */
3404 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
3405 # define __FRAME_POINTER \
3406 ,"d"(__builtin_dwarf_cfa())
3407 # define VALGRIND_CFI_PROLOGUE \
3408 ".cfi_remember_state\n\t" \
3409 "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
3412 ".cfi_def_cfa r11, 0\n\t"
3413 # define VALGRIND_CFI_EPILOGUE \
3415 ".cfi_restore_state\n\t"
/* NOTE(review): an "#else" belongs here in upstream — without it the
   following pair re-defines the two macros unconditionally. */
3417 # define __FRAME_POINTER
3418 # define VALGRIND_CFI_PROLOGUE \
3420 # define VALGRIND_CFI_EPILOGUE
3423 /* Nb: On s390 the stack pointer is properly aligned *at all times*
3424 according to the s390 GCC maintainer. (The ABI specification is not
3425 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
3426 VALGRIND_RESTORE_STACK are not defined here. */
3428 /* These regs are trashed by the hidden call. Note that we overwrite
3429 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
3430 function a proper return address. All others are ABI defined call
3432 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
3433 "f0","f1","f2","f3","f4","f5","f6","f7"
3435 /* Nb: Although r11 is modified in the asm snippets below (inside
3436 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
3438 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
3440 (2) GCC will complain that r11 cannot appear inside a clobber section,
3441 when compiled with -O -fno-omit-frame-pointer
3444 #define CALL_FN_W_v(lval, orig) \
3446 volatile OrigFn _orig = (orig); \
3447 volatile unsigned long _argvec[1]; \
3448 volatile unsigned long _res; \
3449 _argvec[0] = (unsigned long)_orig.nraddr; \
3451 VALGRIND_CFI_PROLOGUE \
3452 "aghi 15,-160\n\t" \
3453 "lg 1, 0(1)\n\t" /* target->r1 */ \
3454 VALGRIND_CALL_NOREDIR_R1 \
3457 VALGRIND_CFI_EPILOGUE \
3458 : /*out*/ "=d" (_res) \
3459 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
3460 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3462 lval = (__typeof__(lval)) _res; \
3465 /* The call abi has the arguments in r2-r6 and stack */
3466 #define CALL_FN_W_W(lval, orig, arg1) \
3468 volatile OrigFn _orig = (orig); \
3469 volatile unsigned long _argvec[2]; \
3470 volatile unsigned long _res; \
3471 _argvec[0] = (unsigned long)_orig.nraddr; \
3472 _argvec[1] = (unsigned long)arg1; \
3474 VALGRIND_CFI_PROLOGUE \
3475 "aghi 15,-160\n\t" \
3478 VALGRIND_CALL_NOREDIR_R1 \
3481 VALGRIND_CFI_EPILOGUE \
3482 : /*out*/ "=d" (_res) \
3483 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3484 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3486 lval = (__typeof__(lval)) _res; \
3489 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
3491 volatile OrigFn _orig = (orig); \
3492 volatile unsigned long _argvec[3]; \
3493 volatile unsigned long _res; \
3494 _argvec[0] = (unsigned long)_orig.nraddr; \
3495 _argvec[1] = (unsigned long)arg1; \
3496 _argvec[2] = (unsigned long)arg2; \
3498 VALGRIND_CFI_PROLOGUE \
3499 "aghi 15,-160\n\t" \
3503 VALGRIND_CALL_NOREDIR_R1 \
3506 VALGRIND_CFI_EPILOGUE \
3507 : /*out*/ "=d" (_res) \
3508 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3509 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3511 lval = (__typeof__(lval)) _res; \
3514 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
3516 volatile OrigFn _orig = (orig); \
3517 volatile unsigned long _argvec[4]; \
3518 volatile unsigned long _res; \
3519 _argvec[0] = (unsigned long)_orig.nraddr; \
3520 _argvec[1] = (unsigned long)arg1; \
3521 _argvec[2] = (unsigned long)arg2; \
3522 _argvec[3] = (unsigned long)arg3; \
3524 VALGRIND_CFI_PROLOGUE \
3525 "aghi 15,-160\n\t" \
3530 VALGRIND_CALL_NOREDIR_R1 \
3533 VALGRIND_CFI_EPILOGUE \
3534 : /*out*/ "=d" (_res) \
3535 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3536 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3538 lval = (__typeof__(lval)) _res; \
3541 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
3543 volatile OrigFn _orig = (orig); \
3544 volatile unsigned long _argvec[5]; \
3545 volatile unsigned long _res; \
3546 _argvec[0] = (unsigned long)_orig.nraddr; \
3547 _argvec[1] = (unsigned long)arg1; \
3548 _argvec[2] = (unsigned long)arg2; \
3549 _argvec[3] = (unsigned long)arg3; \
3550 _argvec[4] = (unsigned long)arg4; \
3552 VALGRIND_CFI_PROLOGUE \
3553 "aghi 15,-160\n\t" \
3559 VALGRIND_CALL_NOREDIR_R1 \
3562 VALGRIND_CFI_EPILOGUE \
3563 : /*out*/ "=d" (_res) \
3564 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3565 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
3567 lval = (__typeof__(lval)) _res; \
3570 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
3572 volatile OrigFn _orig = (orig); \
3573 volatile unsigned long _argvec[6]; \
3574 volatile unsigned long _res; \
3575 _argvec[0] = (unsigned long)_orig.nraddr; \
3576 _argvec[1] = (unsigned long)arg1; \
3577 _argvec[2] = (unsigned long)arg2; \
3578 _argvec[3] = (unsigned long)arg3; \
3579 _argvec[4] = (unsigned long)arg4; \
3580 _argvec[5] = (unsigned long)arg5; \
3582 VALGRIND_CFI_PROLOGUE \
3583 "aghi 15,-160\n\t" \
3590 VALGRIND_CALL_NOREDIR_R1 \
3593 VALGRIND_CFI_EPILOGUE \
3594 : /*out*/ "=d" (_res) \
3595 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3596 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3598 lval = (__typeof__(lval)) _res; \
3601 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3604 volatile OrigFn _orig = (orig); \
3605 volatile unsigned long _argvec[7]; \
3606 volatile unsigned long _res; \
3607 _argvec[0] = (unsigned long)_orig.nraddr; \
3608 _argvec[1] = (unsigned long)arg1; \
3609 _argvec[2] = (unsigned long)arg2; \
3610 _argvec[3] = (unsigned long)arg3; \
3611 _argvec[4] = (unsigned long)arg4; \
3612 _argvec[5] = (unsigned long)arg5; \
3613 _argvec[6] = (unsigned long)arg6; \
3615 VALGRIND_CFI_PROLOGUE \
3616 "aghi 15,-168\n\t" \
3622 "mvc 160(8,15), 48(1)\n\t" \
3624 VALGRIND_CALL_NOREDIR_R1 \
3627 VALGRIND_CFI_EPILOGUE \
3628 : /*out*/ "=d" (_res) \
3629 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3630 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3632 lval = (__typeof__(lval)) _res; \
3635 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3638 volatile OrigFn _orig = (orig); \
3639 volatile unsigned long _argvec[8]; \
3640 volatile unsigned long _res; \
3641 _argvec[0] = (unsigned long)_orig.nraddr; \
3642 _argvec[1] = (unsigned long)arg1; \
3643 _argvec[2] = (unsigned long)arg2; \
3644 _argvec[3] = (unsigned long)arg3; \
3645 _argvec[4] = (unsigned long)arg4; \
3646 _argvec[5] = (unsigned long)arg5; \
3647 _argvec[6] = (unsigned long)arg6; \
3648 _argvec[7] = (unsigned long)arg7; \
3650 VALGRIND_CFI_PROLOGUE \
3651 "aghi 15,-176\n\t" \
3657 "mvc 160(8,15), 48(1)\n\t" \
3658 "mvc 168(8,15), 56(1)\n\t" \
3660 VALGRIND_CALL_NOREDIR_R1 \
3663 VALGRIND_CFI_EPILOGUE \
3664 : /*out*/ "=d" (_res) \
3665 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3666 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3668 lval = (__typeof__(lval)) _res; \
3671 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3674 volatile OrigFn _orig = (orig); \
3675 volatile unsigned long _argvec[9]; \
3676 volatile unsigned long _res; \
3677 _argvec[0] = (unsigned long)_orig.nraddr; \
3678 _argvec[1] = (unsigned long)arg1; \
3679 _argvec[2] = (unsigned long)arg2; \
3680 _argvec[3] = (unsigned long)arg3; \
3681 _argvec[4] = (unsigned long)arg4; \
3682 _argvec[5] = (unsigned long)arg5; \
3683 _argvec[6] = (unsigned long)arg6; \
3684 _argvec[7] = (unsigned long)arg7; \
3685 _argvec[8] = (unsigned long)arg8; \
3687 VALGRIND_CFI_PROLOGUE \
3688 "aghi 15,-184\n\t" \
3694 "mvc 160(8,15), 48(1)\n\t" \
3695 "mvc 168(8,15), 56(1)\n\t" \
3696 "mvc 176(8,15), 64(1)\n\t" \
3698 VALGRIND_CALL_NOREDIR_R1 \
3701 VALGRIND_CFI_EPILOGUE \
3702 : /*out*/ "=d" (_res) \
3703 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3704 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3706 lval = (__typeof__(lval)) _res; \
3709 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3710 arg6, arg7 ,arg8, arg9) \
3712 volatile OrigFn _orig = (orig); \
3713 volatile unsigned long _argvec[10]; \
3714 volatile unsigned long _res; \
3715 _argvec[0] = (unsigned long)_orig.nraddr; \
3716 _argvec[1] = (unsigned long)arg1; \
3717 _argvec[2] = (unsigned long)arg2; \
3718 _argvec[3] = (unsigned long)arg3; \
3719 _argvec[4] = (unsigned long)arg4; \
3720 _argvec[5] = (unsigned long)arg5; \
3721 _argvec[6] = (unsigned long)arg6; \
3722 _argvec[7] = (unsigned long)arg7; \
3723 _argvec[8] = (unsigned long)arg8; \
3724 _argvec[9] = (unsigned long)arg9; \
3726 VALGRIND_CFI_PROLOGUE \
3727 "aghi 15,-192\n\t" \
3733 "mvc 160(8,15), 48(1)\n\t" \
3734 "mvc 168(8,15), 56(1)\n\t" \
3735 "mvc 176(8,15), 64(1)\n\t" \
3736 "mvc 184(8,15), 72(1)\n\t" \
3738 VALGRIND_CALL_NOREDIR_R1 \
3741 VALGRIND_CFI_EPILOGUE \
3742 : /*out*/ "=d" (_res) \
3743 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3744 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3746 lval = (__typeof__(lval)) _res; \
3749 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3750 arg6, arg7 ,arg8, arg9, arg10) \
3752 volatile OrigFn _orig = (orig); \
3753 volatile unsigned long _argvec[11]; \
3754 volatile unsigned long _res; \
3755 _argvec[0] = (unsigned long)_orig.nraddr; \
3756 _argvec[1] = (unsigned long)arg1; \
3757 _argvec[2] = (unsigned long)arg2; \
3758 _argvec[3] = (unsigned long)arg3; \
3759 _argvec[4] = (unsigned long)arg4; \
3760 _argvec[5] = (unsigned long)arg5; \
3761 _argvec[6] = (unsigned long)arg6; \
3762 _argvec[7] = (unsigned long)arg7; \
3763 _argvec[8] = (unsigned long)arg8; \
3764 _argvec[9] = (unsigned long)arg9; \
3765 _argvec[10] = (unsigned long)arg10; \
3767 VALGRIND_CFI_PROLOGUE \
3768 "aghi 15,-200\n\t" \
3774 "mvc 160(8,15), 48(1)\n\t" \
3775 "mvc 168(8,15), 56(1)\n\t" \
3776 "mvc 176(8,15), 64(1)\n\t" \
3777 "mvc 184(8,15), 72(1)\n\t" \
3778 "mvc 192(8,15), 80(1)\n\t" \
3780 VALGRIND_CALL_NOREDIR_R1 \
3783 VALGRIND_CFI_EPILOGUE \
3784 : /*out*/ "=d" (_res) \
3785 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3786 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3788 lval = (__typeof__(lval)) _res; \
3791 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3792 arg6, arg7 ,arg8, arg9, arg10, arg11) \
3794 volatile OrigFn _orig = (orig); \
3795 volatile unsigned long _argvec[12]; \
3796 volatile unsigned long _res; \
3797 _argvec[0] = (unsigned long)_orig.nraddr; \
3798 _argvec[1] = (unsigned long)arg1; \
3799 _argvec[2] = (unsigned long)arg2; \
3800 _argvec[3] = (unsigned long)arg3; \
3801 _argvec[4] = (unsigned long)arg4; \
3802 _argvec[5] = (unsigned long)arg5; \
3803 _argvec[6] = (unsigned long)arg6; \
3804 _argvec[7] = (unsigned long)arg7; \
3805 _argvec[8] = (unsigned long)arg8; \
3806 _argvec[9] = (unsigned long)arg9; \
3807 _argvec[10] = (unsigned long)arg10; \
3808 _argvec[11] = (unsigned long)arg11; \
3810 VALGRIND_CFI_PROLOGUE \
3811 "aghi 15,-208\n\t" \
3817 "mvc 160(8,15), 48(1)\n\t" \
3818 "mvc 168(8,15), 56(1)\n\t" \
3819 "mvc 176(8,15), 64(1)\n\t" \
3820 "mvc 184(8,15), 72(1)\n\t" \
3821 "mvc 192(8,15), 80(1)\n\t" \
3822 "mvc 200(8,15), 88(1)\n\t" \
3824 VALGRIND_CALL_NOREDIR_R1 \
3827 VALGRIND_CFI_EPILOGUE \
3828 : /*out*/ "=d" (_res) \
3829 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3830 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3832 lval = (__typeof__(lval)) _res; \
3835 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
3836 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
3838 volatile OrigFn _orig = (orig); \
3839 volatile unsigned long _argvec[13]; \
3840 volatile unsigned long _res; \
3841 _argvec[0] = (unsigned long)_orig.nraddr; \
3842 _argvec[1] = (unsigned long)arg1; \
3843 _argvec[2] = (unsigned long)arg2; \
3844 _argvec[3] = (unsigned long)arg3; \
3845 _argvec[4] = (unsigned long)arg4; \
3846 _argvec[5] = (unsigned long)arg5; \
3847 _argvec[6] = (unsigned long)arg6; \
3848 _argvec[7] = (unsigned long)arg7; \
3849 _argvec[8] = (unsigned long)arg8; \
3850 _argvec[9] = (unsigned long)arg9; \
3851 _argvec[10] = (unsigned long)arg10; \
3852 _argvec[11] = (unsigned long)arg11; \
3853 _argvec[12] = (unsigned long)arg12; \
3855 VALGRIND_CFI_PROLOGUE \
3856 "aghi 15,-216\n\t" \
3862 "mvc 160(8,15), 48(1)\n\t" \
3863 "mvc 168(8,15), 56(1)\n\t" \
3864 "mvc 176(8,15), 64(1)\n\t" \
3865 "mvc 184(8,15), 72(1)\n\t" \
3866 "mvc 192(8,15), 80(1)\n\t" \
3867 "mvc 200(8,15), 88(1)\n\t" \
3868 "mvc 208(8,15), 96(1)\n\t" \
3870 VALGRIND_CALL_NOREDIR_R1 \
3873 VALGRIND_CFI_EPILOGUE \
3874 : /*out*/ "=d" (_res) \
3875 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
3876 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
3878 lval = (__typeof__(lval)) _res; \
3882 #endif /* PLAT_s390x_linux */
3884 /* ------------------------- mips-linux ------------------------- */
3886 #if defined(PLAT_mips32_linux)
/* NOTE(review): same extraction damage here as in the sections above —
   the "__asm__ volatile(" openers, do/while wrappers, the result move
   into %0, the tail of the __CALLER_SAVED_REGS list, and some "sw"
   spill lines are missing relative to upstream valgrind.h; compare with
   the original before use.  Visible skeleton of each CALL_FN_W_* macro:
   save $gp and $ra below the stack pointer, make room for the o32
   outgoing-argument area, load $a0-$a3 from _argvec[1..4] (spilling
   any further arguments to 16($sp) onward via $a0), load the target
   from _argvec[0] into $t9, and call via VALGRIND_CALL_NOREDIR_T9.
   The stack adjustments inconsistently mix "$29" and its alias "$sp"
   (e.g. CALL_FN_W_5W restores with "addu $29, $29, 24" but then
   "addu $sp, $sp, 8") — same register, but worth normalising upstream. */
3888 /* These regs are trashed by the hidden call. */
3889 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
3890 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
3893 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
3896 #define CALL_FN_W_v(lval, orig) \
3898 volatile OrigFn _orig = (orig); \
3899 volatile unsigned long _argvec[1]; \
3900 volatile unsigned long _res; \
3901 _argvec[0] = (unsigned long)_orig.nraddr; \
3903 "subu $29, $29, 8 \n\t" \
3904 "sw $gp, 0($sp) \n\t" \
3905 "sw $ra, 4($sp) \n\t" \
3906 "subu $29, $29, 16 \n\t" \
3907 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3908 VALGRIND_CALL_NOREDIR_T9 \
3909 "addu $29, $29, 16\n\t" \
3910 "lw $gp, 0($sp) \n\t" \
3911 "lw $ra, 4($sp) \n\t" \
3912 "addu $29, $29, 8 \n\t" \
3914 : /*out*/ "=r" (_res) \
3915 : /*in*/ "0" (&_argvec[0]) \
3916 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3918 lval = (__typeof__(lval)) _res; \
3921 #define CALL_FN_W_W(lval, orig, arg1) \
3923 volatile OrigFn _orig = (orig); \
3924 volatile unsigned long _argvec[2]; \
3925 volatile unsigned long _res; \
3926 _argvec[0] = (unsigned long)_orig.nraddr; \
3927 _argvec[1] = (unsigned long)(arg1); \
3929 "subu $29, $29, 8 \n\t" \
3930 "sw $gp, 0($sp) \n\t" \
3931 "sw $ra, 4($sp) \n\t" \
3932 "subu $29, $29, 16 \n\t" \
3933 "lw $a0, 4(%1) \n\t" /* arg1*/ \
3934 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3935 VALGRIND_CALL_NOREDIR_T9 \
3936 "addu $29, $29, 16 \n\t" \
3937 "lw $gp, 0($sp) \n\t" \
3938 "lw $ra, 4($sp) \n\t" \
3939 "addu $29, $29, 8 \n\t" \
3941 : /*out*/ "=r" (_res) \
3942 : /*in*/ "0" (&_argvec[0]) \
3943 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3945 lval = (__typeof__(lval)) _res; \
3948 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3950 volatile OrigFn _orig = (orig); \
3951 volatile unsigned long _argvec[3]; \
3952 volatile unsigned long _res; \
3953 _argvec[0] = (unsigned long)_orig.nraddr; \
3954 _argvec[1] = (unsigned long)(arg1); \
3955 _argvec[2] = (unsigned long)(arg2); \
3957 "subu $29, $29, 8 \n\t" \
3958 "sw $gp, 0($sp) \n\t" \
3959 "sw $ra, 4($sp) \n\t" \
3960 "subu $29, $29, 16 \n\t" \
3961 "lw $a0, 4(%1) \n\t" \
3962 "lw $a1, 8(%1) \n\t" \
3963 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3964 VALGRIND_CALL_NOREDIR_T9 \
3965 "addu $29, $29, 16 \n\t" \
3966 "lw $gp, 0($sp) \n\t" \
3967 "lw $ra, 4($sp) \n\t" \
3968 "addu $29, $29, 8 \n\t" \
3970 : /*out*/ "=r" (_res) \
3971 : /*in*/ "0" (&_argvec[0]) \
3972 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
3974 lval = (__typeof__(lval)) _res; \
3977 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3979 volatile OrigFn _orig = (orig); \
3980 volatile unsigned long _argvec[4]; \
3981 volatile unsigned long _res; \
3982 _argvec[0] = (unsigned long)_orig.nraddr; \
3983 _argvec[1] = (unsigned long)(arg1); \
3984 _argvec[2] = (unsigned long)(arg2); \
3985 _argvec[3] = (unsigned long)(arg3); \
3987 "subu $29, $29, 8 \n\t" \
3988 "sw $gp, 0($sp) \n\t" \
3989 "sw $ra, 4($sp) \n\t" \
3990 "subu $29, $29, 16 \n\t" \
3991 "lw $a0, 4(%1) \n\t" \
3992 "lw $a1, 8(%1) \n\t" \
3993 "lw $a2, 12(%1) \n\t" \
3994 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
3995 VALGRIND_CALL_NOREDIR_T9 \
3996 "addu $29, $29, 16 \n\t" \
3997 "lw $gp, 0($sp) \n\t" \
3998 "lw $ra, 4($sp) \n\t" \
3999 "addu $29, $29, 8 \n\t" \
4001 : /*out*/ "=r" (_res) \
4002 : /*in*/ "0" (&_argvec[0]) \
4003 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4005 lval = (__typeof__(lval)) _res; \
4008 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4010 volatile OrigFn _orig = (orig); \
4011 volatile unsigned long _argvec[5]; \
4012 volatile unsigned long _res; \
4013 _argvec[0] = (unsigned long)_orig.nraddr; \
4014 _argvec[1] = (unsigned long)(arg1); \
4015 _argvec[2] = (unsigned long)(arg2); \
4016 _argvec[3] = (unsigned long)(arg3); \
4017 _argvec[4] = (unsigned long)(arg4); \
4019 "subu $29, $29, 8 \n\t" \
4020 "sw $gp, 0($sp) \n\t" \
4021 "sw $ra, 4($sp) \n\t" \
4022 "subu $29, $29, 16 \n\t" \
4023 "lw $a0, 4(%1) \n\t" \
4024 "lw $a1, 8(%1) \n\t" \
4025 "lw $a2, 12(%1) \n\t" \
4026 "lw $a3, 16(%1) \n\t" \
4027 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4028 VALGRIND_CALL_NOREDIR_T9 \
4029 "addu $29, $29, 16 \n\t" \
4030 "lw $gp, 0($sp) \n\t" \
4031 "lw $ra, 4($sp) \n\t" \
4032 "addu $29, $29, 8 \n\t" \
4034 : /*out*/ "=r" (_res) \
4035 : /*in*/ "0" (&_argvec[0]) \
4036 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4038 lval = (__typeof__(lval)) _res; \
4041 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4043 volatile OrigFn _orig = (orig); \
4044 volatile unsigned long _argvec[6]; \
4045 volatile unsigned long _res; \
4046 _argvec[0] = (unsigned long)_orig.nraddr; \
4047 _argvec[1] = (unsigned long)(arg1); \
4048 _argvec[2] = (unsigned long)(arg2); \
4049 _argvec[3] = (unsigned long)(arg3); \
4050 _argvec[4] = (unsigned long)(arg4); \
4051 _argvec[5] = (unsigned long)(arg5); \
4053 "subu $29, $29, 8 \n\t" \
4054 "sw $gp, 0($sp) \n\t" \
4055 "sw $ra, 4($sp) \n\t" \
4056 "lw $a0, 20(%1) \n\t" \
4057 "subu $sp, $sp, 24\n\t" \
4058 "sw $a0, 16($sp) \n\t" \
4059 "lw $a0, 4(%1) \n\t" \
4060 "lw $a1, 8(%1) \n\t" \
4061 "lw $a2, 12(%1) \n\t" \
4062 "lw $a3, 16(%1) \n\t" \
4063 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4064 VALGRIND_CALL_NOREDIR_T9 \
4065 "addu $29, $29, 24 \n\t" \
4066 "lw $gp, 0($sp) \n\t" \
4067 "lw $ra, 4($sp) \n\t" \
4068 "addu $sp, $sp, 8 \n\t" \
4070 : /*out*/ "=r" (_res) \
4071 : /*in*/ "0" (&_argvec[0]) \
4072 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4074 lval = (__typeof__(lval)) _res; \
4076 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4078 volatile OrigFn _orig = (orig); \
4079 volatile unsigned long _argvec[7]; \
4080 volatile unsigned long _res; \
4081 _argvec[0] = (unsigned long)_orig.nraddr; \
4082 _argvec[1] = (unsigned long)(arg1); \
4083 _argvec[2] = (unsigned long)(arg2); \
4084 _argvec[3] = (unsigned long)(arg3); \
4085 _argvec[4] = (unsigned long)(arg4); \
4086 _argvec[5] = (unsigned long)(arg5); \
4087 _argvec[6] = (unsigned long)(arg6); \
4089 "subu $29, $29, 8 \n\t" \
4090 "sw $gp, 0($sp) \n\t" \
4091 "sw $ra, 4($sp) \n\t" \
4092 "lw $a0, 20(%1) \n\t" \
4093 "subu $sp, $sp, 32\n\t" \
4094 "sw $a0, 16($sp) \n\t" \
4095 "lw $a0, 24(%1) \n\t" \
4097 "sw $a0, 20($sp) \n\t" \
4098 "lw $a0, 4(%1) \n\t" \
4099 "lw $a1, 8(%1) \n\t" \
4100 "lw $a2, 12(%1) \n\t" \
4101 "lw $a3, 16(%1) \n\t" \
4102 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4103 VALGRIND_CALL_NOREDIR_T9 \
4104 "addu $sp, $sp, 32 \n\t" \
4105 "lw $gp, 0($sp) \n\t" \
4106 "lw $ra, 4($sp) \n\t" \
4107 "addu $sp, $sp, 8 \n\t" \
4109 : /*out*/ "=r" (_res) \
4110 : /*in*/ "0" (&_argvec[0]) \
4111 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4113 lval = (__typeof__(lval)) _res; \
4116 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4119 volatile OrigFn _orig = (orig); \
4120 volatile unsigned long _argvec[8]; \
4121 volatile unsigned long _res; \
4122 _argvec[0] = (unsigned long)_orig.nraddr; \
4123 _argvec[1] = (unsigned long)(arg1); \
4124 _argvec[2] = (unsigned long)(arg2); \
4125 _argvec[3] = (unsigned long)(arg3); \
4126 _argvec[4] = (unsigned long)(arg4); \
4127 _argvec[5] = (unsigned long)(arg5); \
4128 _argvec[6] = (unsigned long)(arg6); \
4129 _argvec[7] = (unsigned long)(arg7); \
4131 "subu $29, $29, 8 \n\t" \
4132 "sw $gp, 0($sp) \n\t" \
4133 "sw $ra, 4($sp) \n\t" \
4134 "lw $a0, 20(%1) \n\t" \
4135 "subu $sp, $sp, 32\n\t" \
4136 "sw $a0, 16($sp) \n\t" \
4137 "lw $a0, 24(%1) \n\t" \
4138 "sw $a0, 20($sp) \n\t" \
4139 "lw $a0, 28(%1) \n\t" \
4140 "sw $a0, 24($sp) \n\t" \
4141 "lw $a0, 4(%1) \n\t" \
4142 "lw $a1, 8(%1) \n\t" \
4143 "lw $a2, 12(%1) \n\t" \
4144 "lw $a3, 16(%1) \n\t" \
4145 "lw $t9, 0(%1) \n\t" /* target->t9 */ \
4146 VALGRIND_CALL_NOREDIR_T9 \
4147 "addu $sp, $sp, 32 \n\t" \
4148 "lw $gp, 0($sp) \n\t" \
4149 "lw $ra, 4($sp) \n\t" \
4150 "addu $sp, $sp, 8 \n\t" \
4152 : /*out*/ "=r" (_res) \
4153 : /*in*/ "0" (&_argvec[0]) \
4154 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS \
4156 lval = (__typeof__(lval)) _res; \
/* Call an 8-arg word function via the non-redirected entry point.
   Args 1..4 in $a0..$a3; args 5..8 on the stack at 16..28($sp). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 40\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 40 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg word function via the non-redirected entry point.
   Args 1..4 in $a0..$a3; args 5..9 on the stack at 16..32($sp). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 40\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 40 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg word function via the non-redirected entry point.
   Args 1..4 in $a0..$a3; args 5..10 on the stack at 16..36($sp). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 48\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 40(%1) \n\t"                                    \
         "sw $a0, 36($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 48 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg word function via the non-redirected entry point.
   Args 1..4 in $a0..$a3; args 5..11 on the stack at 16..40($sp). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 48\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 40(%1) \n\t"                                    \
         "sw $a0, 36($sp) \n\t"                                   \
         "lw $a0, 44(%1) \n\t"                                    \
         "sw $a0, 40($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 48 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-arg word function via the non-redirected entry point.
   Args 1..4 in $a0..$a3; args 5..12 on the stack at 16..44($sp). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $gp, 0($sp) \n\t"                                    \
         "sw $ra, 4($sp) \n\t"                                    \
         "lw $a0, 20(%1) \n\t"                                    \
         "subu $sp, $sp, 56\n\t"                                  \
         "sw $a0, 16($sp) \n\t"                                   \
         "lw $a0, 24(%1) \n\t"                                    \
         "sw $a0, 20($sp) \n\t"                                   \
         "lw $a0, 28(%1) \n\t"                                    \
         "sw $a0, 24($sp) \n\t"                                   \
         "lw $a0, 32(%1) \n\t"                                    \
         "sw $a0, 28($sp) \n\t"                                   \
         "lw $a0, 36(%1) \n\t"                                    \
         "sw $a0, 32($sp) \n\t"                                   \
         "lw $a0, 40(%1) \n\t"                                    \
         "sw $a0, 36($sp) \n\t"                                   \
         "lw $a0, 44(%1) \n\t"                                    \
         "sw $a0, 40($sp) \n\t"                                   \
         "lw $a0, 48(%1) \n\t"                                    \
         "sw $a0, 44($sp) \n\t"                                   \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2, 12(%1) \n\t"                                    \
         "lw $a3, 16(%1) \n\t"                                    \
         "lw $t9, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $sp, $sp, 56 \n\t"                                 \
         "lw $gp, 0($sp) \n\t"                                    \
         "lw $ra, 4($sp) \n\t"                                    \
         "addu $sp, $sp, 8 \n\t"                                  \
         "move %0, $v0\n"                                         \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS          \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4421 #endif /* PLAT_mips32_linux */
4424 /* ------------------------------------------------------------------ */
4425 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
4427 /* ------------------------------------------------------------------ */
4429 /* Some request codes. There are many more of these, but most are not
4430 exposed to end-user view. These are the public ones, all of the
4431 form 0x1000 + small_number.
   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/
4437 /* These macros are used by tools -- they must be public, but don't
4438 embed them into other programs. */
/* Build the 32-bit client-request code base for a tool from its
   two-character tool code, e.g. ('M','C') -> 0x4D430000. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool whose two-character
   code is (a,b) -- compares only the top 16 bits. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
4444 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
4445 This enum comprises an ABI exported by Valgrind to programs
4446 which use client requests. DO NOT CHANGE THE ORDER OF THESE
4447 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* Public client-request codes, all of the form 0x1000 + small number.
   The order and values form an ABI -- do not renumber; add at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows a string (gdb monitor command) to be passed to the tool
             Used for interaction with vgdb/gdb */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF           = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
/* Non-GNU compilers don't know __extension__; define it away so the
   GNU-style request macros below still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
4526 /* Returns the number of Valgrinds this code is running under. That
4527 is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
/* 0 if native, 1 if under Valgrind, 2 if under Valgrind-under-Valgrind... */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
4536 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
4537 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
4538 since it provides a way to make sure valgrind will retranslate the
4539 invalidated area. Returns no value. */
/* Discard translations of [_qzz_addr, _qzz_addr + _qzz_len - 1] so the
   range is retranslated on next execution.  Useful for JITters. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
4545 /* These requests are for getting Valgrind itself to print something.
4546 Possibly with a backtrace. This is a really ugly hack. The return value
4547 is the number of characters printed, excluding the "**<pid>** " part at the
4548 start and the backtrace (if present). */
4550 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
4551 /* Modern GCC will optimize the static routine out if unused,
4552 and unused attribute will shut down warnings about it. */
4553 static int VALGRIND_PRINTF(const char *format, ...)
4554 __attribute__((format(__printf__, 1, 2), __unused__));
4557 #if defined(_MSC_VER)
4560 VALGRIND_PRINTF(const char *format, ...)
4562 #if defined(NVALGRIND)
4564 #else /* NVALGRIND */
4565 #if defined(_MSC_VER)
4568 unsigned long _qzz_res;
4571 va_start(vargs, format);
4572 #if defined(_MSC_VER)
4573 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4574 VG_USERREQ__PRINTF_VALIST_BY_REF,
4579 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4580 VG_USERREQ__PRINTF_VALIST_BY_REF,
4581 (unsigned long)format,
4582 (unsigned long)&vargs,
4586 return (int)_qzz_res;
4587 #endif /* NVALGRIND */
4590 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
4591 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
4592 __attribute__((format(__printf__, 1, 2), __unused__));
4595 #if defined(_MSC_VER)
4598 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
4600 #if defined(NVALGRIND)
4602 #else /* NVALGRIND */
4603 #if defined(_MSC_VER)
4606 unsigned long _qzz_res;
4609 va_start(vargs, format);
4610 #if defined(_MSC_VER)
4611 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4612 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
4617 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
4618 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
4619 (unsigned long)format,
4620 (unsigned long)&vargs,
4624 return (int)_qzz_res;
4625 #endif /* NVALGRIND */
4629 /* These requests allow control to move from the simulated CPU to the
4630 real CPU, calling an arbitary function.
4632 Note that the current ThreadId is inserted as the first argument.
4635 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
4637 requires f to have this signature:
4639 Word f(Word tid, Word arg1, Word arg2)
4641 where "Word" is a word-sized type.
4643 Note that these client requests are not entirely reliable. For example,
4644 if you call a function with them that subsequently calls printf(),
4645 there's a high chance Valgrind will crash. Generally, your prospects of
4646 these working are made higher if the called function does not refer to
4647 any global variables, and does not refer to any libc or other functions
4648 (printf et al). Any kind of entanglement with libc or dynamic linking is
4649 likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
/* Run a 0..3-arg function on the real CPU.  The running thread's
   ThreadId is always inserted as the function's first argument, so
   e.g. VALGRIND_NON_SIMD_CALL0 actually needs a 1-arg function. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)
4678 /* Counts the number of errors that have been recorded by a tool. Nb:
4679 the tool must record the errors with VG_(maybe_record_error)() or
4680 VG_(unique_error)() for them to be counted. */
/* Number of errors recorded so far by the tool (via
   VG_(maybe_record_error)() or VG_(unique_error)()). */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
4687 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
4688 when heap blocks are allocated in order to give accurate results. This
4689 happens automatically for the standard allocator functions such as
4690 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
4693 But if your program uses a custom allocator, this doesn't automatically
4694 happen, and Valgrind will not do as well. For example, if you allocate
4695 superblocks with mmap() and then allocates chunks of the superblocks, all
4696 Valgrind's observations will be at the mmap() level and it won't know that
4697 the chunks should be considered separate entities. In Memcheck's case,
4698 that means you probably won't get heap block overrun detection (because
4699 there won't be redzones marked as unaddressable) and you definitely won't
4700 get any leak detection.
4702 The following client requests allow a custom allocator to be annotated so
4703 that it can be handled accurately by Valgrind.
4705 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
4706 by a malloc()-like function. For Memcheck (an illustrative case), this
4709 - It records that the block has been allocated. This means any addresses
4710 within the block mentioned in error messages will be
4711 identified as belonging to the block. It also means that if the block
4712 isn't freed it will be detected by the leak checker.
4714 - It marks the block as being addressable and undefined (if 'is_zeroed' is
4715 not set), or addressable and defined (if 'is_zeroed' is set). This
4716 controls how accesses to the block by the program are handled.
4718 'addr' is the start of the usable block (ie. after any
4719 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
4720 can apply redzones -- these are blocks of padding at the start and end of
4721 each block. Adding redzones is recommended as it makes it much more likely
4722 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
4723 zeroed (or filled with another predictable value), as is the case for
4726 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
4727 heap block -- that will be used by the client program -- is allocated.
4728 It's best to put it at the outermost level of the allocator if possible;
4729 for example, if you have a function my_alloc() which calls
4730 internal_alloc(), and the client request is put inside internal_alloc(),
4731 stack traces relating to the heap block will contain entries for both
4732 my_alloc() and internal_alloc(), which is probably not what you want.
4734 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
4735 custom blocks from within a heap block, B, that has been allocated with
4736 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
4737 -- the custom blocks will take precedence.
4739 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
4740 Memcheck, it does two things:
4742 - It records that the block has been deallocated. This assumes that the
4743 block was annotated as having been allocated via
4744 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
4746 - It marks the block as being unaddressable.
4748 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
4749 heap block is deallocated.
4751 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
4752 Memcheck, it does four things:
4754 - It records that the size of a block has been changed. This assumes that
4755 the block was annotated as having been allocated via
4756 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
4758 - If the block shrunk, it marks the freed memory as being unaddressable.
4760 - If the block grew, it marks the new area as undefined and defines a red
4761 zone past the end of the new block.
4763 - The V-bits of the overlap between the old and the new block are preserved.
4765 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
4766 and before deallocation of the old block.
4768 In many cases, these three client requests will not be enough to get your
4769 allocator working well with Memcheck. More specifically, if your allocator
4770 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
4771 will be necessary to mark the memory as addressable just before the zeroing
4772 occurs, otherwise you'll get a lot of invalid write errors. For example,
4773 you'll need to do this if your allocator recycles freed blocks, but it
4774 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
4775 Alternatively, if your allocator reuses freed blocks for allocator-internal
4776 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
4778 Really, what's happening is a blurring of the lines between the client
4779 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
4780 memory should be considered unaddressable to the client program, but the
4781 allocator knows more than the rest of the client program and so may be able
4782 to safely access it. Extra client requests are necessary for Valgrind to
4783 understand the distinction between the allocator and the rest of the
   Ignored if addr == 0.
*/
/* Annotate a custom-allocator block as malloc'd.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,   \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0. */
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)
/* Create a memory pool. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
/* Mark a piece of memory as being a stack.  Returns a stack id. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)

/* Change the start and end address of the stack id. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)

/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
4896 #undef PLAT_x86_darwin
4897 #undef PLAT_amd64_darwin
4898 #undef PLAT_x86_win32
4899 #undef PLAT_x86_linux
4900 #undef PLAT_amd64_linux
4901 #undef PLAT_ppc32_linux
4902 #undef PLAT_ppc64_linux
4903 #undef PLAT_arm_linux
4904 #undef PLAT_s390x_linux
4905 #undef PLAT_mips32_linux
4907 #endif /* __VALGRIND_H */