2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
framework.
15 Copyright (C) 2000-2015 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
are met:
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
permission.
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little more slowly than they otherwise would, but are otherwise
66 unchanged. When not running on Valgrind, each client request
67 consumes very few (e.g. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
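/* Illustrative usage sketch: a program that issues client requests is
   built the same way with or without Valgrind support. MY_REQUEST_CODE
   below is a placeholder for a request code that would come from a
   tool-specific header (e.g. memcheck.h); it is not defined here.

      gcc -O2 prog.c                client requests stay in; they cost
                                    only a few instructions when the
                                    program runs outside Valgrind
      gcc -O2 -DNVALGRIND prog.c    client requests compile to nothing

   and inside prog.c a request is just a statement, using the macros
   defined later in this file:

      VALGRIND_DO_CLIENT_REQUEST_STMT(MY_REQUEST_CODE, 0, 0, 0, 0, 0);
*/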
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
X.Y or later" is (eg)

87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 11
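/* Sketch of the intended use of these version macros: client code that
   relies on a feature added in a particular release can guard it, first
   testing that the macros exist at all (they are absent before 3.6).
   The "3.7" threshold below is only an example.

      #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
          && (__VALGRIND_MAJOR__ > 3 \
              || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 7))
         use the newer facility here
      #else
         fall back, or do nothing
      #endif
*/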
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
we use "__asm__"). */

101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
*/
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_amd64_win64
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64be_linux
119 #undef PLAT_ppc64le_linux
120 #undef PLAT_arm_linux
121 #undef PLAT_arm64_linux
122 #undef PLAT_s390x_linux
123 #undef PLAT_mips32_linux
124 #undef PLAT_mips64_linux
125 #undef PLAT_tilegx_linux
126 #undef PLAT_x86_solaris
127 #undef PLAT_amd64_solaris
130 #if defined(__APPLE__) && defined(__i386__)
131 # define PLAT_x86_darwin 1
132 #elif defined(__APPLE__) && defined(__x86_64__)
133 # define PLAT_amd64_darwin 1
134 #elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
135 || defined(__CYGWIN32__) \
136 || (defined(_WIN32) && defined(_M_IX86))
137 # define PLAT_x86_win32 1
138 #elif defined(__MINGW64__) \
139 || (defined(_WIN64) && defined(_M_X64))
140 # define PLAT_amd64_win64 1
141 #elif defined(__linux__) && defined(__i386__)
142 # define PLAT_x86_linux 1
143 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
144 # define PLAT_amd64_linux 1
145 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
146 # define PLAT_ppc32_linux 1
147 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
148 /* Big Endian uses ELF version 1 */
149 # define PLAT_ppc64be_linux 1
150 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
151 /* Little Endian uses ELF version 2 */
152 # define PLAT_ppc64le_linux 1
153 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
154 # define PLAT_arm_linux 1
155 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
156 # define PLAT_arm64_linux 1
157 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
158 # define PLAT_s390x_linux 1
159 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
160 # define PLAT_mips64_linux 1
161 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
162 # define PLAT_mips32_linux 1
163 #elif defined(__linux__) && defined(__tilegx__)
164 # define PLAT_tilegx_linux 1
165 #elif defined(__sun) && defined(__i386__)
166 # define PLAT_x86_solaris 1
167 #elif defined(__sun) && defined(__x86_64__)
168 # define PLAT_amd64_solaris 1
#else
170 /* If we're not compiling for our target platform, don't generate
any kind of client request. */
172 # if !defined(NVALGRIND)
#   define NVALGRIND 1
# endif
#endif
178 /* ------------------------------------------------------------------ */
179 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
180 /* in here of use to end-users -- skip to the next section. */
181 /* ------------------------------------------------------------------ */
184 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
185 * request. Accepts both pointers and integers as arguments.
187 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
188 * client request that does not return a value.
190 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
191 * client request and whose value equals the client request result. Accepts
192 * both pointers and integers as arguments. Note that such calls are not
193 * necessarily pure functions -- they may have side effects.
196 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
197 _zzq_request, _zzq_arg1, _zzq_arg2, \
198 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
199 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
200 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
201 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
203 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
204 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
205 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
206 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
207 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
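/* Usage sketch for the three flavours above (the request code and the
   arguments p and n are placeholders, not real Valgrind identifiers):

      unsigned long r;
      r = VALGRIND_DO_CLIENT_REQUEST_EXPR(0, MY_REQUEST, p, n, 0, 0, 0);
      VALGRIND_DO_CLIENT_REQUEST(r, 0, MY_REQUEST, p, n, 0, 0, 0);
      VALGRIND_DO_CLIENT_REQUEST_STMT(MY_REQUEST, p, n, 0, 0, 0);

   The first form yields the request's result (or the default, 0 here,
   when not running under Valgrind); the second stores that same value
   into r; the third discards it. */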
209 #if defined(NVALGRIND)
211 /* Define NVALGRIND to completely remove the Valgrind magic sequence
212 from the compiled code (analogous to NDEBUG's effects on
assert()). */
214 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
215 _zzq_default, _zzq_request, \
216 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
(_zzq_default)
219 #else /* ! NVALGRIND */
221 /* The following defines the magic code sequences which the JITter
222 spots and handles magically. Don't look too closely at them as
223 they will rot your brain.
225 The assembly code sequences for all architectures are in this one
226 file. This is because this file must be stand-alone, and we don't
227 want to have multiple files.
229 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
230 value gets put in the return slot, so that everything works when
231 this is executed not under Valgrind. Args are passed in a memory
232 block, and so there's no intrinsic limit to the number that could
233 be passed, but it's currently five.

The macro args are:
236 _zzq_rlval result lvalue
237 _zzq_default default value (result returned when running on real CPU)
238 _zzq_request request code
239 _zzq_arg1..5 request params
241 The other two macros are used to support function wrapping, and are
242 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
243 guest's NRADDR pseudo-register and whatever other information is
244 needed to safely call the original function from the wrapper: on
245 ppc64-linux, the R2 value at the divert point is also needed. This
246 information is abstracted into a user-visible type, OrigFn.
248 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
249 guest, but guarantees that the branch instruction will not be
250 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
251 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
252 complete inline asm, since it needs to be combined with more magic
253 inline asm stuff to be useful.
256 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
258 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
259 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
260 || defined(PLAT_x86_solaris)
264 unsigned int nraddr; /* where's the code? */
268 #define __SPECIAL_INSTRUCTION_PREAMBLE \
269 "roll $3, %%edi ; roll $13, %%edi\n\t" \
270 "roll $29, %%edi ; roll $19, %%edi\n\t"
272 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
273 _zzq_default, _zzq_request, \
274 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
276 ({volatile unsigned int _zzq_args[6]; \
277 volatile unsigned int _zzq_result; \
278 _zzq_args[0] = (unsigned int)(_zzq_request); \
279 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
280 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
281 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
282 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
283 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
284 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
285 /* %EDX = client_request ( %EAX ) */ \
286 "xchgl %%ebx,%%ebx" \
287 : "=d" (_zzq_result) \
288 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
294 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
295 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
296 volatile unsigned int __addr; \
297 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
298 /* %EAX = guest_NRADDR */ \
299 "xchgl %%ecx,%%ecx" \
304 _zzq_orig->nraddr = __addr; \
307 #define VALGRIND_CALL_NOREDIR_EAX \
308 __SPECIAL_INSTRUCTION_PREAMBLE \
309 /* call-noredir *%EAX */ \
310 "xchgl %%edx,%%edx\n\t"
312 #define VALGRIND_VEX_INJECT_IR() \
314 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
315 "xchgl %%edi,%%edi\n\t" \
316 : : : "cc", "memory" \
320 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
321 || PLAT_x86_solaris */
323 /* ------------------------- x86-Win32 ------------------------- */
325 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
329 unsigned int nraddr; /* where's the code? */
333 #if defined(_MSC_VER)
335 #define __SPECIAL_INSTRUCTION_PREAMBLE \
336 __asm rol edi, 3 __asm rol edi, 13 \
337 __asm rol edi, 29 __asm rol edi, 19
339 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
340 _zzq_default, _zzq_request, \
341 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
342 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
343 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
344 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
345 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
347 static __inline uintptr_t
348 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
349 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
350 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
351 uintptr_t _zzq_arg5)
352 {
353 volatile uintptr_t _zzq_args[6];
354 volatile unsigned int _zzq_result;
355 _zzq_args[0] = (uintptr_t)(_zzq_request);
356 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
357 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
358 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
359 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
360 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
361 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
362 __SPECIAL_INSTRUCTION_PREAMBLE
363 /* %EDX = client_request ( %EAX ) */
364 __asm xchg ebx,ebx
365 __asm mov _zzq_result, edx
366 }
367 return _zzq_result;
368 }
370 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
371 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
372 volatile unsigned int __addr; \
373 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
374 /* %EAX = guest_NRADDR */ \
376 __asm mov __addr, eax \
378 _zzq_orig->nraddr = __addr; \
381 #define VALGRIND_CALL_NOREDIR_EAX ERROR
383 #define VALGRIND_VEX_INJECT_IR() \
385 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
391 #error Unsupported compiler.
394 #endif /* PLAT_x86_win32 */
396 /* ----------------- amd64-{linux,darwin,solaris} --------------- */
398 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
399 || defined(PLAT_amd64_solaris) \
400 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
404 unsigned long int nraddr; /* where's the code? */
408 #define __SPECIAL_INSTRUCTION_PREAMBLE \
409 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
410 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
412 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
413 _zzq_default, _zzq_request, \
414 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
416 ({ volatile unsigned long int _zzq_args[6]; \
417 volatile unsigned long int _zzq_result; \
418 _zzq_args[0] = (unsigned long int)(_zzq_request); \
419 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
420 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
421 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
422 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
423 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
424 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
425 /* %RDX = client_request ( %RAX ) */ \
426 "xchgq %%rbx,%%rbx" \
427 : "=d" (_zzq_result) \
428 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
434 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
435 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
436 volatile unsigned long int __addr; \
437 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
438 /* %RAX = guest_NRADDR */ \
439 "xchgq %%rcx,%%rcx" \
444 _zzq_orig->nraddr = __addr; \
447 #define VALGRIND_CALL_NOREDIR_RAX \
448 __SPECIAL_INSTRUCTION_PREAMBLE \
449 /* call-noredir *%RAX */ \
450 "xchgq %%rdx,%%rdx\n\t"
452 #define VALGRIND_VEX_INJECT_IR() \
454 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
455 "xchgq %%rdi,%%rdi\n\t" \
456 : : : "cc", "memory" \
460 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
462 /* ------------------------- amd64-Win64 ------------------------- */
464 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
466 #error Unsupported compiler.
468 #endif /* PLAT_amd64_win64 */
470 /* ------------------------ ppc32-linux ------------------------ */
472 #if defined(PLAT_ppc32_linux)
476 unsigned int nraddr; /* where's the code? */
480 #define __SPECIAL_INSTRUCTION_PREAMBLE \
481 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
482 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
484 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
485 _zzq_default, _zzq_request, \
486 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
489 ({ unsigned int _zzq_args[6]; \
490 unsigned int _zzq_result; \
491 unsigned int* _zzq_ptr; \
492 _zzq_args[0] = (unsigned int)(_zzq_request); \
493 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
494 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
495 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
496 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
497 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
498 _zzq_ptr = _zzq_args; \
499 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
500 "mr 4,%2\n\t" /*ptr*/ \
501 __SPECIAL_INSTRUCTION_PREAMBLE \
502 /* %R3 = client_request ( %R4 ) */ \
504 "mr %0,3" /*result*/ \
505 : "=b" (_zzq_result) \
506 : "b" (_zzq_default), "b" (_zzq_ptr) \
507 : "cc", "memory", "r3", "r4"); \
511 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
512 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
513 unsigned int __addr; \
514 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
515 /* %R3 = guest_NRADDR */ \
520 : "cc", "memory", "r3" \
522 _zzq_orig->nraddr = __addr; \
525 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
526 __SPECIAL_INSTRUCTION_PREAMBLE \
527 /* branch-and-link-to-noredir *%R11 */ \
530 #define VALGRIND_VEX_INJECT_IR() \
532 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
537 #endif /* PLAT_ppc32_linux */
539 /* ------------------------ ppc64-linux ------------------------ */
541 #if defined(PLAT_ppc64be_linux)
545 unsigned long int nraddr; /* where's the code? */
546 unsigned long int r2; /* what tocptr do we need? */
550 #define __SPECIAL_INSTRUCTION_PREAMBLE \
551 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
552 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
554 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
555 _zzq_default, _zzq_request, \
556 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
559 ({ unsigned long int _zzq_args[6]; \
560 unsigned long int _zzq_result; \
561 unsigned long int* _zzq_ptr; \
562 _zzq_args[0] = (unsigned long int)(_zzq_request); \
563 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
564 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
565 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
566 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
567 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
568 _zzq_ptr = _zzq_args; \
569 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
570 "mr 4,%2\n\t" /*ptr*/ \
571 __SPECIAL_INSTRUCTION_PREAMBLE \
572 /* %R3 = client_request ( %R4 ) */ \
574 "mr %0,3" /*result*/ \
575 : "=b" (_zzq_result) \
576 : "b" (_zzq_default), "b" (_zzq_ptr) \
577 : "cc", "memory", "r3", "r4"); \
581 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
582 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
583 unsigned long int __addr; \
584 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
585 /* %R3 = guest_NRADDR */ \
590 : "cc", "memory", "r3" \
592 _zzq_orig->nraddr = __addr; \
593 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
594 /* %R3 = guest_NRADDR_GPR2 */ \
599 : "cc", "memory", "r3" \
601 _zzq_orig->r2 = __addr; \
604 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
605 __SPECIAL_INSTRUCTION_PREAMBLE \
606 /* branch-and-link-to-noredir *%R11 */ \
609 #define VALGRIND_VEX_INJECT_IR() \
611 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
616 #endif /* PLAT_ppc64be_linux */
618 #if defined(PLAT_ppc64le_linux)
622 unsigned long int nraddr; /* where's the code? */
623 unsigned long int r2; /* what tocptr do we need? */
627 #define __SPECIAL_INSTRUCTION_PREAMBLE \
628 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
629 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
631 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
632 _zzq_default, _zzq_request, \
633 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
636 ({ unsigned long int _zzq_args[6]; \
637 unsigned long int _zzq_result; \
638 unsigned long int* _zzq_ptr; \
639 _zzq_args[0] = (unsigned long int)(_zzq_request); \
640 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
641 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
642 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
643 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
644 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
645 _zzq_ptr = _zzq_args; \
646 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
647 "mr 4,%2\n\t" /*ptr*/ \
648 __SPECIAL_INSTRUCTION_PREAMBLE \
649 /* %R3 = client_request ( %R4 ) */ \
651 "mr %0,3" /*result*/ \
652 : "=b" (_zzq_result) \
653 : "b" (_zzq_default), "b" (_zzq_ptr) \
654 : "cc", "memory", "r3", "r4"); \
658 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
659 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
660 unsigned long int __addr; \
661 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
662 /* %R3 = guest_NRADDR */ \
667 : "cc", "memory", "r3" \
669 _zzq_orig->nraddr = __addr; \
670 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
671 /* %R3 = guest_NRADDR_GPR2 */ \
676 : "cc", "memory", "r3" \
678 _zzq_orig->r2 = __addr; \
681 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
682 __SPECIAL_INSTRUCTION_PREAMBLE \
683 /* branch-and-link-to-noredir *%R12 */ \
686 #define VALGRIND_VEX_INJECT_IR() \
688 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
693 #endif /* PLAT_ppc64le_linux */
695 /* ------------------------- arm-linux ------------------------- */
697 #if defined(PLAT_arm_linux)
701 unsigned int nraddr; /* where's the code? */
705 #define __SPECIAL_INSTRUCTION_PREAMBLE \
706 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
707 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
709 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
710 _zzq_default, _zzq_request, \
711 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
714 ({volatile unsigned int _zzq_args[6]; \
715 volatile unsigned int _zzq_result; \
716 _zzq_args[0] = (unsigned int)(_zzq_request); \
717 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
718 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
719 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
720 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
721 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
722 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
723 "mov r4, %2\n\t" /*ptr*/ \
724 __SPECIAL_INSTRUCTION_PREAMBLE \
725 /* R3 = client_request ( R4 ) */ \
726 "orr r10, r10, r10\n\t" \
727 "mov %0, r3" /*result*/ \
728 : "=r" (_zzq_result) \
729 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
730 : "cc","memory", "r3", "r4"); \
734 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
735 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
736 unsigned int __addr; \
737 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
738 /* R3 = guest_NRADDR */ \
739 "orr r11, r11, r11\n\t" \
743 : "cc", "memory", "r3" \
745 _zzq_orig->nraddr = __addr; \
748 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
749 __SPECIAL_INSTRUCTION_PREAMBLE \
750 /* branch-and-link-to-noredir *%R4 */ \
751 "orr r12, r12, r12\n\t"
753 #define VALGRIND_VEX_INJECT_IR() \
755 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
756 "orr r9, r9, r9\n\t" \
757 : : : "cc", "memory" \
761 #endif /* PLAT_arm_linux */
763 /* ------------------------ arm64-linux ------------------------- */
765 #if defined(PLAT_arm64_linux)
769 unsigned long int nraddr; /* where's the code? */
773 #define __SPECIAL_INSTRUCTION_PREAMBLE \
774 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
775 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
777 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
778 _zzq_default, _zzq_request, \
779 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
782 ({volatile unsigned long int _zzq_args[6]; \
783 volatile unsigned long int _zzq_result; \
784 _zzq_args[0] = (unsigned long int)(_zzq_request); \
785 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
786 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
787 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
788 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
789 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
790 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
791 "mov x4, %2\n\t" /*ptr*/ \
792 __SPECIAL_INSTRUCTION_PREAMBLE \
793 /* X3 = client_request ( X4 ) */ \
794 "orr x10, x10, x10\n\t" \
795 "mov %0, x3" /*result*/ \
796 : "=r" (_zzq_result) \
797 : "r" ((unsigned long int)(_zzq_default)), \
798 "r" (&_zzq_args[0]) \
799 : "cc","memory", "x3", "x4"); \
803 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
804 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
805 unsigned long int __addr; \
806 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
807 /* X3 = guest_NRADDR */ \
808 "orr x11, x11, x11\n\t" \
812 : "cc", "memory", "x3" \
814 _zzq_orig->nraddr = __addr; \
817 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
818 __SPECIAL_INSTRUCTION_PREAMBLE \
819 /* branch-and-link-to-noredir X8 */ \
820 "orr x12, x12, x12\n\t"
822 #define VALGRIND_VEX_INJECT_IR() \
824 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
825 "orr x9, x9, x9\n\t" \
826 : : : "cc", "memory" \
830 #endif /* PLAT_arm64_linux */
832 /* ------------------------ s390x-linux ------------------------ */
834 #if defined(PLAT_s390x_linux)
838 unsigned long int nraddr; /* where's the code? */
842 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
843 * code. This detection is implemented in platform specific toIR.c
844 * (e.g. VEX/priv/guest_s390_decoder.c).
846 #define __SPECIAL_INSTRUCTION_PREAMBLE \
852 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
853 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
854 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
855 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
857 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
858 _zzq_default, _zzq_request, \
859 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
861 ({volatile unsigned long int _zzq_args[6]; \
862 volatile unsigned long int _zzq_result; \
863 _zzq_args[0] = (unsigned long int)(_zzq_request); \
864 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
865 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
866 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
867 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
868 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
869 __asm__ volatile(/* r2 = args */ \
873 __SPECIAL_INSTRUCTION_PREAMBLE \
874 __CLIENT_REQUEST_CODE \
877 : "=d" (_zzq_result) \
878 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
879 : "cc", "2", "3", "memory" \
884 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
885 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
886 volatile unsigned long int __addr; \
887 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
888 __GET_NR_CONTEXT_CODE \
892 : "cc", "3", "memory" \
894 _zzq_orig->nraddr = __addr; \
897 #define VALGRIND_CALL_NOREDIR_R1 \
898 __SPECIAL_INSTRUCTION_PREAMBLE \
901 #define VALGRIND_VEX_INJECT_IR() \
903 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
904 __VEX_INJECT_IR_CODE); \
907 #endif /* PLAT_s390x_linux */
909 /* ------------------------- mips32-linux ---------------- */
911 #if defined(PLAT_mips32_linux)
915 unsigned int nraddr; /* where's the code? */
923 #define __SPECIAL_INSTRUCTION_PREAMBLE \
924 "srl $0, $0, 13\n\t" \
925 "srl $0, $0, 29\n\t" \
926 "srl $0, $0, 3\n\t" \
929 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
930 _zzq_default, _zzq_request, \
931 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
933 ({ volatile unsigned int _zzq_args[6]; \
934 volatile unsigned int _zzq_result; \
935 _zzq_args[0] = (unsigned int)(_zzq_request); \
936 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
937 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
938 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
939 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
940 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
941 __asm__ volatile("move $11, %1\n\t" /*default*/ \
942 "move $12, %2\n\t" /*ptr*/ \
943 __SPECIAL_INSTRUCTION_PREAMBLE \
944 /* T3 = client_request ( T4 ) */ \
945 "or $13, $13, $13\n\t" \
946 "move %0, $11\n\t" /*result*/ \
947 : "=r" (_zzq_result) \
948 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
953 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
954 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
955 volatile unsigned int __addr; \
956 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
957 /* %t9 = guest_NRADDR */ \
958 "or $14, $14, $14\n\t" \
959 "move %0, $11" /*result*/ \
964 _zzq_orig->nraddr = __addr; \
967 #define VALGRIND_CALL_NOREDIR_T9 \
968 __SPECIAL_INSTRUCTION_PREAMBLE \
969 /* call-noredir *%t9 */ \
970 "or $15, $15, $15\n\t"
972 #define VALGRIND_VEX_INJECT_IR() \
974 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
975 "or $11, $11, $11\n\t" \
980 #endif /* PLAT_mips32_linux */
982 /* ------------------------- mips64-linux ---------------- */
984 #if defined(PLAT_mips64_linux)
988 unsigned long nraddr; /* where's the code? */
996 #define __SPECIAL_INSTRUCTION_PREAMBLE \
997 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
998 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1000 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1001 _zzq_default, _zzq_request, \
1002 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1004 ({ volatile unsigned long int _zzq_args[6]; \
1005 volatile unsigned long int _zzq_result; \
1006 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1007 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1008 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1009 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1010 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1011 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1012 __asm__ volatile("move $11, %1\n\t" /*default*/ \
1013 "move $12, %2\n\t" /*ptr*/ \
1014 __SPECIAL_INSTRUCTION_PREAMBLE \
1015 /* $11 = client_request ( $12 ) */ \
1016 "or $13, $13, $13\n\t" \
1017 "move %0, $11\n\t" /*result*/ \
1018 : "=r" (_zzq_result) \
1019 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1024 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1025 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1026 volatile unsigned long int __addr; \
1027 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1028 /* $11 = guest_NRADDR */ \
1029 "or $14, $14, $14\n\t" \
1030 "move %0, $11" /*result*/ \
1034 _zzq_orig->nraddr = __addr; \
1037 #define VALGRIND_CALL_NOREDIR_T9 \
1038 __SPECIAL_INSTRUCTION_PREAMBLE \
1039 /* call-noredir $25 */ \
1040 "or $15, $15, $15\n\t"
1042 #define VALGRIND_VEX_INJECT_IR() \
1044 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1045 "or $11, $11, $11\n\t" \
1049 #endif /* PLAT_mips64_linux */
1051 /* ------------------------ tilegx-linux --------------- */
1052 #if defined(PLAT_tilegx_linux)
1056 unsigned long long int nraddr; /* where's the code? */
1059 /*** special instruction sequence.
1060 0:02b3c7ff91234fff { moveli zero, 4660 ; moveli zero, 22136 }
1061 8:0091a7ff95678fff { moveli zero, 22136 ; moveli zero, 4660 }
1064 #define __SPECIAL_INSTRUCTION_PREAMBLE \
1065 ".quad 0x02b3c7ff91234fff\n" \
1066 ".quad 0x0091a7ff95678fff\n"
1068 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1069 _zzq_default, _zzq_request, \
1070 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1071 ({ volatile unsigned long long int _zzq_args[6]; \
1072 volatile unsigned long long int _zzq_result; \
1073 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
1074 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
1075 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
1076 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
1077 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
1078 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
1079 __asm__ volatile("move r11, %1\n\t" /*default*/ \
1080 "move r12, %2\n\t" /*ptr*/ \
1081 __SPECIAL_INSTRUCTION_PREAMBLE \
1082 /* r11 = client_request */ \
1083 "or r13, r13, r13\n\t" \
1084 "move %0, r11\n\t" /*result*/ \
1085 : "=r" (_zzq_result) \
1086 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1087 : "memory", "r11", "r12"); \
1091 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1092 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1093 volatile unsigned long long int __addr; \
1094 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1095 /* r11 = guest_NRADDR */ \
1096 "or r14, r14, r14\n" \
1102 _zzq_orig->nraddr = __addr; \
1105 #define VALGRIND_CALL_NOREDIR_R12 \
1106 __SPECIAL_INSTRUCTION_PREAMBLE \
1107 "or r15, r15, r15\n\t"
1109 #define VALGRIND_VEX_INJECT_IR() \
1111 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1112 "or r11, r11, r11\n\t" \
1116 #endif /* PLAT_tilegx_linux */
1118 /* Insert assembly code for other platforms here... */
1120 #endif /* NVALGRIND */
1123 /* ------------------------------------------------------------------ */
1124 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1125 /* ugly. It's the least-worst tradeoff I can think of. */
1126 /* ------------------------------------------------------------------ */
1128 /* This section defines magic (a.k.a. appalling-hack) macros for doing
1129 guaranteed-no-redirection calls, so as to get from function
1130 wrappers to the functions they are wrapping. The whole point is to
1131 construct standard call sequences, but to do the call itself with a
1132 special no-redirect call pseudo-instruction that the JIT
1133 understands and handles specially. This section is long and
1134 repetitious, and I can't see a way to make it shorter.
1136 The naming scheme is as follows:
1138 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1140 'W' stands for "word" and 'v' for "void". Hence there are
1141 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1142 and for each, the possibility of returning a word-typed result, or
no result. */
1146 /* Use these to write the name of your wrapper. NOTE: duplicates
1147 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1148 the default behaviour equivalence class tag "0000" into the name.
1149 See pub_tool_redir.h for details -- normally you don't need to
1150 think about this, though. */
1152 /* Use an extra level of macroisation so as to ensure the soname/fnname
1153 args are fully macro-expanded before pasting them together. */
1154 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
1156 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1157 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1159 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1160 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
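/* For instance (sketch): wrapping "pthread_create" from an object whose
   soname is "libpthread.so.0" uses the Z-encoded soname
   libpthreadZdsoZd0 ("Zd" encodes '.'), so

      I_WRAP_SONAME_FNNAME_ZU(libpthreadZdsoZd0, pthread_create)

   expands, via VG_CONCAT4, to the single identifier

      _vgw00000ZU_libpthreadZdsoZd0_pthread_create

   which is the symbol name the redirection machinery looks for. */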
1162 /* Use this macro from within a wrapper function to collect the
1163 context (address and possibly other info) of the original function.
1164 Once you have that you can then use it in one of the CALL_FN_
1165 macros. The type of the argument _lval is OrigFn. */
1166 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
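/* Putting the pieces together, a complete wrapper looks like this
   sketch. It assumes the wrapped function foo lives in an object with
   no soname (such as the main executable), hence the special soname
   NONE, and uses CALL_FN_W_W from further down this file to call the
   original:

      #include "valgrind.h"

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo)(int x)
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_W_W(result, fn, x);
         return result;
      }

   When the client runs under Valgrind, calls to foo() are redirected to
   this wrapper, which then reaches the real foo() via the no-redirect
   call machinery. */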
1168 /* Also provide end-user facilities for function replacement, rather
1169 than wrapping. A replacement function differs from a wrapper in
1170 that it has no way to get hold of the original function being
1171 called, and hence no way to call onwards to it. In a replacement
1172 function, VALGRIND_GET_ORIG_FN always returns zero. */
1174 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1175 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1177 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1178 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
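/* A replacement (as opposed to a wrapper) is declared the same way but
   with I_REPLACE_SONAME_FNNAME_*. Since VALGRIND_GET_ORIG_FN yields
   zero inside a replacement, it cannot call onwards and simply supplies
   its own implementation, e.g. (sketch, foo being a placeholder name):

      int I_REPLACE_SONAME_FNNAME_ZU(NONE, foo)(int x)
      {
         return x + 1;
      }
*/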
1180 /* Derivatives of the main macros below, for calling functions
returning void. */
1183 #define CALL_FN_v_v(fnptr) \
1184 do { volatile unsigned long _junk; \
1185 CALL_FN_W_v(_junk,fnptr); } while (0)
1187 #define CALL_FN_v_W(fnptr, arg1) \
1188 do { volatile unsigned long _junk; \
1189 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1191 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1192 do { volatile unsigned long _junk; \
1193 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1195 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1196 do { volatile unsigned long _junk; \
1197 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1199 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1200 do { volatile unsigned long _junk; \
1201 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1203 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1204 do { volatile unsigned long _junk; \
1205 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1207 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1208 do { volatile unsigned long _junk; \
1209 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1211 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1212 do { volatile unsigned long _junk; \
1213 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
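/* For example (sketch), a wrapper for a void function of one word-sized
   argument can use a derivative directly; the _junk temporary above
   soaks up the unused return value (bar is a placeholder name):

      void I_WRAP_SONAME_FNNAME_ZU(NONE, bar)(int x)
      {
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);
         CALL_FN_v_W(fn, x);
      }
*/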
1215 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1217 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1218 || defined(PLAT_x86_solaris)
1220 /* These regs are trashed by the hidden call. No need to mention eax
1221 as gcc can already see that, and mentioning it causes gcc to bomb. */
1222 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1224 /* Macros to save and align the stack before making a function
1225 call and restore it afterwards as gcc may not keep the stack
1226 pointer aligned if it doesn't realise calls are being made
1227 to other functions. */
1229 #define VALGRIND_ALIGN_STACK \
1230 "movl %%esp,%%edi\n\t" \
1231 "andl $0xfffffff0,%%esp\n\t"
1232 #define VALGRIND_RESTORE_STACK \
1233 "movl %%edi,%%esp\n\t"
1235 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
long) == 4. */
1238 #define CALL_FN_W_v(lval, orig) \
1240 volatile OrigFn _orig = (orig); \
1241 volatile unsigned long _argvec[1]; \
1242 volatile unsigned long _res; \
1243 _argvec[0] = (unsigned long)_orig.nraddr; \
1245 VALGRIND_ALIGN_STACK \
1246 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1247 VALGRIND_CALL_NOREDIR_EAX \
1248 VALGRIND_RESTORE_STACK \
1249 : /*out*/ "=a" (_res) \
1250 : /*in*/ "a" (&_argvec[0]) \
1251 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1253 lval = (__typeof__(lval)) _res; \
1256 #define CALL_FN_W_W(lval, orig, arg1) \
1258 volatile OrigFn _orig = (orig); \
1259 volatile unsigned long _argvec[2]; \
1260 volatile unsigned long _res; \
1261 _argvec[0] = (unsigned long)_orig.nraddr; \
1262 _argvec[1] = (unsigned long)(arg1); \
1264 VALGRIND_ALIGN_STACK \
1265 "subl $12, %%esp\n\t" \
1266 "pushl 4(%%eax)\n\t" \
1267 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1268 VALGRIND_CALL_NOREDIR_EAX \
1269 VALGRIND_RESTORE_STACK \
1270 : /*out*/ "=a" (_res) \
1271 : /*in*/ "a" (&_argvec[0]) \
1272 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1274 lval = (__typeof__(lval)) _res; \
1277 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1279 volatile OrigFn _orig = (orig); \
1280 volatile unsigned long _argvec[3]; \
1281 volatile unsigned long _res; \
1282 _argvec[0] = (unsigned long)_orig.nraddr; \
1283 _argvec[1] = (unsigned long)(arg1); \
1284 _argvec[2] = (unsigned long)(arg2); \
1286 VALGRIND_ALIGN_STACK \
1287 "subl $8, %%esp\n\t" \
1288 "pushl 8(%%eax)\n\t" \
1289 "pushl 4(%%eax)\n\t" \
1290 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1291 VALGRIND_CALL_NOREDIR_EAX \
1292 VALGRIND_RESTORE_STACK \
1293 : /*out*/ "=a" (_res) \
1294 : /*in*/ "a" (&_argvec[0]) \
1295 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1297 lval = (__typeof__(lval)) _res; \
1300 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1302 volatile OrigFn _orig = (orig); \
1303 volatile unsigned long _argvec[4]; \
1304 volatile unsigned long _res; \
1305 _argvec[0] = (unsigned long)_orig.nraddr; \
1306 _argvec[1] = (unsigned long)(arg1); \
1307 _argvec[2] = (unsigned long)(arg2); \
1308 _argvec[3] = (unsigned long)(arg3); \
1310 VALGRIND_ALIGN_STACK \
1311 "subl $4, %%esp\n\t" \
1312 "pushl 12(%%eax)\n\t" \
1313 "pushl 8(%%eax)\n\t" \
1314 "pushl 4(%%eax)\n\t" \
1315 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1316 VALGRIND_CALL_NOREDIR_EAX \
1317 VALGRIND_RESTORE_STACK \
1318 : /*out*/ "=a" (_res) \
1319 : /*in*/ "a" (&_argvec[0]) \
1320 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1322 lval = (__typeof__(lval)) _res; \
1325 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1327 volatile OrigFn _orig = (orig); \
1328 volatile unsigned long _argvec[5]; \
1329 volatile unsigned long _res; \
1330 _argvec[0] = (unsigned long)_orig.nraddr; \
1331 _argvec[1] = (unsigned long)(arg1); \
1332 _argvec[2] = (unsigned long)(arg2); \
1333 _argvec[3] = (unsigned long)(arg3); \
1334 _argvec[4] = (unsigned long)(arg4); \
1336 VALGRIND_ALIGN_STACK \
1337 "pushl 16(%%eax)\n\t" \
1338 "pushl 12(%%eax)\n\t" \
1339 "pushl 8(%%eax)\n\t" \
1340 "pushl 4(%%eax)\n\t" \
1341 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1342 VALGRIND_CALL_NOREDIR_EAX \
1343 VALGRIND_RESTORE_STACK \
1344 : /*out*/ "=a" (_res) \
1345 : /*in*/ "a" (&_argvec[0]) \
1346 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1348 lval = (__typeof__(lval)) _res; \
1351 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1353 volatile OrigFn _orig = (orig); \
1354 volatile unsigned long _argvec[6]; \
1355 volatile unsigned long _res; \
1356 _argvec[0] = (unsigned long)_orig.nraddr; \
1357 _argvec[1] = (unsigned long)(arg1); \
1358 _argvec[2] = (unsigned long)(arg2); \
1359 _argvec[3] = (unsigned long)(arg3); \
1360 _argvec[4] = (unsigned long)(arg4); \
1361 _argvec[5] = (unsigned long)(arg5); \
1363 VALGRIND_ALIGN_STACK \
1364 "subl $12, %%esp\n\t" \
1365 "pushl 20(%%eax)\n\t" \
1366 "pushl 16(%%eax)\n\t" \
1367 "pushl 12(%%eax)\n\t" \
1368 "pushl 8(%%eax)\n\t" \
1369 "pushl 4(%%eax)\n\t" \
1370 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1371 VALGRIND_CALL_NOREDIR_EAX \
1372 VALGRIND_RESTORE_STACK \
1373 : /*out*/ "=a" (_res) \
1374 : /*in*/ "a" (&_argvec[0]) \
1375 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1377 lval = (__typeof__(lval)) _res; \
1380 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1382 volatile OrigFn _orig = (orig); \
1383 volatile unsigned long _argvec[7]; \
1384 volatile unsigned long _res; \
1385 _argvec[0] = (unsigned long)_orig.nraddr; \
1386 _argvec[1] = (unsigned long)(arg1); \
1387 _argvec[2] = (unsigned long)(arg2); \
1388 _argvec[3] = (unsigned long)(arg3); \
1389 _argvec[4] = (unsigned long)(arg4); \
1390 _argvec[5] = (unsigned long)(arg5); \
1391 _argvec[6] = (unsigned long)(arg6); \
1393 VALGRIND_ALIGN_STACK \
1394 "subl $8, %%esp\n\t" \
1395 "pushl 24(%%eax)\n\t" \
1396 "pushl 20(%%eax)\n\t" \
1397 "pushl 16(%%eax)\n\t" \
1398 "pushl 12(%%eax)\n\t" \
1399 "pushl 8(%%eax)\n\t" \
1400 "pushl 4(%%eax)\n\t" \
1401 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1402 VALGRIND_CALL_NOREDIR_EAX \
1403 VALGRIND_RESTORE_STACK \
1404 : /*out*/ "=a" (_res) \
1405 : /*in*/ "a" (&_argvec[0]) \
1406 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1408 lval = (__typeof__(lval)) _res; \
1411 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1414 volatile OrigFn _orig = (orig); \
1415 volatile unsigned long _argvec[8]; \
1416 volatile unsigned long _res; \
1417 _argvec[0] = (unsigned long)_orig.nraddr; \
1418 _argvec[1] = (unsigned long)(arg1); \
1419 _argvec[2] = (unsigned long)(arg2); \
1420 _argvec[3] = (unsigned long)(arg3); \
1421 _argvec[4] = (unsigned long)(arg4); \
1422 _argvec[5] = (unsigned long)(arg5); \
1423 _argvec[6] = (unsigned long)(arg6); \
1424 _argvec[7] = (unsigned long)(arg7); \
1426 VALGRIND_ALIGN_STACK \
1427 "subl $4, %%esp\n\t" \
1428 "pushl 28(%%eax)\n\t" \
1429 "pushl 24(%%eax)\n\t" \
1430 "pushl 20(%%eax)\n\t" \
1431 "pushl 16(%%eax)\n\t" \
1432 "pushl 12(%%eax)\n\t" \
1433 "pushl 8(%%eax)\n\t" \
1434 "pushl 4(%%eax)\n\t" \
1435 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1436 VALGRIND_CALL_NOREDIR_EAX \
1437 VALGRIND_RESTORE_STACK \
1438 : /*out*/ "=a" (_res) \
1439 : /*in*/ "a" (&_argvec[0]) \
1440 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1442 lval = (__typeof__(lval)) _res; \
1445 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1448 volatile OrigFn _orig = (orig); \
1449 volatile unsigned long _argvec[9]; \
1450 volatile unsigned long _res; \
1451 _argvec[0] = (unsigned long)_orig.nraddr; \
1452 _argvec[1] = (unsigned long)(arg1); \
1453 _argvec[2] = (unsigned long)(arg2); \
1454 _argvec[3] = (unsigned long)(arg3); \
1455 _argvec[4] = (unsigned long)(arg4); \
1456 _argvec[5] = (unsigned long)(arg5); \
1457 _argvec[6] = (unsigned long)(arg6); \
1458 _argvec[7] = (unsigned long)(arg7); \
1459 _argvec[8] = (unsigned long)(arg8); \
1461 VALGRIND_ALIGN_STACK \
1462 "pushl 32(%%eax)\n\t" \
1463 "pushl 28(%%eax)\n\t" \
1464 "pushl 24(%%eax)\n\t" \
1465 "pushl 20(%%eax)\n\t" \
1466 "pushl 16(%%eax)\n\t" \
1467 "pushl 12(%%eax)\n\t" \
1468 "pushl 8(%%eax)\n\t" \
1469 "pushl 4(%%eax)\n\t" \
1470 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1471 VALGRIND_CALL_NOREDIR_EAX \
1472 VALGRIND_RESTORE_STACK \
1473 : /*out*/ "=a" (_res) \
1474 : /*in*/ "a" (&_argvec[0]) \
1475 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1477 lval = (__typeof__(lval)) _res; \
1480 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1483 volatile OrigFn _orig = (orig); \
1484 volatile unsigned long _argvec[10]; \
1485 volatile unsigned long _res; \
1486 _argvec[0] = (unsigned long)_orig.nraddr; \
1487 _argvec[1] = (unsigned long)(arg1); \
1488 _argvec[2] = (unsigned long)(arg2); \
1489 _argvec[3] = (unsigned long)(arg3); \
1490 _argvec[4] = (unsigned long)(arg4); \
1491 _argvec[5] = (unsigned long)(arg5); \
1492 _argvec[6] = (unsigned long)(arg6); \
1493 _argvec[7] = (unsigned long)(arg7); \
1494 _argvec[8] = (unsigned long)(arg8); \
1495 _argvec[9] = (unsigned long)(arg9); \
1497 VALGRIND_ALIGN_STACK \
1498 "subl $12, %%esp\n\t" \
1499 "pushl 36(%%eax)\n\t" \
1500 "pushl 32(%%eax)\n\t" \
1501 "pushl 28(%%eax)\n\t" \
1502 "pushl 24(%%eax)\n\t" \
1503 "pushl 20(%%eax)\n\t" \
1504 "pushl 16(%%eax)\n\t" \
1505 "pushl 12(%%eax)\n\t" \
1506 "pushl 8(%%eax)\n\t" \
1507 "pushl 4(%%eax)\n\t" \
1508 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1509 VALGRIND_CALL_NOREDIR_EAX \
1510 VALGRIND_RESTORE_STACK \
1511 : /*out*/ "=a" (_res) \
1512 : /*in*/ "a" (&_argvec[0]) \
1513 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1515 lval = (__typeof__(lval)) _res; \
1518 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1519 arg7,arg8,arg9,arg10) \
1521 volatile OrigFn _orig = (orig); \
1522 volatile unsigned long _argvec[11]; \
1523 volatile unsigned long _res; \
1524 _argvec[0] = (unsigned long)_orig.nraddr; \
1525 _argvec[1] = (unsigned long)(arg1); \
1526 _argvec[2] = (unsigned long)(arg2); \
1527 _argvec[3] = (unsigned long)(arg3); \
1528 _argvec[4] = (unsigned long)(arg4); \
1529 _argvec[5] = (unsigned long)(arg5); \
1530 _argvec[6] = (unsigned long)(arg6); \
1531 _argvec[7] = (unsigned long)(arg7); \
1532 _argvec[8] = (unsigned long)(arg8); \
1533 _argvec[9] = (unsigned long)(arg9); \
1534 _argvec[10] = (unsigned long)(arg10); \
1536 VALGRIND_ALIGN_STACK \
1537 "subl $8, %%esp\n\t" \
1538 "pushl 40(%%eax)\n\t" \
1539 "pushl 36(%%eax)\n\t" \
1540 "pushl 32(%%eax)\n\t" \
1541 "pushl 28(%%eax)\n\t" \
1542 "pushl 24(%%eax)\n\t" \
1543 "pushl 20(%%eax)\n\t" \
1544 "pushl 16(%%eax)\n\t" \
1545 "pushl 12(%%eax)\n\t" \
1546 "pushl 8(%%eax)\n\t" \
1547 "pushl 4(%%eax)\n\t" \
1548 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1549 VALGRIND_CALL_NOREDIR_EAX \
1550 VALGRIND_RESTORE_STACK \
1551 : /*out*/ "=a" (_res) \
1552 : /*in*/ "a" (&_argvec[0]) \
1553 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1555 lval = (__typeof__(lval)) _res; \
1558 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1559 arg6,arg7,arg8,arg9,arg10, \
1562 volatile OrigFn _orig = (orig); \
1563 volatile unsigned long _argvec[12]; \
1564 volatile unsigned long _res; \
1565 _argvec[0] = (unsigned long)_orig.nraddr; \
1566 _argvec[1] = (unsigned long)(arg1); \
1567 _argvec[2] = (unsigned long)(arg2); \
1568 _argvec[3] = (unsigned long)(arg3); \
1569 _argvec[4] = (unsigned long)(arg4); \
1570 _argvec[5] = (unsigned long)(arg5); \
1571 _argvec[6] = (unsigned long)(arg6); \
1572 _argvec[7] = (unsigned long)(arg7); \
1573 _argvec[8] = (unsigned long)(arg8); \
1574 _argvec[9] = (unsigned long)(arg9); \
1575 _argvec[10] = (unsigned long)(arg10); \
1576 _argvec[11] = (unsigned long)(arg11); \
1578 VALGRIND_ALIGN_STACK \
1579 "subl $4, %%esp\n\t" \
1580 "pushl 44(%%eax)\n\t" \
1581 "pushl 40(%%eax)\n\t" \
1582 "pushl 36(%%eax)\n\t" \
1583 "pushl 32(%%eax)\n\t" \
1584 "pushl 28(%%eax)\n\t" \
1585 "pushl 24(%%eax)\n\t" \
1586 "pushl 20(%%eax)\n\t" \
1587 "pushl 16(%%eax)\n\t" \
1588 "pushl 12(%%eax)\n\t" \
1589 "pushl 8(%%eax)\n\t" \
1590 "pushl 4(%%eax)\n\t" \
1591 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1592 VALGRIND_CALL_NOREDIR_EAX \
1593 VALGRIND_RESTORE_STACK \
1594 : /*out*/ "=a" (_res) \
1595 : /*in*/ "a" (&_argvec[0]) \
1596 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1598 lval = (__typeof__(lval)) _res; \
1601 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1602 arg6,arg7,arg8,arg9,arg10, \
1605 volatile OrigFn _orig = (orig); \
1606 volatile unsigned long _argvec[13]; \
1607 volatile unsigned long _res; \
1608 _argvec[0] = (unsigned long)_orig.nraddr; \
1609 _argvec[1] = (unsigned long)(arg1); \
1610 _argvec[2] = (unsigned long)(arg2); \
1611 _argvec[3] = (unsigned long)(arg3); \
1612 _argvec[4] = (unsigned long)(arg4); \
1613 _argvec[5] = (unsigned long)(arg5); \
1614 _argvec[6] = (unsigned long)(arg6); \
1615 _argvec[7] = (unsigned long)(arg7); \
1616 _argvec[8] = (unsigned long)(arg8); \
1617 _argvec[9] = (unsigned long)(arg9); \
1618 _argvec[10] = (unsigned long)(arg10); \
1619 _argvec[11] = (unsigned long)(arg11); \
1620 _argvec[12] = (unsigned long)(arg12); \
1622 VALGRIND_ALIGN_STACK \
1623 "pushl 48(%%eax)\n\t" \
1624 "pushl 44(%%eax)\n\t" \
1625 "pushl 40(%%eax)\n\t" \
1626 "pushl 36(%%eax)\n\t" \
1627 "pushl 32(%%eax)\n\t" \
1628 "pushl 28(%%eax)\n\t" \
1629 "pushl 24(%%eax)\n\t" \
1630 "pushl 20(%%eax)\n\t" \
1631 "pushl 16(%%eax)\n\t" \
1632 "pushl 12(%%eax)\n\t" \
1633 "pushl 8(%%eax)\n\t" \
1634 "pushl 4(%%eax)\n\t" \
1635 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1636 VALGRIND_CALL_NOREDIR_EAX \
1637 VALGRIND_RESTORE_STACK \
1638 : /*out*/ "=a" (_res) \
1639 : /*in*/ "a" (&_argvec[0]) \
1640 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1642 lval = (__typeof__(lval)) _res; \
1645 #endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1647 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1649 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1650 || defined(PLAT_amd64_solaris)
1652 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1654 /* These regs are trashed by the hidden call. */
1655 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1656 "rdi", "r8", "r9", "r10", "r11"
1658 /* This is all pretty complex. It's so as to make stack unwinding
1659 work reliably. See bug 243270. The basic problem is the sub and
1660 add of 128 to %rsp in all of the following macros. If gcc believes
1661 the CFA is in %rsp, then unwinding may fail, because what's at the
1662 CFA is not what gcc "expected" when it constructs the CFIs for the
1663 places where the macros are instantiated.
1665 But we can't just add a CFI annotation to increase the CFA offset
1666 by 128, to match the sub of 128 from %rsp, because we don't know
1667 whether gcc has chosen %rsp as the CFA at that point, or whether it
1668 has chosen some other register (eg, %rbp). In the latter case,
1669 adding a CFI annotation to change the CFA offset is simply wrong.
1671 So the solution is to get hold of the CFA using
1672 __builtin_dwarf_cfa(), put it in a known register, and add a
1673 CFI annotation to say what the register is. We choose %rbp for
1674 this (perhaps perversely), because:
1676 (1) %rbp is already subject to unwinding. If a new register was
1677 chosen then the unwinder would have to unwind it in all stack
1678 traces, which is expensive, and
1680 (2) %rbp is already subject to precise exception updates in the
1681 JIT. If a new register was chosen, we'd have to have precise
1682 exceptions for it too, which reduces performance of the
JIT.
1685 However .. one extra complication. We can't just whack the result
1686 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1687 list of trashed registers at the end of the inline assembly
1688 fragments; gcc won't allow %rbp to appear in that list. Hence
1689 instead we need to stash %rbp in %r15 for the duration of the asm,
1690 and say that %r15 is trashed instead. gcc seems happy to go with
that.
1693 Oh .. and this all needs to be conditionalised so that it is
1694 unchanged from before this commit, when compiled with older gccs
1695 that don't support __builtin_dwarf_cfa. Furthermore, since
1696 this header file is freestanding, it has to be independent of
1697 config.h, and so the following conditionalisation cannot depend on
1698 configure time checks.
1700 Although it's not clear from
1701 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1702 this expression excludes Darwin.
1703 .cfi directives in Darwin assembly appear to be completely
1704 different and I haven't investigated how they work.
1706 For even more entertainment value, note we have to use the
1707 completely undocumented __builtin_dwarf_cfa(), which appears to
1708 really compute the CFA, whereas __builtin_frame_address(0) claims
1709 to but actually doesn't. See
1710 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1712 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1713 # define __FRAME_POINTER \
1714 ,"r"(__builtin_dwarf_cfa())
1715 # define VALGRIND_CFI_PROLOGUE \
1716 "movq %%rbp, %%r15\n\t" \
1717 "movq %2, %%rbp\n\t" \
1718 ".cfi_remember_state\n\t" \
1719 ".cfi_def_cfa rbp, 0\n\t"
1720 # define VALGRIND_CFI_EPILOGUE \
1721 "movq %%r15, %%rbp\n\t" \
1722 ".cfi_restore_state\n\t"
1724 # define __FRAME_POINTER
1725 # define VALGRIND_CFI_PROLOGUE
1726 # define VALGRIND_CFI_EPILOGUE
1729 /* Macros to save and align the stack before making a function
1730 call and restore it afterwards as gcc may not keep the stack
1731 pointer aligned if it doesn't realise calls are being made
1732 to other functions. */
1734 #define VALGRIND_ALIGN_STACK \
1735 "movq %%rsp,%%r14\n\t" \
1736 "andq $0xfffffffffffffff0,%%rsp\n\t"
1737 #define VALGRIND_RESTORE_STACK \
1738 "movq %%r14,%%rsp\n\t"
1740 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
long) == 8. */
1743 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1744 macros. In order not to trash the stack redzone, we need to drop
1745 %rsp by 128 before the hidden call, and restore afterwards. The
1746 nastiness is that it is only by luck that the stack still appears
1747 to be unwindable during the hidden call - since then the behaviour
1748 of any routine using this macro does not match what the CFI data
says. Sigh.
1751 Why is this important? Imagine that a wrapper has a stack
1752 allocated local, and passes to the hidden call, a pointer to it.
1753 Because gcc does not know about the hidden call, it may allocate
1754 that local in the redzone. Unfortunately the hidden call may then
1755 trash it before it comes to use it. So we must step clear of the
1756 redzone, for the duration of the hidden call, to make it safe.
1758 Probably the same problem afflicts the other redzone-style ABIs too
1759 (ppc64-linux); but for those, the stack is
1760 self-describing (none of this CFI nonsense) so at least messing
1761 with the stack pointer doesn't give a danger of non-unwindable
stack traces. */
1764 #define CALL_FN_W_v(lval, orig) \
1766 volatile OrigFn _orig = (orig); \
1767 volatile unsigned long _argvec[1]; \
1768 volatile unsigned long _res; \
1769 _argvec[0] = (unsigned long)_orig.nraddr; \
1771 VALGRIND_CFI_PROLOGUE \
1772 VALGRIND_ALIGN_STACK \
1773 "subq $128,%%rsp\n\t" \
1774 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1775 VALGRIND_CALL_NOREDIR_RAX \
1776 VALGRIND_RESTORE_STACK \
1777 VALGRIND_CFI_EPILOGUE \
1778 : /*out*/ "=a" (_res) \
1779 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1780 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1782 lval = (__typeof__(lval)) _res; \
1785 #define CALL_FN_W_W(lval, orig, arg1) \
1787 volatile OrigFn _orig = (orig); \
1788 volatile unsigned long _argvec[2]; \
1789 volatile unsigned long _res; \
1790 _argvec[0] = (unsigned long)_orig.nraddr; \
1791 _argvec[1] = (unsigned long)(arg1); \
1793 VALGRIND_CFI_PROLOGUE \
1794 VALGRIND_ALIGN_STACK \
1795 "subq $128,%%rsp\n\t" \
1796 "movq 8(%%rax), %%rdi\n\t" \
1797 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1798 VALGRIND_CALL_NOREDIR_RAX \
1799 VALGRIND_RESTORE_STACK \
1800 VALGRIND_CFI_EPILOGUE \
1801 : /*out*/ "=a" (_res) \
1802 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1803 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1805 lval = (__typeof__(lval)) _res; \
1808 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1810 volatile OrigFn _orig = (orig); \
1811 volatile unsigned long _argvec[3]; \
1812 volatile unsigned long _res; \
1813 _argvec[0] = (unsigned long)_orig.nraddr; \
1814 _argvec[1] = (unsigned long)(arg1); \
1815 _argvec[2] = (unsigned long)(arg2); \
1817 VALGRIND_CFI_PROLOGUE \
1818 VALGRIND_ALIGN_STACK \
1819 "subq $128,%%rsp\n\t" \
1820 "movq 16(%%rax), %%rsi\n\t" \
1821 "movq 8(%%rax), %%rdi\n\t" \
1822 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1823 VALGRIND_CALL_NOREDIR_RAX \
1824 VALGRIND_RESTORE_STACK \
1825 VALGRIND_CFI_EPILOGUE \
1826 : /*out*/ "=a" (_res) \
1827 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1828 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1830 lval = (__typeof__(lval)) _res; \
1833 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1835 volatile OrigFn _orig = (orig); \
1836 volatile unsigned long _argvec[4]; \
1837 volatile unsigned long _res; \
1838 _argvec[0] = (unsigned long)_orig.nraddr; \
1839 _argvec[1] = (unsigned long)(arg1); \
1840 _argvec[2] = (unsigned long)(arg2); \
1841 _argvec[3] = (unsigned long)(arg3); \
1843 VALGRIND_CFI_PROLOGUE \
1844 VALGRIND_ALIGN_STACK \
1845 "subq $128,%%rsp\n\t" \
1846 "movq 24(%%rax), %%rdx\n\t" \
1847 "movq 16(%%rax), %%rsi\n\t" \
1848 "movq 8(%%rax), %%rdi\n\t" \
1849 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1850 VALGRIND_CALL_NOREDIR_RAX \
1851 VALGRIND_RESTORE_STACK \
1852 VALGRIND_CFI_EPILOGUE \
1853 : /*out*/ "=a" (_res) \
1854 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1855 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1857 lval = (__typeof__(lval)) _res; \
1860 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1862 volatile OrigFn _orig = (orig); \
1863 volatile unsigned long _argvec[5]; \
1864 volatile unsigned long _res; \
1865 _argvec[0] = (unsigned long)_orig.nraddr; \
1866 _argvec[1] = (unsigned long)(arg1); \
1867 _argvec[2] = (unsigned long)(arg2); \
1868 _argvec[3] = (unsigned long)(arg3); \
1869 _argvec[4] = (unsigned long)(arg4); \
1871 VALGRIND_CFI_PROLOGUE \
1872 VALGRIND_ALIGN_STACK \
1873 "subq $128,%%rsp\n\t" \
1874 "movq 32(%%rax), %%rcx\n\t" \
1875 "movq 24(%%rax), %%rdx\n\t" \
1876 "movq 16(%%rax), %%rsi\n\t" \
1877 "movq 8(%%rax), %%rdi\n\t" \
1878 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1879 VALGRIND_CALL_NOREDIR_RAX \
1880 VALGRIND_RESTORE_STACK \
1881 VALGRIND_CFI_EPILOGUE \
1882 : /*out*/ "=a" (_res) \
1883 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1884 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1886 lval = (__typeof__(lval)) _res; \
1889 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1891 volatile OrigFn _orig = (orig); \
1892 volatile unsigned long _argvec[6]; \
1893 volatile unsigned long _res; \
1894 _argvec[0] = (unsigned long)_orig.nraddr; \
1895 _argvec[1] = (unsigned long)(arg1); \
1896 _argvec[2] = (unsigned long)(arg2); \
1897 _argvec[3] = (unsigned long)(arg3); \
1898 _argvec[4] = (unsigned long)(arg4); \
1899 _argvec[5] = (unsigned long)(arg5); \
1901 VALGRIND_CFI_PROLOGUE \
1902 VALGRIND_ALIGN_STACK \
1903 "subq $128,%%rsp\n\t" \
1904 "movq 40(%%rax), %%r8\n\t" \
1905 "movq 32(%%rax), %%rcx\n\t" \
1906 "movq 24(%%rax), %%rdx\n\t" \
1907 "movq 16(%%rax), %%rsi\n\t" \
1908 "movq 8(%%rax), %%rdi\n\t" \
1909 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1910 VALGRIND_CALL_NOREDIR_RAX \
1911 VALGRIND_RESTORE_STACK \
1912 VALGRIND_CFI_EPILOGUE \
1913 : /*out*/ "=a" (_res) \
1914 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1915 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1917 lval = (__typeof__(lval)) _res; \
1920 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1922 volatile OrigFn _orig = (orig); \
1923 volatile unsigned long _argvec[7]; \
1924 volatile unsigned long _res; \
1925 _argvec[0] = (unsigned long)_orig.nraddr; \
1926 _argvec[1] = (unsigned long)(arg1); \
1927 _argvec[2] = (unsigned long)(arg2); \
1928 _argvec[3] = (unsigned long)(arg3); \
1929 _argvec[4] = (unsigned long)(arg4); \
1930 _argvec[5] = (unsigned long)(arg5); \
1931 _argvec[6] = (unsigned long)(arg6); \
1933 VALGRIND_CFI_PROLOGUE \
1934 VALGRIND_ALIGN_STACK \
1935 "subq $128,%%rsp\n\t" \
1936 "movq 48(%%rax), %%r9\n\t" \
1937 "movq 40(%%rax), %%r8\n\t" \
1938 "movq 32(%%rax), %%rcx\n\t" \
1939 "movq 24(%%rax), %%rdx\n\t" \
1940 "movq 16(%%rax), %%rsi\n\t" \
1941 "movq 8(%%rax), %%rdi\n\t" \
1942 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1943 VALGRIND_CALL_NOREDIR_RAX \
1944 VALGRIND_RESTORE_STACK \
1945 VALGRIND_CFI_EPILOGUE \
1946 : /*out*/ "=a" (_res) \
1947 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1948 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1950 lval = (__typeof__(lval)) _res; \
1953 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1956 volatile OrigFn _orig = (orig); \
1957 volatile unsigned long _argvec[8]; \
1958 volatile unsigned long _res; \
1959 _argvec[0] = (unsigned long)_orig.nraddr; \
1960 _argvec[1] = (unsigned long)(arg1); \
1961 _argvec[2] = (unsigned long)(arg2); \
1962 _argvec[3] = (unsigned long)(arg3); \
1963 _argvec[4] = (unsigned long)(arg4); \
1964 _argvec[5] = (unsigned long)(arg5); \
1965 _argvec[6] = (unsigned long)(arg6); \
1966 _argvec[7] = (unsigned long)(arg7); \
1968 VALGRIND_CFI_PROLOGUE \
1969 VALGRIND_ALIGN_STACK \
1970 "subq $136,%%rsp\n\t" \
1971 "pushq 56(%%rax)\n\t" \
1972 "movq 48(%%rax), %%r9\n\t" \
1973 "movq 40(%%rax), %%r8\n\t" \
1974 "movq 32(%%rax), %%rcx\n\t" \
1975 "movq 24(%%rax), %%rdx\n\t" \
1976 "movq 16(%%rax), %%rsi\n\t" \
1977 "movq 8(%%rax), %%rdi\n\t" \
1978 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1979 VALGRIND_CALL_NOREDIR_RAX \
1980 VALGRIND_RESTORE_STACK \
1981 VALGRIND_CFI_EPILOGUE \
1982 : /*out*/ "=a" (_res) \
1983 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1984 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1986 lval = (__typeof__(lval)) _res; \
1989 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1992 volatile OrigFn _orig = (orig); \
1993 volatile unsigned long _argvec[9]; \
1994 volatile unsigned long _res; \
1995 _argvec[0] = (unsigned long)_orig.nraddr; \
1996 _argvec[1] = (unsigned long)(arg1); \
1997 _argvec[2] = (unsigned long)(arg2); \
1998 _argvec[3] = (unsigned long)(arg3); \
1999 _argvec[4] = (unsigned long)(arg4); \
2000 _argvec[5] = (unsigned long)(arg5); \
2001 _argvec[6] = (unsigned long)(arg6); \
2002 _argvec[7] = (unsigned long)(arg7); \
2003 _argvec[8] = (unsigned long)(arg8); \
2005 VALGRIND_CFI_PROLOGUE \
2006 VALGRIND_ALIGN_STACK \
2007 "subq $128,%%rsp\n\t" \
2008 "pushq 64(%%rax)\n\t" \
2009 "pushq 56(%%rax)\n\t" \
2010 "movq 48(%%rax), %%r9\n\t" \
2011 "movq 40(%%rax), %%r8\n\t" \
2012 "movq 32(%%rax), %%rcx\n\t" \
2013 "movq 24(%%rax), %%rdx\n\t" \
2014 "movq 16(%%rax), %%rsi\n\t" \
2015 "movq 8(%%rax), %%rdi\n\t" \
2016 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2017 VALGRIND_CALL_NOREDIR_RAX \
2018 VALGRIND_RESTORE_STACK \
2019 VALGRIND_CFI_EPILOGUE \
2020 : /*out*/ "=a" (_res) \
2021 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2022 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2024 lval = (__typeof__(lval)) _res; \
2027 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2030 volatile OrigFn _orig = (orig); \
2031 volatile unsigned long _argvec[10]; \
2032 volatile unsigned long _res; \
2033 _argvec[0] = (unsigned long)_orig.nraddr; \
2034 _argvec[1] = (unsigned long)(arg1); \
2035 _argvec[2] = (unsigned long)(arg2); \
2036 _argvec[3] = (unsigned long)(arg3); \
2037 _argvec[4] = (unsigned long)(arg4); \
2038 _argvec[5] = (unsigned long)(arg5); \
2039 _argvec[6] = (unsigned long)(arg6); \
2040 _argvec[7] = (unsigned long)(arg7); \
2041 _argvec[8] = (unsigned long)(arg8); \
2042 _argvec[9] = (unsigned long)(arg9); \
2044 VALGRIND_CFI_PROLOGUE \
2045 VALGRIND_ALIGN_STACK \
2046 "subq $136,%%rsp\n\t" \
2047 "pushq 72(%%rax)\n\t" \
2048 "pushq 64(%%rax)\n\t" \
2049 "pushq 56(%%rax)\n\t" \
2050 "movq 48(%%rax), %%r9\n\t" \
2051 "movq 40(%%rax), %%r8\n\t" \
2052 "movq 32(%%rax), %%rcx\n\t" \
2053 "movq 24(%%rax), %%rdx\n\t" \
2054 "movq 16(%%rax), %%rsi\n\t" \
2055 "movq 8(%%rax), %%rdi\n\t" \
2056 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2057 VALGRIND_CALL_NOREDIR_RAX \
2058 VALGRIND_RESTORE_STACK \
2059 VALGRIND_CFI_EPILOGUE \
2060 : /*out*/ "=a" (_res) \
2061 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2062 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2064 lval = (__typeof__(lval)) _res; \
2067 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2068 arg7,arg8,arg9,arg10) \
2070 volatile OrigFn _orig = (orig); \
2071 volatile unsigned long _argvec[11]; \
2072 volatile unsigned long _res; \
2073 _argvec[0] = (unsigned long)_orig.nraddr; \
2074 _argvec[1] = (unsigned long)(arg1); \
2075 _argvec[2] = (unsigned long)(arg2); \
2076 _argvec[3] = (unsigned long)(arg3); \
2077 _argvec[4] = (unsigned long)(arg4); \
2078 _argvec[5] = (unsigned long)(arg5); \
2079 _argvec[6] = (unsigned long)(arg6); \
2080 _argvec[7] = (unsigned long)(arg7); \
2081 _argvec[8] = (unsigned long)(arg8); \
2082 _argvec[9] = (unsigned long)(arg9); \
2083 _argvec[10] = (unsigned long)(arg10); \
2085 VALGRIND_CFI_PROLOGUE \
2086 VALGRIND_ALIGN_STACK \
2087 "subq $128,%%rsp\n\t" \
2088 "pushq 80(%%rax)\n\t" \
2089 "pushq 72(%%rax)\n\t" \
2090 "pushq 64(%%rax)\n\t" \
2091 "pushq 56(%%rax)\n\t" \
2092 "movq 48(%%rax), %%r9\n\t" \
2093 "movq 40(%%rax), %%r8\n\t" \
2094 "movq 32(%%rax), %%rcx\n\t" \
2095 "movq 24(%%rax), %%rdx\n\t" \
2096 "movq 16(%%rax), %%rsi\n\t" \
2097 "movq 8(%%rax), %%rdi\n\t" \
2098 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2099 VALGRIND_CALL_NOREDIR_RAX \
2100 VALGRIND_RESTORE_STACK \
2101 VALGRIND_CFI_EPILOGUE \
2102 : /*out*/ "=a" (_res) \
2103 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2104 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2106 lval = (__typeof__(lval)) _res; \
2109 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2110 arg7,arg8,arg9,arg10,arg11) \
2112 volatile OrigFn _orig = (orig); \
2113 volatile unsigned long _argvec[12]; \
2114 volatile unsigned long _res; \
2115 _argvec[0] = (unsigned long)_orig.nraddr; \
2116 _argvec[1] = (unsigned long)(arg1); \
2117 _argvec[2] = (unsigned long)(arg2); \
2118 _argvec[3] = (unsigned long)(arg3); \
2119 _argvec[4] = (unsigned long)(arg4); \
2120 _argvec[5] = (unsigned long)(arg5); \
2121 _argvec[6] = (unsigned long)(arg6); \
2122 _argvec[7] = (unsigned long)(arg7); \
2123 _argvec[8] = (unsigned long)(arg8); \
2124 _argvec[9] = (unsigned long)(arg9); \
2125 _argvec[10] = (unsigned long)(arg10); \
2126 _argvec[11] = (unsigned long)(arg11); \
2128 VALGRIND_CFI_PROLOGUE \
2129 VALGRIND_ALIGN_STACK \
2130 "subq $136,%%rsp\n\t" \
2131 "pushq 88(%%rax)\n\t" \
2132 "pushq 80(%%rax)\n\t" \
2133 "pushq 72(%%rax)\n\t" \
2134 "pushq 64(%%rax)\n\t" \
2135 "pushq 56(%%rax)\n\t" \
2136 "movq 48(%%rax), %%r9\n\t" \
2137 "movq 40(%%rax), %%r8\n\t" \
2138 "movq 32(%%rax), %%rcx\n\t" \
2139 "movq 24(%%rax), %%rdx\n\t" \
2140 "movq 16(%%rax), %%rsi\n\t" \
2141 "movq 8(%%rax), %%rdi\n\t" \
2142 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2143 VALGRIND_CALL_NOREDIR_RAX \
2144 VALGRIND_RESTORE_STACK \
2145 VALGRIND_CFI_EPILOGUE \
2146 : /*out*/ "=a" (_res) \
2147 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2148 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2150 lval = (__typeof__(lval)) _res; \
2153 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2154 arg7,arg8,arg9,arg10,arg11,arg12) \
2156 volatile OrigFn _orig = (orig); \
2157 volatile unsigned long _argvec[13]; \
2158 volatile unsigned long _res; \
2159 _argvec[0] = (unsigned long)_orig.nraddr; \
2160 _argvec[1] = (unsigned long)(arg1); \
2161 _argvec[2] = (unsigned long)(arg2); \
2162 _argvec[3] = (unsigned long)(arg3); \
2163 _argvec[4] = (unsigned long)(arg4); \
2164 _argvec[5] = (unsigned long)(arg5); \
2165 _argvec[6] = (unsigned long)(arg6); \
2166 _argvec[7] = (unsigned long)(arg7); \
2167 _argvec[8] = (unsigned long)(arg8); \
2168 _argvec[9] = (unsigned long)(arg9); \
2169 _argvec[10] = (unsigned long)(arg10); \
2170 _argvec[11] = (unsigned long)(arg11); \
2171 _argvec[12] = (unsigned long)(arg12); \
2173 VALGRIND_CFI_PROLOGUE \
2174 VALGRIND_ALIGN_STACK \
2175 "subq $128,%%rsp\n\t" \
2176 "pushq 96(%%rax)\n\t" \
2177 "pushq 88(%%rax)\n\t" \
2178 "pushq 80(%%rax)\n\t" \
2179 "pushq 72(%%rax)\n\t" \
2180 "pushq 64(%%rax)\n\t" \
2181 "pushq 56(%%rax)\n\t" \
2182 "movq 48(%%rax), %%r9\n\t" \
2183 "movq 40(%%rax), %%r8\n\t" \
2184 "movq 32(%%rax), %%rcx\n\t" \
2185 "movq 24(%%rax), %%rdx\n\t" \
2186 "movq 16(%%rax), %%rsi\n\t" \
2187 "movq 8(%%rax), %%rdi\n\t" \
2188 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
2189 VALGRIND_CALL_NOREDIR_RAX \
2190 VALGRIND_RESTORE_STACK \
2191 VALGRIND_CFI_EPILOGUE \
2192 : /*out*/ "=a" (_res) \
2193 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
2194 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
2196 lval = (__typeof__(lval)) _res; \
2199 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2201 /* ------------------------ ppc32-linux ------------------------ */
2203 #if defined(PLAT_ppc32_linux)
2205 /* This is useful for finding out about the on-stack stuff:
2207 extern int f9 ( int,int,int,int,int,int,int,int,int );
2208 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2209 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2210 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2213 int g9  ( void ) { return f9 (11,22,33,44,55,66,77,88,99); }
2216 int g10 ( void ) { return f10(11,22,33,44,55,66,77,88,99,110); }
2219 int g11 ( void ) { return f11(11,22,33,44,55,66,77,88,99,110,121); }
2222 int g12 ( void ) { return f12(11,22,33,44,55,66,77,88,99,110,121,132); }
2224 */
2226 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2228 /* These regs are trashed by the hidden call. */
2229 #define __CALLER_SAVED_REGS \
2230 "lr", "ctr", "xer", \
2231 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2232 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2235 /* Macros to save and align the stack before making a function
2236 call and restore it afterwards as gcc may not keep the stack
2237 pointer aligned if it doesn't realise calls are being made
2238 to other functions. */
2240 #define VALGRIND_ALIGN_STACK \
2241 "mr 28,1\n\t" \
2242 "rlwinm 1,1,0,0,27\n\t"
2243 #define VALGRIND_RESTORE_STACK \
2244 "mr 1,28\n\t"
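/* Note on the asm above: "mr 28,1" parks the current stack pointer in
   r28, and "rlwinm 1,1,0,0,27" clears the low 4 bits of r1, i.e. it
   rounds the stack pointer down to a 16-byte boundary (r1 &= ~15 in C
   terms); "mr 1,28" undoes this afterwards.  That is why r28 appears
   in the clobber list of every CALL_FN_ macro in this section. */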
2246 /* These CALL_FN_ macros assume that on ppc32-linux,
2247 sizeof(unsigned long) == 4. */
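/* Shape of the _argvec blocks used below (a summary, for orientation):
   the asm receives r11 = &_argvec[0], where

      _argvec[0]    = _orig.nraddr   (the call target, loaded into r11 last)
      _argvec[1..N] = arg1..argN     (4 bytes each)

   so "lwz 3,4(11)" fetches arg1 into r3, "lwz 4,8(11)" fetches arg2
   into r4, and so on up to r10; any further arguments are copied onto
   the freshly lowered stack before the branch-and-link to r11. */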
2249 #define CALL_FN_W_v(lval, orig) \
2251 volatile OrigFn _orig = (orig); \
2252 volatile unsigned long _argvec[1]; \
2253 volatile unsigned long _res; \
2254 _argvec[0] = (unsigned long)_orig.nraddr; \
2256 VALGRIND_ALIGN_STACK \
2258 "lwz 11,0(11)\n\t" /* target->r11 */ \
2259 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2260 VALGRIND_RESTORE_STACK \
2262 : /*out*/ "=r" (_res) \
2263 : /*in*/ "r" (&_argvec[0]) \
2264 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2266 lval = (__typeof__(lval)) _res; \
2269 #define CALL_FN_W_W(lval, orig, arg1) \
2271 volatile OrigFn _orig = (orig); \
2272 volatile unsigned long _argvec[2]; \
2273 volatile unsigned long _res; \
2274 _argvec[0] = (unsigned long)_orig.nraddr; \
2275 _argvec[1] = (unsigned long)arg1; \
2277 VALGRIND_ALIGN_STACK \
2279 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2280 "lwz 11,0(11)\n\t" /* target->r11 */ \
2281 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2282 VALGRIND_RESTORE_STACK \
2284 : /*out*/ "=r" (_res) \
2285 : /*in*/ "r" (&_argvec[0]) \
2286 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2288 lval = (__typeof__(lval)) _res; \
2291 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2293 volatile OrigFn _orig = (orig); \
2294 volatile unsigned long _argvec[3]; \
2295 volatile unsigned long _res; \
2296 _argvec[0] = (unsigned long)_orig.nraddr; \
2297 _argvec[1] = (unsigned long)arg1; \
2298 _argvec[2] = (unsigned long)arg2; \
2300 VALGRIND_ALIGN_STACK \
2302 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2304 "lwz 11,0(11)\n\t" /* target->r11 */ \
2305 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2306 VALGRIND_RESTORE_STACK \
2308 : /*out*/ "=r" (_res) \
2309 : /*in*/ "r" (&_argvec[0]) \
2310 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2312 lval = (__typeof__(lval)) _res; \
2315 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2317 volatile OrigFn _orig = (orig); \
2318 volatile unsigned long _argvec[4]; \
2319 volatile unsigned long _res; \
2320 _argvec[0] = (unsigned long)_orig.nraddr; \
2321 _argvec[1] = (unsigned long)arg1; \
2322 _argvec[2] = (unsigned long)arg2; \
2323 _argvec[3] = (unsigned long)arg3; \
2325 VALGRIND_ALIGN_STACK \
2327 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2329 "lwz 5,12(11)\n\t" \
2330 "lwz 11,0(11)\n\t" /* target->r11 */ \
2331 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2332 VALGRIND_RESTORE_STACK \
2334 : /*out*/ "=r" (_res) \
2335 : /*in*/ "r" (&_argvec[0]) \
2336 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2338 lval = (__typeof__(lval)) _res; \
2341 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2343 volatile OrigFn _orig = (orig); \
2344 volatile unsigned long _argvec[5]; \
2345 volatile unsigned long _res; \
2346 _argvec[0] = (unsigned long)_orig.nraddr; \
2347 _argvec[1] = (unsigned long)arg1; \
2348 _argvec[2] = (unsigned long)arg2; \
2349 _argvec[3] = (unsigned long)arg3; \
2350 _argvec[4] = (unsigned long)arg4; \
2352 VALGRIND_ALIGN_STACK \
2354 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2356 "lwz 5,12(11)\n\t" \
2357 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2358 "lwz 11,0(11)\n\t" /* target->r11 */ \
2359 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2360 VALGRIND_RESTORE_STACK \
2362 : /*out*/ "=r" (_res) \
2363 : /*in*/ "r" (&_argvec[0]) \
2364 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2366 lval = (__typeof__(lval)) _res; \
2369 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2371 volatile OrigFn _orig = (orig); \
2372 volatile unsigned long _argvec[6]; \
2373 volatile unsigned long _res; \
2374 _argvec[0] = (unsigned long)_orig.nraddr; \
2375 _argvec[1] = (unsigned long)arg1; \
2376 _argvec[2] = (unsigned long)arg2; \
2377 _argvec[3] = (unsigned long)arg3; \
2378 _argvec[4] = (unsigned long)arg4; \
2379 _argvec[5] = (unsigned long)arg5; \
2381 VALGRIND_ALIGN_STACK \
2383 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2385 "lwz 5,12(11)\n\t" \
2386 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2387 "lwz 7,20(11)\n\t" \
2388 "lwz 11,0(11)\n\t" /* target->r11 */ \
2389 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2390 VALGRIND_RESTORE_STACK \
2392 : /*out*/ "=r" (_res) \
2393 : /*in*/ "r" (&_argvec[0]) \
2394 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2396 lval = (__typeof__(lval)) _res; \
2399 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2401 volatile OrigFn _orig = (orig); \
2402 volatile unsigned long _argvec[7]; \
2403 volatile unsigned long _res; \
2404 _argvec[0] = (unsigned long)_orig.nraddr; \
2405 _argvec[1] = (unsigned long)arg1; \
2406 _argvec[2] = (unsigned long)arg2; \
2407 _argvec[3] = (unsigned long)arg3; \
2408 _argvec[4] = (unsigned long)arg4; \
2409 _argvec[5] = (unsigned long)arg5; \
2410 _argvec[6] = (unsigned long)arg6; \
2412 VALGRIND_ALIGN_STACK \
2414 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2416 "lwz 5,12(11)\n\t" \
2417 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2418 "lwz 7,20(11)\n\t" \
2419 "lwz 8,24(11)\n\t" \
2420 "lwz 11,0(11)\n\t" /* target->r11 */ \
2421 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2422 VALGRIND_RESTORE_STACK \
2424 : /*out*/ "=r" (_res) \
2425 : /*in*/ "r" (&_argvec[0]) \
2426 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2428 lval = (__typeof__(lval)) _res; \
2431 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2434 volatile OrigFn _orig = (orig); \
2435 volatile unsigned long _argvec[8]; \
2436 volatile unsigned long _res; \
2437 _argvec[0] = (unsigned long)_orig.nraddr; \
2438 _argvec[1] = (unsigned long)arg1; \
2439 _argvec[2] = (unsigned long)arg2; \
2440 _argvec[3] = (unsigned long)arg3; \
2441 _argvec[4] = (unsigned long)arg4; \
2442 _argvec[5] = (unsigned long)arg5; \
2443 _argvec[6] = (unsigned long)arg6; \
2444 _argvec[7] = (unsigned long)arg7; \
2446 VALGRIND_ALIGN_STACK \
2448 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2450 "lwz 5,12(11)\n\t" \
2451 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2452 "lwz 7,20(11)\n\t" \
2453 "lwz 8,24(11)\n\t" \
2454 "lwz 9,28(11)\n\t" \
2455 "lwz 11,0(11)\n\t" /* target->r11 */ \
2456 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2457 VALGRIND_RESTORE_STACK \
2459 : /*out*/ "=r" (_res) \
2460 : /*in*/ "r" (&_argvec[0]) \
2461 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2463 lval = (__typeof__(lval)) _res; \
2466 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2469 volatile OrigFn _orig = (orig); \
2470 volatile unsigned long _argvec[9]; \
2471 volatile unsigned long _res; \
2472 _argvec[0] = (unsigned long)_orig.nraddr; \
2473 _argvec[1] = (unsigned long)arg1; \
2474 _argvec[2] = (unsigned long)arg2; \
2475 _argvec[3] = (unsigned long)arg3; \
2476 _argvec[4] = (unsigned long)arg4; \
2477 _argvec[5] = (unsigned long)arg5; \
2478 _argvec[6] = (unsigned long)arg6; \
2479 _argvec[7] = (unsigned long)arg7; \
2480 _argvec[8] = (unsigned long)arg8; \
2482 VALGRIND_ALIGN_STACK \
2484 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2486 "lwz 5,12(11)\n\t" \
2487 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2488 "lwz 7,20(11)\n\t" \
2489 "lwz 8,24(11)\n\t" \
2490 "lwz 9,28(11)\n\t" \
2491 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2492 "lwz 11,0(11)\n\t" /* target->r11 */ \
2493 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2494 VALGRIND_RESTORE_STACK \
2496 : /*out*/ "=r" (_res) \
2497 : /*in*/ "r" (&_argvec[0]) \
2498 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2500 lval = (__typeof__(lval)) _res; \
2503 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2506 volatile OrigFn _orig = (orig); \
2507 volatile unsigned long _argvec[10]; \
2508 volatile unsigned long _res; \
2509 _argvec[0] = (unsigned long)_orig.nraddr; \
2510 _argvec[1] = (unsigned long)arg1; \
2511 _argvec[2] = (unsigned long)arg2; \
2512 _argvec[3] = (unsigned long)arg3; \
2513 _argvec[4] = (unsigned long)arg4; \
2514 _argvec[5] = (unsigned long)arg5; \
2515 _argvec[6] = (unsigned long)arg6; \
2516 _argvec[7] = (unsigned long)arg7; \
2517 _argvec[8] = (unsigned long)arg8; \
2518 _argvec[9] = (unsigned long)arg9; \
2520 VALGRIND_ALIGN_STACK \
2522 "addi 1,1,-16\n\t" \
2524 "lwz 3,36(11)\n\t" \
2527 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2529 "lwz 5,12(11)\n\t" \
2530 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2531 "lwz 7,20(11)\n\t" \
2532 "lwz 8,24(11)\n\t" \
2533 "lwz 9,28(11)\n\t" \
2534 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2535 "lwz 11,0(11)\n\t" /* target->r11 */ \
2536 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2537 VALGRIND_RESTORE_STACK \
2539 : /*out*/ "=r" (_res) \
2540 : /*in*/ "r" (&_argvec[0]) \
2541 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2543 lval = (__typeof__(lval)) _res; \
2546 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2547 arg7,arg8,arg9,arg10) \
2549 volatile OrigFn _orig = (orig); \
2550 volatile unsigned long _argvec[11]; \
2551 volatile unsigned long _res; \
2552 _argvec[0] = (unsigned long)_orig.nraddr; \
2553 _argvec[1] = (unsigned long)arg1; \
2554 _argvec[2] = (unsigned long)arg2; \
2555 _argvec[3] = (unsigned long)arg3; \
2556 _argvec[4] = (unsigned long)arg4; \
2557 _argvec[5] = (unsigned long)arg5; \
2558 _argvec[6] = (unsigned long)arg6; \
2559 _argvec[7] = (unsigned long)arg7; \
2560 _argvec[8] = (unsigned long)arg8; \
2561 _argvec[9] = (unsigned long)arg9; \
2562 _argvec[10] = (unsigned long)arg10; \
2564 VALGRIND_ALIGN_STACK \
2566 "addi 1,1,-16\n\t" \
2568 "lwz 3,40(11)\n\t" \
2571 "lwz 3,36(11)\n\t" \
2574 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2576 "lwz 5,12(11)\n\t" \
2577 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2578 "lwz 7,20(11)\n\t" \
2579 "lwz 8,24(11)\n\t" \
2580 "lwz 9,28(11)\n\t" \
2581 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2582 "lwz 11,0(11)\n\t" /* target->r11 */ \
2583 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2584 VALGRIND_RESTORE_STACK \
2586 : /*out*/ "=r" (_res) \
2587 : /*in*/ "r" (&_argvec[0]) \
2588 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2590 lval = (__typeof__(lval)) _res; \
2593 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2594 arg7,arg8,arg9,arg10,arg11) \
2596 volatile OrigFn _orig = (orig); \
2597 volatile unsigned long _argvec[12]; \
2598 volatile unsigned long _res; \
2599 _argvec[0] = (unsigned long)_orig.nraddr; \
2600 _argvec[1] = (unsigned long)arg1; \
2601 _argvec[2] = (unsigned long)arg2; \
2602 _argvec[3] = (unsigned long)arg3; \
2603 _argvec[4] = (unsigned long)arg4; \
2604 _argvec[5] = (unsigned long)arg5; \
2605 _argvec[6] = (unsigned long)arg6; \
2606 _argvec[7] = (unsigned long)arg7; \
2607 _argvec[8] = (unsigned long)arg8; \
2608 _argvec[9] = (unsigned long)arg9; \
2609 _argvec[10] = (unsigned long)arg10; \
2610 _argvec[11] = (unsigned long)arg11; \
2612 VALGRIND_ALIGN_STACK \
2614 "addi 1,1,-32\n\t" \
2616 "lwz 3,44(11)\n\t" \
2619 "lwz 3,40(11)\n\t" \
2622 "lwz 3,36(11)\n\t" \
2625 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2627 "lwz 5,12(11)\n\t" \
2628 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2629 "lwz 7,20(11)\n\t" \
2630 "lwz 8,24(11)\n\t" \
2631 "lwz 9,28(11)\n\t" \
2632 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2633 "lwz 11,0(11)\n\t" /* target->r11 */ \
2634 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2635 VALGRIND_RESTORE_STACK \
2637 : /*out*/ "=r" (_res) \
2638 : /*in*/ "r" (&_argvec[0]) \
2639 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2641 lval = (__typeof__(lval)) _res; \
2644 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2645 arg7,arg8,arg9,arg10,arg11,arg12) \
2647 volatile OrigFn _orig = (orig); \
2648 volatile unsigned long _argvec[13]; \
2649 volatile unsigned long _res; \
2650 _argvec[0] = (unsigned long)_orig.nraddr; \
2651 _argvec[1] = (unsigned long)arg1; \
2652 _argvec[2] = (unsigned long)arg2; \
2653 _argvec[3] = (unsigned long)arg3; \
2654 _argvec[4] = (unsigned long)arg4; \
2655 _argvec[5] = (unsigned long)arg5; \
2656 _argvec[6] = (unsigned long)arg6; \
2657 _argvec[7] = (unsigned long)arg7; \
2658 _argvec[8] = (unsigned long)arg8; \
2659 _argvec[9] = (unsigned long)arg9; \
2660 _argvec[10] = (unsigned long)arg10; \
2661 _argvec[11] = (unsigned long)arg11; \
2662 _argvec[12] = (unsigned long)arg12; \
2664 VALGRIND_ALIGN_STACK \
2666 "addi 1,1,-32\n\t" \
2668 "lwz 3,48(11)\n\t" \
2671 "lwz 3,44(11)\n\t" \
2674 "lwz 3,40(11)\n\t" \
2677 "lwz 3,36(11)\n\t" \
2680 "lwz 3,4(11)\n\t" /* arg1->r3 */ \
2682 "lwz 5,12(11)\n\t" \
2683 "lwz 6,16(11)\n\t" /* arg4->r6 */ \
2684 "lwz 7,20(11)\n\t" \
2685 "lwz 8,24(11)\n\t" \
2686 "lwz 9,28(11)\n\t" \
2687 "lwz 10,32(11)\n\t" /* arg8->r10 */ \
2688 "lwz 11,0(11)\n\t" /* target->r11 */ \
2689 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2690 VALGRIND_RESTORE_STACK \
2692 : /*out*/ "=r" (_res) \
2693 : /*in*/ "r" (&_argvec[0]) \
2694 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2696 lval = (__typeof__(lval)) _res; \
2699 #endif /* PLAT_ppc32_linux */
2701 /* ------------------------ ppc64-linux ------------------------ */
2703 #if defined(PLAT_ppc64be_linux)
2705 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2707 /* These regs are trashed by the hidden call. */
2708 #define __CALLER_SAVED_REGS \
2709 "lr", "ctr", "xer", \
2710 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
2711 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
2714 /* Macros to save and align the stack before making a function
2715 call and restore it afterwards as gcc may not keep the stack
2716 pointer aligned if it doesn't realise calls are being made
2717 to other functions. */
2719 #define VALGRIND_ALIGN_STACK \
2720 "mr 28,1\n\t" \
2721 "rldicr 1,1,0,59\n\t"
2722 #define VALGRIND_RESTORE_STACK \
2723 "mr 1,28\n\t"
2725 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2726 long) == 8. */
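/* Shape of the _argvec blocks used below (a summary, for orientation):
   the asm is handed &_argvec[2] in r11, so

      -16(11) = _argvec[0]   scratch slot: the caller's r2 (TOC pointer)
                             is saved here before the call and reloaded
                             afterwards
       -8(11) = _argvec[1]   _orig.r2, the callee's TOC pointer
        0(11) = _argvec[2]   _orig.nraddr, the call target
        8(11), 16(11), ...   arg1, arg2, ... (8 bytes each, r3..r10)

   For more than 8 arguments the frame is extended ("addi 1,1,-128" or
   "addi 1,1,-144") and the remaining arguments are stored into the new
   frame's parameter save area (112(1), 120(1), ...) before the
   branch-and-link to r11. */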
2728 #define CALL_FN_W_v(lval, orig) \
2730 volatile OrigFn _orig = (orig); \
2731 volatile unsigned long _argvec[3+0]; \
2732 volatile unsigned long _res; \
2733 /* _argvec[0] holds current r2 across the call */ \
2734 _argvec[1] = (unsigned long)_orig.r2; \
2735 _argvec[2] = (unsigned long)_orig.nraddr; \
2737 VALGRIND_ALIGN_STACK \
2739 "std 2,-16(11)\n\t" /* save tocptr */ \
2740 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2741 "ld 11, 0(11)\n\t" /* target->r11 */ \
2742 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2745 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2746 VALGRIND_RESTORE_STACK \
2747 : /*out*/ "=r" (_res) \
2748 : /*in*/ "r" (&_argvec[2]) \
2749 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2751 lval = (__typeof__(lval)) _res; \
2754 #define CALL_FN_W_W(lval, orig, arg1) \
2756 volatile OrigFn _orig = (orig); \
2757 volatile unsigned long _argvec[3+1]; \
2758 volatile unsigned long _res; \
2759 /* _argvec[0] holds current r2 across the call */ \
2760 _argvec[1] = (unsigned long)_orig.r2; \
2761 _argvec[2] = (unsigned long)_orig.nraddr; \
2762 _argvec[2+1] = (unsigned long)arg1; \
2764 VALGRIND_ALIGN_STACK \
2766 "std 2,-16(11)\n\t" /* save tocptr */ \
2767 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2768 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2769 "ld 11, 0(11)\n\t" /* target->r11 */ \
2770 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2773 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2774 VALGRIND_RESTORE_STACK \
2775 : /*out*/ "=r" (_res) \
2776 : /*in*/ "r" (&_argvec[2]) \
2777 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2779 lval = (__typeof__(lval)) _res; \
2782 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
2784 volatile OrigFn _orig = (orig); \
2785 volatile unsigned long _argvec[3+2]; \
2786 volatile unsigned long _res; \
2787 /* _argvec[0] holds current r2 across the call */ \
2788 _argvec[1] = (unsigned long)_orig.r2; \
2789 _argvec[2] = (unsigned long)_orig.nraddr; \
2790 _argvec[2+1] = (unsigned long)arg1; \
2791 _argvec[2+2] = (unsigned long)arg2; \
2793 VALGRIND_ALIGN_STACK \
2795 "std 2,-16(11)\n\t" /* save tocptr */ \
2796 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2797 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2798 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2799 "ld 11, 0(11)\n\t" /* target->r11 */ \
2800 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2803 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2804 VALGRIND_RESTORE_STACK \
2805 : /*out*/ "=r" (_res) \
2806 : /*in*/ "r" (&_argvec[2]) \
2807 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2809 lval = (__typeof__(lval)) _res; \
2812 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
2814 volatile OrigFn _orig = (orig); \
2815 volatile unsigned long _argvec[3+3]; \
2816 volatile unsigned long _res; \
2817 /* _argvec[0] holds current r2 across the call */ \
2818 _argvec[1] = (unsigned long)_orig.r2; \
2819 _argvec[2] = (unsigned long)_orig.nraddr; \
2820 _argvec[2+1] = (unsigned long)arg1; \
2821 _argvec[2+2] = (unsigned long)arg2; \
2822 _argvec[2+3] = (unsigned long)arg3; \
2824 VALGRIND_ALIGN_STACK \
2826 "std 2,-16(11)\n\t" /* save tocptr */ \
2827 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2828 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2829 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2830 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2831 "ld 11, 0(11)\n\t" /* target->r11 */ \
2832 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2835 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2836 VALGRIND_RESTORE_STACK \
2837 : /*out*/ "=r" (_res) \
2838 : /*in*/ "r" (&_argvec[2]) \
2839 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2841 lval = (__typeof__(lval)) _res; \
2844 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
2846 volatile OrigFn _orig = (orig); \
2847 volatile unsigned long _argvec[3+4]; \
2848 volatile unsigned long _res; \
2849 /* _argvec[0] holds current r2 across the call */ \
2850 _argvec[1] = (unsigned long)_orig.r2; \
2851 _argvec[2] = (unsigned long)_orig.nraddr; \
2852 _argvec[2+1] = (unsigned long)arg1; \
2853 _argvec[2+2] = (unsigned long)arg2; \
2854 _argvec[2+3] = (unsigned long)arg3; \
2855 _argvec[2+4] = (unsigned long)arg4; \
2857 VALGRIND_ALIGN_STACK \
2859 "std 2,-16(11)\n\t" /* save tocptr */ \
2860 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2861 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2862 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2863 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2864 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2865 "ld 11, 0(11)\n\t" /* target->r11 */ \
2866 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2869 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2870 VALGRIND_RESTORE_STACK \
2871 : /*out*/ "=r" (_res) \
2872 : /*in*/ "r" (&_argvec[2]) \
2873 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2875 lval = (__typeof__(lval)) _res; \
2878 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2880 volatile OrigFn _orig = (orig); \
2881 volatile unsigned long _argvec[3+5]; \
2882 volatile unsigned long _res; \
2883 /* _argvec[0] holds current r2 across the call */ \
2884 _argvec[1] = (unsigned long)_orig.r2; \
2885 _argvec[2] = (unsigned long)_orig.nraddr; \
2886 _argvec[2+1] = (unsigned long)arg1; \
2887 _argvec[2+2] = (unsigned long)arg2; \
2888 _argvec[2+3] = (unsigned long)arg3; \
2889 _argvec[2+4] = (unsigned long)arg4; \
2890 _argvec[2+5] = (unsigned long)arg5; \
2892 VALGRIND_ALIGN_STACK \
2894 "std 2,-16(11)\n\t" /* save tocptr */ \
2895 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2896 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2897 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2898 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2899 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2900 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2901 "ld 11, 0(11)\n\t" /* target->r11 */ \
2902 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2905 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2906 VALGRIND_RESTORE_STACK \
2907 : /*out*/ "=r" (_res) \
2908 : /*in*/ "r" (&_argvec[2]) \
2909 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2911 lval = (__typeof__(lval)) _res; \
2914 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2916 volatile OrigFn _orig = (orig); \
2917 volatile unsigned long _argvec[3+6]; \
2918 volatile unsigned long _res; \
2919 /* _argvec[0] holds current r2 across the call */ \
2920 _argvec[1] = (unsigned long)_orig.r2; \
2921 _argvec[2] = (unsigned long)_orig.nraddr; \
2922 _argvec[2+1] = (unsigned long)arg1; \
2923 _argvec[2+2] = (unsigned long)arg2; \
2924 _argvec[2+3] = (unsigned long)arg3; \
2925 _argvec[2+4] = (unsigned long)arg4; \
2926 _argvec[2+5] = (unsigned long)arg5; \
2927 _argvec[2+6] = (unsigned long)arg6; \
2929 VALGRIND_ALIGN_STACK \
2931 "std 2,-16(11)\n\t" /* save tocptr */ \
2932 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2933 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2934 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2935 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2936 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2937 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2938 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2939 "ld 11, 0(11)\n\t" /* target->r11 */ \
2940 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2943 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2944 VALGRIND_RESTORE_STACK \
2945 : /*out*/ "=r" (_res) \
2946 : /*in*/ "r" (&_argvec[2]) \
2947 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2949 lval = (__typeof__(lval)) _res; \
2952 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2955 volatile OrigFn _orig = (orig); \
2956 volatile unsigned long _argvec[3+7]; \
2957 volatile unsigned long _res; \
2958 /* _argvec[0] holds current r2 across the call */ \
2959 _argvec[1] = (unsigned long)_orig.r2; \
2960 _argvec[2] = (unsigned long)_orig.nraddr; \
2961 _argvec[2+1] = (unsigned long)arg1; \
2962 _argvec[2+2] = (unsigned long)arg2; \
2963 _argvec[2+3] = (unsigned long)arg3; \
2964 _argvec[2+4] = (unsigned long)arg4; \
2965 _argvec[2+5] = (unsigned long)arg5; \
2966 _argvec[2+6] = (unsigned long)arg6; \
2967 _argvec[2+7] = (unsigned long)arg7; \
2969 VALGRIND_ALIGN_STACK \
2971 "std 2,-16(11)\n\t" /* save tocptr */ \
2972 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2973 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2974 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2975 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2976 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2977 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2978 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2979 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2980 "ld 11, 0(11)\n\t" /* target->r11 */ \
2981 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2984 "ld 2,-16(11)\n\t" /* restore tocptr */ \
2985 VALGRIND_RESTORE_STACK \
2986 : /*out*/ "=r" (_res) \
2987 : /*in*/ "r" (&_argvec[2]) \
2988 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2990 lval = (__typeof__(lval)) _res; \
2993 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2996 volatile OrigFn _orig = (orig); \
2997 volatile unsigned long _argvec[3+8]; \
2998 volatile unsigned long _res; \
2999 /* _argvec[0] holds current r2 across the call */ \
3000 _argvec[1] = (unsigned long)_orig.r2; \
3001 _argvec[2] = (unsigned long)_orig.nraddr; \
3002 _argvec[2+1] = (unsigned long)arg1; \
3003 _argvec[2+2] = (unsigned long)arg2; \
3004 _argvec[2+3] = (unsigned long)arg3; \
3005 _argvec[2+4] = (unsigned long)arg4; \
3006 _argvec[2+5] = (unsigned long)arg5; \
3007 _argvec[2+6] = (unsigned long)arg6; \
3008 _argvec[2+7] = (unsigned long)arg7; \
3009 _argvec[2+8] = (unsigned long)arg8; \
3011 VALGRIND_ALIGN_STACK \
3013 "std 2,-16(11)\n\t" /* save tocptr */ \
3014 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3015 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3016 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3017 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3018 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3019 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3020 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3021 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3022 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3023 "ld 11, 0(11)\n\t" /* target->r11 */ \
3024 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3027 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3028 VALGRIND_RESTORE_STACK \
3029 : /*out*/ "=r" (_res) \
3030 : /*in*/ "r" (&_argvec[2]) \
3031 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3033 lval = (__typeof__(lval)) _res; \
3036 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3039 volatile OrigFn _orig = (orig); \
3040 volatile unsigned long _argvec[3+9]; \
3041 volatile unsigned long _res; \
3042 /* _argvec[0] holds current r2 across the call */ \
3043 _argvec[1] = (unsigned long)_orig.r2; \
3044 _argvec[2] = (unsigned long)_orig.nraddr; \
3045 _argvec[2+1] = (unsigned long)arg1; \
3046 _argvec[2+2] = (unsigned long)arg2; \
3047 _argvec[2+3] = (unsigned long)arg3; \
3048 _argvec[2+4] = (unsigned long)arg4; \
3049 _argvec[2+5] = (unsigned long)arg5; \
3050 _argvec[2+6] = (unsigned long)arg6; \
3051 _argvec[2+7] = (unsigned long)arg7; \
3052 _argvec[2+8] = (unsigned long)arg8; \
3053 _argvec[2+9] = (unsigned long)arg9; \
3055 VALGRIND_ALIGN_STACK \
3057 "std 2,-16(11)\n\t" /* save tocptr */ \
3058 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3059 "addi 1,1,-128\n\t" /* expand stack frame */ \
3062 "std 3,112(1)\n\t" \
3064 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3065 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3066 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3067 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3068 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3069 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3070 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3071 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3072 "ld 11, 0(11)\n\t" /* target->r11 */ \
3073 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3076 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3077 VALGRIND_RESTORE_STACK \
3078 : /*out*/ "=r" (_res) \
3079 : /*in*/ "r" (&_argvec[2]) \
3080 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3082 lval = (__typeof__(lval)) _res; \
3085 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3086 arg7,arg8,arg9,arg10) \
3088 volatile OrigFn _orig = (orig); \
3089 volatile unsigned long _argvec[3+10]; \
3090 volatile unsigned long _res; \
3091 /* _argvec[0] holds current r2 across the call */ \
3092 _argvec[1] = (unsigned long)_orig.r2; \
3093 _argvec[2] = (unsigned long)_orig.nraddr; \
3094 _argvec[2+1] = (unsigned long)arg1; \
3095 _argvec[2+2] = (unsigned long)arg2; \
3096 _argvec[2+3] = (unsigned long)arg3; \
3097 _argvec[2+4] = (unsigned long)arg4; \
3098 _argvec[2+5] = (unsigned long)arg5; \
3099 _argvec[2+6] = (unsigned long)arg6; \
3100 _argvec[2+7] = (unsigned long)arg7; \
3101 _argvec[2+8] = (unsigned long)arg8; \
3102 _argvec[2+9] = (unsigned long)arg9; \
3103 _argvec[2+10] = (unsigned long)arg10; \
3105 VALGRIND_ALIGN_STACK \
3107 "std 2,-16(11)\n\t" /* save tocptr */ \
3108 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3109 "addi 1,1,-128\n\t" /* expand stack frame */ \
3112 "std 3,120(1)\n\t" \
3115 "std 3,112(1)\n\t" \
3117 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3118 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3119 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3120 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3121 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3122 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3123 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3124 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3125 "ld 11, 0(11)\n\t" /* target->r11 */ \
3126 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3129 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3130 VALGRIND_RESTORE_STACK \
3131 : /*out*/ "=r" (_res) \
3132 : /*in*/ "r" (&_argvec[2]) \
3133 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3135 lval = (__typeof__(lval)) _res; \
3138 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3139 arg7,arg8,arg9,arg10,arg11) \
3141 volatile OrigFn _orig = (orig); \
3142 volatile unsigned long _argvec[3+11]; \
3143 volatile unsigned long _res; \
3144 /* _argvec[0] holds current r2 across the call */ \
3145 _argvec[1] = (unsigned long)_orig.r2; \
3146 _argvec[2] = (unsigned long)_orig.nraddr; \
3147 _argvec[2+1] = (unsigned long)arg1; \
3148 _argvec[2+2] = (unsigned long)arg2; \
3149 _argvec[2+3] = (unsigned long)arg3; \
3150 _argvec[2+4] = (unsigned long)arg4; \
3151 _argvec[2+5] = (unsigned long)arg5; \
3152 _argvec[2+6] = (unsigned long)arg6; \
3153 _argvec[2+7] = (unsigned long)arg7; \
3154 _argvec[2+8] = (unsigned long)arg8; \
3155 _argvec[2+9] = (unsigned long)arg9; \
3156 _argvec[2+10] = (unsigned long)arg10; \
3157 _argvec[2+11] = (unsigned long)arg11; \
3159 VALGRIND_ALIGN_STACK \
3161 "std 2,-16(11)\n\t" /* save tocptr */ \
3162 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3163 "addi 1,1,-144\n\t" /* expand stack frame */ \
3166 "std 3,128(1)\n\t" \
3169 "std 3,120(1)\n\t" \
3172 "std 3,112(1)\n\t" \
3174 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3175 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3176 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3177 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3178 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3179 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3180 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3181 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3182 "ld 11, 0(11)\n\t" /* target->r11 */ \
3183 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3186 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3187 VALGRIND_RESTORE_STACK \
3188 : /*out*/ "=r" (_res) \
3189 : /*in*/ "r" (&_argvec[2]) \
3190 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3192 lval = (__typeof__(lval)) _res; \
3195 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3196 arg7,arg8,arg9,arg10,arg11,arg12) \
3198 volatile OrigFn _orig = (orig); \
3199 volatile unsigned long _argvec[3+12]; \
3200 volatile unsigned long _res; \
3201 /* _argvec[0] holds current r2 across the call */ \
3202 _argvec[1] = (unsigned long)_orig.r2; \
3203 _argvec[2] = (unsigned long)_orig.nraddr; \
3204 _argvec[2+1] = (unsigned long)arg1; \
3205 _argvec[2+2] = (unsigned long)arg2; \
3206 _argvec[2+3] = (unsigned long)arg3; \
3207 _argvec[2+4] = (unsigned long)arg4; \
3208 _argvec[2+5] = (unsigned long)arg5; \
3209 _argvec[2+6] = (unsigned long)arg6; \
3210 _argvec[2+7] = (unsigned long)arg7; \
3211 _argvec[2+8] = (unsigned long)arg8; \
3212 _argvec[2+9] = (unsigned long)arg9; \
3213 _argvec[2+10] = (unsigned long)arg10; \
3214 _argvec[2+11] = (unsigned long)arg11; \
3215 _argvec[2+12] = (unsigned long)arg12; \
3217 VALGRIND_ALIGN_STACK \
3219 "std 2,-16(11)\n\t" /* save tocptr */ \
3220 "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3221 "addi 1,1,-144\n\t" /* expand stack frame */ \
3224 "std 3,136(1)\n\t" \
3227 "std 3,128(1)\n\t" \
3230 "std 3,120(1)\n\t" \
3233 "std 3,112(1)\n\t" \
3235 "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3236 "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3237 "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3238 "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3239 "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3240 "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3241 "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3242 "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3243 "ld 11, 0(11)\n\t" /* target->r11 */ \
3244 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3247 "ld 2,-16(11)\n\t" /* restore tocptr */ \
3248 VALGRIND_RESTORE_STACK \
3249 : /*out*/ "=r" (_res) \
3250 : /*in*/ "r" (&_argvec[2]) \
3251 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3253 lval = (__typeof__(lval)) _res; \
3256 #endif /* PLAT_ppc64be_linux */
3258 /* ------------------------- ppc64le-linux ----------------------- */
3259 #if defined(PLAT_ppc64le_linux)
3261 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3263 /* These regs are trashed by the hidden call. */
3264 #define __CALLER_SAVED_REGS \
3265 "lr", "ctr", "xer", \
3266 "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
3267 "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
3270 /* Macros to save and align the stack before making a function
3271 call and restore it afterwards as gcc may not keep the stack
3272 pointer aligned if it doesn't realise calls are being made
3273 to other functions. */
3275 #define VALGRIND_ALIGN_STACK \
3276 "mr 28,1\n\t" \
3277 "rldicr 1,1,0,59\n\t"
3278 #define VALGRIND_RESTORE_STACK \
3279 "mr 1,28\n\t"
3281 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3282 long) == 8. */
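/* The _argvec layout here is the same as in the ppc64be section above
   (TOC scratch slot at -16, callee TOC at -8, call target at 0,
   arguments at 8, 16, ...), except that the base and branch register
   is r12 rather than r11, since ELFv2 expects the callee's address in
   r12 at its global entry point, and stack-passed arguments (the ninth
   onwards) go at 96(1), 104(1), ... in the smaller ELFv2 parameter
   save area. */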
3284 #define CALL_FN_W_v(lval, orig) \
3286 volatile OrigFn _orig = (orig); \
3287 volatile unsigned long _argvec[3+0]; \
3288 volatile unsigned long _res; \
3289 /* _argvec[0] holds current r2 across the call */ \
3290 _argvec[1] = (unsigned long)_orig.r2; \
3291 _argvec[2] = (unsigned long)_orig.nraddr; \
3293 VALGRIND_ALIGN_STACK \
3295 "std 2,-16(12)\n\t" /* save tocptr */ \
3296 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3297 "ld 12, 0(12)\n\t" /* target->r12 */ \
3298 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3301 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3302 VALGRIND_RESTORE_STACK \
3303 : /*out*/ "=r" (_res) \
3304 : /*in*/ "r" (&_argvec[2]) \
3305 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3307 lval = (__typeof__(lval)) _res; \
3310 #define CALL_FN_W_W(lval, orig, arg1) \
3312 volatile OrigFn _orig = (orig); \
3313 volatile unsigned long _argvec[3+1]; \
3314 volatile unsigned long _res; \
3315 /* _argvec[0] holds current r2 across the call */ \
3316 _argvec[1] = (unsigned long)_orig.r2; \
3317 _argvec[2] = (unsigned long)_orig.nraddr; \
3318 _argvec[2+1] = (unsigned long)arg1; \
3320 VALGRIND_ALIGN_STACK \
3322 "std 2,-16(12)\n\t" /* save tocptr */ \
3323 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3324 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3325 "ld 12, 0(12)\n\t" /* target->r12 */ \
3326 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3329 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3330 VALGRIND_RESTORE_STACK \
3331 : /*out*/ "=r" (_res) \
3332 : /*in*/ "r" (&_argvec[2]) \
3333 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3335 lval = (__typeof__(lval)) _res; \
3338 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3340 volatile OrigFn _orig = (orig); \
3341 volatile unsigned long _argvec[3+2]; \
3342 volatile unsigned long _res; \
3343 /* _argvec[0] holds current r2 across the call */ \
3344 _argvec[1] = (unsigned long)_orig.r2; \
3345 _argvec[2] = (unsigned long)_orig.nraddr; \
3346 _argvec[2+1] = (unsigned long)arg1; \
3347 _argvec[2+2] = (unsigned long)arg2; \
3349 VALGRIND_ALIGN_STACK \
3351 "std 2,-16(12)\n\t" /* save tocptr */ \
3352 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3353 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3354 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3355 "ld 12, 0(12)\n\t" /* target->r12 */ \
3356 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3359 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3360 VALGRIND_RESTORE_STACK \
3361 : /*out*/ "=r" (_res) \
3362 : /*in*/ "r" (&_argvec[2]) \
3363 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3365 lval = (__typeof__(lval)) _res; \
3368 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3370 volatile OrigFn _orig = (orig); \
3371 volatile unsigned long _argvec[3+3]; \
3372 volatile unsigned long _res; \
3373 /* _argvec[0] holds current r2 across the call */ \
3374 _argvec[1] = (unsigned long)_orig.r2; \
3375 _argvec[2] = (unsigned long)_orig.nraddr; \
3376 _argvec[2+1] = (unsigned long)arg1; \
3377 _argvec[2+2] = (unsigned long)arg2; \
3378 _argvec[2+3] = (unsigned long)arg3; \
3380 VALGRIND_ALIGN_STACK \
3382 "std 2,-16(12)\n\t" /* save tocptr */ \
3383 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3384 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3385 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3386 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3387 "ld 12, 0(12)\n\t" /* target->r12 */ \
3388 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3391 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3392 VALGRIND_RESTORE_STACK \
3393 : /*out*/ "=r" (_res) \
3394 : /*in*/ "r" (&_argvec[2]) \
3395 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3397 lval = (__typeof__(lval)) _res; \
3400 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3402 volatile OrigFn _orig = (orig); \
3403 volatile unsigned long _argvec[3+4]; \
3404 volatile unsigned long _res; \
3405 /* _argvec[0] holds current r2 across the call */ \
3406 _argvec[1] = (unsigned long)_orig.r2; \
3407 _argvec[2] = (unsigned long)_orig.nraddr; \
3408 _argvec[2+1] = (unsigned long)arg1; \
3409 _argvec[2+2] = (unsigned long)arg2; \
3410 _argvec[2+3] = (unsigned long)arg3; \
3411 _argvec[2+4] = (unsigned long)arg4; \
3413 VALGRIND_ALIGN_STACK \
3415 "std 2,-16(12)\n\t" /* save tocptr */ \
3416 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3417 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3418 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3419 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3420 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3421 "ld 12, 0(12)\n\t" /* target->r12 */ \
3422 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3425 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3426 VALGRIND_RESTORE_STACK \
3427 : /*out*/ "=r" (_res) \
3428 : /*in*/ "r" (&_argvec[2]) \
3429 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3431 lval = (__typeof__(lval)) _res; \
3434 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3436 volatile OrigFn _orig = (orig); \
3437 volatile unsigned long _argvec[3+5]; \
3438 volatile unsigned long _res; \
3439 /* _argvec[0] holds current r2 across the call */ \
3440 _argvec[1] = (unsigned long)_orig.r2; \
3441 _argvec[2] = (unsigned long)_orig.nraddr; \
3442 _argvec[2+1] = (unsigned long)arg1; \
3443 _argvec[2+2] = (unsigned long)arg2; \
3444 _argvec[2+3] = (unsigned long)arg3; \
3445 _argvec[2+4] = (unsigned long)arg4; \
3446 _argvec[2+5] = (unsigned long)arg5; \
3448 VALGRIND_ALIGN_STACK \
3450 "std 2,-16(12)\n\t" /* save tocptr */ \
3451 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3452 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3453 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3454 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3455 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3456 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3457 "ld 12, 0(12)\n\t" /* target->r12 */ \
3458 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3461 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3462 VALGRIND_RESTORE_STACK \
3463 : /*out*/ "=r" (_res) \
3464 : /*in*/ "r" (&_argvec[2]) \
3465 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3467 lval = (__typeof__(lval)) _res; \
3470 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3472 volatile OrigFn _orig = (orig); \
3473 volatile unsigned long _argvec[3+6]; \
3474 volatile unsigned long _res; \
3475 /* _argvec[0] holds current r2 across the call */ \
3476 _argvec[1] = (unsigned long)_orig.r2; \
3477 _argvec[2] = (unsigned long)_orig.nraddr; \
3478 _argvec[2+1] = (unsigned long)arg1; \
3479 _argvec[2+2] = (unsigned long)arg2; \
3480 _argvec[2+3] = (unsigned long)arg3; \
3481 _argvec[2+4] = (unsigned long)arg4; \
3482 _argvec[2+5] = (unsigned long)arg5; \
3483 _argvec[2+6] = (unsigned long)arg6; \
3485 VALGRIND_ALIGN_STACK \
3487 "std 2,-16(12)\n\t" /* save tocptr */ \
3488 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3489 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3490 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3491 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3492 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3493 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3494 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3495 "ld 12, 0(12)\n\t" /* target->r12 */ \
3496 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3499 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3500 VALGRIND_RESTORE_STACK \
3501 : /*out*/ "=r" (_res) \
3502 : /*in*/ "r" (&_argvec[2]) \
3503 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3505 lval = (__typeof__(lval)) _res; \
3508 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3511 volatile OrigFn _orig = (orig); \
3512 volatile unsigned long _argvec[3+7]; \
3513 volatile unsigned long _res; \
3514 /* _argvec[0] holds current r2 across the call */ \
3515 _argvec[1] = (unsigned long)_orig.r2; \
3516 _argvec[2] = (unsigned long)_orig.nraddr; \
3517 _argvec[2+1] = (unsigned long)arg1; \
3518 _argvec[2+2] = (unsigned long)arg2; \
3519 _argvec[2+3] = (unsigned long)arg3; \
3520 _argvec[2+4] = (unsigned long)arg4; \
3521 _argvec[2+5] = (unsigned long)arg5; \
3522 _argvec[2+6] = (unsigned long)arg6; \
3523 _argvec[2+7] = (unsigned long)arg7; \
3525 VALGRIND_ALIGN_STACK \
3527 "std 2,-16(12)\n\t" /* save tocptr */ \
3528 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3529 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3530 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3531 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3532 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3533 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3534 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3535 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3536 "ld 12, 0(12)\n\t" /* target->r12 */ \
3537 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3540 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3541 VALGRIND_RESTORE_STACK \
3542 : /*out*/ "=r" (_res) \
3543 : /*in*/ "r" (&_argvec[2]) \
3544 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3546 lval = (__typeof__(lval)) _res; \
3549 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3552 volatile OrigFn _orig = (orig); \
3553 volatile unsigned long _argvec[3+8]; \
3554 volatile unsigned long _res; \
3555 /* _argvec[0] holds current r2 across the call */ \
3556 _argvec[1] = (unsigned long)_orig.r2; \
3557 _argvec[2] = (unsigned long)_orig.nraddr; \
3558 _argvec[2+1] = (unsigned long)arg1; \
3559 _argvec[2+2] = (unsigned long)arg2; \
3560 _argvec[2+3] = (unsigned long)arg3; \
3561 _argvec[2+4] = (unsigned long)arg4; \
3562 _argvec[2+5] = (unsigned long)arg5; \
3563 _argvec[2+6] = (unsigned long)arg6; \
3564 _argvec[2+7] = (unsigned long)arg7; \
3565 _argvec[2+8] = (unsigned long)arg8; \
3567 VALGRIND_ALIGN_STACK \
3569 "std 2,-16(12)\n\t" /* save tocptr */ \
3570 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3571 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3572 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3573 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3574 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3575 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3576 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3577 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3578 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3579 "ld 12, 0(12)\n\t" /* target->r12 */ \
3580 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3583 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3584 VALGRIND_RESTORE_STACK \
3585 : /*out*/ "=r" (_res) \
3586 : /*in*/ "r" (&_argvec[2]) \
3587 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3589 lval = (__typeof__(lval)) _res; \
3592 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3595 volatile OrigFn _orig = (orig); \
3596 volatile unsigned long _argvec[3+9]; \
3597 volatile unsigned long _res; \
3598 /* _argvec[0] holds current r2 across the call */ \
3599 _argvec[1] = (unsigned long)_orig.r2; \
3600 _argvec[2] = (unsigned long)_orig.nraddr; \
3601 _argvec[2+1] = (unsigned long)arg1; \
3602 _argvec[2+2] = (unsigned long)arg2; \
3603 _argvec[2+3] = (unsigned long)arg3; \
3604 _argvec[2+4] = (unsigned long)arg4; \
3605 _argvec[2+5] = (unsigned long)arg5; \
3606 _argvec[2+6] = (unsigned long)arg6; \
3607 _argvec[2+7] = (unsigned long)arg7; \
3608 _argvec[2+8] = (unsigned long)arg8; \
3609 _argvec[2+9] = (unsigned long)arg9; \
3611 VALGRIND_ALIGN_STACK \
3613 "std 2,-16(12)\n\t" /* save tocptr */ \
3614 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3615 "addi 1,1,-128\n\t" /* expand stack frame */ \
3620 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3621 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3622 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3623 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3624 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3625 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3626 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3627 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3628 "ld 12, 0(12)\n\t" /* target->r12 */ \
3629 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3632 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3633 VALGRIND_RESTORE_STACK \
3634 : /*out*/ "=r" (_res) \
3635 : /*in*/ "r" (&_argvec[2]) \
3636 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3638 lval = (__typeof__(lval)) _res; \
3641 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3642 arg7,arg8,arg9,arg10) \
3644 volatile OrigFn _orig = (orig); \
3645 volatile unsigned long _argvec[3+10]; \
3646 volatile unsigned long _res; \
3647 /* _argvec[0] holds current r2 across the call */ \
3648 _argvec[1] = (unsigned long)_orig.r2; \
3649 _argvec[2] = (unsigned long)_orig.nraddr; \
3650 _argvec[2+1] = (unsigned long)arg1; \
3651 _argvec[2+2] = (unsigned long)arg2; \
3652 _argvec[2+3] = (unsigned long)arg3; \
3653 _argvec[2+4] = (unsigned long)arg4; \
3654 _argvec[2+5] = (unsigned long)arg5; \
3655 _argvec[2+6] = (unsigned long)arg6; \
3656 _argvec[2+7] = (unsigned long)arg7; \
3657 _argvec[2+8] = (unsigned long)arg8; \
3658 _argvec[2+9] = (unsigned long)arg9; \
3659 _argvec[2+10] = (unsigned long)arg10; \
3661 VALGRIND_ALIGN_STACK \
3663 "std 2,-16(12)\n\t" /* save tocptr */ \
3664 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3665 "addi 1,1,-128\n\t" /* expand stack frame */ \
3668 "std 3,104(1)\n\t" \
3673 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3674 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3675 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3676 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3677 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3678 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3679 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3680 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3681 "ld 12, 0(12)\n\t" /* target->r12 */ \
3682 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3685 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3686 VALGRIND_RESTORE_STACK \
3687 : /*out*/ "=r" (_res) \
3688 : /*in*/ "r" (&_argvec[2]) \
3689 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3691 lval = (__typeof__(lval)) _res; \
3694 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3695 arg7,arg8,arg9,arg10,arg11) \
3697 volatile OrigFn _orig = (orig); \
3698 volatile unsigned long _argvec[3+11]; \
3699 volatile unsigned long _res; \
3700 /* _argvec[0] holds current r2 across the call */ \
3701 _argvec[1] = (unsigned long)_orig.r2; \
3702 _argvec[2] = (unsigned long)_orig.nraddr; \
3703 _argvec[2+1] = (unsigned long)arg1; \
3704 _argvec[2+2] = (unsigned long)arg2; \
3705 _argvec[2+3] = (unsigned long)arg3; \
3706 _argvec[2+4] = (unsigned long)arg4; \
3707 _argvec[2+5] = (unsigned long)arg5; \
3708 _argvec[2+6] = (unsigned long)arg6; \
3709 _argvec[2+7] = (unsigned long)arg7; \
3710 _argvec[2+8] = (unsigned long)arg8; \
3711 _argvec[2+9] = (unsigned long)arg9; \
3712 _argvec[2+10] = (unsigned long)arg10; \
3713 _argvec[2+11] = (unsigned long)arg11; \
3715 VALGRIND_ALIGN_STACK \
3717 "std 2,-16(12)\n\t" /* save tocptr */ \
3718 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3719 "addi 1,1,-144\n\t" /* expand stack frame */ \
3722 "std 3,112(1)\n\t" \
3725 "std 3,104(1)\n\t" \
3730 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3731 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3732 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3733 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3734 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3735 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3736 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3737 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3738 "ld 12, 0(12)\n\t" /* target->r12 */ \
3739 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3742 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3743 VALGRIND_RESTORE_STACK \
3744 : /*out*/ "=r" (_res) \
3745 : /*in*/ "r" (&_argvec[2]) \
3746 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3748 lval = (__typeof__(lval)) _res; \
3751 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3752 arg7,arg8,arg9,arg10,arg11,arg12) \
3754 volatile OrigFn _orig = (orig); \
3755 volatile unsigned long _argvec[3+12]; \
3756 volatile unsigned long _res; \
3757 /* _argvec[0] holds current r2 across the call */ \
3758 _argvec[1] = (unsigned long)_orig.r2; \
3759 _argvec[2] = (unsigned long)_orig.nraddr; \
3760 _argvec[2+1] = (unsigned long)arg1; \
3761 _argvec[2+2] = (unsigned long)arg2; \
3762 _argvec[2+3] = (unsigned long)arg3; \
3763 _argvec[2+4] = (unsigned long)arg4; \
3764 _argvec[2+5] = (unsigned long)arg5; \
3765 _argvec[2+6] = (unsigned long)arg6; \
3766 _argvec[2+7] = (unsigned long)arg7; \
3767 _argvec[2+8] = (unsigned long)arg8; \
3768 _argvec[2+9] = (unsigned long)arg9; \
3769 _argvec[2+10] = (unsigned long)arg10; \
3770 _argvec[2+11] = (unsigned long)arg11; \
3771 _argvec[2+12] = (unsigned long)arg12; \
3773 VALGRIND_ALIGN_STACK \
3775 "std 2,-16(12)\n\t" /* save tocptr */ \
3776 "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
3777 "addi 1,1,-144\n\t" /* expand stack frame */ \
3780 "std 3,120(1)\n\t" \
3783 "std 3,112(1)\n\t" \
3786 "std 3,104(1)\n\t" \
3791 "ld 3, 8(12)\n\t" /* arg1->r3 */ \
3792 "ld 4, 16(12)\n\t" /* arg2->r4 */ \
3793 "ld 5, 24(12)\n\t" /* arg3->r5 */ \
3794 "ld 6, 32(12)\n\t" /* arg4->r6 */ \
3795 "ld 7, 40(12)\n\t" /* arg5->r7 */ \
3796 "ld 8, 48(12)\n\t" /* arg6->r8 */ \
3797 "ld 9, 56(12)\n\t" /* arg7->r9 */ \
3798 "ld 10, 64(12)\n\t" /* arg8->r10 */ \
3799 "ld 12, 0(12)\n\t" /* target->r12 */ \
3800 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
3803 "ld 2,-16(12)\n\t" /* restore tocptr */ \
3804 VALGRIND_RESTORE_STACK \
3805 : /*out*/ "=r" (_res) \
3806 : /*in*/ "r" (&_argvec[2]) \
3807 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3809 lval = (__typeof__(lval)) _res; \
3812 #endif /* PLAT_ppc64le_linux */
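/* Illustrative sketch only (not part of the original header): the
   CALL_FN_* macros above are normally used from inside a function
   wrapper. The wrapped function "foo" below is hypothetical; OrigFn,
   VALGRIND_GET_ORIG_FN and I_WRAP_SONAME_FNNAME_ZU are defined
   elsewhere in this file.

      int I_WRAP_SONAME_FNNAME_ZU(NONE, foo) ( int x, int y )
      {
         int    result;
         OrigFn fn;
         VALGRIND_GET_ORIG_FN(fn);        // fetch the non-redirected address
                                          // (and, on ppc64, the TOC pointer)
         CALL_FN_W_WW(result, fn, x, y);  // call the original, unredirected foo
         return result;
      }
*/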
3814 /* ------------------------- arm-linux ------------------------- */
3816 #if defined(PLAT_arm_linux)
3818 /* These regs are trashed by the hidden call. */
3819 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3821 /* Macros to save and align the stack before making a function
3822 call and restore it afterwards as gcc may not keep the stack
3823 pointer aligned if it doesn't realise calls are being made
3824 to other functions. */
3826 /* This is a bit tricky. We store the original stack pointer in r10
3827 as it is callee-saves. gcc doesn't allow the use of r11 for some
3828 reason. Also, we can't directly "bic" the stack pointer in thumb
3829 mode since r13 isn't an allowed register number in that context.
3830 So use r4 as a temporary, since that is about to get trashed
3831 anyway, just after each use of this macro. Side effect is we need
3832 to be very careful about any future changes, since
3833 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4,  sp\n\t" \
   "bic r4,  r4, #7\n\t" \
   "mov sp,  r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp,  r10\n\t"
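/* Illustrative note (not in the original source): with the definitions
   above, the pair brackets each hidden call below roughly as

        mov r10, sp          @ save the original stack pointer (callee-saved r10)
        mov r4,  sp
        bic r4,  r4, #7      @ force 8-byte alignment
        mov sp,  r4
        ...load r0-r3 (and push any stack args), branch-and-link via r4...
        mov sp,  r10         @ restore the original stack pointer
 */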
/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
   long) == 4. */
3845 #define CALL_FN_W_v(lval, orig) \
3847 volatile OrigFn _orig = (orig); \
3848 volatile unsigned long _argvec[1]; \
3849 volatile unsigned long _res; \
3850 _argvec[0] = (unsigned long)_orig.nraddr; \
3852 VALGRIND_ALIGN_STACK \
3853 "ldr r4, [%1] \n\t" /* target->r4 */ \
3854 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3855 VALGRIND_RESTORE_STACK \
3857 : /*out*/ "=r" (_res) \
3858 : /*in*/ "0" (&_argvec[0]) \
3859 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3861 lval = (__typeof__(lval)) _res; \
3864 #define CALL_FN_W_W(lval, orig, arg1) \
3866 volatile OrigFn _orig = (orig); \
3867 volatile unsigned long _argvec[2]; \
3868 volatile unsigned long _res; \
3869 _argvec[0] = (unsigned long)_orig.nraddr; \
3870 _argvec[1] = (unsigned long)(arg1); \
3872 VALGRIND_ALIGN_STACK \
3873 "ldr r0, [%1, #4] \n\t" \
3874 "ldr r4, [%1] \n\t" /* target->r4 */ \
3875 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3876 VALGRIND_RESTORE_STACK \
3878 : /*out*/ "=r" (_res) \
3879 : /*in*/ "0" (&_argvec[0]) \
3880 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3882 lval = (__typeof__(lval)) _res; \
3885 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3887 volatile OrigFn _orig = (orig); \
3888 volatile unsigned long _argvec[3]; \
3889 volatile unsigned long _res; \
3890 _argvec[0] = (unsigned long)_orig.nraddr; \
3891 _argvec[1] = (unsigned long)(arg1); \
3892 _argvec[2] = (unsigned long)(arg2); \
3894 VALGRIND_ALIGN_STACK \
3895 "ldr r0, [%1, #4] \n\t" \
3896 "ldr r1, [%1, #8] \n\t" \
3897 "ldr r4, [%1] \n\t" /* target->r4 */ \
3898 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3899 VALGRIND_RESTORE_STACK \
3901 : /*out*/ "=r" (_res) \
3902 : /*in*/ "0" (&_argvec[0]) \
3903 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3905 lval = (__typeof__(lval)) _res; \
3908 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3910 volatile OrigFn _orig = (orig); \
3911 volatile unsigned long _argvec[4]; \
3912 volatile unsigned long _res; \
3913 _argvec[0] = (unsigned long)_orig.nraddr; \
3914 _argvec[1] = (unsigned long)(arg1); \
3915 _argvec[2] = (unsigned long)(arg2); \
3916 _argvec[3] = (unsigned long)(arg3); \
3918 VALGRIND_ALIGN_STACK \
3919 "ldr r0, [%1, #4] \n\t" \
3920 "ldr r1, [%1, #8] \n\t" \
3921 "ldr r2, [%1, #12] \n\t" \
3922 "ldr r4, [%1] \n\t" /* target->r4 */ \
3923 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3924 VALGRIND_RESTORE_STACK \
3926 : /*out*/ "=r" (_res) \
3927 : /*in*/ "0" (&_argvec[0]) \
3928 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3930 lval = (__typeof__(lval)) _res; \
3933 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3935 volatile OrigFn _orig = (orig); \
3936 volatile unsigned long _argvec[5]; \
3937 volatile unsigned long _res; \
3938 _argvec[0] = (unsigned long)_orig.nraddr; \
3939 _argvec[1] = (unsigned long)(arg1); \
3940 _argvec[2] = (unsigned long)(arg2); \
3941 _argvec[3] = (unsigned long)(arg3); \
3942 _argvec[4] = (unsigned long)(arg4); \
3944 VALGRIND_ALIGN_STACK \
3945 "ldr r0, [%1, #4] \n\t" \
3946 "ldr r1, [%1, #8] \n\t" \
3947 "ldr r2, [%1, #12] \n\t" \
3948 "ldr r3, [%1, #16] \n\t" \
3949 "ldr r4, [%1] \n\t" /* target->r4 */ \
3950 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3951 VALGRIND_RESTORE_STACK \
3953 : /*out*/ "=r" (_res) \
3954 : /*in*/ "0" (&_argvec[0]) \
3955 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3957 lval = (__typeof__(lval)) _res; \
3960 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3962 volatile OrigFn _orig = (orig); \
3963 volatile unsigned long _argvec[6]; \
3964 volatile unsigned long _res; \
3965 _argvec[0] = (unsigned long)_orig.nraddr; \
3966 _argvec[1] = (unsigned long)(arg1); \
3967 _argvec[2] = (unsigned long)(arg2); \
3968 _argvec[3] = (unsigned long)(arg3); \
3969 _argvec[4] = (unsigned long)(arg4); \
3970 _argvec[5] = (unsigned long)(arg5); \
3972 VALGRIND_ALIGN_STACK \
3973 "sub sp, sp, #4 \n\t" \
3974 "ldr r0, [%1, #20] \n\t" \
3976 "ldr r0, [%1, #4] \n\t" \
3977 "ldr r1, [%1, #8] \n\t" \
3978 "ldr r2, [%1, #12] \n\t" \
3979 "ldr r3, [%1, #16] \n\t" \
3980 "ldr r4, [%1] \n\t" /* target->r4 */ \
3981 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3982 VALGRIND_RESTORE_STACK \
3984 : /*out*/ "=r" (_res) \
3985 : /*in*/ "0" (&_argvec[0]) \
3986 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3988 lval = (__typeof__(lval)) _res; \
3991 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3993 volatile OrigFn _orig = (orig); \
3994 volatile unsigned long _argvec[7]; \
3995 volatile unsigned long _res; \
3996 _argvec[0] = (unsigned long)_orig.nraddr; \
3997 _argvec[1] = (unsigned long)(arg1); \
3998 _argvec[2] = (unsigned long)(arg2); \
3999 _argvec[3] = (unsigned long)(arg3); \
4000 _argvec[4] = (unsigned long)(arg4); \
4001 _argvec[5] = (unsigned long)(arg5); \
4002 _argvec[6] = (unsigned long)(arg6); \
4004 VALGRIND_ALIGN_STACK \
4005 "ldr r0, [%1, #20] \n\t" \
4006 "ldr r1, [%1, #24] \n\t" \
4007 "push {r0, r1} \n\t" \
4008 "ldr r0, [%1, #4] \n\t" \
4009 "ldr r1, [%1, #8] \n\t" \
4010 "ldr r2, [%1, #12] \n\t" \
4011 "ldr r3, [%1, #16] \n\t" \
4012 "ldr r4, [%1] \n\t" /* target->r4 */ \
4013 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4014 VALGRIND_RESTORE_STACK \
4016 : /*out*/ "=r" (_res) \
4017 : /*in*/ "0" (&_argvec[0]) \
4018 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4020 lval = (__typeof__(lval)) _res; \
4023 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4026 volatile OrigFn _orig = (orig); \
4027 volatile unsigned long _argvec[8]; \
4028 volatile unsigned long _res; \
4029 _argvec[0] = (unsigned long)_orig.nraddr; \
4030 _argvec[1] = (unsigned long)(arg1); \
4031 _argvec[2] = (unsigned long)(arg2); \
4032 _argvec[3] = (unsigned long)(arg3); \
4033 _argvec[4] = (unsigned long)(arg4); \
4034 _argvec[5] = (unsigned long)(arg5); \
4035 _argvec[6] = (unsigned long)(arg6); \
4036 _argvec[7] = (unsigned long)(arg7); \
4038 VALGRIND_ALIGN_STACK \
4039 "sub sp, sp, #4 \n\t" \
4040 "ldr r0, [%1, #20] \n\t" \
4041 "ldr r1, [%1, #24] \n\t" \
4042 "ldr r2, [%1, #28] \n\t" \
4043 "push {r0, r1, r2} \n\t" \
4044 "ldr r0, [%1, #4] \n\t" \
4045 "ldr r1, [%1, #8] \n\t" \
4046 "ldr r2, [%1, #12] \n\t" \
4047 "ldr r3, [%1, #16] \n\t" \
4048 "ldr r4, [%1] \n\t" /* target->r4 */ \
4049 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4050 VALGRIND_RESTORE_STACK \
4052 : /*out*/ "=r" (_res) \
4053 : /*in*/ "0" (&_argvec[0]) \
4054 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4056 lval = (__typeof__(lval)) _res; \
4059 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4062 volatile OrigFn _orig = (orig); \
4063 volatile unsigned long _argvec[9]; \
4064 volatile unsigned long _res; \
4065 _argvec[0] = (unsigned long)_orig.nraddr; \
4066 _argvec[1] = (unsigned long)(arg1); \
4067 _argvec[2] = (unsigned long)(arg2); \
4068 _argvec[3] = (unsigned long)(arg3); \
4069 _argvec[4] = (unsigned long)(arg4); \
4070 _argvec[5] = (unsigned long)(arg5); \
4071 _argvec[6] = (unsigned long)(arg6); \
4072 _argvec[7] = (unsigned long)(arg7); \
4073 _argvec[8] = (unsigned long)(arg8); \
4075 VALGRIND_ALIGN_STACK \
4076 "ldr r0, [%1, #20] \n\t" \
4077 "ldr r1, [%1, #24] \n\t" \
4078 "ldr r2, [%1, #28] \n\t" \
4079 "ldr r3, [%1, #32] \n\t" \
4080 "push {r0, r1, r2, r3} \n\t" \
4081 "ldr r0, [%1, #4] \n\t" \
4082 "ldr r1, [%1, #8] \n\t" \
4083 "ldr r2, [%1, #12] \n\t" \
4084 "ldr r3, [%1, #16] \n\t" \
4085 "ldr r4, [%1] \n\t" /* target->r4 */ \
4086 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4087 VALGRIND_RESTORE_STACK \
4089 : /*out*/ "=r" (_res) \
4090 : /*in*/ "0" (&_argvec[0]) \
4091 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4093 lval = (__typeof__(lval)) _res; \
4096 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4099 volatile OrigFn _orig = (orig); \
4100 volatile unsigned long _argvec[10]; \
4101 volatile unsigned long _res; \
4102 _argvec[0] = (unsigned long)_orig.nraddr; \
4103 _argvec[1] = (unsigned long)(arg1); \
4104 _argvec[2] = (unsigned long)(arg2); \
4105 _argvec[3] = (unsigned long)(arg3); \
4106 _argvec[4] = (unsigned long)(arg4); \
4107 _argvec[5] = (unsigned long)(arg5); \
4108 _argvec[6] = (unsigned long)(arg6); \
4109 _argvec[7] = (unsigned long)(arg7); \
4110 _argvec[8] = (unsigned long)(arg8); \
4111 _argvec[9] = (unsigned long)(arg9); \
4113 VALGRIND_ALIGN_STACK \
4114 "sub sp, sp, #4 \n\t" \
4115 "ldr r0, [%1, #20] \n\t" \
4116 "ldr r1, [%1, #24] \n\t" \
4117 "ldr r2, [%1, #28] \n\t" \
4118 "ldr r3, [%1, #32] \n\t" \
4119 "ldr r4, [%1, #36] \n\t" \
4120 "push {r0, r1, r2, r3, r4} \n\t" \
4121 "ldr r0, [%1, #4] \n\t" \
4122 "ldr r1, [%1, #8] \n\t" \
4123 "ldr r2, [%1, #12] \n\t" \
4124 "ldr r3, [%1, #16] \n\t" \
4125 "ldr r4, [%1] \n\t" /* target->r4 */ \
4126 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4127 VALGRIND_RESTORE_STACK \
4129 : /*out*/ "=r" (_res) \
4130 : /*in*/ "0" (&_argvec[0]) \
4131 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4133 lval = (__typeof__(lval)) _res; \
4136 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4137 arg7,arg8,arg9,arg10) \
4139 volatile OrigFn _orig = (orig); \
4140 volatile unsigned long _argvec[11]; \
4141 volatile unsigned long _res; \
4142 _argvec[0] = (unsigned long)_orig.nraddr; \
4143 _argvec[1] = (unsigned long)(arg1); \
4144 _argvec[2] = (unsigned long)(arg2); \
4145 _argvec[3] = (unsigned long)(arg3); \
4146 _argvec[4] = (unsigned long)(arg4); \
4147 _argvec[5] = (unsigned long)(arg5); \
4148 _argvec[6] = (unsigned long)(arg6); \
4149 _argvec[7] = (unsigned long)(arg7); \
4150 _argvec[8] = (unsigned long)(arg8); \
4151 _argvec[9] = (unsigned long)(arg9); \
4152 _argvec[10] = (unsigned long)(arg10); \
4154 VALGRIND_ALIGN_STACK \
4155 "ldr r0, [%1, #40] \n\t" \
4157 "ldr r0, [%1, #20] \n\t" \
4158 "ldr r1, [%1, #24] \n\t" \
4159 "ldr r2, [%1, #28] \n\t" \
4160 "ldr r3, [%1, #32] \n\t" \
4161 "ldr r4, [%1, #36] \n\t" \
4162 "push {r0, r1, r2, r3, r4} \n\t" \
4163 "ldr r0, [%1, #4] \n\t" \
4164 "ldr r1, [%1, #8] \n\t" \
4165 "ldr r2, [%1, #12] \n\t" \
4166 "ldr r3, [%1, #16] \n\t" \
4167 "ldr r4, [%1] \n\t" /* target->r4 */ \
4168 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4169 VALGRIND_RESTORE_STACK \
4171 : /*out*/ "=r" (_res) \
4172 : /*in*/ "0" (&_argvec[0]) \
4173 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4175 lval = (__typeof__(lval)) _res; \
4178 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4179 arg6,arg7,arg8,arg9,arg10, \
4182 volatile OrigFn _orig = (orig); \
4183 volatile unsigned long _argvec[12]; \
4184 volatile unsigned long _res; \
4185 _argvec[0] = (unsigned long)_orig.nraddr; \
4186 _argvec[1] = (unsigned long)(arg1); \
4187 _argvec[2] = (unsigned long)(arg2); \
4188 _argvec[3] = (unsigned long)(arg3); \
4189 _argvec[4] = (unsigned long)(arg4); \
4190 _argvec[5] = (unsigned long)(arg5); \
4191 _argvec[6] = (unsigned long)(arg6); \
4192 _argvec[7] = (unsigned long)(arg7); \
4193 _argvec[8] = (unsigned long)(arg8); \
4194 _argvec[9] = (unsigned long)(arg9); \
4195 _argvec[10] = (unsigned long)(arg10); \
4196 _argvec[11] = (unsigned long)(arg11); \
4198 VALGRIND_ALIGN_STACK \
4199 "sub sp, sp, #4 \n\t" \
4200 "ldr r0, [%1, #40] \n\t" \
4201 "ldr r1, [%1, #44] \n\t" \
4202 "push {r0, r1} \n\t" \
4203 "ldr r0, [%1, #20] \n\t" \
4204 "ldr r1, [%1, #24] \n\t" \
4205 "ldr r2, [%1, #28] \n\t" \
4206 "ldr r3, [%1, #32] \n\t" \
4207 "ldr r4, [%1, #36] \n\t" \
4208 "push {r0, r1, r2, r3, r4} \n\t" \
4209 "ldr r0, [%1, #4] \n\t" \
4210 "ldr r1, [%1, #8] \n\t" \
4211 "ldr r2, [%1, #12] \n\t" \
4212 "ldr r3, [%1, #16] \n\t" \
4213 "ldr r4, [%1] \n\t" /* target->r4 */ \
4214 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4215 VALGRIND_RESTORE_STACK \
4217 : /*out*/ "=r" (_res) \
4218 : /*in*/ "0" (&_argvec[0]) \
4219 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4221 lval = (__typeof__(lval)) _res; \
4224 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4225 arg6,arg7,arg8,arg9,arg10, \
4228 volatile OrigFn _orig = (orig); \
4229 volatile unsigned long _argvec[13]; \
4230 volatile unsigned long _res; \
4231 _argvec[0] = (unsigned long)_orig.nraddr; \
4232 _argvec[1] = (unsigned long)(arg1); \
4233 _argvec[2] = (unsigned long)(arg2); \
4234 _argvec[3] = (unsigned long)(arg3); \
4235 _argvec[4] = (unsigned long)(arg4); \
4236 _argvec[5] = (unsigned long)(arg5); \
4237 _argvec[6] = (unsigned long)(arg6); \
4238 _argvec[7] = (unsigned long)(arg7); \
4239 _argvec[8] = (unsigned long)(arg8); \
4240 _argvec[9] = (unsigned long)(arg9); \
4241 _argvec[10] = (unsigned long)(arg10); \
4242 _argvec[11] = (unsigned long)(arg11); \
4243 _argvec[12] = (unsigned long)(arg12); \
4245 VALGRIND_ALIGN_STACK \
4246 "ldr r0, [%1, #40] \n\t" \
4247 "ldr r1, [%1, #44] \n\t" \
4248 "ldr r2, [%1, #48] \n\t" \
4249 "push {r0, r1, r2} \n\t" \
4250 "ldr r0, [%1, #20] \n\t" \
4251 "ldr r1, [%1, #24] \n\t" \
4252 "ldr r2, [%1, #28] \n\t" \
4253 "ldr r3, [%1, #32] \n\t" \
4254 "ldr r4, [%1, #36] \n\t" \
4255 "push {r0, r1, r2, r3, r4} \n\t" \
4256 "ldr r0, [%1, #4] \n\t" \
4257 "ldr r1, [%1, #8] \n\t" \
4258 "ldr r2, [%1, #12] \n\t" \
4259 "ldr r3, [%1, #16] \n\t" \
4260 "ldr r4, [%1] \n\t" /* target->r4 */ \
4261 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
4262 VALGRIND_RESTORE_STACK \
4264 : /*out*/ "=r" (_res) \
4265 : /*in*/ "0" (&_argvec[0]) \
4266 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
4268 lval = (__typeof__(lval)) _res; \
4271 #endif /* PLAT_arm_linux */
4273 /* ------------------------ arm64-linux ------------------------ */
4275 #if defined(PLAT_arm64_linux)
4277 /* These regs are trashed by the hidden call. */
4278 #define __CALLER_SAVED_REGS \
4279 "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
4280 "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
4281 "x18", "x19", "x20", "x30", \
4282 "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
4283 "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
4284 "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
4285 "v26", "v27", "v28", "v29", "v30", "v31"
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp,  x21\n\t"
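/* Illustrative note (not in the original source): the pair above brackets
   each hidden call below roughly as

        mov x21, sp          // save the original stack pointer (callee-saved x21)
        bic sp, x21, #15     // force the 16-byte alignment AArch64 requires
        ...load x0-x7 (and store any stack args), branch-and-link via x8...
        mov sp, x21          // restore the original stack pointer
 */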
4295 /* These CALL_FN_ macros assume that on arm64-linux,
4296 sizeof(unsigned long) == 8. */
4298 #define CALL_FN_W_v(lval, orig) \
4300 volatile OrigFn _orig = (orig); \
4301 volatile unsigned long _argvec[1]; \
4302 volatile unsigned long _res; \
4303 _argvec[0] = (unsigned long)_orig.nraddr; \
4305 VALGRIND_ALIGN_STACK \
4306 "ldr x8, [%1] \n\t" /* target->x8 */ \
4307 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4308 VALGRIND_RESTORE_STACK \
4310 : /*out*/ "=r" (_res) \
4311 : /*in*/ "0" (&_argvec[0]) \
4312 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4314 lval = (__typeof__(lval)) _res; \
4317 #define CALL_FN_W_W(lval, orig, arg1) \
4319 volatile OrigFn _orig = (orig); \
4320 volatile unsigned long _argvec[2]; \
4321 volatile unsigned long _res; \
4322 _argvec[0] = (unsigned long)_orig.nraddr; \
4323 _argvec[1] = (unsigned long)(arg1); \
4325 VALGRIND_ALIGN_STACK \
4326 "ldr x0, [%1, #8] \n\t" \
4327 "ldr x8, [%1] \n\t" /* target->x8 */ \
4328 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4329 VALGRIND_RESTORE_STACK \
4331 : /*out*/ "=r" (_res) \
4332 : /*in*/ "0" (&_argvec[0]) \
4333 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4335 lval = (__typeof__(lval)) _res; \
4338 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4340 volatile OrigFn _orig = (orig); \
4341 volatile unsigned long _argvec[3]; \
4342 volatile unsigned long _res; \
4343 _argvec[0] = (unsigned long)_orig.nraddr; \
4344 _argvec[1] = (unsigned long)(arg1); \
4345 _argvec[2] = (unsigned long)(arg2); \
4347 VALGRIND_ALIGN_STACK \
4348 "ldr x0, [%1, #8] \n\t" \
4349 "ldr x1, [%1, #16] \n\t" \
4350 "ldr x8, [%1] \n\t" /* target->x8 */ \
4351 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4352 VALGRIND_RESTORE_STACK \
4354 : /*out*/ "=r" (_res) \
4355 : /*in*/ "0" (&_argvec[0]) \
4356 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4358 lval = (__typeof__(lval)) _res; \
4361 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4363 volatile OrigFn _orig = (orig); \
4364 volatile unsigned long _argvec[4]; \
4365 volatile unsigned long _res; \
4366 _argvec[0] = (unsigned long)_orig.nraddr; \
4367 _argvec[1] = (unsigned long)(arg1); \
4368 _argvec[2] = (unsigned long)(arg2); \
4369 _argvec[3] = (unsigned long)(arg3); \
4371 VALGRIND_ALIGN_STACK \
4372 "ldr x0, [%1, #8] \n\t" \
4373 "ldr x1, [%1, #16] \n\t" \
4374 "ldr x2, [%1, #24] \n\t" \
4375 "ldr x8, [%1] \n\t" /* target->x8 */ \
4376 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4377 VALGRIND_RESTORE_STACK \
4379 : /*out*/ "=r" (_res) \
4380 : /*in*/ "0" (&_argvec[0]) \
4381 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4383 lval = (__typeof__(lval)) _res; \
4386 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4388 volatile OrigFn _orig = (orig); \
4389 volatile unsigned long _argvec[5]; \
4390 volatile unsigned long _res; \
4391 _argvec[0] = (unsigned long)_orig.nraddr; \
4392 _argvec[1] = (unsigned long)(arg1); \
4393 _argvec[2] = (unsigned long)(arg2); \
4394 _argvec[3] = (unsigned long)(arg3); \
4395 _argvec[4] = (unsigned long)(arg4); \
4397 VALGRIND_ALIGN_STACK \
4398 "ldr x0, [%1, #8] \n\t" \
4399 "ldr x1, [%1, #16] \n\t" \
4400 "ldr x2, [%1, #24] \n\t" \
4401 "ldr x3, [%1, #32] \n\t" \
4402 "ldr x8, [%1] \n\t" /* target->x8 */ \
4403 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4404 VALGRIND_RESTORE_STACK \
4406 : /*out*/ "=r" (_res) \
4407 : /*in*/ "0" (&_argvec[0]) \
4408 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4410 lval = (__typeof__(lval)) _res; \
4413 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4415 volatile OrigFn _orig = (orig); \
4416 volatile unsigned long _argvec[6]; \
4417 volatile unsigned long _res; \
4418 _argvec[0] = (unsigned long)_orig.nraddr; \
4419 _argvec[1] = (unsigned long)(arg1); \
4420 _argvec[2] = (unsigned long)(arg2); \
4421 _argvec[3] = (unsigned long)(arg3); \
4422 _argvec[4] = (unsigned long)(arg4); \
4423 _argvec[5] = (unsigned long)(arg5); \
4425 VALGRIND_ALIGN_STACK \
4426 "ldr x0, [%1, #8] \n\t" \
4427 "ldr x1, [%1, #16] \n\t" \
4428 "ldr x2, [%1, #24] \n\t" \
4429 "ldr x3, [%1, #32] \n\t" \
4430 "ldr x4, [%1, #40] \n\t" \
4431 "ldr x8, [%1] \n\t" /* target->x8 */ \
4432 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4433 VALGRIND_RESTORE_STACK \
4435 : /*out*/ "=r" (_res) \
4436 : /*in*/ "0" (&_argvec[0]) \
4437 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4439 lval = (__typeof__(lval)) _res; \
4442 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4444 volatile OrigFn _orig = (orig); \
4445 volatile unsigned long _argvec[7]; \
4446 volatile unsigned long _res; \
4447 _argvec[0] = (unsigned long)_orig.nraddr; \
4448 _argvec[1] = (unsigned long)(arg1); \
4449 _argvec[2] = (unsigned long)(arg2); \
4450 _argvec[3] = (unsigned long)(arg3); \
4451 _argvec[4] = (unsigned long)(arg4); \
4452 _argvec[5] = (unsigned long)(arg5); \
4453 _argvec[6] = (unsigned long)(arg6); \
4455 VALGRIND_ALIGN_STACK \
4456 "ldr x0, [%1, #8] \n\t" \
4457 "ldr x1, [%1, #16] \n\t" \
4458 "ldr x2, [%1, #24] \n\t" \
4459 "ldr x3, [%1, #32] \n\t" \
4460 "ldr x4, [%1, #40] \n\t" \
4461 "ldr x5, [%1, #48] \n\t" \
4462 "ldr x8, [%1] \n\t" /* target->x8 */ \
4463 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4464 VALGRIND_RESTORE_STACK \
4466 : /*out*/ "=r" (_res) \
4467 : /*in*/ "0" (&_argvec[0]) \
4468 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4470 lval = (__typeof__(lval)) _res; \
4473 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4476 volatile OrigFn _orig = (orig); \
4477 volatile unsigned long _argvec[8]; \
4478 volatile unsigned long _res; \
4479 _argvec[0] = (unsigned long)_orig.nraddr; \
4480 _argvec[1] = (unsigned long)(arg1); \
4481 _argvec[2] = (unsigned long)(arg2); \
4482 _argvec[3] = (unsigned long)(arg3); \
4483 _argvec[4] = (unsigned long)(arg4); \
4484 _argvec[5] = (unsigned long)(arg5); \
4485 _argvec[6] = (unsigned long)(arg6); \
4486 _argvec[7] = (unsigned long)(arg7); \
4488 VALGRIND_ALIGN_STACK \
4489 "ldr x0, [%1, #8] \n\t" \
4490 "ldr x1, [%1, #16] \n\t" \
4491 "ldr x2, [%1, #24] \n\t" \
4492 "ldr x3, [%1, #32] \n\t" \
4493 "ldr x4, [%1, #40] \n\t" \
4494 "ldr x5, [%1, #48] \n\t" \
4495 "ldr x6, [%1, #56] \n\t" \
4496 "ldr x8, [%1] \n\t" /* target->x8 */ \
4497 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4498 VALGRIND_RESTORE_STACK \
4500 : /*out*/ "=r" (_res) \
4501 : /*in*/ "0" (&_argvec[0]) \
4502 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4504 lval = (__typeof__(lval)) _res; \
4507 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4510 volatile OrigFn _orig = (orig); \
4511 volatile unsigned long _argvec[9]; \
4512 volatile unsigned long _res; \
4513 _argvec[0] = (unsigned long)_orig.nraddr; \
4514 _argvec[1] = (unsigned long)(arg1); \
4515 _argvec[2] = (unsigned long)(arg2); \
4516 _argvec[3] = (unsigned long)(arg3); \
4517 _argvec[4] = (unsigned long)(arg4); \
4518 _argvec[5] = (unsigned long)(arg5); \
4519 _argvec[6] = (unsigned long)(arg6); \
4520 _argvec[7] = (unsigned long)(arg7); \
4521 _argvec[8] = (unsigned long)(arg8); \
4523 VALGRIND_ALIGN_STACK \
4524 "ldr x0, [%1, #8] \n\t" \
4525 "ldr x1, [%1, #16] \n\t" \
4526 "ldr x2, [%1, #24] \n\t" \
4527 "ldr x3, [%1, #32] \n\t" \
4528 "ldr x4, [%1, #40] \n\t" \
4529 "ldr x5, [%1, #48] \n\t" \
4530 "ldr x6, [%1, #56] \n\t" \
4531 "ldr x7, [%1, #64] \n\t" \
4532 "ldr x8, [%1] \n\t" /* target->x8 */ \
4533 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4534 VALGRIND_RESTORE_STACK \
4536 : /*out*/ "=r" (_res) \
4537 : /*in*/ "0" (&_argvec[0]) \
4538 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4540 lval = (__typeof__(lval)) _res; \
4543 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4546 volatile OrigFn _orig = (orig); \
4547 volatile unsigned long _argvec[10]; \
4548 volatile unsigned long _res; \
4549 _argvec[0] = (unsigned long)_orig.nraddr; \
4550 _argvec[1] = (unsigned long)(arg1); \
4551 _argvec[2] = (unsigned long)(arg2); \
4552 _argvec[3] = (unsigned long)(arg3); \
4553 _argvec[4] = (unsigned long)(arg4); \
4554 _argvec[5] = (unsigned long)(arg5); \
4555 _argvec[6] = (unsigned long)(arg6); \
4556 _argvec[7] = (unsigned long)(arg7); \
4557 _argvec[8] = (unsigned long)(arg8); \
4558 _argvec[9] = (unsigned long)(arg9); \
4560 VALGRIND_ALIGN_STACK \
4561 "sub sp, sp, #0x20 \n\t" \
4562 "ldr x0, [%1, #8] \n\t" \
4563 "ldr x1, [%1, #16] \n\t" \
4564 "ldr x2, [%1, #24] \n\t" \
4565 "ldr x3, [%1, #32] \n\t" \
4566 "ldr x4, [%1, #40] \n\t" \
4567 "ldr x5, [%1, #48] \n\t" \
4568 "ldr x6, [%1, #56] \n\t" \
4569 "ldr x7, [%1, #64] \n\t" \
4570 "ldr x8, [%1, #72] \n\t" \
4571 "str x8, [sp, #0] \n\t" \
4572 "ldr x8, [%1] \n\t" /* target->x8 */ \
4573 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4574 VALGRIND_RESTORE_STACK \
4576 : /*out*/ "=r" (_res) \
4577 : /*in*/ "0" (&_argvec[0]) \
4578 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4580 lval = (__typeof__(lval)) _res; \
4583 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4584 arg7,arg8,arg9,arg10) \
4586 volatile OrigFn _orig = (orig); \
4587 volatile unsigned long _argvec[11]; \
4588 volatile unsigned long _res; \
4589 _argvec[0] = (unsigned long)_orig.nraddr; \
4590 _argvec[1] = (unsigned long)(arg1); \
4591 _argvec[2] = (unsigned long)(arg2); \
4592 _argvec[3] = (unsigned long)(arg3); \
4593 _argvec[4] = (unsigned long)(arg4); \
4594 _argvec[5] = (unsigned long)(arg5); \
4595 _argvec[6] = (unsigned long)(arg6); \
4596 _argvec[7] = (unsigned long)(arg7); \
4597 _argvec[8] = (unsigned long)(arg8); \
4598 _argvec[9] = (unsigned long)(arg9); \
4599 _argvec[10] = (unsigned long)(arg10); \
4601 VALGRIND_ALIGN_STACK \
4602 "sub sp, sp, #0x20 \n\t" \
4603 "ldr x0, [%1, #8] \n\t" \
4604 "ldr x1, [%1, #16] \n\t" \
4605 "ldr x2, [%1, #24] \n\t" \
4606 "ldr x3, [%1, #32] \n\t" \
4607 "ldr x4, [%1, #40] \n\t" \
4608 "ldr x5, [%1, #48] \n\t" \
4609 "ldr x6, [%1, #56] \n\t" \
4610 "ldr x7, [%1, #64] \n\t" \
4611 "ldr x8, [%1, #72] \n\t" \
4612 "str x8, [sp, #0] \n\t" \
4613 "ldr x8, [%1, #80] \n\t" \
4614 "str x8, [sp, #8] \n\t" \
4615 "ldr x8, [%1] \n\t" /* target->x8 */ \
4616 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4617 VALGRIND_RESTORE_STACK \
4619 : /*out*/ "=r" (_res) \
4620 : /*in*/ "0" (&_argvec[0]) \
4621 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4623 lval = (__typeof__(lval)) _res; \
4626 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4627 arg7,arg8,arg9,arg10,arg11) \
4629 volatile OrigFn _orig = (orig); \
4630 volatile unsigned long _argvec[12]; \
4631 volatile unsigned long _res; \
4632 _argvec[0] = (unsigned long)_orig.nraddr; \
4633 _argvec[1] = (unsigned long)(arg1); \
4634 _argvec[2] = (unsigned long)(arg2); \
4635 _argvec[3] = (unsigned long)(arg3); \
4636 _argvec[4] = (unsigned long)(arg4); \
4637 _argvec[5] = (unsigned long)(arg5); \
4638 _argvec[6] = (unsigned long)(arg6); \
4639 _argvec[7] = (unsigned long)(arg7); \
4640 _argvec[8] = (unsigned long)(arg8); \
4641 _argvec[9] = (unsigned long)(arg9); \
4642 _argvec[10] = (unsigned long)(arg10); \
4643 _argvec[11] = (unsigned long)(arg11); \
4645 VALGRIND_ALIGN_STACK \
4646 "sub sp, sp, #0x30 \n\t" \
4647 "ldr x0, [%1, #8] \n\t" \
4648 "ldr x1, [%1, #16] \n\t" \
4649 "ldr x2, [%1, #24] \n\t" \
4650 "ldr x3, [%1, #32] \n\t" \
4651 "ldr x4, [%1, #40] \n\t" \
4652 "ldr x5, [%1, #48] \n\t" \
4653 "ldr x6, [%1, #56] \n\t" \
4654 "ldr x7, [%1, #64] \n\t" \
4655 "ldr x8, [%1, #72] \n\t" \
4656 "str x8, [sp, #0] \n\t" \
4657 "ldr x8, [%1, #80] \n\t" \
4658 "str x8, [sp, #8] \n\t" \
4659 "ldr x8, [%1, #88] \n\t" \
4660 "str x8, [sp, #16] \n\t" \
4661 "ldr x8, [%1] \n\t" /* target->x8 */ \
4662 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4663 VALGRIND_RESTORE_STACK \
4665 : /*out*/ "=r" (_res) \
4666 : /*in*/ "0" (&_argvec[0]) \
4667 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4669 lval = (__typeof__(lval)) _res; \
4672 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4673 arg7,arg8,arg9,arg10,arg11, \
4676 volatile OrigFn _orig = (orig); \
4677 volatile unsigned long _argvec[13]; \
4678 volatile unsigned long _res; \
4679 _argvec[0] = (unsigned long)_orig.nraddr; \
4680 _argvec[1] = (unsigned long)(arg1); \
4681 _argvec[2] = (unsigned long)(arg2); \
4682 _argvec[3] = (unsigned long)(arg3); \
4683 _argvec[4] = (unsigned long)(arg4); \
4684 _argvec[5] = (unsigned long)(arg5); \
4685 _argvec[6] = (unsigned long)(arg6); \
4686 _argvec[7] = (unsigned long)(arg7); \
4687 _argvec[8] = (unsigned long)(arg8); \
4688 _argvec[9] = (unsigned long)(arg9); \
4689 _argvec[10] = (unsigned long)(arg10); \
4690 _argvec[11] = (unsigned long)(arg11); \
4691 _argvec[12] = (unsigned long)(arg12); \
4693 VALGRIND_ALIGN_STACK \
4694 "sub sp, sp, #0x30 \n\t" \
4695 "ldr x0, [%1, #8] \n\t" \
4696 "ldr x1, [%1, #16] \n\t" \
4697 "ldr x2, [%1, #24] \n\t" \
4698 "ldr x3, [%1, #32] \n\t" \
4699 "ldr x4, [%1, #40] \n\t" \
4700 "ldr x5, [%1, #48] \n\t" \
4701 "ldr x6, [%1, #56] \n\t" \
4702 "ldr x7, [%1, #64] \n\t" \
4703 "ldr x8, [%1, #72] \n\t" \
4704 "str x8, [sp, #0] \n\t" \
4705 "ldr x8, [%1, #80] \n\t" \
4706 "str x8, [sp, #8] \n\t" \
4707 "ldr x8, [%1, #88] \n\t" \
4708 "str x8, [sp, #16] \n\t" \
4709 "ldr x8, [%1, #96] \n\t" \
4710 "str x8, [sp, #24] \n\t" \
4711 "ldr x8, [%1] \n\t" /* target->x8 */ \
4712 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4713 VALGRIND_RESTORE_STACK \
4715 : /*out*/ "=r" (_res) \
4716 : /*in*/ "0" (&_argvec[0]) \
4717 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4719 lval = (__typeof__(lval)) _res; \
4722 #endif /* PLAT_arm64_linux */
4724 /* ------------------------- s390x-linux ------------------------- */
4726 #if defined(PLAT_s390x_linux)
/* Similar workaround to the one on amd64 (see above), but we use r11 as
   the frame pointer and save the old r11 in r7. r11 might be used for
   argvec, therefore we copy argvec into r1, since r1 is clobbered
   after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE \
      ".cfi_remember_state\n\t" \
      "lgr 1,%1\n\t" /* copy the argvec pointer into r1 */ \
      "lgr 7,11\n\t" \
      "lgr 11,%2\n\t" \
      ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE \
      "lgr 11, 7\n\t" \
      ".cfi_restore_state\n\t"
#else
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
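/* Illustrative note (not in the original source): with CFI support, the
   pair above brackets each hidden call below roughly as

        VALGRIND_CFI_PROLOGUE      # r1 := argvec, r7 := old r11, r11 := CFA
        aghi 15,-160               # allocate the ABI register save area
        ...load r2-r6 / copy any stack args, load the target into r1...
        VALGRIND_CALL_NOREDIR_R1
        ...take the return value from r2, release the frame...
        VALGRIND_CFI_EPILOGUE      # r11 := r7, restore the CFI state
 */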
4751 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4752 according to the s390 GCC maintainer. (The ABI specification is not
4753 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4754 VALGRIND_RESTORE_STACK are not defined here. */
/* These regs are trashed by the hidden call. Note that we overwrite
   r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
   function a proper return address. All others are ABI defined call
   clobbers. */
4760 #define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
4761 "f0","f1","f2","f3","f4","f5","f6","f7"
/* Nb: Although r11 is modified in the asm snippets below (inside
   VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
   two reasons:
   (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
       modified
   (2) GCC will complain that r11 cannot appear inside a clobber section,
       when compiled with -O -fno-omit-frame-pointer
 */
4772 #define CALL_FN_W_v(lval, orig) \
4774 volatile OrigFn _orig = (orig); \
4775 volatile unsigned long _argvec[1]; \
4776 volatile unsigned long _res; \
4777 _argvec[0] = (unsigned long)_orig.nraddr; \
4779 VALGRIND_CFI_PROLOGUE \
4780 "aghi 15,-160\n\t" \
4781 "lg 1, 0(1)\n\t" /* target->r1 */ \
4782 VALGRIND_CALL_NOREDIR_R1 \
4785 VALGRIND_CFI_EPILOGUE \
4786 : /*out*/ "=d" (_res) \
4787 : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
4788 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4790 lval = (__typeof__(lval)) _res; \
/* The calling ABI passes the arguments in r2-r6 and on the stack. */
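/* Illustrative note (not in the original source): the first five integer
   arguments travel in r2-r6; from the sixth argument onwards the macros
   below enlarge the frame beyond the 160-byte register save area and copy
   the remaining words into the parameter area, e.g. for six arguments

        aghi 15,-168               # 160-byte save area + one stack argument
        mvc  160(8,15), 48(1)      # argvec[6] -> first parameter slot
 */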
4794 #define CALL_FN_W_W(lval, orig, arg1) \
4796 volatile OrigFn _orig = (orig); \
4797 volatile unsigned long _argvec[2]; \
4798 volatile unsigned long _res; \
4799 _argvec[0] = (unsigned long)_orig.nraddr; \
4800 _argvec[1] = (unsigned long)arg1; \
4802 VALGRIND_CFI_PROLOGUE \
4803 "aghi 15,-160\n\t" \
4806 VALGRIND_CALL_NOREDIR_R1 \
4809 VALGRIND_CFI_EPILOGUE \
4810 : /*out*/ "=d" (_res) \
4811 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4812 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4814 lval = (__typeof__(lval)) _res; \
4817 #define CALL_FN_W_WW(lval, orig, arg1, arg2) \
4819 volatile OrigFn _orig = (orig); \
4820 volatile unsigned long _argvec[3]; \
4821 volatile unsigned long _res; \
4822 _argvec[0] = (unsigned long)_orig.nraddr; \
4823 _argvec[1] = (unsigned long)arg1; \
4824 _argvec[2] = (unsigned long)arg2; \
4826 VALGRIND_CFI_PROLOGUE \
4827 "aghi 15,-160\n\t" \
4831 VALGRIND_CALL_NOREDIR_R1 \
4834 VALGRIND_CFI_EPILOGUE \
4835 : /*out*/ "=d" (_res) \
4836 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4837 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4839 lval = (__typeof__(lval)) _res; \
4842 #define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
4844 volatile OrigFn _orig = (orig); \
4845 volatile unsigned long _argvec[4]; \
4846 volatile unsigned long _res; \
4847 _argvec[0] = (unsigned long)_orig.nraddr; \
4848 _argvec[1] = (unsigned long)arg1; \
4849 _argvec[2] = (unsigned long)arg2; \
4850 _argvec[3] = (unsigned long)arg3; \
4852 VALGRIND_CFI_PROLOGUE \
4853 "aghi 15,-160\n\t" \
4858 VALGRIND_CALL_NOREDIR_R1 \
4861 VALGRIND_CFI_EPILOGUE \
4862 : /*out*/ "=d" (_res) \
4863 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4864 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4866 lval = (__typeof__(lval)) _res; \
4869 #define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
4871 volatile OrigFn _orig = (orig); \
4872 volatile unsigned long _argvec[5]; \
4873 volatile unsigned long _res; \
4874 _argvec[0] = (unsigned long)_orig.nraddr; \
4875 _argvec[1] = (unsigned long)arg1; \
4876 _argvec[2] = (unsigned long)arg2; \
4877 _argvec[3] = (unsigned long)arg3; \
4878 _argvec[4] = (unsigned long)arg4; \
4880 VALGRIND_CFI_PROLOGUE \
4881 "aghi 15,-160\n\t" \
4887 VALGRIND_CALL_NOREDIR_R1 \
4890 VALGRIND_CFI_EPILOGUE \
4891 : /*out*/ "=d" (_res) \
4892 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4893 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
4895 lval = (__typeof__(lval)) _res; \
4898 #define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
4900 volatile OrigFn _orig = (orig); \
4901 volatile unsigned long _argvec[6]; \
4902 volatile unsigned long _res; \
4903 _argvec[0] = (unsigned long)_orig.nraddr; \
4904 _argvec[1] = (unsigned long)arg1; \
4905 _argvec[2] = (unsigned long)arg2; \
4906 _argvec[3] = (unsigned long)arg3; \
4907 _argvec[4] = (unsigned long)arg4; \
4908 _argvec[5] = (unsigned long)arg5; \
4910 VALGRIND_CFI_PROLOGUE \
4911 "aghi 15,-160\n\t" \
4918 VALGRIND_CALL_NOREDIR_R1 \
4921 VALGRIND_CFI_EPILOGUE \
4922 : /*out*/ "=d" (_res) \
4923 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4924 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4926 lval = (__typeof__(lval)) _res; \
4929 #define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4932 volatile OrigFn _orig = (orig); \
4933 volatile unsigned long _argvec[7]; \
4934 volatile unsigned long _res; \
4935 _argvec[0] = (unsigned long)_orig.nraddr; \
4936 _argvec[1] = (unsigned long)arg1; \
4937 _argvec[2] = (unsigned long)arg2; \
4938 _argvec[3] = (unsigned long)arg3; \
4939 _argvec[4] = (unsigned long)arg4; \
4940 _argvec[5] = (unsigned long)arg5; \
4941 _argvec[6] = (unsigned long)arg6; \
4943 VALGRIND_CFI_PROLOGUE \
4944 "aghi 15,-168\n\t" \
4950 "mvc 160(8,15), 48(1)\n\t" \
4952 VALGRIND_CALL_NOREDIR_R1 \
4955 VALGRIND_CFI_EPILOGUE \
4956 : /*out*/ "=d" (_res) \
4957 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4958 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4960 lval = (__typeof__(lval)) _res; \
4963 #define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
4966 volatile OrigFn _orig = (orig); \
4967 volatile unsigned long _argvec[8]; \
4968 volatile unsigned long _res; \
4969 _argvec[0] = (unsigned long)_orig.nraddr; \
4970 _argvec[1] = (unsigned long)arg1; \
4971 _argvec[2] = (unsigned long)arg2; \
4972 _argvec[3] = (unsigned long)arg3; \
4973 _argvec[4] = (unsigned long)arg4; \
4974 _argvec[5] = (unsigned long)arg5; \
4975 _argvec[6] = (unsigned long)arg6; \
4976 _argvec[7] = (unsigned long)arg7; \
4978 VALGRIND_CFI_PROLOGUE \
4979 "aghi 15,-176\n\t" \
4985 "mvc 160(8,15), 48(1)\n\t" \
4986 "mvc 168(8,15), 56(1)\n\t" \
4988 VALGRIND_CALL_NOREDIR_R1 \
4991 VALGRIND_CFI_EPILOGUE \
4992 : /*out*/ "=d" (_res) \
4993 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
4994 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
4996 lval = (__typeof__(lval)) _res; \
4999 #define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5002 volatile OrigFn _orig = (orig); \
5003 volatile unsigned long _argvec[9]; \
5004 volatile unsigned long _res; \
5005 _argvec[0] = (unsigned long)_orig.nraddr; \
5006 _argvec[1] = (unsigned long)arg1; \
5007 _argvec[2] = (unsigned long)arg2; \
5008 _argvec[3] = (unsigned long)arg3; \
5009 _argvec[4] = (unsigned long)arg4; \
5010 _argvec[5] = (unsigned long)arg5; \
5011 _argvec[6] = (unsigned long)arg6; \
5012 _argvec[7] = (unsigned long)arg7; \
5013 _argvec[8] = (unsigned long)arg8; \
5015 VALGRIND_CFI_PROLOGUE \
5016 "aghi 15,-184\n\t" \
5022 "mvc 160(8,15), 48(1)\n\t" \
5023 "mvc 168(8,15), 56(1)\n\t" \
5024 "mvc 176(8,15), 64(1)\n\t" \
5026 VALGRIND_CALL_NOREDIR_R1 \
5029 VALGRIND_CFI_EPILOGUE \
5030 : /*out*/ "=d" (_res) \
5031 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5032 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5034 lval = (__typeof__(lval)) _res; \
5037 #define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5038 arg6, arg7 ,arg8, arg9) \
5040 volatile OrigFn _orig = (orig); \
5041 volatile unsigned long _argvec[10]; \
5042 volatile unsigned long _res; \
5043 _argvec[0] = (unsigned long)_orig.nraddr; \
5044 _argvec[1] = (unsigned long)arg1; \
5045 _argvec[2] = (unsigned long)arg2; \
5046 _argvec[3] = (unsigned long)arg3; \
5047 _argvec[4] = (unsigned long)arg4; \
5048 _argvec[5] = (unsigned long)arg5; \
5049 _argvec[6] = (unsigned long)arg6; \
5050 _argvec[7] = (unsigned long)arg7; \
5051 _argvec[8] = (unsigned long)arg8; \
5052 _argvec[9] = (unsigned long)arg9; \
5054 VALGRIND_CFI_PROLOGUE \
5055 "aghi 15,-192\n\t" \
5061 "mvc 160(8,15), 48(1)\n\t" \
5062 "mvc 168(8,15), 56(1)\n\t" \
5063 "mvc 176(8,15), 64(1)\n\t" \
5064 "mvc 184(8,15), 72(1)\n\t" \
5066 VALGRIND_CALL_NOREDIR_R1 \
5069 VALGRIND_CFI_EPILOGUE \
5070 : /*out*/ "=d" (_res) \
5071 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5072 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5074 lval = (__typeof__(lval)) _res; \
5077 #define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5078 arg6, arg7 ,arg8, arg9, arg10) \
5080 volatile OrigFn _orig = (orig); \
5081 volatile unsigned long _argvec[11]; \
5082 volatile unsigned long _res; \
5083 _argvec[0] = (unsigned long)_orig.nraddr; \
5084 _argvec[1] = (unsigned long)arg1; \
5085 _argvec[2] = (unsigned long)arg2; \
5086 _argvec[3] = (unsigned long)arg3; \
5087 _argvec[4] = (unsigned long)arg4; \
5088 _argvec[5] = (unsigned long)arg5; \
5089 _argvec[6] = (unsigned long)arg6; \
5090 _argvec[7] = (unsigned long)arg7; \
5091 _argvec[8] = (unsigned long)arg8; \
5092 _argvec[9] = (unsigned long)arg9; \
5093 _argvec[10] = (unsigned long)arg10; \
5095 VALGRIND_CFI_PROLOGUE \
5096 "aghi 15,-200\n\t" \
5102 "mvc 160(8,15), 48(1)\n\t" \
5103 "mvc 168(8,15), 56(1)\n\t" \
5104 "mvc 176(8,15), 64(1)\n\t" \
5105 "mvc 184(8,15), 72(1)\n\t" \
5106 "mvc 192(8,15), 80(1)\n\t" \
5108 VALGRIND_CALL_NOREDIR_R1 \
5111 VALGRIND_CFI_EPILOGUE \
5112 : /*out*/ "=d" (_res) \
5113 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5114 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5116 lval = (__typeof__(lval)) _res; \
5119 #define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5120 arg6, arg7 ,arg8, arg9, arg10, arg11) \
5122 volatile OrigFn _orig = (orig); \
5123 volatile unsigned long _argvec[12]; \
5124 volatile unsigned long _res; \
5125 _argvec[0] = (unsigned long)_orig.nraddr; \
5126 _argvec[1] = (unsigned long)arg1; \
5127 _argvec[2] = (unsigned long)arg2; \
5128 _argvec[3] = (unsigned long)arg3; \
5129 _argvec[4] = (unsigned long)arg4; \
5130 _argvec[5] = (unsigned long)arg5; \
5131 _argvec[6] = (unsigned long)arg6; \
5132 _argvec[7] = (unsigned long)arg7; \
5133 _argvec[8] = (unsigned long)arg8; \
5134 _argvec[9] = (unsigned long)arg9; \
5135 _argvec[10] = (unsigned long)arg10; \
5136 _argvec[11] = (unsigned long)arg11; \
5138 VALGRIND_CFI_PROLOGUE \
5139 "aghi 15,-208\n\t" \
5145 "mvc 160(8,15), 48(1)\n\t" \
5146 "mvc 168(8,15), 56(1)\n\t" \
5147 "mvc 176(8,15), 64(1)\n\t" \
5148 "mvc 184(8,15), 72(1)\n\t" \
5149 "mvc 192(8,15), 80(1)\n\t" \
5150 "mvc 200(8,15), 88(1)\n\t" \
5152 VALGRIND_CALL_NOREDIR_R1 \
5155 VALGRIND_CFI_EPILOGUE \
5156 : /*out*/ "=d" (_res) \
5157 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5158 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5160 lval = (__typeof__(lval)) _res; \
5163 #define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
5164 arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
5166 volatile OrigFn _orig = (orig); \
5167 volatile unsigned long _argvec[13]; \
5168 volatile unsigned long _res; \
5169 _argvec[0] = (unsigned long)_orig.nraddr; \
5170 _argvec[1] = (unsigned long)arg1; \
5171 _argvec[2] = (unsigned long)arg2; \
5172 _argvec[3] = (unsigned long)arg3; \
5173 _argvec[4] = (unsigned long)arg4; \
5174 _argvec[5] = (unsigned long)arg5; \
5175 _argvec[6] = (unsigned long)arg6; \
5176 _argvec[7] = (unsigned long)arg7; \
5177 _argvec[8] = (unsigned long)arg8; \
5178 _argvec[9] = (unsigned long)arg9; \
5179 _argvec[10] = (unsigned long)arg10; \
5180 _argvec[11] = (unsigned long)arg11; \
5181 _argvec[12] = (unsigned long)arg12; \
5183 VALGRIND_CFI_PROLOGUE \
5184 "aghi 15,-216\n\t" \
5190 "mvc 160(8,15), 48(1)\n\t" \
5191 "mvc 168(8,15), 56(1)\n\t" \
5192 "mvc 176(8,15), 64(1)\n\t" \
5193 "mvc 184(8,15), 72(1)\n\t" \
5194 "mvc 192(8,15), 80(1)\n\t" \
5195 "mvc 200(8,15), 88(1)\n\t" \
5196 "mvc 208(8,15), 96(1)\n\t" \
5198 VALGRIND_CALL_NOREDIR_R1 \
5201 VALGRIND_CFI_EPILOGUE \
5202 : /*out*/ "=d" (_res) \
5203 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
5204 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
5206 lval = (__typeof__(lval)) _res; \
5210 #endif /* PLAT_s390x_linux */
5212 /* ------------------------- mips32-linux ----------------------- */
5214 #if defined(PLAT_mips32_linux)
5216 /* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4. */
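/* Illustrative note (not in the original source): each macro below first
   saves $28 (gp) and $31 (ra) in a private 8-byte area, then reserves the
   o32 argument area (16 bytes for $4-$7, plus slots for any stack-passed
   arguments) before the call, and unwinds both in reverse order afterwards,
   e.g. for five arguments

        subu $29, $29, 8           # save area for $28/$31
        sw   $28, 0($29)
        sw   $31, 4($29)
        subu $29, $29, 24          # 16-byte register-arg area + arg5 slot
        sw   ..., 16($29)          # arg5 lands at offset 16
        ...call via $25 (t9)...
        addu $29, $29, 24
        lw   $28, 0($29)
        lw   $31, 4($29)
        addu $29, $29, 8
 */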
5224 #define CALL_FN_W_v(lval, orig) \
5226 volatile OrigFn _orig = (orig); \
5227 volatile unsigned long _argvec[1]; \
5228 volatile unsigned long _res; \
5229 _argvec[0] = (unsigned long)_orig.nraddr; \
5231 "subu $29, $29, 8 \n\t" \
5232 "sw $28, 0($29) \n\t" \
5233 "sw $31, 4($29) \n\t" \
5234 "subu $29, $29, 16 \n\t" \
5235 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5236 VALGRIND_CALL_NOREDIR_T9 \
5237 "addu $29, $29, 16\n\t" \
5238 "lw $28, 0($29) \n\t" \
5239 "lw $31, 4($29) \n\t" \
5240 "addu $29, $29, 8 \n\t" \
5242 : /*out*/ "=r" (_res) \
5243 : /*in*/ "0" (&_argvec[0]) \
5244 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5246 lval = (__typeof__(lval)) _res; \
5249 #define CALL_FN_W_W(lval, orig, arg1) \
5251 volatile OrigFn _orig = (orig); \
5252 volatile unsigned long _argvec[2]; \
5253 volatile unsigned long _res; \
5254 _argvec[0] = (unsigned long)_orig.nraddr; \
5255 _argvec[1] = (unsigned long)(arg1); \
5257 "subu $29, $29, 8 \n\t" \
5258 "sw $28, 0($29) \n\t" \
5259 "sw $31, 4($29) \n\t" \
5260 "subu $29, $29, 16 \n\t" \
5261 "lw $4, 4(%1) \n\t" /* arg1*/ \
5262 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5263 VALGRIND_CALL_NOREDIR_T9 \
5264 "addu $29, $29, 16 \n\t" \
5265 "lw $28, 0($29) \n\t" \
5266 "lw $31, 4($29) \n\t" \
5267 "addu $29, $29, 8 \n\t" \
5269 : /*out*/ "=r" (_res) \
5270 : /*in*/ "0" (&_argvec[0]) \
5271 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5273 lval = (__typeof__(lval)) _res; \
5276 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5278 volatile OrigFn _orig = (orig); \
5279 volatile unsigned long _argvec[3]; \
5280 volatile unsigned long _res; \
5281 _argvec[0] = (unsigned long)_orig.nraddr; \
5282 _argvec[1] = (unsigned long)(arg1); \
5283 _argvec[2] = (unsigned long)(arg2); \
5285 "subu $29, $29, 8 \n\t" \
5286 "sw $28, 0($29) \n\t" \
5287 "sw $31, 4($29) \n\t" \
5288 "subu $29, $29, 16 \n\t" \
5289 "lw $4, 4(%1) \n\t" \
5290 "lw $5, 8(%1) \n\t" \
5291 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5292 VALGRIND_CALL_NOREDIR_T9 \
5293 "addu $29, $29, 16 \n\t" \
5294 "lw $28, 0($29) \n\t" \
5295 "lw $31, 4($29) \n\t" \
5296 "addu $29, $29, 8 \n\t" \
5298 : /*out*/ "=r" (_res) \
5299 : /*in*/ "0" (&_argvec[0]) \
5300 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5302 lval = (__typeof__(lval)) _res; \
5305 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5307 volatile OrigFn _orig = (orig); \
5308 volatile unsigned long _argvec[4]; \
5309 volatile unsigned long _res; \
5310 _argvec[0] = (unsigned long)_orig.nraddr; \
5311 _argvec[1] = (unsigned long)(arg1); \
5312 _argvec[2] = (unsigned long)(arg2); \
5313 _argvec[3] = (unsigned long)(arg3); \
5315 "subu $29, $29, 8 \n\t" \
5316 "sw $28, 0($29) \n\t" \
5317 "sw $31, 4($29) \n\t" \
5318 "subu $29, $29, 16 \n\t" \
5319 "lw $4, 4(%1) \n\t" \
5320 "lw $5, 8(%1) \n\t" \
5321 "lw $6, 12(%1) \n\t" \
5322 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5323 VALGRIND_CALL_NOREDIR_T9 \
5324 "addu $29, $29, 16 \n\t" \
5325 "lw $28, 0($29) \n\t" \
5326 "lw $31, 4($29) \n\t" \
5327 "addu $29, $29, 8 \n\t" \
5329 : /*out*/ "=r" (_res) \
5330 : /*in*/ "0" (&_argvec[0]) \
5331 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5333 lval = (__typeof__(lval)) _res; \
5336 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5338 volatile OrigFn _orig = (orig); \
5339 volatile unsigned long _argvec[5]; \
5340 volatile unsigned long _res; \
5341 _argvec[0] = (unsigned long)_orig.nraddr; \
5342 _argvec[1] = (unsigned long)(arg1); \
5343 _argvec[2] = (unsigned long)(arg2); \
5344 _argvec[3] = (unsigned long)(arg3); \
5345 _argvec[4] = (unsigned long)(arg4); \
5347 "subu $29, $29, 8 \n\t" \
5348 "sw $28, 0($29) \n\t" \
5349 "sw $31, 4($29) \n\t" \
5350 "subu $29, $29, 16 \n\t" \
5351 "lw $4, 4(%1) \n\t" \
5352 "lw $5, 8(%1) \n\t" \
5353 "lw $6, 12(%1) \n\t" \
5354 "lw $7, 16(%1) \n\t" \
5355 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5356 VALGRIND_CALL_NOREDIR_T9 \
5357 "addu $29, $29, 16 \n\t" \
5358 "lw $28, 0($29) \n\t" \
5359 "lw $31, 4($29) \n\t" \
5360 "addu $29, $29, 8 \n\t" \
5362 : /*out*/ "=r" (_res) \
5363 : /*in*/ "0" (&_argvec[0]) \
5364 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5366 lval = (__typeof__(lval)) _res; \
5369 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5371 volatile OrigFn _orig = (orig); \
5372 volatile unsigned long _argvec[6]; \
5373 volatile unsigned long _res; \
5374 _argvec[0] = (unsigned long)_orig.nraddr; \
5375 _argvec[1] = (unsigned long)(arg1); \
5376 _argvec[2] = (unsigned long)(arg2); \
5377 _argvec[3] = (unsigned long)(arg3); \
5378 _argvec[4] = (unsigned long)(arg4); \
5379 _argvec[5] = (unsigned long)(arg5); \
5381 "subu $29, $29, 8 \n\t" \
5382 "sw $28, 0($29) \n\t" \
5383 "sw $31, 4($29) \n\t" \
5384 "lw $4, 20(%1) \n\t" \
5385 "subu $29, $29, 24\n\t" \
5386 "sw $4, 16($29) \n\t" \
5387 "lw $4, 4(%1) \n\t" \
5388 "lw $5, 8(%1) \n\t" \
5389 "lw $6, 12(%1) \n\t" \
5390 "lw $7, 16(%1) \n\t" \
5391 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5392 VALGRIND_CALL_NOREDIR_T9 \
5393 "addu $29, $29, 24 \n\t" \
5394 "lw $28, 0($29) \n\t" \
5395 "lw $31, 4($29) \n\t" \
5396 "addu $29, $29, 8 \n\t" \
5398 : /*out*/ "=r" (_res) \
5399 : /*in*/ "0" (&_argvec[0]) \
5400 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5402 lval = (__typeof__(lval)) _res; \
5404 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5406 volatile OrigFn _orig = (orig); \
5407 volatile unsigned long _argvec[7]; \
5408 volatile unsigned long _res; \
5409 _argvec[0] = (unsigned long)_orig.nraddr; \
5410 _argvec[1] = (unsigned long)(arg1); \
5411 _argvec[2] = (unsigned long)(arg2); \
5412 _argvec[3] = (unsigned long)(arg3); \
5413 _argvec[4] = (unsigned long)(arg4); \
5414 _argvec[5] = (unsigned long)(arg5); \
5415 _argvec[6] = (unsigned long)(arg6); \
5417 "subu $29, $29, 8 \n\t" \
5418 "sw $28, 0($29) \n\t" \
5419 "sw $31, 4($29) \n\t" \
5420 "lw $4, 20(%1) \n\t" \
5421 "subu $29, $29, 32\n\t" \
5422 "sw $4, 16($29) \n\t" \
5423 "lw $4, 24(%1) \n\t" \
5425 "sw $4, 20($29) \n\t" \
5426 "lw $4, 4(%1) \n\t" \
5427 "lw $5, 8(%1) \n\t" \
5428 "lw $6, 12(%1) \n\t" \
5429 "lw $7, 16(%1) \n\t" \
5430 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5431 VALGRIND_CALL_NOREDIR_T9 \
5432 "addu $29, $29, 32 \n\t" \
5433 "lw $28, 0($29) \n\t" \
5434 "lw $31, 4($29) \n\t" \
5435 "addu $29, $29, 8 \n\t" \
5437 : /*out*/ "=r" (_res) \
5438 : /*in*/ "0" (&_argvec[0]) \
5439 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5441 lval = (__typeof__(lval)) _res; \
5444 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5447 volatile OrigFn _orig = (orig); \
5448 volatile unsigned long _argvec[8]; \
5449 volatile unsigned long _res; \
5450 _argvec[0] = (unsigned long)_orig.nraddr; \
5451 _argvec[1] = (unsigned long)(arg1); \
5452 _argvec[2] = (unsigned long)(arg2); \
5453 _argvec[3] = (unsigned long)(arg3); \
5454 _argvec[4] = (unsigned long)(arg4); \
5455 _argvec[5] = (unsigned long)(arg5); \
5456 _argvec[6] = (unsigned long)(arg6); \
5457 _argvec[7] = (unsigned long)(arg7); \
5459 "subu $29, $29, 8 \n\t" \
5460 "sw $28, 0($29) \n\t" \
5461 "sw $31, 4($29) \n\t" \
5462 "lw $4, 20(%1) \n\t" \
5463 "subu $29, $29, 32\n\t" \
5464 "sw $4, 16($29) \n\t" \
5465 "lw $4, 24(%1) \n\t" \
5466 "sw $4, 20($29) \n\t" \
5467 "lw $4, 28(%1) \n\t" \
5468 "sw $4, 24($29) \n\t" \
5469 "lw $4, 4(%1) \n\t" \
5470 "lw $5, 8(%1) \n\t" \
5471 "lw $6, 12(%1) \n\t" \
5472 "lw $7, 16(%1) \n\t" \
5473 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5474 VALGRIND_CALL_NOREDIR_T9 \
5475 "addu $29, $29, 32 \n\t" \
5476 "lw $28, 0($29) \n\t" \
5477 "lw $31, 4($29) \n\t" \
5478 "addu $29, $29, 8 \n\t" \
5480 : /*out*/ "=r" (_res) \
5481 : /*in*/ "0" (&_argvec[0]) \
5482 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5484 lval = (__typeof__(lval)) _res; \
5487 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5490 volatile OrigFn _orig = (orig); \
5491 volatile unsigned long _argvec[9]; \
5492 volatile unsigned long _res; \
5493 _argvec[0] = (unsigned long)_orig.nraddr; \
5494 _argvec[1] = (unsigned long)(arg1); \
5495 _argvec[2] = (unsigned long)(arg2); \
5496 _argvec[3] = (unsigned long)(arg3); \
5497 _argvec[4] = (unsigned long)(arg4); \
5498 _argvec[5] = (unsigned long)(arg5); \
5499 _argvec[6] = (unsigned long)(arg6); \
5500 _argvec[7] = (unsigned long)(arg7); \
5501 _argvec[8] = (unsigned long)(arg8); \
5503 "subu $29, $29, 8 \n\t" \
5504 "sw $28, 0($29) \n\t" \
5505 "sw $31, 4($29) \n\t" \
5506 "lw $4, 20(%1) \n\t" \
5507 "subu $29, $29, 40\n\t" \
5508 "sw $4, 16($29) \n\t" \
5509 "lw $4, 24(%1) \n\t" \
5510 "sw $4, 20($29) \n\t" \
5511 "lw $4, 28(%1) \n\t" \
5512 "sw $4, 24($29) \n\t" \
5513 "lw $4, 32(%1) \n\t" \
5514 "sw $4, 28($29) \n\t" \
5515 "lw $4, 4(%1) \n\t" \
5516 "lw $5, 8(%1) \n\t" \
5517 "lw $6, 12(%1) \n\t" \
5518 "lw $7, 16(%1) \n\t" \
5519 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5520 VALGRIND_CALL_NOREDIR_T9 \
5521 "addu $29, $29, 40 \n\t" \
5522 "lw $28, 0($29) \n\t" \
5523 "lw $31, 4($29) \n\t" \
5524 "addu $29, $29, 8 \n\t" \
5526 : /*out*/ "=r" (_res) \
5527 : /*in*/ "0" (&_argvec[0]) \
5528 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5530 lval = (__typeof__(lval)) _res; \
5533 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5536 volatile OrigFn _orig = (orig); \
5537 volatile unsigned long _argvec[10]; \
5538 volatile unsigned long _res; \
5539 _argvec[0] = (unsigned long)_orig.nraddr; \
5540 _argvec[1] = (unsigned long)(arg1); \
5541 _argvec[2] = (unsigned long)(arg2); \
5542 _argvec[3] = (unsigned long)(arg3); \
5543 _argvec[4] = (unsigned long)(arg4); \
5544 _argvec[5] = (unsigned long)(arg5); \
5545 _argvec[6] = (unsigned long)(arg6); \
5546 _argvec[7] = (unsigned long)(arg7); \
5547 _argvec[8] = (unsigned long)(arg8); \
5548 _argvec[9] = (unsigned long)(arg9); \
5550 "subu $29, $29, 8 \n\t" \
5551 "sw $28, 0($29) \n\t" \
5552 "sw $31, 4($29) \n\t" \
5553 "lw $4, 20(%1) \n\t" \
5554 "subu $29, $29, 40\n\t" \
5555 "sw $4, 16($29) \n\t" \
5556 "lw $4, 24(%1) \n\t" \
5557 "sw $4, 20($29) \n\t" \
5558 "lw $4, 28(%1) \n\t" \
5559 "sw $4, 24($29) \n\t" \
5560 "lw $4, 32(%1) \n\t" \
5561 "sw $4, 28($29) \n\t" \
5562 "lw $4, 36(%1) \n\t" \
5563 "sw $4, 32($29) \n\t" \
5564 "lw $4, 4(%1) \n\t" \
5565 "lw $5, 8(%1) \n\t" \
5566 "lw $6, 12(%1) \n\t" \
5567 "lw $7, 16(%1) \n\t" \
5568 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5569 VALGRIND_CALL_NOREDIR_T9 \
5570 "addu $29, $29, 40 \n\t" \
5571 "lw $28, 0($29) \n\t" \
5572 "lw $31, 4($29) \n\t" \
5573 "addu $29, $29, 8 \n\t" \
5575 : /*out*/ "=r" (_res) \
5576 : /*in*/ "0" (&_argvec[0]) \
5577 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5579 lval = (__typeof__(lval)) _res; \
5582 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5583 arg7,arg8,arg9,arg10) \
5585 volatile OrigFn _orig = (orig); \
5586 volatile unsigned long _argvec[11]; \
5587 volatile unsigned long _res; \
5588 _argvec[0] = (unsigned long)_orig.nraddr; \
5589 _argvec[1] = (unsigned long)(arg1); \
5590 _argvec[2] = (unsigned long)(arg2); \
5591 _argvec[3] = (unsigned long)(arg3); \
5592 _argvec[4] = (unsigned long)(arg4); \
5593 _argvec[5] = (unsigned long)(arg5); \
5594 _argvec[6] = (unsigned long)(arg6); \
5595 _argvec[7] = (unsigned long)(arg7); \
5596 _argvec[8] = (unsigned long)(arg8); \
5597 _argvec[9] = (unsigned long)(arg9); \
5598 _argvec[10] = (unsigned long)(arg10); \
5600 "subu $29, $29, 8 \n\t" \
5601 "sw $28, 0($29) \n\t" \
5602 "sw $31, 4($29) \n\t" \
5603 "lw $4, 20(%1) \n\t" \
5604 "subu $29, $29, 48\n\t" \
5605 "sw $4, 16($29) \n\t" \
5606 "lw $4, 24(%1) \n\t" \
5607 "sw $4, 20($29) \n\t" \
5608 "lw $4, 28(%1) \n\t" \
5609 "sw $4, 24($29) \n\t" \
5610 "lw $4, 32(%1) \n\t" \
5611 "sw $4, 28($29) \n\t" \
5612 "lw $4, 36(%1) \n\t" \
5613 "sw $4, 32($29) \n\t" \
5614 "lw $4, 40(%1) \n\t" \
5615 "sw $4, 36($29) \n\t" \
5616 "lw $4, 4(%1) \n\t" \
5617 "lw $5, 8(%1) \n\t" \
5618 "lw $6, 12(%1) \n\t" \
5619 "lw $7, 16(%1) \n\t" \
5620 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5621 VALGRIND_CALL_NOREDIR_T9 \
5622 "addu $29, $29, 48 \n\t" \
5623 "lw $28, 0($29) \n\t" \
5624 "lw $31, 4($29) \n\t" \
5625 "addu $29, $29, 8 \n\t" \
5627 : /*out*/ "=r" (_res) \
5628 : /*in*/ "0" (&_argvec[0]) \
5629 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5631 lval = (__typeof__(lval)) _res; \
5634 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5635 arg6,arg7,arg8,arg9,arg10, \
5638 volatile OrigFn _orig = (orig); \
5639 volatile unsigned long _argvec[12]; \
5640 volatile unsigned long _res; \
5641 _argvec[0] = (unsigned long)_orig.nraddr; \
5642 _argvec[1] = (unsigned long)(arg1); \
5643 _argvec[2] = (unsigned long)(arg2); \
5644 _argvec[3] = (unsigned long)(arg3); \
5645 _argvec[4] = (unsigned long)(arg4); \
5646 _argvec[5] = (unsigned long)(arg5); \
5647 _argvec[6] = (unsigned long)(arg6); \
5648 _argvec[7] = (unsigned long)(arg7); \
5649 _argvec[8] = (unsigned long)(arg8); \
5650 _argvec[9] = (unsigned long)(arg9); \
5651 _argvec[10] = (unsigned long)(arg10); \
5652 _argvec[11] = (unsigned long)(arg11); \
5654 "subu $29, $29, 8 \n\t" \
5655 "sw $28, 0($29) \n\t" \
5656 "sw $31, 4($29) \n\t" \
5657 "lw $4, 20(%1) \n\t" \
5658 "subu $29, $29, 48\n\t" \
5659 "sw $4, 16($29) \n\t" \
5660 "lw $4, 24(%1) \n\t" \
5661 "sw $4, 20($29) \n\t" \
5662 "lw $4, 28(%1) \n\t" \
5663 "sw $4, 24($29) \n\t" \
5664 "lw $4, 32(%1) \n\t" \
5665 "sw $4, 28($29) \n\t" \
5666 "lw $4, 36(%1) \n\t" \
5667 "sw $4, 32($29) \n\t" \
5668 "lw $4, 40(%1) \n\t" \
5669 "sw $4, 36($29) \n\t" \
5670 "lw $4, 44(%1) \n\t" \
5671 "sw $4, 40($29) \n\t" \
5672 "lw $4, 4(%1) \n\t" \
5673 "lw $5, 8(%1) \n\t" \
5674 "lw $6, 12(%1) \n\t" \
5675 "lw $7, 16(%1) \n\t" \
5676 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5677 VALGRIND_CALL_NOREDIR_T9 \
5678 "addu $29, $29, 48 \n\t" \
5679 "lw $28, 0($29) \n\t" \
5680 "lw $31, 4($29) \n\t" \
5681 "addu $29, $29, 8 \n\t" \
5683 : /*out*/ "=r" (_res) \
5684 : /*in*/ "0" (&_argvec[0]) \
5685 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5687 lval = (__typeof__(lval)) _res; \
5690 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5691 arg6,arg7,arg8,arg9,arg10, \
5694 volatile OrigFn _orig = (orig); \
5695 volatile unsigned long _argvec[13]; \
5696 volatile unsigned long _res; \
5697 _argvec[0] = (unsigned long)_orig.nraddr; \
5698 _argvec[1] = (unsigned long)(arg1); \
5699 _argvec[2] = (unsigned long)(arg2); \
5700 _argvec[3] = (unsigned long)(arg3); \
5701 _argvec[4] = (unsigned long)(arg4); \
5702 _argvec[5] = (unsigned long)(arg5); \
5703 _argvec[6] = (unsigned long)(arg6); \
5704 _argvec[7] = (unsigned long)(arg7); \
5705 _argvec[8] = (unsigned long)(arg8); \
5706 _argvec[9] = (unsigned long)(arg9); \
5707 _argvec[10] = (unsigned long)(arg10); \
5708 _argvec[11] = (unsigned long)(arg11); \
5709 _argvec[12] = (unsigned long)(arg12); \
5711 "subu $29, $29, 8 \n\t" \
5712 "sw $28, 0($29) \n\t" \
5713 "sw $31, 4($29) \n\t" \
5714 "lw $4, 20(%1) \n\t" \
5715 "subu $29, $29, 56\n\t" \
5716 "sw $4, 16($29) \n\t" \
5717 "lw $4, 24(%1) \n\t" \
5718 "sw $4, 20($29) \n\t" \
5719 "lw $4, 28(%1) \n\t" \
5720 "sw $4, 24($29) \n\t" \
5721 "lw $4, 32(%1) \n\t" \
5722 "sw $4, 28($29) \n\t" \
5723 "lw $4, 36(%1) \n\t" \
5724 "sw $4, 32($29) \n\t" \
5725 "lw $4, 40(%1) \n\t" \
5726 "sw $4, 36($29) \n\t" \
5727 "lw $4, 44(%1) \n\t" \
5728 "sw $4, 40($29) \n\t" \
5729 "lw $4, 48(%1) \n\t" \
5730 "sw $4, 44($29) \n\t" \
5731 "lw $4, 4(%1) \n\t" \
5732 "lw $5, 8(%1) \n\t" \
5733 "lw $6, 12(%1) \n\t" \
5734 "lw $7, 16(%1) \n\t" \
5735 "lw $25, 0(%1) \n\t" /* target->t9 */ \
5736 VALGRIND_CALL_NOREDIR_T9 \
5737 "addu $29, $29, 56 \n\t" \
5738 "lw $28, 0($29) \n\t" \
5739 "lw $31, 4($29) \n\t" \
5740 "addu $29, $29, 8 \n\t" \
5742 : /*out*/ "=r" (_res) \
5743 : /*in*/ "r" (&_argvec[0]) \
5744 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5746 lval = (__typeof__(lval)) _res; \
5749 #endif /* PLAT_mips32_linux */
5751 /* ------------------------- mips64-linux ------------------------- */
5753 #if defined(PLAT_mips64_linux)
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",              \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24",     \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(unsigned long) == 8. */
5763 #define CALL_FN_W_v(lval, orig) \
5765 volatile OrigFn _orig = (orig); \
5766 volatile unsigned long _argvec[1]; \
5767 volatile unsigned long _res; \
5768 _argvec[0] = (unsigned long)_orig.nraddr; \
5770 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5771 VALGRIND_CALL_NOREDIR_T9 \
5773 : /*out*/ "=r" (_res) \
5774 : /*in*/ "0" (&_argvec[0]) \
5775 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5777 lval = (__typeof__(lval)) _res; \
5780 #define CALL_FN_W_W(lval, orig, arg1) \
5782 volatile OrigFn _orig = (orig); \
5783 volatile unsigned long _argvec[2]; \
5784 volatile unsigned long _res; \
5785 _argvec[0] = (unsigned long)_orig.nraddr; \
5786 _argvec[1] = (unsigned long)(arg1); \
5788 "ld $4, 8(%1)\n\t" /* arg1*/ \
5789 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5790 VALGRIND_CALL_NOREDIR_T9 \
5792 : /*out*/ "=r" (_res) \
5793 : /*in*/ "r" (&_argvec[0]) \
5794 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5796 lval = (__typeof__(lval)) _res; \
5799 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
5801 volatile OrigFn _orig = (orig); \
5802 volatile unsigned long _argvec[3]; \
5803 volatile unsigned long _res; \
5804 _argvec[0] = (unsigned long)_orig.nraddr; \
5805 _argvec[1] = (unsigned long)(arg1); \
5806 _argvec[2] = (unsigned long)(arg2); \
5808 "ld $4, 8(%1)\n\t" \
5809 "ld $5, 16(%1)\n\t" \
5810 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5811 VALGRIND_CALL_NOREDIR_T9 \
5813 : /*out*/ "=r" (_res) \
5814 : /*in*/ "r" (&_argvec[0]) \
5815 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5817 lval = (__typeof__(lval)) _res; \
5820 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
5822 volatile OrigFn _orig = (orig); \
5823 volatile unsigned long _argvec[4]; \
5824 volatile unsigned long _res; \
5825 _argvec[0] = (unsigned long)_orig.nraddr; \
5826 _argvec[1] = (unsigned long)(arg1); \
5827 _argvec[2] = (unsigned long)(arg2); \
5828 _argvec[3] = (unsigned long)(arg3); \
5830 "ld $4, 8(%1)\n\t" \
5831 "ld $5, 16(%1)\n\t" \
5832 "ld $6, 24(%1)\n\t" \
5833 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5834 VALGRIND_CALL_NOREDIR_T9 \
5836 : /*out*/ "=r" (_res) \
5837 : /*in*/ "r" (&_argvec[0]) \
5838 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5840 lval = (__typeof__(lval)) _res; \
5843 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
5845 volatile OrigFn _orig = (orig); \
5846 volatile unsigned long _argvec[5]; \
5847 volatile unsigned long _res; \
5848 _argvec[0] = (unsigned long)_orig.nraddr; \
5849 _argvec[1] = (unsigned long)(arg1); \
5850 _argvec[2] = (unsigned long)(arg2); \
5851 _argvec[3] = (unsigned long)(arg3); \
5852 _argvec[4] = (unsigned long)(arg4); \
5854 "ld $4, 8(%1)\n\t" \
5855 "ld $5, 16(%1)\n\t" \
5856 "ld $6, 24(%1)\n\t" \
5857 "ld $7, 32(%1)\n\t" \
5858 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5859 VALGRIND_CALL_NOREDIR_T9 \
5861 : /*out*/ "=r" (_res) \
5862 : /*in*/ "r" (&_argvec[0]) \
5863 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5865 lval = (__typeof__(lval)) _res; \
5868 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
5870 volatile OrigFn _orig = (orig); \
5871 volatile unsigned long _argvec[6]; \
5872 volatile unsigned long _res; \
5873 _argvec[0] = (unsigned long)_orig.nraddr; \
5874 _argvec[1] = (unsigned long)(arg1); \
5875 _argvec[2] = (unsigned long)(arg2); \
5876 _argvec[3] = (unsigned long)(arg3); \
5877 _argvec[4] = (unsigned long)(arg4); \
5878 _argvec[5] = (unsigned long)(arg5); \
5880 "ld $4, 8(%1)\n\t" \
5881 "ld $5, 16(%1)\n\t" \
5882 "ld $6, 24(%1)\n\t" \
5883 "ld $7, 32(%1)\n\t" \
5884 "ld $8, 40(%1)\n\t" \
5885 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5886 VALGRIND_CALL_NOREDIR_T9 \
5888 : /*out*/ "=r" (_res) \
5889 : /*in*/ "r" (&_argvec[0]) \
5890 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5892 lval = (__typeof__(lval)) _res; \
5895 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
5897 volatile OrigFn _orig = (orig); \
5898 volatile unsigned long _argvec[7]; \
5899 volatile unsigned long _res; \
5900 _argvec[0] = (unsigned long)_orig.nraddr; \
5901 _argvec[1] = (unsigned long)(arg1); \
5902 _argvec[2] = (unsigned long)(arg2); \
5903 _argvec[3] = (unsigned long)(arg3); \
5904 _argvec[4] = (unsigned long)(arg4); \
5905 _argvec[5] = (unsigned long)(arg5); \
5906 _argvec[6] = (unsigned long)(arg6); \
5908 "ld $4, 8(%1)\n\t" \
5909 "ld $5, 16(%1)\n\t" \
5910 "ld $6, 24(%1)\n\t" \
5911 "ld $7, 32(%1)\n\t" \
5912 "ld $8, 40(%1)\n\t" \
5913 "ld $9, 48(%1)\n\t" \
5914 "ld $25, 0(%1)\n\t" /* target->t9 */ \
5915 VALGRIND_CALL_NOREDIR_T9 \
5917 : /*out*/ "=r" (_res) \
5918 : /*in*/ "r" (&_argvec[0]) \
5919 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5921 lval = (__typeof__(lval)) _res; \
5924 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5927 volatile OrigFn _orig = (orig); \
5928 volatile unsigned long _argvec[8]; \
5929 volatile unsigned long _res; \
5930 _argvec[0] = (unsigned long)_orig.nraddr; \
5931 _argvec[1] = (unsigned long)(arg1); \
5932 _argvec[2] = (unsigned long)(arg2); \
5933 _argvec[3] = (unsigned long)(arg3); \
5934 _argvec[4] = (unsigned long)(arg4); \
5935 _argvec[5] = (unsigned long)(arg5); \
5936 _argvec[6] = (unsigned long)(arg6); \
5937 _argvec[7] = (unsigned long)(arg7); \
5939 "ld $4, 8(%1)\n\t" \
5940 "ld $5, 16(%1)\n\t" \
5941 "ld $6, 24(%1)\n\t" \
5942 "ld $7, 32(%1)\n\t" \
5943 "ld $8, 40(%1)\n\t" \
5944 "ld $9, 48(%1)\n\t" \
5945 "ld $10, 56(%1)\n\t" \
5946 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5947 VALGRIND_CALL_NOREDIR_T9 \
5949 : /*out*/ "=r" (_res) \
5950 : /*in*/ "r" (&_argvec[0]) \
5951 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5953 lval = (__typeof__(lval)) _res; \
5956 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5959 volatile OrigFn _orig = (orig); \
5960 volatile unsigned long _argvec[9]; \
5961 volatile unsigned long _res; \
5962 _argvec[0] = (unsigned long)_orig.nraddr; \
5963 _argvec[1] = (unsigned long)(arg1); \
5964 _argvec[2] = (unsigned long)(arg2); \
5965 _argvec[3] = (unsigned long)(arg3); \
5966 _argvec[4] = (unsigned long)(arg4); \
5967 _argvec[5] = (unsigned long)(arg5); \
5968 _argvec[6] = (unsigned long)(arg6); \
5969 _argvec[7] = (unsigned long)(arg7); \
5970 _argvec[8] = (unsigned long)(arg8); \
5972 "ld $4, 8(%1)\n\t" \
5973 "ld $5, 16(%1)\n\t" \
5974 "ld $6, 24(%1)\n\t" \
5975 "ld $7, 32(%1)\n\t" \
5976 "ld $8, 40(%1)\n\t" \
5977 "ld $9, 48(%1)\n\t" \
5978 "ld $10, 56(%1)\n\t" \
5979 "ld $11, 64(%1)\n\t" \
5980 "ld $25, 0(%1) \n\t" /* target->t9 */ \
5981 VALGRIND_CALL_NOREDIR_T9 \
5983 : /*out*/ "=r" (_res) \
5984 : /*in*/ "r" (&_argvec[0]) \
5985 : /*trash*/ "memory", __CALLER_SAVED_REGS \
5987 lval = (__typeof__(lval)) _res; \
5990 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
5993 volatile OrigFn _orig = (orig); \
5994 volatile unsigned long _argvec[10]; \
5995 volatile unsigned long _res; \
5996 _argvec[0] = (unsigned long)_orig.nraddr; \
5997 _argvec[1] = (unsigned long)(arg1); \
5998 _argvec[2] = (unsigned long)(arg2); \
5999 _argvec[3] = (unsigned long)(arg3); \
6000 _argvec[4] = (unsigned long)(arg4); \
6001 _argvec[5] = (unsigned long)(arg5); \
6002 _argvec[6] = (unsigned long)(arg6); \
6003 _argvec[7] = (unsigned long)(arg7); \
6004 _argvec[8] = (unsigned long)(arg8); \
6005 _argvec[9] = (unsigned long)(arg9); \
6007 "dsubu $29, $29, 8\n\t" \
6008 "ld $4, 72(%1)\n\t" \
6009 "sd $4, 0($29)\n\t" \
6010 "ld $4, 8(%1)\n\t" \
6011 "ld $5, 16(%1)\n\t" \
6012 "ld $6, 24(%1)\n\t" \
6013 "ld $7, 32(%1)\n\t" \
6014 "ld $8, 40(%1)\n\t" \
6015 "ld $9, 48(%1)\n\t" \
6016 "ld $10, 56(%1)\n\t" \
6017 "ld $11, 64(%1)\n\t" \
6018 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6019 VALGRIND_CALL_NOREDIR_T9 \
6020 "daddu $29, $29, 8\n\t" \
6022 : /*out*/ "=r" (_res) \
6023 : /*in*/ "r" (&_argvec[0]) \
6024 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6026 lval = (__typeof__(lval)) _res; \
6029 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6030 arg7,arg8,arg9,arg10) \
6032 volatile OrigFn _orig = (orig); \
6033 volatile unsigned long _argvec[11]; \
6034 volatile unsigned long _res; \
6035 _argvec[0] = (unsigned long)_orig.nraddr; \
6036 _argvec[1] = (unsigned long)(arg1); \
6037 _argvec[2] = (unsigned long)(arg2); \
6038 _argvec[3] = (unsigned long)(arg3); \
6039 _argvec[4] = (unsigned long)(arg4); \
6040 _argvec[5] = (unsigned long)(arg5); \
6041 _argvec[6] = (unsigned long)(arg6); \
6042 _argvec[7] = (unsigned long)(arg7); \
6043 _argvec[8] = (unsigned long)(arg8); \
6044 _argvec[9] = (unsigned long)(arg9); \
6045 _argvec[10] = (unsigned long)(arg10); \
6047 "dsubu $29, $29, 16\n\t" \
6048 "ld $4, 72(%1)\n\t" \
6049 "sd $4, 0($29)\n\t" \
6050 "ld $4, 80(%1)\n\t" \
6051 "sd $4, 8($29)\n\t" \
6052 "ld $4, 8(%1)\n\t" \
6053 "ld $5, 16(%1)\n\t" \
6054 "ld $6, 24(%1)\n\t" \
6055 "ld $7, 32(%1)\n\t" \
6056 "ld $8, 40(%1)\n\t" \
6057 "ld $9, 48(%1)\n\t" \
6058 "ld $10, 56(%1)\n\t" \
6059 "ld $11, 64(%1)\n\t" \
6060 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6061 VALGRIND_CALL_NOREDIR_T9 \
6062 "daddu $29, $29, 16\n\t" \
6064 : /*out*/ "=r" (_res) \
6065 : /*in*/ "r" (&_argvec[0]) \
6066 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6068 lval = (__typeof__(lval)) _res; \
6071 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6072 arg6,arg7,arg8,arg9,arg10, \
6075 volatile OrigFn _orig = (orig); \
6076 volatile unsigned long _argvec[12]; \
6077 volatile unsigned long _res; \
6078 _argvec[0] = (unsigned long)_orig.nraddr; \
6079 _argvec[1] = (unsigned long)(arg1); \
6080 _argvec[2] = (unsigned long)(arg2); \
6081 _argvec[3] = (unsigned long)(arg3); \
6082 _argvec[4] = (unsigned long)(arg4); \
6083 _argvec[5] = (unsigned long)(arg5); \
6084 _argvec[6] = (unsigned long)(arg6); \
6085 _argvec[7] = (unsigned long)(arg7); \
6086 _argvec[8] = (unsigned long)(arg8); \
6087 _argvec[9] = (unsigned long)(arg9); \
6088 _argvec[10] = (unsigned long)(arg10); \
6089 _argvec[11] = (unsigned long)(arg11); \
6091 "dsubu $29, $29, 24\n\t" \
6092 "ld $4, 72(%1)\n\t" \
6093 "sd $4, 0($29)\n\t" \
6094 "ld $4, 80(%1)\n\t" \
6095 "sd $4, 8($29)\n\t" \
6096 "ld $4, 88(%1)\n\t" \
6097 "sd $4, 16($29)\n\t" \
6098 "ld $4, 8(%1)\n\t" \
6099 "ld $5, 16(%1)\n\t" \
6100 "ld $6, 24(%1)\n\t" \
6101 "ld $7, 32(%1)\n\t" \
6102 "ld $8, 40(%1)\n\t" \
6103 "ld $9, 48(%1)\n\t" \
6104 "ld $10, 56(%1)\n\t" \
6105 "ld $11, 64(%1)\n\t" \
6106 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6107 VALGRIND_CALL_NOREDIR_T9 \
6108 "daddu $29, $29, 24\n\t" \
6110 : /*out*/ "=r" (_res) \
6111 : /*in*/ "r" (&_argvec[0]) \
6112 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6114 lval = (__typeof__(lval)) _res; \
6117 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6118 arg6,arg7,arg8,arg9,arg10, \
6121 volatile OrigFn _orig = (orig); \
6122 volatile unsigned long _argvec[13]; \
6123 volatile unsigned long _res; \
6124 _argvec[0] = (unsigned long)_orig.nraddr; \
6125 _argvec[1] = (unsigned long)(arg1); \
6126 _argvec[2] = (unsigned long)(arg2); \
6127 _argvec[3] = (unsigned long)(arg3); \
6128 _argvec[4] = (unsigned long)(arg4); \
6129 _argvec[5] = (unsigned long)(arg5); \
6130 _argvec[6] = (unsigned long)(arg6); \
6131 _argvec[7] = (unsigned long)(arg7); \
6132 _argvec[8] = (unsigned long)(arg8); \
6133 _argvec[9] = (unsigned long)(arg9); \
6134 _argvec[10] = (unsigned long)(arg10); \
6135 _argvec[11] = (unsigned long)(arg11); \
6136 _argvec[12] = (unsigned long)(arg12); \
6138 "dsubu $29, $29, 32\n\t" \
6139 "ld $4, 72(%1)\n\t" \
6140 "sd $4, 0($29)\n\t" \
6141 "ld $4, 80(%1)\n\t" \
6142 "sd $4, 8($29)\n\t" \
6143 "ld $4, 88(%1)\n\t" \
6144 "sd $4, 16($29)\n\t" \
6145 "ld $4, 96(%1)\n\t" \
6146 "sd $4, 24($29)\n\t" \
6147 "ld $4, 8(%1)\n\t" \
6148 "ld $5, 16(%1)\n\t" \
6149 "ld $6, 24(%1)\n\t" \
6150 "ld $7, 32(%1)\n\t" \
6151 "ld $8, 40(%1)\n\t" \
6152 "ld $9, 48(%1)\n\t" \
6153 "ld $10, 56(%1)\n\t" \
6154 "ld $11, 64(%1)\n\t" \
6155 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6156 VALGRIND_CALL_NOREDIR_T9 \
6157 "daddu $29, $29, 32\n\t" \
6159 : /*out*/ "=r" (_res) \
6160 : /*in*/ "r" (&_argvec[0]) \
6161 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6163 lval = (__typeof__(lval)) _res; \
6166 #endif /* PLAT_mips64_linux */
6168 /* ------------------------ tilegx-linux ------------------------- */
6170 #if defined(PLAT_tilegx_linux)
6172 /* These regs are trashed by the hidden call. */
6173 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3", "r4", "r5", \
6174 "r6", "r7", "r8", "r9", "r10", "r11", "r12", "r13", "r14", \
6175 "r15", "r16", "r17", "r18", "r19", "r20", "r21", "r22", \
6176 "r23", "r24", "r25", "r26", "r27", "r28", "r29", "lr"
/* These CALL_FN_ macros assume that on tilegx-linux,
   sizeof(unsigned long) == 8. */
6181 #define CALL_FN_W_v(lval, orig) \
6183 volatile OrigFn _orig = (orig); \
6184 volatile unsigned long _argvec[1]; \
6185 volatile unsigned long _res; \
6186 _argvec[0] = (unsigned long)_orig.nraddr; \
6188 "addi sp, sp, -8 \n\t" \
6189 "st_add sp, lr, -8 \n\t" \
6190 "ld r12, %1 \n\t" /* target->r11 */ \
6191 VALGRIND_CALL_NOREDIR_R12 \
6192 "addi sp, sp, 8\n\t" \
6193 "ld_add lr, sp, 8 \n\t" \
6195 : /*out*/ "=r" (_res) \
6196 : /*in*/ "r" (&_argvec[0]) \
6197 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6199 lval = (__typeof__(lval)) _res; \
6202 #define CALL_FN_W_W(lval, orig, arg1) \
6204 volatile OrigFn _orig = (orig); \
6205 volatile unsigned long _argvec[2]; \
6206 volatile unsigned long _res; \
6207 _argvec[0] = (unsigned long)_orig.nraddr; \
6208 _argvec[1] = (unsigned long)(arg1); \
6210 "addi sp, sp, -8 \n\t" \
6211 "st_add sp, lr, -8 \n\t" \
6212 "move r29, %1 \n\t" \
6213 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6214 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6215 VALGRIND_CALL_NOREDIR_R12 \
6216 "addi sp, sp, 8\n\t" \
6217 "ld_add lr, sp, 8 \n\t" \
6219 : /*out*/ "=r" (_res) \
6220 : /*in*/ "r" (&_argvec[0]) \
6221 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6222 lval = (__typeof__(lval)) _res; \
6225 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
6227 volatile OrigFn _orig = (orig); \
6228 volatile unsigned long _argvec[3]; \
6229 volatile unsigned long _res; \
6230 _argvec[0] = (unsigned long)_orig.nraddr; \
6231 _argvec[1] = (unsigned long)(arg1); \
6232 _argvec[2] = (unsigned long)(arg2); \
6234 "addi sp, sp, -8 \n\t" \
6235 "st_add sp, lr, -8 \n\t" \
6236 "move r29, %1 \n\t" \
6237 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6238 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6239 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6240 VALGRIND_CALL_NOREDIR_R12 \
6241 "addi sp, sp, 8\n\t" \
6242 "ld_add lr, sp, 8 \n\t" \
6244 : /*out*/ "=r" (_res) \
6245 : /*in*/ "r" (&_argvec[0]) \
6246 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6247 lval = (__typeof__(lval)) _res; \
6250 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
6252 volatile OrigFn _orig = (orig); \
6253 volatile unsigned long _argvec[4]; \
6254 volatile unsigned long _res; \
6255 _argvec[0] = (unsigned long)_orig.nraddr; \
6256 _argvec[1] = (unsigned long)(arg1); \
6257 _argvec[2] = (unsigned long)(arg2); \
6258 _argvec[3] = (unsigned long)(arg3); \
6260 "addi sp, sp, -8 \n\t" \
6261 "st_add sp, lr, -8 \n\t" \
6262 "move r29, %1 \n\t" \
6263 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6264 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6265 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6266 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6267 VALGRIND_CALL_NOREDIR_R12 \
6268 "addi sp, sp, 8 \n\t" \
6269 "ld_add lr, sp, 8 \n\t" \
6271 : /*out*/ "=r" (_res) \
6272 : /*in*/ "r" (&_argvec[0]) \
6273 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6274 lval = (__typeof__(lval)) _res; \
6277 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
6279 volatile OrigFn _orig = (orig); \
6280 volatile unsigned long _argvec[5]; \
6281 volatile unsigned long _res; \
6282 _argvec[0] = (unsigned long)_orig.nraddr; \
6283 _argvec[1] = (unsigned long)(arg1); \
6284 _argvec[2] = (unsigned long)(arg2); \
6285 _argvec[3] = (unsigned long)(arg3); \
6286 _argvec[4] = (unsigned long)(arg4); \
6288 "addi sp, sp, -8 \n\t" \
6289 "st_add sp, lr, -8 \n\t" \
6290 "move r29, %1 \n\t" \
6291 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6292 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6293 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6294 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6295 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6296 VALGRIND_CALL_NOREDIR_R12 \
6297 "addi sp, sp, 8\n\t" \
6298 "ld_add lr, sp, 8 \n\t" \
6300 : /*out*/ "=r" (_res) \
6301 : /*in*/ "r" (&_argvec[0]) \
6302 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6303 lval = (__typeof__(lval)) _res; \
6306 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
6308 volatile OrigFn _orig = (orig); \
6309 volatile unsigned long _argvec[6]; \
6310 volatile unsigned long _res; \
6311 _argvec[0] = (unsigned long)_orig.nraddr; \
6312 _argvec[1] = (unsigned long)(arg1); \
6313 _argvec[2] = (unsigned long)(arg2); \
6314 _argvec[3] = (unsigned long)(arg3); \
6315 _argvec[4] = (unsigned long)(arg4); \
6316 _argvec[5] = (unsigned long)(arg5); \
6318 "addi sp, sp, -8 \n\t" \
6319 "st_add sp, lr, -8 \n\t" \
6320 "move r29, %1 \n\t" \
6321 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6322 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6323 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6324 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6325 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6326 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6327 VALGRIND_CALL_NOREDIR_R12 \
6328 "addi sp, sp, 8\n\t" \
6329 "ld_add lr, sp, 8 \n\t" \
6331 : /*out*/ "=r" (_res) \
6332 : /*in*/ "r" (&_argvec[0]) \
6333 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6334 lval = (__typeof__(lval)) _res; \
6336 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
6338 volatile OrigFn _orig = (orig); \
6339 volatile unsigned long _argvec[7]; \
6340 volatile unsigned long _res; \
6341 _argvec[0] = (unsigned long)_orig.nraddr; \
6342 _argvec[1] = (unsigned long)(arg1); \
6343 _argvec[2] = (unsigned long)(arg2); \
6344 _argvec[3] = (unsigned long)(arg3); \
6345 _argvec[4] = (unsigned long)(arg4); \
6346 _argvec[5] = (unsigned long)(arg5); \
6347 _argvec[6] = (unsigned long)(arg6); \
6349 "addi sp, sp, -8 \n\t" \
6350 "st_add sp, lr, -8 \n\t" \
6351 "move r29, %1 \n\t" \
6352 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6353 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6354 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6355 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6356 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6357 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6358 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6359 VALGRIND_CALL_NOREDIR_R12 \
6360 "addi sp, sp, 8\n\t" \
6361 "ld_add lr, sp, 8 \n\t" \
6363 : /*out*/ "=r" (_res) \
6364 : /*in*/ "r" (&_argvec[0]) \
6365 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6366 lval = (__typeof__(lval)) _res; \
6369 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6372 volatile OrigFn _orig = (orig); \
6373 volatile unsigned long _argvec[8]; \
6374 volatile unsigned long _res; \
6375 _argvec[0] = (unsigned long)_orig.nraddr; \
6376 _argvec[1] = (unsigned long)(arg1); \
6377 _argvec[2] = (unsigned long)(arg2); \
6378 _argvec[3] = (unsigned long)(arg3); \
6379 _argvec[4] = (unsigned long)(arg4); \
6380 _argvec[5] = (unsigned long)(arg5); \
6381 _argvec[6] = (unsigned long)(arg6); \
6382 _argvec[7] = (unsigned long)(arg7); \
6384 "addi sp, sp, -8 \n\t" \
6385 "st_add sp, lr, -8 \n\t" \
6386 "move r29, %1 \n\t" \
6387 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6388 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6389 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6390 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6391 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6392 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6393 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6394 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6395 VALGRIND_CALL_NOREDIR_R12 \
6396 "addi sp, sp, 8\n\t" \
6397 "ld_add lr, sp, 8 \n\t" \
6399 : /*out*/ "=r" (_res) \
6400 : /*in*/ "r" (&_argvec[0]) \
6401 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6402 lval = (__typeof__(lval)) _res; \
6405 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6408 volatile OrigFn _orig = (orig); \
6409 volatile unsigned long _argvec[9]; \
6410 volatile unsigned long _res; \
6411 _argvec[0] = (unsigned long)_orig.nraddr; \
6412 _argvec[1] = (unsigned long)(arg1); \
6413 _argvec[2] = (unsigned long)(arg2); \
6414 _argvec[3] = (unsigned long)(arg3); \
6415 _argvec[4] = (unsigned long)(arg4); \
6416 _argvec[5] = (unsigned long)(arg5); \
6417 _argvec[6] = (unsigned long)(arg6); \
6418 _argvec[7] = (unsigned long)(arg7); \
6419 _argvec[8] = (unsigned long)(arg8); \
6421 "addi sp, sp, -8 \n\t" \
6422 "st_add sp, lr, -8 \n\t" \
6423 "move r29, %1 \n\t" \
6424 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6425 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6426 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6427 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6428 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6429 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6430 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6431 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6432 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6433 VALGRIND_CALL_NOREDIR_R12 \
6434 "addi sp, sp, 8\n\t" \
6435 "ld_add lr, sp, 8 \n\t" \
6437 : /*out*/ "=r" (_res) \
6438 : /*in*/ "r" (&_argvec[0]) \
6439 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6440 lval = (__typeof__(lval)) _res; \
6443 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6446 volatile OrigFn _orig = (orig); \
6447 volatile unsigned long _argvec[10]; \
6448 volatile unsigned long _res; \
6449 _argvec[0] = (unsigned long)_orig.nraddr; \
6450 _argvec[1] = (unsigned long)(arg1); \
6451 _argvec[2] = (unsigned long)(arg2); \
6452 _argvec[3] = (unsigned long)(arg3); \
6453 _argvec[4] = (unsigned long)(arg4); \
6454 _argvec[5] = (unsigned long)(arg5); \
6455 _argvec[6] = (unsigned long)(arg6); \
6456 _argvec[7] = (unsigned long)(arg7); \
6457 _argvec[8] = (unsigned long)(arg8); \
6458 _argvec[9] = (unsigned long)(arg9); \
6460 "addi sp, sp, -8 \n\t" \
6461 "st_add sp, lr, -8 \n\t" \
6462 "move r29, %1 \n\t" \
6463 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6464 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6465 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6466 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6467 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6468 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6469 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6470 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6471 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6472 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6473 VALGRIND_CALL_NOREDIR_R12 \
6474 "addi sp, sp, 8\n\t" \
6475 "ld_add lr, sp, 8 \n\t" \
6477 : /*out*/ "=r" (_res) \
6478 : /*in*/ "r" (&_argvec[0]) \
6479 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6480 lval = (__typeof__(lval)) _res; \
6483 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6484 arg7,arg8,arg9,arg10) \
6486 volatile OrigFn _orig = (orig); \
6487 volatile unsigned long _argvec[11]; \
6488 volatile unsigned long _res; \
6489 _argvec[0] = (unsigned long)_orig.nraddr; \
6490 _argvec[1] = (unsigned long)(arg1); \
6491 _argvec[2] = (unsigned long)(arg2); \
6492 _argvec[3] = (unsigned long)(arg3); \
6493 _argvec[4] = (unsigned long)(arg4); \
6494 _argvec[5] = (unsigned long)(arg5); \
6495 _argvec[6] = (unsigned long)(arg6); \
6496 _argvec[7] = (unsigned long)(arg7); \
6497 _argvec[8] = (unsigned long)(arg8); \
6498 _argvec[9] = (unsigned long)(arg9); \
6499 _argvec[10] = (unsigned long)(arg10); \
6501 "addi sp, sp, -8 \n\t" \
6502 "st_add sp, lr, -8 \n\t" \
6503 "move r29, %1 \n\t" \
6504 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6505 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6506 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6507 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6508 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6509 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6510 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6511 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6512 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6513 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6514 "ld_add r9, r29, 8 \n\t" /*arg10 -> r9 */ \
6515 VALGRIND_CALL_NOREDIR_R12 \
6516 "addi sp, sp, 8\n\t" \
6517 "ld_add lr, sp, 8 \n\t" \
6519 : /*out*/ "=r" (_res) \
6520 : /*in*/ "r" (&_argvec[0]) \
6521 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6522 lval = (__typeof__(lval)) _res; \
6525 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6526 arg6,arg7,arg8,arg9,arg10, \
6529 volatile OrigFn _orig = (orig); \
6530 volatile unsigned long _argvec[12]; \
6531 volatile unsigned long _res; \
6532 _argvec[0] = (unsigned long)_orig.nraddr; \
6533 _argvec[1] = (unsigned long)(arg1); \
6534 _argvec[2] = (unsigned long)(arg2); \
6535 _argvec[3] = (unsigned long)(arg3); \
6536 _argvec[4] = (unsigned long)(arg4); \
6537 _argvec[5] = (unsigned long)(arg5); \
6538 _argvec[6] = (unsigned long)(arg6); \
6539 _argvec[7] = (unsigned long)(arg7); \
6540 _argvec[8] = (unsigned long)(arg8); \
6541 _argvec[9] = (unsigned long)(arg9); \
6542 _argvec[10] = (unsigned long)(arg10); \
6543 _argvec[11] = (unsigned long)(arg11); \
6545 "addi sp, sp, -8 \n\t" \
6546 "st_add sp, lr, -8 \n\t" \
6547 "move r29, %1 \n\t" \
6548 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6549 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6550 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6551 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6552 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6553 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6554 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6555 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6556 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6557 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6558 "ld_add r9, r29, 8 \n\t" /*arg10 -> r9 */ \
6559 "ld r10, r29 \n\t" \
6560 "st_add sp, r10, -16 \n\t" \
6561 VALGRIND_CALL_NOREDIR_R12 \
6562 "addi sp, sp, 24 \n\t" \
6563 "ld_add lr, sp, 8 \n\t" \
6565 : /*out*/ "=r" (_res) \
6566 : /*in*/ "r" (&_argvec[0]) \
6567 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6568 lval = (__typeof__(lval)) _res; \
6571 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6572 arg6,arg7,arg8,arg9,arg10, \
6575 volatile OrigFn _orig = (orig); \
6576 volatile unsigned long _argvec[13]; \
6577 volatile unsigned long _res; \
6578 _argvec[0] = (unsigned long)_orig.nraddr; \
6579 _argvec[1] = (unsigned long)(arg1); \
6580 _argvec[2] = (unsigned long)(arg2); \
6581 _argvec[3] = (unsigned long)(arg3); \
6582 _argvec[4] = (unsigned long)(arg4); \
6583 _argvec[5] = (unsigned long)(arg5); \
6584 _argvec[6] = (unsigned long)(arg6); \
6585 _argvec[7] = (unsigned long)(arg7); \
6586 _argvec[8] = (unsigned long)(arg8); \
6587 _argvec[9] = (unsigned long)(arg9); \
6588 _argvec[10] = (unsigned long)(arg10); \
6589 _argvec[11] = (unsigned long)(arg11); \
6590 _argvec[12] = (unsigned long)(arg12); \
6592 "addi sp, sp, -8 \n\t" \
6593 "st_add sp, lr, -8 \n\t" \
6594 "move r29, %1 \n\t" \
6595 "ld_add r12, r29, 8 \n\t" /* target->r11 */ \
6596 "ld_add r0, r29, 8 \n\t" /*arg1 -> r0 */ \
6597 "ld_add r1, r29, 8 \n\t" /*arg2 -> r1 */ \
6598 "ld_add r2, r29, 8 \n\t" /*arg3 -> r2 */ \
6599 "ld_add r3, r29, 8 \n\t" /*arg4 -> r3 */ \
6600 "ld_add r4, r29, 8 \n\t" /*arg5 -> r4 */ \
6601 "ld_add r5, r29, 8 \n\t" /*arg6 -> r5 */ \
6602 "ld_add r6, r29, 8 \n\t" /*arg7 -> r6 */ \
6603 "ld_add r7, r29, 8 \n\t" /*arg8 -> r7 */ \
6604 "ld_add r8, r29, 8 \n\t" /*arg9 -> r8 */ \
6605 "ld_add r9, r29, 8 \n\t" /*arg10 -> r9 */ \
6606 "addi r28, sp, -8 \n\t" \
6607 "addi sp, sp, -24 \n\t" \
6608 "ld_add r10, r29, 8 \n\t" \
6609 "ld r11, r29 \n\t" \
6610 "st_add r28, r10, 8 \n\t" \
6611 "st r28, r11 \n\t" \
6612 VALGRIND_CALL_NOREDIR_R12 \
6613 "addi sp, sp, 32 \n\t" \
6614 "ld_add lr, sp, 8 \n\t" \
6616 : /*out*/ "=r" (_res) \
6617 : /*in*/ "r" (&_argvec[0]) \
6618 : /*trash*/ "memory", __CALLER_SAVED_REGS); \
6619 lval = (__typeof__(lval)) _res; \
6621 #endif /* PLAT_tilegx_linux */
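
/* Illustrative sketch only, not part of this header: how the CALL_FN_W_*
   macros above are typically used from a function wrapper, via the
   function-wrapping macros (I_WRAP_SONAME_FNNAME_ZU, VALGRIND_GET_ORIG_FN)
   defined earlier in this file.  The wrapped library and function names
   below are hypothetical; Zd in the encoded soname stands for '.'. */
#if 0
#include "valgrind.h"

/* Wrap foo() exported by the (hypothetical) shared object libfoo.so.0. */
int I_WRAP_SONAME_FNNAME_ZU(libfooZdsoZd0, foo)(int x)
{
   int    result;
   OrigFn fn;
   VALGRIND_GET_ORIG_FN(fn);      /* fetch the original entry point */
   VALGRIND_PRINTF("foo(%d) called\n", x);
   CALL_FN_W_W(result, fn, x);    /* call the real foo(x) without redirection */
   return result;
}
#endif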
/* ------------------------------------------------------------------ */
/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS.               */
/*                                                                    */
/* ------------------------------------------------------------------ */
/* Some request codes.  There are many more of these, but most are not
   exposed to end-user view.  These are the public ones, all of the
   form 0x1000 + small_number.

   Core ones are in the range 0x00000000--0x0000ffff.  The non-public
   ones start at 0x2000.
*/

/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))

/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE ORDER OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK     = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK  = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK       = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence they are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF           = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
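
/* Illustrative sketch only, not part of this header: skipping an expensive
   self-check when the program notices it is running under Valgrind.  The
   self-test function is hypothetical. */
#if 0
#include "valgrind.h"

extern void run_expensive_self_test(void);   /* hypothetical */

void maybe_run_self_test(void)
{
   if (RUNNING_ON_VALGRIND > 0) {
      /* Running under at least one Valgrind: far too slow, skip it. */
      return;
   }
   run_expensive_self_test();
}
#endif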
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,   \
                                   _qzz_addr, _qzz_len, 0, 0, 0)
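
/* Illustrative sketch only, not part of this header: a JIT that rewrites
   code in place tells Valgrind to drop any cached translations of the old
   bytes before jumping back in.  emit_code() is hypothetical. */
#if 0
#include <stddef.h>
#include "valgrind.h"

extern void emit_code(unsigned char *buf, size_t len);   /* hypothetical */

void patch_code(unsigned char *code_buf, size_t code_len)
{
   emit_code(code_buf, code_len);                      /* overwrite old code */
   VALGRIND_DISCARD_TRANSLATIONS(code_buf, code_len);  /* force retranslation */
}
#endif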
/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */

#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it.  */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,              \
                                    VG_USERREQ__CLIENT_CALL3,            \
                                    _qyy_fn,                             \
                                    _qyy_arg1, _qyy_arg2,                \
                                    _qyy_arg3, 0)
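
/* Illustrative sketch only, not part of this header: a function run on the
   real CPU via VALGRIND_NON_SIMD_CALL2.  Note the extra leading ThreadId
   argument; 'long' stands in here for the word-sized "Word" mentioned
   above. */
#if 0
#include "valgrind.h"

static long add_on_real_cpu(long tid, long a, long b)
{
   (void)tid;       /* the first argument is always the running ThreadId */
   return a + b;    /* keep it libc-free, as advised above */
}

long call_it(void)
{
   /* Evaluates to 0 (the default) when not running under Valgrind. */
   return (long)VALGRIND_NON_SIMD_CALL2(add_on_real_cpu, 40, 2);
}
#endif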
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
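
/* Illustrative sketch only, not part of this header: a regression test that
   checks no new tool errors were recorded across a code region.  The
   exercised function is hypothetical. */
#if 0
#include <assert.h>
#include "valgrind.h"

extern void exercise_code_under_test(void);   /* hypothetical */

void check_no_new_errors(void)
{
   unsigned before = VALGRIND_COUNT_ERRORS;
   exercise_code_under_test();
   assert(VALGRIND_COUNT_ERRORS == before);   /* no errors added by the test */
}
#endif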
6886 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6887 when heap blocks are allocated in order to give accurate results. This
6888 happens automatically for the standard allocator functions such as
   malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
   delete[], etc.

   But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
6895 Valgrind's observations will be at the mmap() level and it won't know that
6896 the chunks should be considered separate entities. In Memcheck's case,
6897 that means you probably won't get heap block overrun detection (because
6898 there won't be redzones marked as unaddressable) and you definitely won't
6899 get any leak detection.
6901 The following client requests allow a custom allocator to be annotated so
6902 that it can be handled accurately by Valgrind.
6904 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
   by a malloc()-like function.  For Memcheck (an illustrative case), this
   does two things:

   - It records that the block has been allocated.  This means any addresses
6909 within the block mentioned in error messages will be
6910 identified as belonging to the block. It also means that if the block
6911 isn't freed it will be detected by the leak checker.
6913 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6914 not set), or addressable and defined (if 'is_zeroed' is set). This
6915 controls how accesses to the block by the program are handled.
6917 'addr' is the start of the usable block (ie. after any
6918 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6919 can apply redzones -- these are blocks of padding at the start and end of
6920 each block. Adding redzones is recommended as it makes it much more likely
6921 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
   zeroed (or filled with another predictable value), as is the case for
   calloc().

   VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6926 heap block -- that will be used by the client program -- is allocated.
6927 It's best to put it at the outermost level of the allocator if possible;
6928 for example, if you have a function my_alloc() which calls
6929 internal_alloc(), and the client request is put inside internal_alloc(),
6930 stack traces relating to the heap block will contain entries for both
6931 my_alloc() and internal_alloc(), which is probably not what you want.
6933 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6934 custom blocks from within a heap block, B, that has been allocated with
6935 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6936 -- the custom blocks will take precedence.
6938 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6939 Memcheck, it does two things:
6941 - It records that the block has been deallocated. This assumes that the
6942 block was annotated as having been allocated via
6943 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6945 - It marks the block as being unaddressable.
6947 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6948 heap block is deallocated.
6950 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6951 Memcheck, it does four things:
6953 - It records that the size of a block has been changed. This assumes that
6954 the block was annotated as having been allocated via
6955 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
   - If the block shrank, it marks the freed memory as being unaddressable.
6959 - If the block grew, it marks the new area as undefined and defines a red
6960 zone past the end of the new block.
6962 - The V-bits of the overlap between the old and the new block are preserved.
6964 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6965 and before deallocation of the old block.
6967 In many cases, these three client requests will not be enough to get your
6968 allocator working well with Memcheck. More specifically, if your allocator
6969 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6970 will be necessary to mark the memory as addressable just before the zeroing
6971 occurs, otherwise you'll get a lot of invalid write errors. For example,
6972 you'll need to do this if your allocator recycles freed blocks, but it
6973 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6974 Alternatively, if your allocator reuses freed blocks for allocator-internal
6975 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6977 Really, what's happening is a blurring of the lines between the client
6978 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6979 memory should be considered unaddressable to the client program, but the
6980 allocator knows more than the rest of the client program and so may be able
6981 to safely access it. Extra client requests are necessary for Valgrind to
   understand the distinction between the allocator and the rest of the
   program.

   Ignored if addr == 0.
*/
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
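
/* Illustrative sketch only, not part of this header: annotating a trivial
   bump allocator carved out of one mmap()'d superblock.  REDZONE_B and the
   bump pointer are hypothetical; error handling is omitted. */
#if 0
#include <stddef.h>
#include "valgrind.h"

#define REDZONE_B 16            /* padding kept before and after each block */

extern unsigned char *superblock_cursor;   /* hypothetical bump pointer */

void *my_alloc(size_t n)
{
   unsigned char *p = superblock_cursor + REDZONE_B;
   superblock_cursor += n + 2 * REDZONE_B;
   /* Usable block starts after the redzone; memory is not pre-zeroed. */
   VALGRIND_MALLOCLIKE_BLOCK(p, n, REDZONE_B, /*is_zeroed*/0);
   return p;
}

void my_free(void *p)
{
   /* Tell tools the block is gone and mark it unaddressable again. */
   VALGRIND_FREELIKE_BLOCK(p, REDZONE_B);
}
#endif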
7005 /* Create a memory pool. */
7006 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
7007 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7008 pool, rzB, is_zeroed, 0, 0)
/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)
7015 /* Associate a piece of memory with a memory pool. */
7016 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
7017 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
7018 pool, addr, size, 0, 0)
7020 /* Disassociate a piece of memory from a memory pool. */
7021 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
7022 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
7023 pool, addr, 0, 0, 0)
7025 /* Disassociate any pieces outside a particular range. */
7026 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7027 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7028 pool, addr, size, 0, 0)
/* Tell a tool that the pool previously anchored at address poolA has
   moved to anchor address poolB. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)
7035 /* Resize and/or move a piece associated with a memory pool. */
7036 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7037 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7038 pool, addrA, addrB, size, 0)
/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                              \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                   \
                               VG_USERREQ__MEMPOOL_EXISTS,         \
                               pool, 0, 0, 0, 0)
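
/* Illustrative sketch only, not part of this header: describing a simple
   pool allocator with the mempool requests.  pool_reserve() and pool_carve()
   are hypothetical. */
#if 0
#include <stddef.h>
#include "valgrind.h"

extern void *pool_reserve(size_t nbytes);            /* hypothetical */
extern void *pool_carve(void *pool, size_t nbytes);  /* hypothetical */

void pool_example(void)
{
   void *pool_base = pool_reserve(1 << 20);
   VALGRIND_CREATE_MEMPOOL(pool_base, /*rzB*/0, /*is_zeroed*/0);

   void *obj = pool_carve(pool_base, 128);
   VALGRIND_MEMPOOL_ALLOC(pool_base, obj, 128);   /* obj now tracked */

   /* ... use obj ... */

   VALGRIND_MEMPOOL_FREE(pool_base, obj);
   VALGRIND_DESTROY_MEMPOOL(pool_base);
}
#endif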
7046 /* Mark a piece of memory as being a stack. Returns a stack id.
7047 start is the lowest addressable stack byte, end is the highest
7048 addressable stack byte. */
7049 #define VALGRIND_STACK_REGISTER(start, end) \
7050 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7051 VG_USERREQ__STACK_REGISTER, \
7052 start, end, 0, 0, 0)
/* Unmark the piece of memory associated with a stack id as being a
   stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)
7060 /* Change the start and end address of the stack id.
7061 start is the new lowest addressable stack byte, end is the new highest
7062 addressable stack byte. */
7063 #define VALGRIND_STACK_CHANGE(id, start, end) \
7064 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7065 id, start, end, 0, 0)
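
/* Illustrative sketch only, not part of this header: registering a
   heap-allocated stack for a coroutine/fiber so tools track switches onto
   it correctly.  The stack size is arbitrary. */
#if 0
#include <stdlib.h>
#include "valgrind.h"

#define FIBER_STACK_SIZE (64 * 1024)

unsigned create_fiber_stack(char **out_stack)
{
   char *stack = malloc(FIBER_STACK_SIZE);
   /* start = lowest addressable byte, end = highest addressable byte. */
   unsigned id = VALGRIND_STACK_REGISTER(stack, stack + FIBER_STACK_SIZE - 1);
   *out_stack = stack;
   return id;   /* pass to VALGRIND_STACK_DEREGISTER before freeing the stack */
}
#endif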
7067 /* Load PDB debug info for Wine PE image_map. */
7068 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7069 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7070 fd, ptr, total_size, delta, 0)
7072 /* Map a code address to a source file name and line number. buf64
7073 must point to a 64-byte buffer in the caller's address space. The
7074 result will be dumped in there and is guaranteed to be zero
7075 terminated. If no info is found, the first byte is set to zero. */
7076 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7077 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7078 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7079 addr, buf64, 0, 0, 0)
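
/* Illustrative sketch only, not part of this header: resolving a code
   address to "file:line" text, if debug info is available. */
#if 0
#include <stdio.h>
#include "valgrind.h"

void print_srcloc(void *code_addr)
{
   char buf64[64];                        /* must be exactly 64 bytes */
   VALGRIND_MAP_IP_TO_SRCLOC(code_addr, buf64);
   if (buf64[0] != 0)
      printf("%p is at %s\n", code_addr, buf64);
}
#endif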
7081 /* Disable error reporting for this thread. Behaves in a stack like
7082 way, so you can safely call this multiple times provided that
7083 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
7084 to re-enable reporting. The first call of this macro disables
7085 reporting. Subsequent calls have no effect except to increase the
7086 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
7087 reporting. Child threads do not inherit this setting from their
7088 parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING. */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
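
/* Illustrative sketch only, not part of this header: silencing reports from
   a third-party call known to trigger harmless errors.  The calls nest, so
   each DISABLE must be balanced by an ENABLE.  The library call is
   hypothetical. */
#if 0
#include "valgrind.h"

extern void noisy_third_party_init(void);   /* hypothetical */

void call_noisy_library(void)
{
   VALGRIND_DISABLE_ERROR_REPORTING;
   noisy_third_party_init();
   VALGRIND_ENABLE_ERROR_REPORTING;
}
#endif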
7099 /* Execute a monitor command from the client program.
7100 If a connection is opened with GDB, the output will be sent
7101 according to the output mode set for vgdb.
7102 If no connection is opened, output will go to the log output.
7103 Returns 1 if command not recognised, 0 otherwise. */
7104 #define VALGRIND_MONITOR_COMMAND(command) \
7105 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7106 command, 0, 0, 0, 0)
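
/* Illustrative sketch only, not part of this header: asking the tool to run
   a monitor command from inside the program.  "leak_check summary" is a
   Memcheck monitor command; other tools accept different commands. */
#if 0
#include "valgrind.h"

void leak_checkpoint(void)
{
   if (VALGRIND_MONITOR_COMMAND("leak_check summary"))
      VALGRIND_PRINTF("monitor command not recognised by this tool\n");
}
#endif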
7109 #undef PLAT_x86_darwin
7110 #undef PLAT_amd64_darwin
7111 #undef PLAT_x86_win32
7112 #undef PLAT_amd64_win64
7113 #undef PLAT_x86_linux
7114 #undef PLAT_amd64_linux
7115 #undef PLAT_ppc32_linux
7116 #undef PLAT_ppc64be_linux
7117 #undef PLAT_ppc64le_linux
7118 #undef PLAT_arm_linux
7119 #undef PLAT_s390x_linux
7120 #undef PLAT_mips32_linux
7121 #undef PLAT_mips64_linux
7122 #undef PLAT_tilegx_linux
7123 #undef PLAT_x86_solaris
7124 #undef PLAT_amd64_solaris
7126 #endif /* __VALGRIND_H */