Enable sys_adjtimex() on arm-linux. Fixes #412408.
[valgrind.git] / include / valgrind.h
blobc8b24a38e82ecccff9a77202ea4574dab3df06b5
1 /* -*- c -*-
2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
59 /* This file is for inclusion into client (your!) code.
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
81 /* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (eg)
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 16
95 #include <stdarg.h>
97 /* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
101 /* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_amd64_win64
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64be_linux
119 #undef PLAT_ppc64le_linux
120 #undef PLAT_arm_linux
121 #undef PLAT_arm64_linux
122 #undef PLAT_s390x_linux
123 #undef PLAT_mips32_linux
124 #undef PLAT_mips64_linux
125 #undef PLAT_nanomips_linux
126 #undef PLAT_x86_solaris
127 #undef PLAT_amd64_solaris
130 #if defined(__APPLE__) && defined(__i386__)
131 # define PLAT_x86_darwin 1
132 #elif defined(__APPLE__) && defined(__x86_64__)
133 # define PLAT_amd64_darwin 1
134 #elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
135 || defined(__CYGWIN32__) \
136 || (defined(_WIN32) && defined(_M_IX86))
137 # define PLAT_x86_win32 1
138 #elif defined(__MINGW64__) \
139 || (defined(_WIN64) && defined(_M_X64))
140 # define PLAT_amd64_win64 1
141 #elif defined(__linux__) && defined(__i386__)
142 # define PLAT_x86_linux 1
143 #elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
144 # define PLAT_amd64_linux 1
145 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
146 # define PLAT_ppc32_linux 1
147 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
148 /* Big Endian uses ELF version 1 */
149 # define PLAT_ppc64be_linux 1
150 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
151 /* Little Endian uses ELF version 2 */
152 # define PLAT_ppc64le_linux 1
153 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
154 # define PLAT_arm_linux 1
155 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
156 # define PLAT_arm64_linux 1
157 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
158 # define PLAT_s390x_linux 1
159 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
160 # define PLAT_mips64_linux 1
161 #elif defined(__linux__) && defined(__mips__) && (__mips==32)
162 # define PLAT_mips32_linux 1
163 #elif defined(__linux__) && defined(__nanomips__)
164 # define PLAT_nanomips_linux 1
165 #elif defined(__sun) && defined(__i386__)
166 # define PLAT_x86_solaris 1
167 #elif defined(__sun) && defined(__x86_64__)
168 # define PLAT_amd64_solaris 1
169 #else
170 /* If we're not compiling for our target platform, don't generate
171 any inline asms. */
172 # if !defined(NVALGRIND)
173 # define NVALGRIND 1
174 # endif
175 #endif
178 /* ------------------------------------------------------------------ */
179 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
180 /* in here of use to end-users -- skip to the next section. */
181 /* ------------------------------------------------------------------ */
184 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
185 * request. Accepts both pointers and integers as arguments.
187 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
188 * client request that does not return a value.
190 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
191 * client request and whose value equals the client request result. Accepts
192 * both pointers and integers as arguments. Note that such calls are not
193 * necessarily pure functions -- they may have side effects.
/* Statement form: evaluates the client request via
   VALGRIND_DO_CLIENT_REQUEST_EXPR and assigns the result to _zzq_rlval.
   When not running under Valgrind the EXPR form yields _zzq_default. */
196 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
197 _zzq_request, _zzq_arg1, _zzq_arg2, \
198 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
199 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
200 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
201 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
/* Statement form for requests whose result is not needed: issues the
   request with a dummy default of 0 and discards the returned value. */
203 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
204 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
205 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
206 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
207 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
209 #if defined(NVALGRIND)
211 /* Define NVALGRIND to completely remove the Valgrind magic sequence
212 from the compiled code (analogous to NDEBUG's effects on
213 assert()) */
/* NVALGRIND build: the whole request collapses to just the default value.
   Note the request code and argument expressions are NOT evaluated here,
   so any side effects in those arguments disappear under -DNVALGRIND. */
214 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
215 _zzq_default, _zzq_request, \
216 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
217 (_zzq_default)
219 #else /* ! NVALGRIND */
221 /* The following defines the magic code sequences which the JITter
222 spots and handles magically. Don't look too closely at them as
223 they will rot your brain.
225 The assembly code sequences for all architectures is in this one
226 file. This is because this file must be stand-alone, and we don't
227 want to have multiple files.
229 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
230 value gets put in the return slot, so that everything works when
231 this is executed not under Valgrind. Args are passed in a memory
232 block, and so there's no intrinsic limit to the number that could
233 be passed, but it's currently five.
235 The macro args are:
236 _zzq_rlval result lvalue
237 _zzq_default default value (result returned when running on real CPU)
238 _zzq_request request code
239 _zzq_arg1..5 request params
241 The other two macros are used to support function wrapping, and are
242 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
243 guest's NRADDR pseudo-register and whatever other information is
244 needed to safely run the call original from the wrapper: on
245 ppc64-linux, the R2 value at the divert point is also needed. This
246 information is abstracted into a user-visible type, OrigFn.
248 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
249 guest, but guarantees that the branch instruction will not be
250 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
251 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
252 complete inline asm, since it needs to be combined with more magic
253 inline asm stuff to be useful.
256 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
258 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
259 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
260 || defined(PLAT_x86_solaris)
262 typedef
263 struct {
264 unsigned int nraddr; /* where's the code? */
266 OrigFn;
/* x86 magic preamble: four rotates of %edi by 3+13+29+19 = 64 bits
   (== 0 mod 32), so architecturally a no-op, but recognized by the
   Valgrind JIT as the start of a special-instruction sequence. */
268 #define __SPECIAL_INSTRUCTION_PREAMBLE \
269 "roll $3, %%edi ; roll $13, %%edi\n\t" \
270 "roll $29, %%edi ; roll $19, %%edi\n\t"
272 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
273 _zzq_default, _zzq_request, \
274 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
275 __extension__ \
276 ({volatile unsigned int _zzq_args[6]; \
277 volatile unsigned int _zzq_result; \
278 _zzq_args[0] = (unsigned int)(_zzq_request); \
279 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
280 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
281 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
282 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
283 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
284 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
285 /* %EDX = client_request ( %EAX ) */ \
286 "xchgl %%ebx,%%ebx" \
287 : "=d" (_zzq_result) \
288 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
289 : "cc", "memory" \
290 ); \
291 _zzq_result; \
294 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
295 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
296 volatile unsigned int __addr; \
297 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
298 /* %EAX = guest_NRADDR */ \
299 "xchgl %%ecx,%%ecx" \
300 : "=a" (__addr) \
302 : "cc", "memory" \
303 ); \
304 _zzq_orig->nraddr = __addr; \
/* Asm TEXT fragment (not a complete asm statement — must be embedded in a
   larger inline asm): preamble plus the 'xchgl %edx,%edx' marker, meaning
   "call *%EAX without applying Valgrind function redirection". */
307 #define VALGRIND_CALL_NOREDIR_EAX \
308 __SPECIAL_INSTRUCTION_PREAMBLE \
309 /* call-noredir *%EAX */ \
310 "xchgl %%edx,%%edx\n\t"
/* Marker ('xchgl %edi,%edi' after the preamble) telling Valgrind's VEX to
   inject IR at this point; a harmless no-op when run natively. */
312 #define VALGRIND_VEX_INJECT_IR() \
313 do { \
314 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
315 "xchgl %%edi,%%edi\n\t" \
316 : : : "cc", "memory" \
317 ); \
318 } while (0)
320 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
321 || PLAT_x86_solaris */
323 /* ------------------------- x86-Win32 ------------------------- */
325 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
327 typedef
328 struct {
329 unsigned int nraddr; /* where's the code? */
331 OrigFn;
333 #if defined(_MSC_VER)
335 #define __SPECIAL_INSTRUCTION_PREAMBLE \
336 __asm rol edi, 3 __asm rol edi, 13 \
337 __asm rol edi, 29 __asm rol edi, 19
339 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
340 _zzq_default, _zzq_request, \
341 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
342 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
343 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
344 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
345 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
347 static __inline uintptr_t
348 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
349 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
350 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
351 uintptr_t _zzq_arg5)
353 volatile uintptr_t _zzq_args[6];
354 volatile unsigned int _zzq_result;
355 _zzq_args[0] = (uintptr_t)(_zzq_request);
356 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
357 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
358 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
359 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
360 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
361 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
362 __SPECIAL_INSTRUCTION_PREAMBLE
363 /* %EDX = client_request ( %EAX ) */
364 __asm xchg ebx,ebx
365 __asm mov _zzq_result, edx
367 return _zzq_result;
370 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
371 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
372 volatile unsigned int __addr; \
373 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
374 /* %EAX = guest_NRADDR */ \
375 __asm xchg ecx,ecx \
376 __asm mov __addr, eax \
378 _zzq_orig->nraddr = __addr; \
381 #define VALGRIND_CALL_NOREDIR_EAX ERROR
383 #define VALGRIND_VEX_INJECT_IR() \
384 do { \
385 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
386 __asm xchg edi,edi \
388 } while (0)
390 #else
391 #error Unsupported compiler.
392 #endif
394 #endif /* PLAT_x86_win32 */
396 /* ----------------- amd64-{linux,darwin,solaris} --------------- */
398 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
399 || defined(PLAT_amd64_solaris) \
400 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
402 typedef
403 struct {
404 unsigned long int nraddr; /* where's the code? */
406 OrigFn;
/* amd64 magic preamble: rotates of %rdi by 3+13+61+51 = 128 bits
   (== 0 mod 64) — architecturally a no-op, recognized by the JIT. */
408 #define __SPECIAL_INSTRUCTION_PREAMBLE \
409 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
410 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
412 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
413 _zzq_default, _zzq_request, \
414 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
415 __extension__ \
416 ({ volatile unsigned long int _zzq_args[6]; \
417 volatile unsigned long int _zzq_result; \
418 _zzq_args[0] = (unsigned long int)(_zzq_request); \
419 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
420 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
421 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
422 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
423 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
424 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
425 /* %RDX = client_request ( %RAX ) */ \
426 "xchgq %%rbx,%%rbx" \
427 : "=d" (_zzq_result) \
428 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
429 : "cc", "memory" \
430 ); \
431 _zzq_result; \
434 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
435 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
436 volatile unsigned long int __addr; \
437 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
438 /* %RAX = guest_NRADDR */ \
439 "xchgq %%rcx,%%rcx" \
440 : "=a" (__addr) \
442 : "cc", "memory" \
443 ); \
444 _zzq_orig->nraddr = __addr; \
/* Asm text fragment: preamble plus 'xchgq %rdx,%rdx', meaning
   "call *%RAX with no redirection"; must be embedded in a larger asm. */
447 #define VALGRIND_CALL_NOREDIR_RAX \
448 __SPECIAL_INSTRUCTION_PREAMBLE \
449 /* call-noredir *%RAX */ \
450 "xchgq %%rdx,%%rdx\n\t"
/* IR-injection marker for amd64 ('xchgq %rdi,%rdi'); no-op natively. */
452 #define VALGRIND_VEX_INJECT_IR() \
453 do { \
454 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
455 "xchgq %%rdi,%%rdi\n\t" \
456 : : : "cc", "memory" \
457 ); \
458 } while (0)
460 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
462 /* ------------------------- amd64-Win64 ------------------------- */
464 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
466 #error Unsupported compiler.
468 #endif /* PLAT_amd64_win64 */
470 /* ------------------------ ppc32-linux ------------------------ */
472 #if defined(PLAT_ppc32_linux)
474 typedef
475 struct {
476 unsigned int nraddr; /* where's the code? */
478 OrigFn;
/* ppc32 magic preamble: rlwinm rotate-lefts of r0 by 3+13+29+19 = 64 bits
   (== 0 mod 32) — a no-op sequence recognized by the JIT. */
480 #define __SPECIAL_INSTRUCTION_PREAMBLE \
481 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
482 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
484 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
485 _zzq_default, _zzq_request, \
486 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
488 __extension__ \
489 ({ unsigned int _zzq_args[6]; \
490 unsigned int _zzq_result; \
491 unsigned int* _zzq_ptr; \
492 _zzq_args[0] = (unsigned int)(_zzq_request); \
493 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
494 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
495 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
496 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
497 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
498 _zzq_ptr = _zzq_args; \
499 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
500 "mr 4,%2\n\t" /*ptr*/ \
501 __SPECIAL_INSTRUCTION_PREAMBLE \
502 /* %R3 = client_request ( %R4 ) */ \
503 "or 1,1,1\n\t" \
504 "mr %0,3" /*result*/ \
505 : "=b" (_zzq_result) \
506 : "b" (_zzq_default), "b" (_zzq_ptr) \
507 : "cc", "memory", "r3", "r4"); \
508 _zzq_result; \
511 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
512 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
513 unsigned int __addr; \
514 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
515 /* %R3 = guest_NRADDR */ \
516 "or 2,2,2\n\t" \
517 "mr %0,3" \
518 : "=b" (__addr) \
520 : "cc", "memory", "r3" \
521 ); \
522 _zzq_orig->nraddr = __addr; \
/* Asm text fragment: 'or 3,3,3' marker = branch-and-link to the address
   in R11 without redirection; embed in a larger inline asm. */
525 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
526 __SPECIAL_INSTRUCTION_PREAMBLE \
527 /* branch-and-link-to-noredir *%R11 */ \
528 "or 3,3,3\n\t"
/* IR-injection marker for ppc32 ('or 5,5,5'); no-op natively.
   NOTE(review): unlike the x86/amd64 variants this asm declares no
   "cc"/"memory" clobbers — matches upstream, but worth confirming. */
530 #define VALGRIND_VEX_INJECT_IR() \
531 do { \
532 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
533 "or 5,5,5\n\t" \
534 ); \
535 } while (0)
537 #endif /* PLAT_ppc32_linux */
539 /* ------------------------ ppc64-linux ------------------------ */
541 #if defined(PLAT_ppc64be_linux)
543 typedef
544 struct {
545 unsigned long int nraddr; /* where's the code? */
546 unsigned long int r2; /* what tocptr do we need? */
548 OrigFn;
/* ppc64 (BE) magic preamble: rotldi of r0 by 3+13+61+51 = 128 bits
   (== 0 mod 64) — architecturally a no-op, recognized by the JIT. */
550 #define __SPECIAL_INSTRUCTION_PREAMBLE \
551 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
552 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
554 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
555 _zzq_default, _zzq_request, \
556 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
558 __extension__ \
559 ({ unsigned long int _zzq_args[6]; \
560 unsigned long int _zzq_result; \
561 unsigned long int* _zzq_ptr; \
562 _zzq_args[0] = (unsigned long int)(_zzq_request); \
563 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
564 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
565 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
566 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
567 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
568 _zzq_ptr = _zzq_args; \
569 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
570 "mr 4,%2\n\t" /*ptr*/ \
571 __SPECIAL_INSTRUCTION_PREAMBLE \
572 /* %R3 = client_request ( %R4 ) */ \
573 "or 1,1,1\n\t" \
574 "mr %0,3" /*result*/ \
575 : "=b" (_zzq_result) \
576 : "b" (_zzq_default), "b" (_zzq_ptr) \
577 : "cc", "memory", "r3", "r4"); \
578 _zzq_result; \
581 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
582 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
583 unsigned long int __addr; \
584 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
585 /* %R3 = guest_NRADDR */ \
586 "or 2,2,2\n\t" \
587 "mr %0,3" \
588 : "=b" (__addr) \
590 : "cc", "memory", "r3" \
591 ); \
592 _zzq_orig->nraddr = __addr; \
593 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
594 /* %R3 = guest_NRADDR_GPR2 */ \
595 "or 4,4,4\n\t" \
596 "mr %0,3" \
597 : "=b" (__addr) \
599 : "cc", "memory", "r3" \
600 ); \
601 _zzq_orig->r2 = __addr; \
/* Asm text fragment: branch-and-link to *R11 with no redirection. */
604 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
605 __SPECIAL_INSTRUCTION_PREAMBLE \
606 /* branch-and-link-to-noredir *%R11 */ \
607 "or 3,3,3\n\t"
/* IR-injection marker for ppc64be ('or 5,5,5'); no-op natively. */
609 #define VALGRIND_VEX_INJECT_IR() \
610 do { \
611 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
612 "or 5,5,5\n\t" \
613 ); \
614 } while (0)
616 #endif /* PLAT_ppc64be_linux */
618 #if defined(PLAT_ppc64le_linux)
620 typedef
621 struct {
622 unsigned long int nraddr; /* where's the code? */
623 unsigned long int r2; /* what tocptr do we need? */
625 OrigFn;
/* ppc64 (LE) magic preamble: same no-op rotate sequence as ppc64be
   (3+13+61+51 = 128 == 0 mod 64). */
627 #define __SPECIAL_INSTRUCTION_PREAMBLE \
628 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
629 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
631 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
632 _zzq_default, _zzq_request, \
633 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
635 __extension__ \
636 ({ unsigned long int _zzq_args[6]; \
637 unsigned long int _zzq_result; \
638 unsigned long int* _zzq_ptr; \
639 _zzq_args[0] = (unsigned long int)(_zzq_request); \
640 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
641 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
642 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
643 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
644 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
645 _zzq_ptr = _zzq_args; \
646 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
647 "mr 4,%2\n\t" /*ptr*/ \
648 __SPECIAL_INSTRUCTION_PREAMBLE \
649 /* %R3 = client_request ( %R4 ) */ \
650 "or 1,1,1\n\t" \
651 "mr %0,3" /*result*/ \
652 : "=b" (_zzq_result) \
653 : "b" (_zzq_default), "b" (_zzq_ptr) \
654 : "cc", "memory", "r3", "r4"); \
655 _zzq_result; \
658 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
659 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
660 unsigned long int __addr; \
661 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
662 /* %R3 = guest_NRADDR */ \
663 "or 2,2,2\n\t" \
664 "mr %0,3" \
665 : "=b" (__addr) \
667 : "cc", "memory", "r3" \
668 ); \
669 _zzq_orig->nraddr = __addr; \
670 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
671 /* %R3 = guest_NRADDR_GPR2 */ \
672 "or 4,4,4\n\t" \
673 "mr %0,3" \
674 : "=b" (__addr) \
676 : "cc", "memory", "r3" \
677 ); \
678 _zzq_orig->r2 = __addr; \
/* Asm text fragment: branch-and-link to *R12 with no redirection
   (ELFv2 / little-endian uses R12 as the entry-point register). */
681 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
682 __SPECIAL_INSTRUCTION_PREAMBLE \
683 /* branch-and-link-to-noredir *%R12 */ \
684 "or 3,3,3\n\t"
/* IR-injection marker for ppc64le ('or 5,5,5'); no-op natively. */
686 #define VALGRIND_VEX_INJECT_IR() \
687 do { \
688 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
689 "or 5,5,5\n\t" \
690 ); \
691 } while (0)
693 #endif /* PLAT_ppc64le_linux */
695 /* ------------------------- arm-linux ------------------------- */
697 #if defined(PLAT_arm_linux)
699 typedef
700 struct {
701 unsigned int nraddr; /* where's the code? */
703 OrigFn;
/* arm magic preamble: rotates of r12 by 3+13+29+19 = 64 bits
   (== 0 mod 32) — a no-op sequence recognized by the JIT. */
705 #define __SPECIAL_INSTRUCTION_PREAMBLE \
706 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
707 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
709 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
710 _zzq_default, _zzq_request, \
711 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
713 __extension__ \
714 ({volatile unsigned int _zzq_args[6]; \
715 volatile unsigned int _zzq_result; \
716 _zzq_args[0] = (unsigned int)(_zzq_request); \
717 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
718 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
719 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
720 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
721 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
722 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
723 "mov r4, %2\n\t" /*ptr*/ \
724 __SPECIAL_INSTRUCTION_PREAMBLE \
725 /* R3 = client_request ( R4 ) */ \
726 "orr r10, r10, r10\n\t" \
727 "mov %0, r3" /*result*/ \
728 : "=r" (_zzq_result) \
729 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
730 : "cc","memory", "r3", "r4"); \
731 _zzq_result; \
734 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
735 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
736 unsigned int __addr; \
737 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
738 /* R3 = guest_NRADDR */ \
739 "orr r11, r11, r11\n\t" \
740 "mov %0, r3" \
741 : "=r" (__addr) \
743 : "cc", "memory", "r3" \
744 ); \
745 _zzq_orig->nraddr = __addr; \
/* Asm text fragment: 'orr r12,r12,r12' marker = branch-and-link to the
   address in R4 without redirection; embed in a larger inline asm. */
748 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
749 __SPECIAL_INSTRUCTION_PREAMBLE \
750 /* branch-and-link-to-noredir *%R4 */ \
751 "orr r12, r12, r12\n\t"
/* IR-injection marker for arm ('orr r9,r9,r9'); no-op natively. */
753 #define VALGRIND_VEX_INJECT_IR() \
754 do { \
755 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
756 "orr r9, r9, r9\n\t" \
757 : : : "cc", "memory" \
758 ); \
759 } while (0)
761 #endif /* PLAT_arm_linux */
763 /* ------------------------ arm64-linux ------------------------- */
765 #if defined(PLAT_arm64_linux)
767 typedef
768 struct {
769 unsigned long int nraddr; /* where's the code? */
771 OrigFn;
/* arm64 magic preamble: rotates of x12 by 3+13+51+61 = 128 bits
   (== 0 mod 64) — architecturally a no-op, recognized by the JIT. */
773 #define __SPECIAL_INSTRUCTION_PREAMBLE \
774 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
775 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
777 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
778 _zzq_default, _zzq_request, \
779 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
781 __extension__ \
782 ({volatile unsigned long int _zzq_args[6]; \
783 volatile unsigned long int _zzq_result; \
784 _zzq_args[0] = (unsigned long int)(_zzq_request); \
785 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
786 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
787 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
788 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
789 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
790 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
791 "mov x4, %2\n\t" /*ptr*/ \
792 __SPECIAL_INSTRUCTION_PREAMBLE \
793 /* X3 = client_request ( X4 ) */ \
794 "orr x10, x10, x10\n\t" \
795 "mov %0, x3" /*result*/ \
796 : "=r" (_zzq_result) \
797 : "r" ((unsigned long int)(_zzq_default)), \
798 "r" (&_zzq_args[0]) \
799 : "cc","memory", "x3", "x4"); \
800 _zzq_result; \
803 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
804 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
805 unsigned long int __addr; \
806 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
807 /* X3 = guest_NRADDR */ \
808 "orr x11, x11, x11\n\t" \
809 "mov %0, x3" \
810 : "=r" (__addr) \
812 : "cc", "memory", "x3" \
813 ); \
814 _zzq_orig->nraddr = __addr; \
/* Asm text fragment: branch-and-link to the address in X8 without
   redirection ('orr x12,x12,x12' marker). */
817 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
818 __SPECIAL_INSTRUCTION_PREAMBLE \
819 /* branch-and-link-to-noredir X8 */ \
820 "orr x12, x12, x12\n\t"
/* IR-injection marker for arm64 ('orr x9,x9,x9'); no-op natively. */
822 #define VALGRIND_VEX_INJECT_IR() \
823 do { \
824 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
825 "orr x9, x9, x9\n\t" \
826 : : : "cc", "memory" \
827 ); \
828 } while (0)
830 #endif /* PLAT_arm64_linux */
832 /* ------------------------ s390x-linux ------------------------ */
834 #if defined(PLAT_s390x_linux)
836 typedef
837 struct {
838 unsigned long int nraddr; /* where's the code? */
840 OrigFn;
842 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
843 * code. This detection is implemented in platform specific toIR.c
844 (e.g. VEX/priv/guest_s390_decoder.c). */
/* s390x magic preamble: four 'lr N,N' register self-moves (no-ops)
   recognized by the decoder as the start of a special sequence. */
846 #define __SPECIAL_INSTRUCTION_PREAMBLE \
847 "lr 15,15\n\t" \
848 "lr 1,1\n\t" \
849 "lr 2,2\n\t" \
850 "lr 3,3\n\t"
/* Per-operation marker instructions that follow the preamble and select
   which special action (request / NRADDR / no-redir call / IR inject). */
852 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
853 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
854 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
855 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
857 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
858 _zzq_default, _zzq_request, \
859 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
860 __extension__ \
861 ({volatile unsigned long int _zzq_args[6]; \
862 volatile unsigned long int _zzq_result; \
863 _zzq_args[0] = (unsigned long int)(_zzq_request); \
864 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
865 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
866 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
867 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
868 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
869 __asm__ volatile(/* r2 = args */ \
870 "lgr 2,%1\n\t" \
871 /* r3 = default */ \
872 "lgr 3,%2\n\t" \
873 __SPECIAL_INSTRUCTION_PREAMBLE \
874 __CLIENT_REQUEST_CODE \
875 /* results = r3 */ \
876 "lgr %0, 3\n\t" \
877 : "=d" (_zzq_result) \
878 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
879 : "cc", "2", "3", "memory" \
880 ); \
881 _zzq_result; \
884 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
885 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
886 volatile unsigned long int __addr; \
887 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
888 __GET_NR_CONTEXT_CODE \
889 "lgr %0, 3\n\t" \
890 : "=a" (__addr) \
892 : "cc", "3", "memory" \
893 ); \
894 _zzq_orig->nraddr = __addr; \
/* Asm text fragment: preamble plus the no-redir-call marker; call via R1
   without Valgrind redirection. Embed in a larger inline asm. */
897 #define VALGRIND_CALL_NOREDIR_R1 \
898 __SPECIAL_INSTRUCTION_PREAMBLE \
899 __CALL_NO_REDIR_CODE
/* IR-injection marker for s390x; no-op natively. */
901 #define VALGRIND_VEX_INJECT_IR() \
902 do { \
903 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
904 __VEX_INJECT_IR_CODE); \
905 } while (0)
907 #endif /* PLAT_s390x_linux */
909 /* ------------------------- mips32-linux ---------------- */
911 #if defined(PLAT_mips32_linux)
913 typedef
914 struct {
915 unsigned int nraddr; /* where's the code? */
917 OrigFn;
919 /* .word 0x342
920 * .word 0x742
921 * .word 0xC2
922 * .word 0x4C2*/
/* mips32 magic preamble: shifts of $0 ($zero) — writes to $zero are
   discarded, so these are no-ops recognized by the JIT. */
923 #define __SPECIAL_INSTRUCTION_PREAMBLE \
924 "srl $0, $0, 13\n\t" \
925 "srl $0, $0, 29\n\t" \
926 "srl $0, $0, 3\n\t" \
927 "srl $0, $0, 19\n\t"
929 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
930 _zzq_default, _zzq_request, \
931 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
932 __extension__ \
933 ({ volatile unsigned int _zzq_args[6]; \
934 volatile unsigned int _zzq_result; \
935 _zzq_args[0] = (unsigned int)(_zzq_request); \
936 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
937 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
938 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
939 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
940 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
941 __asm__ volatile("move $11, %1\n\t" /*default*/ \
942 "move $12, %2\n\t" /*ptr*/ \
943 __SPECIAL_INSTRUCTION_PREAMBLE \
944 /* T3 = client_request ( T4 ) */ \
945 "or $13, $13, $13\n\t" \
946 "move %0, $11\n\t" /*result*/ \
947 : "=r" (_zzq_result) \
948 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
949 : "$11", "$12", "memory"); \
950 _zzq_result; \
953 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
954 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
955 volatile unsigned int __addr; \
956 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
957 /* %t9 = guest_NRADDR */ \
958 "or $14, $14, $14\n\t" \
959 "move %0, $11" /*result*/ \
960 : "=r" (__addr) \
962 : "$11" \
963 ); \
964 _zzq_orig->nraddr = __addr; \
/* Asm text fragment: call via $t9 without redirection ('or $15,$15,$15'
   marker); embed in a larger inline asm. */
967 #define VALGRIND_CALL_NOREDIR_T9 \
968 __SPECIAL_INSTRUCTION_PREAMBLE \
969 /* call-noredir *%t9 */ \
970 "or $15, $15, $15\n\t"
/* IR-injection marker for mips32 ('or $11,$11,$11'); no-op natively. */
972 #define VALGRIND_VEX_INJECT_IR() \
973 do { \
974 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
975 "or $11, $11, $11\n\t" \
976 ); \
977 } while (0)
980 #endif /* PLAT_mips32_linux */
982 /* ------------------------- mips64-linux ---------------- */
984 #if defined(PLAT_mips64_linux)
986 typedef
987 struct {
988 unsigned long nraddr; /* where's the code? */
990 OrigFn;
992 /* dsll $0,$0, 3
993 * dsll $0,$0, 13
994 * dsll $0,$0, 29
995 * dsll $0,$0, 19*/
/* mips64 magic preamble: dsll shifts of $0 ($zero) — results discarded,
   so no-ops; recognized by the JIT as the special-sequence start. */
996 #define __SPECIAL_INSTRUCTION_PREAMBLE \
997 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
998 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1000 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
1001 _zzq_default, _zzq_request, \
1002 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
1003 __extension__ \
1004 ({ volatile unsigned long int _zzq_args[6]; \
1005 volatile unsigned long int _zzq_result; \
1006 _zzq_args[0] = (unsigned long int)(_zzq_request); \
1007 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
1008 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
1009 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
1010 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
1011 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
1012 __asm__ volatile("move $11, %1\n\t" /*default*/ \
1013 "move $12, %2\n\t" /*ptr*/ \
1014 __SPECIAL_INSTRUCTION_PREAMBLE \
1015 /* $11 = client_request ( $12 ) */ \
1016 "or $13, $13, $13\n\t" \
1017 "move %0, $11\n\t" /*result*/ \
1018 : "=r" (_zzq_result) \
1019 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
1020 : "$11", "$12", "memory"); \
1021 _zzq_result; \
1024 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
1025 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
1026 volatile unsigned long int __addr; \
1027 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1028 /* $11 = guest_NRADDR */ \
1029 "or $14, $14, $14\n\t" \
1030 "move %0, $11" /*result*/ \
1031 : "=r" (__addr) \
1033 : "$11"); \
1034 _zzq_orig->nraddr = __addr; \
1037 #define VALGRIND_CALL_NOREDIR_T9 \
1038 __SPECIAL_INSTRUCTION_PREAMBLE \
1039 /* call-noredir $25 */ \
1040 "or $15, $15, $15\n\t"
1042 #define VALGRIND_VEX_INJECT_IR() \
1043 do { \
1044 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
1045 "or $11, $11, $11\n\t" \
1046 ); \
1047 } while (0)
1049 #endif /* PLAT_mips64_linux */
1051 #if defined(PLAT_nanomips_linux)
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/*
   8000 c04d  srl  zero, zero, 13
   8000 c05d  srl  zero, zero, 29
   8000 c043  srl  zero, zero, 3
   8000 c053  srl  zero, zero, 19
*/

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3  \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"

/* Client-request dispatch for nanomips-linux: default in $a7, pointer
   to the packed args in $t0; "or[32] $t0,$t0,$t0" is the request
   marker and the result comes back in $a7. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $a7, %1\n\t" /* default */             \
                     "move $t0, %2\n\t" /* ptr */                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = client_request( $t0 ) */            \
                     "or[32] $t0, $t0, $t0\n\t"                   \
                     "move %0, $a7\n\t" /* result */              \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$a7", "$t0", "memory");                   \
    _zzq_result;                                                  \
  })

/* Fetch guest_NRADDR into an OrigFn.  $a7 is a clobber, not an
   input, hence the empty input-operand list. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = guest_NRADDR */                     \
                     "or[32] $t1, $t1, $t1\n\t"                   \
                     "move %0, $a7" /*result*/                    \
                     : "=r" (__addr)                              \
                     : /* no inputs */                            \
                     : "$a7");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Marker sequence for a no-redirect call through $25 ($t9). */
#define VALGRIND_CALL_NOREDIR_T9                                  \
     __SPECIAL_INSTRUCTION_PREAMBLE                               \
     /* call-noredir $25 */                                       \
     "or[32] $t2, $t2, $t2\n\t"

/* Marker sequence asking the JIT to inject IR. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or[32] $t3, $t3, $t3\n\t"                   \
                    );                                            \
 } while (0)
1119 #endif
1120 /* Insert assembly code for other platforms here... */
1122 #endif /* NVALGRIND */
1125 /* ------------------------------------------------------------------ */
1126 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1127 /* ugly. It's the least-worst tradeoff I can think of. */
1128 /* ------------------------------------------------------------------ */
1130 /* This section defines magic (a.k.a appalling-hack) macros for doing
1131 guaranteed-no-redirection macros, so as to get from function
1132 wrappers to the functions they are wrapping. The whole point is to
1133 construct standard call sequences, but to do the call itself with a
1134 special no-redirect call pseudo-instruction that the JIT
1135 understands and handles specially. This section is long and
1136 repetitious, and I can't see a way to make it shorter.
1138 The naming scheme is as follows:
1140 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1142 'W' stands for "word" and 'v' for "void". Hence there are
1143 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1144 and for each, the possibility of returning a word-typed result, or
   no result.
*/
1148 /* Use these to write the name of your wrapper. NOTE: duplicates
1149 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
1150 the default behaviour equivalance class tag "0000" into the name.
1151 See pub_tool_redir.h for details -- normally you don't need to
1152 think about this, though. */
1154 /* Use an extra level of macroisation so as to ensure the soname/fnname
1155 args are fully macro-expanded before pasting them together. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping.  A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it.  In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)

/* Derivatives of the main macros below, for calling functions
   returning void.  Each discards the word result into _junk. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1217 /* ----------------- x86-{linux,darwin,solaris} ---------------- */
1219 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1220 || defined(PLAT_x86_solaris)
/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  The old %esp is stashed in %edi. */

#define VALGRIND_ALIGN_STACK               \
      "movl %%esp,%%edi\n\t"               \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movl %%edi,%%esp\n\t"

/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4. */
/* In each CALL_FN_W_* below, %eax arrives pointing at _argvec;
   args are pushed right-to-left from _argvec[1..n], an initial
   "subl" pads the stack so the pad plus the pushed args keep %esp
   16-aligned at the call, and the target address is loaded from
   _argvec[0] before the no-redirect call. */

#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1647 #endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1649 /* ---------------- amd64-{linux,darwin,solaris} --------------- */
1651 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1652 || defined(PLAT_amd64_solaris)
/* ARGREGS:  rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1660 /* This is all pretty complex. It's so as to make stack unwinding
1661 work reliably. See bug 243270. The basic problem is the sub and
1662 add of 128 of %rsp in all of the following macros. If gcc believes
1663 the CFA is in %rsp, then unwinding may fail, because what's at the
1664 CFA is not what gcc "expected" when it constructs the CFIs for the
1665 places where the macros are instantiated.
1667 But we can't just add a CFI annotation to increase the CFA offset
1668 by 128, to match the sub of 128 from %rsp, because we don't know
1669 whether gcc has chosen %rsp as the CFA at that point, or whether it
1670 has chosen some other register (eg, %rbp). In the latter case,
1671 adding a CFI annotation to change the CFA offset is simply wrong.
1673 So the solution is to get hold of the CFA using
1674 __builtin_dwarf_cfa(), put it in a known register, and add a
1675 CFI annotation to say what the register is. We choose %rbp for
1676 this (perhaps perversely), because:
1678 (1) %rbp is already subject to unwinding. If a new register was
1679 chosen then the unwinder would have to unwind it in all stack
1680 traces, which is expensive, and
1682 (2) %rbp is already subject to precise exception updates in the
1683 JIT. If a new register was chosen, we'd have to have precise
1684 exceptions for it too, which reduces performance of the
1685 generated code.
1687 However .. one extra complication. We can't just whack the result
1688 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1689 list of trashed registers at the end of the inline assembly
1690 fragments; gcc won't allow %rbp to appear in that list. Hence
1691 instead we need to stash %rbp in %r15 for the duration of the asm,
1692 and say that %r15 is trashed instead. gcc seems happy to go with
1693 that.
1695 Oh .. and this all needs to be conditionalised so that it is
1696 unchanged from before this commit, when compiled with older gccs
1697 that don't support __builtin_dwarf_cfa. Furthermore, since
1698 this header file is freestanding, it has to be independent of
1699 config.h, and so the following conditionalisation cannot depend on
1700 configure time checks.
1702 Although it's not clear from
1703 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1704 this expression excludes Darwin.
1705 .cfi directives in Darwin assembly appear to be completely
1706 different and I haven't investigated how they work.
1708 For even more entertainment value, note we have to use the
1709 completely undocumented __builtin_dwarf_cfa(), which appears to
1710 really compute the CFA, whereas __builtin_frame_address(0) claims
1711 to but actually doesn't. See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Pass the CFA (from __builtin_dwarf_cfa()) as an extra asm input;
   the prologue stashes %rbp in %r15, points %rbp at the CFA and tells
   the unwinder so via .cfi_def_cfa; the epilogue undoes both. */
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  The old %rsp is stashed in %r14. */

#define VALGRIND_ALIGN_STACK               \
      "movq %%rsp,%%r14\n\t"               \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movq %%r14,%%rsp\n\t"
1742 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1743 long) == 8. */
1745 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1746 macros. In order not to trash the stack redzone, we need to drop
1747 %rsp by 128 before the hidden call, and restore afterwards. The
1748 nastyness is that it is only by luck that the stack still appears
1749 to be unwindable during the hidden call - since then the behaviour
1750 of any routine using this macro does not match what the CFI data
1751 says. Sigh.
1753 Why is this important? Imagine that a wrapper has a stack
1754 allocated local, and passes to the hidden call, a pointer to it.
1755 Because gcc does not know about the hidden call, it may allocate
1756 that local in the redzone. Unfortunately the hidden call may then
1757 trash it before it comes to use it. So we must step clear of the
1758 redzone, for the duration of the hidden call, to make it safe.
1760 Probably the same problem afflicts the other redzone-style ABIs too
1761 (ppc64-linux); but for those, the stack is
1762 self describing (none of this CFI nonsense) so at least messing
1763 with the stack pointer doesn't give a danger of non-unwindable
1764 stack. */
1766 #define CALL_FN_W_v(lval, orig) \
1767 do { \
1768 volatile OrigFn _orig = (orig); \
1769 volatile unsigned long _argvec[1]; \
1770 volatile unsigned long _res; \
1771 _argvec[0] = (unsigned long)_orig.nraddr; \
1772 __asm__ volatile( \
1773 VALGRIND_CFI_PROLOGUE \
1774 VALGRIND_ALIGN_STACK \
1775 "subq $128,%%rsp\n\t" \
1776 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1777 VALGRIND_CALL_NOREDIR_RAX \
1778 VALGRIND_RESTORE_STACK \
1779 VALGRIND_CFI_EPILOGUE \
1780 : /*out*/ "=a" (_res) \
1781 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1782 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1783 ); \
1784 lval = (__typeof__(lval)) _res; \
1785 } while (0)
1787 #define CALL_FN_W_W(lval, orig, arg1) \
1788 do { \
1789 volatile OrigFn _orig = (orig); \
1790 volatile unsigned long _argvec[2]; \
1791 volatile unsigned long _res; \
1792 _argvec[0] = (unsigned long)_orig.nraddr; \
1793 _argvec[1] = (unsigned long)(arg1); \
1794 __asm__ volatile( \
1795 VALGRIND_CFI_PROLOGUE \
1796 VALGRIND_ALIGN_STACK \
1797 "subq $128,%%rsp\n\t" \
1798 "movq 8(%%rax), %%rdi\n\t" \
1799 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1800 VALGRIND_CALL_NOREDIR_RAX \
1801 VALGRIND_RESTORE_STACK \
1802 VALGRIND_CFI_EPILOGUE \
1803 : /*out*/ "=a" (_res) \
1804 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1805 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1806 ); \
1807 lval = (__typeof__(lval)) _res; \
1808 } while (0)
1810 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1811 do { \
1812 volatile OrigFn _orig = (orig); \
1813 volatile unsigned long _argvec[3]; \
1814 volatile unsigned long _res; \
1815 _argvec[0] = (unsigned long)_orig.nraddr; \
1816 _argvec[1] = (unsigned long)(arg1); \
1817 _argvec[2] = (unsigned long)(arg2); \
1818 __asm__ volatile( \
1819 VALGRIND_CFI_PROLOGUE \
1820 VALGRIND_ALIGN_STACK \
1821 "subq $128,%%rsp\n\t" \
1822 "movq 16(%%rax), %%rsi\n\t" \
1823 "movq 8(%%rax), %%rdi\n\t" \
1824 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1825 VALGRIND_CALL_NOREDIR_RAX \
1826 VALGRIND_RESTORE_STACK \
1827 VALGRIND_CFI_EPILOGUE \
1828 : /*out*/ "=a" (_res) \
1829 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1830 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1831 ); \
1832 lval = (__typeof__(lval)) _res; \
1833 } while (0)
1835 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1836 do { \
1837 volatile OrigFn _orig = (orig); \
1838 volatile unsigned long _argvec[4]; \
1839 volatile unsigned long _res; \
1840 _argvec[0] = (unsigned long)_orig.nraddr; \
1841 _argvec[1] = (unsigned long)(arg1); \
1842 _argvec[2] = (unsigned long)(arg2); \
1843 _argvec[3] = (unsigned long)(arg3); \
1844 __asm__ volatile( \
1845 VALGRIND_CFI_PROLOGUE \
1846 VALGRIND_ALIGN_STACK \
1847 "subq $128,%%rsp\n\t" \
1848 "movq 24(%%rax), %%rdx\n\t" \
1849 "movq 16(%%rax), %%rsi\n\t" \
1850 "movq 8(%%rax), %%rdi\n\t" \
1851 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1852 VALGRIND_CALL_NOREDIR_RAX \
1853 VALGRIND_RESTORE_STACK \
1854 VALGRIND_CFI_EPILOGUE \
1855 : /*out*/ "=a" (_res) \
1856 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1857 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1858 ); \
1859 lval = (__typeof__(lval)) _res; \
1860 } while (0)
/* 4-arg hidden call: args in rdi,rsi,rdx,rcx; result -> lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* skip the red zone */ \
         "movq 32(%%rax), %%rcx\n\t" /* arg4->%rcx */ \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 5-arg hidden call: args in rdi,rsi,rdx,rcx,r8; result -> lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* skip the red zone */ \
         "movq 40(%%rax), %%r8\n\t" /* arg5->%r8 */ \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 6-arg hidden call: args in rdi,rsi,rdx,rcx,r8,r9 -- all of the
   ABI's integer argument registers; nothing on the stack yet. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* skip the red zone */ \
         "movq 48(%%rax), %%r9\n\t" /* arg6->%r9 */ \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 7-arg hidden call: args1..6 in registers, arg7 pushed on the stack.
   $136 rather than $128 here: red zone (128) plus 8 so that after the
   single (odd) pushq below, %rsp is again 16-aligned at the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 56(%%rax)\n\t" /* arg7 -> stack */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 8-arg hidden call: args7..8 pushed right-to-left on the stack.
   Even number of pushes, so plain $128 keeps 16-alignment. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 9-arg hidden call: args7..9 on the stack; $136 compensates for the
   odd number (3) of pushes so the call site stays 16-aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 10-arg hidden call: args7..10 on the stack (4 pushes, even). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 80(%%rax)\n\t" /* arg10 */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 11-arg hidden call: args7..11 on the stack ($136 for the odd, 5,
   pushes, preserving 16-alignment at the call). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 88(%%rax)\n\t" /* arg11 */ \
         "pushq 80(%%rax)\n\t" /* arg10 */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 12-arg hidden call: args7..12 on the stack (6 pushes, even). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 96(%%rax)\n\t" /* arg12 */ \
         "pushq 88(%%rax)\n\t" /* arg11 */ \
         "pushq 80(%%rax)\n\t" /* arg10 */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2201 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2203 /* ------------------------ ppc32-linux ------------------------ */
2205 #if defined(PLAT_ppc32_linux)
/* This is useful for finding out about the on-stack stuff:

   extern int f9  ( int,int,int,int,int,int,int,int,int );
   extern int f10 ( int,int,int,int,int,int,int,int,int,int );
   extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
   extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );

   int g9 ( void ) {
      return f9(11,22,33,44,55,66,77,88,99);
   }
   int g10 ( void ) {
      return f10(11,22,33,44,55,66,77,88,99,110);
   }
   int g11 ( void ) {
      return f11(11,22,33,44,55,66,77,88,99,110,121);
   }
   int g12 ( void ) {
      return f12(11,22,33,44,55,66,77,88,99,110,121,132);
   }
*/
2228 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2230 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden (non-redirected) call on
   ppc32-linux, listed for the asm "trash" section of CALL_FN_*. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 (SP) in r28, then clear the low 4 bits of r1
   (rlwinm keeps bits 0..27), forcing 16-byte alignment. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rlwinm 1,1,0,0,27\n\t"
/* Put the saved SP back after the call. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2248 /* These CALL_FN_ macros assume that on ppc32-linux,
2249 sizeof(unsigned long) == 4. */
/* 0-arg call through the non-redirected address in 'orig';
   the result (r3) goes in 'lval'.  r11 carries &_argvec[0]. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result r3 -> %0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 1-arg hidden call: arg1 -> r3; result r3 -> lval. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 2-arg hidden call: args in r3,r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 3-arg hidden call: args in r3,r4,r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 4-arg hidden call: args in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 5-arg hidden call: args in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 6-arg hidden call: args in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 7-arg hidden call: args in r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 8-arg hidden call: args fill all argument registers r3..r10. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 9-arg hidden call: args1..8 in r3..r10; arg9 is stored in the
   stack parameter area (first stack-arg slot is 8(1) after the
   16-byte addi below).  r3 is used as a scratch for the store
   before being loaded with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 10-arg hidden call: args9..10 stored at 8(1)/12(1) on the stack. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 11-arg hidden call: args9..11 on the stack; a 32-byte frame
   extension is needed for the third stack slot. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 12-arg hidden call: args9..12 on the stack at 8(1)..20(1). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg12 */ \
         "lwz 3,48(11)\n\t" \
         "stw 3,20(1)\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2701 #endif /* PLAT_ppc32_linux */
2703 /* ------------------------ ppc64-linux ------------------------ */
2705 #if defined(PLAT_ppc64be_linux)
2707 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2709 /* These regs are trashed by the hidden call. */
/* Registers clobbered by the hidden call on ppc64be-linux.  Note r2
   (TOC pointer) is NOT listed: the CALL_FN_ macros below save and
   restore it explicitly. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 (SP) in r28, then clear the low 4 bits of r1
   (rldicr keeps bits 0..59), forcing 16-byte alignment. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
/* Put the saved SP back after the call. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2727 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2728 long) == 8. */
/* 0-arg call on ppc64be.  r11 carries &_argvec[2], so -16(11) is
   &_argvec[0] (slot saving the caller's r2/TOC across the call) and
   -8(11) is &_argvec[1] (the callee's TOC from _orig.r2). */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 1-arg hidden call on ppc64be: arg1 -> r3; TOC handled as in
   CALL_FN_W_v. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 2-arg hidden call on ppc64be: args in r3,r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* 3-arg hidden call on ppc64be: args in r3,r4,r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn        _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t" /* save tocptr */ \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
         "ld 3, 8(11)\n\t" /* arg1->r3 */ \
         "ld 4, 16(11)\n\t" /* arg2->r4 */ \
         "ld 5, 24(11)\n\t" /* arg3->r5 */ \
         "ld 11, 0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* ppc64be: call a 4-arg word-returning function through the
   no-redirect gateway.  arg1..arg4 go to r3..r6; r2 is swapped to
   the callee's TOC ptr (at nraddr-8) and restored afterwards. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call a 5-arg word-returning function through the
   no-redirect gateway.  arg1..arg5 go to r3..r7; r2 is swapped to
   the callee's TOC ptr (at nraddr-8) and restored afterwards. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call a 6-arg word-returning function through the
   no-redirect gateway.  arg1..arg6 go to r3..r8; r2 is swapped to
   the callee's TOC ptr (at nraddr-8) and restored afterwards. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call a 7-arg word-returning function through the
   no-redirect gateway.  arg1..arg7 go to r3..r9; r2 is swapped to
   the callee's TOC ptr (at nraddr-8) and restored afterwards. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call an 8-arg word-returning function through the
   no-redirect gateway.  arg1..arg8 fill the full register-argument
   set r3..r10; r2 is swapped to the callee's TOC ptr (at nraddr-8)
   and restored afterwards. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call a 9-arg word-returning function through the
   no-redirect gateway.  arg1..arg8 go to r3..r10; the stack frame
   is extended by 128 bytes and arg9 is stored into it (at 112(r1))
   before the register args are loaded (r3 is used as scratch, so
   the stores must precede the "args1-8" loads). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call a 10-arg word-returning function through the
   no-redirect gateway.  arg1..arg8 go to r3..r10; the frame is
   extended by 128 bytes and arg9/arg10 are stored at 112(r1) and
   120(r1) (using r3 as scratch) before the register args load. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call an 11-arg word-returning function through the
   no-redirect gateway.  arg1..arg8 go to r3..r10; the frame is
   extended by 144 bytes and arg9..arg11 are stored at 112/120/128
   (r1) using r3 as scratch before the register args load. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64be: call a 12-arg word-returning function through the
   no-redirect gateway.  arg1..arg8 go to r3..r10; the frame is
   extended by 144 bytes and arg9..arg12 are stored at
   112/120/128/136(r1) using r3 as scratch before the register
   args load. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t" /* save tocptr */                    \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t" /* target->r11 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3258 #endif /* PLAT_ppc64be_linux */
3260 /* ------------------------- ppc64le-linux ----------------------- */
3261 #if defined(PLAT_ppc64le_linux)
3263 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3265 /* These regs are trashed by the hidden call. */
/* Clobber list for the asm blocks below: everything the hidden call
   may trash.  r2 is not listed because each macro saves/restores it
   explicitly; "r28" (where VALGRIND_ALIGN_STACK parks the SP) is
   appended separately at every call site. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
3272 /* Macros to save and align the stack before making a function
3273 call and restore it afterwards as gcc may not keep the stack
3274 pointer aligned if it doesn't realise calls are being made
3275 to other functions. */
/* Save the current stack pointer in r28, then round r1 down to a
   16-byte boundary (rldicr ...,0,59 keeps bits 0..59, clearing the
   low four bits). */
#define VALGRIND_ALIGN_STACK \
      "mr 28,1\n\t" \
      "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the stack pointer from r28. */
#define VALGRIND_RESTORE_STACK \
      "mr 1,28\n\t"
3283 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3284 long) == 8. */
/* ppc64le: call a 0-arg word-returning function through the
   no-redirect gateway via r12.  The callee's TOC ptr (at nraddr-8)
   replaces r2, whose caller value is saved/restored via -16(r12). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 1-arg word-returning function through the
   no-redirect gateway via r12.  arg1 goes to r3; r2 is swapped to
   the callee's TOC ptr (at nraddr-8) and restored afterwards. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 2-arg word-returning function through the
   no-redirect gateway via r12.  arg1/arg2 go to r3/r4; r2 is
   swapped to the callee's TOC ptr (at nraddr-8) and restored. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 3-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg3 go to r3..r5; r2 is
   swapped to the callee's TOC ptr (at nraddr-8) and restored. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 4-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg4 go to r3..r6; r2 is
   swapped to the callee's TOC ptr (at nraddr-8) and restored. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 5-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg5 go to r3..r7; r2 is
   swapped to the callee's TOC ptr (at nraddr-8) and restored. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 6-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg6 go to r3..r8; r2 is
   swapped to the callee's TOC ptr (at nraddr-8) and restored. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 7-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg7 go to r3..r9; r2 is
   swapped to the callee's TOC ptr (at nraddr-8) and restored. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call an 8-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg8 fill the full register
   set r3..r10; r2 is swapped to the callee's TOC ptr (at nraddr-8)
   and restored afterwards. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 9-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg8 go to r3..r10; the
   frame is extended by 128 bytes and arg9 is stored at 96(r1)
   (r3 is scratch, so the store precedes the "args1-8" loads). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 10-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg8 go to r3..r10; the
   frame is extended by 128 bytes and arg9/arg10 are stored at
   96(r1)/104(r1) (r3 as scratch) before the register args load. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-128\n\t" /* expand stack frame */             \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call an 11-arg word-returning function through the
   no-redirect gateway via r12.  arg1..arg8 go to r3..r10; the
   frame is extended by 144 bytes and arg9..arg11 are stored at
   96/104/112(r1) (r3 as scratch) before the register args load. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* ppc64le: call a 12-argument word function via the no-redirect
   mechanism, assigning the word result to 'lval'.  args 1-8 travel in
   r3-r10; args 9-12 are copied into the expanded 144-byte stack frame.
   The TOC pointer (r2) is saved/restored around the call so the callee
   may run with nraddr's TOC. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3814 #endif /* PLAT_ppc64le_linux */
3816 /* ------------------------- arm-linux ------------------------- */
3818 #if defined(PLAT_arm_linux)
3820 /* These regs are trashed by the hidden call. */
/* arm: registers clobbered by the hidden call; listed in each asm
   clobber list below.  r12 (IP) and r14 (LR) are caller-saved per the
   AAPCS; r4 is included because these macros use it as a scratch. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3823 /* Macros to save and align the stack before making a function
3824 call and restore it afterwards as gcc may not keep the stack
3825 pointer aligned if it doesn't realise calls are being made
3826 to other functions. */
3828 /* This is a bit tricky. We store the original stack pointer in r10
3829 as it is callee-saves. gcc doesn't allow the use of r11 for some
3830 reason. Also, we can't directly "bic" the stack pointer in thumb
3831 mode since r13 isn't an allowed register number in that context.
3832 So use r4 as a temporary, since that is about to get trashed
3833 anyway, just after each use of this macro. Side effect is we need
3834 to be very careful about any future changes, since
3835 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* arm: save sp in callee-saved r10, then round sp down to an 8-byte
   boundary.  r4 is used as a temporary because "bic" cannot target sp
   directly in thumb mode; r4 is about to be trashed anyway. */
#define VALGRIND_ALIGN_STACK \
      "mov r10, sp\n\t" \
      "mov r4, sp\n\t" \
      "bic r4, r4, #7\n\t" \
      "mov sp, r4\n\t"
/* arm: undo VALGRIND_ALIGN_STACK by restoring sp from r10. */
#define VALGRIND_RESTORE_STACK \
      "mov sp, r10\n\t"
3844 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3845 long) == 4. */
/* arm: call a 0-argument function via the no-redirect mechanism and
   assign the word result (r0) to 'lval'. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 1-argument function via the no-redirect mechanism;
   arg1 goes in r0, the word result (r0) is assigned to 'lval'. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 2-argument function via the no-redirect mechanism;
   args go in r0-r1, the word result (r0) is assigned to 'lval'. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 3-argument function via the no-redirect mechanism;
   args go in r0-r2, the word result (r0) is assigned to 'lval'. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 4-argument function via the no-redirect mechanism;
   args go in r0-r3 (all register args per AAPCS), the word result
   (r0) is assigned to 'lval'. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 5-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, arg5 on the stack.  The extra "sub sp, #4"
   plus the one-word push keeps sp 8-byte aligned at the call. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 6-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-6 pushed on the stack (two words, so
   8-byte stack alignment is preserved without padding). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 7-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-7 on the stack.  "sub sp, #4" pads the
   three-word push so sp stays 8-byte aligned at the call. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call an 8-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-8 pushed on the stack (four words, so
   8-byte stack alignment is preserved without padding). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 9-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-9 on the stack.  "sub sp, #4" pads the
   five-word push so sp stays 8-byte aligned at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 10-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-10 on the stack.  arg10 is pushed first
   so it ends up above args 5-9 (six words total keeps 8-alignment). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call an 11-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-11 on the stack.  args 10-11 are pushed
   first (with 4 bytes of padding) so they sit above args 5-9. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm: call a 12-argument function via the no-redirect mechanism;
   args 1-4 in r0-r3, args 5-12 on the stack.  args 10-12 are pushed
   first so they sit above args 5-9 (eight words keeps 8-alignment). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4273 #endif /* PLAT_arm_linux */
4275 /* ------------------------ arm64-linux ------------------------ */
4277 #if defined(PLAT_arm64_linux)
4279 /* These regs are trashed by the hidden call. */
/* arm64: registers clobbered by the hidden call.  Covers the AAPCS64
   caller-saved integer registers (x0-x18, x30) plus x19/x20 used by
   the call scheme, and all the SIMD/FP registers v0-v31. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"
4289 /* x21 is callee-saved, so we can use it to save and restore SP around
4290 the hidden call. */
/* arm64: save sp in callee-saved x21, then round sp down to a
   16-byte boundary as required by AAPCS64. */
#define VALGRIND_ALIGN_STACK \
      "mov x21, sp\n\t" \
      "bic sp, x21, #15\n\t"
/* arm64: undo VALGRIND_ALIGN_STACK by restoring sp from x21. */
#define VALGRIND_RESTORE_STACK \
      "mov sp, x21\n\t"
4297 /* These CALL_FN_ macros assume that on arm64-linux,
4298 sizeof(unsigned long) == 8. */
/* arm64: call a 0-argument function via the no-redirect mechanism
   and assign the word result (x0) to 'lval'.  The target address is
   passed in x8 for the branch-and-link sequence. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 1-argument function via the no-redirect mechanism;
   arg1 goes in x0, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 2-argument function via the no-redirect mechanism;
   args go in x0-x1, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 3-argument function via the no-redirect mechanism;
   args go in x0-x2, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 4-argument function via the no-redirect mechanism;
   args go in x0-x3, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 5-argument function via the no-redirect mechanism;
   args go in x0-x4, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 6-argument function via the no-redirect mechanism;
   args go in x0-x5, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 7-argument function via the no-redirect mechanism;
   args go in x0-x6, the word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call an 8-argument function via the no-redirect mechanism;
   args go in x0-x7 (all register args per AAPCS64), the word result
   (x0) is assigned to 'lval'. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 9-argument function via the no-redirect mechanism;
   args 1-8 go in x0-x7, arg9 is stored at [sp] in a 0x20-byte
   (16-aligned) frame.  The word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* arm64: call a 10-argument function via the no-redirect mechanism;
   args 1-8 go in x0-x7, args 9-10 are stored at [sp] and [sp,#8] in a
   0x20-byte frame.  The word result (x0) is assigned to 'lval'. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-argument function with no redirection.  Args 1..8 in
   x0..x7; args 9..11 spilled to the 16-byte-aligned stack. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t" /* room for 3 stack args */     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0] \n\t"                                  \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8] \n\t"                                  \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-argument function with no redirection.  Args 1..8 in
   x0..x7; args 9..12 spilled to the 16-byte-aligned stack. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t" /* room for 4 stack args */     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0] \n\t"                                  \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8] \n\t"                                  \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"                                 \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4724 #endif /* PLAT_arm64_linux */
4726 /* ------------------------- s390x-linux ------------------------- */
4728 #if defined(PLAT_s390x_linux)
4730 /* Similar workaround as amd64 (see above), but we use r11 as frame
4731 pointer and save the old r11 in r7. r11 might be used for
4732 argvec, therefore we copy argvec in r1 since r1 is clobbered
4733 after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* With CFI asm available, describe the frame so unwinding works
   across the hidden call: r1 gets a copy of the argvec pointer, the
   caller's r11 is saved in r7, and r11 becomes the CFA register. */
# define __FRAME_POINTER                                          \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE                                    \
      ".cfi_remember_state\n\t"                                   \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */          \
      "lgr 7,11\n\t"                                              \
      "lgr 11,%2\n\t"                                             \
      ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE                                    \
      "lgr 11, 7\n\t"                                             \
      ".cfi_restore_state\n\t"
#else
/* No CFI support: still copy the argvec pointer into r1. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                    \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4753 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4754 according to the s390 GCC maintainer. (The ABI specification is not
4755 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4756 VALGRIND_RESTORE_STACK are not defined here. */
4758 /* These regs are trashed by the hidden call. Note that we overwrite
4759 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4760 function a proper return address. All others are ABI defined call
4761 clobbers. */
#if defined(__VX__) || defined(__S390_VX__)
/* Vector facility present: the full v0-v31 set is call-clobbered. */
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14",   \
   "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7",                \
   "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15",          \
   "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23",        \
   "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
/* No vector facility: only f0-f7 are call-clobbered FPRs. */
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14",   \
   "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
/* Nb: Although r11 is modified in the asm snippets below (inside
   VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
   two reasons:
   (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
       modified
   (2) GCC will complain that r11 cannot appear inside a clobber section,
       when compiled with -O -fno-omit-frame-pointer
*/
/* Call a 0-argument function with no redirection; result in r2.
   160 bytes are reserved for the callee's register save area. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "d" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4803 /* The call abi has the arguments in r2-r6 and stack */
/* Call a 1-argument function; the s390x ABI passes args in r2-r6. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 2-argument function; args in r2,r3. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 3-argument function; args in r2-r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 4-argument function; args in r2-r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 5-argument function; args in r2-r6 (r6 is additionally
   listed as clobbered since it is not in __CALLER_SAVED_REGS). */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 6-argument function; args 1-5 in r2-r6, arg 6 copied to
   the stack slot just above the 160-byte register save area. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 7-argument function; args 6-7 go on the stack via mvc. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call an 8-argument function; args 6-8 go on the stack via mvc. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 9-argument function; args 6-9 go on the stack via mvc. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 10-argument function; args 6-10 go on the stack via mvc. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call an 11-argument function; args 6-11 go on the stack via mvc. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 12-argument function; args 6-12 go on the stack via mvc. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "mvc 208(8,15), 96(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5220 #endif /* PLAT_s390x_linux */
5222 /* ------------------------- mips32-linux ----------------------- */
5224 #if defined(PLAT_mips32_linux)
5226 /* These regs are trashed by the hidden call. */
/* MIPS o32 call-clobbered set: v0-v1, a0-a3, t0-t9, ra. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",        \
   "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15",   \
   "$24", "$25", "$31"
5231 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5232 long) == 4. */
/* Call a 0-argument function with no redirection.  $gp ($28) and
   $ra ($31) are saved/restored around the call; result in $2. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16\n\t"                                 \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 1-argument function; o32 passes args 1-4 in $4-$7. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"   /* arg1*/                         \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 2-argument function; args in $4,$5. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 3-argument function; args in $4-$6. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 4-argument function; args in $4-$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 5-argument function.  Args 1-4 in $4-$7; arg5 is staged
   through $4 into the outgoing stack slot at 16($29) *before* $4
   is reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 24\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 24 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 6-argument function.  Args 1-4 in $4-$7; args 5-6 staged
   through $4 into outgoing stack slots 16($29) and 20($29). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 32\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "nop\n\t"                                               \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 32 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 7-argument function.  Args 1-4 in $4-$7; args 5-7 staged
   through $4 into outgoing stack slots 16/20/24($29). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 32\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 32 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call an 8-argument function.  Args 1-4 in $4-$7; args 5-8 staged
   through $4 into outgoing stack slots 16..28($29). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 40\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 32(%1) \n\t"                                    \
         "sw $4, 28($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 40 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 9-argument function.  Args 1-4 in $4-$7; args 5-9 staged
   through $4 into outgoing stack slots 16..32($29). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 40\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 24(%1) \n\t"                                    \
         "sw $4, 20($29) \n\t"                                   \
         "lw $4, 28(%1) \n\t"                                    \
         "sw $4, 24($29) \n\t"                                   \
         "lw $4, 32(%1) \n\t"                                    \
         "sw $4, 28($29) \n\t"                                   \
         "lw $4, 36(%1) \n\t"                                    \
         "sw $4, 32($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 40 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 10-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS o32).  Args 1..4 in $4..$7, args 5..10 on the
   stack at 16($sp)..36($sp); gp/ra saved across the call; result in
   $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"   /* save gp/ra slots */         \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"   /* outgoing-args frame */      \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS o32).  Args 1..4 in $4..$7, args 5..11 on the
   stack at 16($sp)..40($sp); gp/ra saved across the call; result in
   $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"   /* save gp/ra slots */         \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"   /* outgoing-args frame */      \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS o32).  Args 1..4 in $4..$7, args 5..12 on the
   stack at 16($sp)..44($sp); gp/ra saved across the call; result in
   $2.
   NOTE(review): this variant uses the "r" input constraint whereas
   the other mips32 CALL_FN_W_* macros use "0" (tying the input to
   the output register).  Both are functionally sound; kept as-is. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"   /* save gp/ra slots */         \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 56\n\t"   /* outgoing-args frame */      \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5759 #endif /* PLAT_mips32_linux */
5761 /* ------------------------- nanomips-linux -------------------- */
5763 #if defined(PLAT_nanomips_linux)
5765 /* These regs are trashed by the hidden call. */
/* These regs are trashed by the hidden call (listed as clobbers in
   the CALL_FN_W_* asm blocks below). */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2",    \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3",    \
"$t8","$t9", "$at"
/* These CALL_FN_ macros assume that on nanomips-linux,
   sizeof(unsigned long) == 4. */
/* Call a 0-argument, word-returning function via the no-redirect
   T9 mechanism.  nanomips: target loaded into $t9; the result comes
   back in $a0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 1-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  arg1 in $a0; result in $a0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 2-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0,$a1; result in $a0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 3-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0..$a2; result in $a0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 4-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0..$a3; result in $a0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 5-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0..$a4; result in $a0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0..$a5; result in $a0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0..$a6; result in $a0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args in $a0..$a7; result in $a0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         "lw $a7,32(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args 1..8 in $a0..$a7; arg 9 spilled to
   the stack at 0($sp) inside a temporary 16-byte frame; result in
   $a0. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"  /* arg9 -> stack */          \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"  /* target->t9 */             \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args 1..8 in $a0..$a7; args 9..10 on
   the stack at 0($sp)/4($sp) inside a temporary 16-byte frame;
   result in $a0. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"  /* arg9  -> stack */         \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9,40(%1)       \n\t"  /* arg10 -> stack */         \
         "sw $t9, 4($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"  /* target->t9 */             \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args 1..8 in $a0..$a7; args 9..11 on
   the stack at 0..8($sp) inside a temporary 16-byte frame; result
   in $a0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"  /* arg9  -> stack */         \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9,40(%1)       \n\t"  /* arg10 -> stack */         \
         "sw $t9, 4($sp)      \n\t"                               \
         "lw $t9,44(%1)       \n\t"  /* arg11 -> stack */         \
         "sw $t9, 8($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"  /* target->t9 */             \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-argument, word-returning function via the no-redirect
   T9 mechanism (nanomips).  Args 1..8 in $a0..$a7; args 9..12 on
   the stack at 0..12($sp) inside a temporary 16-byte frame; result
   in $a0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1)       \n\t"  /* arg9  -> stack */         \
         "sw $t9, 0($sp)      \n\t"                               \
         "lw $t9,40(%1)       \n\t"  /* arg10 -> stack */         \
         "sw $t9, 4($sp)      \n\t"                               \
         "lw $t9,44(%1)       \n\t"  /* arg11 -> stack */         \
         "sw $t9, 8($sp)      \n\t"                               \
         "lw $t9,48(%1)       \n\t"  /* arg12 -> stack */         \
         "sw $t9,12($sp)      \n\t"                               \
         "lw $t9, 0(%1)       \n\t"  /* target->t9 */             \
         "lw $a0, 4(%1)       \n\t"                               \
         "lw $a1, 8(%1)       \n\t"                               \
         "lw $a2,12(%1)       \n\t"                               \
         "lw $a3,16(%1)       \n\t"                               \
         "lw $a4,20(%1)       \n\t"                               \
         "lw $a5,24(%1)       \n\t"                               \
         "lw $a6,28(%1)       \n\t"                               \
         "lw $a7,32(%1)       \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0        \n\t"                               \
         "addiu $sp, $sp, 16  \n\t"                               \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6175 #endif /* PLAT_nanomips_linux */
6177 /* ------------------------- mips64-linux ------------------------- */
6179 #if defined(PLAT_mips64_linux)
6181 /* These regs are trashed by the hidden call. */
/* These regs are trashed by the hidden call (listed as clobbers in
   the CALL_FN_W_* asm blocks below). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",          \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8. */
/* Widen a word-sized value to the 64-bit register representation
   (sign-extend through long, then to long long).  The argument is
   parenthesised so that expression arguments (e.g. a + b) are cast
   as a whole rather than having the cast bind to the first operand
   only. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
/* Call a 0-argument, word-returning function via the no-redirect
   T9 mechanism.  MIPS n64: target in $25 (t9); result in $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[1];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 1-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  arg1 in $4 (a0); result in $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[2];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"   /* arg1*/                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 2-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4,$5; result in $2.
   (_argvec[0] is now wrapped in MIPS64_LONG2REG_CAST, matching
   every other mips64 CALL_FN_W_* macro.) */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[3];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 3-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4..$6; result in $2.
   (_argvec[0] is now wrapped in MIPS64_LONG2REG_CAST, matching
   every other mips64 CALL_FN_W_* macro.) */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[4];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 4-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4..$7; result in $2. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[5];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 5-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4..$8; result in $2. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[6];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 6-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4..$9; result in $2. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[7];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 7-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4..$10; result in $2. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[8];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call an 8-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args in $4..$11; result in $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[9];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 9-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args 1..8 in $4..$11; arg 9 spilled to
   the stack at 0($29) inside a temporary 8-byte frame; result in
   $2. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[10];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"   /* arg9 -> stack */                \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"   /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 10-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args 1..8 in $4..$11; args 9..10 on the
   stack at 0/8($29) inside a temporary 16-byte frame; result in
   $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[11];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"   /* arg9  -> stack */               \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"   /* arg10 -> stack */               \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"   /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call an 11-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args 1..8 in $4..$11; args 9..11 on the
   stack at 0..16($29) inside a temporary 24-byte frame; result in
   $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[12];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t"                                 \
         "ld $4, 72(%1)\n\t"   /* arg9  -> stack */               \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"   /* arg10 -> stack */               \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"   /* arg11 -> stack */               \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"   /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
/* Call a 12-argument, word-returning function via the no-redirect
   T9 mechanism (MIPS n64).  Args 1..8 in $4..$11; args 9..12 on the
   stack at 0..24($29) inside a temporary 32-byte frame; result in
   $2. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn             _orig = (orig);                 \
      volatile unsigned long long _argvec[13];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                  \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 32\n\t"                                 \
         "ld $4, 72(%1)\n\t"   /* arg9  -> stack */               \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"   /* arg10 -> stack */               \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"   /* arg11 -> stack */               \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 96(%1)\n\t"   /* arg12 -> stack */               \
         "sd $4, 24($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"   /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 32\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "r" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6595 #endif /* PLAT_mips64_linux */
6597 /* ------------------------------------------------------------------ */
6598 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6599 /* */
6600 /* ------------------------------------------------------------------ */
6602 /* Some request codes. There are many more of these, but most are not
6603 exposed to end-user view. These are the public ones, all of the
6604 form 0x1000 + small_number.
6606 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6607 ones start at 0x2000.
6608 */
6610 /* These macros are used by tools -- they must be public, but don't
6611 embed them into other programs. */
6612 #define VG_USERREQ_TOOL_BASE(a,b) \
6613 ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
6614 #define VG_IS_TOOL_USERREQ(a, b, v) \
6615 (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6617 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6618 This enum comprises an ABI exported by Valgrind to programs
6619 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6620 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6621 relevant group. */
6622 typedef
6623 enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
6624 VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,
6626 /* These allow any function to be called from the simulated
6627 CPU but run on the real CPU. Nb: the first arg passed to
6628 the function is always the ThreadId of the running
6629 thread! So CLIENT_CALL0 actually requires a 1 arg
6630 function, etc. */
6631 VG_USERREQ__CLIENT_CALL0 = 0x1101,
6632 VG_USERREQ__CLIENT_CALL1 = 0x1102,
6633 VG_USERREQ__CLIENT_CALL2 = 0x1103,
6634 VG_USERREQ__CLIENT_CALL3 = 0x1104,
6636 /* Can be useful in regression testing suites -- eg. can
6637 send Valgrind's output to /dev/null and still count
6638 errors. */
6639 VG_USERREQ__COUNT_ERRORS = 0x1201,
6641 /* Allows the client program and/or gdbserver to execute a monitor
6642 command. */
6643 VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,
6645 /* Allows the client program to change a dynamic command line
6646 option. */
6647 VG_USERREQ__CLO_CHANGE = 0x1203,
6649 /* These are useful and can be interpreted by any tool that
6650 tracks malloc() et al, by using vg_replace_malloc.c. */
6651 VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
6652 VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
6653 VG_USERREQ__FREELIKE_BLOCK = 0x1302,
6654 /* Memory pool support. */
6655 VG_USERREQ__CREATE_MEMPOOL = 0x1303,
6656 VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
6657 VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
6658 VG_USERREQ__MEMPOOL_FREE = 0x1306,
6659 VG_USERREQ__MEMPOOL_TRIM = 0x1307,
6660 VG_USERREQ__MOVE_MEMPOOL = 0x1308,
6661 VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
6662 VG_USERREQ__MEMPOOL_EXISTS = 0x130a,
6664 /* Allow printfs to valgrind log. */
6665 /* The first two pass the va_list argument by value, which
6666 assumes it is the same size as or smaller than a UWord,
6667 which generally isn't the case. Hence are deprecated.
6668 The second two pass the vargs by reference and so are
6669 immune to this problem. */
6670 /* both :: char* fmt, va_list vargs (DEPRECATED) */
6671 VG_USERREQ__PRINTF = 0x1401,
6672 VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
6673 /* both :: char* fmt, va_list* vargs */
6674 VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
6675 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,
6677 /* Stack support. */
6678 VG_USERREQ__STACK_REGISTER = 0x1501,
6679 VG_USERREQ__STACK_DEREGISTER = 0x1502,
6680 VG_USERREQ__STACK_CHANGE = 0x1503,
6682 /* Wine support */
6683 VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,
6685 /* Querying of debug info. */
6686 VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,
6688 /* Disable/enable error reporting level. Takes a single
6689 Word arg which is the delta to this thread's error
6690 disablement indicator. Hence 1 disables or further
6691 disables errors, and -1 moves back towards enablement.
6692 Other values are not allowed. */
6693 VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,
6695 /* Some requests used for Valgrind internal, such as
6696 self-test or self-hosting. */
6697 /* Initialise IR injection */
6698 VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
6699 /* Used by Inner Valgrind to inform Outer Valgrind where to
6700 find the list of inner guest threads */
6701 VG_USERREQ__INNER_THREADS = 0x1902
6702 } Vg_ClientRequest;
6704 #if !defined(__GNUC__)
6705 # define __extension__ /* */
6706 #endif
6709 /* Returns the number of Valgrinds this code is running under. That
6710 is, 0 if running natively, 1 if running under Valgrind, 2 if
6711 running under Valgrind which is running under another Valgrind,
6712 etc. */
6713 #define RUNNING_ON_VALGRIND \
6714 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
6715 VG_USERREQ__RUNNING_ON_VALGRIND, \
6716 0, 0, 0, 0, 0) \
6719 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6720 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6721 since it provides a way to make sure valgrind will retranslate the
6722 invalidated area. Returns no value. */
6723 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
6724 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
6725 _qzz_addr, _qzz_len, 0, 0, 0)
6727 #define VALGRIND_INNER_THREADS(_qzz_addr) \
6728 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
6729 _qzz_addr, 0, 0, 0, 0)
6732 /* These requests are for getting Valgrind itself to print something.
6733 Possibly with a backtrace. This is a really ugly hack. The return value
6734 is the number of characters printed, excluding the "**<pid>** " part at the
6735 start and the backtrace (if present). */
6737 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6738 /* Modern GCC will optimize the static routine out if unused,
6739 and unused attribute will shut down warnings about it. */
6740 static int VALGRIND_PRINTF(const char *format, ...)
6741 __attribute__((format(__printf__, 1, 2), __unused__));
6742 #endif
6743 static int
6744 #if defined(_MSC_VER)
6745 __inline
6746 #endif
6747 VALGRIND_PRINTF(const char *format, ...)
6749 #if defined(NVALGRIND)
6750 (void)format;
6751 return 0;
6752 #else /* NVALGRIND */
6753 #if defined(_MSC_VER) || defined(__MINGW64__)
6754 uintptr_t _qzz_res;
6755 #else
6756 unsigned long _qzz_res;
6757 #endif
6758 va_list vargs;
6759 va_start(vargs, format);
6760 #if defined(_MSC_VER) || defined(__MINGW64__)
6761 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6762 VG_USERREQ__PRINTF_VALIST_BY_REF,
6763 (uintptr_t)format,
6764 (uintptr_t)&vargs,
6765 0, 0, 0);
6766 #else
6767 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6768 VG_USERREQ__PRINTF_VALIST_BY_REF,
6769 (unsigned long)format,
6770 (unsigned long)&vargs,
6771 0, 0, 0);
6772 #endif
6773 va_end(vargs);
6774 return (int)_qzz_res;
6775 #endif /* NVALGRIND */
6778 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
6779 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6780 __attribute__((format(__printf__, 1, 2), __unused__));
6781 #endif
6782 static int
6783 #if defined(_MSC_VER)
6784 __inline
6785 #endif
6786 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
6788 #if defined(NVALGRIND)
6789 (void)format;
6790 return 0;
6791 #else /* NVALGRIND */
6792 #if defined(_MSC_VER) || defined(__MINGW64__)
6793 uintptr_t _qzz_res;
6794 #else
6795 unsigned long _qzz_res;
6796 #endif
6797 va_list vargs;
6798 va_start(vargs, format);
6799 #if defined(_MSC_VER) || defined(__MINGW64__)
6800 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6801 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6802 (uintptr_t)format,
6803 (uintptr_t)&vargs,
6804 0, 0, 0);
6805 #else
6806 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
6807 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
6808 (unsigned long)format,
6809 (unsigned long)&vargs,
6810 0, 0, 0);
6811 #endif
6812 va_end(vargs);
6813 return (int)_qzz_res;
6814 #endif /* NVALGRIND */
6818 /* These requests allow control to move from the simulated CPU to the
6819 real CPU, calling an arbitrary function.
6821 Note that the current ThreadId is inserted as the first argument.
6822 So this call:
6824 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6826 requires f to have this signature:
6828 Word f(Word tid, Word arg1, Word arg2)
6830 where "Word" is a word-sized type.
6832 Note that these client requests are not entirely reliable. For example,
6833 if you call a function with them that subsequently calls printf(),
6834 there's a high chance Valgrind will crash. Generally, your prospects of
6835 these working are made higher if the called function does not refer to
6836 any global variables, and does not refer to any libc or other functions
6837 (printf et al). Any kind of entanglement with libc or dynamic linking is
6838 likely to have a bad outcome, for tricky reasons which we've grappled
6839 with a lot in the past.
6840 */
6841 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
6842 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6843 VG_USERREQ__CLIENT_CALL0, \
6844 _qyy_fn, \
6845 0, 0, 0, 0)
6847 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
6848 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6849 VG_USERREQ__CLIENT_CALL1, \
6850 _qyy_fn, \
6851 _qyy_arg1, 0, 0, 0)
6853 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
6854 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6855 VG_USERREQ__CLIENT_CALL2, \
6856 _qyy_fn, \
6857 _qyy_arg1, _qyy_arg2, 0, 0)
6859 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
6860 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
6861 VG_USERREQ__CLIENT_CALL3, \
6862 _qyy_fn, \
6863 _qyy_arg1, _qyy_arg2, \
6864 _qyy_arg3, 0)
6867 /* Counts the number of errors that have been recorded by a tool. Nb:
6868 the tool must record the errors with VG_(maybe_record_error)() or
6869 VG_(unique_error)() for them to be counted. */
6870 #define VALGRIND_COUNT_ERRORS \
6871 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
6872 0 /* default return */, \
6873 VG_USERREQ__COUNT_ERRORS, \
6874 0, 0, 0, 0, 0)
6876 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6877 when heap blocks are allocated in order to give accurate results. This
6878 happens automatically for the standard allocator functions such as
6879 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6880 delete[], etc.
6882 But if your program uses a custom allocator, this doesn't automatically
6883 happen, and Valgrind will not do as well. For example, if you allocate
6884 superblocks with mmap() and then allocates chunks of the superblocks, all
6885 Valgrind's observations will be at the mmap() level and it won't know that
6886 the chunks should be considered separate entities. In Memcheck's case,
6887 that means you probably won't get heap block overrun detection (because
6888 there won't be redzones marked as unaddressable) and you definitely won't
6889 get any leak detection.
6891 The following client requests allow a custom allocator to be annotated so
6892 that it can be handled accurately by Valgrind.
6894 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6895 by a malloc()-like function. For Memcheck (an illustrative case), this
6896 does two things:
6898 - It records that the block has been allocated. This means any addresses
6899 within the block mentioned in error messages will be
6900 identified as belonging to the block. It also means that if the block
6901 isn't freed it will be detected by the leak checker.
6903 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6904 not set), or addressable and defined (if 'is_zeroed' is set). This
6905 controls how accesses to the block by the program are handled.
6907 'addr' is the start of the usable block (ie. after any
6908 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6909 can apply redzones -- these are blocks of padding at the start and end of
6910 each block. Adding redzones is recommended as it makes it much more likely
6911 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6912 zeroed (or filled with another predictable value), as is the case for
6913 calloc().
6915 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6916 heap block -- that will be used by the client program -- is allocated.
6917 It's best to put it at the outermost level of the allocator if possible;
6918 for example, if you have a function my_alloc() which calls
6919 internal_alloc(), and the client request is put inside internal_alloc(),
6920 stack traces relating to the heap block will contain entries for both
6921 my_alloc() and internal_alloc(), which is probably not what you want.
6923 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6924 custom blocks from within a heap block, B, that has been allocated with
6925 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6926 -- the custom blocks will take precedence.
6928 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6929 Memcheck, it does two things:
6931 - It records that the block has been deallocated. This assumes that the
6932 block was annotated as having been allocated via
6933 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6935 - It marks the block as being unaddressable.
6937 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6938 heap block is deallocated.
6940 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6941 Memcheck, it does four things:
6943 - It records that the size of a block has been changed. This assumes that
6944 the block was annotated as having been allocated via
6945 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6947 - If the block shrunk, it marks the freed memory as being unaddressable.
6949 - If the block grew, it marks the new area as undefined and defines a red
6950 zone past the end of the new block.
6952 - The V-bits of the overlap between the old and the new block are preserved.
6954 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6955 and before deallocation of the old block.
6957 In many cases, these three client requests will not be enough to get your
6958 allocator working well with Memcheck. More specifically, if your allocator
6959 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6960 will be necessary to mark the memory as addressable just before the zeroing
6961 occurs, otherwise you'll get a lot of invalid write errors. For example,
6962 you'll need to do this if your allocator recycles freed blocks, but it
6963 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6964 Alternatively, if your allocator reuses freed blocks for allocator-internal
6965 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6967 Really, what's happening is a blurring of the lines between the client
6968 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6969 memory should be considered unaddressable to the client program, but the
6970 allocator knows more than the rest of the client program and so may be able
6971 to safely access it. Extra client requests are necessary for Valgrind to
6972 understand the distinction between the allocator and the rest of the
6973 program.
6975 Ignored if addr == 0.
6976 */
6977 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
6978 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
6979 addr, sizeB, rzB, is_zeroed, 0)
6981 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6982 Ignored if addr == 0.
6983 */
6984 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
6985 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
6986 addr, oldSizeB, newSizeB, rzB, 0)
6988 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
6989 Ignored if addr == 0.
6990 */
6991 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
6992 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
6993 addr, rzB, 0, 0, 0)
6995 /* Create a memory pool. */
6996 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
6997 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
6998 pool, rzB, is_zeroed, 0, 0)
7000 /* Create a memory pool with some flags specifying extended behaviour.
7001 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
7003 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
7004 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
7005 by the application as superblocks to dole out MALLOC_LIKE blocks using
7006 VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a "2 levels"
7007 pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
7008 The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
7009 Note that the association between the pool and the second level blocks
7010 is implicit : second level blocks will be located inside first level
7011 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
7012 for such 2 levels pools, as otherwise valgrind will detect overlapping
7013 memory blocks, and will abort execution (e.g. during leak search).
7015 Such a meta pool can also be marked as an 'auto free' pool using the flag
7016 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
7017 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
7018 will automatically free the second level blocks that are contained
7019 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
7020 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
7021 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
7022 in the first level block.
7023 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
7024 without the VALGRIND_MEMPOOL_METAPOOL flag.
7025 */
7026 #define VALGRIND_MEMPOOL_AUTO_FREE 1
7027 #define VALGRIND_MEMPOOL_METAPOOL 2
7028 #define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags) \
7029 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
7030 pool, rzB, is_zeroed, flags, 0)
7032 /* Destroy a memory pool. */
7033 #define VALGRIND_DESTROY_MEMPOOL(pool) \
7034 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
7035 pool, 0, 0, 0, 0)
7037 /* Associate a piece of memory with a memory pool. */
7038 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
7039 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
7040 pool, addr, size, 0, 0)
7042 /* Disassociate a piece of memory from a memory pool. */
7043 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
7044 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
7045 pool, addr, 0, 0, 0)
7047 /* Disassociate any pieces outside a particular range. */
7048 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
7049 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
7050 pool, addr, size, 0, 0)
7052 /* Resize and/or move a piece associated with a memory pool. */
7053 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
7054 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
7055 poolA, poolB, 0, 0, 0)
7057 /* Resize and/or move a piece associated with a memory pool. */
7058 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
7059 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
7060 pool, addrA, addrB, size, 0)
7062 /* Return 1 if a mempool exists, else 0. */
7063 #define VALGRIND_MEMPOOL_EXISTS(pool) \
7064 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7065 VG_USERREQ__MEMPOOL_EXISTS, \
7066 pool, 0, 0, 0, 0)
7068 /* Mark a piece of memory as being a stack. Returns a stack id.
7069 start is the lowest addressable stack byte, end is the highest
7070 addressable stack byte. */
7071 #define VALGRIND_STACK_REGISTER(start, end) \
7072 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7073 VG_USERREQ__STACK_REGISTER, \
7074 start, end, 0, 0, 0)
7076 /* Unmark the piece of memory associated with a stack id as being a
7077 stack. */
7078 #define VALGRIND_STACK_DEREGISTER(id) \
7079 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
7080 id, 0, 0, 0, 0)
7082 /* Change the start and end address of the stack id.
7083 start is the new lowest addressable stack byte, end is the new highest
7084 addressable stack byte. */
7085 #define VALGRIND_STACK_CHANGE(id, start, end) \
7086 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
7087 id, start, end, 0, 0)
7089 /* Load PDB debug info for Wine PE image_map. */
7090 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
7091 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
7092 fd, ptr, total_size, delta, 0)
7094 /* Map a code address to a source file name and line number. buf64
7095 must point to a 64-byte buffer in the caller's address space. The
7096 result will be dumped in there and is guaranteed to be zero
7097 terminated. If no info is found, the first byte is set to zero. */
7098 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
7099 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
7100 VG_USERREQ__MAP_IP_TO_SRCLOC, \
7101 addr, buf64, 0, 0, 0)
7103 /* Disable error reporting for this thread. Behaves in a stack like
7104 way, so you can safely call this multiple times provided that
7105 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
7106 to re-enable reporting. The first call of this macro disables
7107 reporting. Subsequent calls have no effect except to increase the
7108 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
7109 reporting. Child threads do not inherit this setting from their
7110 parents -- they are always created with reporting enabled. */
7111 #define VALGRIND_DISABLE_ERROR_REPORTING \
7112 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7113 1, 0, 0, 0, 0)
7115 /* Re-enable error reporting, as per comments on
7116 VALGRIND_DISABLE_ERROR_REPORTING. */
7117 #define VALGRIND_ENABLE_ERROR_REPORTING \
7118 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
7119 -1, 0, 0, 0, 0)
7121 /* Execute a monitor command from the client program.
7122 If a connection is opened with GDB, the output will be sent
7123 according to the output mode set for vgdb.
7124 If no connection is opened, output will go to the log output.
7125 Returns 1 if command not recognised, 0 otherwise. */
7126 #define VALGRIND_MONITOR_COMMAND(command) \
7127 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
7128 command, 0, 0, 0, 0)
7131 /* Change the value of a dynamic command line option.
7132 Note that unknown or not dynamically changeable options
7133 will cause a warning message to be output. */
7134 #define VALGRIND_CLO_CHANGE(option) \
7135 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE, \
7136 option, 0, 0, 0, 0)
7139 #undef PLAT_x86_darwin
7140 #undef PLAT_amd64_darwin
7141 #undef PLAT_x86_win32
7142 #undef PLAT_amd64_win64
7143 #undef PLAT_x86_linux
7144 #undef PLAT_amd64_linux
7145 #undef PLAT_ppc32_linux
7146 #undef PLAT_ppc64be_linux
7147 #undef PLAT_ppc64le_linux
7148 #undef PLAT_arm_linux
7149 #undef PLAT_s390x_linux
7150 #undef PLAT_mips32_linux
7151 #undef PLAT_mips64_linux
7152 #undef PLAT_nanomips_linux
7153 #undef PLAT_x86_solaris
7154 #undef PLAT_amd64_solaris
7156 #endif /* __VALGRIND_H */