Partly revert "gio: Add filename type annotations"
[glib.git] / glib / valgrind.h
blob 565f7e56c705a59346ad6377d91a9a750e75844e
1 /* -*- c -*-
2 ----------------------------------------------------------------
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
10 ----------------------------------------------------------------
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
15 Copyright (C) 2000-2013 Julian Seward. All rights reserved.
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
48 ----------------------------------------------------------------
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
55 ----------------------------------------------------------------
58 /* This file is for inclusion into client (your!) code.
60 You can use these macros to manipulate and query Valgrind's
61 execution inside your own programs.
63 The resulting executables will still run without Valgrind, just a
64 little bit more slowly than they otherwise would, but otherwise
65 unchanged. When not running on valgrind, each client request
66 consumes very few (eg. 7) instructions, so the resulting performance
67 loss is negligible unless you plan to execute client requests
68 millions of times per second. Nevertheless, if that is still a
69 problem, you can compile with the NVALGRIND symbol defined (gcc
70 -DNVALGRIND) so that client requests are not even compiled in. */
72 #ifndef __VALGRIND_H
73 #define __VALGRIND_H
75 /* ------------------------------------------------------------------ */
76 /* VERSION NUMBER OF VALGRIND */
77 /* ------------------------------------------------------------------ */
79 /* Specify Valgrind's version number, so that user code can
80 conditionally compile based on our version number. Note that these
81 were introduced at version 3.6 and so do not exist in version 3.5
82 or earlier. The recommended way to use them to check for "version
83 X.Y or later" is (eg)
85 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
86 && (__VALGRIND_MAJOR__ > 3 \
87 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
/* Version of the Valgrind distribution this header was taken from.
   (These macros were introduced in Valgrind 3.6; see the comment
   above for the recommended version-check idiom.) */
#define __VALGRIND_MAJOR__    3
#define __VALGRIND_MINOR__    10

#include <stdarg.h>
/* HAVE_STDINT_H comes from the surrounding build system's config;
   when it is undefined the #if evaluates it as 0 and <stdint.h> is
   simply not included. */
#if HAVE_STDINT_H
#include <stdint.h>
#endif
98 /* Nb: this file might be included in a file compiled with -ansi. So
99 we can't use C++ style "//" comments nor the "asm" keyword (instead
100 use "__asm__"). */
102 /* Derive some tags indicating what the target platform is. Note
103 that in this file we're using the compiler's CPP symbols for
104 identifying architectures, which are different to the ones we use
105 within the rest of Valgrind. Note, __powerpc__ is active for both
106 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
107 latter (on Linux, that is).
109 Misc note: how to find out what's predefined in gcc by default:
110 gcc -Wp,-dM somefile.c
/* Platform detection.  Exactly one PLAT_* tag is defined below for a
   supported target; for anything else NVALGRIND is forced on so that
   no inline asm is generated at all.  The tags are first #undef'd so
   that stale definitions from the command line cannot interfere. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux


#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && !defined(__MINGW64__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif defined(__MINGW64__) \
      || (defined(_WIN64) && defined(_M_X64))
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
#  define PLAT_ppc64_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
#  define PLAT_mips32_linux 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
/* XXX: Unfortunately x64 Visual C++ does not support inline asms,
 * so disable the use of valgrind's inline asms for x64 Visual C++
 * builds, so that x64 Visual C++ builds of GLib can be maintained.
 */
#if defined (PLAT_amd64_win64) && defined (_MSC_VER)
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
175 /* ------------------------------------------------------------------ */
176 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
177 /* in here of use to end-users -- skip to the next section. */
178 /* ------------------------------------------------------------------ */
181 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
182 * request. Accepts both pointers and integers as arguments.
184 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
185 * client request that does not return a value.
187 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
188 * client request and whose value equals the client request result. Accepts
189 * both pointers and integers as arguments. Note that such calls are not
190 * necessarily pure functions -- they may have side effects.
/* VALGRIND_DO_CLIENT_REQUEST(): assigns the result of the client
   request to _zzq_rlval.  Thin statement wrapper around
   VALGRIND_DO_CLIENT_REQUEST_EXPR; every argument is parenthesised
   once here so callers may pass arbitrary expressions. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* VALGRIND_DO_CLIENT_REQUEST_STMT(): same request, but the result is
   discarded (default value 0, cast to void). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                        _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                  (_zzq_request), (_zzq_arg1), (_zzq_arg2),             \
                  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
206 #if defined(NVALGRIND)
208 /* Define NVALGRIND to completely remove the Valgrind magic sequence
209 from the compiled code (analogous to NDEBUG's effects on
210 assert()) */
211 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
212 _zzq_default, _zzq_request, \
213 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
214 (_zzq_default)
216 #else /* ! NVALGRIND */
218 /* The following defines the magic code sequences which the JITter
219 spots and handles magically. Don't look too closely at them as
220 they will rot your brain.
222 The assembly code sequences for all architectures is in this one
223 file. This is because this file must be stand-alone, and we don't
224 want to have multiple files.
226 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
227 value gets put in the return slot, so that everything works when
228 this is executed not under Valgrind. Args are passed in a memory
229 block, and so there's no intrinsic limit to the number that could
230 be passed, but it's currently five.
232 The macro args are:
233 _zzq_rlval result lvalue
234 _zzq_default default value (result returned when running on real CPU)
235 _zzq_request request code
236 _zzq_arg1..5 request params
238 The other two macros are used to support function wrapping, and are
239 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
240 guest's NRADDR pseudo-register and whatever other information is
241 needed to safely run the call original from the wrapper: on
242 ppc64-linux, the R2 value at the divert point is also needed. This
243 information is abstracted into a user-visible type, OrigFn.
245 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
246 guest, but guarantees that the branch instruction will not be
247 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
248 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
249 complete inline asm, since it needs to be combined with more magic
250 inline asm stuff to be useful.
253 /* ------------------------- x86-{linux,darwin} ---------------- */
255 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
256 || (defined(PLAT_x86_win32) && defined(__GNUC__))
258 typedef
259 struct {
260 unsigned int nraddr; /* where's the code? */
262 OrigFn;
264 #define __SPECIAL_INSTRUCTION_PREAMBLE \
265 "roll $3, %%edi ; roll $13, %%edi\n\t" \
266 "roll $29, %%edi ; roll $19, %%edi\n\t"
268 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
269 _zzq_default, _zzq_request, \
270 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
271 __extension__ \
272 ({volatile unsigned int _zzq_args[6]; \
273 volatile unsigned int _zzq_result; \
274 _zzq_args[0] = (unsigned int)(_zzq_request); \
275 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
276 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
277 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
278 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
279 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
280 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
281 /* %EDX = client_request ( %EAX ) */ \
282 "xchgl %%ebx,%%ebx" \
283 : "=d" (_zzq_result) \
284 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
285 : "cc", "memory" \
286 ); \
287 _zzq_result; \
290 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
291 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
292 volatile unsigned int __addr; \
293 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
294 /* %EAX = guest_NRADDR */ \
295 "xchgl %%ecx,%%ecx" \
296 : "=a" (__addr) \
298 : "cc", "memory" \
299 ); \
300 _zzq_orig->nraddr = __addr; \
303 #define VALGRIND_CALL_NOREDIR_EAX \
304 __SPECIAL_INSTRUCTION_PREAMBLE \
305 /* call-noredir *%EAX */ \
306 "xchgl %%edx,%%edx\n\t"
308 #define VALGRIND_VEX_INJECT_IR() \
309 do { \
310 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
311 "xchgl %%edi,%%edi\n\t" \
312 : : : "cc", "memory" \
313 ); \
314 } while (0)
316 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
318 /* ------------------------- x86-Win32 ------------------------- */
320 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
322 typedef
323 struct {
324 unsigned int nraddr; /* where's the code? */
326 OrigFn;
328 #if defined(_MSC_VER)
330 #define __SPECIAL_INSTRUCTION_PREAMBLE \
331 __asm rol edi, 3 __asm rol edi, 13 \
332 __asm rol edi, 29 __asm rol edi, 19
334 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
335 _zzq_default, _zzq_request, \
336 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
337 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
338 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
339 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
340 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
342 static __inline uintptr_t
343 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
344 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
345 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
346 uintptr_t _zzq_arg5)
348 volatile uintptr_t _zzq_args[6];
349 volatile unsigned int _zzq_result;
350 _zzq_args[0] = (uintptr_t)(_zzq_request);
351 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
352 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
353 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
354 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
355 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
356 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
357 __SPECIAL_INSTRUCTION_PREAMBLE
358 /* %EDX = client_request ( %EAX ) */
359 __asm xchg ebx,ebx
360 __asm mov _zzq_result, edx
362 return _zzq_result;
365 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
366 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
367 volatile unsigned int __addr; \
368 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
369 /* %EAX = guest_NRADDR */ \
370 __asm xchg ecx,ecx \
371 __asm mov __addr, eax \
373 _zzq_orig->nraddr = __addr; \
376 #define VALGRIND_CALL_NOREDIR_EAX ERROR
378 #define VALGRIND_VEX_INJECT_IR() \
379 do { \
380 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
381 __asm xchg edi,edi \
383 } while (0)
385 #else
386 #error Unsupported compiler.
387 #endif
389 #endif /* PLAT_x86_win32 */
391 /* ------------------------ amd64-{linux,darwin} --------------- */
393 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
394 || (defined(PLAT_amd64_win64) && defined(__GNUC__))
396 typedef
397 struct {
398 unsigned long long int nraddr; /* where's the code? */
400 OrigFn;
402 #define __SPECIAL_INSTRUCTION_PREAMBLE \
403 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
404 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
406 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
407 _zzq_default, _zzq_request, \
408 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
409 __extension__ \
410 ({ volatile unsigned long long int _zzq_args[6]; \
411 volatile unsigned long long int _zzq_result; \
412 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
413 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
414 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
415 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
416 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
417 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
418 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
419 /* %RDX = client_request ( %RAX ) */ \
420 "xchgq %%rbx,%%rbx" \
421 : "=d" (_zzq_result) \
422 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
423 : "cc", "memory" \
424 ); \
425 _zzq_result; \
428 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
429 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
430 volatile unsigned long long int __addr; \
431 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
432 /* %RAX = guest_NRADDR */ \
433 "xchgq %%rcx,%%rcx" \
434 : "=a" (__addr) \
436 : "cc", "memory" \
437 ); \
438 _zzq_orig->nraddr = __addr; \
441 #define VALGRIND_CALL_NOREDIR_RAX \
442 __SPECIAL_INSTRUCTION_PREAMBLE \
443 /* call-noredir *%RAX */ \
444 "xchgq %%rdx,%%rdx\n\t"
446 #define VALGRIND_VEX_INJECT_IR() \
447 do { \
448 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
449 "xchgq %%rdi,%%rdi\n\t" \
450 : : : "cc", "memory" \
451 ); \
452 } while (0)
454 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
456 /* ------------------------- amd64-Win64 ------------------------- */
458 #if defined(PLAT_amd64_win64) && !defined(__GNUC__)
460 #error Unsupported compiler.
462 #endif /* PLAT_amd64_win64 */
464 /* ------------------------ ppc32-linux ------------------------ */
466 #if defined(PLAT_ppc32_linux)
468 typedef
469 struct {
470 unsigned int nraddr; /* where's the code? */
472 OrigFn;
474 #define __SPECIAL_INSTRUCTION_PREAMBLE \
475 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
476 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
478 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
479 _zzq_default, _zzq_request, \
480 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
482 __extension__ \
483 ({ unsigned int _zzq_args[6]; \
484 unsigned int _zzq_result; \
485 unsigned int* _zzq_ptr; \
486 _zzq_args[0] = (unsigned int)(_zzq_request); \
487 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
488 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
489 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
490 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
491 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
492 _zzq_ptr = _zzq_args; \
493 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
494 "mr 4,%2\n\t" /*ptr*/ \
495 __SPECIAL_INSTRUCTION_PREAMBLE \
496 /* %R3 = client_request ( %R4 ) */ \
497 "or 1,1,1\n\t" \
498 "mr %0,3" /*result*/ \
499 : "=b" (_zzq_result) \
500 : "b" (_zzq_default), "b" (_zzq_ptr) \
501 : "cc", "memory", "r3", "r4"); \
502 _zzq_result; \
505 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
506 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
507 unsigned int __addr; \
508 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
509 /* %R3 = guest_NRADDR */ \
510 "or 2,2,2\n\t" \
511 "mr %0,3" \
512 : "=b" (__addr) \
514 : "cc", "memory", "r3" \
515 ); \
516 _zzq_orig->nraddr = __addr; \
519 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
520 __SPECIAL_INSTRUCTION_PREAMBLE \
521 /* branch-and-link-to-noredir *%R11 */ \
522 "or 3,3,3\n\t"
524 #define VALGRIND_VEX_INJECT_IR() \
525 do { \
526 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
527 "or 5,5,5\n\t" \
528 ); \
529 } while (0)
531 #endif /* PLAT_ppc32_linux */
533 /* ------------------------ ppc64-linux ------------------------ */
535 #if defined(PLAT_ppc64_linux)
537 typedef
538 struct {
539 unsigned long long int nraddr; /* where's the code? */
540 unsigned long long int r2; /* what tocptr do we need? */
542 OrigFn;
544 #define __SPECIAL_INSTRUCTION_PREAMBLE \
545 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
546 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
548 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
549 _zzq_default, _zzq_request, \
550 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
552 __extension__ \
553 ({ unsigned long long int _zzq_args[6]; \
554 unsigned long long int _zzq_result; \
555 unsigned long long int* _zzq_ptr; \
556 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
557 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
558 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
559 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
560 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
561 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
562 _zzq_ptr = _zzq_args; \
563 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
564 "mr 4,%2\n\t" /*ptr*/ \
565 __SPECIAL_INSTRUCTION_PREAMBLE \
566 /* %R3 = client_request ( %R4 ) */ \
567 "or 1,1,1\n\t" \
568 "mr %0,3" /*result*/ \
569 : "=b" (_zzq_result) \
570 : "b" (_zzq_default), "b" (_zzq_ptr) \
571 : "cc", "memory", "r3", "r4"); \
572 _zzq_result; \
575 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
576 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
577 unsigned long long int __addr; \
578 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
579 /* %R3 = guest_NRADDR */ \
580 "or 2,2,2\n\t" \
581 "mr %0,3" \
582 : "=b" (__addr) \
584 : "cc", "memory", "r3" \
585 ); \
586 _zzq_orig->nraddr = __addr; \
587 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
588 /* %R3 = guest_NRADDR_GPR2 */ \
589 "or 4,4,4\n\t" \
590 "mr %0,3" \
591 : "=b" (__addr) \
593 : "cc", "memory", "r3" \
594 ); \
595 _zzq_orig->r2 = __addr; \
598 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
599 __SPECIAL_INSTRUCTION_PREAMBLE \
600 /* branch-and-link-to-noredir *%R11 */ \
601 "or 3,3,3\n\t"
603 #define VALGRIND_VEX_INJECT_IR() \
604 do { \
605 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
606 "or 5,5,5\n\t" \
607 ); \
608 } while (0)
610 #endif /* PLAT_ppc64_linux */
612 /* ------------------------- arm-linux ------------------------- */
614 #if defined(PLAT_arm_linux)
616 typedef
617 struct {
618 unsigned int nraddr; /* where's the code? */
620 OrigFn;
622 #define __SPECIAL_INSTRUCTION_PREAMBLE \
623 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
624 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
626 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
627 _zzq_default, _zzq_request, \
628 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
630 __extension__ \
631 ({volatile unsigned int _zzq_args[6]; \
632 volatile unsigned int _zzq_result; \
633 _zzq_args[0] = (unsigned int)(_zzq_request); \
634 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
635 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
636 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
637 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
638 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
639 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
640 "mov r4, %2\n\t" /*ptr*/ \
641 __SPECIAL_INSTRUCTION_PREAMBLE \
642 /* R3 = client_request ( R4 ) */ \
643 "orr r10, r10, r10\n\t" \
644 "mov %0, r3" /*result*/ \
645 : "=r" (_zzq_result) \
646 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
647 : "cc","memory", "r3", "r4"); \
648 _zzq_result; \
651 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
652 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
653 unsigned int __addr; \
654 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
655 /* R3 = guest_NRADDR */ \
656 "orr r11, r11, r11\n\t" \
657 "mov %0, r3" \
658 : "=r" (__addr) \
660 : "cc", "memory", "r3" \
661 ); \
662 _zzq_orig->nraddr = __addr; \
665 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
666 __SPECIAL_INSTRUCTION_PREAMBLE \
667 /* branch-and-link-to-noredir *%R4 */ \
668 "orr r12, r12, r12\n\t"
670 #define VALGRIND_VEX_INJECT_IR() \
671 do { \
672 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
673 "orr r9, r9, r9\n\t" \
674 : : : "cc", "memory" \
675 ); \
676 } while (0)
678 #endif /* PLAT_arm_linux */
680 /* ------------------------ arm64-linux ------------------------- */
682 #if defined(PLAT_arm64_linux)
684 typedef
685 struct {
686 unsigned long long int nraddr; /* where's the code? */
688 OrigFn;
690 #define __SPECIAL_INSTRUCTION_PREAMBLE \
691 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
692 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
694 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
695 _zzq_default, _zzq_request, \
696 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
698 __extension__ \
699 ({volatile unsigned long long int _zzq_args[6]; \
700 volatile unsigned long long int _zzq_result; \
701 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
702 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
703 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
704 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
705 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
706 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
707 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
708 "mov x4, %2\n\t" /*ptr*/ \
709 __SPECIAL_INSTRUCTION_PREAMBLE \
710 /* X3 = client_request ( X4 ) */ \
711 "orr x10, x10, x10\n\t" \
712 "mov %0, x3" /*result*/ \
713 : "=r" (_zzq_result) \
714 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
715 : "cc","memory", "x3", "x4"); \
716 _zzq_result; \
719 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
720 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
721 unsigned long long int __addr; \
722 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
723 /* X3 = guest_NRADDR */ \
724 "orr x11, x11, x11\n\t" \
725 "mov %0, x3" \
726 : "=r" (__addr) \
728 : "cc", "memory", "x3" \
729 ); \
730 _zzq_orig->nraddr = __addr; \
733 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
734 __SPECIAL_INSTRUCTION_PREAMBLE \
735 /* branch-and-link-to-noredir X8 */ \
736 "orr x12, x12, x12\n\t"
738 #define VALGRIND_VEX_INJECT_IR() \
739 do { \
740 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
741 "orr x9, x9, x9\n\t" \
742 : : : "cc", "memory" \
743 ); \
744 } while (0)
746 #endif /* PLAT_arm64_linux */
748 /* ------------------------ s390x-linux ------------------------ */
750 #if defined(PLAT_s390x_linux)
752 typedef
753 struct {
754 unsigned long long int nraddr; /* where's the code? */
756 OrigFn;
758 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
759 * code. This detection is implemented in platform specific toIR.c
760 * (e.g. VEX/priv/guest_s390_decoder.c).
762 #define __SPECIAL_INSTRUCTION_PREAMBLE \
763 "lr 15,15\n\t" \
764 "lr 1,1\n\t" \
765 "lr 2,2\n\t" \
766 "lr 3,3\n\t"
768 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
769 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
770 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
771 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
773 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
774 _zzq_default, _zzq_request, \
775 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
776 __extension__ \
777 ({volatile unsigned long long int _zzq_args[6]; \
778 volatile unsigned long long int _zzq_result; \
779 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
780 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
781 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
782 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
783 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
784 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
785 __asm__ volatile(/* r2 = args */ \
786 "lgr 2,%1\n\t" \
787 /* r3 = default */ \
788 "lgr 3,%2\n\t" \
789 __SPECIAL_INSTRUCTION_PREAMBLE \
790 __CLIENT_REQUEST_CODE \
791 /* results = r3 */ \
792 "lgr %0, 3\n\t" \
793 : "=d" (_zzq_result) \
794 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
795 : "cc", "2", "3", "memory" \
796 ); \
797 _zzq_result; \
800 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
801 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
802 volatile unsigned long long int __addr; \
803 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
804 __GET_NR_CONTEXT_CODE \
805 "lgr %0, 3\n\t" \
806 : "=a" (__addr) \
808 : "cc", "3", "memory" \
809 ); \
810 _zzq_orig->nraddr = __addr; \
813 #define VALGRIND_CALL_NOREDIR_R1 \
814 __SPECIAL_INSTRUCTION_PREAMBLE \
815 __CALL_NO_REDIR_CODE
817 #define VALGRIND_VEX_INJECT_IR() \
818 do { \
819 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
820 __VEX_INJECT_IR_CODE); \
821 } while (0)
823 #endif /* PLAT_s390x_linux */
825 /* ------------------------- mips32-linux ---------------- */
827 #if defined(PLAT_mips32_linux)
829 typedef
830 struct {
831 unsigned int nraddr; /* where's the code? */
833 OrigFn;
835 /* .word 0x342
836 * .word 0x742
837 * .word 0xC2
838 * .word 0x4C2*/
839 #define __SPECIAL_INSTRUCTION_PREAMBLE \
840 "srl $0, $0, 13\n\t" \
841 "srl $0, $0, 29\n\t" \
842 "srl $0, $0, 3\n\t" \
843 "srl $0, $0, 19\n\t"
845 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
846 _zzq_default, _zzq_request, \
847 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
848 __extension__ \
849 ({ volatile unsigned int _zzq_args[6]; \
850 volatile unsigned int _zzq_result; \
851 _zzq_args[0] = (unsigned int)(_zzq_request); \
852 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
853 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
854 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
855 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
856 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
857 __asm__ volatile("move $11, %1\n\t" /*default*/ \
858 "move $12, %2\n\t" /*ptr*/ \
859 __SPECIAL_INSTRUCTION_PREAMBLE \
860 /* T3 = client_request ( T4 ) */ \
861 "or $13, $13, $13\n\t" \
862 "move %0, $11\n\t" /*result*/ \
863 : "=r" (_zzq_result) \
864 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
865 : "$11", "$12"); \
866 _zzq_result; \
869 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
870 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
871 volatile unsigned int __addr; \
872 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
873 /* %t9 = guest_NRADDR */ \
874 "or $14, $14, $14\n\t" \
875 "move %0, $11" /*result*/ \
876 : "=r" (__addr) \
878 : "$11" \
879 ); \
880 _zzq_orig->nraddr = __addr; \
883 #define VALGRIND_CALL_NOREDIR_T9 \
884 __SPECIAL_INSTRUCTION_PREAMBLE \
885 /* call-noredir *%t9 */ \
886 "or $15, $15, $15\n\t"
888 #define VALGRIND_VEX_INJECT_IR() \
889 do { \
890 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
891 "or $11, $11, $11\n\t" \
892 ); \
893 } while (0)
896 #endif /* PLAT_mips32_linux */
898 /* ------------------------- mips64-linux ---------------- */
900 #if defined(PLAT_mips64_linux)
902 typedef
903 struct {
904 unsigned long long nraddr; /* where's the code? */
906 OrigFn;
908 /* dsll $0,$0, 3
909 * dsll $0,$0, 13
910 * dsll $0,$0, 29
911 * dsll $0,$0, 19*/
912 #define __SPECIAL_INSTRUCTION_PREAMBLE \
913 "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
914 "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
916 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
917 _zzq_default, _zzq_request, \
918 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
919 __extension__ \
920 ({ volatile unsigned long long int _zzq_args[6]; \
921 volatile unsigned long long int _zzq_result; \
922 _zzq_args[0] = (unsigned long long int)(_zzq_request); \
923 _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
924 _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
925 _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
926 _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
927 _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
928 __asm__ volatile("move $11, %1\n\t" /*default*/ \
929 "move $12, %2\n\t" /*ptr*/ \
930 __SPECIAL_INSTRUCTION_PREAMBLE \
931 /* $11 = client_request ( $12 ) */ \
932 "or $13, $13, $13\n\t" \
933 "move %0, $11\n\t" /*result*/ \
934 : "=r" (_zzq_result) \
935 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
936 : "$11", "$12"); \
937 _zzq_result; \
940 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
941 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
942 volatile unsigned long long int __addr; \
943 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
944 /* $11 = guest_NRADDR */ \
945 "or $14, $14, $14\n\t" \
946 "move %0, $11" /*result*/ \
947 : "=r" (__addr) \
949 : "$11"); \
950 _zzq_orig->nraddr = __addr; \
953 #define VALGRIND_CALL_NOREDIR_T9 \
954 __SPECIAL_INSTRUCTION_PREAMBLE \
955 /* call-noredir $25 */ \
956 "or $15, $15, $15\n\t"
958 #define VALGRIND_VEX_INJECT_IR() \
959 do { \
960 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
961 "or $11, $11, $11\n\t" \
962 ); \
963 } while (0)
965 #endif /* PLAT_mips64_linux */
967 /* Insert assembly code for other platforms here... */
969 #endif /* NVALGRIND */
972 /* ------------------------------------------------------------------ */
973 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
974 /* ugly. It's the least-worst tradeoff I can think of. */
975 /* ------------------------------------------------------------------ */
977 /* This section defines magic (a.k.a appalling-hack) macros for doing
978 guaranteed-no-redirection macros, so as to get from function
979 wrappers to the functions they are wrapping. The whole point is to
980 construct standard call sequences, but to do the call itself with a
981 special no-redirect call pseudo-instruction that the JIT
982 understands and handles specially. This section is long and
983 repetitious, and I can't see a way to make it shorter.
985 The naming scheme is as follows:
987 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
989 'W' stands for "word" and 'v' for "void". Hence there are
990 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
991 and for each, the possibility of returning a word-typed result, or
992 no result.
995 /* Use these to write the name of your wrapper. NOTE: duplicates
996 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
997 the default behaviour equivalence class tag "0000" into the name.
998 See pub_tool_redir.h for details -- normally you don't need to
999 think about this, though. */
1001 /* Use an extra level of macroisation so as to ensure the soname/fnname
1002 args are fully macro-expanded before pasting them together. */
/* Paste four already-expanded tokens into one identifier. */
1003 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
/* Build the wrapper symbol name "_vgw00000ZU_<soname>_<fnname>".
   ZU vs ZZ selects one of the two name-encoding schemes — see
   pub_tool_redir.h, as noted above. */
1005 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
1006 VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
1008 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
1009 VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1011 /* Use this macro from within a wrapper function to collect the
1012 context (address and possibly other info) of the original function.
1013 Once you have that you can then use it in one of the CALL_FN_
1014 macros. The type of the argument _lval is OrigFn. */
/* Thin alias: capturing the original function's context is implemented
   by the platform-specific VALGRIND_GET_NR_CONTEXT defined earlier. */
1015 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1017 /* Also provide end-user facilities for function replacement, rather
1018 than wrapping. A replacement function differs from a wrapper in
1019 that it has no way to get hold of the original function being
1020 called, and hence no way to call onwards to it. In a replacement
1021 function, VALGRIND_GET_ORIG_FN always returns zero. */
/* Same naming scheme as I_WRAP_SONAME_FNNAME_*, but with the "_vgr"
   prefix marking a replacement (no onward call to the original — see the
   note above). */
1023 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1024 VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1026 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1027 VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1029 /* Derivatives of the main macros below, for calling functions
1030 returning void. */
/* void-returning convenience wrappers: each calls the corresponding
   word-returning CALL_FN_W_* macro and discards the result into a local
   _junk variable. */
1032 #define CALL_FN_v_v(fnptr) \
1033 do { volatile unsigned long _junk; \
1034 CALL_FN_W_v(_junk,fnptr); } while (0)
1036 #define CALL_FN_v_W(fnptr, arg1) \
1037 do { volatile unsigned long _junk; \
1038 CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1040 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1041 do { volatile unsigned long _junk; \
1042 CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1044 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1045 do { volatile unsigned long _junk; \
1046 CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1048 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1049 do { volatile unsigned long _junk; \
1050 CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1052 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1053 do { volatile unsigned long _junk; \
1054 CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1056 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1057 do { volatile unsigned long _junk; \
1058 CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1060 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1061 do { volatile unsigned long _junk; \
1062 CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1064 /* ------------------------- x86-{linux,darwin} ---------------- */
1066 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1068 /* These regs are trashed by the hidden call. No need to mention eax
1069 as gcc can already see that, plus causes gcc to bomb. */
1070 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1072 /* Macros to save and align the stack before making a function
1073 call and restore it afterwards as gcc may not keep the stack
1074 pointer aligned if it doesn't realise calls are being made
1075 to other functions. */
/* Saves %esp in %edi (which is why "edi" appears in every clobber list
   below) and rounds %esp down to a 16-byte boundary. */
1077 #define VALGRIND_ALIGN_STACK \
1078 "movl %%esp,%%edi\n\t" \
1079 "andl $0xfffffff0,%%esp\n\t"
/* Restores the %esp saved by VALGRIND_ALIGN_STACK. */
1080 #define VALGRIND_RESTORE_STACK \
1081 "movl %%edi,%%esp\n\t"
1083 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1084 long) == 4. */
/* x86: call _orig.nraddr with no args via the no-redirect pseudo-call;
   %eax carries &_argvec in, the target is loaded from _argvec[0], and
   the word result comes back in %eax. */
1086 #define CALL_FN_W_v(lval, orig) \
1087 do { \
1088 volatile OrigFn _orig = (orig); \
1089 volatile unsigned long _argvec[1]; \
1090 volatile unsigned long _res; \
1091 _argvec[0] = (unsigned long)_orig.nraddr; \
1092 __asm__ volatile( \
1093 VALGRIND_ALIGN_STACK \
1094 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1095 VALGRIND_CALL_NOREDIR_EAX \
1096 VALGRIND_RESTORE_STACK \
1097 : /*out*/ "=a" (_res) \
1098 : /*in*/ "a" (&_argvec[0]) \
1099 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1100 ); \
1101 lval = (__typeof__(lval)) _res; \
1102 } while (0)
/* x86, 1 word arg: "subl $12" + one pushl = 16 bytes, keeping %esp
   16-aligned at the call; arg pushed from _argvec[1]. */
1104 #define CALL_FN_W_W(lval, orig, arg1) \
1105 do { \
1106 volatile OrigFn _orig = (orig); \
1107 volatile unsigned long _argvec[2]; \
1108 volatile unsigned long _res; \
1109 _argvec[0] = (unsigned long)_orig.nraddr; \
1110 _argvec[1] = (unsigned long)(arg1); \
1111 __asm__ volatile( \
1112 VALGRIND_ALIGN_STACK \
1113 "subl $12, %%esp\n\t" \
1114 "pushl 4(%%eax)\n\t" \
1115 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1116 VALGRIND_CALL_NOREDIR_EAX \
1117 VALGRIND_RESTORE_STACK \
1118 : /*out*/ "=a" (_res) \
1119 : /*in*/ "a" (&_argvec[0]) \
1120 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1121 ); \
1122 lval = (__typeof__(lval)) _res; \
1123 } while (0)
/* x86, 2 word args: pad 8 + two pushls = 16; args pushed right-to-left
   from _argvec[2..1]. */
1125 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1126 do { \
1127 volatile OrigFn _orig = (orig); \
1128 volatile unsigned long _argvec[3]; \
1129 volatile unsigned long _res; \
1130 _argvec[0] = (unsigned long)_orig.nraddr; \
1131 _argvec[1] = (unsigned long)(arg1); \
1132 _argvec[2] = (unsigned long)(arg2); \
1133 __asm__ volatile( \
1134 VALGRIND_ALIGN_STACK \
1135 "subl $8, %%esp\n\t" \
1136 "pushl 8(%%eax)\n\t" \
1137 "pushl 4(%%eax)\n\t" \
1138 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1139 VALGRIND_CALL_NOREDIR_EAX \
1140 VALGRIND_RESTORE_STACK \
1141 : /*out*/ "=a" (_res) \
1142 : /*in*/ "a" (&_argvec[0]) \
1143 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1144 ); \
1145 lval = (__typeof__(lval)) _res; \
1146 } while (0)
/* x86, 3 word args: pad 4 + three pushls = 16; args pushed R-to-L from
   _argvec[3..1]. */
1148 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1149 do { \
1150 volatile OrigFn _orig = (orig); \
1151 volatile unsigned long _argvec[4]; \
1152 volatile unsigned long _res; \
1153 _argvec[0] = (unsigned long)_orig.nraddr; \
1154 _argvec[1] = (unsigned long)(arg1); \
1155 _argvec[2] = (unsigned long)(arg2); \
1156 _argvec[3] = (unsigned long)(arg3); \
1157 __asm__ volatile( \
1158 VALGRIND_ALIGN_STACK \
1159 "subl $4, %%esp\n\t" \
1160 "pushl 12(%%eax)\n\t" \
1161 "pushl 8(%%eax)\n\t" \
1162 "pushl 4(%%eax)\n\t" \
1163 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1164 VALGRIND_CALL_NOREDIR_EAX \
1165 VALGRIND_RESTORE_STACK \
1166 : /*out*/ "=a" (_res) \
1167 : /*in*/ "a" (&_argvec[0]) \
1168 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1169 ); \
1170 lval = (__typeof__(lval)) _res; \
1171 } while (0)
/* x86, 4 word args: four pushls = 16 bytes, no pad needed. */
1173 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1174 do { \
1175 volatile OrigFn _orig = (orig); \
1176 volatile unsigned long _argvec[5]; \
1177 volatile unsigned long _res; \
1178 _argvec[0] = (unsigned long)_orig.nraddr; \
1179 _argvec[1] = (unsigned long)(arg1); \
1180 _argvec[2] = (unsigned long)(arg2); \
1181 _argvec[3] = (unsigned long)(arg3); \
1182 _argvec[4] = (unsigned long)(arg4); \
1183 __asm__ volatile( \
1184 VALGRIND_ALIGN_STACK \
1185 "pushl 16(%%eax)\n\t" \
1186 "pushl 12(%%eax)\n\t" \
1187 "pushl 8(%%eax)\n\t" \
1188 "pushl 4(%%eax)\n\t" \
1189 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1190 VALGRIND_CALL_NOREDIR_EAX \
1191 VALGRIND_RESTORE_STACK \
1192 : /*out*/ "=a" (_res) \
1193 : /*in*/ "a" (&_argvec[0]) \
1194 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1195 ); \
1196 lval = (__typeof__(lval)) _res; \
1197 } while (0)
/* x86, 5 word args: pad 12 + five pushls = 32 bytes (16-aligned). */
1199 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1200 do { \
1201 volatile OrigFn _orig = (orig); \
1202 volatile unsigned long _argvec[6]; \
1203 volatile unsigned long _res; \
1204 _argvec[0] = (unsigned long)_orig.nraddr; \
1205 _argvec[1] = (unsigned long)(arg1); \
1206 _argvec[2] = (unsigned long)(arg2); \
1207 _argvec[3] = (unsigned long)(arg3); \
1208 _argvec[4] = (unsigned long)(arg4); \
1209 _argvec[5] = (unsigned long)(arg5); \
1210 __asm__ volatile( \
1211 VALGRIND_ALIGN_STACK \
1212 "subl $12, %%esp\n\t" \
1213 "pushl 20(%%eax)\n\t" \
1214 "pushl 16(%%eax)\n\t" \
1215 "pushl 12(%%eax)\n\t" \
1216 "pushl 8(%%eax)\n\t" \
1217 "pushl 4(%%eax)\n\t" \
1218 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1219 VALGRIND_CALL_NOREDIR_EAX \
1220 VALGRIND_RESTORE_STACK \
1221 : /*out*/ "=a" (_res) \
1222 : /*in*/ "a" (&_argvec[0]) \
1223 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1224 ); \
1225 lval = (__typeof__(lval)) _res; \
1226 } while (0)
/* x86, 6 word args: pad 8 + six pushls = 32 bytes (16-aligned). */
1228 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1229 do { \
1230 volatile OrigFn _orig = (orig); \
1231 volatile unsigned long _argvec[7]; \
1232 volatile unsigned long _res; \
1233 _argvec[0] = (unsigned long)_orig.nraddr; \
1234 _argvec[1] = (unsigned long)(arg1); \
1235 _argvec[2] = (unsigned long)(arg2); \
1236 _argvec[3] = (unsigned long)(arg3); \
1237 _argvec[4] = (unsigned long)(arg4); \
1238 _argvec[5] = (unsigned long)(arg5); \
1239 _argvec[6] = (unsigned long)(arg6); \
1240 __asm__ volatile( \
1241 VALGRIND_ALIGN_STACK \
1242 "subl $8, %%esp\n\t" \
1243 "pushl 24(%%eax)\n\t" \
1244 "pushl 20(%%eax)\n\t" \
1245 "pushl 16(%%eax)\n\t" \
1246 "pushl 12(%%eax)\n\t" \
1247 "pushl 8(%%eax)\n\t" \
1248 "pushl 4(%%eax)\n\t" \
1249 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1250 VALGRIND_CALL_NOREDIR_EAX \
1251 VALGRIND_RESTORE_STACK \
1252 : /*out*/ "=a" (_res) \
1253 : /*in*/ "a" (&_argvec[0]) \
1254 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1255 ); \
1256 lval = (__typeof__(lval)) _res; \
1257 } while (0)
/* x86, 7 word args: pad 4 + seven pushls = 32 bytes (16-aligned). */
1259 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1260 arg7) \
1261 do { \
1262 volatile OrigFn _orig = (orig); \
1263 volatile unsigned long _argvec[8]; \
1264 volatile unsigned long _res; \
1265 _argvec[0] = (unsigned long)_orig.nraddr; \
1266 _argvec[1] = (unsigned long)(arg1); \
1267 _argvec[2] = (unsigned long)(arg2); \
1268 _argvec[3] = (unsigned long)(arg3); \
1269 _argvec[4] = (unsigned long)(arg4); \
1270 _argvec[5] = (unsigned long)(arg5); \
1271 _argvec[6] = (unsigned long)(arg6); \
1272 _argvec[7] = (unsigned long)(arg7); \
1273 __asm__ volatile( \
1274 VALGRIND_ALIGN_STACK \
1275 "subl $4, %%esp\n\t" \
1276 "pushl 28(%%eax)\n\t" \
1277 "pushl 24(%%eax)\n\t" \
1278 "pushl 20(%%eax)\n\t" \
1279 "pushl 16(%%eax)\n\t" \
1280 "pushl 12(%%eax)\n\t" \
1281 "pushl 8(%%eax)\n\t" \
1282 "pushl 4(%%eax)\n\t" \
1283 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1284 VALGRIND_CALL_NOREDIR_EAX \
1285 VALGRIND_RESTORE_STACK \
1286 : /*out*/ "=a" (_res) \
1287 : /*in*/ "a" (&_argvec[0]) \
1288 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1289 ); \
1290 lval = (__typeof__(lval)) _res; \
1291 } while (0)
/* x86, 8 word args: eight pushls = 32 bytes, no pad needed. */
1293 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1294 arg7,arg8) \
1295 do { \
1296 volatile OrigFn _orig = (orig); \
1297 volatile unsigned long _argvec[9]; \
1298 volatile unsigned long _res; \
1299 _argvec[0] = (unsigned long)_orig.nraddr; \
1300 _argvec[1] = (unsigned long)(arg1); \
1301 _argvec[2] = (unsigned long)(arg2); \
1302 _argvec[3] = (unsigned long)(arg3); \
1303 _argvec[4] = (unsigned long)(arg4); \
1304 _argvec[5] = (unsigned long)(arg5); \
1305 _argvec[6] = (unsigned long)(arg6); \
1306 _argvec[7] = (unsigned long)(arg7); \
1307 _argvec[8] = (unsigned long)(arg8); \
1308 __asm__ volatile( \
1309 VALGRIND_ALIGN_STACK \
1310 "pushl 32(%%eax)\n\t" \
1311 "pushl 28(%%eax)\n\t" \
1312 "pushl 24(%%eax)\n\t" \
1313 "pushl 20(%%eax)\n\t" \
1314 "pushl 16(%%eax)\n\t" \
1315 "pushl 12(%%eax)\n\t" \
1316 "pushl 8(%%eax)\n\t" \
1317 "pushl 4(%%eax)\n\t" \
1318 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1319 VALGRIND_CALL_NOREDIR_EAX \
1320 VALGRIND_RESTORE_STACK \
1321 : /*out*/ "=a" (_res) \
1322 : /*in*/ "a" (&_argvec[0]) \
1323 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1324 ); \
1325 lval = (__typeof__(lval)) _res; \
1326 } while (0)
/* x86, 9 word args: pad 12 + nine pushls = 48 bytes (16-aligned). */
1328 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1329 arg7,arg8,arg9) \
1330 do { \
1331 volatile OrigFn _orig = (orig); \
1332 volatile unsigned long _argvec[10]; \
1333 volatile unsigned long _res; \
1334 _argvec[0] = (unsigned long)_orig.nraddr; \
1335 _argvec[1] = (unsigned long)(arg1); \
1336 _argvec[2] = (unsigned long)(arg2); \
1337 _argvec[3] = (unsigned long)(arg3); \
1338 _argvec[4] = (unsigned long)(arg4); \
1339 _argvec[5] = (unsigned long)(arg5); \
1340 _argvec[6] = (unsigned long)(arg6); \
1341 _argvec[7] = (unsigned long)(arg7); \
1342 _argvec[8] = (unsigned long)(arg8); \
1343 _argvec[9] = (unsigned long)(arg9); \
1344 __asm__ volatile( \
1345 VALGRIND_ALIGN_STACK \
1346 "subl $12, %%esp\n\t" \
1347 "pushl 36(%%eax)\n\t" \
1348 "pushl 32(%%eax)\n\t" \
1349 "pushl 28(%%eax)\n\t" \
1350 "pushl 24(%%eax)\n\t" \
1351 "pushl 20(%%eax)\n\t" \
1352 "pushl 16(%%eax)\n\t" \
1353 "pushl 12(%%eax)\n\t" \
1354 "pushl 8(%%eax)\n\t" \
1355 "pushl 4(%%eax)\n\t" \
1356 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1357 VALGRIND_CALL_NOREDIR_EAX \
1358 VALGRIND_RESTORE_STACK \
1359 : /*out*/ "=a" (_res) \
1360 : /*in*/ "a" (&_argvec[0]) \
1361 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1362 ); \
1363 lval = (__typeof__(lval)) _res; \
1364 } while (0)
/* x86, 10 word args: pad 8 + ten pushls = 48 bytes (16-aligned). */
1366 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1367 arg7,arg8,arg9,arg10) \
1368 do { \
1369 volatile OrigFn _orig = (orig); \
1370 volatile unsigned long _argvec[11]; \
1371 volatile unsigned long _res; \
1372 _argvec[0] = (unsigned long)_orig.nraddr; \
1373 _argvec[1] = (unsigned long)(arg1); \
1374 _argvec[2] = (unsigned long)(arg2); \
1375 _argvec[3] = (unsigned long)(arg3); \
1376 _argvec[4] = (unsigned long)(arg4); \
1377 _argvec[5] = (unsigned long)(arg5); \
1378 _argvec[6] = (unsigned long)(arg6); \
1379 _argvec[7] = (unsigned long)(arg7); \
1380 _argvec[8] = (unsigned long)(arg8); \
1381 _argvec[9] = (unsigned long)(arg9); \
1382 _argvec[10] = (unsigned long)(arg10); \
1383 __asm__ volatile( \
1384 VALGRIND_ALIGN_STACK \
1385 "subl $8, %%esp\n\t" \
1386 "pushl 40(%%eax)\n\t" \
1387 "pushl 36(%%eax)\n\t" \
1388 "pushl 32(%%eax)\n\t" \
1389 "pushl 28(%%eax)\n\t" \
1390 "pushl 24(%%eax)\n\t" \
1391 "pushl 20(%%eax)\n\t" \
1392 "pushl 16(%%eax)\n\t" \
1393 "pushl 12(%%eax)\n\t" \
1394 "pushl 8(%%eax)\n\t" \
1395 "pushl 4(%%eax)\n\t" \
1396 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1397 VALGRIND_CALL_NOREDIR_EAX \
1398 VALGRIND_RESTORE_STACK \
1399 : /*out*/ "=a" (_res) \
1400 : /*in*/ "a" (&_argvec[0]) \
1401 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1402 ); \
1403 lval = (__typeof__(lval)) _res; \
1404 } while (0)
/* x86, 11 word args: pad 4 + eleven pushls = 48 bytes (16-aligned). */
1406 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1407 arg6,arg7,arg8,arg9,arg10, \
1408 arg11) \
1409 do { \
1410 volatile OrigFn _orig = (orig); \
1411 volatile unsigned long _argvec[12]; \
1412 volatile unsigned long _res; \
1413 _argvec[0] = (unsigned long)_orig.nraddr; \
1414 _argvec[1] = (unsigned long)(arg1); \
1415 _argvec[2] = (unsigned long)(arg2); \
1416 _argvec[3] = (unsigned long)(arg3); \
1417 _argvec[4] = (unsigned long)(arg4); \
1418 _argvec[5] = (unsigned long)(arg5); \
1419 _argvec[6] = (unsigned long)(arg6); \
1420 _argvec[7] = (unsigned long)(arg7); \
1421 _argvec[8] = (unsigned long)(arg8); \
1422 _argvec[9] = (unsigned long)(arg9); \
1423 _argvec[10] = (unsigned long)(arg10); \
1424 _argvec[11] = (unsigned long)(arg11); \
1425 __asm__ volatile( \
1426 VALGRIND_ALIGN_STACK \
1427 "subl $4, %%esp\n\t" \
1428 "pushl 44(%%eax)\n\t" \
1429 "pushl 40(%%eax)\n\t" \
1430 "pushl 36(%%eax)\n\t" \
1431 "pushl 32(%%eax)\n\t" \
1432 "pushl 28(%%eax)\n\t" \
1433 "pushl 24(%%eax)\n\t" \
1434 "pushl 20(%%eax)\n\t" \
1435 "pushl 16(%%eax)\n\t" \
1436 "pushl 12(%%eax)\n\t" \
1437 "pushl 8(%%eax)\n\t" \
1438 "pushl 4(%%eax)\n\t" \
1439 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1440 VALGRIND_CALL_NOREDIR_EAX \
1441 VALGRIND_RESTORE_STACK \
1442 : /*out*/ "=a" (_res) \
1443 : /*in*/ "a" (&_argvec[0]) \
1444 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1445 ); \
1446 lval = (__typeof__(lval)) _res; \
1447 } while (0)
/* x86, 12 word args: twelve pushls = 48 bytes, no pad needed. */
1449 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1450 arg6,arg7,arg8,arg9,arg10, \
1451 arg11,arg12) \
1452 do { \
1453 volatile OrigFn _orig = (orig); \
1454 volatile unsigned long _argvec[13]; \
1455 volatile unsigned long _res; \
1456 _argvec[0] = (unsigned long)_orig.nraddr; \
1457 _argvec[1] = (unsigned long)(arg1); \
1458 _argvec[2] = (unsigned long)(arg2); \
1459 _argvec[3] = (unsigned long)(arg3); \
1460 _argvec[4] = (unsigned long)(arg4); \
1461 _argvec[5] = (unsigned long)(arg5); \
1462 _argvec[6] = (unsigned long)(arg6); \
1463 _argvec[7] = (unsigned long)(arg7); \
1464 _argvec[8] = (unsigned long)(arg8); \
1465 _argvec[9] = (unsigned long)(arg9); \
1466 _argvec[10] = (unsigned long)(arg10); \
1467 _argvec[11] = (unsigned long)(arg11); \
1468 _argvec[12] = (unsigned long)(arg12); \
1469 __asm__ volatile( \
1470 VALGRIND_ALIGN_STACK \
1471 "pushl 48(%%eax)\n\t" \
1472 "pushl 44(%%eax)\n\t" \
1473 "pushl 40(%%eax)\n\t" \
1474 "pushl 36(%%eax)\n\t" \
1475 "pushl 32(%%eax)\n\t" \
1476 "pushl 28(%%eax)\n\t" \
1477 "pushl 24(%%eax)\n\t" \
1478 "pushl 20(%%eax)\n\t" \
1479 "pushl 16(%%eax)\n\t" \
1480 "pushl 12(%%eax)\n\t" \
1481 "pushl 8(%%eax)\n\t" \
1482 "pushl 4(%%eax)\n\t" \
1483 "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1484 VALGRIND_CALL_NOREDIR_EAX \
1485 VALGRIND_RESTORE_STACK \
1486 : /*out*/ "=a" (_res) \
1487 : /*in*/ "a" (&_argvec[0]) \
1488 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1489 ); \
1490 lval = (__typeof__(lval)) _res; \
1491 } while (0)
1493 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1495 /* ------------------------ amd64-{linux,darwin} --------------- */
1497 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1499 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1501 /* These regs are trashed by the hidden call. */
1502 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1503 "rdi", "r8", "r9", "r10", "r11"
1505 /* This is all pretty complex. It's so as to make stack unwinding
1506 work reliably. See bug 243270. The basic problem is the sub and
1507 add of 128 of %rsp in all of the following macros. If gcc believes
1508 the CFA is in %rsp, then unwinding may fail, because what's at the
1509 CFA is not what gcc "expected" when it constructs the CFIs for the
1510 places where the macros are instantiated.
1512 But we can't just add a CFI annotation to increase the CFA offset
1513 by 128, to match the sub of 128 from %rsp, because we don't know
1514 whether gcc has chosen %rsp as the CFA at that point, or whether it
1515 has chosen some other register (eg, %rbp). In the latter case,
1516 adding a CFI annotation to change the CFA offset is simply wrong.
1518 So the solution is to get hold of the CFA using
1519 __builtin_dwarf_cfa(), put it in a known register, and add a
1520 CFI annotation to say what the register is. We choose %rbp for
1521 this (perhaps perversely), because:
1523 (1) %rbp is already subject to unwinding. If a new register was
1524 chosen then the unwinder would have to unwind it in all stack
1525 traces, which is expensive, and
1527 (2) %rbp is already subject to precise exception updates in the
1528 JIT. If a new register was chosen, we'd have to have precise
1529 exceptions for it too, which reduces performance of the
1530 generated code.
1532 However .. one extra complication. We can't just whack the result
1533 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1534 list of trashed registers at the end of the inline assembly
1535 fragments; gcc won't allow %rbp to appear in that list. Hence
1536 instead we need to stash %rbp in %r15 for the duration of the asm,
1537 and say that %r15 is trashed instead. gcc seems happy to go with
1538 that.
1540 Oh .. and this all needs to be conditionalised so that it is
1541 unchanged from before this commit, when compiled with older gccs
1542 that don't support __builtin_dwarf_cfa. Furthermore, since
1543 this header file is freestanding, it has to be independent of
1544 config.h, and so the following conditionalisation cannot depend on
1545 configure time checks.
1547 Although it's not clear from
1548 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1549 this expression excludes Darwin.
1550 .cfi directives in Darwin assembly appear to be completely
1551 different and I haven't investigated how they work.
1553 For even more entertainment value, note we have to use the
1554 completely undocumented __builtin_dwarf_cfa(), which appears to
1555 really compute the CFA, whereas __builtin_frame_address(0) claims
1556 to but actually doesn't. See
1557 https://bugs.kde.org/show_bug.cgi?id=243270#c47
/* See the long rationale above: when .cfi asm is available, pass the
   CFA (from __builtin_dwarf_cfa) in as %2, stash the old %rbp in %r15,
   point %rbp at the CFA and emit .cfi directives so the stack stays
   unwindable across the hidden call; otherwise all three expand to
   nothing. */
1559 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
1560 # define __FRAME_POINTER \
1561 ,"r"(__builtin_dwarf_cfa())
1562 # define VALGRIND_CFI_PROLOGUE \
1563 "movq %%rbp, %%r15\n\t" \
1564 "movq %2, %%rbp\n\t" \
1565 ".cfi_remember_state\n\t" \
1566 ".cfi_def_cfa rbp, 0\n\t"
1567 # define VALGRIND_CFI_EPILOGUE \
1568 "movq %%r15, %%rbp\n\t" \
1569 ".cfi_restore_state\n\t"
1570 #else
1571 # define __FRAME_POINTER
1572 # define VALGRIND_CFI_PROLOGUE
1573 # define VALGRIND_CFI_EPILOGUE
1574 #endif
1576 /* Macros to save and align the stack before making a function
1577 call and restore it afterwards as gcc may not keep the stack
1578 pointer aligned if it doesn't realise calls are being made
1579 to other functions. */
/* Saves %rsp in %r14 (which is why "r14" appears in every clobber list
   below) and rounds %rsp down to a 16-byte boundary. */
1581 #define VALGRIND_ALIGN_STACK \
1582 "movq %%rsp,%%r14\n\t" \
1583 "andq $0xfffffffffffffff0,%%rsp\n\t"
/* Restores the %rsp saved by VALGRIND_ALIGN_STACK. */
1584 #define VALGRIND_RESTORE_STACK \
1585 "movq %%r14,%%rsp\n\t"
1587 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1588 long) == 8. */
1590 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1591 macros. In order not to trash the stack redzone, we need to drop
1592 %rsp by 128 before the hidden call, and restore afterwards. The
1593 nastiness is that it is only by luck that the stack still appears
1594 to be unwindable during the hidden call - since then the behaviour
1595 of any routine using this macro does not match what the CFI data
1596 says. Sigh.
1598 Why is this important? Imagine that a wrapper has a stack
1599 allocated local, and passes to the hidden call, a pointer to it.
1600 Because gcc does not know about the hidden call, it may allocate
1601 that local in the redzone. Unfortunately the hidden call may then
1602 trash it before it comes to use it. So we must step clear of the
1603 redzone, for the duration of the hidden call, to make it safe.
1605 Probably the same problem afflicts the other redzone-style ABIs too
1606 (ppc64-linux); but for those, the stack is
1607 self describing (none of this CFI nonsense) so at least messing
1608 with the stack pointer doesn't give a danger of non-unwindable
1609 stack. */
/* amd64: no args; "subq $128" steps over the red zone (see the note
   above); target loaded from _argvec[0] via %rax, result back in
   %rax. */
1611 #define CALL_FN_W_v(lval, orig) \
1612 do { \
1613 volatile OrigFn _orig = (orig); \
1614 volatile unsigned long _argvec[1]; \
1615 volatile unsigned long _res; \
1616 _argvec[0] = (unsigned long)_orig.nraddr; \
1617 __asm__ volatile( \
1618 VALGRIND_CFI_PROLOGUE \
1619 VALGRIND_ALIGN_STACK \
1620 "subq $128,%%rsp\n\t" \
1621 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1622 VALGRIND_CALL_NOREDIR_RAX \
1623 VALGRIND_RESTORE_STACK \
1624 VALGRIND_CFI_EPILOGUE \
1625 : /*out*/ "=a" (_res) \
1626 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1627 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1628 ); \
1629 lval = (__typeof__(lval)) _res; \
1630 } while (0)
/* amd64, 1 arg: arg1 in %rdi per the SysV ABI register order listed
   above. */
1632 #define CALL_FN_W_W(lval, orig, arg1) \
1633 do { \
1634 volatile OrigFn _orig = (orig); \
1635 volatile unsigned long _argvec[2]; \
1636 volatile unsigned long _res; \
1637 _argvec[0] = (unsigned long)_orig.nraddr; \
1638 _argvec[1] = (unsigned long)(arg1); \
1639 __asm__ volatile( \
1640 VALGRIND_CFI_PROLOGUE \
1641 VALGRIND_ALIGN_STACK \
1642 "subq $128,%%rsp\n\t" \
1643 "movq 8(%%rax), %%rdi\n\t" \
1644 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1645 VALGRIND_CALL_NOREDIR_RAX \
1646 VALGRIND_RESTORE_STACK \
1647 VALGRIND_CFI_EPILOGUE \
1648 : /*out*/ "=a" (_res) \
1649 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1650 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1651 ); \
1652 lval = (__typeof__(lval)) _res; \
1653 } while (0)
/* amd64, 2 args: %rdi, %rsi. */
1655 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1656 do { \
1657 volatile OrigFn _orig = (orig); \
1658 volatile unsigned long _argvec[3]; \
1659 volatile unsigned long _res; \
1660 _argvec[0] = (unsigned long)_orig.nraddr; \
1661 _argvec[1] = (unsigned long)(arg1); \
1662 _argvec[2] = (unsigned long)(arg2); \
1663 __asm__ volatile( \
1664 VALGRIND_CFI_PROLOGUE \
1665 VALGRIND_ALIGN_STACK \
1666 "subq $128,%%rsp\n\t" \
1667 "movq 16(%%rax), %%rsi\n\t" \
1668 "movq 8(%%rax), %%rdi\n\t" \
1669 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1670 VALGRIND_CALL_NOREDIR_RAX \
1671 VALGRIND_RESTORE_STACK \
1672 VALGRIND_CFI_EPILOGUE \
1673 : /*out*/ "=a" (_res) \
1674 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1675 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1676 ); \
1677 lval = (__typeof__(lval)) _res; \
1678 } while (0)
/* amd64, 3 args: %rdi, %rsi, %rdx. */
1680 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1681 do { \
1682 volatile OrigFn _orig = (orig); \
1683 volatile unsigned long _argvec[4]; \
1684 volatile unsigned long _res; \
1685 _argvec[0] = (unsigned long)_orig.nraddr; \
1686 _argvec[1] = (unsigned long)(arg1); \
1687 _argvec[2] = (unsigned long)(arg2); \
1688 _argvec[3] = (unsigned long)(arg3); \
1689 __asm__ volatile( \
1690 VALGRIND_CFI_PROLOGUE \
1691 VALGRIND_ALIGN_STACK \
1692 "subq $128,%%rsp\n\t" \
1693 "movq 24(%%rax), %%rdx\n\t" \
1694 "movq 16(%%rax), %%rsi\n\t" \
1695 "movq 8(%%rax), %%rdi\n\t" \
1696 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1697 VALGRIND_CALL_NOREDIR_RAX \
1698 VALGRIND_RESTORE_STACK \
1699 VALGRIND_CFI_EPILOGUE \
1700 : /*out*/ "=a" (_res) \
1701 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1702 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1703 ); \
1704 lval = (__typeof__(lval)) _res; \
1705 } while (0)
/* amd64, 4 args: %rdi, %rsi, %rdx, %rcx. */
1707 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1708 do { \
1709 volatile OrigFn _orig = (orig); \
1710 volatile unsigned long _argvec[5]; \
1711 volatile unsigned long _res; \
1712 _argvec[0] = (unsigned long)_orig.nraddr; \
1713 _argvec[1] = (unsigned long)(arg1); \
1714 _argvec[2] = (unsigned long)(arg2); \
1715 _argvec[3] = (unsigned long)(arg3); \
1716 _argvec[4] = (unsigned long)(arg4); \
1717 __asm__ volatile( \
1718 VALGRIND_CFI_PROLOGUE \
1719 VALGRIND_ALIGN_STACK \
1720 "subq $128,%%rsp\n\t" \
1721 "movq 32(%%rax), %%rcx\n\t" \
1722 "movq 24(%%rax), %%rdx\n\t" \
1723 "movq 16(%%rax), %%rsi\n\t" \
1724 "movq 8(%%rax), %%rdi\n\t" \
1725 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1726 VALGRIND_CALL_NOREDIR_RAX \
1727 VALGRIND_RESTORE_STACK \
1728 VALGRIND_CFI_EPILOGUE \
1729 : /*out*/ "=a" (_res) \
1730 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1731 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1732 ); \
1733 lval = (__typeof__(lval)) _res; \
1734 } while (0)
/* amd64, 5 args: %rdi, %rsi, %rdx, %rcx, %r8. */
1736 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1737 do { \
1738 volatile OrigFn _orig = (orig); \
1739 volatile unsigned long _argvec[6]; \
1740 volatile unsigned long _res; \
1741 _argvec[0] = (unsigned long)_orig.nraddr; \
1742 _argvec[1] = (unsigned long)(arg1); \
1743 _argvec[2] = (unsigned long)(arg2); \
1744 _argvec[3] = (unsigned long)(arg3); \
1745 _argvec[4] = (unsigned long)(arg4); \
1746 _argvec[5] = (unsigned long)(arg5); \
1747 __asm__ volatile( \
1748 VALGRIND_CFI_PROLOGUE \
1749 VALGRIND_ALIGN_STACK \
1750 "subq $128,%%rsp\n\t" \
1751 "movq 40(%%rax), %%r8\n\t" \
1752 "movq 32(%%rax), %%rcx\n\t" \
1753 "movq 24(%%rax), %%rdx\n\t" \
1754 "movq 16(%%rax), %%rsi\n\t" \
1755 "movq 8(%%rax), %%rdi\n\t" \
1756 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1757 VALGRIND_CALL_NOREDIR_RAX \
1758 VALGRIND_RESTORE_STACK \
1759 VALGRIND_CFI_EPILOGUE \
1760 : /*out*/ "=a" (_res) \
1761 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1762 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1763 ); \
1764 lval = (__typeof__(lval)) _res; \
1765 } while (0)
/* amd64, 6 args: all six ABI registers %rdi..%r9; nothing on the
   stack. */
1767 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1768 do { \
1769 volatile OrigFn _orig = (orig); \
1770 volatile unsigned long _argvec[7]; \
1771 volatile unsigned long _res; \
1772 _argvec[0] = (unsigned long)_orig.nraddr; \
1773 _argvec[1] = (unsigned long)(arg1); \
1774 _argvec[2] = (unsigned long)(arg2); \
1775 _argvec[3] = (unsigned long)(arg3); \
1776 _argvec[4] = (unsigned long)(arg4); \
1777 _argvec[5] = (unsigned long)(arg5); \
1778 _argvec[6] = (unsigned long)(arg6); \
1779 __asm__ volatile( \
1780 VALGRIND_CFI_PROLOGUE \
1781 VALGRIND_ALIGN_STACK \
1782 "subq $128,%%rsp\n\t" \
1783 "movq 48(%%rax), %%r9\n\t" \
1784 "movq 40(%%rax), %%r8\n\t" \
1785 "movq 32(%%rax), %%rcx\n\t" \
1786 "movq 24(%%rax), %%rdx\n\t" \
1787 "movq 16(%%rax), %%rsi\n\t" \
1788 "movq 8(%%rax), %%rdi\n\t" \
1789 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1790 VALGRIND_CALL_NOREDIR_RAX \
1791 VALGRIND_RESTORE_STACK \
1792 VALGRIND_CFI_EPILOGUE \
1793 : /*out*/ "=a" (_res) \
1794 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1795 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1796 ); \
1797 lval = (__typeof__(lval)) _res; \
1798 } while (0)
/* amd64, 7 args: arg7 goes on the stack; subq $136 (= 128 red zone + 8
   pad) so that the single pushq leaves %rsp 16-aligned at the call. */
1800 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1801 arg7) \
1802 do { \
1803 volatile OrigFn _orig = (orig); \
1804 volatile unsigned long _argvec[8]; \
1805 volatile unsigned long _res; \
1806 _argvec[0] = (unsigned long)_orig.nraddr; \
1807 _argvec[1] = (unsigned long)(arg1); \
1808 _argvec[2] = (unsigned long)(arg2); \
1809 _argvec[3] = (unsigned long)(arg3); \
1810 _argvec[4] = (unsigned long)(arg4); \
1811 _argvec[5] = (unsigned long)(arg5); \
1812 _argvec[6] = (unsigned long)(arg6); \
1813 _argvec[7] = (unsigned long)(arg7); \
1814 __asm__ volatile( \
1815 VALGRIND_CFI_PROLOGUE \
1816 VALGRIND_ALIGN_STACK \
1817 "subq $136,%%rsp\n\t" \
1818 "pushq 56(%%rax)\n\t" \
1819 "movq 48(%%rax), %%r9\n\t" \
1820 "movq 40(%%rax), %%r8\n\t" \
1821 "movq 32(%%rax), %%rcx\n\t" \
1822 "movq 24(%%rax), %%rdx\n\t" \
1823 "movq 16(%%rax), %%rsi\n\t" \
1824 "movq 8(%%rax), %%rdi\n\t" \
1825 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1826 VALGRIND_CALL_NOREDIR_RAX \
1827 VALGRIND_RESTORE_STACK \
1828 VALGRIND_CFI_EPILOGUE \
1829 : /*out*/ "=a" (_res) \
1830 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1831 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1832 ); \
1833 lval = (__typeof__(lval)) _res; \
1834 } while (0)
/* amd64, 8 args: args 7-8 pushed R-to-L; two pushqs = 16 bytes, so the
   plain subq $128 keeps %rsp 16-aligned. */
1836 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1837 arg7,arg8) \
1838 do { \
1839 volatile OrigFn _orig = (orig); \
1840 volatile unsigned long _argvec[9]; \
1841 volatile unsigned long _res; \
1842 _argvec[0] = (unsigned long)_orig.nraddr; \
1843 _argvec[1] = (unsigned long)(arg1); \
1844 _argvec[2] = (unsigned long)(arg2); \
1845 _argvec[3] = (unsigned long)(arg3); \
1846 _argvec[4] = (unsigned long)(arg4); \
1847 _argvec[5] = (unsigned long)(arg5); \
1848 _argvec[6] = (unsigned long)(arg6); \
1849 _argvec[7] = (unsigned long)(arg7); \
1850 _argvec[8] = (unsigned long)(arg8); \
1851 __asm__ volatile( \
1852 VALGRIND_CFI_PROLOGUE \
1853 VALGRIND_ALIGN_STACK \
1854 "subq $128,%%rsp\n\t" \
1855 "pushq 64(%%rax)\n\t" \
1856 "pushq 56(%%rax)\n\t" \
1857 "movq 48(%%rax), %%r9\n\t" \
1858 "movq 40(%%rax), %%r8\n\t" \
1859 "movq 32(%%rax), %%rcx\n\t" \
1860 "movq 24(%%rax), %%rdx\n\t" \
1861 "movq 16(%%rax), %%rsi\n\t" \
1862 "movq 8(%%rax), %%rdi\n\t" \
1863 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1864 VALGRIND_CALL_NOREDIR_RAX \
1865 VALGRIND_RESTORE_STACK \
1866 VALGRIND_CFI_EPILOGUE \
1867 : /*out*/ "=a" (_res) \
1868 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1869 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1870 ); \
1871 lval = (__typeof__(lval)) _res; \
1872 } while (0)
/* Call a 9-argument word function.  amd64: args 1..6 in registers,
   args 7..9 pushed last-first.  The pre-push adjustment is 136 here
   (not 128) because an odd number of 8-byte pushes follows: 136 + 3*8
   = 160, keeping rsp 16-byte aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 10-argument word function.  amd64: args 1..6 in registers,
   args 7..10 pushed last-first (128 + 4*8 = 160 keeps 16-byte
   alignment). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 11-argument word function.  amd64: args 1..6 in registers,
   args 7..11 pushed last-first (136 + 5*8 = 176 keeps 16-byte
   alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" \
         "pushq 88(%%rax)\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 12-argument word function.  amd64: args 1..6 in registers,
   args 7..12 pushed last-first (128 + 6*8 = 176 keeps 16-byte
   alignment). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 96(%%rax)\n\t" \
         "pushq 88(%%rax)\n\t" \
         "pushq 80(%%rax)\n\t" \
         "pushq 72(%%rax)\n\t" \
         "pushq 64(%%rax)\n\t" \
         "pushq 56(%%rax)\n\t" \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/   "=a" (_res) \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2046 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2048 /* ------------------------ ppc32-linux ------------------------ */
2050 #if defined(PLAT_ppc32_linux)
/* This is useful for finding out about the on-stack stuff:

   extern int f9  ( int,int,int,int,int,int,int,int,int );
   extern int f10 ( int,int,int,int,int,int,int,int,int,int );
   extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
   extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );

   int g9 ( void ) {
      return f9(11,22,33,44,55,66,77,88,99);
   }
   int g10 ( void ) {
      return f10(11,22,33,44,55,66,77,88,99,110);
   }
   int g11 ( void ) {
      return f11(11,22,33,44,55,66,77,88,99,110,121);
   }
   int g12 ( void ) {
      return f12(11,22,33,44,55,66,77,88,99,110,121,132);
   }
*/
2073 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2075 /* These regs are trashed by the hidden call. */
/* ppc32: registers the hidden call may trash; listed as asm clobbers
   in every CALL_FN_ macro below so gcc does not cache values in them
   across the call. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2082 /* Macros to save and align the stack before making a function
2083 call and restore it afterwards as gcc may not keep the stack
2084 pointer aligned if it doesn't realise calls are being made
2085 to other functions. */
/* Save the stack pointer (r1) in r28, then clear its low 4 bits
   (rlwinm keeps bits 0..27) to force 16-byte alignment; r28 is in the
   clobber lists of the CALL_FN_ macros for this reason. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rlwinm 1,1,0,0,27\n\t"
/* Restore the pre-alignment stack pointer from r28. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2093 /* These CALL_FN_ macros assume that on ppc32-linux,
2094 sizeof(unsigned long) == 4. */
/* Call a zero-argument word function.  ppc32: r11 <- &_argvec[0], the
   target address is loaded from slot 0 and branched to via the
   NOREDIR thunk; the word result is copied out of r3. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 1-argument word function.  ppc32: arg1 is loaded from
   _argvec[1] (offset 4 from r11) into r3; result returns in r3. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 2-argument word function.  ppc32: args go in r3,r4, loaded
   from argvec slots 1..2 at offsets 4 and 8 from r11. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 3-argument word function.  ppc32: args go in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 4-argument word function.  ppc32: args go in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 5-argument word function.  ppc32: args go in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 6-argument word function.  ppc32: args go in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 7-argument word function.  ppc32: args go in r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 8-argument word function.  ppc32: all eight args fit in the
   parameter registers r3..r10; nothing goes on the stack. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 9-argument word function.  ppc32: args 1..8 go in r3..r10;
   arg9 overflows to the stack — 16 bytes are carved below the aligned
   SP and arg9 is stored at 8(1) (presumably the start of the SVR4
   ppc32 parameter save area — confirm against the ABI doc). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 10-argument word function.  ppc32: args 1..8 in r3..r10;
   args 9..10 are stored to the stack at 8(1) and 12(1) after carving
   16 bytes below the aligned SP.  r3 is used as a scratch register
   for the stores, so it is loaded with arg1 only afterwards. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call an 11-argument word function.  ppc32: args 1..8 in r3..r10;
   args 9..11 go to the stack at 8(1)..16(1) after a 32-byte carve
   below the aligned SP (32, not 16, to keep the SP 16-aligned while
   providing three overflow slots). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 12-argument word function.  ppc32: args 1..8 in r3..r10;
   args 9..12 go to the stack at 8(1)..20(1) after a 32-byte carve
   below the aligned SP. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" \
         /* arg12 */ \
         "lwz 3,48(11)\n\t" \
         "stw 3,20(1)\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2546 #endif /* PLAT_ppc32_linux */
2548 /* ------------------------ ppc64-linux ------------------------ */
2550 #if defined(PLAT_ppc64_linux)
2552 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2554 /* These regs are trashed by the hidden call. */
/* ppc64: registers the hidden call may trash; listed as asm clobbers
   in every CALL_FN_ macro below. */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2561 /* Macros to save and align the stack before making a function
2562 call and restore it afterwards as gcc may not keep the stack
2563 pointer aligned if it doesn't realise calls are being made
2564 to other functions. */
/* Save the stack pointer (r1) in r28, then clear its low 4 bits
   (rldicr keeps bits 0..59) to force 16-byte alignment; r28 appears
   in the clobber lists of the CALL_FN_ macros for this reason. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rldicr 1,1,0,59\n\t"
/* Restore the pre-alignment stack pointer from r28. */
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2572 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2573 long) == 8. */
/* Call a zero-argument word function.  ppc64: the asm is handed
   &_argvec[2], so relative to r11: _argvec[0] (offset -16) receives
   the caller's TOC pointer via "std 2,-16(11)", _argvec[1] (offset
   -8) supplies the callee's TOC from _orig.r2, and _argvec[2] (offset
   0) holds the target address.  The caller's TOC is restored from
   -16(11) after the call; the result comes back in r3. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+0]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 1-argument word function.  ppc64: the asm is handed
   &_argvec[2], so argN sits at offset 8*N from r11; the caller's TOC
   (r2) is parked in _argvec[0] at -16(11) and the callee's TOC taken
   from -8(11) for the duration of the call.  arg1 goes in r3, which
   also returns the result. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+1]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 2-argument word function.  ppc64: args go in r3,r4; TOC
   save/swap/restore as in the other ppc64 CALL_FN_ macros (caller's
   r2 parked at -16(11), callee's TOC read from -8(11)). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+2]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */ \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 3-argument word function.  ppc64: args go in r3..r5; TOC
   save/swap/restore as in the other ppc64 CALL_FN_ macros. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+3]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */ \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */ \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 4-argument word function.  ppc64: args go in r3..r6; TOC
   save/swap/restore as in the other ppc64 CALL_FN_ macros. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+4]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */ \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */ \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */ \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 5-argument word function.  ppc64: args go in r3..r7; TOC
   save/swap/restore as in the other ppc64 CALL_FN_ macros. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+5]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */ \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */ \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */ \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */ \
         "ld  7, 40(11)\n\t"  /* arg5->r7 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Call a 6-argument word function.  ppc64: args go in r3..r8; TOC
   save/swap/restore as in the other ppc64 CALL_FN_ macros. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+6]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1]   = (unsigned long)_orig.r2; \
      _argvec[2]   = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "std 2,-16(11)\n\t"  /* save tocptr */ \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */ \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */ \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */ \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */ \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */ \
         "ld  7, 40(11)\n\t"  /* arg5->r7 */ \
         "ld  8, 48(11)\n\t"  /* arg6->r8 */ \
         "ld  11, 0(11)\n\t"  /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         "mr 11,%1\n\t" \
         "mr %0,3\n\t" \
         "ld 2,-16(11)\n\t"   /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/   "=r" (_res) \
         : /*in*/    "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* As CALL_FN_W_6W (ppc64) but with a seventh argument in r9; all seven
   args still fit in registers, so no extra stack frame is needed. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* As CALL_FN_W_7W (ppc64) with an eighth argument in r10 — the last
   one that fits in the parameter registers r3..r10. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Nine args (ppc64): args 1-8 go in r3..r10 as before; arg9 no longer
   fits in registers, so the stack frame is expanded by 128 bytes and
   arg9 is stored into the callee's parameter-save area at 112(r1). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Ten args (ppc64): like CALL_FN_W_9W, with arg10 also spilled to the
   expanded stack frame (at 120(r1), above arg9 at 112(r1)). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Eleven args (ppc64): stack frame grows by 144 bytes; args 9-11 are
   spilled to 112/120/128(r1), args 1-8 still go in r3..r10. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld  3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Twelve args (ppc64): stack frame grows by 144 bytes; args 9-12 are
   spilled to 112/120/128/136(r1), args 1-8 still go in r3..r10. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld  3,96(11)\n\t"                                       \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld  3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld  3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld  3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld   5, 24(11)\n\t" /* arg3->r5 */                      \
         "ld   6, 32(11)\n\t" /* arg4->r6 */                      \
         "ld   7, 40(11)\n\t" /* arg5->r7 */                      \
         "ld   8, 48(11)\n\t" /* arg6->r8 */                      \
         "ld   9, 56(11)\n\t" /* arg7->r9 */                      \
         "ld  10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3103 #endif /* PLAT_ppc64_linux */
3105 /* ------------------------- arm-linux ------------------------- */
3107 #if defined(PLAT_arm_linux)
/* These regs are trashed by the hidden call; listed in the asm clobber
   lists below so gcc does not keep live values in them. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3112 /* Macros to save and align the stack before making a function
3113 call and restore it afterwards as gcc may not keep the stack
3114 pointer aligned if it doesn't realise calls are being made
3115 to other functions. */
3117 /* This is a bit tricky. We store the original stack pointer in r10
3118 as it is callee-saves. gcc doesn't allow the use of r11 for some
3119 reason. Also, we can't directly "bic" the stack pointer in thumb
3120 mode since r13 isn't an allowed register number in that context.
3121 So use r4 as a temporary, since that is about to get trashed
3122 anyway, just after each use of this macro. Side effect is we need
3123 to be very careful about any future changes, since
3124 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in callee-saved r10, then round sp down to an 8-byte
   boundary via r4 (r13/sp can't be "bic"ed directly in thumb mode;
   r4 is about to be trashed anyway). RESTORE puts the saved value
   back. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4, sp\n\t" \
   "bic r4, r4, #7\n\t" \
   "mov sp, r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp, r10\n\t"
3133 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3134 long) == 4. */
/* Call a zero-arg wrapped function (arm): target address is loaded
   into r4 and invoked via the no-redirect branch; result comes back
   in r0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* One-arg call (arm): arg1 is loaded into r0 from _argvec, target
   into r4; result returned in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Two-arg call (arm): args go in r0, r1; result returned in r0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Three-arg call (arm): args go in r0..r2; result returned in r0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Four-arg call (arm): args fill all four parameter registers
   r0..r3; result returned in r0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Five-arg call (arm): args 1-4 in r0..r3, arg5 pushed on the stack.
   The extra "sub sp, #4" before the single-word push keeps the stack
   8-byte aligned. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Six-arg call (arm): args 1-4 in r0..r3, args 5-6 pushed on the
   stack (two words, so alignment is already preserved). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Seven-arg call (arm): args 5-7 pushed (three words plus a 4-byte
   pad to keep 8-byte stack alignment); args 1-4 in r0..r3. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Eight-arg call (arm): args 5-8 pushed as four words (alignment
   preserved); args 1-4 in r0..r3. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Nine-arg call (arm): args 5-9 pushed (five words plus a 4-byte pad
   for 8-byte alignment); args 1-4 in r0..r3. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Ten-arg call (arm): arg10 pushed first, then args 5-9 (six stack
   words total, so alignment is preserved); args 1-4 in r0..r3. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Eleven-arg call (arm): args 10-11 pushed first (after a 4-byte pad
   for alignment), then args 5-9; args 1-4 in r0..r3. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Twelve-arg call (arm): args 10-12 pushed first, then args 5-9
   (eight stack words total, alignment preserved); args 1-4 in
   r0..r3. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3562 #endif /* PLAT_arm_linux */
3564 /* ------------------------ arm64-linux ------------------------ */
3566 #if defined(PLAT_arm64_linux)
/* These regs are trashed by the hidden call; listed in the asm
   clobber lists below so gcc does not keep live values in them. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call; the bic rounds SP down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp, x21\n\t"
3586 /* These CALL_FN_ macros assume that on arm64-linux,
3587 sizeof(unsigned long) == 8. */
/* Call a zero-arg wrapped function (arm64): target address is loaded
   into x8 and invoked via the no-redirect branch; result comes back
   in x0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* One-arg call (arm64): arg1 in x0, target in x8; result in x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Two-arg call (arm64): args in x0, x1; result in x0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Three-arg call (arm64): args in x0..x2; result in x0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Four-arg call (arm64): args in x0..x3; result in x0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Five-arg call (arm64): args in x0..x4; result in x0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3733 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3734 do { \
3735 volatile OrigFn _orig = (orig); \
3736 volatile unsigned long _argvec[7]; \
3737 volatile unsigned long _res; \
3738 _argvec[0] = (unsigned long)_orig.nraddr; \
3739 _argvec[1] = (unsigned long)(arg1); \
3740 _argvec[2] = (unsigned long)(arg2); \
3741 _argvec[3] = (unsigned long)(arg3); \
3742 _argvec[4] = (unsigned long)(arg4); \
3743 _argvec[5] = (unsigned long)(arg5); \
3744 _argvec[6] = (unsigned long)(arg6); \
3745 __asm__ volatile( \
3746 VALGRIND_ALIGN_STACK \
3747 "ldr x0, [%1, #8] \n\t" \
3748 "ldr x1, [%1, #16] \n\t" \
3749 "ldr x2, [%1, #24] \n\t" \
3750 "ldr x3, [%1, #32] \n\t" \
3751 "ldr x4, [%1, #40] \n\t" \
3752 "ldr x5, [%1, #48] \n\t" \
3753 "ldr x8, [%1] \n\t" /* target->x8 */ \
3754 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3755 VALGRIND_RESTORE_STACK \
3756 "mov %0, x0" \
3757 : /*out*/ "=r" (_res) \
3758 : /*in*/ "0" (&_argvec[0]) \
3759 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3760 ); \
3761 lval = (__typeof__(lval)) _res; \
3762 } while (0)
3764 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3765 arg7) \
3766 do { \
3767 volatile OrigFn _orig = (orig); \
3768 volatile unsigned long _argvec[8]; \
3769 volatile unsigned long _res; \
3770 _argvec[0] = (unsigned long)_orig.nraddr; \
3771 _argvec[1] = (unsigned long)(arg1); \
3772 _argvec[2] = (unsigned long)(arg2); \
3773 _argvec[3] = (unsigned long)(arg3); \
3774 _argvec[4] = (unsigned long)(arg4); \
3775 _argvec[5] = (unsigned long)(arg5); \
3776 _argvec[6] = (unsigned long)(arg6); \
3777 _argvec[7] = (unsigned long)(arg7); \
3778 __asm__ volatile( \
3779 VALGRIND_ALIGN_STACK \
3780 "ldr x0, [%1, #8] \n\t" \
3781 "ldr x1, [%1, #16] \n\t" \
3782 "ldr x2, [%1, #24] \n\t" \
3783 "ldr x3, [%1, #32] \n\t" \
3784 "ldr x4, [%1, #40] \n\t" \
3785 "ldr x5, [%1, #48] \n\t" \
3786 "ldr x6, [%1, #56] \n\t" \
3787 "ldr x8, [%1] \n\t" /* target->x8 */ \
3788 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3789 VALGRIND_RESTORE_STACK \
3790 "mov %0, x0" \
3791 : /*out*/ "=r" (_res) \
3792 : /*in*/ "0" (&_argvec[0]) \
3793 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3794 ); \
3795 lval = (__typeof__(lval)) _res; \
3796 } while (0)
3798 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3799 arg7,arg8) \
3800 do { \
3801 volatile OrigFn _orig = (orig); \
3802 volatile unsigned long _argvec[9]; \
3803 volatile unsigned long _res; \
3804 _argvec[0] = (unsigned long)_orig.nraddr; \
3805 _argvec[1] = (unsigned long)(arg1); \
3806 _argvec[2] = (unsigned long)(arg2); \
3807 _argvec[3] = (unsigned long)(arg3); \
3808 _argvec[4] = (unsigned long)(arg4); \
3809 _argvec[5] = (unsigned long)(arg5); \
3810 _argvec[6] = (unsigned long)(arg6); \
3811 _argvec[7] = (unsigned long)(arg7); \
3812 _argvec[8] = (unsigned long)(arg8); \
3813 __asm__ volatile( \
3814 VALGRIND_ALIGN_STACK \
3815 "ldr x0, [%1, #8] \n\t" \
3816 "ldr x1, [%1, #16] \n\t" \
3817 "ldr x2, [%1, #24] \n\t" \
3818 "ldr x3, [%1, #32] \n\t" \
3819 "ldr x4, [%1, #40] \n\t" \
3820 "ldr x5, [%1, #48] \n\t" \
3821 "ldr x6, [%1, #56] \n\t" \
3822 "ldr x7, [%1, #64] \n\t" \
3823 "ldr x8, [%1] \n\t" /* target->x8 */ \
3824 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3825 VALGRIND_RESTORE_STACK \
3826 "mov %0, x0" \
3827 : /*out*/ "=r" (_res) \
3828 : /*in*/ "0" (&_argvec[0]) \
3829 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3830 ); \
3831 lval = (__typeof__(lval)) _res; \
3832 } while (0)
3834 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3835 arg7,arg8,arg9) \
3836 do { \
3837 volatile OrigFn _orig = (orig); \
3838 volatile unsigned long _argvec[10]; \
3839 volatile unsigned long _res; \
3840 _argvec[0] = (unsigned long)_orig.nraddr; \
3841 _argvec[1] = (unsigned long)(arg1); \
3842 _argvec[2] = (unsigned long)(arg2); \
3843 _argvec[3] = (unsigned long)(arg3); \
3844 _argvec[4] = (unsigned long)(arg4); \
3845 _argvec[5] = (unsigned long)(arg5); \
3846 _argvec[6] = (unsigned long)(arg6); \
3847 _argvec[7] = (unsigned long)(arg7); \
3848 _argvec[8] = (unsigned long)(arg8); \
3849 _argvec[9] = (unsigned long)(arg9); \
3850 __asm__ volatile( \
3851 VALGRIND_ALIGN_STACK \
3852 "sub sp, sp, #0x20 \n\t" \
3853 "ldr x0, [%1, #8] \n\t" \
3854 "ldr x1, [%1, #16] \n\t" \
3855 "ldr x2, [%1, #24] \n\t" \
3856 "ldr x3, [%1, #32] \n\t" \
3857 "ldr x4, [%1, #40] \n\t" \
3858 "ldr x5, [%1, #48] \n\t" \
3859 "ldr x6, [%1, #56] \n\t" \
3860 "ldr x7, [%1, #64] \n\t" \
3861 "ldr x8, [%1, #72] \n\t" \
3862 "str x8, [sp, #0] \n\t" \
3863 "ldr x8, [%1] \n\t" /* target->x8 */ \
3864 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3865 VALGRIND_RESTORE_STACK \
3866 "mov %0, x0" \
3867 : /*out*/ "=r" (_res) \
3868 : /*in*/ "0" (&_argvec[0]) \
3869 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3870 ); \
3871 lval = (__typeof__(lval)) _res; \
3872 } while (0)
3874 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3875 arg7,arg8,arg9,arg10) \
3876 do { \
3877 volatile OrigFn _orig = (orig); \
3878 volatile unsigned long _argvec[11]; \
3879 volatile unsigned long _res; \
3880 _argvec[0] = (unsigned long)_orig.nraddr; \
3881 _argvec[1] = (unsigned long)(arg1); \
3882 _argvec[2] = (unsigned long)(arg2); \
3883 _argvec[3] = (unsigned long)(arg3); \
3884 _argvec[4] = (unsigned long)(arg4); \
3885 _argvec[5] = (unsigned long)(arg5); \
3886 _argvec[6] = (unsigned long)(arg6); \
3887 _argvec[7] = (unsigned long)(arg7); \
3888 _argvec[8] = (unsigned long)(arg8); \
3889 _argvec[9] = (unsigned long)(arg9); \
3890 _argvec[10] = (unsigned long)(arg10); \
3891 __asm__ volatile( \
3892 VALGRIND_ALIGN_STACK \
3893 "sub sp, sp, #0x20 \n\t" \
3894 "ldr x0, [%1, #8] \n\t" \
3895 "ldr x1, [%1, #16] \n\t" \
3896 "ldr x2, [%1, #24] \n\t" \
3897 "ldr x3, [%1, #32] \n\t" \
3898 "ldr x4, [%1, #40] \n\t" \
3899 "ldr x5, [%1, #48] \n\t" \
3900 "ldr x6, [%1, #56] \n\t" \
3901 "ldr x7, [%1, #64] \n\t" \
3902 "ldr x8, [%1, #72] \n\t" \
3903 "str x8, [sp, #0] \n\t" \
3904 "ldr x8, [%1, #80] \n\t" \
3905 "str x8, [sp, #8] \n\t" \
3906 "ldr x8, [%1] \n\t" /* target->x8 */ \
3907 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3908 VALGRIND_RESTORE_STACK \
3909 "mov %0, x0" \
3910 : /*out*/ "=r" (_res) \
3911 : /*in*/ "0" (&_argvec[0]) \
3912 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3913 ); \
3914 lval = (__typeof__(lval)) _res; \
3915 } while (0)
3917 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3918 arg7,arg8,arg9,arg10,arg11) \
3919 do { \
3920 volatile OrigFn _orig = (orig); \
3921 volatile unsigned long _argvec[12]; \
3922 volatile unsigned long _res; \
3923 _argvec[0] = (unsigned long)_orig.nraddr; \
3924 _argvec[1] = (unsigned long)(arg1); \
3925 _argvec[2] = (unsigned long)(arg2); \
3926 _argvec[3] = (unsigned long)(arg3); \
3927 _argvec[4] = (unsigned long)(arg4); \
3928 _argvec[5] = (unsigned long)(arg5); \
3929 _argvec[6] = (unsigned long)(arg6); \
3930 _argvec[7] = (unsigned long)(arg7); \
3931 _argvec[8] = (unsigned long)(arg8); \
3932 _argvec[9] = (unsigned long)(arg9); \
3933 _argvec[10] = (unsigned long)(arg10); \
3934 _argvec[11] = (unsigned long)(arg11); \
3935 __asm__ volatile( \
3936 VALGRIND_ALIGN_STACK \
3937 "sub sp, sp, #0x30 \n\t" \
3938 "ldr x0, [%1, #8] \n\t" \
3939 "ldr x1, [%1, #16] \n\t" \
3940 "ldr x2, [%1, #24] \n\t" \
3941 "ldr x3, [%1, #32] \n\t" \
3942 "ldr x4, [%1, #40] \n\t" \
3943 "ldr x5, [%1, #48] \n\t" \
3944 "ldr x6, [%1, #56] \n\t" \
3945 "ldr x7, [%1, #64] \n\t" \
3946 "ldr x8, [%1, #72] \n\t" \
3947 "str x8, [sp, #0] \n\t" \
3948 "ldr x8, [%1, #80] \n\t" \
3949 "str x8, [sp, #8] \n\t" \
3950 "ldr x8, [%1, #88] \n\t" \
3951 "str x8, [sp, #16] \n\t" \
3952 "ldr x8, [%1] \n\t" /* target->x8 */ \
3953 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
3954 VALGRIND_RESTORE_STACK \
3955 "mov %0, x0" \
3956 : /*out*/ "=r" (_res) \
3957 : /*in*/ "0" (&_argvec[0]) \
3958 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
3959 ); \
3960 lval = (__typeof__(lval)) _res; \
3961 } while (0)
3963 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3964 arg7,arg8,arg9,arg10,arg11, \
3965 arg12) \
3966 do { \
3967 volatile OrigFn _orig = (orig); \
3968 volatile unsigned long _argvec[13]; \
3969 volatile unsigned long _res; \
3970 _argvec[0] = (unsigned long)_orig.nraddr; \
3971 _argvec[1] = (unsigned long)(arg1); \
3972 _argvec[2] = (unsigned long)(arg2); \
3973 _argvec[3] = (unsigned long)(arg3); \
3974 _argvec[4] = (unsigned long)(arg4); \
3975 _argvec[5] = (unsigned long)(arg5); \
3976 _argvec[6] = (unsigned long)(arg6); \
3977 _argvec[7] = (unsigned long)(arg7); \
3978 _argvec[8] = (unsigned long)(arg8); \
3979 _argvec[9] = (unsigned long)(arg9); \
3980 _argvec[10] = (unsigned long)(arg10); \
3981 _argvec[11] = (unsigned long)(arg11); \
3982 _argvec[12] = (unsigned long)(arg12); \
3983 __asm__ volatile( \
3984 VALGRIND_ALIGN_STACK \
3985 "sub sp, sp, #0x30 \n\t" \
3986 "ldr x0, [%1, #8] \n\t" \
3987 "ldr x1, [%1, #16] \n\t" \
3988 "ldr x2, [%1, #24] \n\t" \
3989 "ldr x3, [%1, #32] \n\t" \
3990 "ldr x4, [%1, #40] \n\t" \
3991 "ldr x5, [%1, #48] \n\t" \
3992 "ldr x6, [%1, #56] \n\t" \
3993 "ldr x7, [%1, #64] \n\t" \
3994 "ldr x8, [%1, #72] \n\t" \
3995 "str x8, [sp, #0] \n\t" \
3996 "ldr x8, [%1, #80] \n\t" \
3997 "str x8, [sp, #8] \n\t" \
3998 "ldr x8, [%1, #88] \n\t" \
3999 "str x8, [sp, #16] \n\t" \
4000 "ldr x8, [%1, #96] \n\t" \
4001 "str x8, [sp, #24] \n\t" \
4002 "ldr x8, [%1] \n\t" /* target->x8 */ \
4003 VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
4004 VALGRIND_RESTORE_STACK \
4005 "mov %0, x0" \
4006 : /*out*/ "=r" (_res) \
4007 : /*in*/ "0" (&_argvec[0]) \
4008 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
4009 ); \
4010 lval = (__typeof__(lval)) _res; \
4011 } while (0)
4013 #endif /* PLAT_arm64_linux */
4015 /* ------------------------- s390x-linux ------------------------- */
4017 #if defined(PLAT_s390x_linux)
4019 /* Similar workaround as amd64 (see above), but we use r11 as frame
4020 pointer and save the old r11 in r7. r11 might be used for
4021 argvec, therefore we copy argvec in r1 since r1 is clobbered
4022 after the call anyway. */
/* With DWARF CFI support, pass the current CFA as an extra input
   (%2, via __FRAME_POINTER) and keep unwind info consistent while
   r11 is temporarily used as frame pointer; the old r11 is parked
   in r7 and restored by the epilogue (see the comment above). */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER                                          \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE                                    \
      ".cfi_remember_state\n\t"                                   \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */          \
      "lgr 7,11\n\t"                                              \
      "lgr 11,%2\n\t"                                             \
      ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE                                    \
      "lgr 11, 7\n\t"                                             \
      ".cfi_restore_state\n\t"
#else
/* No CFI: the prologue only needs to put the argvec pointer in r1. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                    \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4042 /* Nb: On s390 the stack pointer is properly aligned *at all times*
4043 according to the s390 GCC maintainer. (The ABI specification is not
4044 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4045 VALGRIND_RESTORE_STACK are not defined here. */
4047 /* These regs are trashed by the hidden call. Note that we overwrite
4048 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4049 function a proper return address. All others are ABI defined call
4050 clobbers. */
/* s390x call-clobbered registers: r0-r5, r14 (return address,
   overwritten by the noredir mechanism -- see comment above) and
   f0-f7. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                            "f0","f1","f2","f3","f4","f5","f6","f7"
4054 /* Nb: Although r11 is modified in the asm snippets below (inside
4055 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4056 two reasons:
4057 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4058 modified
4059 (2) GCC will complain that r11 cannot appear inside a clobber section,
   when compiled with -O -fno-omit-frame-pointer
*/
/* s390x call-wrappers.  The argvec pointer is copied into r1 by
   VALGRIND_CFI_PROLOGUE; args 1..5 go in r2..r6, further args are
   copied (mvc) into the callee's stack parameter area; the result
   comes back in r2.  NOTE(review): the "aghi 15,-160" style
   adjustments look like the 160-byte s390x register save area plus
   8 bytes per stack arg -- confirm against the zSeries ELF ABI. */

/* No args -> word result. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* The call abi has the arguments in r2-r6 and stack */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 2 word args in r2,r3. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 3 word args in r2..r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 4 word args in r2..r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 5 word args in r2..r6; r6 is callee-saved on s390x, hence the
   extra "6" in the clobber list from here on. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 6 word args: arg6 is copied into the stack parameter area. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 7 word args: args 6..7 go on the stack. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 8 word args: args 6..8 go on the stack. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 9 word args: args 6..9 go on the stack. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 10 word args: args 6..10 go on the stack. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 11 word args: args 6..11 go on the stack. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 12 word args: args 6..12 go on the stack. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "mvc 208(8,15), 96(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/ "=d" (_res)                                   \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4501 #endif /* PLAT_s390x_linux */
4503 /* ------------------------- mips32-linux ----------------------- */
4505 #if defined(PLAT_mips32_linux)
4507 /* These regs are trashed by the hidden call. */
/* mips32 (o32) call-clobbered registers: v0-v1 ($2-$3), a0-a3
   ($4-$7), t0-t9 ($8-$15, $24, $25) and ra ($31). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
4512 /* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
4513 long) == 4. */
/* mips32 call-wrappers.  $28 (gp) and $31 (ra) are saved around
   the call; the target address goes in $25 (t9, as o32 PIC calls
   require) for VALGRIND_CALL_NOREDIR_T9; the word result comes
   back in $2 (v0).  Args 1..4 travel in $4..$7 (a0..a3); the
   "subu $29, $29, 16" reserves the o32 16-byte outgoing argument
   area, extended for macros with stack-passed args. */

/* No args -> word result. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16\n\t"                                 \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 1 word arg in $4 (a0). */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"   /* arg1*/                         \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 2 word args in $4,$5. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 3 word args in $4..$6. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 4 word args in $4..$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "subu $29, $29, 16 \n\t"                                \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 16 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)

/* 5 word args: arg5 is stored at 16($29) in the enlarged (24-byte)
   outgoing area, staged through $4 before $4 is reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      __asm__ volatile(                                          \
         "subu $29, $29, 8 \n\t"                                 \
         "sw $28, 0($29) \n\t"                                   \
         "sw $31, 4($29) \n\t"                                   \
         "lw $4, 20(%1) \n\t"                                    \
         "subu $29, $29, 24\n\t"                                 \
         "sw $4, 16($29) \n\t"                                   \
         "lw $4, 4(%1) \n\t"                                     \
         "lw $5, 8(%1) \n\t"                                     \
         "lw $6, 12(%1) \n\t"                                    \
         "lw $7, 16(%1) \n\t"                                    \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                  \
         VALGRIND_CALL_NOREDIR_T9                                \
         "addu $29, $29, 24 \n\t"                                \
         "lw $28, 0($29) \n\t"                                   \
         "lw $31, 4($29) \n\t"                                   \
         "addu $29, $29, 8 \n\t"                                 \
         "move %0, $2\n"                                         \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
/* Call a 6-word-arg function: args 5-6 spilled to 16($sp)/20($sp). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "nop\n\t"  /* NOTE(review): presumably load-delay scheduling;   \
                       siblings omit it -- confirm against upstream */   \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-word-arg function: args 5-7 spilled to 16..24($sp). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-word-arg function: args 5-8 spilled to 16..28($sp). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-word-arg function: args 5-9 spilled to 16..32($sp). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-word-arg function: args 5-10 spilled to 16..36($sp). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-word-arg function: args 5-11 spilled to 16..40($sp). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-word-arg function: args 5-12 spilled to 16..44($sp).
   NOTE(review): this macro uses the "r" input constraint whereas every
   sibling in this mips32 section uses "0" (tied to the output register);
   both generate working code here -- confirm against upstream valgrind.h
   before harmonizing. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 56\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"                                    \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5040 #endif /* PLAT_mips32_linux */
5042 /* ------------------------- mips64-linux ------------------------- */
5044 #if defined(PLAT_mips64_linux)
/* These regs are trashed by the hidden call: v0-v1, a0-a3 ($4-$7 here),
   the temporaries, t8/t9 ($24/$25) and the return address $31. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(unsigned long) == 8. */
/* Call a no-arg function 'orig' without redirection; word result into 'lval'.
   n64 ABI: call target goes through $25 (t9); result read from $2 (v0).
   NOTE(review): uses the "0" (tied-to-output) input constraint whereas every
   sibling in this mips64 section uses "r" -- confirm against upstream before
   harmonizing. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 1-arg function: n64 passes arg1 in $4 (a0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"   /* arg1*/                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 2-arg function: args in $4-$5. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 3-arg function: args in $4-$6. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 4-arg function: args in $4-$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 5-arg function: n64 passes up to 8 args in $4-$11; arg5 in $8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-arg function: args in $4-$9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 7-arg function: args in $4-$10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 8-arg function: args in $4-$11 (all in registers under n64). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 9-arg function: args 1-8 in $4-$11, arg9 spilled to the stack. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 10-arg function: args 9-10 spilled to 0..8($sp). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call an 11-arg function: args 9-11 spilled to 0..16($sp). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 24\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 24\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 12-arg function: args 9-12 spilled to 0..24($sp). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 32\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"                                      \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"                                      \
         "ld $4, 88(%1)\n\t"                                      \
         "sd $4, 16($29)\n\t"                                     \
         "ld $4, 96(%1)\n\t"                                      \
         "sd $4, 24($29)\n\t"                                     \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 32\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5457 #endif /* PLAT_mips64_linux */
5460 /* ------------------------------------------------------------------ */
5461 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
5462 /* */
5463 /* ------------------------------------------------------------------ */
5465 /* Some request codes. There are many more of these, but most are not
5466 exposed to end-user view. These are the public ones, all of the
5467 form 0x1000 + small_number.
5469 Core ones are in the range 0x00000000--0x0000ffff. The non-public
5470 ones start at 0x2000.
5473 /* These macros are used by tools -- they must be public, but don't
5474 embed them into other programs. */
/* Build the base request code for a tool from its two identifying ASCII
   chars, e.g. VG_USERREQ_TOOL_BASE('M','C') for memcheck: char a in bits
   31..24, char b in bits 23..16.
   NOTE(review): ((a)&0xff) << 24 shifts into the sign bit of int before the
   cast when a >= 0x80 -- relies on the usual two's-complement behavior;
   confirm against upstream before tightening. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code v belongs to the tool identified by chars (a,b),
   i.e. its top 16 bits equal that tool's base code. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
5480 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
5481 This enum comprises an ABI exported by Valgrind to programs
5482 which use client requests. DO NOT CHANGE THE ORDER OF THESE
5483 ENTRIES, NOR DELETE ANY -- add new ones at the end. */
/* Request codes passed from client code to Valgrind.  The numeric values
   are part of the exported ABI (see the warning above): append new entries
   only; never renumber or delete. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK    = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK      = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL      = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL     = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC       = 0x1305,
          VG_USERREQ__MEMPOOL_FREE        = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM        = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL        = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE      = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS      = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901
   } Vg_ClientRequest;
/* Non-GCC compilers: make GCC's __extension__ marker expand to nothing so
   the client-request macros below still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
5562 /* Returns the number of Valgrinds this code is running under. That
5563 is, 0 if running natively, 1 if running under Valgrind, 2 if
5564 running under Valgrind which is running under another Valgrind,
5565 etc. */
/* Expands to the Valgrind-nesting depth: 0 when running natively, 1 under
   Valgrind, 2 under Valgrind-under-Valgrind, etc.  (See the comment above.)
   Fix: the macro previously ended with a stray '\' line-continuation, which
   silently spliced the following line into the macro body and only worked
   because that line happened to be blank. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)
5572 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
5573 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
5574 since it provides a way to make sure valgrind will retranslate the
5575 invalidated area. Returns no value. */
/* _qzz_addr: start of the range; _qzz_len: its length in bytes.  No result. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
5581 /* These requests are for getting Valgrind itself to print something.
5582 Possibly with a backtrace. This is a really ugly hack. The return value
5583 is the number of characters printed, excluding the "**<pid>** " part at the
5584 start and the backtrace (if present). */
/* Forward declaration so GCC-family compilers get printf-format checking
   and no unused-function warning for the static definition below.
   NOTE(review): '&&' binds tighter than '||', so !defined(_MSC_VER) only
   qualifies the __INTEL_COMPILER term -- presumably intentional (GCC never
   defines _MSC_VER), but confirm against upstream valgrind.h. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* Print a formatted message to the Valgrind log via the VALIST_BY_REF
   client request.  Returns the number of characters printed (0 when built
   with NVALGRIND, which compiles the request out entirely). */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;     /* MSVC/MinGW64: unsigned long is 32-bit; use uintptr_t */
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* vargs is passed by reference (see the VALIST_BY_REF note above):
      a va_list need not fit in a UWord. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
/* NOTE(review): as with VALGRIND_PRINTF's guard, '&&' binds tighter
   than '||' in the condition below -- confirm the grouping is intended. */
5626 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5627 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5628 __attribute__((format(__printf__, 1, 2), __unused__));
5629 #endif
/* Like VALGRIND_PRINTF, but uses the *_PRINTF_BACKTRACE_* request so
   Valgrind also prints a backtrace with the message.  Returns the
   number of characters printed, or 0 when NVALGRIND is defined or the
   program is not running under Valgrind. */
5630 static int
5631 #if defined(_MSC_VER)
5632 __inline
5633 #endif
5634 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5636 #if defined(NVALGRIND)
5637 return 0;
5638 #else /* NVALGRIND */
5639 #if defined(_MSC_VER) || defined(__MINGW64__)
5640 uintptr_t _qzz_res;
5641 #else
5642 unsigned long _qzz_res;
5643 #endif
5644 va_list vargs;
5645 va_start(vargs, format);
5646 #if defined(_MSC_VER) || defined(__MINGW64__)
5647 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5648 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5649 (uintptr_t)format,
5650 (uintptr_t)&vargs,
5651 0, 0, 0);
5652 #else
5653 _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5654 VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
5655 (unsigned long)format,
5656 (unsigned long)&vargs,
5657 0, 0, 0);
5658 #endif
5659 va_end(vargs);
5660 return (int)_qzz_res;
5661 #endif /* NVALGRIND */
/* ---- Client calls: run a function on the real (unsimulated) CPU ---- */
5665 /* These requests allow control to move from the simulated CPU to the
5666 real CPU, calling an arbitary function.
5668 Note that the current ThreadId is inserted as the first argument.
5669 So this call:
5671 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5673 requires f to have this signature:
5675 Word f(Word tid, Word arg1, Word arg2)
5677 where "Word" is a word-sized type.
5679 Note that these client requests are not entirely reliable. For example,
5680 if you call a function with them that subsequently calls printf(),
5681 there's a high chance Valgrind will crash. Generally, your prospects of
5682 these working are made higher if the called function does not refer to
5683 any global variables, and does not refer to any libc or other functions
5684 (printf et al). Any kind of entanglement with libc or dynamic linking is
5685 likely to have a bad outcome, for tricky reasons which we've grappled
5686 with a lot in the past.
   (The notes above apply to all four VALGRIND_NON_SIMD_CALLn macros
   below, for n = 0..3 caller-supplied arguments; each evaluates to 0
   when not running under Valgrind.)
5688 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
5689 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5690 VG_USERREQ__CLIENT_CALL0, \
5691 _qyy_fn, \
5692 0, 0, 0, 0)
5694 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
5695 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5696 VG_USERREQ__CLIENT_CALL1, \
5697 _qyy_fn, \
5698 _qyy_arg1, 0, 0, 0)
5700 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
5701 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5702 VG_USERREQ__CLIENT_CALL2, \
5703 _qyy_fn, \
5704 _qyy_arg1, _qyy_arg2, 0, 0)
5706 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
5707 VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5708 VG_USERREQ__CLIENT_CALL3, \
5709 _qyy_fn, \
5710 _qyy_arg1, _qyy_arg2, \
5711 _qyy_arg3, 0)
5714 /* Counts the number of errors that have been recorded by a tool. Nb:
5715 the tool must record the errors with VG_(maybe_record_error)() or
5716 VG_(unique_error)() for them to be counted. */
/* Expression-style request; evaluates to 0 (the default) when not
   running under Valgrind. */
5717 #define VALGRIND_COUNT_ERRORS \
5718 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
5719 0 /* default return */, \
5720 VG_USERREQ__COUNT_ERRORS, \
5721 0, 0, 0, 0, 0)
5723 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5724 when heap blocks are allocated in order to give accurate results. This
5725 happens automatically for the standard allocator functions such as
5726 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5727 delete[], etc.
5729 But if your program uses a custom allocator, this doesn't automatically
5730 happen, and Valgrind will not do as well. For example, if you allocate
5731 superblocks with mmap() and then allocates chunks of the superblocks, all
5732 Valgrind's observations will be at the mmap() level and it won't know that
5733 the chunks should be considered separate entities. In Memcheck's case,
5734 that means you probably won't get heap block overrun detection (because
5735 there won't be redzones marked as unaddressable) and you definitely won't
5736 get any leak detection.
5738 The following client requests allow a custom allocator to be annotated so
5739 that it can be handled accurately by Valgrind.
5741 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5742 by a malloc()-like function. For Memcheck (an illustrative case), this
5743 does two things:
5745 - It records that the block has been allocated. This means any addresses
5746 within the block mentioned in error messages will be
5747 identified as belonging to the block. It also means that if the block
5748 isn't freed it will be detected by the leak checker.
5750 - It marks the block as being addressable and undefined (if 'is_zeroed' is
5751 not set), or addressable and defined (if 'is_zeroed' is set). This
5752 controls how accesses to the block by the program are handled.
5754 'addr' is the start of the usable block (ie. after any
5755 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5756 can apply redzones -- these are blocks of padding at the start and end of
5757 each block. Adding redzones is recommended as it makes it much more likely
5758 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5759 zeroed (or filled with another predictable value), as is the case for
5760 calloc().
5762 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5763 heap block -- that will be used by the client program -- is allocated.
5764 It's best to put it at the outermost level of the allocator if possible;
5765 for example, if you have a function my_alloc() which calls
5766 internal_alloc(), and the client request is put inside internal_alloc(),
5767 stack traces relating to the heap block will contain entries for both
5768 my_alloc() and internal_alloc(), which is probably not what you want.
5770 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5771 custom blocks from within a heap block, B, that has been allocated with
5772 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5773 -- the custom blocks will take precedence.
5775 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5776 Memcheck, it does two things:
5778 - It records that the block has been deallocated. This assumes that the
5779 block was annotated as having been allocated via
5780 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5782 - It marks the block as being unaddressable.
5784 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5785 heap block is deallocated.
5787 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5788 Memcheck, it does four things:
5790 - It records that the size of a block has been changed. This assumes that
5791 the block was annotated as having been allocated via
5792 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5794 - If the block shrunk, it marks the freed memory as being unaddressable.
5796 - If the block grew, it marks the new area as undefined and defines a red
5797 zone past the end of the new block.
5799 - The V-bits of the overlap between the old and the new block are preserved.
5801 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5802 and before deallocation of the old block.
5804 In many cases, these three client requests will not be enough to get your
5805 allocator working well with Memcheck. More specifically, if your allocator
5806 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5807 will be necessary to mark the memory as addressable just before the zeroing
5808 occurs, otherwise you'll get a lot of invalid write errors. For example,
5809 you'll need to do this if your allocator recycles freed blocks, but it
5810 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5811 Alternatively, if your allocator reuses freed blocks for allocator-internal
5812 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5814 Really, what's happening is a blurring of the lines between the client
5815 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5816 memory should be considered unaddressable to the client program, but the
5817 allocator knows more than the rest of the client program and so may be able
5818 to safely access it. Extra client requests are necessary for Valgrind to
5819 understand the distinction between the allocator and the rest of the
5820 program.
5822 Ignored if addr == 0.
   (End of the custom-allocator overview; the request macro itself
   follows.)
5824 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
5825 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
5826 addr, sizeB, rzB, is_zeroed, 0)
5828 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5829 Ignored if addr == 0.
   (oldSizeB/newSizeB are the usable sizes before and after the resize;
   rzB is the redzone size described for VALGRIND_MALLOCLIKE_BLOCK.)
5831 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
5832 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
5833 addr, oldSizeB, newSizeB, rzB, 0)
5835 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
5836 Ignored if addr == 0.
   (NOTE(review): rzB is presumably the same redzone size that was
   passed at allocation time -- confirm against the Valgrind manual.)
5838 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
5839 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
5840 addr, rzB, 0, 0, 0)
/* ---- Memory pool client requests (see also the custom-allocator
   overview above VALGRIND_MALLOCLIKE_BLOCK) ---- */
5842 /* Create a memory pool. */
5843 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
5844 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
5845 pool, rzB, is_zeroed, 0, 0)
5847 /* Destroy a memory pool. */
5848 #define VALGRIND_DESTROY_MEMPOOL(pool) \
5849 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
5850 pool, 0, 0, 0, 0)
5852 /* Associate a piece of memory with a memory pool. */
5853 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
5854 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
5855 pool, addr, size, 0, 0)
5857 /* Disassociate a piece of memory from a memory pool. */
5858 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
5859 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
5860 pool, addr, 0, 0, 0)
5862 /* Disassociate any pieces outside a particular range. */
5863 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
5864 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
5865 pool, addr, size, 0, 0)
5867 /* Inform the tool that the pool previously anchored at poolA has
   moved to poolB.  (The original comment here was a copy-paste of the
   VALGRIND_MEMPOOL_CHANGE description; see the Valgrind manual's
   description of VALGRIND_MOVE_MEMPOOL.) */
5868 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
5869 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
5870 poolA, poolB, 0, 0, 0)
5872 /* Resize and/or move a piece associated with a memory pool. */
5873 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
5874 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
5875 pool, addrA, addrB, size, 0)
5877 /* Return 1 if a mempool exists, else 0. */
5878 #define VALGRIND_MEMPOOL_EXISTS(pool) \
5879 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5880 VG_USERREQ__MEMPOOL_EXISTS, \
5881 pool, 0, 0, 0, 0)
/* ---- Stack registration client requests ---- */
5883 /* Mark a piece of memory as being a stack. Returns a stack id. */
5884 #define VALGRIND_STACK_REGISTER(start, end) \
5885 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5886 VG_USERREQ__STACK_REGISTER, \
5887 start, end, 0, 0, 0)
5889 /* Unmark the piece of memory associated with a stack id as being a
5890 stack. */
/* Statement-style: no result value. */
5891 #define VALGRIND_STACK_DEREGISTER(id) \
5892 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
5893 id, 0, 0, 0, 0)
5895 /* Change the start and end address of the stack id. */
5896 #define VALGRIND_STACK_CHANGE(id, start, end) \
5897 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE, \
5898 id, start, end, 0, 0)
5900 /* Load PDB debug info for Wine PE image_map. */
/* NOTE(review): fd/ptr/total_size/delta are forwarded verbatim to the
   tool; presumably fd is an open descriptor for the PE image and delta
   the load-address adjustment -- confirm against the Valgrind sources
   before relying on this. */
5901 #define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta) \
5902 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
5903 fd, ptr, total_size, delta, 0)
5905 /* Map a code address to a source file name and line number. buf64
5906 must point to a 64-byte buffer in the caller's address space. The
5907 result will be dumped in there and is guaranteed to be zero
5908 terminated. If no info is found, the first byte is set to zero. */
/* Expression-style request; evaluates to the (unsigned) default 0
   when not running under Valgrind, leaving buf64 untouched. */
5909 #define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64) \
5910 (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5911 VG_USERREQ__MAP_IP_TO_SRCLOC, \
5912 addr, buf64, 0, 0, 0)
5914 /* Disable error reporting for this thread. Behaves in a stack like
5915 way, so you can safely call this multiple times provided that
5916 VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
5917 to re-enable reporting. The first call of this macro disables
5918 reporting. Subsequent calls have no effect except to increase the
5919 number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
5920 reporting. Child threads do not inherit this setting from their
5921 parents -- they are always created with reporting enabled. */
/* Both macros use the same VG_USERREQ__CHANGE_ERR_DISABLEMENT request:
   disable passes +1 and enable (below) passes -1, adjusting the
   per-thread disablement count described above. */
5922 #define VALGRIND_DISABLE_ERROR_REPORTING \
5923 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5924 1, 0, 0, 0, 0)
5926 /* Re-enable error reporting, as per comments on
5927 VALGRIND_DISABLE_ERROR_REPORTING. */
5928 #define VALGRIND_ENABLE_ERROR_REPORTING \
5929 VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
5930 -1, 0, 0, 0, 0)
5932 /* Execute a monitor command from the client program.
5933 If a connection is opened with GDB, the output will be sent
5934 according to the output mode set for vgdb.
5935 If no connection is opened, output will go to the log output.
5936 Returns 1 if command not recognised, 0 otherwise. */
/* Expression-style request with default result 0 -- so in a plain
   native run this evaluates to 0, which reads as "command recognised"
   per the comment above. */
5937 #define VALGRIND_MONITOR_COMMAND(command) \
5938 VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
5939 command, 0, 0, 0, 0)
/* Tidy up: undefine the internal per-platform selection macros so they
   do not leak into code that includes this header. */
5942 #undef PLAT_x86_darwin
5943 #undef PLAT_amd64_darwin
5944 #undef PLAT_x86_win32
5945 #undef PLAT_amd64_win64
5946 #undef PLAT_x86_linux
5947 #undef PLAT_amd64_linux
5948 #undef PLAT_ppc32_linux
5949 #undef PLAT_ppc64_linux
5950 #undef PLAT_arm_linux
5951 #undef PLAT_s390x_linux
5952 #undef PLAT_mips32_linux
5953 #undef PLAT_mips64_linux
/* Close of the __VALGRIND_H include guard. */
5955 #endif /* __VALGRIND_H */