1/* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56*/
57
58
59/* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
73#ifndef __VALGRIND_H
74#define __VALGRIND_H
75
76
77/* ------------------------------------------------------------------ */
78/* VERSION NUMBER OF VALGRIND */
79/* ------------------------------------------------------------------ */
80
81/* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (eg)
86
87#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90*/
91#define __VALGRIND_MAJOR__ 3
92#define __VALGRIND_MINOR__ 15
93
94
95#include <stdarg.h>
96
97/* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
101/* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
107
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
110*/
111#undef PLAT_x86_darwin
112#undef PLAT_amd64_darwin
113#undef PLAT_x86_win32
114#undef PLAT_amd64_win64
115#undef PLAT_x86_linux
116#undef PLAT_amd64_linux
117#undef PLAT_ppc32_linux
118#undef PLAT_ppc64be_linux
119#undef PLAT_ppc64le_linux
120#undef PLAT_arm_linux
121#undef PLAT_arm64_linux
122#undef PLAT_s390x_linux
123#undef PLAT_mips32_linux
124#undef PLAT_mips64_linux
125#undef PLAT_x86_solaris
126#undef PLAT_amd64_solaris
127
128
129#if defined(__APPLE__) && defined(__i386__)
130# define PLAT_x86_darwin 1
131#elif defined(__APPLE__) && defined(__x86_64__)
132# define PLAT_amd64_darwin 1
133#elif (defined(__MINGW32__) && defined(__i386__)) \
134 || defined(__CYGWIN32__) \
135 || (defined(_WIN32) && defined(_M_IX86))
136# define PLAT_x86_win32 1
137#elif (defined(__MINGW32__) && defined(__x86_64__)) \
138 || (defined(_WIN32) && defined(_M_X64))
139/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
140# define PLAT_amd64_win64 1
141#elif defined(__linux__) && defined(__i386__)
142# define PLAT_x86_linux 1
143#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
144# define PLAT_amd64_linux 1
145#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
146# define PLAT_ppc32_linux 1
147#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
148/* Big Endian uses ELF version 1 */
149# define PLAT_ppc64be_linux 1
150#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
151/* Little Endian uses ELF version 2 */
152# define PLAT_ppc64le_linux 1
153#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
154# define PLAT_arm_linux 1
155#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
156# define PLAT_arm64_linux 1
157#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
158# define PLAT_s390x_linux 1
159#elif defined(__linux__) && defined(__mips__) && (__mips==64)
160# define PLAT_mips64_linux 1
161#elif defined(__linux__) && defined(__mips__) && (__mips!=64)
162# define PLAT_mips32_linux 1
163#elif defined(__sun) && defined(__i386__)
164# define PLAT_x86_solaris 1
165#elif defined(__sun) && defined(__x86_64__)
166# define PLAT_amd64_solaris 1
167#else
168/* If we're not compiling for our target platform, don't generate
169 any inline asms. */
170# if !defined(NVALGRIND)
171# define NVALGRIND 1
172# endif
173#endif
174
175
176/* ------------------------------------------------------------------ */
177/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
178/* in here of use to end-users -- skip to the next section. */
179/* ------------------------------------------------------------------ */
180
181/*
182 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
183 * request. Accepts both pointers and integers as arguments.
184 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
189 * client request and whose value equals the client request result. Accepts
190 * both pointers and integers as arguments. Note that such calls are not
191 * necessarily pure functions -- they may have side effects.
192 */
193
/* Statement form: assigns the request's result to the lvalue
   _zzq_rlval.  A thin wrapper over the expression form below. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form that discards the result; the default of 0 passed
   here is never observable. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5)                 \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                  (_zzq_request), (_zzq_arg1), (_zzq_arg2),             \
                  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
206
#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()) */
/* Note: in this stub the request and argument expressions are
   discarded unevaluated, so any side effects in them vanish when
   NVALGRIND is defined. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
216
217#else /* ! NVALGRIND */
218
219/* The following defines the magic code sequences which the JITter
220 spots and handles magically. Don't look too closely at them as
221 they will rot your brain.
222
223 The assembly code sequences for all architectures is in this one
224 file. This is because this file must be stand-alone, and we don't
225 want to have multiple files.
226
227 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
228 value gets put in the return slot, so that everything works when
229 this is executed not under Valgrind. Args are passed in a memory
230 block, and so there's no intrinsic limit to the number that could
231 be passed, but it's currently five.
232
233 The macro args are:
234 _zzq_rlval result lvalue
235 _zzq_default default value (result returned when running on real CPU)
236 _zzq_request request code
237 _zzq_arg1..5 request params
238
239 The other two macros are used to support function wrapping, and are
240 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
241 guest's NRADDR pseudo-register and whatever other information is
242 needed to safely run the call original from the wrapper: on
243 ppc64-linux, the R2 value at the divert point is also needed. This
244 information is abstracted into a user-visible type, OrigFn.
245
246 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
247 guest, but guarantees that the branch instruction will not be
248 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
249 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
250 complete inline asm, since it needs to be combined with more magic
251 inline asm stuff to be useful.
252*/
253
254/* ----------------- x86-{linux,darwin,solaris} ---------------- */
255
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  (defined(PLAT_x86_win32) && defined(__GNUC__))     \
    ||  defined(PLAT_x86_solaris)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %edi totalling 64 bit positions (3+13+29+19 == 64,
   i.e. 0 mod 32): %edi ends up unchanged, so this is a no-op on a
   real CPU but is the marker pattern Valgrind's JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3, %%edi ; roll $13, %%edi\n\t"       \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* The request code and five args travel in a 6-word block whose
   address goes in %eax.  The default is preloaded into %edx (the "0"
   constraint ties it to the output), and %edx also carries the
   result back -- so when not running under Valgrind the expression
   simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Not a complete asm statement: raw text meant to be pasted into
   larger function-wrapping inline asm (see the big comment above). */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris */
320
321/* ------------------------- x86-Win32 ------------------------- */
322
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC-style inline asm: the same no-op rotate sequence as the GCC
   x86 variant (3+13+29+19 == 64 == 0 mod 32, edi unchanged),
   expressed with __asm blocks. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
    __asm rol edi, 3  __asm rol edi, 13                           \
    __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper for the MSVC build (MSVC inline asm cannot live
   inside an expression macro): marshals the request code and args
   into a 6-element block, points EAX at it, preloads EDX with the
   default and executes the magic sequence; EDX carries the result
   back -- the unchanged default when not running under Valgrind. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Deliberately unusable under MSVC: expands to the undefined token
   ERROR so any use fails to compile. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
    do {                                                          \
        __asm { __SPECIAL_INSTRUCTION_PREAMBLE                    \
                __asm xchg edi,edi                                \
        }                                                         \
    } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
393
394/* ----------------- amd64-{linux,darwin,solaris} --------------- */
395
#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris) \
    ||  (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %rdi totalling 128 bit positions (3+13+61+51 ==
   128, i.e. 0 mod 64): %rdi ends up unchanged, so this is a no-op
   on a real CPU but is the marker pattern Valgrind's JIT
   recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3, %%rdi ; rolq $13, %%rdi\n\t"       \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* The request code and five args travel in a 6-qword block whose
   address goes in %rax.  The default is preloaded into %rdx (the
   "0" constraint ties it to the output), which also carries the
   result back -- so off-Valgrind the expression yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm (see the big comment above). */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || (PLAT_amd64_win64 && __GNUC__) */
459
460/* ------------------------- amd64-Win64 ------------------------- */
461
462#if defined(PLAT_amd64_win64) && !defined(__GNUC__)
463
464#error Unsupported compiler.
465
466#endif /* PLAT_amd64_win64 */
467
468/* ------------------------ ppc32-linux ------------------------ */
469
#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of r0 totalling 64 bit positions (3+13+29+19 == 64,
   i.e. 0 mod 32): r0 is left unchanged, so this is a no-op on a real
   CPU but is the marker pattern Valgrind's JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t"  \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* The request code and five args travel in a 6-word block; r4 holds
   its address and r3 is preloaded with the default, which is also
   where the result comes back -- so off-Valgrind the expression
   simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned int _zzq_args[6];                                   \
     unsigned int _zzq_result;                                    \
     unsigned int* _zzq_ptr;                                      \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm (see the big comment above). */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
536
537/* ------------------------ ppc64-linux ------------------------ */
538
#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotations of r0 totalling 128 bit positions (3+13+61+51 ==
   128, i.e. 0 mod 64): r0 is left unchanged, so this is a no-op on
   a real CPU but is the marker pattern Valgrind's JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* The request code and five args travel in a 6-dword block; r4
   holds its address and r3 is preloaded with the default, which is
   also where the result comes back -- so off-Valgrind the
   expression simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Function-wrapping support: fills both OrigFn fields -- the guest
   NRADDR pseudo-register, and (via a second request) the R2/TOC
   value needed to call the original function (see comments above). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm (see the big comment above). */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
615
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotations of r0 totalling 128 bit positions (3+13+61+51 ==
   128, i.e. 0 mod 64): r0 is left unchanged, so this is a no-op on
   a real CPU but is the marker pattern Valgrind's JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3 ; rotldi 0,0,13\n\t"           \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* The request code and five args travel in a 6-dword block; r4
   holds its address and r3 is preloaded with the default, which is
   also where the result comes back -- so off-Valgrind the
   expression simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Function-wrapping support: fills both OrigFn fields -- the guest
   NRADDR pseudo-register, and (via a second request) the R2/TOC
   value needed to call the original function (see comments above). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm.  On ppc64le the branch target lives
   in R12, not R11 as on ppc64be. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
692
693/* ------------------------- arm-linux ------------------------- */
694
#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of r12 totalling 64 bit positions (3+13+29+19 == 64,
   i.e. 0 mod 32): r12 is left unchanged, so this is a no-op on a
   real CPU but is the marker pattern Valgrind's JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t"   \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* The request code and five args travel in a 6-word block; r4 holds
   its address and r3 is preloaded with the default, which is also
   where the result comes back -- so off-Valgrind the expression
   simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
    })

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm (see the big comment above). */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
760
761/* ------------------------ arm64-linux ------------------------- */
762
#if defined(PLAT_arm64_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of x12 totalling 128 bit positions (3+13+51+61 ==
   128, i.e. 0 mod 64): x12 is left unchanged, so this is a no-op on
   a real CPU but is the marker pattern Valgrind's JIT recognises. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

/* The request code and five args travel in a 6-dword block; x4
   holds its address and x3 is preloaded with the default, which is
   also where the result comes back -- so off-Valgrind the
   expression simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int _zzq_args[6];                      \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
    })

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm (see the big comment above). */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux */
829
830/* ------------------------ s390x-linux ------------------------ */
831
#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 * Each "lr x,x" loads a register into itself, so the whole sequence
 * is a no-op on a real CPU.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                           \
                     "lr 15,15\n\t"                              \
                     "lr 1,1\n\t"                                \
                     "lr 2,2\n\t"                                \
                     "lr 3,3\n\t"

/* One discriminator instruction per request kind, appended to the
   preamble by the macros below. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* The request code and five args travel in a 6-dword block; r2
   holds its address and r3 is preloaded with the default ("0" ties
   it to the output), which also carries the result back -- so
   off-Valgrind the expression simply yields the default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                         \
       _zzq_default, _zzq_request,                               \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                  \
 ({volatile unsigned long int _zzq_args[6];                      \
   volatile unsigned long int _zzq_result;                       \
   _zzq_args[0] = (unsigned long int)(_zzq_request);             \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
   __asm__ volatile(/* r2 = args */                              \
                    "lgr 2,%1\n\t"                               \
                    /* r3 = default */                           \
                    "lgr 3,%2\n\t"                               \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CLIENT_REQUEST_CODE                        \
                    /* results = r3 */                           \
                    "lgr %0, 3\n\t"                              \
                    : "=d" (_zzq_result)                         \
                    : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                    : "cc", "2", "3", "memory"                   \
                   );                                            \
   _zzq_result;                                                  \
 })

/* Function-wrapping support: stores the guest's NRADDR
   pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                      \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
   volatile unsigned long int __addr;                            \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                    __GET_NR_CONTEXT_CODE                        \
                    "lgr %0, 3\n\t"                              \
                    : "=a" (__addr)                              \
                    :                                            \
                    : "cc", "3", "memory"                        \
                   );                                            \
   _zzq_orig->nraddr = __addr;                                   \
 }

/* Not a complete asm statement: raw text to be pasted into larger
   function-wrapping inline asm (see the big comment above). */
#define VALGRIND_CALL_NOREDIR_R1                                 \
                    __SPECIAL_INSTRUCTION_PREAMBLE               \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     __VEX_INJECT_IR_CODE);                      \
 } while (0)

#endif /* PLAT_s390x_linux */
906
907/* ------------------------- mips32-linux ---------------- */
908
909#if defined(PLAT_mips32_linux)
910
911typedef
912 struct {
913 unsigned int nraddr; /* where's the code? */
914 }
915 OrigFn;
916
917/* .word 0x342
918 * .word 0x742
919 * .word 0xC2
920 * .word 0x4C2*/
921#define __SPECIAL_INSTRUCTION_PREAMBLE \
922 "srl $0, $0, 13\n\t" \
923 "srl $0, $0, 29\n\t" \
924 "srl $0, $0, 3\n\t" \
925 "srl $0, $0, 19\n\t"
926
927#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
928 _zzq_default, _zzq_request, \
929 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
930 __extension__ \
931 ({ volatile unsigned int _zzq_args[6]; \
932 volatile unsigned int _zzq_result; \
933 _zzq_args[0] = (unsigned int)(_zzq_request); \
934 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
935 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
936 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
937 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
938 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
939 __asm__ volatile("move $11, %1\n\t" /*default*/ \
940 "move $12, %2\n\t" /*ptr*/ \
941 __SPECIAL_INSTRUCTION_PREAMBLE \
942 /* T3 = client_request ( T4 ) */ \
943 "or $13, $13, $13\n\t" \
944 "move %0, $11\n\t" /*result*/ \
945 : "=r" (_zzq_result) \
946 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
947 : "$11", "$12", "memory"); \
948 _zzq_result; \
949 })
950
951#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
952 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
953 volatile unsigned int __addr; \
954 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
955 /* %t9 = guest_NRADDR */ \
956 "or $14, $14, $14\n\t" \
957 "move %0, $11" /*result*/ \
958 : "=r" (__addr) \
959 : \
960 : "$11" \
961 ); \
962 _zzq_orig->nraddr = __addr; \
963 }
964
965#define VALGRIND_CALL_NOREDIR_T9 \
966 __SPECIAL_INSTRUCTION_PREAMBLE \
967 /* call-noredir *%t9 */ \
968 "or $15, $15, $15\n\t"
969
/* Ask Valgrind's JIT to inject IR here: preamble followed by the
   "or $11, $11, $11" marker.  No-op on real hardware. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
976
977
978#endif /* PLAT_mips32_linux */
979
980/* ------------------------- mips64-linux ---------------- */
981
982#if defined(PLAT_mips64_linux)
983
/* Context of the function being wrapped (see mips32 comment above);
   on mips64 a code address needs an unsigned long. */
typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;
989
/* The mips64 client-request marker: four doubleword shifts of the
   always-zero register $0 -- architectural no-ops that Valgrind's
   JIT recognises as a sequence. */
/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
                     "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"            \
                     "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
997
/* Perform a client request (mips64 variant of the mips32 macro
   above): args marshalled into an on-stack array, default/result in
   $11, array pointer in $12, "or $13, $13, $13" is the request
   marker.  Returns _zzq_default when not running under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned long int _zzq_args[6];                     \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
        __asm__ volatile("move $11, %1\n\t" /*default*/           \
                         "move $12, %2\n\t" /*ptr*/               \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* $11 = client_request ( $12 ) */       \
                         "or $13, $13, $13\n\t"                   \
                         "move %0, $11\n\t"     /*result*/        \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                         : "$11", "$12", "memory");               \
    _zzq_result;                                                  \
  })
1021
/* Fetch the address of the original (non-redirected) function into
   _zzq_rlval.nraddr; "or $14, $14, $14" asks for guest_NRADDR and
   the answer arrives in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
1034
/* Asm fragment for a non-redirected call through $25 (t9):
   "or $15, $15, $15" after the preamble is the marker. */
#define VALGRIND_CALL_NOREDIR_T9                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE              \
                     /* call-noredir $25 */                      \
                     "or $15, $15, $15\n\t"
1039
/* Ask Valgrind's JIT to inject IR here: preamble plus the
   "or $11, $11, $11" marker.  No-op on real hardware. */
#define VALGRIND_VEX_INJECT_IR()                                 \
 do {                                                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE              \
                     "or $11, $11, $11\n\t"                      \
                    );                                           \
 } while (0)
1046
1047#endif /* PLAT_mips64_linux */
1048
1049/* Insert assembly code for other platforms here... */
1050
1051#endif /* NVALGRIND */
1052
1053
1054/* ------------------------------------------------------------------ */
1055/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1056/* ugly. It's the least-worst tradeoff I can think of. */
1057/* ------------------------------------------------------------------ */
1058
1059/* This section defines magic (a.k.a appalling-hack) macros for doing
1060 guaranteed-no-redirection macros, so as to get from function
1061 wrappers to the functions they are wrapping. The whole point is to
1062 construct standard call sequences, but to do the call itself with a
1063 special no-redirect call pseudo-instruction that the JIT
1064 understands and handles specially. This section is long and
1065 repetitious, and I can't see a way to make it shorter.
1066
1067 The naming scheme is as follows:
1068
1069 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1070
1071 'W' stands for "word" and 'v' for "void". Hence there are
1072 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1073 and for each, the possibility of returning a word-typed result, or
1074 no result.
1075*/
1076
1077/* Use these to write the name of your wrapper. NOTE: duplicates
1078 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1080 See pub_tool_redir.h for details -- normally you don't need to
1081 think about this, though. */
1082
/* Token-paste four fragments into a single identifier.  The extra
   macro level guarantees that soname/fnname arguments are fully
   macro-expanded before the ## pasting happens. */
#define VG_CONCAT4(_p1,_p2,_p3,_p4) _p1##_p2##_p3##_p4
1086
/* Build the mangled name of a wrapper for fnname in soname.  The
   _vgw prefix plus "00000" (the default behaviour-equivalence class
   tag) is what Valgrind's redirection machinery looks for; ZU vs ZZ
   selects whether soname is Z-encoded. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1092
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn.  This is just
   the platform-selected VALGRIND_GET_NR_CONTEXT under a
   platform-neutral name. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)
1098
1099/* Also provide end-user facilities for function replacement, rather
1100 than wrapping. A replacement function differs from a wrapper in
1101 that it has no way to get hold of the original function being
1102 called, and hence no way to call onwards to it. In a replacement
1103 function, VALGRIND_GET_ORIG_FN always returns zero. */
1104
/* Build the mangled name of a replacement (not wrapper) for fnname
   in soname: same scheme as I_WRAP_* above but with the _vgr prefix,
   which marks a replacement that cannot call onwards to the
   original. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1110
1111/* Derivatives of the main macros below, for calling functions
1112 returning void. */
1113
/* Wrappers for calling void-returning functions of arity 0..7: each
   simply forwards to the corresponding word-returning CALL_FN_W_*
   macro and discards the word result into a volatile dummy. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_v(_discard,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_W(_discard,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WW(_discard,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_7W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1145
1146/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1147
1148#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1149 || defined(PLAT_x86_solaris)
1150
/* These regs are trashed by the hidden call.  No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save %esp in %edi (which is why the CALL_FN_* macros below list
   "edi" as trashed), then round %esp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1165
1166/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1167 long) == 4. */
1168
/* Call a 0-arg function at _orig.nraddr without redirection.  %eax
   carries the argvec pointer in and the result out. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg: pad by 12 so the one pushl leaves %esp 16-byte aligned at
   the call.  Args are pushed right-to-left (cdecl). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args: pad 8 + two pushes = 16 bytes. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args: pad 4 + three pushes = 16 bytes. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args: four pushes are already 16 bytes, no pad needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1281
/* 5 args: pad 12 + five pushes = 32 bytes (16-aligned). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args: pad 8 + six pushes = 32 bytes. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args: pad 4 + seven pushes = 32 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: eight pushes are already 32 bytes, no pad needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1410
/* 9 args: pad 12 + nine pushes = 48 bytes (16-aligned). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10 args: pad 8 + ten pushes = 48 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: pad 4 + eleven pushes = 48 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: twelve pushes are already 48 bytes, no pad needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1575
1576#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1577
1578/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1579
1580#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1581 || defined(PLAT_amd64_solaris)
1582
/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call.  (rax is omitted: it is
   the asm's output operand, so gcc already knows.) */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1588
1589/* This is all pretty complex. It's so as to make stack unwinding
1590 work reliably. See bug 243270. The basic problem is the sub and
1591 add of 128 of %rsp in all of the following macros. If gcc believes
1592 the CFA is in %rsp, then unwinding may fail, because what's at the
1593 CFA is not what gcc "expected" when it constructs the CFIs for the
1594 places where the macros are instantiated.
1595
1596 But we can't just add a CFI annotation to increase the CFA offset
1597 by 128, to match the sub of 128 from %rsp, because we don't know
1598 whether gcc has chosen %rsp as the CFA at that point, or whether it
1599 has chosen some other register (eg, %rbp). In the latter case,
1600 adding a CFI annotation to change the CFA offset is simply wrong.
1601
1602 So the solution is to get hold of the CFA using
1603 __builtin_dwarf_cfa(), put it in a known register, and add a
1604 CFI annotation to say what the register is. We choose %rbp for
1605 this (perhaps perversely), because:
1606
1607 (1) %rbp is already subject to unwinding. If a new register was
1608 chosen then the unwinder would have to unwind it in all stack
1609 traces, which is expensive, and
1610
1611 (2) %rbp is already subject to precise exception updates in the
1612 JIT. If a new register was chosen, we'd have to have precise
1613 exceptions for it too, which reduces performance of the
1614 generated code.
1615
1616 However .. one extra complication. We can't just whack the result
1617 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1618 list of trashed registers at the end of the inline assembly
1619 fragments; gcc won't allow %rbp to appear in that list. Hence
1620 instead we need to stash %rbp in %r15 for the duration of the asm,
1621 and say that %r15 is trashed instead. gcc seems happy to go with
1622 that.
1623
1624 Oh .. and this all needs to be conditionalised so that it is
1625 unchanged from before this commit, when compiled with older gccs
1626 that don't support __builtin_dwarf_cfa. Furthermore, since
1627 this header file is freestanding, it has to be independent of
1628 config.h, and so the following conditionalisation cannot depend on
1629 configure time checks.
1630
1631 Although it's not clear from
1632 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1633 this expression excludes Darwin.
1634 .cfi directives in Darwin assembly appear to be completely
1635 different and I haven't investigated how they work.
1636
1637 For even more entertainment value, note we have to use the
1638 completely undocumented __builtin_dwarf_cfa(), which appears to
1639 really compute the CFA, whereas __builtin_frame_address(0) claims
1640 to but actually doesn't. See
1641 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1642*/
/* See the long explanation above: when the compiler emits DWARF CFI
   we publish the real CFA (from __builtin_dwarf_cfa(), passed in as
   asm input %2) in %rbp so unwinding through the hidden call works,
   stashing the caller's %rbp in %r15 for the duration (gcc refuses
   %rbp itself in a clobber list).  Otherwise all three expand to
   nothing. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1659
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save %rsp in %r14 (hence "r14" in the trash lists below), then
   round %rsp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1670
1671/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1672 long) == 8. */
1673
1674/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1675 macros. In order not to trash the stack redzone, we need to drop
1676 %rsp by 128 before the hidden call, and restore afterwards. The
   nastiness is that it is only by luck that the stack still appears
1678 to be unwindable during the hidden call - since then the behaviour
1679 of any routine using this macro does not match what the CFI data
1680 says. Sigh.
1681
1682 Why is this important? Imagine that a wrapper has a stack
1683 allocated local, and passes to the hidden call, a pointer to it.
1684 Because gcc does not know about the hidden call, it may allocate
1685 that local in the redzone. Unfortunately the hidden call may then
1686 trash it before it comes to use it. So we must step clear of the
1687 redzone, for the duration of the hidden call, to make it safe.
1688
1689 Probably the same problem afflicts the other redzone-style ABIs too
1690 (ppc64-linux); but for those, the stack is
1691 self describing (none of this CFI nonsense) so at least messing
1692 with the stack pointer doesn't give a danger of non-unwindable
1693 stack. */
1694
/* Call a 0-arg function at _orig.nraddr without redirection.  %rax
   carries the argvec pointer in and the result out; "subq $128"
   steps below the red zone for the duration of the hidden call (see
   the comment above). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg, in %rdi per the SysV AMD64 ABI. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args: %rdi, %rsi. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args: %rdi, %rsi, %rdx. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args: %rdi, %rsi, %rdx, %rcx. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1819
/* As CALL_FN_W_WWWW but for 5 word args: arg5 additionally goes
   in r8.  All five args still fit in the integer argument
   registers, so nothing is passed on the stack. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1850
/* As CALL_FN_W_5W but for 6 word args: arg6 additionally goes in
   r9, exhausting the six integer argument registers. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1883
/* 7 word args: args 1..6 in registers, arg7 pushed on the stack.
   NOTE(review): the displacement here is $136 rather than $128 --
   presumably so that rsp stays 16-aligned after the single (odd)
   push that follows; confirm against the ABI alignment rules. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1919
/* 8 word args: args 1..6 in registers, args 7..8 pushed on the
   stack (pushed in reverse order so arg7 ends up at the lower
   address).  Even push count, so the plain $128 displacement is
   used. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1957
/* 9 word args: args 1..6 in registers, args 7..9 pushed in reverse
   order.  Odd push count, hence the $136 displacement (see the
   note on CALL_FN_W_7W). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1997
/* 10 word args: args 1..6 in registers, args 7..10 pushed in
   reverse order (even push count -> $128 displacement). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2039
/* 11 word args: args 1..6 in registers, args 7..11 pushed in
   reverse order (odd push count -> $136 displacement). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2083
/* 12 word args: args 1..6 in registers, args 7..12 pushed in
   reverse order (even push count -> $128 displacement).  Largest
   arity supported by this macro family on amd64. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2129
2130#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2131
2132/* ------------------------ ppc32-linux ------------------------ */
2133
2134#if defined(PLAT_ppc32_linux)
2135
2136/* This is useful for finding out about the on-stack stuff:
2137
2138 extern int f9 ( int,int,int,int,int,int,int,int,int );
2139 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2140 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2141 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2142
2143 int g9 ( void ) {
2144 return f9(11,22,33,44,55,66,77,88,99);
2145 }
2146 int g10 ( void ) {
2147 return f10(11,22,33,44,55,66,77,88,99,110);
2148 }
2149 int g11 ( void ) {
2150 return f11(11,22,33,44,55,66,77,88,99,110,121);
2151 }
2152 int g12 ( void ) {
2153 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2154 }
2155*/
2156
2157/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2158
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save sp (r1) in r28 -- which every CALL_FN_ below declares as
   clobbered -- then mask off the low 4 bits of r1 (rlwinm keeps
   bits 0..27), forcing a 16-byte-aligned stack pointer. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rlwinm 1,1,0,0,27\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved sp from r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2176
2177/* These CALL_FN_ macros assume that on ppc32-linux,
2178 sizeof(unsigned long) == 4. */
2179
/* Call a no-arg original function; result (r3) into 'lval'.
   The target address is fetched from _argvec[0] via r11, which the
   VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 fragment expects. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2199
/* 1 word arg: arg1 loaded from _argvec[1] into r3.  The target
   address in r11 is loaded last so r11 can serve as the base
   register for the argument loads. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2221
/* 2 word args: args 1..2 in r3/r4 (offsets 4 and 8 from the
   _argvec base held in r11). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2245
/* 3 word args: args 1..3 in r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2271
/* 4 word args: args 1..4 in r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2299
/* 5 word args: args 1..5 in r3..r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2329
/* 6 word args: args 1..6 in r3..r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2361
/* 7 word args: args 1..7 in r3..r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2396
/* 8 word args: args 1..8 in r3..r10, exhausting the ppc32
   integer argument registers (see ARGREGS note above). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2433
/* 9 word args: args 1..8 in r3..r10, arg9 stored to the outgoing
   parameter area at 8(r1) after dropping sp by 16.  arg9 is
   staged through r3, which is why it is stored before r3 is
   loaded with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2476
/* 10 word args: args 1..8 in r3..r10, args 9..10 stored to
   8(r1)/12(r1) in the 16-byte stack frame carved out below sp. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2523
/* 11 word args: args 1..8 in r3..r10, args 9..11 stored at
   8/12/16(r1) in a 32-byte frame (enlarged from 16 to keep the
   sp 16-aligned with three stacked words). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2574
/* 12 word args: args 1..8 in r3..r10, args 9..12 stored at
   8/12/16/20(r1) in a 32-byte frame.  Largest arity supported by
   this macro family on ppc32. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2629
2630#endif /* PLAT_ppc32_linux */
2631
2632/* ------------------------ ppc64-linux ------------------------ */
2633
2634#if defined(PLAT_ppc64be_linux)
2635
2636/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2637
/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save sp (r1) in r28 (declared clobbered in every CALL_FN_
   below), then clear the low 4 bits of r1 (rldicr keeps bits
   0..59), forcing a 16-byte-aligned stack pointer. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved sp from r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2655
2656/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2657 long) == 8. */
2658
/* Call a no-arg original function (ppc64 big-endian, ELF v1 ABI
   with a TOC pointer in r2).  The caller's r2 is parked at
   -16(11) (_argvec[0]) across the call and the callee's TOC,
   supplied by _orig.r2 at -8(11), is installed first; the input
   operand points at _argvec[2] so these live at negative
   offsets. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2684
/* 1 word arg (ppc64be): arg1 at 8(11) -> r3; TOC pointer juggled
   as in CALL_FN_W_v above. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2712
/* 2 word args (ppc64be): args 1..2 at 8/16(11) -> r3/r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2742
/* 3 word args (ppc64be): args 1..3 at 8/16/24(11) -> r3..r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2774
/* 4 word args (ppc64be): args 1..4 at 8..32(11) -> r3..r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2808
/* ppc64be no-redirect call, 5 word args: args loaded into r3..r7;
   TOC swapped around the call; result returned in r3. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+5];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
2844
/* ppc64be no-redirect call, 6 word args: args loaded into r3..r8;
   TOC swapped around the call; result returned in r3. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+6];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
2882
/* ppc64be no-redirect call, 7 word args: args loaded into r3..r9;
   TOC swapped around the call; result returned in r3. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+7];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                       \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
2923
/* ppc64be no-redirect call, 8 word args: the full register-argument
   set r3..r10 is used; TOC swapped around the call; result in r3. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+8];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
2966
/* ppc64be no-redirect call, 9 word args: r3..r10 take args 1-8; the
   stack frame is grown by 128 bytes and arg9 is spilled to 112(1)
   (its parameter-save-area slot) before the register args are
   loaded.  VALGRIND_RESTORE_STACK undoes the frame growth. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+9];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-128\n\t" /* expand stack frame */            \
         /* arg9 */                                              \
         "ld 3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* args1-8 */                                           \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3015
/* ppc64be no-redirect call, 10 word args: r3..r10 take args 1-8;
   args 9 and 10 are spilled to 112(1) and 120(1) in a frame grown
   by 128 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+10];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-128\n\t" /* expand stack frame */            \
         /* arg10 */                                             \
         "ld 3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                      \
         /* arg9 */                                              \
         "ld 3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* args1-8 */                                           \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3068
/* ppc64be no-redirect call, 11 word args: r3..r10 take args 1-8;
   args 9-11 are spilled to 112(1)/120(1)/128(1) in a frame grown by
   144 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+11];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      _argvec[2+11] = (unsigned long)arg11;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-144\n\t" /* expand stack frame */            \
         /* arg11 */                                             \
         "ld 3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                      \
         /* arg10 */                                             \
         "ld 3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                      \
         /* arg9 */                                              \
         "ld 3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* args1-8 */                                           \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3125
/* ppc64be no-redirect call, 12 word args: r3..r10 take args 1-8;
   args 9-12 are spilled to 112(1)..136(1) in a frame grown by 144
   bytes. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12)          \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+12];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      _argvec[2+11] = (unsigned long)arg11;                      \
      _argvec[2+12] = (unsigned long)arg12;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 11,%1\n\t"                                          \
         "std 2,-16(11)\n\t" /* save tocptr */                   \
         "ld 2,-8(11)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-144\n\t" /* expand stack frame */            \
         /* arg12 */                                             \
         "ld 3,96(11)\n\t"                                       \
         "std 3,136(1)\n\t"                                      \
         /* arg11 */                                             \
         "ld 3,88(11)\n\t"                                       \
         "std 3,128(1)\n\t"                                      \
         /* arg10 */                                             \
         "ld 3,80(11)\n\t"                                       \
         "std 3,120(1)\n\t"                                      \
         /* arg9 */                                              \
         "ld 3,72(11)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* args1-8 */                                           \
         "ld 3, 8(11)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(11)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t" /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                 \
         "mr 11,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(11)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3186
3187#endif /* PLAT_ppc64be_linux */
3188
3189/* ------------------------- ppc64le-linux ----------------------- */
3190#if defined(PLAT_ppc64le_linux)
3191
3192/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3193
3194/* These regs are trashed by the hidden call. */
/* Clobber list for the inline-asm call wrappers below: the link and
   count registers, XER, all condition-register fields, and the
   volatile GPRs r0 and r3-r13.  (r1/r2 are managed explicitly by the
   wrappers; r28 is trashed by VALGRIND_ALIGN_STACK and is listed
   separately at each call site.) */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
3200
3201/* Macros to save and align the stack before making a function
3202 call and restore it afterwards as gcc may not keep the stack
3203 pointer aligned if it doesn't realise calls are being made
3204 to other functions. */
3205
/* Save the current stack pointer in r28, then round r1 down to a
   16-byte boundary (rldicr ...,0,59 clears the low four bits).  The
   saved value is recovered by VALGRIND_RESTORE_STACK, which is why
   r28 appears in every wrapper's clobber list. */
#define VALGRIND_ALIGN_STACK               \
   "mr 28,1\n\t"                           \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK (and any frame expansion done since):
   restore the original stack pointer saved in r28. */
#define VALGRIND_RESTORE_STACK             \
   "mr 1,28\n\t"
3211
3212/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3213 long) == 8. */
3214
/* Call a zero-arg word-returning function through Valgrind's
   ppc64le no-redirect path.  _argvec[1] holds the callee's TOC
   pointer (_orig.r2) and _argvec[2] the target address.  With
   r12 = &_argvec[2]: the asm saves the current TOC into _argvec[0]
   (-16(12)), installs the callee's TOC from _argvec[1] (-8(12)),
   calls via r12, reloads r12 from %1 (the call clobbered it) to
   restore the saved TOC, and takes the result from r3.  r28 is
   trashed by VALGRIND_ALIGN_STACK/VALGRIND_RESTORE_STACK. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+0];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3240
/* ppc64le no-redirect call, 1 word arg: arg1 goes in r3; TOC swapped
   around the call via _argvec[0]/_argvec[1]; result returned in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+1];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3268
/* ppc64le no-redirect call, 2 word args: args go in r3/r4; TOC
   swapped around the call; result returned in r3. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+2];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3298
/* ppc64le no-redirect call, 3 word args: args go in r3..r5; TOC
   swapped around the call; result returned in r3. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+3];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3330
/* ppc64le no-redirect call, 4 word args: args go in r3..r6; TOC
   swapped around the call; result returned in r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)          \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+4];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3364
/* ppc64le no-redirect call, 5 word args: args go in r3..r7; TOC
   swapped around the call; result returned in r3. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+5];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3400
/* ppc64le no-redirect call, 6 word args: args go in r3..r8; TOC
   swapped around the call; result returned in r3. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+6];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3438
/* ppc64le no-redirect call, 7 word args: args go in r3..r9; TOC
   swapped around the call; result returned in r3. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                       \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+7];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                       \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3479
/* ppc64le no-redirect call, 8 word args: the full register-argument
   set r3..r10 is used; TOC swapped around the call; result in r3. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                                  \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+8];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3522
/* ppc64le no-redirect call, 9 word args: r3..r10 take args 1-8; the
   frame is grown by 128 bytes and arg9 is spilled to 96(1) (its
   parameter-save-area slot; note the le offsets differ from the be
   variants) before the register args are loaded. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                             \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+9];                       \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-128\n\t" /* expand stack frame */            \
         /* arg9 */                                              \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args1-8 */                                           \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3571
/* ppc64le no-redirect call, 10 word args: r3..r10 take args 1-8;
   args 9 and 10 are spilled to 96(1) and 104(1) in a frame grown by
   128 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10)                      \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+10];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-128\n\t" /* expand stack frame */            \
         /* arg10 */                                             \
         "ld 3,80(12)\n\t"                                       \
         "std 3,104(1)\n\t"                                      \
         /* arg9 */                                              \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args1-8 */                                           \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3624
/* ppc64le no-redirect call, 11 word args: r3..r10 take args 1-8;
   args 9-11 are spilled to 96(1)/104(1)/112(1) in a frame grown by
   144 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11)                \
   do {                                                          \
      volatile OrigFn _orig = (orig);                            \
      volatile unsigned long _argvec[3+11];                      \
      volatile unsigned long _res;                               \
      /* _argvec[0] holds current r2 across the call */          \
      _argvec[1] = (unsigned long)_orig.r2;                      \
      _argvec[2] = (unsigned long)_orig.nraddr;                  \
      _argvec[2+1] = (unsigned long)arg1;                        \
      _argvec[2+2] = (unsigned long)arg2;                        \
      _argvec[2+3] = (unsigned long)arg3;                        \
      _argvec[2+4] = (unsigned long)arg4;                        \
      _argvec[2+5] = (unsigned long)arg5;                        \
      _argvec[2+6] = (unsigned long)arg6;                        \
      _argvec[2+7] = (unsigned long)arg7;                        \
      _argvec[2+8] = (unsigned long)arg8;                        \
      _argvec[2+9] = (unsigned long)arg9;                        \
      _argvec[2+10] = (unsigned long)arg10;                      \
      _argvec[2+11] = (unsigned long)arg11;                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "mr 12,%1\n\t"                                          \
         "std 2,-16(12)\n\t" /* save tocptr */                   \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */             \
         "addi 1,1,-144\n\t" /* expand stack frame */            \
         /* arg11 */                                             \
         "ld 3,88(12)\n\t"                                       \
         "std 3,112(1)\n\t"                                      \
         /* arg10 */                                             \
         "ld 3,80(12)\n\t"                                       \
         "std 3,104(1)\n\t"                                      \
         /* arg9 */                                              \
         "ld 3,72(12)\n\t"                                       \
         "std 3,96(1)\n\t"                                       \
         /* args1-8 */                                           \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t" /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                 \
         "mr 12,%1\n\t"                                          \
         "mr %0,3\n\t"                                           \
         "ld 2,-16(12)\n\t" /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                  \
         : /*out*/ "=r" (_res)                                   \
         : /*in*/ "r" (&_argvec[2])                              \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
3681
/* ppc64le: call a 12-arg function with return value.  Args 1..8 go in
   r3..r10; args 9..12 are stored into the 144-byte expanded frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"  /* use nraddr's tocptr */             \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"  /* arg1->r3 */                        \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3742
3743#endif /* PLAT_ppc64le_linux */
3744
3745/* ------------------------- arm-linux ------------------------- */
3746
3747#if defined(PLAT_arm_linux)
3748
/* These regs are trashed by the hidden call.  (r14 is the link
   register; listed so gcc keeps nothing live in them.) */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3751
3752/* Macros to save and align the stack before making a function
3753 call and restore it afterwards as gcc may not keep the stack
3754 pointer aligned if it doesn't realise calls are being made
3755 to other functions. */
3756
3757/* This is a bit tricky. We store the original stack pointer in r10
3758 as it is callee-saves. gcc doesn't allow the use of r11 for some
3759 reason. Also, we can't directly "bic" the stack pointer in thumb
3760 mode since r13 isn't an allowed register number in that context.
3761 So use r4 as a temporary, since that is about to get trashed
3762 anyway, just after each use of this macro. Side effect is we need
3763 to be very careful about any future changes, since
3764 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in r10 (callee-saved), then 8-byte align sp, using r4 as
   the scratch register (see the explanation above). */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" \
   "mov r4, sp\n\t" \
   "bic r4, r4, #7\n\t" \
   "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: reload the saved sp from r10. */
#define VALGRIND_RESTORE_STACK \
   "mov sp, r10\n\t"
3772
3773/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3774 long) == 4. */
3775
/* arm: zero-arg call; result comes back in r0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3794
/* arm: 1-arg call; arg1 in r0, result in r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3815
/* arm: 2-arg call; args in r0-r1, result in r0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3838
/* arm: 3-arg call; args in r0-r2, result in r0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3863
/* arm: 4-arg call; args in r0-r3, result in r0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3890
/* arm: 5-arg call; args 1-4 in r0-r3, arg5 on the stack.  The extra
   "sub sp, sp, #4" pads the single pushed word to 8 bytes, keeping
   sp 8-aligned. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3921
/* arm: 6-arg call; args 1-4 in r0-r3, args 5-6 pushed (8 bytes, so
   sp stays 8-aligned without padding). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3953
/* arm: 7-arg call; args 1-4 in r0-r3, args 5-7 pushed, plus 4 bytes
   of padding to keep sp 8-aligned. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3989
/* arm: 8-arg call; args 1-4 in r0-r3, args 5-8 pushed (16 bytes). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4026
/* arm: 9-arg call; args 1-4 in r0-r3, args 5-9 pushed, plus 4 bytes
   of padding to keep sp 8-aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4066
/* arm: 10-arg call; args 1-4 in r0-r3, args 5-10 pushed in two
   batches (arg10 first, then args 5-9, so arg10 ends up highest). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4108
/* arm: 11-arg call; args 1-4 in r0-r3, args 5-11 pushed in two
   batches, plus 4 bytes of padding to keep sp 8-aligned. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4154
/* arm: 12-arg call; args 1-4 in r0-r3, args 5-12 pushed in two
   batches (8 words = 32 bytes, so no extra padding is needed). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4201
4202#endif /* PLAT_arm_linux */
4203
4204/* ------------------------ arm64-linux ------------------------ */
4205
4206#if defined(PLAT_arm64_linux)
4207
/* These regs are trashed by the hidden call.  (x30 is the link
   register; the full v0-v31 vector set is listed as well.) */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"
4217
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call.  sp is forced down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" \
   "bic sp, x21, #15\n\t"
/* Undo VALGRIND_ALIGN_STACK: reload the saved sp from x21. */
#define VALGRIND_RESTORE_STACK \
   "mov sp, x21\n\t"
4225
4226/* These CALL_FN_ macros assume that on arm64-linux,
4227 sizeof(unsigned long) == 8. */
4228
/* arm64: zero-arg call through the non-redirected target in x8. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4247
/* arm64: 1-arg call; arg1 in x0, result in x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4268
/* arm64: 2-arg call; args in x0-x1, result in x0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4291
/* arm64: 3-arg call; args in x0-x2, result in x0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4316
/* arm64: 4-arg call; args in x0-x3, result in x0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4343
/* arm64: 5-arg call; args in x0-x4, result in x0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4372
/* arm64: 6-arg call; args in x0-x5, result in x0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4403
/* arm64: 7-arg call; args in x0-x6, result in x0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4437
/* arm64: 8-arg call; args in x0-x7, result in x0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4473
/* arm64: 9-arg call; args 1-8 in x0-x7, arg9 at [sp].  0x20 bytes
   are reserved so sp stays 16-aligned; x8 doubles as scratch before
   receiving the target address. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4513
/* arm64: 10-arg call; args 1-8 in x0-x7, args 9-10 at [sp] and
   [sp,#8].  0x20 bytes reserved keeps sp 16-aligned. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4556
/* arm64: 11-arg call; args 1-8 in x0-x7, args 9-11 at [sp], [sp,#8]
   and [sp,#16].  0x30 bytes reserved keeps sp 16-aligned. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4602
/* arm64: call a 12-arg function with no redirection.  Args 1..8
   in x0..x7; args 9..12 are stored at sp+0..sp+24 inside the
   0x30-byte (16-aligned) stack reservation.  Target address goes
   in x8; result is returned in x0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11,    \
                                  arg12)                         \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)(arg1);                        \
      _argvec[2] = (unsigned long)(arg2);                        \
      _argvec[3] = (unsigned long)(arg3);                        \
      _argvec[4] = (unsigned long)(arg4);                        \
      _argvec[5] = (unsigned long)(arg5);                        \
      _argvec[6] = (unsigned long)(arg6);                        \
      _argvec[7] = (unsigned long)(arg7);                        \
      _argvec[8] = (unsigned long)(arg8);                        \
      _argvec[9] = (unsigned long)(arg9);                        \
      _argvec[10] = (unsigned long)(arg10);                      \
      _argvec[11] = (unsigned long)(arg11);                      \
      _argvec[12] = (unsigned long)(arg12);                      \
      __asm__ volatile(                                          \
         VALGRIND_ALIGN_STACK                                    \
         "sub sp, sp, #0x30 \n\t"                                \
         "ldr x0, [%1, #8] \n\t"                                 \
         "ldr x1, [%1, #16] \n\t"                                \
         "ldr x2, [%1, #24] \n\t"                                \
         "ldr x3, [%1, #32] \n\t"                                \
         "ldr x4, [%1, #40] \n\t"                                \
         "ldr x5, [%1, #48] \n\t"                                \
         "ldr x6, [%1, #56] \n\t"                                \
         "ldr x7, [%1, #64] \n\t"                                \
         "ldr x8, [%1, #72] \n\t"                                \
         "str x8, [sp, #0]  \n\t"                                \
         "ldr x8, [%1, #80] \n\t"                                \
         "str x8, [sp, #8]  \n\t"                                \
         "ldr x8, [%1, #88] \n\t"                                \
         "str x8, [sp, #16] \n\t"                                \
         "ldr x8, [%1, #96] \n\t"                                \
         "str x8, [sp, #24] \n\t"                                \
         "ldr x8, [%1] \n\t"  /* target->x8 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                  \
         VALGRIND_RESTORE_STACK                                  \
         "mov %0, x0"                                            \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "0" (&_argvec[0])                           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"  \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4652
4653#endif /* PLAT_arm64_linux */
4654
4655/* ------------------------- s390x-linux ------------------------- */
4656
4657#if defined(PLAT_s390x_linux)
4658
4659/* Similar workaround as amd64 (see above), but we use r11 as frame
4660 pointer and save the old r11 in r7. r11 might be used for
4661 argvec, therefore we copy argvec in r1 since r1 is clobbered
4662 after the call anyway. */
/* With CFI-capable toolchains, emit unwind info so a debugger can
   step through the hidden call; otherwise only copy the argvec
   pointer into r1. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE                                   \
   ".cfi_remember_state\n\t"                                     \
   "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */            \
   "lgr 7,11\n\t" /* save old frame pointer (r11) in r7 */       \
   "lgr 11,%2\n\t"                                               \
   ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE                                   \
   "lgr 11, 7\n\t" /* restore r11 */                             \
   ".cfi_restore_state\n\t"
#else
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                   \
   "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4681
4682/* Nb: On s390 the stack pointer is properly aligned *at all times*
4683 according to the s390 GCC maintainer. (The ABI specification is not
4684 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4685 VALGRIND_RESTORE_STACK are not defined here. */
4686
4687/* These regs are trashed by the hidden call. Note that we overwrite
4688 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4689 function a proper return address. All others are ABI defined call
4690 clobbers. */
/* r0-r5, r14 (return address, see note above) and the FP regs. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14",        \
                      "f0","f1","f2","f3","f4","f5","f6","f7"
4693
4694/* Nb: Although r11 is modified in the asm snippets below (inside
4695 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4696 two reasons:
4697 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4698 modified
4699 (2) GCC will complain that r11 cannot appear inside a clobber section,
4700 when compiled with -O -fno-omit-frame-pointer
4701 */
4702
/* s390x: zero-arg call.  160 bytes are reserved for the callee's
   register save area; the target address goes in r1 and the word
   result is copied back from r2. */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "d" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4723
/* The calling convention (ABI) passes the arguments in r2-r6 and
   any further ones on the stack. */
/* s390x: 1-arg call.  arg1 is loaded into r2; r1 (argvec, set up
   by the prologue) is then overwritten with the target address. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4747
/* s390x: 2-arg call; args in r2, r3. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4772
/* s390x: 3-arg call; args in r2-r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4799
/* s390x: 4-arg call; args in r2-r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4828
/* s390x: 5-arg call; args in r2-r6.  r6 now carries an argument,
   so it joins the clobber list from here on. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4859
/* s390x: 6-arg call; args 1-5 in r2-r6, arg6 copied via mvc to
   the stack slot at 160(r15) — the frame grows to 168 bytes to
   make room for it. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4893
/* s390x: 7-arg call; args 6-7 go to stack slots 160/168(r15),
   frame grows to 176 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4929
/* s390x: 8-arg call; args 6-8 at 160..176(r15), frame 184 bytes. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4967
/* s390x: 9-arg call; args 6-9 at 160..184(r15), frame 192 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5007
/* s390x: 10-arg call; args 6-10 at 160..192(r15), frame 200 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5049
/* s390x: 11-arg call; args 6-11 at 160..200(r15), frame 208 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5093
/* s390x: 12-arg call; args 6-12 at 160..208(r15), frame 216 bytes. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "mvc 208(8,15), 96(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "lgr %0, 2\n\t"                                         \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5139
5140
5141#endif /* PLAT_s390x_linux */
5142
5143/* ------------------------- mips32-linux ----------------------- */
5144
5145#if defined(PLAT_mips32_linux)
5146
5147/* These regs are trashed by the hidden call. */
/* $2-$3 (v0-v1), $4-$7 (a0-a3), $8-$15 (t0-t7), $24-$25 (t8-t9)
   and $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5151
5152/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5153 long) == 4. */
5154
/* mips32 (o32): zero-arg call.  $28 (gp) and $31 (ra) are saved
   across the call; 16 bytes of outgoing-argument space are
   reserved as the o32 ABI requires; the target address goes in
   $25 (t9) and the word result comes back in $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16\n\t"                                  \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5179
/* mips32: 1-arg call; arg1 in $4 (a0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"   /* arg1*/                          \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5206
/* mips32: 2-arg call; args in $4, $5 (a0, a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5235
/* mips32: 3-arg call; args in $4-$6 (a0-a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5266
/* mips32: 4-arg call; args in $4-$7 (a0-a3). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5299
/* mips32: 5-arg call; args 1-4 in $4-$7, arg5 stored at 16($29)
   in the 24-byte outgoing frame.  arg5 is fetched into $4 before
   the frame adjustment and spilled, then $4 is reloaded with
   arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 24\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 24 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* mips32: 6-arg call; args 5-6 at 16/20($29) in a 32-byte frame. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "nop\n\t"                                                \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5374
/* mips32: 7-arg call; args 5-7 at 16..24($29) in a 32-byte frame. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5417
/* mips32: 8-arg call; args 5-8 at 16..28($29) in a 40-byte frame. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5463
/* mips32: 9-arg call; args 5-9 at 16..32($29) in a 40-byte frame. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5512
/* mips32: 10-arg call; args 5-10 at 16..36($29) in a 48-byte
   frame. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"                                    \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"                                    \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"                                    \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/  "0" (&_argvec[0])                              \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5564
/* As CALL_FN_W_10W, but for 11 word-sized args (mips32/o32).  Args
   5..11 (7 of them) are spilled to the stack at offsets 16..40; the
   frame drop of 48 bytes keeps 8-byte stack alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 48\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 48 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5620
/* As CALL_FN_W_10W, but for 12 word-sized args (mips32/o32).  Args
   5..12 (8 of them) are spilled at offsets 16..44; the frame drop is
   56 bytes.  NOTE(review): this variant uses the "r" input constraint
   where the sibling macros use "0" (tying the input to the output
   register); both are valid here -- confirm before unifying. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 56\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 40(%1) \n\t" \
         "sw $4, 36($29) \n\t" \
         "lw $4, 44(%1) \n\t" \
         "sw $4, 40($29) \n\t" \
         "lw $4, 48(%1) \n\t" \
         "sw $4, 44($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 56 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5679
5680#endif /* PLAT_mips32_linux */
5681
5682/* ------------------------- mips64-linux ------------------------- */
5683
5684#if defined(PLAT_mips64_linux)
5685
/* These regs are trashed by the hidden call. */
/* Listed as clobbers in every CALL_FN_ asm below so the compiler does
   not keep live values in them across the call.  Covers $2/$3 (return
   values), the argument/temporary registers, and $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5690
5691/* These CALL_FN_ macros assume that on mips64-linux,
5692 sizeof(long long) == 8. */
5693
5694#define MIPS64_LONG2REG_CAST(x) ((long long)(long)x)
5695
/* Call an original (non-redirected) function taking no args, mips64/n64
   ABI.  The target address is loaded into $25 (t9) per the PIC calling
   convention; the result is read from $2 (v0) and sign-extended through
   'long' on assignment to lval. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[1]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      __asm__ volatile( \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5712
/* As CALL_FN_W_v, but for one word-sized arg (mips64/n64): arg1 is
   loaded into $4 (a0) from _argvec[1]. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[2]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" /* arg1*/ \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5731
/* As CALL_FN_W_v, but for two word-sized args (mips64/n64): arg1 in
   $4 (a0), arg2 in $5 (a1).  nraddr is stored through
   MIPS64_LONG2REG_CAST for consistency with every other CALL_FN_
   macro in this family (the bare assignment relied on the implicit
   conversion doing the same thing). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[3]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5752
5753
/* As CALL_FN_W_v, but for three word-sized args (mips64/n64): args in
   $4..$6.  nraddr is stored through MIPS64_LONG2REG_CAST for
   consistency with the other CALL_FN_ macros in this family. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[4]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5776
/* As CALL_FN_W_v, but for four word-sized args (mips64/n64): args in
   $4..$7. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[5]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5801
/* As CALL_FN_W_v, but for five word-sized args (mips64/n64): args in
   $4..$8. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[6]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5828
/* As CALL_FN_W_v, but for six word-sized args (mips64/n64): args in
   $4..$9. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[7]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5857
/* As CALL_FN_W_v, but for seven word-sized args (mips64/n64): args in
   $4..$10. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[8]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5889
/* As CALL_FN_W_v, but for eight word-sized args (mips64/n64): args in
   $4..$11 -- the full set of n64 argument registers. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[9]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      __asm__ volatile( \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5923
/* As CALL_FN_W_8W, plus one stack argument: arg9 is copied to 0($29)
   after dropping the stack pointer by 8.
   NOTE(review): the n64 ABI nominally keeps $29 16-byte aligned at
   calls; the 8-byte drop here leaves it 8-aligned across the call --
   confirm this is tolerated by the targets these macros wrap. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[10]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      __asm__ volatile( \
         "dsubu $29, $29, 8\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 8\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
5962
/* As CALL_FN_W_8W, plus two stack arguments: arg9 and arg10 are copied
   to 0($29) and 8($29) after dropping the stack pointer by 16. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[11]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
      __asm__ volatile( \
         "dsubu $29, $29, 16\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 16\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
6004
/* As CALL_FN_W_8W, plus three stack arguments (args 9..11 at
   0/8/16($29) after dropping the stack pointer by 24). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[12]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
      __asm__ volatile( \
         "dsubu $29, $29, 24\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" \
         "ld $4, 88(%1)\n\t" \
         "sd $4, 16($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 24\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
6050
/* As CALL_FN_W_8W, plus four stack arguments (args 9..12 at
   0/8/16/24($29) after dropping the stack pointer by 32). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long long _argvec[13]; \
      volatile unsigned long long _res; \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
      __asm__ volatile( \
         "dsubu $29, $29, 32\n\t" \
         "ld $4, 72(%1)\n\t" \
         "sd $4, 0($29)\n\t" \
         "ld $4, 80(%1)\n\t" \
         "sd $4, 8($29)\n\t" \
         "ld $4, 88(%1)\n\t" \
         "sd $4, 16($29)\n\t" \
         "ld $4, 96(%1)\n\t" \
         "sd $4, 24($29)\n\t" \
         "ld $4, 8(%1)\n\t" \
         "ld $5, 16(%1)\n\t" \
         "ld $6, 24(%1)\n\t" \
         "ld $7, 32(%1)\n\t" \
         "ld $8, 40(%1)\n\t" \
         "ld $9, 48(%1)\n\t" \
         "ld $10, 56(%1)\n\t" \
         "ld $11, 64(%1)\n\t" \
         "ld $25, 0(%1)\n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "daddu $29, $29, 32\n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) (long)_res; \
   } while (0)
6099
6100#endif /* PLAT_mips64_linux */
6101
6102/* ------------------------------------------------------------------ */
6103/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6104/* */
6105/* ------------------------------------------------------------------ */
6106
6107/* Some request codes. There are many more of these, but most are not
6108 exposed to end-user view. These are the public ones, all of the
6109 form 0x1000 + small_number.
6110
6111 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6112 ones start at 0x2000.
6113*/
6114
6115/* These macros are used by tools -- they must be public, but don't
6116 embed them into other programs. */
/* Build a tool-specific client-request base code from a two-character
   tool id: 'a' lands in bits 31..24, 'b' in bits 23..16.  The shifts
   are performed in unsigned arithmetic; left-shifting a (signed) int
   into bit 31 is undefined behaviour in C.  The resulting value is
   identical to the previous formulation. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((unsigned int)((a)&0xff)) << 24 \
                    | ((unsigned int)((b)&0xff)) << 16)))
/* True iff request code 'v' belongs to the tool identified by (a,b),
   i.e. its top 16 bits match that tool's base. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6121
6122/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6123 This enum comprises an ABI exported by Valgrind to programs
6124 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6125 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6126 relevant group. */
/* Numbered client-request codes.  Each is delivered to Valgrind via
   VALGRIND_DO_CLIENT_REQUEST_*; the numeric values are a frozen ABI
   (see the warning above), so entries may be appended but never
   renumbered or removed. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC = 0x1305,
          VG_USERREQ__MEMPOOL_FREE = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS = 0x1902
   } Vg_ClientRequest;
6204
/* Non-GNU compilers lack the '__extension__' keyword; define it away
   so later uses still compile. */
#if !defined(__GNUC__)
# define __extension__ /* */
#endif
6208
6209
6210/* Returns the number of Valgrinds this code is running under. That
6211 is, 0 if running natively, 1 if running under Valgrind, 2 if
6212 running under Valgrind which is running under another Valgrind,
6213 etc. */
/* (The stray line-continuation backslash that previously followed the
   closing paren has been removed: it silently spliced the next source
   line into this macro's definition.) */
#define RUNNING_ON_VALGRIND \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
                                    VG_USERREQ__RUNNING_ON_VALGRIND, \
                                    0, 0, 0, 0, 0)
6218
6219
6220/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
6221 _qzz_len - 1]. Useful if you are debugging a JITter or some such,
6222 since it provides a way to make sure valgrind will retranslate the
6223 invalidated area. Returns no value. */
/* Expands to a statement; evaluates _qzz_addr and _qzz_len once each. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                    _qzz_addr, _qzz_len, 0, 0, 0)
6227
/* Tell an outer Valgrind where an inner Valgrind keeps its guest-thread
   list (see VG_USERREQ__INNER_THREADS above).  Expands to a statement. */
#define VALGRIND_INNER_THREADS(_qzz_addr) \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS, \
                                   _qzz_addr, 0, 0, 0, 0)
6231
6232
6233/* These requests are for getting Valgrind itself to print something.
6234 Possibly with a backtrace. This is a really ugly hack. The return value
6235 is the number of characters printed, excluding the "**<pid>** " part at the
6236 start and the backtrace (if present). */
6237
/* Print a printf-style message into the Valgrind log.  Returns the
   number of characters printed (0 when not running under Valgrind or
   when built with NVALGRIND).
   NOTE(review): '&&' binds tighter than '||' in the #if below, so it
   reads as GNUC || (INTEL && !MSC).  This appears intentional enough
   in practice -- confirm before reparenthesizing. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   /* Client requests compiled out entirely: swallow the arguments. */
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* The va_list is passed by reference (see the comment on
      VG_USERREQ__PRINTF_VALIST_BY_REF): a va_list need not fit in a
      word. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6278
/* As VALGRIND_PRINTF, but Valgrind additionally appends a stack
   backtrace of the calling thread to the message. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   /* Client requests compiled out entirely: swallow the arguments. */
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* va_list passed by reference, as in VALGRIND_PRINTF. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6317
6318
6319/* These requests allow control to move from the simulated CPU to the
6320 real CPU, calling an arbitrary function.
6321
6322 Note that the current ThreadId is inserted as the first argument.
6323 So this call:
6324
6325 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6326
6327 requires f to have this signature:
6328
6329 Word f(Word tid, Word arg1, Word arg2)
6330
6331 where "Word" is a word-sized type.
6332
6333 Note that these client requests are not entirely reliable. For example,
6334 if you call a function with them that subsequently calls printf(),
6335 there's a high chance Valgrind will crash. Generally, your prospects of
6336 these working are made higher if the called function does not refer to
6337 any global variables, and does not refer to any libc or other functions
6338 (printf et al). Any kind of entanglement with libc or dynamic linking is
6339 likely to have a bad outcome, for tricky reasons which we've grappled
6340 with a lot in the past.
6341*/
/* Run _qyy_fn(tid) on the real CPU (see the comment block above);
   yields the function's word-sized result, or 0 when not on Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
                                    VG_USERREQ__CLIENT_CALL0, \
                                    _qyy_fn, \
                                    0, 0, 0, 0)
6347
/* Run _qyy_fn(tid, _qyy_arg1) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
                                    VG_USERREQ__CLIENT_CALL1, \
                                    _qyy_fn, \
                                    _qyy_arg1, 0, 0, 0)
6353
/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
                                    VG_USERREQ__CLIENT_CALL2, \
                                    _qyy_fn, \
                                    _qyy_arg1, _qyy_arg2, 0, 0)
6359
/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
                                    VG_USERREQ__CLIENT_CALL3, \
                                    _qyy_fn, \
                                    _qyy_arg1, _qyy_arg2, \
                                    _qyy_arg3, 0)
6366
6367
6368/* Counts the number of errors that have been recorded by a tool. Nb:
6369 the tool must record the errors with VG_(maybe_record_error)() or
6370 VG_(unique_error)() for them to be counted. */
/* Expression yielding the current error count (0 when not running
   under Valgrind). */
#define VALGRIND_COUNT_ERRORS \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
                               0 /* default return */, \
                               VG_USERREQ__COUNT_ERRORS, \
                               0, 0, 0, 0, 0)
6376
6377/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6378 when heap blocks are allocated in order to give accurate results. This
6379 happens automatically for the standard allocator functions such as
6380 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6381 delete[], etc.
6382
6383 But if your program uses a custom allocator, this doesn't automatically
6384 happen, and Valgrind will not do as well. For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
6386 Valgrind's observations will be at the mmap() level and it won't know that
6387 the chunks should be considered separate entities. In Memcheck's case,
6388 that means you probably won't get heap block overrun detection (because
6389 there won't be redzones marked as unaddressable) and you definitely won't
6390 get any leak detection.
6391
6392 The following client requests allow a custom allocator to be annotated so
6393 that it can be handled accurately by Valgrind.
6394
6395 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6396 by a malloc()-like function. For Memcheck (an illustrative case), this
6397 does two things:
6398
6399 - It records that the block has been allocated. This means any addresses
6400 within the block mentioned in error messages will be
6401 identified as belonging to the block. It also means that if the block
6402 isn't freed it will be detected by the leak checker.
6403
6404 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6405 not set), or addressable and defined (if 'is_zeroed' is set). This
6406 controls how accesses to the block by the program are handled.
6407
6408 'addr' is the start of the usable block (ie. after any
6409 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6410 can apply redzones -- these are blocks of padding at the start and end of
6411 each block. Adding redzones is recommended as it makes it much more likely
6412 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6413 zeroed (or filled with another predictable value), as is the case for
6414 calloc().
6415
6416 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6417 heap block -- that will be used by the client program -- is allocated.
6418 It's best to put it at the outermost level of the allocator if possible;
6419 for example, if you have a function my_alloc() which calls
6420 internal_alloc(), and the client request is put inside internal_alloc(),
6421 stack traces relating to the heap block will contain entries for both
6422 my_alloc() and internal_alloc(), which is probably not what you want.
6423
6424 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6425 custom blocks from within a heap block, B, that has been allocated with
6426 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6427 -- the custom blocks will take precedence.
6428
6429 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6430 Memcheck, it does two things:
6431
6432 - It records that the block has been deallocated. This assumes that the
6433 block was annotated as having been allocated via
6434 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6435
6436 - It marks the block as being unaddressable.
6437
6438 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6439 heap block is deallocated.
6440
6441 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6442 Memcheck, it does four things:
6443
6444 - It records that the size of a block has been changed. This assumes that
6445 the block was annotated as having been allocated via
6446 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6447
6448 - If the block shrunk, it marks the freed memory as being unaddressable.
6449
6450 - If the block grew, it marks the new area as undefined and defines a red
6451 zone past the end of the new block.
6452
6453 - The V-bits of the overlap between the old and the new block are preserved.
6454
6455 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6456 and before deallocation of the old block.
6457
6458 In many cases, these three client requests will not be enough to get your
6459 allocator working well with Memcheck. More specifically, if your allocator
6460 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6461 will be necessary to mark the memory as addressable just before the zeroing
6462 occurs, otherwise you'll get a lot of invalid write errors. For example,
6463 you'll need to do this if your allocator recycles freed blocks, but it
6464 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6465 Alternatively, if your allocator reuses freed blocks for allocator-internal
6466 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6467
6468 Really, what's happening is a blurring of the lines between the client
6469 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6470 memory should be considered unaddressable to the client program, but the
6471 allocator knows more than the rest of the client program and so may be able
6472 to safely access it. Extra client requests are necessary for Valgrind to
6473 understand the distinction between the allocator and the rest of the
6474 program.
6475
6476 Ignored if addr == 0.
6477*/
/* Annotate a custom allocation: 'addr' is the start of the usable block,
   'sizeB' its size, 'rzB' the redzone size, and 'is_zeroed' nonzero if
   the contents are predictable/defined.  Full semantics are in the big
   comment above.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)
6481
/* Inform the tool that the block at 'addr' -- previously annotated with
   VALGRIND_MALLOCLIKE_BLOCK -- changed size in place from 'oldSizeB' to
   'newSizeB' bytes; 'rzB' is the redzone size.  See the comment for
   VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)
6488
/* Inform the tool that the block at 'addr' (annotated earlier with
   VALGRIND_MALLOCLIKE_BLOCK; 'rzB' is its redzone size) has been
   deallocated.  See the comment for VALGRIND_MALLOCLIKE_BLOCK for
   details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)
6495
/* Create a memory pool, identified by the anchor address 'pool'.
   'rzB' (redzone size) and 'is_zeroed' have the same meaning as for
   VALGRIND_MALLOCLIKE_BLOCK, applied to chunks subsequently associated
   with the pool via VALGRIND_MEMPOOL_ALLOC. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, 0, 0)
6500
6501/* Create a memory pool with some flags specifying extended behaviour.
6502 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
6503
   The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
   associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
   by the application as superblocks from which MALLOC_LIKE blocks are
   doled out using VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta
   pool is a two-level pool: the first level consists of the blocks
   described by VALGRIND_MEMPOOL_ALLOC, and the second-level blocks are
   described using VALGRIND_MALLOCLIKE_BLOCK.  Note that the association
   between the pool and the second-level blocks is implicit: second-level
   blocks are located inside first-level blocks.  The
   VALGRIND_MEMPOOL_METAPOOL flag is necessary for such two-level pools,
   as otherwise valgrind will detect overlapping memory blocks, and will
   abort execution (e.g. during leak search).
6515
6516 Such a meta pool can also be marked as an 'auto free' pool using the flag
6517 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
6518 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
6519 will automatically free the second level blocks that are contained
6520 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
6521 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
6522 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
6523 in the first level block.
6524 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
6525 without the VALGRIND_MEMPOOL_METAPOOL flag.
6526*/
/* Flag bits accepted by VALGRIND_CREATE_MEMPOOL_EXT; see the comment
   above for their meaning.  VALGRIND_MEMPOOL_AUTO_FREE is only valid
   when OR-ed with VALGRIND_MEMPOOL_METAPOOL. */
#define VALGRIND_MEMPOOL_AUTO_FREE 1
#define VALGRIND_MEMPOOL_METAPOOL 2
/* Same request as VALGRIND_CREATE_MEMPOOL (which passes 0 for the
   fourth argument), but with 'flags' forwarded to the tool. */
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,         \
                                    pool, rzB, is_zeroed, flags, 0)
6532
/* Destroy the memory pool anchored at 'pool'. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,        \
                                    pool, 0, 0, 0, 0)
6537
/* Associate the piece of memory at 'addr', of length 'size', with the
   memory pool anchored at 'pool'. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,          \
                                    pool, addr, size, 0, 0)
6542
/* Disassociate the piece of memory at 'addr' from the memory pool
   anchored at 'pool'. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,           \
                                    pool, addr, 0, 0, 0)
6547
/* Disassociate from the pool anchored at 'pool' any pieces outside the
   range given by 'addr' and 'size'. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,           \
                                    pool, addr, size, 0, 0)
6552
/* Tell the tool that the pool previously anchored at 'poolA' has moved
   to anchor address 'poolB'.  (The former comment here, "Resize and/or
   move a piece associated with a memory pool", was a copy-paste of the
   VALGRIND_MEMPOOL_CHANGE comment and did not describe this request.) */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,           \
                                    poolA, poolB, 0, 0, 0)
6557
/* Resize and/or move a piece associated with the memory pool anchored
   at 'pool': the piece formerly at 'addrA' is now at 'addrB' with new
   length 'size'. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)               \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,         \
                                    pool, addrA, addrB, size, 0)
6562
/* Return 1 if a mempool anchored at 'pool' exists, else 0.  (Also
   evaluates to 0 when not running under Valgrind, since 0 is this
   request's default value.) */
#define VALGRIND_MEMPOOL_EXISTS(pool)                                   \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MEMPOOL_EXISTS,              \
                               pool, 0, 0, 0, 0)
6568
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte.  (Evaluates to 0, the request's default,
   when not running under Valgrind.) */
#define VALGRIND_STACK_REGISTER(start, end)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__STACK_REGISTER,              \
                               start, end, 0, 0, 0)
6576
/* Unmark the piece of memory associated with a stack id as being a
   stack.  'id' is a value previously returned by
   VALGRIND_STACK_REGISTER. */
#define VALGRIND_STACK_DEREGISTER(id)                                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,       \
                                    id, 0, 0, 0, 0)
6582
/* Change the start and end address of the stack id ('id' is a value
   previously returned by VALGRIND_STACK_REGISTER).
   start is the new lowest addressable stack byte, end is the new
   highest addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,           \
                                    id, start, end, 0, 0)
6589
/* Load PDB debug info for Wine PE image_map.
   NOTE(review): parameter semantics inferred from names only -- 'fd'
   presumably the file descriptor of the PE image, 'ptr'/'total_size'
   the mapping, 'delta' a load-address offset; confirm against the
   Valgrind sources before relying on this. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO,     \
                                    fd, ptr, total_size, delta, 0)
6594
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero.
   (Evaluates to 0, the request's default, when not running under
   Valgrind.) */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                          \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,            \
                               addr, buf64, 0, 0, 0)
6603
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled.
   (Implemented by sending a disablement-count delta of +1.) */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)
6615
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.
   (Implemented by sending a disablement-count delta of -1.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
6621
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise.  (Also evaluates
   to 0, the request's default, when not running under Valgrind.) */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
6630
6631
/* The PLAT_* platform-selection macros are internal to this header;
   undefine them all so they do not leak into client code. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
6647
6648#endif /* __VALGRIND_H */
6649

source code of gtk/subprojects/glib/glib/valgrind.h