1/* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56*/
57
58
59/* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
73#ifndef __VALGRIND_H
74#define __VALGRIND_H
75
76
77/* ------------------------------------------------------------------ */
78/* VERSION NUMBER OF VALGRIND */
79/* ------------------------------------------------------------------ */
80
81/* Specify Valgrind's version number, so that user code can
82 conditionally compile based on our version number. Note that these
83 were introduced at version 3.6 and so do not exist in version 3.5
84 or earlier. The recommended way to use them to check for "version
85 X.Y or later" is (eg)
86
87#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88 && (__VALGRIND_MAJOR__ > 3 \
89 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90*/
91#define __VALGRIND_MAJOR__ 3
92#define __VALGRIND_MINOR__ 25
93
94
95#include <stdarg.h>
96
97/* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
101/* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
107
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
110*/
/* Undefine every PLAT_* tag first, so that at most one of them ends
   up defined by the chain of tests below. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_freebsd
#undef PLAT_amd64_freebsd
#undef PLAT_arm64_freebsd
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_riscv64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


/* Select exactly one platform tag from the compiler's predefined
   macros; if none matches, fall through to the #else and force
   NVALGRIND so no inline asm is emitted at all. */
#if defined(__APPLE__) && defined(__i386__)
#  define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
#  define PLAT_amd64_darwin 1
#elif defined(__FreeBSD__) && defined(__i386__)
#  define PLAT_x86_freebsd 1
#elif defined(__FreeBSD__) && defined(__amd64__)
#  define PLAT_amd64_freebsd 1
#elif defined(__FreeBSD__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_freebsd 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
      || defined(__CYGWIN32__) \
      || (defined(_WIN32) && defined(_M_IX86))
#  define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
      || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
#  define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
#  define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
#  define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
#  define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
#  define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
#  define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
#  define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
#  define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
#  define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
#  define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
#  define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
#  define PLAT_nanomips_linux 1
#elif defined(__linux__) && defined(__riscv) && (__riscv_xlen == 64)
#  define PLAT_riscv64_linux 1
#elif defined(__sun) && defined(__i386__)
#  define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
#  define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
   any inline asms.  */
#  if !defined(NVALGRIND)
#    define NVALGRIND 1
#  endif
#endif
189
190
191/* ------------------------------------------------------------------ */
192/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
193/* in here of use to end-users -- skip to the next section. */
194/* ------------------------------------------------------------------ */
195
196/*
197 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
198 * request. Accepts both pointers and integers as arguments.
199 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
204 * client request and whose value equals the client request result. Accepts
205 * both pointers and integers as arguments. Note that such calls are not
206 * necessarily pure functions -- they may have side effects.
207 */
208
/* Statement form: store the client-request result into _zzq_rlval.
   When not running under Valgrind, _zzq_rlval receives _zzq_default.
   Thin wrapper around VALGRIND_DO_CLIENT_REQUEST_EXPR. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default,            \
                                   _zzq_request, _zzq_arg1, _zzq_arg2,  \
                                   _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default),   \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
215
/* Statement form with no result: the request is issued with a zero
   default and the returned value is discarded. */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1,        \
                           _zzq_arg2,  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                        \
                        (_zzq_request), (_zzq_arg1), (_zzq_arg2),       \
                        (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
221
222#if defined(NVALGRIND)
223
/* Define NVALGRIND to completely remove the Valgrind magic sequence
   from the compiled code (analogous to NDEBUG's effects on
   assert()).  Note that in this configuration the request and
   argument expressions are not evaluated at all; the whole call
   collapses to the default value. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
      (_zzq_default)
231
232#else /* ! NVALGRIND */
233
234/* The following defines the magic code sequences which the JITter
235 spots and handles magically. Don't look too closely at them as
236 they will rot your brain.
237
238 The assembly code sequences for all architectures is in this one
239 file. This is because this file must be stand-alone, and we don't
240 want to have multiple files.
241
242 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
243 value gets put in the return slot, so that everything works when
244 this is executed not under Valgrind. Args are passed in a memory
245 block, and so there's no intrinsic limit to the number that could
246 be passed, but it's currently five.
247
248 The macro args are:
249 _zzq_rlval result lvalue
250 _zzq_default default value (result returned when running on real CPU)
251 _zzq_request request code
252 _zzq_arg1..5 request params
253
254 The other two macros are used to support function wrapping, and are
255 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
256 guest's NRADDR pseudo-register and whatever other information is
257 needed to safely run the call original from the wrapper: on
258 ppc64-linux, the R2 value at the divert point is also needed. This
259 information is abstracted into a user-visible type, OrigFn.
260
261 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
262 guest, but guarantees that the branch instruction will not be
263 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
264 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
265 complete inline asm, since it needs to be combined with more magic
266 inline asm stuff to be useful.
267*/
268
269/* ----------------- x86-{linux,darwin,solaris} ---------------- */
270
#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
    || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
    || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %edi totalling 64 bits (3+13+29+19), so %edi is
   unchanged: a no-op on a real CPU, but recognised by Valgrind as
   the start of a special-instruction sequence. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "roll $3,  %%edi ; roll $13, %%edi\n\t"      \
                     "roll $29, %%edi ; roll $19, %%edi\n\t"

/* Issue a client request: args block pointer goes in %eax, default
   value in %edx; Valgrind overwrites %edx with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EDX = client_request ( %EAX ) */         \
                     "xchgl %%ebx,%%ebx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register into _zzq_rlval.nraddr
   (used by the function-wrapping machinery). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %EAX = guest_NRADDR */                    \
                     "xchgl %%ecx,%%ecx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only, not a complete statement: callers splice this
   into their own inline asm. */
#define VALGRIND_CALL_NOREDIR_EAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%EAX */                     \
                     "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgl %%edi,%%edi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
          || PLAT_x86_solaris || PLAT_x86_freebsd */
335
336/* ------------------------- x86-Win32 ------------------------- */
337
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

#if defined(_MSC_VER)

/* MSVC inline-assembler version of the magic rotate sequence; same
   no-op rotations of edi as the GCC variant above. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
    __asm rol edi, 3  __asm rol edi, 13                           \
    __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    valgrind_do_client_request_expr((uintptr_t)(_zzq_default),    \
        (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1),        \
        (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3),           \
        (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper for the MSVC build: marshals the six request
   words into a stack array (address passed in EAX, default in EDX)
   and runs the magic sequence.  Returns whatever Valgrind left in
   EDX, i.e. _zzq_default when running natively. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
                                uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
                                uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
                                uintptr_t _zzq_arg5)
{
    volatile uintptr_t _zzq_args[6];
    volatile unsigned int _zzq_result;
    _zzq_args[0] = (uintptr_t)(_zzq_request);
    _zzq_args[1] = (uintptr_t)(_zzq_arg1);
    _zzq_args[2] = (uintptr_t)(_zzq_arg2);
    _zzq_args[3] = (uintptr_t)(_zzq_arg3);
    _zzq_args[4] = (uintptr_t)(_zzq_arg4);
    _zzq_args[5] = (uintptr_t)(_zzq_arg5);
    __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
            __SPECIAL_INSTRUCTION_PREAMBLE
            /* %EDX = client_request ( %EAX ) */
            __asm xchg ebx,ebx
            __asm mov _zzq_result, edx
    }
    return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
      volatile unsigned int __addr;                               \
      __asm { __SPECIAL_INSTRUCTION_PREAMBLE                      \
              /* %EAX = guest_NRADDR */                           \
              __asm xchg ecx,ecx                                  \
              __asm mov __addr, eax                               \
      }                                                           \
      _zzq_orig->nraddr = __addr;                                 \
    }

/* Deliberately unusable: no-redirect calls are not supported with
   the MSVC inline assembler. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR()                                  \
    do {                                                          \
        __asm { __SPECIAL_INSTRUCTION_PREAMBLE                    \
                __asm xchg edi,edi                                \
        }                                                         \
    } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
408
409/* ----------------- amd64-{linux,darwin,solaris} --------------- */
410
#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
    || defined(PLAT_amd64_solaris) \
    || defined(PLAT_amd64_freebsd) \
    || (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotates of %rdi totalling 128 bits (3+13+61+51), so %rdi is
   unchanged: a no-op on a real CPU, but recognised by Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rolq $3,  %%rdi ; rolq $13, %%rdi\n\t"      \
                     "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

/* Issue a client request: args block pointer in %rax, default in
   %rdx; Valgrind overwrites %rdx with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
    __extension__                                                 \
    ({ volatile unsigned long int _zzq_args[6];                   \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RDX = client_request ( %RAX ) */         \
                     "xchgq %%rbx,%%rbx"                          \
                     : "=d" (_zzq_result)                         \
                     : "a" (&_zzq_args[0]), "0" (_zzq_default)    \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_result;                                                  \
    })

/* Fetch the guest NRADDR pseudo-register (function wrapping). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
    { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                 \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %RAX = guest_NRADDR */                    \
                     "xchgq %%rcx,%%rcx"                          \
                     : "=a" (__addr)                              \
                     :                                            \
                     : "cc", "memory"                             \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    }

/* Asm fragment only; spliced into larger inline-asm sequences. */
#define VALGRIND_CALL_NOREDIR_RAX                                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%RAX */                     \
                     "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "xchgq %%rdi,%%rdi\n\t"                      \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
          || PLAT_amd64_freebsd || (PLAT_amd64_win64 && __GNUC__) */
475
476/* ------------------------- amd64-Win64 ------------------------- */
477
#if defined(PLAT_amd64_win64) && !defined(__GNUC__)

/* 64-bit MSVC has no inline assembler, so there is currently no way
   to emit the magic sequences; only GCC-style compilers work here. */
#error Unsupported compiler.

#endif /* PLAT_amd64_win64 */
483
484/* ------------------------ ppc32-linux ------------------------ */
485
#if defined(PLAT_ppc32_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of r0 totalling 64 bits (3+13+29+19), leaving r0
   unchanged: a no-op on real hardware, recognised by Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                    "rlwinm 0,0,3,0,31  ; rlwinm 0,0,13,0,31\n\t" \
                    "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

/* Issue a client request: default in R3, args pointer in R4;
   Valgrind replaces R3 with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
    __extension__                                                 \
  ({ unsigned int _zzq_args[6];                                   \
     unsigned int _zzq_result;                                    \
     unsigned int* _zzq_ptr;                                      \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
    })

/* Fetch the guest NRADDR pseudo-register (function wrapping). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only; spliced into larger inline-asm sequences. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc32_linux */
552
553/* ------------------------ ppc64-linux ------------------------ */
554
#if defined(PLAT_ppc64be_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotations of r0 totalling 128 bits (3+13+61+51), leaving r0
   unchanged: a no-op on real hardware, recognised by Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Issue a client request: default in R3, args pointer in R4;
   Valgrind replaces R3 with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register, and also the R2 (TOC
   pointer) value at the divert point, which the wrapper needs to
   safely call the original function on ppc64. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm fragment only; spliced into larger inline-asm sequences. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R11 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64be_linux */
631
#if defined(PLAT_ppc64le_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
      unsigned long int r2;  /* what tocptr do we need? */
   }
   OrigFn;

/* Four rotations of r0 totalling 128 bits (3+13+61+51), leaving r0
   unchanged: a no-op on real hardware, recognised by Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "rotldi 0,0,3  ; rotldi 0,0,13\n\t"          \
                     "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

/* Issue a client request: default in R3, args pointer in R4;
   Valgrind replaces R3 with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({ unsigned long int _zzq_args[6];                              \
     unsigned long int _zzq_result;                               \
     unsigned long int* _zzq_ptr;                                 \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    _zzq_ptr = _zzq_args;                                         \
    __asm__ volatile("mr 3,%1\n\t" /*default*/                    \
                     "mr 4,%2\n\t" /*ptr*/                        \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = client_request ( %R4 ) */           \
                     "or 1,1,1\n\t"                               \
                     "mr %0,3"     /*result*/                     \
                     : "=b" (_zzq_result)                         \
                     : "b" (_zzq_default), "b" (_zzq_ptr)         \
                     : "cc", "memory", "r3", "r4");               \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register, plus the R2 (TOC pointer)
   value at the divert point, needed to call the original function. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR */                     \
                     "or 2,2,2\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %R3 = guest_NRADDR_GPR2 */                \
                     "or 4,4,4\n\t"                               \
                     "mr %0,3"                                    \
                     : "=b" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->r2 = __addr;                                       \
  }

/* Asm fragment only; note the ELFv2 ABI uses R12, not R11. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R12 */       \
                     "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or 5,5,5\n\t"                               \
                    );                                            \
 } while (0)

#endif /* PLAT_ppc64le_linux */
708
709/* ------------------------- arm-linux ------------------------- */
710
#if defined(PLAT_arm_linux)

typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of r12 totalling 64 bits (3+13+29+19), leaving r12
   unchanged: a no-op on real hardware, recognised by Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "mov r12, r12, ror #3  ; mov r12, r12, ror #13 \n\t"  \
            "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

/* Issue a client request: default in R3, args pointer in R4;
   Valgrind replaces R3 with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned int _zzq_args[6];                           \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("mov r3, %1\n\t" /*default*/                 \
                     "mov r4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = client_request ( R4 ) */             \
                     "orr r10, r10, r10\n\t"                      \
                     "mov %0, r3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "cc","memory", "r3", "r4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register (function wrapping). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned int __addr;                                          \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* R3 = guest_NRADDR */                      \
                     "orr r11, r11, r11\n\t"                      \
                     "mov %0, r3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "r3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only; spliced into larger inline-asm sequences. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir *%R4 */        \
                     "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr r9, r9, r9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm_linux */
776
777/* ------------------------ arm64-{linux,freebsd} ------------------------- */
778
#if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* Four rotations of x12 totalling 128 bits (3+13+51+61), leaving
   x12 unchanged: a no-op on real hardware, recognised by Valgrind. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
            "ror x12, x12, #3  ;  ror x12, x12, #13 \n\t"         \
            "ror x12, x12, #51 ;  ror x12, x12, #61 \n\t"

/* Issue a client request: default in X3, args pointer in X4;
   Valgrind replaces X3 with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int _zzq_args[6];                      \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mov x3, %1\n\t" /*default*/                 \
                     "mov x4, %2\n\t" /*ptr*/                     \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = client_request ( X4 ) */             \
                     "orr x10, x10, x10\n\t"                      \
                     "mov %0, x3"     /*result*/                  \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "cc","memory", "x3", "x4");                \
    _zzq_result;                                                  \
  })

/* Fetch the guest NRADDR pseudo-register (function wrapping). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* X3 = guest_NRADDR */                      \
                     "orr x11, x11, x11\n\t"                      \
                     "mov %0, x3"                                 \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "cc", "memory", "x3"                       \
                    );                                            \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Asm fragment only; spliced into larger inline-asm sequences. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                    \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* branch-and-link-to-noredir X8 */          \
                     "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "orr x9, x9, x9\n\t"                         \
                     : : : "cc", "memory"                         \
                    );                                            \
 } while (0)

#endif /* PLAT_arm64_linux || PLAT_arm64_freebsd */
845
846/* ------------------------ s390x-linux ------------------------ */
847
#if defined(PLAT_s390x_linux)

typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code.  This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_toIR.c).
 * The "lr x,x" register-to-itself loads are no-ops on real hardware.
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
                     "lr 15,15\n\t"                               \
                     "lr 1,1\n\t"                                 \
                     "lr 2,2\n\t"                                 \
                     "lr 3,3\n\t"

/* The distinguishing "lr" after the preamble selects the operation. */
#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE  "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE  "lr 5,5\n\t"

/* Issue a client request: args pointer in r2, default in r3;
   Valgrind replaces r3 with the result. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
  __extension__                                                   \
 ({volatile unsigned long int _zzq_args[6];                       \
   volatile unsigned long int _zzq_result;                        \
   _zzq_args[0] = (unsigned long int)(_zzq_request);              \
   _zzq_args[1] = (unsigned long int)(_zzq_arg1);                 \
   _zzq_args[2] = (unsigned long int)(_zzq_arg2);                 \
   _zzq_args[3] = (unsigned long int)(_zzq_arg3);                 \
   _zzq_args[4] = (unsigned long int)(_zzq_arg4);                 \
   _zzq_args[5] = (unsigned long int)(_zzq_arg5);                 \
   __asm__ volatile(/* r2 = args */                               \
                    "lgr 2,%1\n\t"                                \
                    /* r3 = default */                            \
                    "lgr 3,%2\n\t"                                \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CLIENT_REQUEST_CODE                         \
                    /* results = r3 */                            \
                    "lgr %0, 3\n\t"                               \
                    : "=d" (_zzq_result)                          \
                    : "a" (&_zzq_args[0]),                        \
                      "0" ((unsigned long int)_zzq_default)       \
                    : "cc", "2", "3", "memory"                    \
                   );                                             \
   _zzq_result;                                                   \
 })

/* Fetch the guest NRADDR pseudo-register (function wrapping). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
   volatile unsigned long int __addr;                             \
   __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                    __GET_NR_CONTEXT_CODE                         \
                    "lgr %0, 3\n\t"                               \
                    : "=a" (__addr)                               \
                    :                                             \
                    : "cc", "3", "memory"                         \
                   );                                             \
   _zzq_orig->nraddr = __addr;                                    \
 }

/* Asm fragment only; spliced into larger inline-asm sequences. */
#define VALGRIND_CALL_NOREDIR_R1                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     __VEX_INJECT_IR_CODE);                       \
 } while (0)

#endif /* PLAT_s390x_linux */
923
924/* ------------------------- mips32-linux ---------------- */
925
926#if defined(PLAT_mips32_linux)
927
928typedef
929 struct {
930 unsigned int nraddr; /* where's the code? */
931 }
932 OrigFn;
933
934/* .word 0x342
935 * .word 0x742
936 * .word 0xC2
937 * .word 0x4C2*/
938#define __SPECIAL_INSTRUCTION_PREAMBLE \
939 "srl $0, $0, 13\n\t" \
940 "srl $0, $0, 29\n\t" \
941 "srl $0, $0, 3\n\t" \
942 "srl $0, $0, 19\n\t"
943
/* mips32: issue a client request.  The request code and five args are
   packed into a 6-word block; $11 ($t3) carries the default value and
   receives the result, $12 ($t4) points at the block.  The
   "or $13, $13, $13" after the magic preamble is the client-request
   marker.  When not running under Valgrind the marker is a no-op, so
   the expression evaluates to _zzq_default. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* T3 = client_request ( T4 ) */             \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })
967
/* mips32: retrieve the non-redirected address of the wrapped function
   into _zzq_rlval.nraddr.  "or $14, $14, $14" is the get-NR-context
   marker; the JIT delivers the value in $11, which is then moved to
   the output.  Outside Valgrind the marker is a no-op and __addr is
   whatever $11 happened to hold (callers only use this inside
   wrappers, where Valgrind is present). */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned int __addr;                                 \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* %t9 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11"                                      \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }
981
/* mips32: asm fragment performing a no-redirect call through $t9
   ($25); "or $15, $15, $15" is the call-noredir marker.  Pasted into
   the CALL_FN_* sequences after $t9 has been loaded. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir *%t9 */                      \
                     "or $15, $15, $15\n\t"

/* mips32: IR-injection marker ("or $11, $11, $11" after the magic
   preamble); no C-visible effect. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)
993
994
995#endif /* PLAT_mips32_linux */
996
997/* ------------------------- mips64-linux ---------------- */
998
999#if defined(PLAT_mips64_linux)
1000
/* mips64: context captured by VALGRIND_GET_ORIG_FN — the 64-bit
   address of the original (non-redirected) function. */
typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;

/* mips64 magic sequence: four shifts of $zero (no-ops on real
   hardware) that the Valgrind JIT pattern-matches:
   dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                            \
   "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
1014
/* mips64: issue a client request.  Same protocol as mips32 but with
   64-bit words: $11 holds default/result, $12 points at the six-word
   argument block, and "or $13, $13, $13" is the request marker.
   Evaluates to _zzq_default when not under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned long int _zzq_args[6];                     \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("move $11, %1\n\t" /*default*/               \
                     "move $12, %2\n\t" /*ptr*/                   \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = client_request ( $12 ) */           \
                     "or $13, $13, $13\n\t"                       \
                     "move %0, $11\n\t" /*result*/                \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$11", "$12", "memory");                   \
    _zzq_result;                                                  \
  })
1038
/* mips64: retrieve the non-redirected address of the wrapped function
   into _zzq_rlval.nraddr.  "or $14, $14, $14" is the get-NR-context
   marker; the JIT delivers the value in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $11 = guest_NRADDR */                     \
                     "or $14, $14, $14\n\t"                       \
                     "move %0, $11"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$11");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
1051
/* mips64: no-redirect call through $25 ($t9); "or $15, $15, $15" is
   the call-noredir marker.  Pasted into CALL_FN_* sequences after
   $25 has been loaded with the target. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or $15, $15, $15\n\t"

/* mips64: IR-injection marker; no C-visible effect. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or $11, $11, $11\n\t"                       \
                    );                                            \
 } while (0)
1063
1064#endif /* PLAT_mips64_linux */
1065
1066#if defined(PLAT_nanomips_linux)
1067
/* nanomips: context captured by VALGRIND_GET_ORIG_FN — the address of
   the original (non-redirected) function; 32-bit. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/* nanomips magic sequence: four srl's of $zero (architectural no-ops)
   that the Valgrind JIT pattern-matches.  The [32] suffix forces the
   32-bit instruction encodings shown below:
   8000 c04d  srl  zero, zero, 13
   8000 c05d  srl  zero, zero, 29
   8000 c043  srl  zero, zero, 3
   8000 c053  srl  zero, zero, 19
*/

#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3  \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"
1084
/* nanomips: issue a client request.  $a7 carries the default value
   and receives the result, $t0 points at the six-word argument
   block; "or[32] $t0, $t0, $t0" is the client-request marker.
   Evaluates to _zzq_default when not under Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
    __asm__ volatile("move $a7, %1\n\t" /* default */             \
                     "move $t0, %2\n\t" /* ptr */                 \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = client_request( $t0 ) */            \
                     "or[32] $t0, $t0, $t0\n\t"                   \
                     "move %0, $a7\n\t" /* result */              \
                     : "=r" (_zzq_result)                         \
                     : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                     : "$a7", "$t0", "memory");                   \
    _zzq_result;                                                  \
  })
1108
/* nanomips: retrieve the non-redirected address of the wrapped
   function into _zzq_rlval.nraddr.  "or[32] $t1, $t1, $t1" is the
   get-NR-context marker; the value is delivered in $a7.
   NOTE(review): __addr is declared unsigned long while nraddr is
   unsigned int — same width on 32-bit nanoMIPS, but worth confirming
   this is intentional. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = guest_NRADDR */                     \
                     "or[32] $t1, $t1, $t1\n\t"                   \
                     "move %0, $a7"     /*result*/                \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$a7");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }
1121
/* nanomips: no-redirect call through $25; "or[32] $t2, $t2, $t2" is
   the call-noredir marker.  Pasted into CALL_FN_* sequences after
   the target address has been loaded. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* call-noredir $25 */                       \
                     "or[32] $t2, $t2, $t2\n\t"

/* nanomips: IR-injection marker; no C-visible effect. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or[32] $t3, $t3, $t3\n\t"                   \
                    );                                            \
 } while (0)
1133
1134#endif
1135
1136/* ----------------------- riscv64-linux ------------------------ */
1137
1138#if defined(PLAT_riscv64_linux)
1139
/* riscv64: context captured by VALGRIND_GET_ORIG_FN — the 64-bit
   address of the original (non-redirected) function. */
typedef
   struct {
      unsigned long int nraddr; /* where's the code? */
   }
   OrigFn;

/* riscv64 magic sequence: four srli's of the zero register
   (architectural no-ops) pattern-matched by the Valgrind JIT.
   ".option norvc" forces uncompressed 4-byte encodings so the byte
   pattern is exact; the matching POSTAMBLE below restores the
   assembler state pushed here. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                            \
   ".option push\n\t"                                             \
   ".option norvc\n\t"                                            \
   "srli zero, zero, 3\n\t"                                       \
   "srli zero, zero, 13\n\t"                                      \
   "srli zero, zero, 51\n\t"                                      \
   "srli zero, zero, 61\n\t"

/* riscv64: pop the assembler option state pushed by the preamble. */
#define __SPECIAL_INSTRUCTION_POSTAMBLE                           \
   ".option pop\n\t"                                              \
1156
/* riscv64: issue a client request.  a3 carries the default value and
   receives the result, a4 points at the six-word argument block;
   "or a0, a0, a0" between preamble and postamble is the
   client-request marker.  Evaluates to _zzq_default when not under
   Valgrind. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
        _zzq_default, _zzq_request,                               \
        _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)    \
                                                                  \
  __extension__                                                   \
  ({volatile unsigned long int _zzq_args[6];                      \
    volatile unsigned long int _zzq_result;                       \
    _zzq_args[0] = (unsigned long int)(_zzq_request);             \
    _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
    _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
    _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
    _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
    _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
    __asm__ volatile("mv a3, %1\n\t" /*default*/                  \
                     "mv a4, %2\n\t" /*ptr*/                      \
                     __SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* a3 = client_request ( a4 ) */             \
                     "or a0, a0, a0\n\t"                          \
                     __SPECIAL_INSTRUCTION_POSTAMBLE              \
                     "mv %0, a3" /*result*/                       \
                     : "=r" (_zzq_result)                         \
                     : "r" ((unsigned long int)(_zzq_default)),   \
                       "r" (&_zzq_args[0])                        \
                     : "memory", "a3", "a4");                     \
    _zzq_result;                                                  \
  })
1183
/* riscv64: retrieve the non-redirected address of the wrapped
   function into _zzq_rlval.nraddr.  "or a1, a1, a1" is the
   get-NR-context marker; the value is delivered in a3. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    unsigned long int __addr;                                     \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* a3 = guest_NRADDR */                      \
                     "or a1, a1, a1\n\t"                          \
                     __SPECIAL_INSTRUCTION_POSTAMBLE              \
                     "mv %0, a3"                                  \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "memory", "a3"                             \
                     );                                           \
    _zzq_orig->nraddr = __addr;                                   \
  }
1198
/* riscv64: asm fragment performing a no-redirect branch-and-link to
   the address in t0; "or a2, a2, a2" is the marker.  Pasted into
   CALL_FN_* sequences after t0 has been loaded with the target. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                    \
   __SPECIAL_INSTRUCTION_PREAMBLE                                 \
   /* branch-and-link-to-noredir t0 */                            \
   "or a2, a2, a2\n\t"                                            \
   __SPECIAL_INSTRUCTION_POSTAMBLE

/* riscv64: IR-injection marker; no C-visible effect. */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or a3, a3, a3\n\t"                          \
                     __SPECIAL_INSTRUCTION_POSTAMBLE              \
                     : : : "memory"                               \
                    );                                            \
 } while (0)
1213
1214#endif /* PLAT_riscv64_linux */
1215
1216/* Insert assembly code for other platforms here... */
1217
1218#endif /* NVALGRIND */
1219
1220
1221/* ------------------------------------------------------------------ */
1222/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1223/* ugly. It's the least-worst tradeoff I can think of. */
1224/* ------------------------------------------------------------------ */
1225
1226/* This section defines magic (a.k.a appalling-hack) macros for doing
1227 guaranteed-no-redirection macros, so as to get from function
1228 wrappers to the functions they are wrapping. The whole point is to
1229 construct standard call sequences, but to do the call itself with a
1230 special no-redirect call pseudo-instruction that the JIT
1231 understands and handles specially. This section is long and
1232 repetitious, and I can't see a way to make it shorter.
1233
1234 The naming scheme is as follows:
1235
1236 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1237
1238 'W' stands for "word" and 'v' for "void". Hence there are
1239 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1240 and for each, the possibility of returning a word-typed result, or
1241 no result.
1242*/
1243
1244/* Use these to write the name of your wrapper. NOTE: duplicates
1245 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1247 See pub_tool_redir.h for details -- normally you don't need to
1248 think about this, though. */
1249
1250/* Use an extra level of macroisation so as to ensure the soname/fnname
1251 args are fully macro-expanded before pasting them together. */
/* Paste four fragments into one identifier.  The extra macro level
   (callers go through this rather than ## directly) guarantees the
   soname/fnname arguments are fully macro-expanded before pasting. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the magic symbol name "_vgw00000ZU_<soname>_<fnname>" that
   marks a function wrapper; "00000" is the default behaviour
   equivalence-class tag.  ZU/ZZ select the name-encoding variant —
   see pub_tool_redir.h. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
1259
/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros.  The type of the argument _lval is OrigFn.  This simply
   forwards to the platform-specific VALGRIND_GET_NR_CONTEXT defined
   earlier in this file. */
#define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
1265
1266/* Also provide end-user facilities for function replacement, rather
1267 than wrapping. A replacement function differs from a wrapper in
1268 that it has no way to get hold of the original function being
1269 called, and hence no way to call onwards to it. In a replacement
1270 function, VALGRIND_GET_ORIG_FN always returns zero. */
1271
/* Build the magic symbol name "_vgr00000ZU_<soname>_<fnname>" that
   marks a function *replacement* (as opposed to the _vgw... wrapper
   prefix above); replacements cannot call onward to the original. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1277
1278/* Derivatives of the main macros below, for calling functions
1279 returning void. */
1280
/* Void-returning variants: each forwards to the corresponding
   word-returning CALL_FN_W_* macro and discards the result into a
   volatile local (volatile so the call itself cannot be optimised
   away). */
#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_v(_junk,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_W(_junk,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _junk;                             \
        CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1312
/* ------------- x86-{linux,darwin,solaris,freebsd} ------------ */
1314
1315#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
1316 || defined(PLAT_x86_solaris) || defined(PLAT_x86_freebsd)
1317
/* These regs are trashed by the hidden call. No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  %edi is used to hold the saved %esp, which is
   why "edi" appears in every CALL_FN_* clobber list below. */

#define VALGRIND_ALIGN_STACK                                      \
      "movl %%esp,%%edi\n\t"                                      \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movl %%edi,%%esp\n\t"
1332
/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
   long) == 4.

   Common shape of all x86 CALL_FN_W_* macros: %eax comes in pointing
   at _argvec; the args are pushed right-to-left from _argvec[N..1],
   the target address is loaded from _argvec[0] into %eax, and the
   call is made via the no-redirect pseudo-instruction.  The
   "subl $K, %%esp" before the pushes keeps the stack 16-byte
   aligned: VALGRIND_ALIGN_STACK rounds %esp down to a multiple of
   16, and K is chosen so that K + 4*(number of pushes) is a
   multiple of 16. */

/* arity 0: no args to push. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 1: 12 pad + 1 push = 16 bytes. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 2: 8 pad + 2 pushes = 16 bytes. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1397
/* arity 3: 4 pad + 3 pushes = 16 bytes.  (See the alignment note
   above CALL_FN_W_v.) */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 4: 4 pushes = 16 bytes, no pad needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 5: 12 pad + 5 pushes = 32 bytes. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1477
/* arity 6: 8 pad + 6 pushes = 32 bytes.  (See the alignment note
   above CALL_FN_W_v.) */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 7: 4 pad + 7 pushes = 32 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 8: 8 pushes = 32 bytes, no pad needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1577
/* arity 9: 12 pad + 9 pushes = 48 bytes.  (See the alignment note
   above CALL_FN_W_v.) */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 10: 8 pad + 10 pushes = 48 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 11: 4 pad + 11 pushes = 48 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* arity 12: 12 pushes = 48 bytes, no pad needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1742
#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris
          || PLAT_x86_freebsd */
1744
/* ------------ amd64-{linux,darwin,solaris,freebsd} ----------- */
1746
1747#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
1748 || defined(PLAT_amd64_solaris) || defined(PLAT_amd64_freebsd)
1749
/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call.  %rax is deliberately
   omitted: it is the asm output operand ("=a"), so gcc already knows
   it is written. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"
1755
1756/* This is all pretty complex. It's so as to make stack unwinding
1757 work reliably. See bug 243270. The basic problem is the sub and
1758 add of 128 of %rsp in all of the following macros. If gcc believes
1759 the CFA is in %rsp, then unwinding may fail, because what's at the
1760 CFA is not what gcc "expected" when it constructs the CFIs for the
1761 places where the macros are instantiated.
1762
1763 But we can't just add a CFI annotation to increase the CFA offset
1764 by 128, to match the sub of 128 from %rsp, because we don't know
1765 whether gcc has chosen %rsp as the CFA at that point, or whether it
1766 has chosen some other register (eg, %rbp). In the latter case,
1767 adding a CFI annotation to change the CFA offset is simply wrong.
1768
1769 So the solution is to get hold of the CFA using
1770 __builtin_dwarf_cfa(), put it in a known register, and add a
1771 CFI annotation to say what the register is. We choose %rbp for
1772 this (perhaps perversely), because:
1773
1774 (1) %rbp is already subject to unwinding. If a new register was
1775 chosen then the unwinder would have to unwind it in all stack
1776 traces, which is expensive, and
1777
1778 (2) %rbp is already subject to precise exception updates in the
1779 JIT. If a new register was chosen, we'd have to have precise
1780 exceptions for it too, which reduces performance of the
1781 generated code.
1782
1783 However .. one extra complication. We can't just whack the result
1784 of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1785 list of trashed registers at the end of the inline assembly
1786 fragments; gcc won't allow %rbp to appear in that list. Hence
1787 instead we need to stash %rbp in %r15 for the duration of the asm,
1788 and say that %r15 is trashed instead. gcc seems happy to go with
1789 that.
1790
1791 Oh .. and this all needs to be conditionalised so that it is
1792 unchanged from before this commit, when compiled with older gccs
1793 that don't support __builtin_dwarf_cfa. Furthermore, since
1794 this header file is freestanding, it has to be independent of
1795 config.h, and so the following conditionalisation cannot depend on
1796 configure time checks.
1797
1798 Although it's not clear from
1799 'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1800 this expression excludes Darwin.
1801 .cfi directives in Darwin assembly appear to be completely
1802 different and I haven't investigated how they work.
1803
1804 For even more entertainment value, note we have to use the
1805 completely undocumented __builtin_dwarf_cfa(), which appears to
1806 really compute the CFA, whereas __builtin_frame_address(0) claims
1807 to but actually doesn't. See
1808 https://bugs.kde.org/show_bug.cgi?id=243270#c47
1809*/
/* See the long explanation above: when the toolchain emits DWARF CFI
   (__GCC_HAVE_DWARF2_CFI_ASM), stash the real CFA in %rbp (saving the
   old %rbp in %r15, since gcc refuses %rbp in a clobber list) and
   tell the unwinder via .cfi_def_cfa.  Otherwise all three macros
   expand to nothing and the asm sequences are unannotated. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
   /* Third input operand of each CALL_FN_* asm: the CFA, obtained
      from the (undocumented) __builtin_dwarf_cfa(). */
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif
1826
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions.  %r14 holds the saved %rsp, which is why
   "r14" appears in every CALL_FN_* clobber list below. */

#define VALGRIND_ALIGN_STACK                                      \
      "movq %%rsp,%%r14\n\t"                                      \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "movq %%r14,%%rsp\n\t"
1837
1838/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1839 long) == 8. */
1840
1841/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1842 macros. In order not to trash the stack redzone, we need to drop
1843 %rsp by 128 before the hidden call, and restore afterwards. The
   nastiness is that it is only by luck that the stack still appears
1845 to be unwindable during the hidden call - since then the behaviour
1846 of any routine using this macro does not match what the CFI data
1847 says. Sigh.
1848
1849 Why is this important? Imagine that a wrapper has a stack
1850 allocated local, and passes to the hidden call, a pointer to it.
1851 Because gcc does not know about the hidden call, it may allocate
1852 that local in the redzone. Unfortunately the hidden call may then
1853 trash it before it comes to use it. So we must step clear of the
1854 redzone, for the duration of the hidden call, to make it safe.
1855
1856 Probably the same problem afflicts the other redzone-style ABIs too
1857 (ppc64-linux); but for those, the stack is
1858 self describing (none of this CFI nonsense) so at least messing
1859 with the stack pointer doesn't give a danger of non-unwindable
1860 stack. */
1861
/* Call the original (non-redirected) zero-arg function held in
   'orig' and assign its word-sized result to lval.  %rax carries
   &_argvec[0] in and the return value out; the 128-byte drop steps
   clear of the amd64 ABI stack redzone (see comment above). */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1882
/* Call the original 1-arg function: arg1 -> %rdi per the SysV amd64
   ABI; result in lval. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* step over the redzone */ \
         "movq 8(%%rax), %%rdi\n\t" /* arg1->%rdi */ \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1905
/* Call the original 2-arg function: args -> %rdi,%rsi; result in
   lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* step over the redzone */ \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1930
/* Call the original 3-arg function: args -> %rdi,%rsi,%rdx; result
   in lval. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* step over the redzone */ \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1957
/* Call the original 4-arg function: args -> %rdi,%rsi,%rdx,%rcx;
   result in lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* step over the redzone */ \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
1986
/* Call the original 5-arg function: args -> %rdi,%rsi,%rdx,%rcx,%r8;
   result in lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* step over the redzone */ \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2017
/* Call the original 6-arg function: args -> %rdi,%rsi,%rdx,%rcx,
   %r8,%r9 (all six amd64 integer argument registers); result in
   lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" /* step over the redzone */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2050
/* Call the original 7-arg function: args1-6 in registers, arg7
   pushed on the stack.  Note subq $136 rather than $128: 136 plus
   the single 8-byte push totals 144, keeping %rsp 16-byte aligned
   at the call as the ABI requires. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" /* 136 + 1*8 pushed = 144, 16-aligned */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2086
/* Call the original 8-arg function: args1-6 in registers, args7-8
   pushed on the stack (last first).  128 + 2*8 pushed = 144, so
   %rsp stays 16-byte aligned at the call. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2124
/* Call the original 9-arg function: args1-6 in registers, args7-9
   pushed on the stack.  136 + 3*8 pushed = 160, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" /* 136 + 3*8 pushed = 160, 16-aligned */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2164
/* Call the original 10-arg function: args1-6 in registers, args7-10
   pushed on the stack.  128 + 4*8 pushed = 160, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 80(%%rax)\n\t" /* arg10 */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2206
/* Call the original 11-arg function: args1-6 in registers, args7-11
   pushed on the stack.  136 + 5*8 pushed = 176, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $136,%%rsp\n\t" /* 136 + 5*8 pushed = 176, 16-aligned */ \
         "pushq 88(%%rax)\n\t" /* arg11 */ \
         "pushq 80(%%rax)\n\t" /* arg10 */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2250
/* Call the original 12-arg function: args1-6 in registers, args7-12
   pushed on the stack.  128 + 6*8 pushed = 176, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         VALGRIND_ALIGN_STACK \
         "subq $128,%%rsp\n\t" \
         "pushq 96(%%rax)\n\t" /* arg12 */ \
         "pushq 88(%%rax)\n\t" /* arg11 */ \
         "pushq 80(%%rax)\n\t" /* arg10 */ \
         "pushq 72(%%rax)\n\t" /* arg9 */ \
         "pushq 64(%%rax)\n\t" /* arg8 */ \
         "pushq 56(%%rax)\n\t" /* arg7 */ \
         "movq 48(%%rax), %%r9\n\t" \
         "movq 40(%%rax), %%r8\n\t" \
         "movq 32(%%rax), %%rcx\n\t" \
         "movq 24(%%rax), %%rdx\n\t" \
         "movq 16(%%rax), %%rsi\n\t" \
         "movq 8(%%rax), %%rdi\n\t" \
         "movq (%%rax), %%rax\n\t" /* target->%rax */ \
         VALGRIND_CALL_NOREDIR_RAX \
         VALGRIND_RESTORE_STACK \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=a" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2296
2297#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2298
2299/* ------------------------ ppc32-linux ------------------------ */
2300
2301#if defined(PLAT_ppc32_linux)
2302
2303/* This is useful for finding out about the on-stack stuff:
2304
2305 extern int f9 ( int,int,int,int,int,int,int,int,int );
2306 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2307 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2308 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2309
2310 int g9 ( void ) {
2311 return f9(11,22,33,44,55,66,77,88,99);
2312 }
2313 int g10 ( void ) {
2314 return f10(11,22,33,44,55,66,77,88,99,110);
2315 }
2316 int g11 ( void ) {
2317 return f11(11,22,33,44,55,66,77,88,99,110,121);
2318 }
2319 int g12 ( void ) {
2320 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2321 }
2322*/
2323
2324/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2325
/* These regs are trashed by the hidden call: LR/CTR/XER, all
   condition-register fields, and the GPRs the ppc32 SysV ABI treats
   as volatile (plus r2/r13, conservatively listed). */
#define __CALLER_SAVED_REGS \
   "lr", "ctr", "xer", \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7", \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10", \
   "r11", "r12", "r13"
2332
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save r1 (the stack pointer) in r28 — clobbered in every CALL_FN_
   below — then clear its low 4 bits with rlwinm to force 16-byte
   alignment. */
#define VALGRIND_ALIGN_STACK \
   "mr 28,1\n\t" \
   "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK \
   "mr 1,28\n\t"
2343
2344/* These CALL_FN_ macros assume that on ppc32-linux,
2345 sizeof(unsigned long) == 4. */
2346
/* Call the original (non-redirected) zero-arg function: target
   address loaded from _argvec[0] into r11; result comes back in r3
   and is assigned to lval. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2366
/* Call the original 1-arg function: arg1 -> r3; result in lval. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2388
/* Call the original 2-arg function: args -> r3,r4; result in
   lval. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2412
/* Call the original 3-arg function: args -> r3,r4,r5; result in
   lval. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2438
/* Call the original 4-arg function: args -> r3..r6; result in
   lval. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2466
/* Call the original 5-arg function: args -> r3..r7; result in
   lval. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2496
/* Call the original 6-arg function: args -> r3..r8; result in
   lval. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2528
/* Call the original 7-arg function: args -> r3..r9; result in
   lval. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2563
/* Call the original 8-arg function: args fill all eight argument
   registers r3..r10; result in lval. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2600
/* Call the original 9-arg function: args1-8 -> r3..r10; arg9 goes
   in the callee's stack parameter area at 8(r1), carved out by the
   16-byte "addi 1,1,-16" drop. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" /* room for the stack-passed arg */ \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2643
/* Call the original 10-arg function: args1-8 -> r3..r10; args9-10
   go in the stack parameter area at 8(r1) and 12(r1). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-16\n\t" /* room for the stack-passed args */ \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2690
/* Call the original 11-arg function: args1-8 -> r3..r10; args9-11
   go in the stack parameter area at 8(r1)..16(r1) (32-byte drop to
   keep 16-byte alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" /* room for the stack-passed args */ \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2741
/* Call the original 12-arg function: args1-8 -> r3..r10; args9-12
   go in the stack parameter area at 8(r1)..20(r1) (32-byte drop to
   keep 16-byte alignment). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 11,%1\n\t" \
         "addi 1,1,-32\n\t" /* room for the stack-passed args */ \
         /* arg12 */ \
         "lwz 3,48(11)\n\t" \
         "stw 3,20(1)\n\t" \
         /* arg11 */ \
         "lwz 3,44(11)\n\t" \
         "stw 3,16(1)\n\t" \
         /* arg10 */ \
         "lwz 3,40(11)\n\t" \
         "stw 3,12(1)\n\t" \
         /* arg9 */ \
         "lwz 3,36(11)\n\t" \
         "stw 3,8(1)\n\t" \
         /* args1-8 */ \
         "lwz 3,4(11)\n\t" /* arg1->r3 */ \
         "lwz 4,8(11)\n\t" \
         "lwz 5,12(11)\n\t" \
         "lwz 6,16(11)\n\t" /* arg4->r6 */ \
         "lwz 7,20(11)\n\t" \
         "lwz 8,24(11)\n\t" \
         "lwz 9,28(11)\n\t" \
         "lwz 10,32(11)\n\t" /* arg8->r10 */ \
         "lwz 11,0(11)\n\t" /* target->r11 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
         VALGRIND_RESTORE_STACK \
         "mr %0,3" /* result<-r3 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
2796
2797#endif /* PLAT_ppc32_linux */
2798
2799/* ------------------------ ppc64-linux ------------------------ */
2800
2801#if defined(PLAT_ppc64be_linux)
2802
2803/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2804
2805/* These regs are trashed by the hidden call. */
/* Clobber list handed to the asm statements below: everything the
   callee may legally trash under the ppc64 ABI (volatile GPRs,
   link/count registers, XER, and all CR fields). */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save SP in r28 (hence "r28" in every clobber list below), then
   clear the low 4 bits of r1 (rldicr keeps bits 0..59) to force
   16-byte stack alignment. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved SP from r28. */   \
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2822
2823/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2824 long) == 8. */
2825
/* Common shape of all the ppc64be CALL_FN_W_* macros below:
   _argvec layout is [0]=r2 save slot, [1]=_orig.r2, [2]=_orig.nraddr,
   [3..]=args.  The asm gets %1 = &_argvec[2], so -16(11) addresses
   _argvec[0], -8(11) addresses _argvec[1], and 8*k(11) addresses
   arg k.  The caller's TOC pointer (r2) is saved into _argvec[0],
   the callee's TOC pointer (_orig.r2) is loaded, the call is made
   through r11, and r2 is restored afterwards (ELFv1 TOC dance). */

/* 0-arg call: no argument registers loaded. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"      /* re-derive argvec base */          \
         "mr %0,3\n\t"       /* result from r3 */                 \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1-arg call: arg1 -> r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2-arg call: args 1-2 -> r3-r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3-arg call: args 1-3 -> r3-r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4-arg call: args 1-4 -> r3-r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5-arg call: args 1-5 -> r3-r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6-arg call: args 1-6 -> r3-r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7-arg call: args 1-7 -> r3-r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8-arg call: args 1-8 -> r3-r10 (all register args used). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9-arg call: args 1-8 in r3-r10; arg9 is spilled into the enlarged
   stack frame (ELFv1 parameter save area starts at 112(r1) after
   the 48-byte header + 8*8 register image). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10-arg call: like 9W, with arg10 spilled at 120(r1). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11-arg call: frame grows to 144 bytes; arg11 spilled at 128(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12-arg call: args 9-12 spilled at 112/120/128/136(r1). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(11)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(11)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(11)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(11)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(11)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(11)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(11)\n\t" /* arg8->r10 */                      \
         "ld 11, 0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3353
3354#endif /* PLAT_ppc64be_linux */
3355
3356/* ------------------------- ppc64le-linux ----------------------- */
3357#if defined(PLAT_ppc64le_linux)
3358
3359/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3360
3361/* These regs are trashed by the hidden call. */
/* Clobber list for the ppc64le asm statements below: all GPRs and
   condition/special registers the callee may trash under the ABI. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save SP in r28 (hence "r28" in every clobber list below), then
   clear the low 4 bits of r1 (rldicr keeps bits 0..59) to force
   16-byte stack alignment. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the saved SP from r28. */   \
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
3378
3379/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3380 long) == 8. */
3381
/* Common shape of all the ppc64le CALL_FN_W_* macros below: same
   _argvec layout as the BE variants ([0]=r2 save slot, [1]=_orig.r2,
   [2]=_orig.nraddr, [3..]=args; %1 = &_argvec[2], so -16(12)/-8(12)
   address slots 0 and 1), but the target address and call go via
   r12 and VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12, matching the
   ELFv2 convention where r12 holds the function entry address. */

/* 0-arg call: no argument registers loaded. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"      /* re-derive argvec base */          \
         "mr %0,3\n\t"       /* result from r3 */                 \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1-arg call: arg1 -> r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2-arg call: args 1-2 -> r3-r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3-arg call: args 1-3 -> r3-r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4-arg call: args 1-4 -> r3-r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5-arg call: args 1-5 -> r3-r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6-arg call: args 1-6 -> r3-r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7-arg call: args 1-7 -> r3-r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8-arg call: args 1-8 -> r3-r10 (all register args used). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"   /* arg1->r3 */                       \
         "ld 4, 16(12)\n\t"  /* arg2->r4 */                       \
         "ld 5, 24(12)\n\t"  /* arg3->r5 */                       \
         "ld 6, 32(12)\n\t"  /* arg4->r6 */                       \
         "ld 7, 40(12)\n\t"  /* arg5->r7 */                       \
         "ld 8, 48(12)\n\t"  /* arg6->r8 */                       \
         "ld 9, 56(12)\n\t"  /* arg7->r9 */                       \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t"  /* target->r12 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t"  /* restore tocptr */                 \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3689
/* Call a 9-integer-arg function via its non-redirected entry point:
   args 1..8 in r3..r10; arg9 is spilled to the stack at 96(r1) after
   extending the frame by 128 bytes.  TOC handling as in the smaller
   CALL_FN_W_* macros (_argvec[0] saves r2, _argvec[1] supplies the
   callee's r2). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                 arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+9]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" /* r12 = &_argvec[2] */ \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" /* reload &_argvec[2] */ \
         "mr %0,3\n\t" /* return value from r3 */ \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3738
/* Call a 10-integer-arg function via its non-redirected entry point:
   args 1..8 in r3..r10; args 9 and 10 are spilled to the stack at
   96(r1) and 104(r1) after extending the frame by 128 bytes.  TOC
   handling as in the smaller CALL_FN_W_* macros. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+10]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" /* r12 = &_argvec[2] */ \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-128\n\t" /* expand stack frame */ \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" /* reload &_argvec[2] */ \
         "mr %0,3\n\t" /* return value from r3 */ \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3791
/* Call an 11-integer-arg function via its non-redirected entry point:
   args 1..8 in r3..r10; args 9..11 spilled at 96/104/112(r1) after
   extending the frame by 144 bytes (one more 16-byte chunk than the
   9/10-arg cases).  TOC handling as in the smaller CALL_FN_W_*
   macros. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+11]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" /* r12 = &_argvec[2] */ \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg11 */ \
         "ld 3,88(12)\n\t" \
         "std 3,112(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" /* reload &_argvec[2] */ \
         "mr %0,3\n\t" /* return value from r3 */ \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3848
/* Call a 12-integer-arg function via its non-redirected entry point:
   args 1..8 in r3..r10; args 9..12 spilled at 96/104/112/120(r1)
   after extending the frame by 144 bytes.  TOC handling as in the
   smaller CALL_FN_W_* macros. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3+12]; \
      volatile unsigned long _res; \
      /* _argvec[0] holds current r2 across the call */ \
      _argvec[1] = (unsigned long)_orig.r2; \
      _argvec[2] = (unsigned long)_orig.nraddr; \
      _argvec[2+1] = (unsigned long)arg1; \
      _argvec[2+2] = (unsigned long)arg2; \
      _argvec[2+3] = (unsigned long)arg3; \
      _argvec[2+4] = (unsigned long)arg4; \
      _argvec[2+5] = (unsigned long)arg5; \
      _argvec[2+6] = (unsigned long)arg6; \
      _argvec[2+7] = (unsigned long)arg7; \
      _argvec[2+8] = (unsigned long)arg8; \
      _argvec[2+9] = (unsigned long)arg9; \
      _argvec[2+10] = (unsigned long)arg10; \
      _argvec[2+11] = (unsigned long)arg11; \
      _argvec[2+12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "mr 12,%1\n\t" /* r12 = &_argvec[2] */ \
         "std 2,-16(12)\n\t" /* save tocptr */ \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */ \
         "addi 1,1,-144\n\t" /* expand stack frame */ \
         /* arg12 */ \
         "ld 3,96(12)\n\t" \
         "std 3,120(1)\n\t" \
         /* arg11 */ \
         "ld 3,88(12)\n\t" \
         "std 3,112(1)\n\t" \
         /* arg10 */ \
         "ld 3,80(12)\n\t" \
         "std 3,104(1)\n\t" \
         /* arg9 */ \
         "ld 3,72(12)\n\t" \
         "std 3,96(1)\n\t" \
         /* args1-8 */ \
         "ld 3, 8(12)\n\t" /* arg1->r3 */ \
         "ld 4, 16(12)\n\t" /* arg2->r4 */ \
         "ld 5, 24(12)\n\t" /* arg3->r5 */ \
         "ld 6, 32(12)\n\t" /* arg4->r6 */ \
         "ld 7, 40(12)\n\t" /* arg5->r7 */ \
         "ld 8, 48(12)\n\t" /* arg6->r8 */ \
         "ld 9, 56(12)\n\t" /* arg7->r9 */ \
         "ld 10, 64(12)\n\t" /* arg8->r10 */ \
         "ld 12, 0(12)\n\t" /* target->r12 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
         "mr 12,%1\n\t" /* reload &_argvec[2] */ \
         "mr %0,3\n\t" /* return value from r3 */ \
         "ld 2,-16(12)\n\t" /* restore tocptr */ \
         VALGRIND_RESTORE_STACK \
         : /*out*/ "=r" (_res) \
         : /*in*/ "r" (&_argvec[2]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3909
3910#endif /* PLAT_ppc64le_linux */
3911
3912/* ------------------------- arm-linux ------------------------- */
3913
3914#if defined(PLAT_arm_linux)
3915
/* These regs are trashed by the hidden call: the argument/scratch
   registers r0-r3, plus r4 (used as a temporary by the macros below),
   r12 (ip) and r14 (lr). */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3918
3919/* Macros to save and align the stack before making a function
3920 call and restore it afterwards as gcc may not keep the stack
3921 pointer aligned if it doesn't realise calls are being made
3922 to other functions. */
3923
3924/* This is a bit tricky. We store the original stack pointer in r10
3925 as it is callee-saves. gcc doesn't allow the use of r11 for some
3926 reason. Also, we can't directly "bic" the stack pointer in thumb
3927 mode since r13 isn't an allowed register number in that context.
3928 So use r4 as a temporary, since that is about to get trashed
3929 anyway, just after each use of this macro. Side effect is we need
3930 to be very careful about any future changes, since
3931 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Save sp in callee-saved r10, then round sp down to an 8-byte
   boundary via r4 (sp can't be operated on directly in thumb mode —
   see the comment above).  VALGRIND_RESTORE_STACK undoes this by
   reloading sp from r10. */
#define VALGRIND_ALIGN_STACK \
   "mov r10, sp\n\t" /* preserve original sp in callee-saved r10 */ \
   "mov r4, sp\n\t" \
   "bic r4, r4, #7\n\t" /* clear low 3 bits -> 8-byte aligned */ \
   "mov sp, r4\n\t"
#define VALGRIND_RESTORE_STACK \
   "mov sp, r10\n\t"
3939
3940/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3941 long) == 4. */
3942
/* Call a zero-arg function through its non-redirected entry point:
   the target address is loaded from _argvec[0] into r4 and invoked
   via the no-redirect branch; the result is returned in r0. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3961
/* Call a 1-arg function through its non-redirected entry point:
   arg1 in r0, target in r4, result in r0. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3982
/* Call a 2-arg function through its non-redirected entry point:
   args in r0-r1, target in r4, result in r0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4005
/* Call a 3-arg function through its non-redirected entry point:
   args in r0-r2, target in r4, result in r0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0\n" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4030
/* Call a 4-arg function through its non-redirected entry point:
   args in r0-r3 (the full AAPCS register set), target in r4,
   result in r0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4057
/* Call a 5-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, arg5 pushed on the stack.  The extra
   "sub sp, #4" pads the single-word push so sp stays 8-byte
   aligned (VALGRIND_ALIGN_STACK aligned it to 8). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "push {r0} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4088
/* Call a 6-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-6 pushed on the stack (an even number
   of words, so no alignment pad is needed). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "push {r0, r1} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4120
/* Call a 7-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-7 pushed on the stack with a one-word
   pad ("sub sp, #4") so sp stays 8-byte aligned. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "ldr r2, [%1, #28] \n\t" /* arg7 */ \
         "push {r0, r1, r2} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4156
/* Call an 8-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-8 pushed on the stack (four words, so
   no alignment pad is needed). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "ldr r2, [%1, #28] \n\t" /* arg7 */ \
         "ldr r3, [%1, #32] \n\t" /* arg8 */ \
         "push {r0, r1, r2, r3} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4193
/* Call a 9-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-9 pushed on the stack with a one-word
   pad ("sub sp, #4") so sp stays 8-byte aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "ldr r2, [%1, #28] \n\t" /* arg7 */ \
         "ldr r3, [%1, #32] \n\t" /* arg8 */ \
         "ldr r4, [%1, #36] \n\t" /* arg9 */ \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4233
/* Call a 10-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-10 on the stack.  arg10 is pushed
   first (deepest), then args 5-9, so the six stack words keep sp
   8-byte aligned with no pad. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #40] \n\t" /* arg10 */ \
         "push {r0} \n\t" \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "ldr r2, [%1, #28] \n\t" /* arg7 */ \
         "ldr r3, [%1, #32] \n\t" /* arg8 */ \
         "ldr r4, [%1, #36] \n\t" /* arg9 */ \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4275
/* Call an 11-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-11 on the stack.  args 10-11 are
   pushed first (deepest), then args 5-9; the one-word pad keeps
   sp 8-byte aligned for the seven pushed words. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #4 \n\t" /* pad to keep sp 8-aligned */ \
         "ldr r0, [%1, #40] \n\t" /* arg10 */ \
         "ldr r1, [%1, #44] \n\t" /* arg11 */ \
         "push {r0, r1} \n\t" \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "ldr r2, [%1, #28] \n\t" /* arg7 */ \
         "ldr r3, [%1, #32] \n\t" /* arg8 */ \
         "ldr r4, [%1, #36] \n\t" /* arg9 */ \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4321
/* Call a 12-arg function through its non-redirected entry point:
   args 1-4 in r0-r3, args 5-12 on the stack.  args 10-12 are
   pushed first (deepest), then args 5-9; eight stack words total,
   so no alignment pad is needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
                      arg6,arg7,arg8,arg9,arg10, \
                      arg11,arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr r0, [%1, #40] \n\t" /* arg10 */ \
         "ldr r1, [%1, #44] \n\t" /* arg11 */ \
         "ldr r2, [%1, #48] \n\t" /* arg12 */ \
         "push {r0, r1, r2} \n\t" \
         "ldr r0, [%1, #20] \n\t" /* arg5 */ \
         "ldr r1, [%1, #24] \n\t" /* arg6 */ \
         "ldr r2, [%1, #28] \n\t" /* arg7 */ \
         "ldr r3, [%1, #32] \n\t" /* arg8 */ \
         "ldr r4, [%1, #36] \n\t" /* arg9 */ \
         "push {r0, r1, r2, r3, r4} \n\t" \
         "ldr r0, [%1, #4] \n\t" /* arg1->r0 */ \
         "ldr r1, [%1, #8] \n\t" /* arg2->r1 */ \
         "ldr r2, [%1, #12] \n\t" /* arg3->r2 */ \
         "ldr r3, [%1, #16] \n\t" /* arg4->r3 */ \
         "ldr r4, [%1] \n\t" /* target->r4 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
         VALGRIND_RESTORE_STACK \
         "mov %0, r0" /* return value from r0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4368
4369#endif /* PLAT_arm_linux */
4370
4371/* ------------------------ arm64-linux ------------------------ */
4372
4373#if defined(PLAT_arm64_linux) || defined(PLAT_arm64_freebsd)
4374
/* These regs are trashed by the hidden call.  x21 is deliberately
   absent: it is callee-saved and is reserved by VALGRIND_ALIGN_STACK
   below (it is listed separately in each asm clobber list). */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
     "x18", "x19", "x20", "x30", \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
     "v26", "v27", "v28", "v29", "v30", "v31"
4384
/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call: ALIGN rounds sp down to a 16-byte boundary,
   RESTORE reloads the saved value. */
#define VALGRIND_ALIGN_STACK \
   "mov x21, sp\n\t" /* preserve original sp */ \
   "bic sp, x21, #15\n\t" /* clear low 4 bits -> 16-byte aligned */
#define VALGRIND_RESTORE_STACK \
   "mov sp, x21\n\t"
4392
4393/* These CALL_FN_ macros assume that on arm64-linux,
4394 sizeof(unsigned long) == 8. */
4395
/* Call a zero-arg function through its non-redirected entry point:
   the target address is loaded from _argvec[0] into x8 and invoked
   via the no-redirect branch; the result is returned in x0. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4414
/* Call a 1-arg function through its non-redirected entry point:
   arg1 in x0, target in x8, result in x0. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4435
/* Call a 2-arg function through its non-redirected entry point:
   args in x0-x1, target in x8, result in x0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x1, [%1, #16] \n\t" /* arg2->x1 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4458
/* Call a 3-arg function through its non-redirected entry point:
   args in x0-x2, target in x8, result in x0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x1, [%1, #16] \n\t" /* arg2->x1 */ \
         "ldr x2, [%1, #24] \n\t" /* arg3->x2 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4483
/* Call a 4-arg function through its non-redirected entry point:
   args in x0-x3, target in x8, result in x0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x1, [%1, #16] \n\t" /* arg2->x1 */ \
         "ldr x2, [%1, #24] \n\t" /* arg3->x2 */ \
         "ldr x3, [%1, #32] \n\t" /* arg4->x3 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4510
/* Call a 5-arg function through its non-redirected entry point:
   args in x0-x4, target in x8, result in x0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x1, [%1, #16] \n\t" /* arg2->x1 */ \
         "ldr x2, [%1, #24] \n\t" /* arg3->x2 */ \
         "ldr x3, [%1, #32] \n\t" /* arg4->x3 */ \
         "ldr x4, [%1, #40] \n\t" /* arg5->x4 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4539
/* Call a 6-arg function through its non-redirected entry point:
   args in x0-x5, target in x8, result in x0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x1, [%1, #16] \n\t" /* arg2->x1 */ \
         "ldr x2, [%1, #24] \n\t" /* arg3->x2 */ \
         "ldr x3, [%1, #32] \n\t" /* arg4->x3 */ \
         "ldr x4, [%1, #40] \n\t" /* arg5->x4 */ \
         "ldr x5, [%1, #48] \n\t" /* arg6->x5 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4570
/* Call a 7-arg function through its non-redirected entry point:
   args in x0-x6, target in x8, result in x0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" /* arg1->x0 */ \
         "ldr x1, [%1, #16] \n\t" /* arg2->x1 */ \
         "ldr x2, [%1, #24] \n\t" /* arg3->x2 */ \
         "ldr x3, [%1, #32] \n\t" /* arg4->x3 */ \
         "ldr x4, [%1, #40] \n\t" /* arg5->x4 */ \
         "ldr x5, [%1, #48] \n\t" /* arg6->x5 */ \
         "ldr x6, [%1, #56] \n\t" /* arg7->x6 */ \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" /* return value from x0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4604
/* As CALL_FN_W_6W above, with arg7/arg8 loaded into x6/x7.  This is
   the last variant whose args all fit in registers (x0..x7). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4640
/* As CALL_FN_W_8W above, but arg9 no longer fits in registers: sp is
   dropped by 0x20 (a multiple of 16, preserving AAPCS64 stack
   alignment) and arg9 is stored at [sp,#0].  x8 is used as scratch
   for the store before being loaded with the target address.
   VALGRIND_RESTORE_STACK undoes both this and the alignment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4680
/* As CALL_FN_W_9W above; arg9 goes to [sp,#0] and arg10 to [sp,#8]
   within the 0x20-byte, 16-aligned outgoing-argument area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x20 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4723
/* As CALL_FN_W_9W above, but with three stack args: sp is dropped by
   0x30 (16-aligned) and arg9..arg11 stored at [sp,#0/#8/#16]. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4769
/* As CALL_FN_W_11W above; the fourth stack arg (arg12) still fits in
   the same 0x30-byte area, at [sp,#24]. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #0x30 \n\t"                                 \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1, #72] \n\t"                                 \
         "str x8, [sp, #0]  \n\t"                                 \
         "ldr x8, [%1, #80] \n\t"                                 \
         "str x8, [sp, #8]  \n\t"                                 \
         "ldr x8, [%1, #88] \n\t"                                 \
         "str x8, [sp, #16] \n\t"                                 \
         "ldr x8, [%1, #96] \n\t"                                 \
         "str x8, [sp, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4819
4820#endif /* PLAT_arm64_linux */
4821
4822/* ------------------------- s390x-linux ------------------------- */
4823
4824#if defined(PLAT_s390x_linux)
4825
4826/* Similar workaround as amd64 (see above), but we use r11 as frame
4827 pointer and save the old r11 in r7. r11 might be used for
4828 argvec, therefore we copy argvec in r1 since r1 is clobbered
4829 after the call anyway. */
/* When the assembler supports DWARF2 CFI directives, emit unwind info
   describing the temporary r11 frame pointer; otherwise fall back to
   just copying the argvec pointer into r1 (see comment above). */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER                                         \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE                                   \
      ".cfi_remember_state\n\t"                                  \
      "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */         \
      "lgr 7,11\n\t" /* stash the old frame pointer in r7 */     \
      "lgr 11,%2\n\t" /* r11 := CFA from __FRAME_POINTER */      \
      ".cfi_def_cfa 11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE                                   \
      "lgr 11, 7\n\t" /* restore the original r11 */             \
      ".cfi_restore_state\n\t"
#else
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE                                   \
      "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4848
4849/* Nb: On s390 the stack pointer is properly aligned *at all times*
4850 according to the s390 GCC maintainer. (The ABI specification is not
4851 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4852 VALGRIND_RESTORE_STACK are not defined here. */
4853
4854/* These regs are trashed by the hidden call. Note that we overwrite
4855 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4856 function a proper return address. All others are ABI defined call
4857 clobbers. */
/* When compiled with the vector extension available, the full set of
   vector registers v0..v31 is call-clobbered and must be listed in
   addition to the GPRs; otherwise only the call-clobbered FPRs
   f0..f7 are listed. */
#if defined(__VX__) || defined(__S390_VX__)
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14",   \
      "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7",             \
      "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15",       \
      "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23",     \
      "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14",   \
      "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
4868
4869/* Nb: Although r11 is modified in the asm snippets below (inside
4870 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4871 two reasons:
4872 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4873 modified
4874 (2) GCC will complain that r11 cannot appear inside a clobber section,
4875 when compiled with -O -fno-omit-frame-pointer
4876 */
4877
/* Call an OrigFn taking no args and returning a word.  s390x: the
   target address is passed in r1 (VALGRIND_CALL_NOREDIR_R1
   convention) and the result is returned in r2.  "aghi 15,-160"
   allocates the ABI-mandated 160-byte register save area for the
   callee.  r7 is clobbered because VALGRIND_CFI_PROLOGUE stashes the
   old r11 there (see comment above). */
#define CALL_FN_W_v(lval, orig)                                  \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[1];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 1, 0(1)\n\t"  /* target->r1 */                      \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "d" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4898
4899/* The call abi has the arguments in r2-r6 and stack */
/* One arg: arg1 goes in r2, loaded from argvec[1] via r1 (which
   VALGRIND_CFI_PROLOGUE set to point at argvec).  r1 is then
   overwritten with the target address last, after all the argument
   loads that need it.  Note the input constraint is "a" (address
   register) here, unlike "d" in CALL_FN_W_v -- presumably both are
   acceptable to the prologue's "lgr 1,%1"; confirm if changing. */
#define CALL_FN_W_W(lval, orig, arg1)                            \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[2];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4922
/* Two args: arg1 in r2, arg2 in r3.  r1 (argvec pointer) is
   reloaded with the target address last. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[3];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4947
/* Three args: arg1..arg3 in r2..r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3)              \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[4];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
4974
/* Four args: arg1..arg4 in r2..r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4)       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[5];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7"     \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5003
/* Five args: arg1..arg5 in r2..r6.  r6 is callee-saved under the
   s390x ABI but is loaded with arg5 here, hence the extra "6" in the
   clobber list from this variant onwards. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5)   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[6];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-160\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,160\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5034
/* Six args: arg1..arg5 in r2..r6; arg6 overflows to the stack.  The
   frame is grown by 8 bytes beyond the 160-byte save area and arg6
   is copied into the callee's parameter slot at 160(r15) with mvc. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6)                                       \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[7];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-168\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,168\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5068
/* Seven args: as CALL_FN_W_6W, with arg7 in the second stack slot at
   168(r15); the frame grows to 176 bytes accordingly. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7)                                 \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[8];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-176\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,176\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5104
/* Eight args: stack slots 160/168/176(r15) hold arg6..arg8;
   frame grows to 184 bytes. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8)                           \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[9];                         \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-184\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,184\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5142
/* Nine args: stack slots 160..184(r15) hold arg6..arg9;
   frame grows to 192 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5,   \
                     arg6, arg7 ,arg8, arg9)                     \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[10];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-192\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,192\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5182
/* Ten args: stack slots 160..192(r15) hold arg6..arg10;
   frame grows to 200 bytes. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10)             \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[11];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-200\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,200\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5224
/* Eleven args: stack slots 160..200(r15) hold arg6..arg11;
   frame grows to 208 bytes. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11)      \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[12];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-208\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,208\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5268
/* Twelve args: stack slots 160..208(r15) hold arg6..arg12;
   frame grows to 216 bytes. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5,  \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long _argvec[13];                        \
      volatile unsigned long _res;                               \
      _argvec[0] = (unsigned long)_orig.nraddr;                  \
      _argvec[1] = (unsigned long)arg1;                          \
      _argvec[2] = (unsigned long)arg2;                          \
      _argvec[3] = (unsigned long)arg3;                          \
      _argvec[4] = (unsigned long)arg4;                          \
      _argvec[5] = (unsigned long)arg5;                          \
      _argvec[6] = (unsigned long)arg6;                          \
      _argvec[7] = (unsigned long)arg7;                          \
      _argvec[8] = (unsigned long)arg8;                          \
      _argvec[9] = (unsigned long)arg9;                          \
      _argvec[10] = (unsigned long)arg10;                        \
      _argvec[11] = (unsigned long)arg11;                        \
      _argvec[12] = (unsigned long)arg12;                        \
      __asm__ volatile(                                          \
         VALGRIND_CFI_PROLOGUE                                   \
         "aghi 15,-216\n\t"                                      \
         "lg 2, 8(1)\n\t"                                        \
         "lg 3,16(1)\n\t"                                        \
         "lg 4,24(1)\n\t"                                        \
         "lg 5,32(1)\n\t"                                        \
         "lg 6,40(1)\n\t"                                        \
         "mvc 160(8,15), 48(1)\n\t"                              \
         "mvc 168(8,15), 56(1)\n\t"                              \
         "mvc 176(8,15), 64(1)\n\t"                              \
         "mvc 184(8,15), 72(1)\n\t"                              \
         "mvc 192(8,15), 80(1)\n\t"                              \
         "mvc 200(8,15), 88(1)\n\t"                              \
         "mvc 208(8,15), 96(1)\n\t"                              \
         "lg 1, 0(1)\n\t"                                        \
         VALGRIND_CALL_NOREDIR_R1                                \
         "aghi 15,216\n\t"                                       \
         VALGRIND_CFI_EPILOGUE                                   \
         "lgr %0, 2\n\t"                                         \
         : /*out*/   "=d" (_res)                                 \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER           \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      );                                                         \
      lval = (__typeof__(lval)) _res;                            \
   } while (0)
5314
5315
5316#endif /* PLAT_s390x_linux */
5317
5318/* ------------------------- mips32-linux ----------------------- */
5319
5320#if defined(PLAT_mips32_linux)
5321
5322/* These regs are trashed by the hidden call. */
/* o32 call-clobbered GPRs: $2-$3 (v0-v1), $4-$7 (a0-a3),
   $8-$15 (t0-t7), $24-$25 (t8-t9) and $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6",       \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
5326
5327/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5328 long) == 4. */
5329
/* Call an OrigFn taking no args and returning a word.  mips32/o32:
   the target address is passed in $25 (t9, the PIC convention used
   by VALGRIND_CALL_NOREDIR_T9) and the result is returned in $2
   (v0).  $28 (gp) and $31 (ra) are saved/restored around the call
   because the hidden call trashes them; the second "subu" reserves
   the 16-byte o32 outgoing-argument area the callee may use. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16\n\t"                                  \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5354
/* One arg: arg1 in $4 (a0), loaded before $25 since %1 aliases the
   result register ("0" constraint). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"   /* arg1*/                          \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5381
/* Two args: arg1/arg2 in $4/$5 (a0/a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5410
/* Three args: arg1..arg3 in $4..$6 (a0..a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5441
/* Four args: arg1..arg4 in $4..$7 (a0..a3) -- the full o32 register
   argument set. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "subu $29, $29, 16 \n\t"                                 \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 16 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5474
/* Five args: arg5 overflows to the stack.  A 24-byte frame is
   reserved (16-byte o32 arg area + arg5 slot, kept 8-aligned) and
   arg5 stored at 16($29); $4 is used as scratch for that store
   before being loaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 24\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 24 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Six args: arg5/arg6 go on the stack at 16($29)/20($29) inside a
   32-byte frame; $4 is scratch for the stores before being loaded
   with arg1. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"                                    \
         "sw $31, 4($29) \n\t"                                    \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 32\n\t"                                  \
         "sw $4, 16($29) \n\t"                                    \
         "lw $4, 24(%1) \n\t"                                     \
         "nop\n\t" /* NOTE(review): absent in sibling variants; presumably a load-delay filler -- confirm */ \
         "sw $4, 20($29) \n\t"                                    \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 32 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5549
/* Call a 7-argument word function: args 1-4 in $4-$7, args 5-7 on the
   callee frame at 16/20/24($29); result from $2. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                  \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[8];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      __asm__ volatile(                                     \
         "subu $29, $29, 8 \n\t"                            \
         "sw $28, 0($29) \n\t"                              \
         "sw $31, 4($29) \n\t"                              \
         "lw $4, 20(%1) \n\t"                               \
         "subu $29, $29, 32\n\t"                            \
         "sw $4, 16($29) \n\t"                              \
         "lw $4, 24(%1) \n\t"                               \
         "sw $4, 20($29) \n\t"                              \
         "lw $4, 28(%1) \n\t"                               \
         "sw $4, 24($29) \n\t"                              \
         "lw $4, 4(%1) \n\t"                                \
         "lw $5, 8(%1) \n\t"                                \
         "lw $6, 12(%1) \n\t"                               \
         "lw $7, 16(%1) \n\t"                               \
         "lw $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "addu $29, $29, 32 \n\t"                           \
         "lw $28, 0($29) \n\t"                              \
         "lw $31, 4($29) \n\t"                              \
         "addu $29, $29, 8 \n\t"                            \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5592
/* Call an 8-argument word function: args 1-4 in $4-$7, args 5-8 on the
   callee frame at 16..28($29); result from $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                             \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[9];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      __asm__ volatile(                                     \
         "subu $29, $29, 8 \n\t"                            \
         "sw $28, 0($29) \n\t"                              \
         "sw $31, 4($29) \n\t"                              \
         "lw $4, 20(%1) \n\t"                               \
         "subu $29, $29, 40\n\t"                            \
         "sw $4, 16($29) \n\t"                              \
         "lw $4, 24(%1) \n\t"                               \
         "sw $4, 20($29) \n\t"                              \
         "lw $4, 28(%1) \n\t"                               \
         "sw $4, 24($29) \n\t"                              \
         "lw $4, 32(%1) \n\t"                               \
         "sw $4, 28($29) \n\t"                              \
         "lw $4, 4(%1) \n\t"                                \
         "lw $5, 8(%1) \n\t"                                \
         "lw $6, 12(%1) \n\t"                               \
         "lw $7, 16(%1) \n\t"                               \
         "lw $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "addu $29, $29, 40 \n\t"                           \
         "lw $28, 0($29) \n\t"                              \
         "lw $31, 4($29) \n\t"                              \
         "addu $29, $29, 8 \n\t"                            \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5638
/* Call a 9-argument word function: args 1-4 in $4-$7, args 5-9 on the
   callee frame at 16..32($29); result from $2. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                        \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[10];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      __asm__ volatile(                                     \
         "subu $29, $29, 8 \n\t"                            \
         "sw $28, 0($29) \n\t"                              \
         "sw $31, 4($29) \n\t"                              \
         "lw $4, 20(%1) \n\t"                               \
         "subu $29, $29, 40\n\t"                            \
         "sw $4, 16($29) \n\t"                              \
         "lw $4, 24(%1) \n\t"                               \
         "sw $4, 20($29) \n\t"                              \
         "lw $4, 28(%1) \n\t"                               \
         "sw $4, 24($29) \n\t"                              \
         "lw $4, 32(%1) \n\t"                               \
         "sw $4, 28($29) \n\t"                              \
         "lw $4, 36(%1) \n\t"                               \
         "sw $4, 32($29) \n\t"                              \
         "lw $4, 4(%1) \n\t"                                \
         "lw $5, 8(%1) \n\t"                                \
         "lw $6, 12(%1) \n\t"                               \
         "lw $7, 16(%1) \n\t"                               \
         "lw $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "addu $29, $29, 40 \n\t"                           \
         "lw $28, 0($29) \n\t"                              \
         "lw $31, 4($29) \n\t"                              \
         "addu $29, $29, 8 \n\t"                            \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5687
/* Call a 10-argument word function: args 1-4 in $4-$7, args 5-10 on
   the callee frame at 16..36($29); result from $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                 \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[11];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      _argvec[10] = (unsigned long)(arg10);                 \
      __asm__ volatile(                                     \
         "subu $29, $29, 8 \n\t"                            \
         "sw $28, 0($29) \n\t"                              \
         "sw $31, 4($29) \n\t"                              \
         "lw $4, 20(%1) \n\t"                               \
         "subu $29, $29, 48\n\t"                            \
         "sw $4, 16($29) \n\t"                              \
         "lw $4, 24(%1) \n\t"                               \
         "sw $4, 20($29) \n\t"                              \
         "lw $4, 28(%1) \n\t"                               \
         "sw $4, 24($29) \n\t"                              \
         "lw $4, 32(%1) \n\t"                               \
         "sw $4, 28($29) \n\t"                              \
         "lw $4, 36(%1) \n\t"                               \
         "sw $4, 32($29) \n\t"                              \
         "lw $4, 40(%1) \n\t"                               \
         "sw $4, 36($29) \n\t"                              \
         "lw $4, 4(%1) \n\t"                                \
         "lw $5, 8(%1) \n\t"                                \
         "lw $6, 12(%1) \n\t"                               \
         "lw $7, 16(%1) \n\t"                               \
         "lw $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "addu $29, $29, 48 \n\t"                           \
         "lw $28, 0($29) \n\t"                              \
         "lw $31, 4($29) \n\t"                              \
         "addu $29, $29, 8 \n\t"                            \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5739
/* Call an 11-argument word function: args 1-4 in $4-$7, args 5-11 on
   the callee frame at 16..40($29); result from $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11)                                \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[12];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      _argvec[10] = (unsigned long)(arg10);                 \
      _argvec[11] = (unsigned long)(arg11);                 \
      __asm__ volatile(                                     \
         "subu $29, $29, 8 \n\t"                            \
         "sw $28, 0($29) \n\t"                              \
         "sw $31, 4($29) \n\t"                              \
         "lw $4, 20(%1) \n\t"                               \
         "subu $29, $29, 48\n\t"                            \
         "sw $4, 16($29) \n\t"                              \
         "lw $4, 24(%1) \n\t"                               \
         "sw $4, 20($29) \n\t"                              \
         "lw $4, 28(%1) \n\t"                               \
         "sw $4, 24($29) \n\t"                              \
         "lw $4, 32(%1) \n\t"                               \
         "sw $4, 28($29) \n\t"                              \
         "lw $4, 36(%1) \n\t"                               \
         "sw $4, 32($29) \n\t"                              \
         "lw $4, 40(%1) \n\t"                               \
         "sw $4, 36($29) \n\t"                              \
         "lw $4, 44(%1) \n\t"                               \
         "sw $4, 40($29) \n\t"                              \
         "lw $4, 4(%1) \n\t"                                \
         "lw $5, 8(%1) \n\t"                                \
         "lw $6, 12(%1) \n\t"                               \
         "lw $7, 16(%1) \n\t"                               \
         "lw $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "addu $29, $29, 48 \n\t"                           \
         "lw $28, 0($29) \n\t"                              \
         "lw $31, 4($29) \n\t"                              \
         "addu $29, $29, 8 \n\t"                            \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5795
/* Call a 12-argument word function: args 1-4 in $4-$7, args 5-12 on
   the callee frame at 16..44($29); result from $2.
   Fix: the input constraint was "r" here while every other mips32
   CALL_FN_W_* macro uses the matching constraint "0" (tying %1 to the
   output register %0).  Both are valid for this asm -- %0 is written
   only after the final read of %1 -- but "0" is used for consistency
   with the sibling macros. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11,arg12)                          \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[13];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      _argvec[10] = (unsigned long)(arg10);                 \
      _argvec[11] = (unsigned long)(arg11);                 \
      _argvec[12] = (unsigned long)(arg12);                 \
      __asm__ volatile(                                     \
         "subu $29, $29, 8 \n\t"                            \
         "sw $28, 0($29) \n\t"                              \
         "sw $31, 4($29) \n\t"                              \
         "lw $4, 20(%1) \n\t"                               \
         "subu $29, $29, 56\n\t"                            \
         "sw $4, 16($29) \n\t"                              \
         "lw $4, 24(%1) \n\t"                               \
         "sw $4, 20($29) \n\t"                              \
         "lw $4, 28(%1) \n\t"                               \
         "sw $4, 24($29) \n\t"                              \
         "lw $4, 32(%1) \n\t"                               \
         "sw $4, 28($29) \n\t"                              \
         "lw $4, 36(%1) \n\t"                               \
         "sw $4, 32($29) \n\t"                              \
         "lw $4, 40(%1) \n\t"                               \
         "sw $4, 36($29) \n\t"                              \
         "lw $4, 44(%1) \n\t"                               \
         "sw $4, 40($29) \n\t"                              \
         "lw $4, 48(%1) \n\t"                               \
         "sw $4, 44($29) \n\t"                              \
         "lw $4, 4(%1) \n\t"                                \
         "lw $5, 8(%1) \n\t"                                \
         "lw $6, 12(%1) \n\t"                               \
         "lw $7, 16(%1) \n\t"                               \
         "lw $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "addu $29, $29, 56 \n\t"                           \
         "lw $28, 0($29) \n\t"                              \
         "lw $31, 4($29) \n\t"                              \
         "addu $29, $29, 8 \n\t"                            \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5854
5855#endif /* PLAT_mips32_linux */
5856
5857/* ------------------------- nanomips-linux -------------------- */
5858
5859#if defined(PLAT_nanomips_linux)
5860
5861/* These regs are trashed by the hidden call. */
/* NOTE(review): unlike the mips32/mips64 variants this list uses
   symbolic nanoMIPS register names; presumably $t4/$t5 cover the
   return-value registers -- confirm against the nanoMIPS ABI. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"
5865
/* These CALL_FN_ macros assume that on nanomips-linux,
   sizeof(unsigned long) == 4. */
5868
/* Call a 0-argument word function: load the non-redirected target from
   _argvec[0] into $t9, call it, and take the result from $a0. */
#define CALL_FN_W_v(lval, orig)                             \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[1];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5885
/* Call a 1-argument word function: target -> $t9, arg1 -> $a0,
   result from $a0. */
#define CALL_FN_W_W(lval, orig, arg1)                       \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[2];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5904
/* Call a 2-argument word function: args in $a0-$a1, result from $a0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                 \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[3];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5925
/* Call a 3-argument word function: args in $a0-$a2, result from $a0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)           \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[4];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         "lw $a2,12(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5948
/* Call a 4-argument word function: args in $a0-$a3, result from $a0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)     \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[5];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         "lw $a2,12(%1)\n\t"                                \
         "lw $a3,16(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
5973
/* Call a 5-argument word function: args in $a0-$a4, result from $a0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)  \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[6];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         "lw $a2,12(%1)\n\t"                                \
         "lw $a3,16(%1)\n\t"                                \
         "lw $a4,20(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
/* Call a 6-argument word function: args in $a0-$a5, result from $a0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[7];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         "lw $a2,12(%1)\n\t"                                \
         "lw $a3,16(%1)\n\t"                                \
         "lw $a4,20(%1)\n\t"                                \
         "lw $a5,24(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6028
/* Call a 7-argument word function: args in $a0-$a6, result from $a0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                  \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[8];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         "lw $a2,12(%1)\n\t"                                \
         "lw $a3,16(%1)\n\t"                                \
         "lw $a4,20(%1)\n\t"                                \
         "lw $a5,24(%1)\n\t"                                \
         "lw $a6,28(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6060
/* Call an 8-argument word function: args in $a0-$a7, result from $a0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                             \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[9];                    \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      __asm__ volatile(                                     \
         "lw $t9, 0(%1)\n\t"                                \
         "lw $a0, 4(%1)\n\t"                                \
         "lw $a1, 8(%1)\n\t"                                \
         "lw $a2,12(%1)\n\t"                                \
         "lw $a3,16(%1)\n\t"                                \
         "lw $a4,20(%1)\n\t"                                \
         "lw $a5,24(%1)\n\t"                                \
         "lw $a6,28(%1)\n\t"                                \
         "lw $a7,32(%1)\n\t"                                \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0\n"                                   \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6094
/* Call a 9-argument word function: args 1-8 in $a0-$a7, arg9 at
   0($sp) in a 16-byte temporary frame; result from $a0. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8,arg9)                        \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[10];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      __asm__ volatile(                                     \
         "addiu $sp, $sp, -16 \n\t"                         \
         "lw $t9,36(%1) \n\t"   /* arg9 -> stack */         \
         "sw $t9, 0($sp) \n\t"                              \
         "lw $t9, 0(%1) \n\t"   /* target */                \
         "lw $a0, 4(%1) \n\t"                               \
         "lw $a1, 8(%1) \n\t"                               \
         "lw $a2,12(%1) \n\t"                               \
         "lw $a3,16(%1) \n\t"                               \
         "lw $a4,20(%1) \n\t"                               \
         "lw $a5,24(%1) \n\t"                               \
         "lw $a6,28(%1) \n\t"                               \
         "lw $a7,32(%1) \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0 \n\t"                                \
         "addiu $sp, $sp, 16 \n\t"                          \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6133
/* Call a 10-argument word function: args 1-8 in $a0-$a7, args 9-10 at
   0/4($sp) in a 16-byte temporary frame; result from $a0. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                 \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[11];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      _argvec[10] = (unsigned long)(arg10);                 \
      __asm__ volatile(                                     \
         "addiu $sp, $sp, -16 \n\t"                         \
         "lw $t9,36(%1) \n\t"                               \
         "sw $t9, 0($sp) \n\t"                              \
         "lw $t9,40(%1) \n\t"                               \
         "sw $t9, 4($sp) \n\t"                              \
         "lw $t9, 0(%1) \n\t"                               \
         "lw $a0, 4(%1) \n\t"                               \
         "lw $a1, 8(%1) \n\t"                               \
         "lw $a2,12(%1) \n\t"                               \
         "lw $a3,16(%1) \n\t"                               \
         "lw $a4,20(%1) \n\t"                               \
         "lw $a5,24(%1) \n\t"                               \
         "lw $a6,28(%1) \n\t"                               \
         "lw $a7,32(%1) \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0 \n\t"                                \
         "addiu $sp, $sp, 16 \n\t"                          \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6175
/* Call an 11-argument word function: args 1-8 in $a0-$a7, args 9-11
   at 0/4/8($sp) in a 16-byte temporary frame; result from $a0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11)                                \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[12];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      _argvec[10] = (unsigned long)(arg10);                 \
      _argvec[11] = (unsigned long)(arg11);                 \
      __asm__ volatile(                                     \
         "addiu $sp, $sp, -16 \n\t"                         \
         "lw $t9,36(%1) \n\t"                               \
         "sw $t9, 0($sp) \n\t"                              \
         "lw $t9,40(%1) \n\t"                               \
         "sw $t9, 4($sp) \n\t"                              \
         "lw $t9,44(%1) \n\t"                               \
         "sw $t9, 8($sp) \n\t"                              \
         "lw $t9, 0(%1) \n\t"                               \
         "lw $a0, 4(%1) \n\t"                               \
         "lw $a1, 8(%1) \n\t"                               \
         "lw $a2,12(%1) \n\t"                               \
         "lw $a3,16(%1) \n\t"                               \
         "lw $a4,20(%1) \n\t"                               \
         "lw $a5,24(%1) \n\t"                               \
         "lw $a6,28(%1) \n\t"                               \
         "lw $a7,32(%1) \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0 \n\t"                                \
         "addiu $sp, $sp, 16 \n\t"                          \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6221
/* Call a 12-argument word function: args 1-8 in $a0-$a7, args 9-12
   at 0..12($sp) in a 16-byte temporary frame; result from $a0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,  \
                      arg6,arg7,arg8,arg9,arg10,             \
                      arg11,arg12)                          \
   do {                                                     \
      volatile OrigFn        _orig = (orig);                \
      volatile unsigned long _argvec[13];                   \
      volatile unsigned long _res;                          \
      _argvec[0] = (unsigned long)_orig.nraddr;             \
      _argvec[1] = (unsigned long)(arg1);                   \
      _argvec[2] = (unsigned long)(arg2);                   \
      _argvec[3] = (unsigned long)(arg3);                   \
      _argvec[4] = (unsigned long)(arg4);                   \
      _argvec[5] = (unsigned long)(arg5);                   \
      _argvec[6] = (unsigned long)(arg6);                   \
      _argvec[7] = (unsigned long)(arg7);                   \
      _argvec[8] = (unsigned long)(arg8);                   \
      _argvec[9] = (unsigned long)(arg9);                   \
      _argvec[10] = (unsigned long)(arg10);                 \
      _argvec[11] = (unsigned long)(arg11);                 \
      _argvec[12] = (unsigned long)(arg12);                 \
      __asm__ volatile(                                     \
         "addiu $sp, $sp, -16 \n\t"                         \
         "lw $t9,36(%1) \n\t"                               \
         "sw $t9, 0($sp) \n\t"                              \
         "lw $t9,40(%1) \n\t"                               \
         "sw $t9, 4($sp) \n\t"                              \
         "lw $t9,44(%1) \n\t"                               \
         "sw $t9, 8($sp) \n\t"                              \
         "lw $t9,48(%1) \n\t"                               \
         "sw $t9,12($sp) \n\t"                              \
         "lw $t9, 0(%1) \n\t"                               \
         "lw $a0, 4(%1) \n\t"                               \
         "lw $a1, 8(%1) \n\t"                               \
         "lw $a2,12(%1) \n\t"                               \
         "lw $a3,16(%1) \n\t"                               \
         "lw $a4,20(%1) \n\t"                               \
         "lw $a5,24(%1) \n\t"                               \
         "lw $a6,28(%1) \n\t"                               \
         "lw $a7,32(%1) \n\t"                               \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $a0 \n\t"                                \
         "addiu $sp, $sp, 16 \n\t"                          \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) _res;                       \
   } while (0)
6270
6271#endif /* PLAT_nanomips_linux */
6272
6273/* ------------------------- mips64-linux ------------------------- */
6274
6275#if defined(PLAT_mips64_linux)
6276
6277/* These regs are trashed by the hidden call. */
/* Numeric register names: $2-$15 ($v0-$v1, $a0-$a7, $t0-$t3 in n64
   terms), $24-$25 and $31 -- all clobbered by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
6281
6282/* These CALL_FN_ macros assume that on mips64-linux,
6283 sizeof(long long) == 8. */
6284
/* Sign-extend a value into a 64-bit register image: narrow/convert to
   (signed) long first, then widen to long long.  The parameter is
   parenthesized so that expression arguments (e.g. a + b) expand
   correctly -- the previous definition applied the casts only to the
   first operand of such expressions. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
6286
/* Call a 0-argument word function: target from _argvec[0] -> $25 (t9),
   result from $2.  NOTE(review): uses input constraint "0" (tied to
   %0) while the other mips64 macros use "r" -- both work here since
   %0 is written only after the last read of %1; confirm intent. */
#define CALL_FN_W_v(lval, orig)                             \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[1];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      __asm__ volatile(                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "0" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6303
/* Call a 1-argument word function: arg1 -> $4, target -> $25 (t9),
   result from $2. */
#define CALL_FN_W_W(lval, orig, arg1)                       \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[2];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"   /* arg1*/                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6322
/* Call a 2-argument word function: args -> $4/$5, target -> $25 (t9),
   result from $2.
   Fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST like every
   other mips64 CALL_FN_W_* macro (value-identical on LP64, but keeps
   the sign-extension convention uniform). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                 \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[3];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6343
6344
/* Call a 3-argument word function: args -> $4-$6, target -> $25 (t9),
   result from $2.
   Fix: _argvec[0] now goes through MIPS64_LONG2REG_CAST like every
   other mips64 CALL_FN_W_* macro (value-identical on LP64, but keeps
   the sign-extension convention uniform). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)           \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[4];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $6, 24(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6367
/* Call a 4-argument word function: args -> $4-$7, target -> $25 (t9),
   result from $2. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)     \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[5];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);              \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $6, 24(%1)\n\t"                                \
         "ld $7, 32(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6392
/* Call a 5-argument word function: args -> $4-$8, target -> $25 (t9),
   result from $2. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)  \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[6];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);              \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);              \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $6, 24(%1)\n\t"                                \
         "ld $7, 32(%1)\n\t"                                \
         "ld $8, 40(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6419
/* Call a 6-argument word function: args -> $4-$9, target -> $25 (t9),
   result from $2. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)  \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[7];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);              \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);              \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);              \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $6, 24(%1)\n\t"                                \
         "ld $7, 32(%1)\n\t"                                \
         "ld $8, 40(%1)\n\t"                                \
         "ld $9, 48(%1)\n\t"                                \
         "ld $25, 0(%1)\n\t"  /* target->t9 */              \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6448
/* Call a 7-argument word function: args -> $4-$10, target -> $25 (t9),
   result from $2. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7)                                  \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[8];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);              \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);              \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);              \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);              \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $6, 24(%1)\n\t"                                \
         "ld $7, 32(%1)\n\t"                                \
         "ld $8, 40(%1)\n\t"                                \
         "ld $9, 48(%1)\n\t"                                \
         "ld $10, 56(%1)\n\t"                               \
         "ld $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6480
/* Call an 8-argument word function: args -> $4-$11, target -> $25
   (t9), result from $2. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                     arg7,arg8)                             \
   do {                                                     \
      volatile OrigFn             _orig = (orig);           \
      volatile unsigned long long _argvec[9];               \
      volatile unsigned long long _res;                     \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);      \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);              \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);              \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);              \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);              \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);              \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);              \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);              \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);              \
      __asm__ volatile(                                     \
         "ld $4, 8(%1)\n\t"                                 \
         "ld $5, 16(%1)\n\t"                                \
         "ld $6, 24(%1)\n\t"                                \
         "ld $7, 32(%1)\n\t"                                \
         "ld $8, 40(%1)\n\t"                                \
         "ld $9, 48(%1)\n\t"                                \
         "ld $10, 56(%1)\n\t"                               \
         "ld $11, 64(%1)\n\t"                               \
         "ld $25, 0(%1) \n\t"  /* target->t9 */             \
         VALGRIND_CALL_NOREDIR_T9                           \
         "move %0, $2\n"                                    \
         : /*out*/ "=r" (_res)                              \
         : /*in*/ "r" (&_argvec[0])                         \
         : /*trash*/ "memory", __CALLER_SAVED_REGS          \
      );                                                    \
      lval = (__typeof__(lval)) (long)_res;                 \
   } while (0)
6514
6515#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6516 arg7,arg8,arg9) \
6517 do { \
6518 volatile OrigFn _orig = (orig); \
6519 volatile unsigned long long _argvec[10]; \
6520 volatile unsigned long long _res; \
6521 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6522 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6523 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6524 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6525 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6526 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6527 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6528 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6529 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6530 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6531 __asm__ volatile( \
6532 "dsubu $29, $29, 8\n\t" \
6533 "ld $4, 72(%1)\n\t" \
6534 "sd $4, 0($29)\n\t" \
6535 "ld $4, 8(%1)\n\t" \
6536 "ld $5, 16(%1)\n\t" \
6537 "ld $6, 24(%1)\n\t" \
6538 "ld $7, 32(%1)\n\t" \
6539 "ld $8, 40(%1)\n\t" \
6540 "ld $9, 48(%1)\n\t" \
6541 "ld $10, 56(%1)\n\t" \
6542 "ld $11, 64(%1)\n\t" \
6543 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6544 VALGRIND_CALL_NOREDIR_T9 \
6545 "daddu $29, $29, 8\n\t" \
6546 "move %0, $2\n" \
6547 : /*out*/ "=r" (_res) \
6548 : /*in*/ "r" (&_argvec[0]) \
6549 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6550 ); \
6551 lval = (__typeof__(lval)) (long)_res; \
6552 } while (0)
6553
6554#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
6555 arg7,arg8,arg9,arg10) \
6556 do { \
6557 volatile OrigFn _orig = (orig); \
6558 volatile unsigned long long _argvec[11]; \
6559 volatile unsigned long long _res; \
6560 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6561 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6562 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6563 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6564 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6565 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6566 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6567 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6568 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6569 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6570 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6571 __asm__ volatile( \
6572 "dsubu $29, $29, 16\n\t" \
6573 "ld $4, 72(%1)\n\t" \
6574 "sd $4, 0($29)\n\t" \
6575 "ld $4, 80(%1)\n\t" \
6576 "sd $4, 8($29)\n\t" \
6577 "ld $4, 8(%1)\n\t" \
6578 "ld $5, 16(%1)\n\t" \
6579 "ld $6, 24(%1)\n\t" \
6580 "ld $7, 32(%1)\n\t" \
6581 "ld $8, 40(%1)\n\t" \
6582 "ld $9, 48(%1)\n\t" \
6583 "ld $10, 56(%1)\n\t" \
6584 "ld $11, 64(%1)\n\t" \
6585 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6586 VALGRIND_CALL_NOREDIR_T9 \
6587 "daddu $29, $29, 16\n\t" \
6588 "move %0, $2\n" \
6589 : /*out*/ "=r" (_res) \
6590 : /*in*/ "r" (&_argvec[0]) \
6591 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6592 ); \
6593 lval = (__typeof__(lval)) (long)_res; \
6594 } while (0)
6595
6596#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6597 arg6,arg7,arg8,arg9,arg10, \
6598 arg11) \
6599 do { \
6600 volatile OrigFn _orig = (orig); \
6601 volatile unsigned long long _argvec[12]; \
6602 volatile unsigned long long _res; \
6603 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6604 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6605 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6606 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6607 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6608 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6609 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6610 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6611 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6612 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6613 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6614 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6615 __asm__ volatile( \
6616 "dsubu $29, $29, 24\n\t" \
6617 "ld $4, 72(%1)\n\t" \
6618 "sd $4, 0($29)\n\t" \
6619 "ld $4, 80(%1)\n\t" \
6620 "sd $4, 8($29)\n\t" \
6621 "ld $4, 88(%1)\n\t" \
6622 "sd $4, 16($29)\n\t" \
6623 "ld $4, 8(%1)\n\t" \
6624 "ld $5, 16(%1)\n\t" \
6625 "ld $6, 24(%1)\n\t" \
6626 "ld $7, 32(%1)\n\t" \
6627 "ld $8, 40(%1)\n\t" \
6628 "ld $9, 48(%1)\n\t" \
6629 "ld $10, 56(%1)\n\t" \
6630 "ld $11, 64(%1)\n\t" \
6631 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6632 VALGRIND_CALL_NOREDIR_T9 \
6633 "daddu $29, $29, 24\n\t" \
6634 "move %0, $2\n" \
6635 : /*out*/ "=r" (_res) \
6636 : /*in*/ "r" (&_argvec[0]) \
6637 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6638 ); \
6639 lval = (__typeof__(lval)) (long)_res; \
6640 } while (0)
6641
6642#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
6643 arg6,arg7,arg8,arg9,arg10, \
6644 arg11,arg12) \
6645 do { \
6646 volatile OrigFn _orig = (orig); \
6647 volatile unsigned long long _argvec[13]; \
6648 volatile unsigned long long _res; \
6649 _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr); \
6650 _argvec[1] = MIPS64_LONG2REG_CAST(arg1); \
6651 _argvec[2] = MIPS64_LONG2REG_CAST(arg2); \
6652 _argvec[3] = MIPS64_LONG2REG_CAST(arg3); \
6653 _argvec[4] = MIPS64_LONG2REG_CAST(arg4); \
6654 _argvec[5] = MIPS64_LONG2REG_CAST(arg5); \
6655 _argvec[6] = MIPS64_LONG2REG_CAST(arg6); \
6656 _argvec[7] = MIPS64_LONG2REG_CAST(arg7); \
6657 _argvec[8] = MIPS64_LONG2REG_CAST(arg8); \
6658 _argvec[9] = MIPS64_LONG2REG_CAST(arg9); \
6659 _argvec[10] = MIPS64_LONG2REG_CAST(arg10); \
6660 _argvec[11] = MIPS64_LONG2REG_CAST(arg11); \
6661 _argvec[12] = MIPS64_LONG2REG_CAST(arg12); \
6662 __asm__ volatile( \
6663 "dsubu $29, $29, 32\n\t" \
6664 "ld $4, 72(%1)\n\t" \
6665 "sd $4, 0($29)\n\t" \
6666 "ld $4, 80(%1)\n\t" \
6667 "sd $4, 8($29)\n\t" \
6668 "ld $4, 88(%1)\n\t" \
6669 "sd $4, 16($29)\n\t" \
6670 "ld $4, 96(%1)\n\t" \
6671 "sd $4, 24($29)\n\t" \
6672 "ld $4, 8(%1)\n\t" \
6673 "ld $5, 16(%1)\n\t" \
6674 "ld $6, 24(%1)\n\t" \
6675 "ld $7, 32(%1)\n\t" \
6676 "ld $8, 40(%1)\n\t" \
6677 "ld $9, 48(%1)\n\t" \
6678 "ld $10, 56(%1)\n\t" \
6679 "ld $11, 64(%1)\n\t" \
6680 "ld $25, 0(%1)\n\t" /* target->t9 */ \
6681 VALGRIND_CALL_NOREDIR_T9 \
6682 "daddu $29, $29, 32\n\t" \
6683 "move %0, $2\n" \
6684 : /*out*/ "=r" (_res) \
6685 : /*in*/ "r" (&_argvec[0]) \
6686 : /*trash*/ "memory", __CALLER_SAVED_REGS \
6687 ); \
6688 lval = (__typeof__(lval)) (long)_res; \
6689 } while (0)
6690
6691#endif /* PLAT_mips64_linux */
6692
6693/* ----------------------- riscv64-linux ----------------------- */
6694
6695#if defined(PLAT_riscv64_linux)
6696
/* These regs are trashed by the hidden call: the RISC-V caller-saved
   integer registers (ra, t0-t6, a0-a7) and caller-saved FP registers
   (ft0-ft11, fa0-fa7). */
#define __CALLER_SAVED_REGS                                       \
   "ra",                                                          \
   "t0", "t1", "t2", "t3", "t4", "t5", "t6",                      \
   "a0", "a1", "a2", "a3", "a4", "a5", "a6", "a7",                \
   "ft0", "ft1", "ft2", "ft3", "ft4", "ft5", "ft6", "ft7",        \
   "ft8", "ft9", "ft10", "ft11",                                  \
   "fa0", "fa1", "fa2", "fa3", "fa4", "fa5", "fa6", "fa7"

/* s11 is callee-saved, so we can use it to save and restore sp around
   the hidden call.  The andi mask is the 64-bit encoding of -16, so
   this rounds sp down to a 16-byte boundary as the RISC-V psABI
   requires at a call. */
#define VALGRIND_ALIGN_STACK                                      \
   "mv s11, sp\n\t"                                               \
   "andi sp, sp, 0xfffffffffffffff0\n\t"
#define VALGRIND_RESTORE_STACK                                    \
   "mv sp, s11\n\t"
6713
/* These CALL_FN_ macros assume that on riscv64-linux,
   sizeof(unsigned long) == 8. */

/* Call a 0-argument function.  _argvec[0] holds the target address;
   it is loaded into t0 and dispatched via the NOREDIR sequence, and
   the result comes back in a0.  sp is 16-byte aligned around the
   call, saved in callee-saved s11 (hence the extra "s11" clobber).
   The "0" constraint ties the input pointer to the output register;
   the asm refers to it as %1. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0\n"                                            \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg, passed in a0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0\n"                                            \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args, in a0/a1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0\n"                                            \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args, in a0..a2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0\n"                                            \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args, in a0..a3.  (The final "mv %0, a0" carries no trailing
   newline, which is fine for the last instruction.) */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6831
/* 5 args, in a0..a4.  See CALL_FN_W_v above for the general shape:
   _argvec[0] is the target, _argvec[i] argument i; result in a0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args, in a0..a5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args, in a0..a6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld a6, 56(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args, in a0..a7 -- the last register-borne argument. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld a6, 56(%1) \n\t"                                     \
         "ld a7, 64(%1) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6961
/* 9 args: a0..a7 plus arg9 on the stack.  The explicit
   "addi sp, sp, -16" keeps sp 16-byte aligned; it needs no matching
   add afterwards because VALGRIND_RESTORE_STACK reloads sp wholesale
   from s11. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "addi sp, sp, -16 \n\t"                                  \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld a6, 56(%1) \n\t"                                     \
         "ld a7, 64(%1) \n\t"                                     \
         "ld t0, 72(%1) \n\t"                                     \
         "sd t0, 0(sp) \n\t"                                      \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10 args: arg9, arg10 in the 16-byte stack area. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "addi sp, sp, -16 \n\t"                                  \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld a6, 56(%1) \n\t"                                     \
         "ld a7, 64(%1) \n\t"                                     \
         "ld t0, 72(%1) \n\t"                                     \
         "sd t0, 0(sp) \n\t"                                      \
         "ld t0, 80(%1) \n\t"                                     \
         "sd t0, 8(sp) \n\t"                                      \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: arg9..arg11 on the stack (32 bytes reserved, keeping the
   16-byte alignment). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "addi sp, sp, -32 \n\t"                                  \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld a6, 56(%1) \n\t"                                     \
         "ld a7, 64(%1) \n\t"                                     \
         "ld t0, 72(%1) \n\t"                                     \
         "sd t0, 0(sp) \n\t"                                      \
         "ld t0, 80(%1) \n\t"                                     \
         "sd t0, 8(sp) \n\t"                                      \
         "ld t0, 88(%1) \n\t"                                     \
         "sd t0, 16(sp) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: arg9..arg12 fill the 32-byte stack area. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,                 \
                      arg12)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "addi sp, sp, -32 \n\t"                                  \
         "ld a0, 8(%1) \n\t"                                      \
         "ld a1, 16(%1) \n\t"                                     \
         "ld a2, 24(%1) \n\t"                                     \
         "ld a3, 32(%1) \n\t"                                     \
         "ld a4, 40(%1) \n\t"                                     \
         "ld a5, 48(%1) \n\t"                                     \
         "ld a6, 56(%1) \n\t"                                     \
         "ld a7, 64(%1) \n\t"                                     \
         "ld t0, 72(%1) \n\t"                                     \
         "sd t0, 0(sp) \n\t"                                      \
         "ld t0, 80(%1) \n\t"                                     \
         "sd t0, 8(sp) \n\t"                                      \
         "ld t0, 88(%1) \n\t"                                     \
         "sd t0, 16(sp) \n\t"                                     \
         "ld t0, 96(%1) \n\t"                                     \
         "sd t0, 24(sp) \n\t"                                     \
         "ld t0, 0(%1) \n\t"  /* target->t0 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_T0                   \
         VALGRIND_RESTORE_STACK                                   \
         "mv %0, a0"                                              \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "0" (&_argvec[0])                            \
         : /*trash*/ "memory", __CALLER_SAVED_REGS, "s11"         \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
7140
7141#endif /* PLAT_riscv64_linux */
7142
7143/* ------------------------------------------------------------------ */
7144/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
7145/* */
7146/* ------------------------------------------------------------------ */
7147
7148/* Some request codes. There are many more of these, but most are not
7149 exposed to end-user view. These are the public ones, all of the
7150 form 0x1000 + small_number.
7151
7152 Core ones are in the range 0x00000000--0x0000ffff. The non-public
7153 ones start at 0x2000.
7154*/
7155
/* These macros are used by tools -- they must be public, but don't
   embed them into other programs. */
/* Compose a tool's 32-bit client-request base code: the two
   tool-identifying characters 'a' and 'b' occupy the top two bytes. */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)((((a) & 0xff) << 24) | (((b) & 0xff) << 16)))
/* True iff request code 'v' belongs to the tool identified by 'a'
   and 'b', i.e. iff the top 16 bits of 'v' equal that tool's base. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (((v) & 0xffff0000) == VG_USERREQ_TOOL_BASE(a,b))
7162
/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
   This enum comprises an ABI exported by Valgrind to programs
   which use client requests.  DO NOT CHANGE THE NUMERIC VALUES OF THESE
   ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
   relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,  /* nesting-depth query */
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,  /* invalidate JITted code */

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change a dynamic command line
             option. */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
7249
/* Non-GCC compilers lack the __extension__ keyword; make it a no-op
   so the client-request expressions below still parse. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
7253
7254
/* Returns the number of Valgrinds this code is running under.  That
   is, 0 if running natively, 1 if running under Valgrind, 2 if
   running under Valgrind which is running under another Valgrind,
   etc. */
/* NOTE(review): the trailing '\' after the final argument line
   continues the macro onto the following blank line -- harmless, but
   confirm intent before reformatting. */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)                    \
7263
7264
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS,  \
                                    _qzz_addr, _qzz_len, 0, 0, 0)

/* Tell an outer Valgrind where this (inner) Valgrind keeps its list
   of guest threads; used only for Valgrind-on-Valgrind self-hosting
   (see VG_USERREQ__INNER_THREADS above). */
#define VALGRIND_INNER_THREADS(_qzz_addr)                               \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,           \
                                   _qzz_addr, 0, 0, 0, 0)
7276
7277
/* These requests are for getting Valgrind itself to print something.
   Possibly with a backtrace.  This is a really ugly hack.  The return value
   is the number of characters printed, excluding the "**<pid>** " part at the
   start and the backtrace (if present). */

/* NOTE(review): '&&' binds tighter than '||', so this guard reads as
   __GNUC__ || (__INTEL_COMPILER && !_MSC_VER).  Presumably fine since
   GCC always accepts these attributes, but confirm the intent. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* Print a printf-style message to the Valgrind log via the
   PRINTF_VALIST_BY_REF client request.  Returns the number of
   characters printed, or 0 when not running under Valgrind (the
   client request's default) or when compiled with NVALGRIND. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* The va_list is passed by reference (&vargs): passing it by value
      is not portable, as noted for the deprecated 0x1401 request. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
7323
/* NOTE(review): same '&&'/'||' precedence shape as the guard above
   VALGRIND_PRINTF -- presumably intentional; confirm. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* As VALGRIND_PRINTF, but Valgrind additionally appends a stack
   backtrace of the calling thread to the log.  Returns the number of
   characters printed (excluding the backtrace), or 0 when not under
   Valgrind or when compiled with NVALGRIND. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
   /* va_list passed by reference, as for VALGRIND_PRINTF. */
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
7362
7363
/* These requests allow control to move from the simulated CPU to the
   real CPU, calling an arbitrary function.

   Note that the current ThreadId is inserted as the first argument.
   So this call:

     VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)

   requires f to have this signature:

     Word f(Word tid, Word arg1, Word arg2)

   where "Word" is a word-sized type.

   Note that these client requests are not entirely reliable.  For example,
   if you call a function with them that subsequently calls printf(),
   there's a high chance Valgrind will crash.  Generally, your prospects of
   these working are made higher if the called function does not refer to
   any global variables, and does not refer to any libc or other functions
   (printf et al).  Any kind of entanglement with libc or dynamic linking is
   likely to have a bad outcome, for tricky reasons which we've grappled
   with a lot in the past.
*/
/* f's required signature: Word f(Word tid).  Evaluates to 0 when not
   running under Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* f's required signature: Word f(Word tid, Word arg1). */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)                    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,            \
                                    VG_USERREQ__CLIENT_CALL1,          \
                                    _qyy_fn,                           \
                                    _qyy_arg1, 0, 0, 0)

/* f's required signature: Word f(Word tid, Word arg1, Word arg2). */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)         \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,            \
                                    VG_USERREQ__CLIENT_CALL2,          \
                                    _qyy_fn,                           \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* f's required signature: Word f(Word tid, Word a1, Word a2, Word a3). */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,             \
                                    VG_USERREQ__CLIENT_CALL3,           \
                                    _qyy_fn,                            \
                                    _qyy_arg1, _qyy_arg2,               \
                                    _qyy_arg3, 0)
7411
7412
/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted.  Evaluates to 0 when
   not running under Valgrind (the client request's default). */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
7421
7422/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
7423 when heap blocks are allocated in order to give accurate results. This
7424 happens automatically for the standard allocator functions such as
7425 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
7426 delete[], etc.
7427
7428 But if your program uses a custom allocator, this doesn't automatically
   happen, and Valgrind will not do as well.  For example, if you allocate
   superblocks with mmap() and then allocate chunks of the superblocks, all
7431 Valgrind's observations will be at the mmap() level and it won't know that
7432 the chunks should be considered separate entities. In Memcheck's case,
7433 that means you probably won't get heap block overrun detection (because
7434 there won't be redzones marked as unaddressable) and you definitely won't
7435 get any leak detection.
7436
7437 The following client requests allow a custom allocator to be annotated so
7438 that it can be handled accurately by Valgrind.
7439
7440 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
7441 by a malloc()-like function. For Memcheck (an illustrative case), this
7442 does two things:
7443
7444 - It records that the block has been allocated. This means any addresses
7445 within the block mentioned in error messages will be
7446 identified as belonging to the block. It also means that if the block
7447 isn't freed it will be detected by the leak checker.
7448
7449 - It marks the block as being addressable and undefined (if 'is_zeroed' is
7450 not set), or addressable and defined (if 'is_zeroed' is set). This
7451 controls how accesses to the block by the program are handled.
7452
7453 'addr' is the start of the usable block (ie. after any
7454 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
7455 can apply redzones -- these are blocks of padding at the start and end of
7456 each block. Adding redzones is recommended as it makes it much more likely
7457 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
7458 zeroed (or filled with another predictable value), as is the case for
7459 calloc().
7460
7461 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
7462 heap block -- that will be used by the client program -- is allocated.
7463 It's best to put it at the outermost level of the allocator if possible;
7464 for example, if you have a function my_alloc() which calls
7465 internal_alloc(), and the client request is put inside internal_alloc(),
7466 stack traces relating to the heap block will contain entries for both
7467 my_alloc() and internal_alloc(), which is probably not what you want.
7468
7469 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
7470 custom blocks from within a heap block, B, that has been allocated with
7471 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
7472 -- the custom blocks will take precedence.
7473
7474 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
7475 Memcheck, it does two things:
7476
7477 - It records that the block has been deallocated. This assumes that the
7478 block was annotated as having been allocated via
7479 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
7480
7481 - It marks the block as being unaddressable.
7482
7483 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
7484 heap block is deallocated.
7485
7486 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
7487 Memcheck, it does four things:
7488
7489 - It records that the size of a block has been changed. This assumes that
7490 the block was annotated as having been allocated via
7491 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
7492
7493 - If the block shrunk, it marks the freed memory as being unaddressable.
7494
7495 - If the block grew, it marks the new area as undefined and defines a red
7496 zone past the end of the new block.
7497
7498 - The V-bits of the overlap between the old and the new block are preserved.
7499
7500 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
7501 and before deallocation of the old block.
7502
7503 In many cases, these three client requests will not be enough to get your
7504 allocator working well with Memcheck. More specifically, if your allocator
7505 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
7506 will be necessary to mark the memory as addressable just before the zeroing
7507 occurs, otherwise you'll get a lot of invalid write errors. For example,
7508 you'll need to do this if your allocator recycles freed blocks, but it
7509 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
7510 Alternatively, if your allocator reuses freed blocks for allocator-internal
7511 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
7512
7513 Really, what's happening is a blurring of the lines between the client
7514 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
7515 memory should be considered unaddressable to the client program, but the
7516 allocator knows more than the rest of the client program and so may be able
7517 to safely access it. Extra client requests are necessary for Valgrind to
7518 understand the distinction between the allocator and the rest of the
7519 program.
7520
7521 Ignored if addr == 0.
7522*/
/* Record a malloc()-like allocation: usable block of 'sizeB' bytes at
   'addr', with 'rzB' bytes of redzone either side; 'is_zeroed' marks
   the contents defined rather than undefined.  See the long comment
   above for full semantics.  Ignored if addr == 0. */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)    \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
                                    addr, sizeB, rzB, is_zeroed, 0)
7526
/* Record an in-place resize of a block previously annotated with
   VALGRIND_MALLOCLIKE_BLOCK, from 'oldSizeB' to 'newSizeB' bytes.
   See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
                                    addr, oldSizeB, newSizeB, rzB, 0)
7533
/* Record a free()-like deallocation of the block at 'addr' (which must
   have been annotated with VALGRIND_MALLOCLIKE_BLOCK); 'rzB' is the
   redzone size.  See the comment for VALGRIND_MALLOCLIKE_BLOCK for
   details.  Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                        \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,   \
                                    addr, rzB, 0, 0, 0)
7540
/* Create a memory pool anchored at 'pool', with redzone size 'rzB';
   'is_zeroed' indicates that chunks handed out are zero-filled.
   Equivalent to VALGRIND_CREATE_MEMPOOL_EXT with flags == 0. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, 0, 0)
7545
7546/* Create a memory pool with some flags specifying extended behaviour.
7547 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
7548
7549 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
7550 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
7551 by the application as superblocks to dole out MALLOC_LIKE blocks using
   VALGRIND_MALLOCLIKE_BLOCK.  In other words, a meta pool is a two-level
   pool: the first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
   The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
7555 Note that the association between the pool and the second level blocks
7556 is implicit : second level blocks will be located inside first level
7557 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
7558 for such 2 levels pools, as otherwise valgrind will detect overlapping
7559 memory blocks, and will abort execution (e.g. during leak search).
7560
7561 Such a meta pool can also be marked as an 'auto free' pool using the flag
7562 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
7563 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
7564 will automatically free the second level blocks that are contained
7565 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
7566 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
7567 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
7568 in the first level block.
7569 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
7570 without the VALGRIND_MEMPOOL_METAPOOL flag.
7571*/
/* Flag bits for VALGRIND_CREATE_MEMPOOL_EXT; see the comment above.
   AUTO_FREE is only valid together with METAPOOL. */
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
/* Same client request as VALGRIND_CREATE_MEMPOOL, with extra 'flags'. */
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,   \
                                    pool, rzB, is_zeroed, flags, 0)
7577
/* Destroy the memory pool anchored at 'pool'. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,  \
                                    pool, 0, 0, 0, 0)
7582
/* Associate a piece of memory ('size' bytes at 'addr') with the pool
   anchored at 'pool'. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,    \
                                    pool, addr, size, 0, 0)
7587
/* Disassociate the piece of memory at 'addr' from the pool.  For an
   AUTO_FREE metapool this also frees the contained second-level blocks
   (see the VALGRIND_CREATE_MEMPOOL_EXT comment). */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,     \
                                    pool, addr, 0, 0, 0)
7592
/* Disassociate any pieces of the pool lying outside the range
   [addr, addr+size). */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,     \
                                    pool, addr, size, 0, 0)
7597
/* Tell the tool that the pool previously anchored at address poolA has
   moved to anchor address poolB.  NOTE(review): the upstream comment
   here duplicated VALGRIND_MEMPOOL_CHANGE's text ("Resize and/or move
   a piece..."); the semantics above follow the Memcheck manual. */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,     \
                                    poolA, poolB, 0, 0, 0)
7602
/* Resize and/or move a piece associated with a memory pool: the piece
   previously at 'addrA' is now at 'addrB' with length 'size'. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,   \
                                    pool, addrA, addrB, size, 0)
7607
/* Evaluates to 1 if a mempool anchored at 'pool' exists, else 0
   (also 0 when not running under Valgrind). */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7613
/* Mark a piece of memory as being a stack.  Returns a stack id
   (unsigned; 0 when not running under Valgrind).
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)
7621
/* Unmark the piece of memory associated with a stack id (as returned
   by VALGRIND_STACK_REGISTER) as being a stack. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER, \
                                    id, 0, 0, 0, 0)
7627
/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new
   highest addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,     \
                                    id, start, end, 0, 0)
7634
/* Load PDB debug info for Wine PE image_map: 'fd' is the PDB file
   descriptor, 'ptr'/'total_size' describe the mapped image, 'delta'
   is the load-address adjustment. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                    fd, ptr, total_size, delta, 0)
7639
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)
7648
/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
/* Increments the thread's disablement count by 1 (ENABLE passes -1). */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    1, 0, 0, 0, 0)
7660
/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING (decrements the same counter). */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                    -1, 0, 0, 0, 0)
7666
/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Evaluates to 1 if command not recognised, 0 otherwise (note:
   expression form, unlike most statement-form requests here). */
#define VALGRIND_MONITOR_COMMAND(command)                               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                    command, 0, 0, 0, 0)
7675
7676
/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                           \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,   \
                                    option, 0, 0, 0, 0)
7683
7684
/* Remove the internal platform-selection macros so they do not leak
   into code that includes this header. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_riscv64_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
7702
7703#endif /* __VALGRIND_H */
7704

source code of qtbase/src/testlib/3rdparty/valgrind/valgrind_p.h