1/* -*- c -*-
2 ----------------------------------------------------------------
3
4 Notice that the following BSD-style license applies to this one
5 file (valgrind.h) only. The rest of Valgrind is licensed under the
6 terms of the GNU General Public License, version 2, unless
7 otherwise indicated. See the COPYING file in the source
8 distribution for details.
9
10 ----------------------------------------------------------------
11
12 This file is part of Valgrind, a dynamic binary instrumentation
13 framework.
14
15 Copyright (C) 2000-2017 Julian Seward. All rights reserved.
16
17 Redistribution and use in source and binary forms, with or without
18 modification, are permitted provided that the following conditions
19 are met:
20
21 1. Redistributions of source code must retain the above copyright
22 notice, this list of conditions and the following disclaimer.
23
24 2. The origin of this software must not be misrepresented; you must
25 not claim that you wrote the original software. If you use this
26 software in a product, an acknowledgment in the product
27 documentation would be appreciated but is not required.
28
29 3. Altered source versions must be plainly marked as such, and must
30 not be misrepresented as being the original software.
31
32 4. The name of the author may not be used to endorse or promote
33 products derived from this software without specific prior written
34 permission.
35
36 THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37 OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39 ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41 DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42 GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43 INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44 WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47
48 ----------------------------------------------------------------
49
50 Notice that the above BSD-style license applies to this one file
51 (valgrind.h) only. The entire rest of Valgrind is licensed under
52 the terms of the GNU General Public License, version 2. See the
53 COPYING file in the source distribution for details.
54
55 ----------------------------------------------------------------
56*/
57
58
59/* This file is for inclusion into client (your!) code.
60
61 You can use these macros to manipulate and query Valgrind's
62 execution inside your own programs.
63
64 The resulting executables will still run without Valgrind, just a
65 little bit more slowly than they otherwise would, but otherwise
66 unchanged. When not running on valgrind, each client request
67 consumes very few (eg. 7) instructions, so the resulting performance
68 loss is negligible unless you plan to execute client requests
69 millions of times per second. Nevertheless, if that is still a
70 problem, you can compile with the NVALGRIND symbol defined (gcc
71 -DNVALGRIND) so that client requests are not even compiled in. */
72
/* Include guard: this header may be included many times per program. */
#ifndef __VALGRIND_H
#define __VALGRIND_H


/* ------------------------------------------------------------------ */
/* VERSION NUMBER OF VALGRIND */
/* ------------------------------------------------------------------ */

/* Specify Valgrind's version number, so that user code can
 conditionally compile based on our version number. Note that these
 were introduced at version 3.6 and so do not exist in version 3.5
 or earlier. The recommended way to use them to check for "version
 X.Y or later" is (eg)

#if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
 && (__VALGRIND_MAJOR__ > 3 \
 || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
*/
#define __VALGRIND_MAJOR__ 3
#define __VALGRIND_MINOR__ 17
93
94
95#include <stdarg.h>
96
97/* Nb: this file might be included in a file compiled with -ansi. So
98 we can't use C++ style "//" comments nor the "asm" keyword (instead
99 use "__asm__"). */
100
101/* Derive some tags indicating what the target platform is. Note
102 that in this file we're using the compiler's CPP symbols for
103 identifying architectures, which are different to the ones we use
104 within the rest of Valgrind. Note, __powerpc__ is active for both
105 32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106 latter (on Linux, that is).
107
108 Misc note: how to find out what's predefined in gcc by default:
109 gcc -Wp,-dM somefile.c
110*/
/* First clear every PLAT_* tag; the #if chain below then defines at
   most one of them.  If no branch matches, NVALGRIND is forced on so
   that no inline asm is emitted for an unknown target. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_arm64_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris


#if defined(__APPLE__) && defined(__i386__)
# define PLAT_x86_darwin 1
#elif defined(__APPLE__) && defined(__x86_64__)
# define PLAT_amd64_darwin 1
#elif (defined(__MINGW32__) && defined(__i386__)) \
 || defined(__CYGWIN32__) \
 || (defined(_WIN32) && defined(_M_IX86))
# define PLAT_x86_win32 1
#elif (defined(__MINGW32__) && defined(__x86_64__)) \
 || (defined(_WIN32) && defined(_M_X64))
/* __MINGW32__ and _WIN32 are defined in 64 bit mode as well. */
# define PLAT_amd64_win64 1
#elif defined(__linux__) && defined(__i386__)
# define PLAT_x86_linux 1
#elif defined(__linux__) && defined(__x86_64__) && !defined(__ILP32__)
# define PLAT_amd64_linux 1
#elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
# define PLAT_ppc32_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF != 2
/* Big Endian uses ELF version 1 */
# define PLAT_ppc64be_linux 1
#elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__) && _CALL_ELF == 2
/* Little Endian uses ELF version 2 */
# define PLAT_ppc64le_linux 1
#elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
# define PLAT_arm_linux 1
#elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
# define PLAT_arm64_linux 1
#elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
# define PLAT_s390x_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==64)
# define PLAT_mips64_linux 1
#elif defined(__linux__) && defined(__mips__) && (__mips==32)
# define PLAT_mips32_linux 1
#elif defined(__linux__) && defined(__nanomips__)
# define PLAT_nanomips_linux 1
#elif defined(__sun) && defined(__i386__)
# define PLAT_x86_solaris 1
#elif defined(__sun) && defined(__x86_64__)
# define PLAT_amd64_solaris 1
#else
/* If we're not compiling for our target platform, don't generate
 any inline asms. */
# if !defined(NVALGRIND)
# define NVALGRIND 1
# endif
#endif
177
178
179/* ------------------------------------------------------------------ */
180/* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
181/* in here of use to end-users -- skip to the next section. */
182/* ------------------------------------------------------------------ */
183
/*
 * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
 * request. Accepts both pointers and integers as arguments.
 *
 * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
 * client request that does not return a value.
 *
 * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
 * client request and whose value equals the client request result. Accepts
 * both pointers and integers as arguments. Note that such calls are not
 * necessarily pure functions -- they may have side effects.
 */

/* Statement form: assigns the request result into _zzq_rlval.  Both
   wrappers below delegate to the per-platform _EXPR macro. */
#define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
 _zzq_request, _zzq_arg1, _zzq_arg2, \
 _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

/* Statement form with the result discarded (default value 0). */
#define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
 _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
 (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
 (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)

#if defined(NVALGRIND)

/* Define NVALGRIND to completely remove the Valgrind magic sequence
 from the compiled code (analogous to NDEBUG's effects on
 assert()) */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 (_zzq_default)
219
220#else /* ! NVALGRIND */
221
222/* The following defines the magic code sequences which the JITter
223 spots and handles magically. Don't look too closely at them as
224 they will rot your brain.
225
226 The assembly code sequences for all architectures is in this one
227 file. This is because this file must be stand-alone, and we don't
228 want to have multiple files.
229
230 For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
231 value gets put in the return slot, so that everything works when
232 this is executed not under Valgrind. Args are passed in a memory
233 block, and so there's no intrinsic limit to the number that could
234 be passed, but it's currently five.
235
236 The macro args are:
237 _zzq_rlval result lvalue
238 _zzq_default default value (result returned when running on real CPU)
239 _zzq_request request code
240 _zzq_arg1..5 request params
241
242 The other two macros are used to support function wrapping, and are
243 a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
244 guest's NRADDR pseudo-register and whatever other information is
245 needed to safely run the call original from the wrapper: on
246 ppc64-linux, the R2 value at the divert point is also needed. This
247 information is abstracted into a user-visible type, OrigFn.
248
249 VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
250 guest, but guarantees that the branch instruction will not be
251 redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
252 branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
253 complete inline asm, since it needs to be combined with more magic
254 inline asm stuff to be useful.
255*/
256
257/* ----------------- x86-{linux,darwin,solaris} ---------------- */
258
/* x86 magic sequence: rotate %edi left by 3, 13, 29, 19 — a total of
   64 bits, i.e. two full 32-bit rotations, so %edi is unchanged on a
   real CPU.  Valgrind's JIT recognises this pattern and interprets the
   xchg instruction that follows it as a request. */
#if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
 || (defined(PLAT_x86_win32) && defined(__GNUC__)) \
 || defined(PLAT_x86_solaris)

typedef
 struct {
 unsigned int nraddr; /* where's the code? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "roll $3, %%edi ; roll $13, %%edi\n\t" \
 "roll $29, %%edi ; roll $19, %%edi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 __extension__ \
 ({volatile unsigned int _zzq_args[6]; \
 volatile unsigned int _zzq_result; \
 _zzq_args[0] = (unsigned int)(_zzq_request); \
 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %EDX = client_request ( %EAX ) */ \
 "xchgl %%ebx,%%ebx" \
 : "=d" (_zzq_result) \
 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
 : "cc", "memory" \
 ); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 volatile unsigned int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %EAX = guest_NRADDR */ \
 "xchgl %%ecx,%%ecx" \
 : "=a" (__addr) \
 : \
 : "cc", "memory" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_CALL_NOREDIR_EAX \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* call-noredir *%EAX */ \
 "xchgl %%edx,%%edx\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "xchgl %%edi,%%edi\n\t" \
 : : : "cc", "memory" \
 ); \
 } while (0)

#endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__)
 || PLAT_x86_solaris */
323
324/* ------------------------- x86-Win32 ------------------------- */
325
/* x86 Win32 with a non-GNU compiler: same rol-edi magic sequence as the
   GNU x86 variant above, expressed in MSVC __asm syntax. */
#if defined(PLAT_x86_win32) && !defined(__GNUC__)

typedef
 struct {
 unsigned int nraddr; /* where's the code? */
 }
 OrigFn;

#if defined(_MSC_VER)

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 __asm rol edi, 3 __asm rol edi, 13 \
 __asm rol edi, 29 __asm rol edi, 19

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
 (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
 (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
 (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))

/* Out-of-line helper because MSVC inline asm cannot live inside a
   statement expression; packs the request code and args into an array
   and issues the magic sequence. */
static __inline uintptr_t
valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
 uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
 uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
 uintptr_t _zzq_arg5)
{
 volatile uintptr_t _zzq_args[6];
 volatile unsigned int _zzq_result;
 _zzq_args[0] = (uintptr_t)(_zzq_request);
 _zzq_args[1] = (uintptr_t)(_zzq_arg1);
 _zzq_args[2] = (uintptr_t)(_zzq_arg2);
 _zzq_args[3] = (uintptr_t)(_zzq_arg3);
 _zzq_args[4] = (uintptr_t)(_zzq_arg4);
 _zzq_args[5] = (uintptr_t)(_zzq_arg5);
 __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
 __SPECIAL_INSTRUCTION_PREAMBLE
 /* %EDX = client_request ( %EAX ) */
 __asm xchg ebx,ebx
 __asm mov _zzq_result, edx
 }
 return _zzq_result;
}

#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 volatile unsigned int __addr; \
 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
 /* %EAX = guest_NRADDR */ \
 __asm xchg ecx,ecx \
 __asm mov __addr, eax \
 } \
 _zzq_orig->nraddr = __addr; \
 }

/* Function wrapping is not supported under MSVC; expand to a hard
   compile error if anyone tries to use it. */
#define VALGRIND_CALL_NOREDIR_EAX ERROR

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
 __asm xchg edi,edi \
 } \
 } while (0)

#else
#error Unsupported compiler.
#endif

#endif /* PLAT_x86_win32 */
396
397/* ----------------- amd64-{linux,darwin,solaris} --------------- */
398
/* amd64 magic sequence: rotate %rdi left by 3, 13, 61, 51 — a total of
   128 bits, i.e. two full 64-bit rotations, so %rdi is unchanged on a
   real CPU. */
#if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin) \
 || defined(PLAT_amd64_solaris) \
 || (defined(PLAT_amd64_win64) && defined(__GNUC__))

typedef
 struct {
 unsigned long int nraddr; /* where's the code? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
 "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 __extension__ \
 ({ volatile unsigned long int _zzq_args[6]; \
 volatile unsigned long int _zzq_result; \
 _zzq_args[0] = (unsigned long int)(_zzq_request); \
 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %RDX = client_request ( %RAX ) */ \
 "xchgq %%rbx,%%rbx" \
 : "=d" (_zzq_result) \
 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
 : "cc", "memory" \
 ); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 volatile unsigned long int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %RAX = guest_NRADDR */ \
 "xchgq %%rcx,%%rcx" \
 : "=a" (__addr) \
 : \
 : "cc", "memory" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_CALL_NOREDIR_RAX \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* call-noredir *%RAX */ \
 "xchgq %%rdx,%%rdx\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "xchgq %%rdi,%%rdi\n\t" \
 : : : "cc", "memory" \
 ); \
 } while (0)

#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris
 || (PLAT_amd64_win64 && __GNUC__) */
462
463/* ------------------------- amd64-Win64 ------------------------- */
464
465#if defined(PLAT_amd64_win64) && !defined(__GNUC__)
466
467#error Unsupported compiler.
468
469#endif /* PLAT_amd64_win64 */
470
471/* ------------------------ ppc32-linux ------------------------ */
472
/* ppc32 magic sequence: rotate r0 left by 3, 13, 29, 19 — a total of
   64 bits, i.e. two full 32-bit rotations, so r0 is unchanged on a
   real CPU. */
#if defined(PLAT_ppc32_linux)

typedef
 struct {
 unsigned int nraddr; /* where's the code? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
 "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
 __extension__ \
 ({ unsigned int _zzq_args[6]; \
 unsigned int _zzq_result; \
 unsigned int* _zzq_ptr; \
 _zzq_args[0] = (unsigned int)(_zzq_request); \
 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
 _zzq_ptr = _zzq_args; \
 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
 "mr 4,%2\n\t" /*ptr*/ \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = client_request ( %R4 ) */ \
 "or 1,1,1\n\t" \
 "mr %0,3" /*result*/ \
 : "=b" (_zzq_result) \
 : "b" (_zzq_default), "b" (_zzq_ptr) \
 : "cc", "memory", "r3", "r4"); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 unsigned int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = guest_NRADDR */ \
 "or 2,2,2\n\t" \
 "mr %0,3" \
 : "=b" (__addr) \
 : \
 : "cc", "memory", "r3" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* branch-and-link-to-noredir *%R11 */ \
 "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "or 5,5,5\n\t" \
 ); \
 } while (0)

#endif /* PLAT_ppc32_linux */
539
540/* ------------------------ ppc64-linux ------------------------ */
541
/* ppc64 (big-endian, ELFv1) magic sequence: rotate r0 left by 3, 13,
   61, 51 — a total of 128 bits, i.e. two full 64-bit rotations, so r0
   is unchanged on a real CPU.  OrigFn additionally carries the R2 TOC
   pointer needed to call the original function from a wrapper. */
#if defined(PLAT_ppc64be_linux)

typedef
 struct {
 unsigned long int nraddr; /* where's the code? */
 unsigned long int r2; /* what tocptr do we need? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
 __extension__ \
 ({ unsigned long int _zzq_args[6]; \
 unsigned long int _zzq_result; \
 unsigned long int* _zzq_ptr; \
 _zzq_args[0] = (unsigned long int)(_zzq_request); \
 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
 _zzq_ptr = _zzq_args; \
 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
 "mr 4,%2\n\t" /*ptr*/ \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = client_request ( %R4 ) */ \
 "or 1,1,1\n\t" \
 "mr %0,3" /*result*/ \
 : "=b" (_zzq_result) \
 : "b" (_zzq_default), "b" (_zzq_ptr) \
 : "cc", "memory", "r3", "r4"); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register and its R2 (TOC pointer)
   into _zzq_rlval.  Two separate requests are issued. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 unsigned long int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = guest_NRADDR */ \
 "or 2,2,2\n\t" \
 "mr %0,3" \
 : "=b" (__addr) \
 : \
 : "cc", "memory", "r3" \
 ); \
 _zzq_orig->nraddr = __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = guest_NRADDR_GPR2 */ \
 "or 4,4,4\n\t" \
 "mr %0,3" \
 : "=b" (__addr) \
 : \
 : "cc", "memory", "r3" \
 ); \
 _zzq_orig->r2 = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* branch-and-link-to-noredir *%R11 */ \
 "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "or 5,5,5\n\t" \
 ); \
 } while (0)

#endif /* PLAT_ppc64be_linux */
618
/* ppc64 (little-endian, ELFv2) variant: identical magic sequence to the
   big-endian version; the function-wrapping branch target is R12 here
   (ELFv2 entry-point convention) rather than R11. */
#if defined(PLAT_ppc64le_linux)

typedef
 struct {
 unsigned long int nraddr; /* where's the code? */
 unsigned long int r2; /* what tocptr do we need? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
 "rotldi 0,0,61 ; rotldi 0,0,51\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
 __extension__ \
 ({ unsigned long int _zzq_args[6]; \
 unsigned long int _zzq_result; \
 unsigned long int* _zzq_ptr; \
 _zzq_args[0] = (unsigned long int)(_zzq_request); \
 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
 _zzq_ptr = _zzq_args; \
 __asm__ volatile("mr 3,%1\n\t" /*default*/ \
 "mr 4,%2\n\t" /*ptr*/ \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = client_request ( %R4 ) */ \
 "or 1,1,1\n\t" \
 "mr %0,3" /*result*/ \
 : "=b" (_zzq_result) \
 : "b" (_zzq_default), "b" (_zzq_ptr) \
 : "cc", "memory", "r3", "r4"); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register and its R2 (TOC pointer)
   into _zzq_rlval.  Two separate requests are issued. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 unsigned long int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = guest_NRADDR */ \
 "or 2,2,2\n\t" \
 "mr %0,3" \
 : "=b" (__addr) \
 : \
 : "cc", "memory", "r3" \
 ); \
 _zzq_orig->nraddr = __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* %R3 = guest_NRADDR_GPR2 */ \
 "or 4,4,4\n\t" \
 "mr %0,3" \
 : "=b" (__addr) \
 : \
 : "cc", "memory", "r3" \
 ); \
 _zzq_orig->r2 = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* branch-and-link-to-noredir *%R12 */ \
 "or 3,3,3\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "or 5,5,5\n\t" \
 ); \
 } while (0)

#endif /* PLAT_ppc64le_linux */
695
696/* ------------------------- arm-linux ------------------------- */
697
/* arm (32-bit) magic sequence: rotate r12 right by 3, 13, 29, 19 — a
   total of 64 bits, i.e. two full 32-bit rotations, so r12 is unchanged
   on a real CPU. */
#if defined(PLAT_arm_linux)

typedef
 struct {
 unsigned int nraddr; /* where's the code? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
 "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
 __extension__ \
 ({volatile unsigned int _zzq_args[6]; \
 volatile unsigned int _zzq_result; \
 _zzq_args[0] = (unsigned int)(_zzq_request); \
 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
 __asm__ volatile("mov r3, %1\n\t" /*default*/ \
 "mov r4, %2\n\t" /*ptr*/ \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* R3 = client_request ( R4 ) */ \
 "orr r10, r10, r10\n\t" \
 "mov %0, r3" /*result*/ \
 : "=r" (_zzq_result) \
 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
 : "cc","memory", "r3", "r4"); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 unsigned int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* R3 = guest_NRADDR */ \
 "orr r11, r11, r11\n\t" \
 "mov %0, r3" \
 : "=r" (__addr) \
 : \
 : "cc", "memory", "r3" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* branch-and-link-to-noredir *%R4 */ \
 "orr r12, r12, r12\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "orr r9, r9, r9\n\t" \
 : : : "cc", "memory" \
 ); \
 } while (0)

#endif /* PLAT_arm_linux */
763
764/* ------------------------ arm64-linux ------------------------- */
765
/* arm64 magic sequence: rotate x12 right by 3, 13, 51, 61 — a total of
   128 bits, i.e. two full 64-bit rotations, so x12 is unchanged on a
   real CPU. */
#if defined(PLAT_arm64_linux)

typedef
 struct {
 unsigned long int nraddr; /* where's the code? */
 }
 OrigFn;

#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
 "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 \
 __extension__ \
 ({volatile unsigned long int _zzq_args[6]; \
 volatile unsigned long int _zzq_result; \
 _zzq_args[0] = (unsigned long int)(_zzq_request); \
 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
 __asm__ volatile("mov x3, %1\n\t" /*default*/ \
 "mov x4, %2\n\t" /*ptr*/ \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* X3 = client_request ( X4 ) */ \
 "orr x10, x10, x10\n\t" \
 "mov %0, x3" /*result*/ \
 : "=r" (_zzq_result) \
 : "r" ((unsigned long int)(_zzq_default)), \
 "r" (&_zzq_args[0]) \
 : "cc","memory", "x3", "x4"); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 unsigned long int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* X3 = guest_NRADDR */ \
 "orr x11, x11, x11\n\t" \
 "mov %0, x3" \
 : "=r" (__addr) \
 : \
 : "cc", "memory", "x3" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* branch-and-link-to-noredir X8 */ \
 "orr x12, x12, x12\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "orr x9, x9, x9\n\t" \
 : : : "cc", "memory" \
 ); \
 } while (0)

#endif /* PLAT_arm64_linux */
832
833/* ------------------------ s390x-linux ------------------------ */
834
/* s390x magic sequence: four "lr reg,reg" self-moves (no-ops on a real
   CPU), followed by one of the __*_CODE markers below selecting the
   specific request. */
#if defined(PLAT_s390x_linux)

typedef
 struct {
 unsigned long int nraddr; /* where's the code? */
 }
 OrigFn;

/* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
 * code. This detection is implemented in platform specific toIR.c
 * (e.g. VEX/priv/guest_s390_decoder.c).
 */
#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "lr 15,15\n\t" \
 "lr 1,1\n\t" \
 "lr 2,2\n\t" \
 "lr 3,3\n\t"

#define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
#define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
#define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
#define __VEX_INJECT_IR_CODE "lr 5,5\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 __extension__ \
 ({volatile unsigned long int _zzq_args[6]; \
 volatile unsigned long int _zzq_result; \
 _zzq_args[0] = (unsigned long int)(_zzq_request); \
 _zzq_args[1] = (unsigned long int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned long int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned long int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned long int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned long int)(_zzq_arg5); \
 __asm__ volatile(/* r2 = args */ \
 "lgr 2,%1\n\t" \
 /* r3 = default */ \
 "lgr 3,%2\n\t" \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 __CLIENT_REQUEST_CODE \
 /* results = r3 */ \
 "lgr %0, 3\n\t" \
 : "=d" (_zzq_result) \
 : "a" (&_zzq_args[0]), "0" (_zzq_default) \
 : "cc", "2", "3", "memory" \
 ); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 volatile unsigned long int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 __GET_NR_CONTEXT_CODE \
 "lgr %0, 3\n\t" \
 : "=a" (__addr) \
 : \
 : "cc", "3", "memory" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_CALL_NOREDIR_R1 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 __CALL_NO_REDIR_CODE

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 __VEX_INJECT_IR_CODE); \
 } while (0)

#endif /* PLAT_s390x_linux */
909
910/* ------------------------- mips32-linux ---------------- */
911
/* mips32 magic sequence: shifts of the hardwired-zero register $0,
   which are architectural no-ops on a real CPU. */
#if defined(PLAT_mips32_linux)

typedef
 struct {
 unsigned int nraddr; /* where's the code? */
 }
 OrigFn;

/* .word 0x342
 * .word 0x742
 * .word 0xC2
 * .word 0x4C2*/
#define __SPECIAL_INSTRUCTION_PREAMBLE \
 "srl $0, $0, 13\n\t" \
 "srl $0, $0, 29\n\t" \
 "srl $0, $0, 3\n\t" \
 "srl $0, $0, 19\n\t"

#define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
 _zzq_default, _zzq_request, \
 _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
 __extension__ \
 ({ volatile unsigned int _zzq_args[6]; \
 volatile unsigned int _zzq_result; \
 _zzq_args[0] = (unsigned int)(_zzq_request); \
 _zzq_args[1] = (unsigned int)(_zzq_arg1); \
 _zzq_args[2] = (unsigned int)(_zzq_arg2); \
 _zzq_args[3] = (unsigned int)(_zzq_arg3); \
 _zzq_args[4] = (unsigned int)(_zzq_arg4); \
 _zzq_args[5] = (unsigned int)(_zzq_arg5); \
 __asm__ volatile("move $11, %1\n\t" /*default*/ \
 "move $12, %2\n\t" /*ptr*/ \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* $11 = client_request ( $12 ) */ \
 "or $13, $13, $13\n\t" \
 "move %0, $11\n\t" /*result*/ \
 : "=r" (_zzq_result) \
 : "r" (_zzq_default), "r" (&_zzq_args[0]) \
 : "$11", "$12", "memory"); \
 _zzq_result; \
 })

/* Stores the guest's NRADDR pseudo-register into _zzq_rlval.nraddr. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
 { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
 volatile unsigned int __addr; \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 /* $11 = guest_NRADDR */ \
 "or $14, $14, $14\n\t" \
 "move %0, $11" /*result*/ \
 : "=r" (__addr) \
 : \
 : "$11" \
 ); \
 _zzq_orig->nraddr = __addr; \
 }

/* Asm text fragment only — must be embedded in a larger asm block. */
#define VALGRIND_CALL_NOREDIR_T9 \
 __SPECIAL_INSTRUCTION_PREAMBLE \
 /* call-noredir *%t9 */ \
 "or $15, $15, $15\n\t"

#define VALGRIND_VEX_INJECT_IR() \
 do { \
 __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
 "or $11, $11, $11\n\t" \
 ); \
 } while (0)


#endif /* PLAT_mips32_linux */
982
983/* ------------------------- mips64-linux ---------------- */
984
#if defined(PLAT_mips64_linux)

/* Context of an original (wrapped) function; on mips64 only the
   code address (64-bit) is needed. */
typedef
   struct {
      unsigned long nraddr; /* where's the code? */
   }
   OrigFn;

/* dsll $0,$0, 3
 * dsll $0,$0, 13
 * dsll $0,$0, 29
 * dsll $0,$0, 19*/
/* Four doubleword shifts targeting $0: harmless no-ops that form
   the magic sequence Valgrind recognises on mips64. */
#define __SPECIAL_INSTRUCTION_PREAMBLE                              \
        "dsll $0,$0, 3 ; dsll $0,$0,13\n\t"                         \
        "dsll $0,$0,29 ; dsll $0,$0,19\n\t"

/* Perform a client request.  $11 carries the default value in and
   the result out; $12 points at the 6-doubleword argument block.
   "or $13,$13,$13" after the preamble is the request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                            \
       _zzq_default, _zzq_request,                                  \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)       \
   __extension__                                                    \
   ({ volatile unsigned long int _zzq_args[6];                      \
      volatile unsigned long int _zzq_result;                       \
      _zzq_args[0] = (unsigned long int)(_zzq_request);             \
      _zzq_args[1] = (unsigned long int)(_zzq_arg1);                \
      _zzq_args[2] = (unsigned long int)(_zzq_arg2);                \
      _zzq_args[3] = (unsigned long int)(_zzq_arg3);                \
      _zzq_args[4] = (unsigned long int)(_zzq_arg4);                \
      _zzq_args[5] = (unsigned long int)(_zzq_arg5);                \
      __asm__ volatile("move $11, %1\n\t" /*default*/               \
                       "move $12, %2\n\t" /*ptr*/                   \
                       __SPECIAL_INSTRUCTION_PREAMBLE               \
                       /* $11 = client_request ( $12 ) */           \
                       "or $13, $13, $13\n\t"                       \
                       "move %0, $11\n\t"     /*result*/            \
                       : "=r" (_zzq_result)                         \
                       : "r" (_zzq_default), "r" (&_zzq_args[0])    \
                       : "$11", "$12", "memory");                   \
      _zzq_result;                                                  \
   })

/* Fetch the original function's address into _zzq_rlval.nraddr.
   "or $14,$14,$14" is the guest_NRADDR marker; result is in $11. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                         \
   { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                    \
     volatile unsigned long int __addr;                             \
     __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                \
                      /* $11 = guest_NRADDR */                      \
                      "or $14, $14, $14\n\t"                        \
                      "move %0, $11"     /*result*/                 \
                      : "=r" (__addr)                               \
                      :                                             \
                      : "$11");                                     \
     _zzq_orig->nraddr = __addr;                                    \
   }

/* Marker for a call through $25 ($t9) that must not be redirected. */
#define VALGRIND_CALL_NOREDIR_T9                                    \
                    __SPECIAL_INSTRUCTION_PREAMBLE                  \
                    /* call-noredir $25 */                          \
                    "or $15, $15, $15\n\t"

/* VEX IR-injection test hook ("or $11,$11,$11" marker). */
#define VALGRIND_VEX_INJECT_IR()                                    \
 do {                                                               \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE                 \
                     "or $11, $11, $11\n\t"                         \
                    );                                              \
 } while (0)

#endif /* PLAT_mips64_linux */
1051
#if defined(PLAT_nanomips_linux)

/* Context of an original (wrapped) function; on nanomips only the
   32-bit code address is needed. */
typedef
   struct {
      unsigned int nraddr; /* where's the code? */
   }
   OrigFn;
/*
   8000 c04d  srl  zero, zero, 13
   8000 c05d  srl  zero, zero, 29
   8000 c043  srl  zero, zero,  3
   8000 c053  srl  zero, zero, 19
*/

/* Four 32-bit-encoded shifts of $zero: harmless no-ops forming the
   magic sequence Valgrind recognises on nanomips. */
#define __SPECIAL_INSTRUCTION_PREAMBLE "srl[32] $zero, $zero, 13 \n\t" \
                                       "srl[32] $zero, $zero, 29 \n\t" \
                                       "srl[32] $zero, $zero, 3  \n\t" \
                                       "srl[32] $zero, $zero, 19 \n\t"

/* Perform a client request.  $a7 carries the default value in and
   the result out; $t0 points at the 6-word argument block.
   "or[32] $t0,$t0,$t0" after the preamble is the request marker. */
#define VALGRIND_DO_CLIENT_REQUEST_EXPR(                          \
       _zzq_default, _zzq_request,                                \
       _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5)     \
  __extension__                                                   \
  ({ volatile unsigned int _zzq_args[6];                          \
    volatile unsigned int _zzq_result;                            \
    _zzq_args[0] = (unsigned int)(_zzq_request);                  \
    _zzq_args[1] = (unsigned int)(_zzq_arg1);                     \
    _zzq_args[2] = (unsigned int)(_zzq_arg2);                     \
    _zzq_args[3] = (unsigned int)(_zzq_arg3);                     \
    _zzq_args[4] = (unsigned int)(_zzq_arg4);                     \
    _zzq_args[5] = (unsigned int)(_zzq_arg5);                     \
        __asm__ volatile("move $a7, %1\n\t" /* default */         \
                         "move $t0, %2\n\t" /* ptr */             \
                         __SPECIAL_INSTRUCTION_PREAMBLE           \
                         /* $a7 = client_request( $t0 ) */        \
                         "or[32] $t0, $t0, $t0\n\t"               \
                         "move %0, $a7\n\t" /* result */          \
                         : "=r" (_zzq_result)                     \
                         : "r" (_zzq_default), "r" (&_zzq_args[0]) \
                         : "$a7", "$t0", "memory");               \
    _zzq_result;                                                  \
  })

/* Fetch the original function's address into _zzq_rlval.nraddr.
   "or[32] $t1,$t1,$t1" is the guest_NRADDR marker; result in $a7.
   NOTE(review): __addr is unsigned long while nraddr is unsigned
   int -- same width on 32-bit nanomips, so harmless. */
#define VALGRIND_GET_NR_CONTEXT(_zzq_rlval)                       \
  { volatile OrigFn* _zzq_orig = &(_zzq_rlval);                   \
    volatile unsigned long int __addr;                            \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     /* $a7 = guest_NRADDR */                     \
                     "or[32] $t1, $t1, $t1\n\t"                   \
                     "move %0, $a7" /*result*/                    \
                     : "=r" (__addr)                              \
                     :                                            \
                     : "$a7");                                    \
    _zzq_orig->nraddr = __addr;                                   \
  }

/* Marker for a call through $25 ($t9) that must not be redirected. */
#define VALGRIND_CALL_NOREDIR_T9                                  \
                    __SPECIAL_INSTRUCTION_PREAMBLE                \
                    /* call-noredir $25 */                        \
                    "or[32] $t2, $t2, $t2\n\t"

/* VEX IR-injection test hook ("or[32] $t3,$t3,$t3" marker). */
#define VALGRIND_VEX_INJECT_IR()                                  \
 do {                                                             \
    __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE               \
                     "or[32] $t3, $t3, $t3\n\t"                   \
                    );                                            \
 } while (0)

#endif
1121/* Insert assembly code for other platforms here... */
1122
1123#endif /* NVALGRIND */
1124
1125
1126/* ------------------------------------------------------------------ */
1127/* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
1128/* ugly. It's the least-worst tradeoff I can think of. */
1129/* ------------------------------------------------------------------ */
1130
1131/* This section defines magic (a.k.a appalling-hack) macros for doing
1132 guaranteed-no-redirection macros, so as to get from function
1133 wrappers to the functions they are wrapping. The whole point is to
1134 construct standard call sequences, but to do the call itself with a
1135 special no-redirect call pseudo-instruction that the JIT
1136 understands and handles specially. This section is long and
1137 repetitious, and I can't see a way to make it shorter.
1138
1139 The naming scheme is as follows:
1140
1141 CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
1142
1143 'W' stands for "word" and 'v' for "void". Hence there are
1144 different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
1145 and for each, the possibility of returning a word-typed result, or
1146 no result.
1147*/
1148
1149/* Use these to write the name of your wrapper. NOTE: duplicates
1150 VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
   the default behaviour equivalence class tag "0000" into the name.
1152 See pub_tool_redir.h for details -- normally you don't need to
1153 think about this, though. */
1154
1155/* Use an extra level of macroisation so as to ensure the soname/fnname
1156 args are fully macro-expanded before pasting them together. */
/* Token-paste four fragments into one identifier.  The extra level
   of macroisation ensures soname/fnname are fully expanded first. */
#define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd

/* Build the mangled name of a wrapper for fnname in soname.  The
   "00000" is the default behaviour-equivalence-class tag; ZU vs ZZ
   presumably select unencoded vs Z-encoded name forms -- see
   pub_tool_redir.h for the authoritative scheme. */
#define I_WRAP_SONAME_FNNAME_ZU(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)

#define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname)                    \
   VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)

/* Use this macro from within a wrapper function to collect the
   context (address and possibly other info) of the original function.
   Once you have that you can then use it in one of the CALL_FN_
   macros. The type of the argument _lval is OrigFn. */
#define VALGRIND_GET_ORIG_FN(_lval)  VALGRIND_GET_NR_CONTEXT(_lval)

/* Also provide end-user facilities for function replacement, rather
   than wrapping. A replacement function differs from a wrapper in
   that it has no way to get hold of the original function being
   called, and hence no way to call onwards to it. In a replacement
   function, VALGRIND_GET_ORIG_FN always returns zero. */

/* Build the mangled name of a replacement (not wrapper) function;
   note the _vgr prefix instead of _vgw. */
#define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)

#define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname)                 \
   VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1182
1183/* Derivatives of the main macros below, for calling functions
1184 returning void. */
1185
/* Void-returning call variants: each one simply invokes the
   corresponding word-returning CALL_FN_W_* macro and discards the
   result via a throwaway volatile local. */

#define CALL_FN_v_v(fnptr)                                        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_v(_discard,fnptr); } while (0)

#define CALL_FN_v_W(fnptr, arg1)                                  \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_W(_discard,fnptr,arg1); } while (0)

#define CALL_FN_v_WW(fnptr, arg1,arg2)                            \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WW(_discard,fnptr,arg1,arg2); } while (0)

#define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3)                      \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWW(_discard,fnptr,arg1,arg2,arg3); } while (0)

#define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4)                \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_WWWW(_discard,fnptr,arg1,arg2,arg3,arg4); } while (0)

#define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5)             \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_5W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)

#define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6)        \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_6W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)

#define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7)   \
   do { volatile unsigned long _discard;                          \
        CALL_FN_W_7W(_discard,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1217
1218/* ----------------- x86-{linux,darwin,solaris} ---------------- */
1219
#if defined(PLAT_x86_linux)  ||  defined(PLAT_x86_darwin)  \
    ||  defined(PLAT_x86_solaris)

/* These regs are trashed by the hidden call. No need to mention eax
   as gcc can already see that, plus causes gcc to bomb. */
#define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save %esp in %edi (which each CALL_FN_ lists as trashed), then
   round %esp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK               \
      "movl %%esp,%%edi\n\t"               \
      "andl $0xfffffff0,%%esp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movl %%edi,%%esp\n\t"
1237
1238/* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1239 long) == 4. */
1240
/* Call *nraddr with no args, bypassing redirection.  _argvec[0]
   holds the target; %eax carries the argvec pointer in and the
   result out.  The "edi" clobber covers VALGRIND_ALIGN_STACK's
   saved copy of %esp. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg: subl 12 + one 4-byte push keeps %esp 16-byte aligned. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args: subl 8 + 8 bytes of pushes = 16. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args: subl 4 + 12 bytes of pushes = 16. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args: 16 bytes of pushes, no subl needed. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 5 args: subl 12 + 20 bytes of pushes = 32. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args: subl 8 + 24 bytes of pushes = 32. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args: subl 4 + 28 bytes of pushes = 32. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: 32 bytes of pushes, no subl needed. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 9 args: subl 12 + 36 bytes of pushes = 48. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $12, %%esp\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10 args: subl 8 + 40 bytes of pushes = 48. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $8, %%esp\n\t"                                     \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: subl 4 + 44 bytes of pushes = 48. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11)                          \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "subl $4, %%esp\n\t"                                     \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: 48 bytes of pushes, no subl needed. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                                  arg6,arg7,arg8,arg9,arg10,      \
                                  arg11,arg12)                    \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "pushl 48(%%eax)\n\t"                                    \
         "pushl 44(%%eax)\n\t"                                    \
         "pushl 40(%%eax)\n\t"                                    \
         "pushl 36(%%eax)\n\t"                                    \
         "pushl 32(%%eax)\n\t"                                    \
         "pushl 28(%%eax)\n\t"                                    \
         "pushl 24(%%eax)\n\t"                                    \
         "pushl 20(%%eax)\n\t"                                    \
         "pushl 16(%%eax)\n\t"                                    \
         "pushl 12(%%eax)\n\t"                                    \
         "pushl 8(%%eax)\n\t"                                     \
         "pushl 4(%%eax)\n\t"                                     \
         "movl (%%eax), %%eax\n\t"  /* target->%eax */            \
         VALGRIND_CALL_NOREDIR_EAX                                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1647
1648#endif /* PLAT_x86_linux || PLAT_x86_darwin || PLAT_x86_solaris */
1649
1650/* ---------------- amd64-{linux,darwin,solaris} --------------- */
1651
#if defined(PLAT_amd64_linux)  ||  defined(PLAT_amd64_darwin) \
    ||  defined(PLAT_amd64_solaris)

/* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */

/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi",       \
                            "rdi", "r8", "r9", "r10", "r11"

/* This is all pretty complex.  It's so as to make stack unwinding
   work reliably.  See bug 243270.  The basic problem is the sub and
   add of 128 of %rsp in all of the following macros.  If gcc believes
   the CFA is in %rsp, then unwinding may fail, because what's at the
   CFA is not what gcc "expected" when it constructs the CFIs for the
   places where the macros are instantiated.

   But we can't just add a CFI annotation to increase the CFA offset
   by 128, to match the sub of 128 from %rsp, because we don't know
   whether gcc has chosen %rsp as the CFA at that point, or whether it
   has chosen some other register (eg, %rbp).  In the latter case,
   adding a CFI annotation to change the CFA offset is simply wrong.

   So the solution is to get hold of the CFA using
   __builtin_dwarf_cfa(), put it in a known register, and add a
   CFI annotation to say what the register is.  We choose %rbp for
   this (perhaps perversely), because:

   (1) %rbp is already subject to unwinding.  If a new register was
       chosen then the unwinder would have to unwind it in all stack
       traces, which is expensive, and

   (2) %rbp is already subject to precise exception updates in the
       JIT.  If a new register was chosen, we'd have to have precise
       exceptions for it too, which reduces performance of the
       generated code.

   However .. one extra complication.  We can't just whack the result
   of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
   list of trashed registers at the end of the inline assembly
   fragments; gcc won't allow %rbp to appear in that list.  Hence
   instead we need to stash %rbp in %r15 for the duration of the asm,
   and say that %r15 is trashed instead.  gcc seems happy to go with
   that.

   Oh .. and this all needs to be conditionalised so that it is
   unchanged from before this commit, when compiled with older gccs
   that don't support __builtin_dwarf_cfa.  Furthermore, since
   this header file is freestanding, it has to be independent of
   config.h, and so the following conditionalisation cannot depend on
   configure time checks.

   Although it's not clear from
   'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
   this expression excludes Darwin.
   .cfi directives in Darwin assembly appear to be completely
   different and I haven't investigated how they work.

   For even more entertainment value, note we have to use the
   completely undocumented __builtin_dwarf_cfa(), which appears to
   really compute the CFA, whereas __builtin_frame_address(0) claims
   to but actually doesn't.  See
   https://bugs.kde.org/show_bug.cgi?id=243270#c47
*/
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Appends the CFA as an extra "r" input to each asm (input %2). */
#  define __FRAME_POINTER                                         \
      ,"r"(__builtin_dwarf_cfa())
/* Stash %rbp in %r15 (gcc won't let %rbp be listed as trashed),
   load the CFA into %rbp, and tell the unwinder about it. */
#  define VALGRIND_CFI_PROLOGUE                                   \
      "movq %%rbp, %%r15\n\t"                                     \
      "movq %2, %%rbp\n\t"                                        \
      ".cfi_remember_state\n\t"                                   \
      ".cfi_def_cfa rbp, 0\n\t"
#  define VALGRIND_CFI_EPILOGUE                                   \
      "movq %%r15, %%rbp\n\t"                                     \
      ".cfi_restore_state\n\t"
#else
#  define __FRAME_POINTER
#  define VALGRIND_CFI_PROLOGUE
#  define VALGRIND_CFI_EPILOGUE
#endif

/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save %rsp in %r14 (listed as trashed by each CALL_FN_), then
   round %rsp down to a 16-byte boundary. */
#define VALGRIND_ALIGN_STACK               \
      "movq %%rsp,%%r14\n\t"               \
      "andq $0xfffffffffffffff0,%%rsp\n\t"
#define VALGRIND_RESTORE_STACK             \
      "movq %%r14,%%rsp\n\t"
1742
1743/* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1744 long) == 8. */
1745
1746/* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1747 macros. In order not to trash the stack redzone, we need to drop
1748 %rsp by 128 before the hidden call, and restore afterwards. The
   nastiness is that it is only by luck that the stack still appears
1750 to be unwindable during the hidden call - since then the behaviour
1751 of any routine using this macro does not match what the CFI data
1752 says. Sigh.
1753
1754 Why is this important? Imagine that a wrapper has a stack
1755 allocated local, and passes to the hidden call, a pointer to it.
1756 Because gcc does not know about the hidden call, it may allocate
1757 that local in the redzone. Unfortunately the hidden call may then
1758 trash it before it comes to use it. So we must step clear of the
1759 redzone, for the duration of the hidden call, to make it safe.
1760
1761 Probably the same problem afflicts the other redzone-style ABIs too
1762 (ppc64-linux); but for those, the stack is
1763 self describing (none of this CFI nonsense) so at least messing
1764 with the stack pointer doesn't give a danger of non-unwindable
1765 stack. */
1766
1767#define CALL_FN_W_v(lval, orig) \
1768 do { \
1769 volatile OrigFn _orig = (orig); \
1770 volatile unsigned long _argvec[1]; \
1771 volatile unsigned long _res; \
1772 _argvec[0] = (unsigned long)_orig.nraddr; \
1773 __asm__ volatile( \
1774 VALGRIND_CFI_PROLOGUE \
1775 VALGRIND_ALIGN_STACK \
1776 "subq $128,%%rsp\n\t" \
1777 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1778 VALGRIND_CALL_NOREDIR_RAX \
1779 VALGRIND_RESTORE_STACK \
1780 VALGRIND_CFI_EPILOGUE \
1781 : /*out*/ "=a" (_res) \
1782 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1783 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1784 ); \
1785 lval = (__typeof__(lval)) _res; \
1786 } while (0)
1787
1788#define CALL_FN_W_W(lval, orig, arg1) \
1789 do { \
1790 volatile OrigFn _orig = (orig); \
1791 volatile unsigned long _argvec[2]; \
1792 volatile unsigned long _res; \
1793 _argvec[0] = (unsigned long)_orig.nraddr; \
1794 _argvec[1] = (unsigned long)(arg1); \
1795 __asm__ volatile( \
1796 VALGRIND_CFI_PROLOGUE \
1797 VALGRIND_ALIGN_STACK \
1798 "subq $128,%%rsp\n\t" \
1799 "movq 8(%%rax), %%rdi\n\t" \
1800 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1801 VALGRIND_CALL_NOREDIR_RAX \
1802 VALGRIND_RESTORE_STACK \
1803 VALGRIND_CFI_EPILOGUE \
1804 : /*out*/ "=a" (_res) \
1805 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1806 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1807 ); \
1808 lval = (__typeof__(lval)) _res; \
1809 } while (0)
1810
1811#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1812 do { \
1813 volatile OrigFn _orig = (orig); \
1814 volatile unsigned long _argvec[3]; \
1815 volatile unsigned long _res; \
1816 _argvec[0] = (unsigned long)_orig.nraddr; \
1817 _argvec[1] = (unsigned long)(arg1); \
1818 _argvec[2] = (unsigned long)(arg2); \
1819 __asm__ volatile( \
1820 VALGRIND_CFI_PROLOGUE \
1821 VALGRIND_ALIGN_STACK \
1822 "subq $128,%%rsp\n\t" \
1823 "movq 16(%%rax), %%rsi\n\t" \
1824 "movq 8(%%rax), %%rdi\n\t" \
1825 "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1826 VALGRIND_CALL_NOREDIR_RAX \
1827 VALGRIND_RESTORE_STACK \
1828 VALGRIND_CFI_EPILOGUE \
1829 : /*out*/ "=a" (_res) \
1830 : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1831 : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1832 ); \
1833 lval = (__typeof__(lval)) _res; \
1834 } while (0)
1835
/* amd64: call a word-returning function of 3 word-size args.
   arg1..arg3 -> %rdi,%rsi,%rdx (SysV AMD64 ABI); target taken from
   _argvec[0].  "subq $128" skips the 128-byte red zone. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1862
/* amd64: call a word-returning function of 4 word-size args.
   arg1..arg4 -> %rdi,%rsi,%rdx,%rcx (SysV AMD64 ABI); target taken
   from _argvec[0].  "subq $128" skips the 128-byte red zone. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1891
/* amd64: call a word-returning function of 5 word-size args.
   arg1..arg5 -> %rdi,%rsi,%rdx,%rcx,%r8 (SysV AMD64 ABI); target
   taken from _argvec[0].  "subq $128" skips the 128-byte red zone. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1922
/* amd64: call a word-returning function of 6 word-size args — the
   last case where everything fits in registers.  arg1..arg6 ->
   %rdi,%rsi,%rdx,%rcx,%r8,%r9 (SysV AMD64 ABI); target taken from
   _argvec[0].  "subq $128" skips the 128-byte red zone. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1955
/* amd64: 7 word-size args — 6 in registers, the 7th pushed on the
   stack.  $136 is used here instead of $128 so that after the single
   8-byte push, %rsp has dropped by 144 (a multiple of 16), keeping
   the 16-byte stack alignment established by VALGRIND_ALIGN_STACK
   while still clearing the 128-byte red zone. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1991
/* amd64: 8 word-size args — 6 in registers, arg7/arg8 pushed in
   reverse order so arg7 ends at the lower address.  $128 + two
   8-byte pushes = 144, keeping %rsp 16-byte aligned at the call. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2029
/* amd64: 9 word-size args — 6 in registers, args 7..9 pushed in
   reverse order.  $136 + three 8-byte pushes = 160, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2069
/* amd64: 10 word-size args — 6 in registers, args 7..10 pushed in
   reverse order.  $128 + four 8-byte pushes = 160, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2111
/* amd64: 11 word-size args — 6 in registers, args 7..11 pushed in
   reverse order.  $136 + five 8-byte pushes = 176, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2155
/* amd64: 12 word-size args — 6 in registers, args 7..12 pushed in
   reverse order.  $128 + six 8-byte pushes = 176, keeping %rsp
   16-byte aligned at the call. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"                                    \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/ "=a" (_res)                                    \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2201
2202#endif /* PLAT_amd64_linux || PLAT_amd64_darwin || PLAT_amd64_solaris */
2203
2204/* ------------------------ ppc32-linux ------------------------ */
2205
2206#if defined(PLAT_ppc32_linux)
2207
2208/* This is useful for finding out about the on-stack stuff:
2209
2210 extern int f9 ( int,int,int,int,int,int,int,int,int );
2211 extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2212 extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2213 extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2214
2215 int g9 ( void ) {
2216 return f9(11,22,33,44,55,66,77,88,99);
2217 }
2218 int g10 ( void ) {
2219 return f10(11,22,33,44,55,66,77,88,99,110);
2220 }
2221 int g11 ( void ) {
2222 return f11(11,22,33,44,55,66,77,88,99,110,121);
2223 }
2224 int g12 ( void ) {
2225 return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2226 }
2227*/
2228
2229/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2230
2231/* These regs are trashed by the hidden call. */
/* Clobber list named in every ppc32 CALL_FN_* asm below: the link,
   count and fixed-point-exception registers, all condition-register
   fields, and GPRs r0, r2-r13, so gcc keeps no live values in them
   across the hidden call.  NOTE(review): the inclusion of r2/r13
   follows this file's ppc32 convention — confirm against the ppc32
   ABI variant in use. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2237
2238/* Macros to save and align the stack before making a function
2239 call and restore it afterwards as gcc may not keep the stack
2240 pointer aligned if it doesn't realise calls are being made
2241 to other functions. */
2242
/* Save the stack pointer in r28 (callee-saved; also listed in every
   clobber list below), then "rlwinm 1,1,0,0,27" clears the low 4
   bits of r1, forcing 16-byte alignment.  RESTORE puts the saved
   value back afterwards. */
#define VALGRIND_ALIGN_STACK                                      \
      "mr 28,1\n\t"                                               \
      "rlwinm 1,1,0,0,27\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "mr 1,28\n\t"
2248
2249/* These CALL_FN_ macros assume that on ppc32-linux,
2250 sizeof(unsigned long) == 4. */
2251
/* ppc32: call a word-returning function of no args.  &_argvec[0] is
   copied into r11, the target address is loaded from slot 0, the
   call is made via the no-redirect-r11 sequence, and the result is
   taken from r3 (the ppc return-value register). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2271
/* ppc32: 1 word arg.  Args live in 4-byte slots _argvec[1..]; arg1 is
   loaded from 4(r11) into r3, the target from 0(r11) into r11, and
   the result is returned in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2293
/* ppc32: 2 word args.  arg1->r3, arg2->r4 from 4-byte slots at
   4(r11) and 8(r11); result in r3. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2317
/* ppc32: 3 word args.  arg1..arg3 -> r3..r5 from 4-byte slots
   4..12(r11); result in r3. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2343
/* ppc32: 4 word args.  arg1..arg4 -> r3..r6 from 4-byte slots
   4..16(r11); result in r3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2371
/* ppc32: 5 word args.  arg1..arg5 -> r3..r7 from 4-byte slots
   4..20(r11); result in r3. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2401
/* ppc32: 6 word args.  arg1..arg6 -> r3..r8 from 4-byte slots
   4..24(r11); result in r3. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2433
/* ppc32: 7 word args.  arg1..arg7 -> r3..r9 from 4-byte slots
   4..28(r11); result in r3. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2468
/* ppc32: 8 word args — the last case that fits entirely in the
   argument registers r3..r10 (slots 4..32(r11)); result in r3. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2505
/* ppc32: 9 word args — 8 in r3..r10 plus one stack arg.  r1 is
   dropped by 16 (preserving 16-byte alignment) and arg9 is stored at
   8(r1), the start of the outgoing-parameter area.  r3 is used as a
   scratch register for the store before being loaded with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2548
/* ppc32: 10 word args — 8 in r3..r10 plus two stack args.  r1 drops
   by 16; arg9 and arg10 go to 8(r1) and 12(r1) in the
   outgoing-parameter area, stored via r3 as scratch before the
   register args are loaded. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2595
/* ppc32: 11 word args — 8 in r3..r10 plus three stack args.  r1
   drops by 32 (three 4-byte slots would only need 12, but 32 keeps
   r1 16-byte aligned); args 9..11 go to 8..16(r1), stored via r3 as
   scratch before the register args are loaded. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2646
/* ppc32: 12 word args — 8 in r3..r10 plus four stack args.  r1
   drops by 32 (keeps 16-byte alignment); args 9..12 go to 8..20(r1),
   stored via r3 as scratch before the register args are loaded. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12) \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2701
2702#endif /* PLAT_ppc32_linux */
2703
2704/* ------------------------ ppc64-linux ------------------------ */
2705
2706#if defined(PLAT_ppc64be_linux)
2707
2708/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2709
2710/* These regs are trashed by the hidden call. */
/* Clobber list named in every ppc64be CALL_FN_* asm below: link,
   count and fixed-point-exception registers, all CR fields, and GPRs
   r0, r3-r13.  Unlike the ppc32 list, r2 (the TOC pointer) is NOT
   listed here — the call sequences save and restore it manually. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
2716
2717/* Macros to save and align the stack before making a function
2718 call and restore it afterwards as gcc may not keep the stack
2719 pointer aligned if it doesn't realise calls are being made
2720 to other functions. */
2721
/* Save the stack pointer in r28 (callee-saved; also listed in every
   clobber list below), then "rldicr 1,1,0,59" clears the low 4 bits
   of r1, forcing 16-byte alignment.  RESTORE puts the saved value
   back afterwards. */
#define VALGRIND_ALIGN_STACK                                      \
      "mr 28,1\n\t"                                               \
      "rldicr 1,1,0,59\n\t"
#define VALGRIND_RESTORE_STACK                                    \
      "mr 1,28\n\t"
2727
2728/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2729 long) == 8. */
2730
/* ppc64be: call a word-returning function of no args.  %1 is
   &_argvec[2], so _argvec[1] (the callee's TOC pointer, _orig.r2)
   sits at -8(r11) and _argvec[0] at -16(r11).  The caller's r2 is
   saved into _argvec[0], the callee's TOC installed from -8(r11),
   the target entered via the no-redirect-r11 sequence, and r2
   restored afterwards (r11 is reloaded first because the call
   clobbers it).  Result comes back in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2756
/* ppc64be: 1 word arg.  Same TOC save/switch/restore protocol as
   CALL_FN_W_v; args live in 8-byte slots starting at 8(r11), so
   arg1 is loaded from 8(r11) into r3.  Result in r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2784
/* ppc64be: 2 word args.  Same TOC save/switch/restore protocol;
   arg1->r3 from 8(r11), arg2->r4 from 16(r11).  Result in r3. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn _orig = (orig);                             \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld   2,-8(11)\n\t"  /* use nraddr's tocptr */           \
         "ld   3, 8(11)\n\t"  /* arg1->r3 */                      \
         "ld   4, 16(11)\n\t" /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2814
/* Call the original (non-redirected) function described by ORIG with
   three word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 big-endian).  r11 points at _argvec[2]; target entry
   at 0(r11), target TOC at -8(r11), caller's TOC (r2) parked at
   -16(r11) across the call.  args 1-3 go in r3-r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2846
/* Call the original (non-redirected) function described by ORIG with
   four word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 big-endian).  r11 points at _argvec[2]; target entry at
   0(r11), target TOC at -8(r11), caller's TOC (r2) parked at
   -16(r11) across the call.  args 1-4 go in r3-r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2880
/* Call the original (non-redirected) function described by ORIG with
   five word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 big-endian).  r11 points at _argvec[2]; target entry at
   0(r11), target TOC at -8(r11), caller's TOC (r2) parked at
   -16(r11) across the call.  args 1-5 go in r3-r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2916
/* Call the original (non-redirected) function described by ORIG with
   six word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 big-endian).  r11 points at _argvec[2]; target entry at
   0(r11), target TOC at -8(r11), caller's TOC (r2) parked at
   -16(r11) across the call.  args 1-6 go in r3-r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2954
/* Call the original (non-redirected) function described by ORIG with
   seven word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 big-endian).  r11 points at _argvec[2]; target entry
   at 0(r11), target TOC at -8(r11), caller's TOC (r2) parked at
   -16(r11) across the call.  args 1-7 go in r3-r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2995
/* Call the original (non-redirected) function described by ORIG with
   eight word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 big-endian).  r11 points at _argvec[2]; target entry
   at 0(r11), target TOC at -8(r11), caller's TOC (r2) parked at
   -16(r11) across the call.  args 1-8 fill the full register set
   r3-r10. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3038
/* Call the original (non-redirected) function described by ORIG with
   nine word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 big-endian).  args 1-8 go in r3-r10; arg9 no longer fits in
   registers, so the (already aligned) stack frame is grown by 128
   bytes and arg9 is stored into the callee-visible parameter area at
   112(r1) before the call.  VALGRIND_RESTORE_STACK undoes the frame
   expansion. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3087
/* Call the original (non-redirected) function described by ORIG with
   ten word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 big-endian).  args 1-8 go in r3-r10; args 9-10 overflow to
   the stack: the frame is grown by 128 bytes and they are stored at
   112(r1) and 120(r1) before the call. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3140
/* Call the original (non-redirected) function described by ORIG with
   eleven word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 big-endian).  args 1-8 go in r3-r10; args 9-11
   overflow to the stack: the frame is grown by 144 bytes (keeping
   16-byte alignment) and they are stored at 112/120/128(r1). */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11)     \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3197
/* Call the original (non-redirected) function described by ORIG with
   twelve word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 big-endian).  args 1-8 go in r3-r10; args 9-12
   overflow to the stack: the frame is grown by 144 bytes and they
   are stored at 112/120/128/136(r1). */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10,arg11,arg12)\
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld 2,-8(11)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-144\n\t"  /* expand stack frame */            \
         /* arg12 */                                              \
         "ld 3,96(11)\n\t"                                        \
         "std 3,136(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(11)\n\t"                                        \
         "std 3,128(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(11)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(11)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* args1-8 */                                            \
         "ld 3, 8(11)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(11)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(11)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(11)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(11)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(11)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(11)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(11)\n\t"  /* arg8->r10 */                     \
         "ld 11, 0(11)\n\t"   /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"       /* re-point r11 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3258
3259#endif /* PLAT_ppc64be_linux */
3260
3261/* ------------------------- ppc64le-linux ----------------------- */
3262#if defined(PLAT_ppc64le_linux)
3263
3264/* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
3265
/* These regs are trashed by the hidden call.  Note r2 (the TOC
   pointer) is not listed: the CALL_FN_ macros below save and restore
   it explicitly around the call. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",         \
   "r11", "r12", "r13"
3272
/* Macros to save and align the stack before making a function
   call and restore it afterwards as gcc may not keep the stack
   pointer aligned if it doesn't realise calls are being made
   to other functions. */

/* Save the current SP in r28 (which is why "r28" appears in the
   clobber lists below) and round SP down to a 16-byte boundary
   (rldicr ...,0,59 clears the low 4 bits). */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Restore the SP saved by VALGRIND_ALIGN_STACK. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
3283
3284/* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
3285 long) == 8. */
3286
/* Call the original (non-redirected) function described by ORIG with
   no arguments, assigning the word-sized result to LVAL (ppc64
   little-endian).  r12 points at _argvec[2]: the target entry
   address is at 0(r12) (ELFv2 convention: entry address in r12), the
   target's TOC at -8(r12), and the caller's TOC (r2) is parked at
   -16(r12) (= _argvec[0]) across the call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3312
/* Call the original (non-redirected) function described by ORIG with
   one word-sized argument, assigning the word-sized result to LVAL
   (ppc64 little-endian).  r12 points at _argvec[2]; target entry at
   0(r12), target TOC at -8(r12), caller's TOC (r2) parked at
   -16(r12) across the call.  arg1 is loaded into r3. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3340
/* Call the original (non-redirected) function described by ORIG with
   two word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 little-endian).  r12 points at _argvec[2]; target entry at
   0(r12), target TOC at -8(r12), caller's TOC (r2) parked at
   -16(r12) across the call.  args 1-2 go in r3-r4. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3370
/* Call the original (non-redirected) function described by ORIG with
   three word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 little-endian).  r12 points at _argvec[2]; target
   entry at 0(r12), target TOC at -8(r12), caller's TOC (r2) parked
   at -16(r12) across the call.  args 1-3 go in r3-r5. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3402
/* Call the original (non-redirected) function described by ORIG with
   four word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 little-endian).  r12 points at _argvec[2]; target entry at
   0(r12), target TOC at -8(r12), caller's TOC (r2) parked at
   -16(r12) across the call.  args 1-4 go in r3-r6. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3436
/* Call the original (non-redirected) function described by ORIG with
   five word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 little-endian).  r12 points at _argvec[2]; target entry at
   0(r12), target TOC at -8(r12), caller's TOC (r2) parked at
   -16(r12) across the call.  args 1-5 go in r3-r7. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+5];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3472
/* Call the original (non-redirected) function described by ORIG with
   six word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 little-endian).  r12 points at _argvec[2]; target entry at
   0(r12), target TOC at -8(r12), caller's TOC (r2) parked at
   -16(r12) across the call.  args 1-6 go in r3-r8. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+6];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3510
/* Call the original (non-redirected) function described by ORIG with
   seven word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 little-endian).  r12 points at _argvec[2]; target
   entry at 0(r12), target TOC at -8(r12), caller's TOC (r2) parked
   at -16(r12) across the call.  args 1-7 go in r3-r9. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7)                            \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+7];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3551
/* Call the original (non-redirected) function described by ORIG with
   eight word-sized arguments, assigning the word-sized result to
   LVAL (ppc64 little-endian).  r12 points at _argvec[2]; target
   entry at 0(r12), target TOC at -8(r12), caller's TOC (r2) parked
   at -16(r12) across the call.  args 1-8 fill the full register set
   r3-r10. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+8];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3594
/* Call the original (non-redirected) function described by ORIG with
   nine word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 little-endian).  args 1-8 go in r3-r10; arg9 no longer fits
   in registers, so the (already aligned) stack frame is grown by 128
   bytes and arg9 is stored into the callee-visible parameter area at
   96(r1) before the call (ELFv2 frame header is smaller than
   ELFv1's, hence 96 here vs 112 in the BE variant). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                                 arg7,arg8,arg9)                  \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+9];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3643
/* Call the original (non-redirected) function described by ORIG with
   ten word-sized arguments, assigning the word-sized result to LVAL
   (ppc64 little-endian).  args 1-8 go in r3-r10; args 9-10 overflow
   to the stack: the frame is grown by 128 bytes and they are stored
   at 96(r1) and 104(r1) before the call. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                                  arg7,arg8,arg9,arg10)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+10];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t"  /* save tocptr */                   \
         "ld 2,-8(12)\n\t"    /* use nraddr's tocptr */           \
         "addi 1,1,-128\n\t"  /* expand stack frame */            \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t"    /* arg1->r3 */                      \
         "ld 4, 16(12)\n\t"   /* arg2->r4 */                      \
         "ld 5, 24(12)\n\t"   /* arg3->r5 */                      \
         "ld 6, 32(12)\n\t"   /* arg4->r6 */                      \
         "ld 7, 40(12)\n\t"   /* arg5->r7 */                      \
         "ld 8, 48(12)\n\t"   /* arg6->r8 */                      \
         "ld 9, 56(12)\n\t"   /* arg7->r9 */                      \
         "ld 10, 64(12)\n\t"  /* arg8->r10 */                     \
         "ld 12, 0(12)\n\t"   /* target->r12 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"       /* re-point r12 at _argvec[2] */    \
         "mr %0,3\n\t"        /* result (r3) -> _res */           \
         "ld 2,-16(12)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3696
/* Call an 11-arg function via the non-redirected entry point.
   Args 1-8 in r3..r10; args 9-11 are stored at 96(1)..112(1)
   inside a 144-byte frame expansion.  Same TOC save/restore
   dance as the other ppc64le variants. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+11];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3753
/* Call a 12-arg function via the non-redirected entry point.
   Args 1-8 in r3..r10; args 9-12 are stored at 96(1)..120(1)
   inside a 144-byte frame expansion.  Note the stacked args are
   written highest-numbered first so r3 can serve as the shuttle
   register before it is finally loaded with arg1. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+12];                       \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      _argvec[2+5] = (unsigned long)arg5;                         \
      _argvec[2+6] = (unsigned long)arg6;                         \
      _argvec[2+7] = (unsigned long)arg7;                         \
      _argvec[2+8] = (unsigned long)arg8;                         \
      _argvec[2+9] = (unsigned long)arg9;                         \
      _argvec[2+10] = (unsigned long)arg10;                       \
      _argvec[2+11] = (unsigned long)arg11;                       \
      _argvec[2+12] = (unsigned long)arg12;                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 12,%1\n\t"                                           \
         "std 2,-16(12)\n\t" /* save tocptr */                    \
         "ld 2,-8(12)\n\t" /* use nraddr's tocptr */              \
         "addi 1,1,-144\n\t" /* expand stack frame */             \
         /* arg12 */                                              \
         "ld 3,96(12)\n\t"                                        \
         "std 3,120(1)\n\t"                                       \
         /* arg11 */                                              \
         "ld 3,88(12)\n\t"                                        \
         "std 3,112(1)\n\t"                                       \
         /* arg10 */                                              \
         "ld 3,80(12)\n\t"                                        \
         "std 3,104(1)\n\t"                                       \
         /* arg9 */                                               \
         "ld 3,72(12)\n\t"                                        \
         "std 3,96(1)\n\t"                                        \
         /* args1-8 */                                            \
         "ld 3, 8(12)\n\t" /* arg1->r3 */                         \
         "ld 4, 16(12)\n\t" /* arg2->r4 */                        \
         "ld 5, 24(12)\n\t" /* arg3->r5 */                        \
         "ld 6, 32(12)\n\t" /* arg4->r6 */                        \
         "ld 7, 40(12)\n\t" /* arg5->r7 */                        \
         "ld 8, 48(12)\n\t" /* arg6->r8 */                        \
         "ld 9, 56(12)\n\t" /* arg7->r9 */                        \
         "ld 10, 64(12)\n\t" /* arg8->r10 */                      \
         "ld 12, 0(12)\n\t" /* target->r12 */                     \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R12                  \
         "mr 12,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(12)\n\t" /* restore tocptr */                  \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[2])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3814
3815#endif /* PLAT_ppc64le_linux */
3816
3817/* ------------------------- arm-linux ------------------------- */
3818
3819#if defined(PLAT_arm_linux)
3820
/* These regs are trashed by the hidden call: r0-r3 are the
   argument/result registers, r4 is used below as a scratch/target
   register, and r12/r14 (IP/LR) are call-clobbered. */
#define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4", "r12", "r14"
3823
3824/* Macros to save and align the stack before making a function
3825 call and restore it afterwards as gcc may not keep the stack
3826 pointer aligned if it doesn't realise calls are being made
3827 to other functions. */
3828
3829/* This is a bit tricky. We store the original stack pointer in r10
3830 as it is callee-saves. gcc doesn't allow the use of r11 for some
3831 reason. Also, we can't directly "bic" the stack pointer in thumb
3832 mode since r13 isn't an allowed register number in that context.
3833 So use r4 as a temporary, since that is about to get trashed
3834 anyway, just after each use of this macro. Side effect is we need
3835 to be very careful about any future changes, since
3836 VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
#define VALGRIND_ALIGN_STACK \
      "mov r10, sp\n\t" /* remember original sp in callee-saved r10 */ \
      "mov r4, sp\n\t" \
      "bic r4, r4, #7\n\t" /* round down to an 8-byte boundary */ \
      "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: recover the sp saved in r10. */
#define VALGRIND_RESTORE_STACK \
      "mov sp, r10\n\t"
3844
3845/* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3846 long) == 4. */
3847
/* 0 args: just load the target into r4 (where
   VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 expects it) and call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg in r0; target in r4; result read back from r0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args in r0,r1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args in r0-r2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args in r0-r3 (last arity that needs no stack space). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
3962
/* 5 args: args 1-4 in r0-r3, arg5 pushed on the stack.  The extra
   "sub sp, sp, #4" pads the single pushed word to 8 bytes so sp
   stays 8-aligned (VALGRIND_ALIGN_STACK established that). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args: args 5-6 pushed (two words, already 8-aligned). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args: args 5-7 pushed, padded with "sub sp, #4" to 8 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args: args 5-8 pushed (four words, already 8-aligned). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "push {r0, r1, r2, r3} \n\t"                             \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4098
/* 9 args: args 5-9 pushed in one go (r4 is reloaded with the
   target only after the stacked args are done).  sub #4 + 5-word
   push = 24 bytes, keeping sp 8-aligned. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 10 args: arg10 pushed first (it lives highest on the stack),
   then args 5-9; six words total, 8-aligned without padding. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "push {r0} \n\t"                                         \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 11 args: args 10-11 pushed (plus #4 pad), then args 5-9. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "sub sp, sp, #4 \n\t"                                    \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "push {r0, r1} \n\t"                                     \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 12 args: args 10-12 pushed, then args 5-9; eight words, so sp
   stays 8-aligned without a pad. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr r0, [%1, #40] \n\t"                                 \
         "ldr r1, [%1, #44] \n\t"                                 \
         "ldr r2, [%1, #48] \n\t"                                 \
         "push {r0, r1, r2} \n\t"                                 \
         "ldr r0, [%1, #20] \n\t"                                 \
         "ldr r1, [%1, #24] \n\t"                                 \
         "ldr r2, [%1, #28] \n\t"                                 \
         "ldr r3, [%1, #32] \n\t"                                 \
         "ldr r4, [%1, #36] \n\t"                                 \
         "push {r0, r1, r2, r3, r4} \n\t"                         \
         "ldr r0, [%1, #4] \n\t"                                  \
         "ldr r1, [%1, #8] \n\t"                                  \
         "ldr r2, [%1, #12] \n\t"                                 \
         "ldr r3, [%1, #16] \n\t"                                 \
         "ldr r4, [%1] \n\t"  /* target->r4 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, r0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4273
4274#endif /* PLAT_arm_linux */
4275
4276/* ------------------------ arm64-linux ------------------------ */
4277
4278#if defined(PLAT_arm64_linux)
4279
/* These regs are trashed by the hidden call.  The list covers
   x0-x20 and x30 (LR) plus the entire vector file v0-v31; x8 is
   additionally used below to carry the call target. */
#define __CALLER_SAVED_REGS \
     "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9",   \
     "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17",      \
     "x18", "x19", "x20", "x30",                                  \
     "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9",  \
     "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17",      \
     "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25",      \
     "v26", "v27", "v28", "v29", "v30", "v31"

/* x21 is callee-saved, so we can use it to save and restore SP around
   the hidden call. */
#define VALGRIND_ALIGN_STACK \
    "mov x21, sp\n\t" \
    "bic sp, x21, #15\n\t" /* round sp down to a 16-byte boundary */
/* Undo VALGRIND_ALIGN_STACK: recover the sp saved in x21. */
#define VALGRIND_RESTORE_STACK \
    "mov sp, x21\n\t"
4297
4298/* These CALL_FN_ macros assume that on arm64-linux,
4299 sizeof(unsigned long) == 8. */
4300
/* 0 args: just load the target into x8 (where
   VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 expects it) and call. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 1 arg in x0; result read back from x0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 2 args in x0,x1. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 3 args in x0-x2. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0\n"                                           \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 4 args in x0-x3. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4415
/* 5 args in x0-x4 (AArch64 passes up to 8 args in registers, so
   arities 5-8 still need no stack space). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 6 args in x0-x5. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 7 args in x0-x6. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)

/* 8 args in x0-x7 (last register-only arity). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "ldr x0, [%1, #8] \n\t"                                  \
         "ldr x1, [%1, #16] \n\t"                                 \
         "ldr x2, [%1, #24] \n\t"                                 \
         "ldr x3, [%1, #32] \n\t"                                 \
         "ldr x4, [%1, #40] \n\t"                                 \
         "ldr x5, [%1, #48] \n\t"                                 \
         "ldr x6, [%1, #56] \n\t"                                 \
         "ldr x7, [%1, #64] \n\t"                                 \
         "ldr x8, [%1] \n\t"  /* target->x8 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8                   \
         VALGRIND_RESTORE_STACK                                   \
         "mov %0, x0"                                             \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
4545
/* Call an original (non-redirected) 9-word-arg function.  Args 1-8 go
   in x0-x7 (loaded from _argvec[1..8]); arg 9 is spilled to [sp,#0]
   inside a 0x20-byte reservation, which keeps sp 16-byte aligned.
   The target address travels in x8 ("target->x8" protocol) and the
   word result is read back from x0.  x21 appears in the clobber list
   -- presumably used by VALGRIND_ALIGN_STACK/VALGRIND_RESTORE_STACK
   to save/restore sp; confirm against those macros (not visible
   here). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x20 \n\t" /* one stack arg, padded to 16 */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" /* arg9 */ \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4585
/* As CALL_FN_W_9W, for 10 word args: args 1-8 in x0-x7, args 9 and 10
   at [sp,#0] and [sp,#8] of a 0x20-byte (16-byte-aligned) stack
   reservation.  Target address in x8, result from x0. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x20 \n\t" /* two stack args */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" /* arg9 */ \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1, #80] \n\t" /* arg10 */ \
         "str x8, [sp, #8] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4628
/* As CALL_FN_W_9W, for 11 word args: args 1-8 in x0-x7, args 9-11 at
   [sp,#0], [sp,#8], [sp,#16] of a 0x30-byte (16-byte-aligned) stack
   reservation.  Target address in x8, result from x0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x30 \n\t" /* three stack args, padded to 16 */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" /* arg9 */ \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1, #80] \n\t" /* arg10 */ \
         "str x8, [sp, #8] \n\t" \
         "ldr x8, [%1, #88] \n\t" /* arg11 */ \
         "str x8, [sp, #16] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4674
/* As CALL_FN_W_9W, for 12 word args: args 1-8 in x0-x7, args 9-12 at
   [sp,#0]..[sp,#24] of a 0x30-byte (16-byte-aligned) stack
   reservation.  Target address in x8, result from x0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                      arg7,arg8,arg9,arg10,arg11, \
                      arg12) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      _argvec[10] = (unsigned long)(arg10); \
      _argvec[11] = (unsigned long)(arg11); \
      _argvec[12] = (unsigned long)(arg12); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "sub sp, sp, #0x30 \n\t" /* four stack args */ \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x4, [%1, #40] \n\t" \
         "ldr x5, [%1, #48] \n\t" \
         "ldr x6, [%1, #56] \n\t" \
         "ldr x7, [%1, #64] \n\t" \
         "ldr x8, [%1, #72] \n\t" /* arg9 */ \
         "str x8, [sp, #0] \n\t" \
         "ldr x8, [%1, #80] \n\t" /* arg10 */ \
         "str x8, [sp, #8] \n\t" \
         "ldr x8, [%1, #88] \n\t" /* arg11 */ \
         "str x8, [sp, #16] \n\t" \
         "ldr x8, [%1, #96] \n\t" /* arg12 */ \
         "str x8, [sp, #24] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4724
4725#endif /* PLAT_arm64_linux */
4726
4727/* ------------------------- s390x-linux ------------------------- */
4728
4729#if defined(PLAT_s390x_linux)
4730
4731/* Similar workaround as amd64 (see above), but we use r11 as frame
4732 pointer and save the old r11 in r7. r11 might be used for
4733 argvec, therefore we copy argvec in r1 since r1 is clobbered
4734 after the call anyway. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* CFI variant: copy the argvec pointer into r1 (r11 may be holding
   it), stash the old frame pointer r11 in r7, point r11 at the
   caller's CFA, and emit unwind directives so the unwinder can walk
   through the hidden call.  The epilogue undoes both. */
# define __FRAME_POINTER \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE \
   ".cfi_remember_state\n\t" \
   "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
   "lgr 7,11\n\t" \
   "lgr 11,%2\n\t" \
   ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE \
   "lgr 11, 7\n\t" \
   ".cfi_restore_state\n\t"
#else
/* No CFI support: only the argvec-pointer copy into r1 is needed. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE \
   "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif
4753
4754/* Nb: On s390 the stack pointer is properly aligned *at all times*
4755 according to the s390 GCC maintainer. (The ABI specification is not
4756 precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
4757 VALGRIND_RESTORE_STACK are not defined here. */
4758
4759/* These regs are trashed by the hidden call. Note that we overwrite
4760 r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
4761 function a proper return address. All others are ABI defined call
4762 clobbers. */
#if defined(__VX__) || defined(__S390_VX__)
/* With the vector facility, the full vector register file is listed
   as clobbered in addition to the call-clobbered GPRs (0-5) and the
   rewritten return-address register r14 (see comment above). */
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
                            "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", \
                            "v8", "v9", "v10", "v11", "v12", "v13", "v14", "v15", \
                            "v16", "v17", "v18", "v19", "v20", "v21", "v22", "v23", \
                            "v24", "v25", "v26", "v27", "v28", "v29", "v30", "v31"
#else
/* No vector facility: GPRs 0-5, r14, and FPRs f0-f7 are clobbered. */
#define __CALLER_SAVED_REGS "0", "1", "2", "3", "4", "5", "14", \
                            "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7"
#endif
4773
4774/* Nb: Although r11 is modified in the asm snippets below (inside
4775 VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
4776 two reasons:
4777 (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
4778 modified
4779 (2) GCC will complain that r11 cannot appear inside a clobber section,
4780 when compiled with -O -fno-omit-frame-pointer
4781 */
4782
/* Call an original (non-redirected) zero-arg function.  A 160-byte
   frame is allocated (the bias used throughout this section --
   presumably the s390x ABI register save area; confirm against the
   ABI supplement), the target address is loaded into r1 for the
   noredir call, and the word result is read back from r2.  r7 is
   clobbered because the CFI prologue uses it to save r11. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 1, 0(1)\n\t" /* target->r1 */ \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" /* result from r2 */ \
         : /*out*/ "=d" (_res) \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4803
4804/* The call abi has the arguments in r2-r6 and stack */
/* One word arg: arg1 goes in r2; target in r1; result from r2.  Note
   arg registers are loaded via r1 (the argvec copy made by
   VALGRIND_CFI_PROLOGUE) before r1 is overwritten with the target. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" /* arg1 -> r2 */ \
         "lg 1, 0(1)\n\t" /* target -> r1, last */ \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4827
/* Two word args: arg1/arg2 in r2/r3; target in r1; result from r2. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4852
/* Three word args: arg1-arg3 in r2-r4; target in r1; result from r2. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4879
/* Four word args: arg1-arg4 in r2-r5; target in r1; result from r2. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4908
/* Five word args: arg1-arg5 in r2-r6; target in r1; result from r2.
   r6 joins the clobber list from here on since it now carries an
   argument. */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4939
/* Six word args: arg1-arg5 in r2-r6; arg6 is copied (mvc) to the
   outgoing-argument slot at 160(r15) of the enlarged 168-byte frame.
   Target in r1; result from r2. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-168\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" /* arg6 -> stack */ \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,168\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4973
/* Seven word args: arg1-arg5 in r2-r6; arg6/arg7 copied to the
   outgoing slots at 160/168(r15) of a 176-byte frame.  Target in r1;
   result from r2. */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-176\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,176\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5009
/* Eight word args: arg1-arg5 in r2-r6; arg6-arg8 copied to the
   outgoing slots at 160..176(r15) of a 184-byte frame.  Target in
   r1; result from r2. */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-184\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,184\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5047
/* Nine word args: arg1-arg5 in r2-r6; arg6-arg9 copied to the
   outgoing slots at 160..184(r15) of a 192-byte frame.  Target in
   r1; result from r2. */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8, arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-192\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,192\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5087
/* Ten word args: arg1-arg5 in r2-r6; arg6-arg10 copied to the
   outgoing slots at 160..192(r15) of a 200-byte frame.  Target in
   r1; result from r2. */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-200\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,200\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5129
/* Eleven word args: arg1-arg5 in r2-r6; arg6-arg11 copied to the
   outgoing slots at 160..200(r15) of a 208-byte frame.  Target in
   r1; result from r2. */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-208\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,208\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5173
/* Twelve word args: arg1-arg5 in r2-r6; arg6-arg12 copied to the
   outgoing slots at 160..208(r15) of a 216-byte frame.  Target in
   r1; result from r2. */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-216\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "mvc 208(8,15), 96(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "aghi 15,216\n\t" \
         VALGRIND_CFI_EPILOGUE \
         "lgr %0, 2\n\t" \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5219
5220
5221#endif /* PLAT_s390x_linux */
5222
5223/* ------------------------- mips32-linux ----------------------- */
5224
5225#if defined(PLAT_mips32_linux)
5226
5227/* These regs are trashed by the hidden call. */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31" /* i.e. v0-v1, a0-a3, t0-t7, t8-t9, ra */
5231
5232/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
5233 long) == 4. */
5234
/* Call an original (non-redirected) zero-arg function on mips32.
   gp ($28) and ra ($31) are saved in an 8-byte scratch area and
   restored afterwards; 16 bytes of outgoing-argument space are
   reserved (presumably the o32 minimum arg-save area -- confirm
   against the o32 ABI).  The target address goes in $25/t9
   ("target->t9" protocol) and the result is read from $2/v0. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" /* save area for gp/ra */ \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16\n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" /* result from v0 */ \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5259
/* One word arg: arg1 in $4 (a0); gp/ra saved around the call; target
   in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" /* arg1*/ \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5286
/* Two word args: arg1/arg2 in $4/$5 (a0/a1); gp/ra saved around the
   call; target in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5315
/* Three word args: arg1-arg3 in $4-$6 (a0-a2); gp/ra saved around
   the call; target in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5346
/* Four word args: arg1-arg4 in $4-$7 (a0-a3); gp/ra saved around the
   call; target in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5379
/* Five word args: arg1-arg4 in $4-$7; arg5 is loaded into $4 first,
   then spilled to 16($29) of a 24-byte outgoing area, before $4 is
   reloaded with arg1.  gp/ra saved around the call; target in $25
   (t9); result from $2 (v0). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" /* arg5, via $4 as scratch */ \
         "subu $29, $29, 24\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 24 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Six word args: arg1-arg4 in $4-$7; arg5/arg6 spilled via $4 to
   16($29)/20($29) of a 32-byte outgoing area.  gp/ra saved around
   the call; target in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" /* arg5 */ \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" /* arg6 */ \
         "nop\n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5454
/* Seven word args: arg1-arg4 in $4-$7; arg5-arg7 spilled via $4 to
   16..24($29) of a 32-byte outgoing area.  gp/ra saved around the
   call; target in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" /* arg5 */ \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" /* arg6 */ \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" /* arg7 */ \
         "sw $4, 24($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5497
/* Eight word args: arg1-arg4 in $4-$7; arg5-arg8 spilled via $4 to
   16..28($29) of a 40-byte outgoing area.  gp/ra saved around the
   call; target in $25 (t9); result from $2 (v0). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" /* arg5 */ \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" /* arg6 */ \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" /* arg7 */ \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" /* arg8 */ \
         "sw $4, 28($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
5543
/* Call a 9-arg function.  Args 1..4 go in $4..$7; args 5..9 are
   copied to 16..32($29) in a 40-byte stack area; $28/$31 are saved
   across the call and the result is read from $2. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"  /* save gp */                     \
         "sw $31, 4($29) \n\t"  /* save ra */                     \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 40\n\t"                                  \
         "sw $4, 16($29) \n\t"  /* arg5 to stack */               \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"  /* arg6 to stack */               \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"  /* arg7 to stack */               \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"  /* arg8 to stack */               \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"  /* arg9 to stack */               \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 40 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5592
/* Call a 10-arg function.  Args 1..4 go in $4..$7; args 5..10 are
   copied to 16..36($29) in a 48-byte stack area; $28/$31 are saved
   across the call and the result is read from $2. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"  /* save gp */                     \
         "sw $31, 4($29) \n\t"  /* save ra */                     \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"  /* arg5 to stack */               \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"  /* arg6 to stack */               \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"  /* arg7 to stack */               \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"  /* arg8 to stack */               \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"  /* arg9 to stack */               \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"  /* arg10 to stack */              \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5644
/* Call an 11-arg function.  Args 1..4 go in $4..$7; args 5..11 are
   copied to 16..40($29) in a 48-byte stack area; $28/$31 are saved
   across the call and the result is read from $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"  /* save gp */                     \
         "sw $31, 4($29) \n\t"  /* save ra */                     \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 48\n\t"                                  \
         "sw $4, 16($29) \n\t"  /* arg5 to stack */               \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"  /* arg6 to stack */               \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"  /* arg7 to stack */               \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"  /* arg8 to stack */               \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"  /* arg9 to stack */               \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"  /* arg10 to stack */              \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"  /* arg11 to stack */              \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 48 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5700
/* Call a 12-arg function.  Args 1..4 go in $4..$7; args 5..12 are
   copied to 16..44($29) in a 56-byte stack area; $28 (gp) and $31
   (ra) are saved across the call and the result is read from $2.
   The input operand uses the "0" matching constraint (it shares the
   output's register), consistent with all the other mips32
   CALL_FN_* macros in this section. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "subu $29, $29, 8 \n\t"                                  \
         "sw $28, 0($29) \n\t"  /* save gp */                     \
         "sw $31, 4($29) \n\t"  /* save ra */                     \
         "lw $4, 20(%1) \n\t"                                     \
         "subu $29, $29, 56\n\t"                                  \
         "sw $4, 16($29) \n\t"  /* arg5 to stack */               \
         "lw $4, 24(%1) \n\t"                                     \
         "sw $4, 20($29) \n\t"  /* arg6 to stack */               \
         "lw $4, 28(%1) \n\t"                                     \
         "sw $4, 24($29) \n\t"  /* arg7 to stack */               \
         "lw $4, 32(%1) \n\t"                                     \
         "sw $4, 28($29) \n\t"  /* arg8 to stack */               \
         "lw $4, 36(%1) \n\t"                                     \
         "sw $4, 32($29) \n\t"  /* arg9 to stack */               \
         "lw $4, 40(%1) \n\t"                                     \
         "sw $4, 36($29) \n\t"  /* arg10 to stack */              \
         "lw $4, 44(%1) \n\t"                                     \
         "sw $4, 40($29) \n\t"  /* arg11 to stack */              \
         "lw $4, 48(%1) \n\t"                                     \
         "sw $4, 44($29) \n\t"  /* arg12 to stack */              \
         "lw $4, 4(%1) \n\t"                                      \
         "lw $5, 8(%1) \n\t"                                      \
         "lw $6, 12(%1) \n\t"                                     \
         "lw $7, 16(%1) \n\t"                                     \
         "lw $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "addu $29, $29, 56 \n\t"                                 \
         "lw $28, 0($29) \n\t"                                    \
         "lw $31, 4($29) \n\t"                                    \
         "addu $29, $29, 8 \n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5759
5760#endif /* PLAT_mips32_linux */
5761
5762/* ------------------------- nanomips-linux -------------------- */
5763
5764#if defined(PLAT_nanomips_linux)
5765
/* These regs are trashed by the hidden call: the argument registers,
   the temporaries, and the assembler temporary. */
#define __CALLER_SAVED_REGS "$t4", "$t5", "$a0", "$a1", "$a2", \
"$a3", "$a4", "$a5", "$a6", "$a7", "$t0", "$t1", "$t2", "$t3", \
"$t8","$t9", "$at"
5770
/* These CALL_FN_ macros assume that on nanomips-linux,
   sizeof(unsigned long) == 4. */
5773
/* Call a 0-arg function.  The target address is loaded from
   _argvec[0] into $t9; the result is collected from $a0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5790
/* Call a 1-arg function.  arg1 goes in $a0; target in $t9; the
   result is collected from $a0. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5809
/* Call a 2-arg function.  Args go in $a0/$a1; target in $t9;
   result from $a0. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5830
/* Call a 3-arg function.  Args go in $a0..$a2; target in $t9;
   result from $a0. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5853
/* Call a 4-arg function.  Args go in $a0..$a3; target in $t9;
   result from $a0. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5878
/* Call a 5-arg function.  Args go in $a0..$a4; target in $t9;
   result from $a0. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
/* Call a 6-arg function.  Args go in $a0..$a5; target in $t9;
   result from $a0. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5933
/* Call a 7-arg function.  Args go in $a0..$a6; target in $t9;
   result from $a0. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5965
/* Call an 8-arg function.  Args go in $a0..$a7; target in $t9;
   result from $a0. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         "lw $t9, 0(%1)\n\t"                                      \
         "lw $a0, 4(%1)\n\t"                                      \
         "lw $a1, 8(%1)\n\t"                                      \
         "lw $a2,12(%1)\n\t"                                      \
         "lw $a3,16(%1)\n\t"                                      \
         "lw $a4,20(%1)\n\t"                                      \
         "lw $a5,24(%1)\n\t"                                      \
         "lw $a6,28(%1)\n\t"                                      \
         "lw $a7,32(%1)\n\t"                                      \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0\n"                                         \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
5999
/* Call a 9-arg function.  Args 1..8 go in $a0..$a7; arg9 is copied
   to 0($sp) in a freshly reserved 16-byte stack area; target in $t9;
   result from $a0. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"  /* arg9 to stack */               \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6038
/* Call a 10-arg function.  Args 1..8 go in $a0..$a7; args 9..10 are
   copied to 0..4($sp) in a 16-byte stack area; target in $t9; result
   from $a0. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"  /* arg9 to stack */               \
         "lw $t9,40(%1) \n\t"                                     \
         "sw $t9, 4($sp) \n\t"  /* arg10 to stack */              \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6080
/* Call an 11-arg function.  Args 1..8 go in $a0..$a7; args 9..11 are
   copied to 0..8($sp) in a 16-byte stack area; target in $t9; result
   from $a0. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11)                                      \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"  /* arg9 to stack */               \
         "lw $t9,40(%1) \n\t"                                     \
         "sw $t9, 4($sp) \n\t"  /* arg10 to stack */              \
         "lw $t9,44(%1) \n\t"                                     \
         "sw $t9, 8($sp) \n\t"  /* arg11 to stack */              \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6126
/* Call a 12-arg function.  Args 1..8 go in $a0..$a7; args 9..12 are
   copied to 0..12($sp) in a 16-byte stack area; target in $t9;
   result from $a0. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,       \
                      arg6,arg7,arg8,arg9,arg10,                  \
                      arg11,arg12)                                \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         "addiu $sp, $sp, -16 \n\t"                               \
         "lw $t9,36(%1) \n\t"                                     \
         "sw $t9, 0($sp) \n\t"  /* arg9 to stack */               \
         "lw $t9,40(%1) \n\t"                                     \
         "sw $t9, 4($sp) \n\t"  /* arg10 to stack */              \
         "lw $t9,44(%1) \n\t"                                     \
         "sw $t9, 8($sp) \n\t"  /* arg11 to stack */              \
         "lw $t9,48(%1) \n\t"                                     \
         "sw $t9,12($sp) \n\t"  /* arg12 to stack */              \
         "lw $t9, 0(%1) \n\t"                                     \
         "lw $a0, 4(%1) \n\t"                                     \
         "lw $a1, 8(%1) \n\t"                                     \
         "lw $a2,12(%1) \n\t"                                     \
         "lw $a3,16(%1) \n\t"                                     \
         "lw $a4,20(%1) \n\t"                                     \
         "lw $a5,24(%1) \n\t"                                     \
         "lw $a6,28(%1) \n\t"                                     \
         "lw $a7,32(%1) \n\t"                                     \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $a0 \n\t"                                      \
         "addiu $sp, $sp, 16 \n\t"                                \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
6175
6176#endif /* PLAT_nanomips_linux */
6177
6178/* ------------------------- mips64-linux ------------------------- */
6179
6180#if defined(PLAT_mips64_linux)
6181
/* These regs are trashed by the hidden call: $2..$15, $24, $25
   and $31 (ra). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"
6186
/* These CALL_FN_ macros assume that on mips64-linux,
   sizeof(long long) == 8. */

/* Widen a register-sized value to 64 bits, sign-extending through
   'long'.  (x) is parenthesized so that expression arguments such as
   'a + b' are cast as a whole rather than the cast binding only to
   'a'. */
#define MIPS64_LONG2REG_CAST(x) ((long long)(long)(x))
6191
/* Call a 0-arg function.  The target address is loaded from
   _argvec[0] into $25 (t9); the result is read from $2 (v0). */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[1];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      __asm__ volatile(                                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "0" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6208
/* Call a 1-arg function.  arg1 goes in $4 (a0); target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[2];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"   /* arg1*/                           \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6227
/* Call a 2-arg function.  Args go in $4/$5; target in $25 (t9);
   result from $2 (v0).  _argvec[0] goes through
   MIPS64_LONG2REG_CAST like every other slot, consistent with the
   sibling mips64 macros. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[3];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6248
6249
/* Call a 3-arg function.  Args go in $4..$6; target in $25 (t9);
   result from $2 (v0).  _argvec[0] goes through
   MIPS64_LONG2REG_CAST like every other slot, consistent with the
   sibling mips64 macros. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[4];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6272
/* Call a 4-arg function.  Args go in $4..$7; target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[5];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6297
/* Call a 5-arg function.  Args go in $4..$8; target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[6];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6324
/* Call a 6-arg function.  Args go in $4..$9; target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[7];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6353
/* Call a 7-arg function.  Args go in $4..$10; target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[8];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6385
/* Call an 8-arg function.  Args go in $4..$11; target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[9];                     \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      __asm__ volatile(                                           \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1) \n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6419
/* Call a 9-arg function.  Args 1..8 go in $4..$11; arg9 is copied to
   0($29) in a freshly reserved 8-byte stack area; target in $25
   (t9); result from $2 (v0). */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[10];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 8\n\t"                                  \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"  /* arg9 to stack */                 \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 8\n\t"                                  \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6458
/* Call a 10-arg function.  Args 1..8 go in $4..$11; args 9..10 are
   copied to 0..8($29) in a 16-byte stack area; target in $25 (t9);
   result from $2 (v0). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long long _argvec[11];                    \
      volatile unsigned long long _res;                           \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);            \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                    \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                    \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                    \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                    \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                    \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                    \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                    \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                    \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                    \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                  \
      __asm__ volatile(                                           \
         "dsubu $29, $29, 16\n\t"                                 \
         "ld $4, 72(%1)\n\t"                                      \
         "sd $4, 0($29)\n\t"  /* arg9 to stack */                 \
         "ld $4, 80(%1)\n\t"                                      \
         "sd $4, 8($29)\n\t"  /* arg10 to stack */                \
         "ld $4, 8(%1)\n\t"                                       \
         "ld $5, 16(%1)\n\t"                                      \
         "ld $6, 24(%1)\n\t"                                      \
         "ld $7, 32(%1)\n\t"                                      \
         "ld $8, 40(%1)\n\t"                                      \
         "ld $9, 48(%1)\n\t"                                      \
         "ld $10, 56(%1)\n\t"                                     \
         "ld $11, 64(%1)\n\t"                                     \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                    \
         VALGRIND_CALL_NOREDIR_T9                                 \
         "daddu $29, $29, 16\n\t"                                 \
         "move %0, $2\n"                                          \
         : /*out*/ "=r" (_res)                                    \
         : /*in*/ "r" (&_argvec[0])                               \
         : /*trash*/ "memory", __CALLER_SAVED_REGS                \
      );                                                          \
      lval = (__typeof__(lval)) (long)_res;                       \
   } while (0)
6500
/* As CALL_FN_W_10W, but for 11 arguments: args 1..8 in $4..$11,
   args 9..11 spilled to a 24-byte area below $29; result in $2. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11)                         \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[12];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 24\n\t" /* reserve room for 3 spills */\
         "ld $4, 72(%1)\n\t"  /* arg9  -> stack slot 0 */        \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"  /* arg10 -> stack slot 1 */        \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 88(%1)\n\t"  /* arg11 -> stack slot 2 */        \
         "sd $4, 16($29)\n\t"                                    \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 24\n\t" /* pop the spill area */       \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "r" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6546
/* As CALL_FN_W_10W, but for 12 arguments: args 1..8 in $4..$11,
   args 9..12 spilled to a 32-byte area below $29; result in $2. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,      \
                                  arg6,arg7,arg8,arg9,arg10,     \
                                  arg11,arg12)                   \
   do {                                                          \
      volatile OrigFn        _orig = (orig);                     \
      volatile unsigned long long _argvec[13];                   \
      volatile unsigned long long _res;                          \
      _argvec[0] = MIPS64_LONG2REG_CAST(_orig.nraddr);           \
      _argvec[1] = MIPS64_LONG2REG_CAST(arg1);                   \
      _argvec[2] = MIPS64_LONG2REG_CAST(arg2);                   \
      _argvec[3] = MIPS64_LONG2REG_CAST(arg3);                   \
      _argvec[4] = MIPS64_LONG2REG_CAST(arg4);                   \
      _argvec[5] = MIPS64_LONG2REG_CAST(arg5);                   \
      _argvec[6] = MIPS64_LONG2REG_CAST(arg6);                   \
      _argvec[7] = MIPS64_LONG2REG_CAST(arg7);                   \
      _argvec[8] = MIPS64_LONG2REG_CAST(arg8);                   \
      _argvec[9] = MIPS64_LONG2REG_CAST(arg9);                   \
      _argvec[10] = MIPS64_LONG2REG_CAST(arg10);                 \
      _argvec[11] = MIPS64_LONG2REG_CAST(arg11);                 \
      _argvec[12] = MIPS64_LONG2REG_CAST(arg12);                 \
      __asm__ volatile(                                          \
         "dsubu $29, $29, 32\n\t" /* reserve room for 4 spills */\
         "ld $4, 72(%1)\n\t"  /* arg9  -> stack slot 0 */        \
         "sd $4, 0($29)\n\t"                                     \
         "ld $4, 80(%1)\n\t"  /* arg10 -> stack slot 1 */        \
         "sd $4, 8($29)\n\t"                                     \
         "ld $4, 88(%1)\n\t"  /* arg11 -> stack slot 2 */        \
         "sd $4, 16($29)\n\t"                                    \
         "ld $4, 96(%1)\n\t"  /* arg12 -> stack slot 3 */        \
         "sd $4, 24($29)\n\t"                                    \
         "ld $4, 8(%1)\n\t"                                      \
         "ld $5, 16(%1)\n\t"                                     \
         "ld $6, 24(%1)\n\t"                                     \
         "ld $7, 32(%1)\n\t"                                     \
         "ld $8, 40(%1)\n\t"                                     \
         "ld $9, 48(%1)\n\t"                                     \
         "ld $10, 56(%1)\n\t"                                    \
         "ld $11, 64(%1)\n\t"                                    \
         "ld $25, 0(%1)\n\t"  /* target->t9 */                   \
         VALGRIND_CALL_NOREDIR_T9                                \
         "daddu $29, $29, 32\n\t" /* pop the spill area */       \
         "move %0, $2\n"                                         \
         : /*out*/   "=r" (_res)                                 \
         : /*in*/    "r" (&_argvec[0])                           \
         : /*trash*/ "memory", __CALLER_SAVED_REGS               \
      );                                                         \
      lval = (__typeof__(lval)) (long)_res;                      \
   } while (0)
6595
6596#endif /* PLAT_mips64_linux */
6597
6598/* ------------------------------------------------------------------ */
6599/* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
6600/* */
6601/* ------------------------------------------------------------------ */
6602
6603/* Some request codes. There are many more of these, but most are not
6604 exposed to end-user view. These are the public ones, all of the
6605 form 0x1000 + small_number.
6606
6607 Core ones are in the range 0x00000000--0x0000ffff. The non-public
6608 ones start at 0x2000.
6609*/
6610
6611/* These macros are used by tools -- they must be public, but don't
6612 embed them into other programs. */
/* Build a tool-specific request-code base from a two-character tool
   tag: 'a' in bits 31..24, 'b' in bits 23..16 (low 16 bits zero). */
#define VG_USERREQ_TOOL_BASE(a,b) \
   ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool tagged (a,b): compares
   only the top 16 bits, so the tool owns the whole 0xXXYY0000 range. */
#define VG_IS_TOOL_USERREQ(a, b, v) \
   (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
6617
6618/* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
6619 This enum comprises an ABI exported by Valgrind to programs
6620 which use client requests. DO NOT CHANGE THE NUMERIC VALUES OF THESE
6621 ENTRIES, NOR DELETE ANY -- add new ones at the end of the most
6622 relevant group. */
typedef
   enum { VG_USERREQ__RUNNING_ON_VALGRIND  = 0x1001,
          VG_USERREQ__DISCARD_TRANSLATIONS = 0x1002,

          /* These allow any function to be called from the simulated
             CPU but run on the real CPU.  Nb: the first arg passed to
             the function is always the ThreadId of the running
             thread!  So CLIENT_CALL0 actually requires a 1 arg
             function, etc. */
          VG_USERREQ__CLIENT_CALL0 = 0x1101,
          VG_USERREQ__CLIENT_CALL1 = 0x1102,
          VG_USERREQ__CLIENT_CALL2 = 0x1103,
          VG_USERREQ__CLIENT_CALL3 = 0x1104,

          /* Can be useful in regression testing suites -- eg. can
             send Valgrind's output to /dev/null and still count
             errors. */
          VG_USERREQ__COUNT_ERRORS = 0x1201,

          /* Allows the client program and/or gdbserver to execute a monitor
             command. */
          VG_USERREQ__GDB_MONITOR_COMMAND = 0x1202,

          /* Allows the client program to change a dynamic command line
             option.  */
          VG_USERREQ__CLO_CHANGE = 0x1203,

          /* These are useful and can be interpreted by any tool that
             tracks malloc() et al, by using vg_replace_malloc.c. */
          VG_USERREQ__MALLOCLIKE_BLOCK = 0x1301,
          /* 0x130b: numerically out of sequence because it was added
             after the mempool requests below, but it belongs logically
             with the malloc-like group.  Values are ABI -- keep. */
          VG_USERREQ__RESIZEINPLACE_BLOCK = 0x130b,
          VG_USERREQ__FREELIKE_BLOCK   = 0x1302,
          /* Memory pool support. */
          VG_USERREQ__CREATE_MEMPOOL   = 0x1303,
          VG_USERREQ__DESTROY_MEMPOOL  = 0x1304,
          VG_USERREQ__MEMPOOL_ALLOC    = 0x1305,
          VG_USERREQ__MEMPOOL_FREE     = 0x1306,
          VG_USERREQ__MEMPOOL_TRIM     = 0x1307,
          VG_USERREQ__MOVE_MEMPOOL     = 0x1308,
          VG_USERREQ__MEMPOOL_CHANGE   = 0x1309,
          VG_USERREQ__MEMPOOL_EXISTS   = 0x130a,

          /* Allow printfs to valgrind log. */
          /* The first two pass the va_list argument by value, which
             assumes it is the same size as or smaller than a UWord,
             which generally isn't the case.  Hence are deprecated.
             The second two pass the vargs by reference and so are
             immune to this problem. */
          /* both :: char* fmt, va_list vargs (DEPRECATED) */
          VG_USERREQ__PRINTF           = 0x1401,
          VG_USERREQ__PRINTF_BACKTRACE = 0x1402,
          /* both :: char* fmt, va_list* vargs */
          VG_USERREQ__PRINTF_VALIST_BY_REF = 0x1403,
          VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF = 0x1404,

          /* Stack support. */
          VG_USERREQ__STACK_REGISTER   = 0x1501,
          VG_USERREQ__STACK_DEREGISTER = 0x1502,
          VG_USERREQ__STACK_CHANGE     = 0x1503,

          /* Wine support */
          VG_USERREQ__LOAD_PDB_DEBUGINFO = 0x1601,

          /* Querying of debug info. */
          VG_USERREQ__MAP_IP_TO_SRCLOC = 0x1701,

          /* Disable/enable error reporting level.  Takes a single
             Word arg which is the delta to this thread's error
             disablement indicator.  Hence 1 disables or further
             disables errors, and -1 moves back towards enablement.
             Other values are not allowed. */
          VG_USERREQ__CHANGE_ERR_DISABLEMENT = 0x1801,

          /* Some requests used for Valgrind internal, such as
             self-test or self-hosting. */
          /* Initialise IR injection */
          VG_USERREQ__VEX_INIT_FOR_IRI = 0x1901,
          /* Used by Inner Valgrind to inform Outer Valgrind where to
             find the list of inner guest threads */
          VG_USERREQ__INNER_THREADS    = 0x1902
   } Vg_ClientRequest;
6704
/* Non-GCC compilers don't recognise GCC's __extension__ keyword
   (used elsewhere in this header); make it expand to nothing. */
#if !defined(__GNUC__)
#  define __extension__ /* */
#endif
6708
6709
6710/* Returns the number of Valgrinds this code is running under. That
6711 is, 0 if running natively, 1 if running under Valgrind, 2 if
6712 running under Valgrind which is running under another Valgrind,
6713 etc. */
/* Expands to the client request; result is forced to unsigned so the
   macro can be used directly in comparisons.  (The stray trailing
   line-continuation backslash after the final argument list has been
   removed: it silently extended the macro onto the following line,
   which would swallow any non-blank line placed there.) */
#define RUNNING_ON_VALGRIND                                           \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */,         \
                                    VG_USERREQ__RUNNING_ON_VALGRIND,  \
                                    0, 0, 0, 0, 0)

6719
/* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
   _qzz_len - 1].  Useful if you are debugging a JITter or some such,
   since it provides a way to make sure valgrind will retranslate the
   invalidated area.  Returns no value. */
#define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len)         \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
                                    _qzz_addr, _qzz_len, 0, 0, 0)

/* Used by an "inner" Valgrind (one running under an "outer" Valgrind)
   to tell the outer one where to find the list of inner guest threads
   (see VG_USERREQ__INNER_THREADS above).  Returns no value. */
#define VALGRIND_INNER_THREADS(_qzz_addr)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__INNER_THREADS,     \
                                   _qzz_addr, 0, 0, 0, 0)
6731
6732
6733/* These requests are for getting Valgrind itself to print something.
6734 Possibly with a backtrace. This is a really ugly hack. The return value
6735 is the number of characters printed, excluding the "**<pid>** " part at the
6736 start and the backtrace (if present). */
6737
/* NOTE(review): '&&' binds tighter than '||', so this condition parses
   as  defined(__GNUC__) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER)).
   It was presumably meant as (__GNUC__ || __INTEL_COMPILER) && !_MSC_VER
   -- confirm intent before changing, as some compilers define both
   __GNUC__ and _MSC_VER. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
/* Modern GCC will optimize the static routine out if unused,
   and unused attribute will shut down warnings about it. */
static int VALGRIND_PRINTF(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* Print a printf-style message to the Valgrind log via the
   PRINTF_VALIST_BY_REF request (va_list passed by reference -- see the
   enum comment above).  Returns the number of characters printed.
   Compiles to a no-op returning 0 when NVALGRIND is defined. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   /* uintptr_t rather than unsigned long here -- presumably because
      'unsigned long' is not register-sized on Win64; verify. */
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6778
/* NOTE(review): same operator-precedence question as the condition
   guarding the VALGRIND_PRINTF declaration -- '&&' binds tighter
   than '||'. */
#if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
   __attribute__((format(__printf__, 1, 2), __unused__));
#endif
/* As VALGRIND_PRINTF, but also asks Valgrind to print a stack
   backtrace after the message (PRINTF_BACKTRACE_VALIST_BY_REF).
   Returns the number of characters printed; no-op returning 0 when
   NVALGRIND is defined. */
static int
#if defined(_MSC_VER)
__inline
#endif
VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
{
#if defined(NVALGRIND)
   (void)format;
   return 0;
#else /* NVALGRIND */
#if defined(_MSC_VER) || defined(__MINGW64__)
   /* uintptr_t rather than unsigned long -- see note in
      VALGRIND_PRINTF. */
   uintptr_t _qzz_res;
#else
   unsigned long _qzz_res;
#endif
   va_list vargs;
   va_start(vargs, format);
#if defined(_MSC_VER) || defined(__MINGW64__)
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (uintptr_t)format,
                              (uintptr_t)&vargs,
                              0, 0, 0);
#else
   _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
                              VG_USERREQ__PRINTF_BACKTRACE_VALIST_BY_REF,
                              (unsigned long)format,
                              (unsigned long)&vargs,
                              0, 0, 0);
#endif
   va_end(vargs);
   return (int)_qzz_res;
#endif /* NVALGRIND */
}
6817
6818
6819/* These requests allow control to move from the simulated CPU to the
6820 real CPU, calling an arbitrary function.
6821
6822 Note that the current ThreadId is inserted as the first argument.
6823 So this call:
6824
6825 VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
6826
6827 requires f to have this signature:
6828
6829 Word f(Word tid, Word arg1, Word arg2)
6830
6831 where "Word" is a word-sized type.
6832
6833 Note that these client requests are not entirely reliable. For example,
6834 if you call a function with them that subsequently calls printf(),
6835 there's a high chance Valgrind will crash. Generally, your prospects of
6836 these working are made higher if the called function does not refer to
6837 any global variables, and does not refer to any libc or other functions
6838 (printf et al). Any kind of entanglement with libc or dynamic linking is
6839 likely to have a bad outcome, for tricky reasons which we've grappled
6840 with a lot in the past.
6841*/
/* Call _qyy_fn on the real CPU with no extra args (the ThreadId is
   still prepended -- see the block comment above).  Result defaults
   to 0 when not running under Valgrind. */
#define VALGRIND_NON_SIMD_CALL0(_qyy_fn)                          \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL0,     \
                                    _qyy_fn,                      \
                                    0, 0, 0, 0)

/* As above, with one caller-supplied argument. */
#define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1)               \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL1,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, 0, 0, 0)

/* As above, with two caller-supplied arguments. */
#define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2)    \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL2,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2, 0, 0)

/* As above, with three caller-supplied arguments. */
#define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
    VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */,       \
                                    VG_USERREQ__CLIENT_CALL3,     \
                                    _qyy_fn,                      \
                                    _qyy_arg1, _qyy_arg2,         \
                                    _qyy_arg3, 0)


/* Counts the number of errors that have been recorded by a tool.  Nb:
   the tool must record the errors with VG_(maybe_record_error)() or
   VG_(unique_error)() for them to be counted. */
#define VALGRIND_COUNT_ERRORS                                     \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(                    \
                               0 /* default return */,            \
                               VG_USERREQ__COUNT_ERRORS,          \
                               0, 0, 0, 0, 0)
6876
6877/* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
6878 when heap blocks are allocated in order to give accurate results. This
6879 happens automatically for the standard allocator functions such as
6880 malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
6881 delete[], etc.
6882
6883 But if your program uses a custom allocator, this doesn't automatically
6884 happen, and Valgrind will not do as well. For example, if you allocate
6885 superblocks with mmap() and then allocates chunks of the superblocks, all
6886 Valgrind's observations will be at the mmap() level and it won't know that
6887 the chunks should be considered separate entities. In Memcheck's case,
6888 that means you probably won't get heap block overrun detection (because
6889 there won't be redzones marked as unaddressable) and you definitely won't
6890 get any leak detection.
6891
6892 The following client requests allow a custom allocator to be annotated so
6893 that it can be handled accurately by Valgrind.
6894
6895 VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
6896 by a malloc()-like function. For Memcheck (an illustrative case), this
6897 does two things:
6898
6899 - It records that the block has been allocated. This means any addresses
6900 within the block mentioned in error messages will be
6901 identified as belonging to the block. It also means that if the block
6902 isn't freed it will be detected by the leak checker.
6903
6904 - It marks the block as being addressable and undefined (if 'is_zeroed' is
6905 not set), or addressable and defined (if 'is_zeroed' is set). This
6906 controls how accesses to the block by the program are handled.
6907
6908 'addr' is the start of the usable block (ie. after any
6909 redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
6910 can apply redzones -- these are blocks of padding at the start and end of
6911 each block. Adding redzones is recommended as it makes it much more likely
6912 Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
6913 zeroed (or filled with another predictable value), as is the case for
6914 calloc().
6915
6916 VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
6917 heap block -- that will be used by the client program -- is allocated.
6918 It's best to put it at the outermost level of the allocator if possible;
6919 for example, if you have a function my_alloc() which calls
6920 internal_alloc(), and the client request is put inside internal_alloc(),
6921 stack traces relating to the heap block will contain entries for both
6922 my_alloc() and internal_alloc(), which is probably not what you want.
6923
6924 For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
6925 custom blocks from within a heap block, B, that has been allocated with
6926 malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
6927 -- the custom blocks will take precedence.
6928
6929 VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
6930 Memcheck, it does two things:
6931
6932 - It records that the block has been deallocated. This assumes that the
6933 block was annotated as having been allocated via
6934 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6935
6936 - It marks the block as being unaddressable.
6937
6938 VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
6939 heap block is deallocated.
6940
6941 VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
6942 Memcheck, it does four things:
6943
6944 - It records that the size of a block has been changed. This assumes that
6945 the block was annotated as having been allocated via
6946 VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
6947
6948 - If the block shrunk, it marks the freed memory as being unaddressable.
6949
6950 - If the block grew, it marks the new area as undefined and defines a red
6951 zone past the end of the new block.
6952
6953 - The V-bits of the overlap between the old and the new block are preserved.
6954
6955 VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
6956 and before deallocation of the old block.
6957
6958 In many cases, these three client requests will not be enough to get your
6959 allocator working well with Memcheck. More specifically, if your allocator
6960 writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
6961 will be necessary to mark the memory as addressable just before the zeroing
6962 occurs, otherwise you'll get a lot of invalid write errors. For example,
6963 you'll need to do this if your allocator recycles freed blocks, but it
6964 zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
6965 Alternatively, if your allocator reuses freed blocks for allocator-internal
6966 data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
6967
6968 Really, what's happening is a blurring of the lines between the client
6969 program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
6970 memory should be considered unaddressable to the client program, but the
6971 allocator knows more than the rest of the client program and so may be able
6972 to safely access it. Extra client requests are necessary for Valgrind to
6973 understand the distinction between the allocator and the rest of the
6974 program.
6975
6976 Ignored if addr == 0.
6977*/
/* Mark [addr, addr+sizeB) as a heap block allocated by a custom
   allocator; see the long explanatory comment above for the full
   contract (rzB = redzone size, is_zeroed = contents are defined). */
#define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed)          \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK,       \
                                    addr, sizeB, rzB, is_zeroed, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB)     \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK,    \
                                    addr, oldSizeB, newSizeB, rzB, 0)

/* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Ignored if addr == 0.
*/
#define VALGRIND_FREELIKE_BLOCK(addr, rzB)                              \
    VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK,         \
                                    addr, rzB, 0, 0, 0)

/* Create a memory pool anchored at 'pool', whose chunks carry rzB-byte
   redzones and are considered defined when is_zeroed is nonzero. */
#define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed)             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,    \
                                   pool, rzB, is_zeroed, 0, 0)
7000
7001/* Create a memory pool with some flags specifying extended behaviour.
7002 When flags is zero, the behaviour is identical to VALGRIND_CREATE_MEMPOOL.
7003
7004 The flag VALGRIND_MEMPOOL_METAPOOL specifies that the pieces of memory
7005 associated with the pool using VALGRIND_MEMPOOL_ALLOC will be used
7006 by the application as superblocks to dole out MALLOC_LIKE blocks using
7007 VALGRIND_MALLOCLIKE_BLOCK. In other words, a meta pool is a "2 levels"
7008 pool : first level is the blocks described by VALGRIND_MEMPOOL_ALLOC.
7009 The second level blocks are described using VALGRIND_MALLOCLIKE_BLOCK.
7010 Note that the association between the pool and the second level blocks
7011 is implicit : second level blocks will be located inside first level
7012 blocks. It is necessary to use the VALGRIND_MEMPOOL_METAPOOL flag
7013 for such 2 levels pools, as otherwise valgrind will detect overlapping
7014 memory blocks, and will abort execution (e.g. during leak search).
7015
7016 Such a meta pool can also be marked as an 'auto free' pool using the flag
7017 VALGRIND_MEMPOOL_AUTO_FREE, which must be OR-ed together with the
7018 VALGRIND_MEMPOOL_METAPOOL. For an 'auto free' pool, VALGRIND_MEMPOOL_FREE
7019 will automatically free the second level blocks that are contained
7020 inside the first level block freed with VALGRIND_MEMPOOL_FREE.
7021 In other words, calling VALGRIND_MEMPOOL_FREE will cause implicit calls
7022 to VALGRIND_FREELIKE_BLOCK for all the second level blocks included
7023 in the first level block.
7024 Note: it is an error to use the VALGRIND_MEMPOOL_AUTO_FREE flag
7025 without the VALGRIND_MEMPOOL_METAPOOL flag.
7026*/
/* Flag bits for VALGRIND_CREATE_MEMPOOL_EXT; see the long comment
   above.  AUTO_FREE is only valid together with METAPOOL. */
#define VALGRIND_MEMPOOL_AUTO_FREE  1
#define VALGRIND_MEMPOOL_METAPOOL   2
#define VALGRIND_CREATE_MEMPOOL_EXT(pool, rzB, is_zeroed, flags)        \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL,          \
                                   pool, rzB, is_zeroed, flags, 0)

/* Destroy a memory pool. */
#define VALGRIND_DESTROY_MEMPOOL(pool)                            \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL,   \
                                   pool, 0, 0, 0, 0)

/* Associate a piece of memory with a memory pool. */
#define VALGRIND_MEMPOOL_ALLOC(pool, addr, size)                  \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC,     \
                                   pool, addr, size, 0, 0)

/* Disassociate a piece of memory from a memory pool. */
#define VALGRIND_MEMPOOL_FREE(pool, addr)                         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE,      \
                                   pool, addr, 0, 0, 0)

/* Disassociate any pieces outside a particular range. */
#define VALGRIND_MEMPOOL_TRIM(pool, addr, size)                   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM,      \
                                   pool, addr, size, 0, 0)

/* Inform the tool that the pool previously anchored at poolA has
   moved to poolB (its associations move with it).  [Fixes a
   copy-pasted comment that wrongly described this as resizing a
   piece within a pool.] */
#define VALGRIND_MOVE_MEMPOOL(poolA, poolB)                       \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL,      \
                                   poolA, poolB, 0, 0, 0)

/* Resize and/or move a piece associated with a memory pool. */
#define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size)         \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE,    \
                                   pool, addrA, addrB, size, 0)

/* Return 1 if a mempool exists, else 0. */
#define VALGRIND_MEMPOOL_EXISTS(pool)                             \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MEMPOOL_EXISTS,        \
                               pool, 0, 0, 0, 0)
7068
/* Mark a piece of memory as being a stack.  Returns a stack id.
   start is the lowest addressable stack byte, end is the highest
   addressable stack byte. */
#define VALGRIND_STACK_REGISTER(start, end)                       \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__STACK_REGISTER,        \
                               start, end, 0, 0, 0)

/* Unmark the piece of memory associated with a stack id as being a
   stack.  The id must have come from VALGRIND_STACK_REGISTER. */
#define VALGRIND_STACK_DEREGISTER(id)                             \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_DEREGISTER,  \
                                   id, 0, 0, 0, 0)

/* Change the start and end address of the stack id.
   start is the new lowest addressable stack byte, end is the new highest
   addressable stack byte. */
#define VALGRIND_STACK_CHANGE(id, start, end)                     \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__STACK_CHANGE,      \
                                   id, start, end, 0, 0)

/* Load PDB debug info for Wine PE image_map. */
#define VALGRIND_LOAD_PDB_DEBUGINFO(fd, ptr, total_size, delta)   \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__LOAD_PDB_DEBUGINFO, \
                                   fd, ptr, total_size, delta, 0)
7094
/* Map a code address to a source file name and line number.  buf64
   must point to a 64-byte buffer in the caller's address space.  The
   result will be dumped in there and is guaranteed to be zero
   terminated.  If no info is found, the first byte is set to zero. */
#define VALGRIND_MAP_IP_TO_SRCLOC(addr, buf64)                    \
    (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0,                  \
                               VG_USERREQ__MAP_IP_TO_SRCLOC,      \
                               addr, buf64, 0, 0, 0)

/* Disable error reporting for this thread.  Behaves in a stack like
   way, so you can safely call this multiple times provided that
   VALGRIND_ENABLE_ERROR_REPORTING is called the same number of times
   to re-enable reporting.  The first call of this macro disables
   reporting.  Subsequent calls have no effect except to increase the
   number of VALGRIND_ENABLE_ERROR_REPORTING calls needed to re-enable
   reporting.  Child threads do not inherit this setting from their
   parents -- they are always created with reporting enabled. */
#define VALGRIND_DISABLE_ERROR_REPORTING                                \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   1, 0, 0, 0, 0)

/* Re-enable error reporting, as per comments on
   VALGRIND_DISABLE_ERROR_REPORTING.  (Delta of -1 to this thread's
   disablement counter.) */
#define VALGRIND_ENABLE_ERROR_REPORTING                                 \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CHANGE_ERR_DISABLEMENT, \
                                   -1, 0, 0, 0, 0)

/* Execute a monitor command from the client program.
   If a connection is opened with GDB, the output will be sent
   according to the output mode set for vgdb.
   If no connection is opened, output will go to the log output.
   Returns 1 if command not recognised, 0 otherwise. */
#define VALGRIND_MONITOR_COMMAND(command)                               \
   VALGRIND_DO_CLIENT_REQUEST_EXPR(0, VG_USERREQ__GDB_MONITOR_COMMAND, \
                                   command, 0, 0, 0, 0)


/* Change the value of a dynamic command line option.
   Note that unknown or not dynamically changeable options
   will cause a warning message to be output. */
#define VALGRIND_CLO_CHANGE(option)                           \
   VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CLO_CHANGE,    \
                                   option, 0, 0, 0, 0)
7138
7139
/* These PLAT_* symbols are internal to this header; undefine them all
   so they do not leak into code that includes valgrind.h. */
#undef PLAT_x86_darwin
#undef PLAT_amd64_darwin
#undef PLAT_x86_win32
#undef PLAT_amd64_win64
#undef PLAT_x86_linux
#undef PLAT_amd64_linux
#undef PLAT_ppc32_linux
#undef PLAT_ppc64be_linux
#undef PLAT_ppc64le_linux
#undef PLAT_arm_linux
#undef PLAT_s390x_linux
#undef PLAT_mips32_linux
#undef PLAT_mips64_linux
#undef PLAT_nanomips_linux
#undef PLAT_x86_solaris
#undef PLAT_amd64_solaris
7156
7157#endif /* __VALGRIND_H */
7158