A Discrete-Event Network Simulator
API
valgrind.h
Go to the documentation of this file.
1 /* -*- c -*-
2  ----------------------------------------------------------------
3 
4  Notice that the following BSD-style license applies to this one
5  file (valgrind.h) only. The rest of Valgrind is licensed under the
6  terms of the GNU General Public License, version 2, unless
7  otherwise indicated. See the COPYING file in the source
8  distribution for details.
9 
10  ----------------------------------------------------------------
11 
12  This file is part of Valgrind, a dynamic binary instrumentation
13  framework.
14 
15  Copyright (C) 2000-2013 Julian Seward. All rights reserved.
16 
17  Redistribution and use in source and binary forms, with or without
18  modification, are permitted provided that the following conditions
19  are met:
20 
21  1. Redistributions of source code must retain the above copyright
22  notice, this list of conditions and the following disclaimer.
23 
24  2. The origin of this software must not be misrepresented; you must
25  not claim that you wrote the original software. If you use this
26  software in a product, an acknowledgment in the product
27  documentation would be appreciated but is not required.
28 
29  3. Altered source versions must be plainly marked as such, and must
30  not be misrepresented as being the original software.
31 
32  4. The name of the author may not be used to endorse or promote
33  products derived from this software without specific prior written
34  permission.
35 
36  THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
37  OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
38  WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
39  ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
40  DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
41  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
42  GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
43  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
44  WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
45  NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
46  SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
47 
48  ----------------------------------------------------------------
49 
50  Notice that the above BSD-style license applies to this one file
51  (valgrind.h) only. The entire rest of Valgrind is licensed under
52  the terms of the GNU General Public License, version 2. See the
53  COPYING file in the source distribution for details.
54 
55  ----------------------------------------------------------------
56 */
57 
58 
59 /* This file is for inclusion into client (your!) code.
60 
61  You can use these macros to manipulate and query Valgrind's
62  execution inside your own programs.
63 
64  The resulting executables will still run without Valgrind, just a
65  little bit more slowly than they otherwise would, but otherwise
66  unchanged. When not running on valgrind, each client request
67  consumes very few (eg. 7) instructions, so the resulting performance
68  loss is negligible unless you plan to execute client requests
69  millions of times per second. Nevertheless, if that is still a
70  problem, you can compile with the NVALGRIND symbol defined (gcc
71  -DNVALGRIND) so that client requests are not even compiled in. */
72 
73 #ifndef __VALGRIND_H
74 #define __VALGRIND_H
75
76
77 /* ------------------------------------------------------------------ */
78 /* VERSION NUMBER OF VALGRIND */
79 /* ------------------------------------------------------------------ */
80
81 /* Specify Valgrind's version number, so that user code can
82  conditionally compile based on our version number. Note that these
83  were introduced at version 3.6 and so do not exist in version 3.5
84  or earlier. The recommended way to use them to check for "version
85  X.Y or later" is (eg)
86
87 #if defined(__VALGRIND_MAJOR__) && defined(__VALGRIND_MINOR__) \
88  && (__VALGRIND_MAJOR__ > 3 \
89  || (__VALGRIND_MAJOR__ == 3 && __VALGRIND_MINOR__ >= 6))
90 */
/* This copy of the header corresponds to Valgrind 3.9 (matching the
   2000-2013 copyright range above). */
91 #define __VALGRIND_MAJOR__ 3
92 #define __VALGRIND_MINOR__ 9
93
94
/* NOTE(review): <stdarg.h> is presumably needed by variadic helpers
   (e.g. printf-style client requests) defined later in the full file;
   nothing in this chunk uses it directly. */
95 #include <stdarg.h>
96 
97 /* Nb: this file might be included in a file compiled with -ansi. So
98  we can't use C++ style "//" comments nor the "asm" keyword (instead
99  use "__asm__"). */
100 
101 /* Derive some tags indicating what the target platform is. Note
102  that in this file we're using the compiler's CPP symbols for
103  identifying architectures, which are different to the ones we use
104  within the rest of Valgrind. Note, __powerpc__ is active for both
105  32 and 64-bit PPC, whereas __powerpc64__ is only active for the
106  latter (on Linux, that is).
107 
108  Misc note: how to find out what's predefined in gcc by default:
109  gcc -Wp,-dM somefile.c
110 */
/* Reset every platform tag first, so that exactly one of them (or none,
   on unsupported platforms) is defined by the #if/#elif chain below. */
111 #undef PLAT_x86_darwin
112 #undef PLAT_amd64_darwin
113 #undef PLAT_x86_win32
114 #undef PLAT_amd64_win64
115 #undef PLAT_x86_linux
116 #undef PLAT_amd64_linux
117 #undef PLAT_ppc32_linux
118 #undef PLAT_ppc64_linux
119 #undef PLAT_arm_linux
120 #undef PLAT_arm64_linux
121 #undef PLAT_s390x_linux
122 #undef PLAT_mips32_linux
123 #undef PLAT_mips64_linux
124
125
/* Map the compiler's predefined CPP symbols onto Valgrind's own
   PLAT_* tags.  Order matters only in that the first matching branch
   wins; the conditions are mutually exclusive by construction. */
126 #if defined(__APPLE__) && defined(__i386__)
127 # define PLAT_x86_darwin 1
128 #elif defined(__APPLE__) && defined(__x86_64__)
129 # define PLAT_amd64_darwin 1
130 #elif defined(__MINGW32__) || defined(__CYGWIN32__) \
131  || (defined(_WIN32) && defined(_M_IX86))
132 # define PLAT_x86_win32 1
133 #elif defined(__MINGW64__) || (defined(_WIN64) && defined(_M_X64))
134 # define PLAT_amd64_win64 1
135 #elif defined(__linux__) && defined(__i386__)
136 # define PLAT_x86_linux 1
137 #elif defined(__linux__) && defined(__x86_64__)
138 # define PLAT_amd64_linux 1
139 #elif defined(__linux__) && defined(__powerpc__) && !defined(__powerpc64__)
140 # define PLAT_ppc32_linux 1
141 #elif defined(__linux__) && defined(__powerpc__) && defined(__powerpc64__)
142 # define PLAT_ppc64_linux 1
143 #elif defined(__linux__) && defined(__arm__) && !defined(__aarch64__)
144 # define PLAT_arm_linux 1
145 #elif defined(__linux__) && defined(__aarch64__) && !defined(__arm__)
146 # define PLAT_arm64_linux 1
147 #elif defined(__linux__) && defined(__s390__) && defined(__s390x__)
148 # define PLAT_s390x_linux 1
149 #elif defined(__linux__) && defined(__mips__) && (__mips==64)
150 # define PLAT_mips64_linux 1
151 #elif defined(__linux__) && defined(__mips__) && (__mips!=64)
152 # define PLAT_mips32_linux 1
153 #else
154 /* If we're not compiling for our target platform, don't generate
155  any inline asms. */
156 # if !defined(NVALGRIND)
157 # define NVALGRIND 1
158 # endif
159 #endif
160 
161 
162 /* ------------------------------------------------------------------ */
163 /* ARCHITECTURE SPECIFICS for SPECIAL INSTRUCTIONS. There is nothing */
164 /* in here of use to end-users -- skip to the next section. */
165 /* ------------------------------------------------------------------ */
166 
167 /*
168  * VALGRIND_DO_CLIENT_REQUEST(): a statement that invokes a Valgrind client
169  * request. Accepts both pointers and integers as arguments.
170  *
171  * VALGRIND_DO_CLIENT_REQUEST_STMT(): a statement that invokes a Valgrind
172  * client request that does not return a value.
173 
174  * VALGRIND_DO_CLIENT_REQUEST_EXPR(): a C expression that invokes a Valgrind
175  * client request and whose value equals the client request result. Accepts
176  * both pointers and integers as arguments. Note that such calls are not
177  * necessarily pure functions -- they may have side effects.
178  */
179 
/* Statement form: evaluate the client request and store its result into
   the caller-supplied lvalue _zzq_rlval.  Thin wrapper over the
   platform-specific VALGRIND_DO_CLIENT_REQUEST_EXPR defined below. */
180 #define VALGRIND_DO_CLIENT_REQUEST(_zzq_rlval, _zzq_default, \
181  _zzq_request, _zzq_arg1, _zzq_arg2, \
182  _zzq_arg3, _zzq_arg4, _zzq_arg5) \
183  do { (_zzq_rlval) = VALGRIND_DO_CLIENT_REQUEST_EXPR((_zzq_default), \
184  (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
185  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
186
/* Statement form that discards the result; the default value passed to
   the EXPR form is an arbitrary 0 since the result is cast to void. */
187 #define VALGRIND_DO_CLIENT_REQUEST_STMT(_zzq_request, _zzq_arg1, \
188  _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
189  do { (void) VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
190  (_zzq_request), (_zzq_arg1), (_zzq_arg2), \
191  (_zzq_arg3), (_zzq_arg4), (_zzq_arg5)); } while (0)
192 
193 #if defined(NVALGRIND)
194
195 /* Define NVALGRIND to completely remove the Valgrind magic sequence
196  from the compiled code (analogous to NDEBUG's effects on
197  assert()) */
/* Stub: the whole expression collapses to the default value.  Note that
   _zzq_request and _zzq_arg1..5 do not appear in the expansion, so their
   argument expressions are NOT evaluated when NVALGRIND is defined. */
198 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
199  _zzq_default, _zzq_request, \
200  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
201  (_zzq_default)
202
203 #else /* ! NVALGRIND */
204 
205 /* The following defines the magic code sequences which the JITter
206  spots and handles magically. Don't look too closely at them as
207  they will rot your brain.
208 
209  The assembly code sequences for all architectures is in this one
210  file. This is because this file must be stand-alone, and we don't
211  want to have multiple files.
212 
213  For VALGRIND_DO_CLIENT_REQUEST, we must ensure that the default
214  value gets put in the return slot, so that everything works when
215  this is executed not under Valgrind. Args are passed in a memory
216  block, and so there's no intrinsic limit to the number that could
217  be passed, but it's currently five.
218 
219  The macro args are:
220  _zzq_rlval result lvalue
221  _zzq_default default value (result returned when running on real CPU)
222  _zzq_request request code
223  _zzq_arg1..5 request params
224 
225  The other two macros are used to support function wrapping, and are
226  a lot simpler. VALGRIND_GET_NR_CONTEXT returns the value of the
227  guest's NRADDR pseudo-register and whatever other information is
228  needed to safely run the call original from the wrapper: on
229  ppc64-linux, the R2 value at the divert point is also needed. This
230  information is abstracted into a user-visible type, OrigFn.
231 
232  VALGRIND_CALL_NOREDIR_* behaves the same as the following on the
233  guest, but guarantees that the branch instruction will not be
234  redirected: x86: call *%eax, amd64: call *%rax, ppc32/ppc64:
235  branch-and-link-to-r11. VALGRIND_CALL_NOREDIR is just text, not a
236  complete inline asm, since it needs to be combined with more magic
237  inline asm stuff to be useful.
238 */
239 
240 /* ------------------------- x86-{linux,darwin} ---------------- */
241 
242 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin) \
243  || (defined(PLAT_x86_win32) && defined(__GNUC__))
244
245 typedef
246  struct {
247  unsigned int nraddr; /* where's the code? */
248  }
249  OrigFn;
250
/* Four rotates of %edi totalling 3+13+29+19 = 64 bits, i.e. a multiple
   of the 32-bit register width: a no-op on a real CPU, but a magic
   marker sequence recognized by Valgrind's JIT. */
251 #define __SPECIAL_INSTRUCTION_PREAMBLE \
252  "roll $3, %%edi ; roll $13, %%edi\n\t" \
253  "roll $29, %%edi ; roll $19, %%edi\n\t"
254
/* Pack request code + 5 args into a 6-word block, point %eax at it, and
   preload %edx with the default; "xchgl %ebx,%ebx" after the preamble is
   the client-request marker.  Under Valgrind %edx receives the result;
   on a real CPU the asm is a no-op and %edx keeps the default. */
255 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
256  _zzq_default, _zzq_request, \
257  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
258  __extension__ \
259  ({volatile unsigned int _zzq_args[6]; \
260  volatile unsigned int _zzq_result; \
261  _zzq_args[0] = (unsigned int)(_zzq_request); \
262  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
263  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
264  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
265  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
266  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
267  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
268  /* %EDX = client_request ( %EAX ) */ \
269  "xchgl %%ebx,%%ebx" \
270  : "=d" (_zzq_result) \
271  : "a" (&_zzq_args[0]), "0" (_zzq_default) \
272  : "cc", "memory" \
273  ); \
274  _zzq_result; \
275  })
276
/* Fetch the guest NRADDR pseudo-register (marker: xchgl %ecx,%ecx) into
   the caller's OrigFn, for use by function wrappers. */
277 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
278  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
279  volatile unsigned int __addr; \
280  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
281  /* %EAX = guest_NRADDR */ \
282  "xchgl %%ecx,%%ecx" \
283  : "=a" (__addr) \
284  : \
285  : "cc", "memory" \
286  ); \
287  _zzq_orig->nraddr = __addr; \
288  }
289
/* Asm-text fragment (not a complete asm statement): a non-redirected
   call through %eax, marked by xchgl %edx,%edx. */
290 #define VALGRIND_CALL_NOREDIR_EAX \
291  __SPECIAL_INSTRUCTION_PREAMBLE \
292  /* call-noredir *%EAX */ \
293  "xchgl %%edx,%%edx\n\t"
294
/* Marker (xchgl %edi,%edi) asking VEX to inject IR at this point. */
295 #define VALGRIND_VEX_INJECT_IR() \
296  do { \
297  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
298  "xchgl %%edi,%%edi\n\t" \
299  : : : "cc", "memory" \
300  ); \
301  } while (0)
302
303 #endif /* PLAT_x86_linux || PLAT_x86_darwin || (PLAT_x86_win32 && __GNUC__) */
304 
305 /* ------------------------- x86-Win32 ------------------------- */
306 
307 #if defined(PLAT_x86_win32) && !defined(__GNUC__)
308
309 typedef
310  struct {
311  unsigned int nraddr; /* where's the code? */
312  }
313  OrigFn;
314
315 #if defined(_MSC_VER)
316
/* MSVC inline-asm version of the magic preamble: the same four rotates
   of edi (3+13+29+19 = 64 bits, a no-op on a real CPU). */
317 #define __SPECIAL_INSTRUCTION_PREAMBLE \
318  __asm rol edi, 3 __asm rol edi, 13 \
319  __asm rol edi, 29 __asm rol edi, 19
320
/* MSVC cannot express the GCC statement-expression, so the EXPR form
   forwards to a real helper function below. */
321 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
322  _zzq_default, _zzq_request, \
323  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
324  valgrind_do_client_request_expr((uintptr_t)(_zzq_default), \
325  (uintptr_t)(_zzq_request), (uintptr_t)(_zzq_arg1), \
326  (uintptr_t)(_zzq_arg2), (uintptr_t)(_zzq_arg3), \
327  (uintptr_t)(_zzq_arg4), (uintptr_t)(_zzq_arg5))
328
/* Helper: packs the request block, points eax at it, loads the default
   into edx, and executes the marker (xchg ebx,ebx).  Returns edx, which
   Valgrind overwrites with the request result; unchanged on a real CPU. */
329 static __inline uintptr_t
330 valgrind_do_client_request_expr(uintptr_t _zzq_default, uintptr_t _zzq_request,
331  uintptr_t _zzq_arg1, uintptr_t _zzq_arg2,
332  uintptr_t _zzq_arg3, uintptr_t _zzq_arg4,
333  uintptr_t _zzq_arg5)
334 {
335  volatile uintptr_t _zzq_args[6];
336  volatile unsigned int _zzq_result;
337  _zzq_args[0] = (uintptr_t)(_zzq_request);
338  _zzq_args[1] = (uintptr_t)(_zzq_arg1);
339  _zzq_args[2] = (uintptr_t)(_zzq_arg2);
340  _zzq_args[3] = (uintptr_t)(_zzq_arg3);
341  _zzq_args[4] = (uintptr_t)(_zzq_arg4);
342  _zzq_args[5] = (uintptr_t)(_zzq_arg5);
343  __asm { __asm lea eax, _zzq_args __asm mov edx, _zzq_default
344  __SPECIAL_INSTRUCTION_PREAMBLE
345  /* %EDX = client_request ( %EAX ) */
346  __asm xchg ebx,ebx
347  __asm mov _zzq_result, edx
348  }
349  return _zzq_result;
350 }
351
/* Fetch the guest NRADDR pseudo-register (marker: xchg ecx,ecx). */
352 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
353  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
354  volatile unsigned int __addr; \
355  __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
356  /* %EAX = guest_NRADDR */ \
357  __asm xchg ecx,ecx \
358  __asm mov __addr, eax \
359  } \
360  _zzq_orig->nraddr = __addr; \
361  }
362
/* Deliberately expands to the bare token ERROR: any attempted use fails
   to compile, since no-redirect calls are not provided under MSVC. */
363 #define VALGRIND_CALL_NOREDIR_EAX ERROR
364
365 #define VALGRIND_VEX_INJECT_IR() \
366  do { \
367  __asm { __SPECIAL_INSTRUCTION_PREAMBLE \
368  __asm xchg edi,edi \
369  } \
370  } while (0)
371
372 #else
373 #error Unsupported compiler.
374 #endif
375
376 #endif /* PLAT_x86_win32 */
377 
378 /* ------------------------ amd64-{linux,darwin} --------------- */
379 
380 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
381
382 typedef
383  struct {
384  unsigned long long int nraddr; /* where's the code? */
385  }
386  OrigFn;
387
/* Four rotates of %rdi totalling 3+13+61+51 = 128 bits, a multiple of
   the 64-bit register width: a no-op on a real CPU, magic marker for
   Valgrind's JIT. */
388 #define __SPECIAL_INSTRUCTION_PREAMBLE \
389  "rolq $3, %%rdi ; rolq $13, %%rdi\n\t" \
390  "rolq $61, %%rdi ; rolq $51, %%rdi\n\t"
391
/* 64-bit analogue of the x86 version: args block in %rax, default
   preloaded into %rdx, marker "xchgq %rbx,%rbx"; %rdx carries the
   result (or the untouched default on a real CPU). */
392 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
393  _zzq_default, _zzq_request, \
394  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
395  __extension__ \
396  ({ volatile unsigned long long int _zzq_args[6]; \
397  volatile unsigned long long int _zzq_result; \
398  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
399  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
400  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
401  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
402  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
403  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
404  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
405  /* %RDX = client_request ( %RAX ) */ \
406  "xchgq %%rbx,%%rbx" \
407  : "=d" (_zzq_result) \
408  : "a" (&_zzq_args[0]), "0" (_zzq_default) \
409  : "cc", "memory" \
410  ); \
411  _zzq_result; \
412  })
413
/* Fetch the guest NRADDR pseudo-register (marker: xchgq %rcx,%rcx). */
414 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
415  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
416  volatile unsigned long long int __addr; \
417  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
418  /* %RAX = guest_NRADDR */ \
419  "xchgq %%rcx,%%rcx" \
420  : "=a" (__addr) \
421  : \
422  : "cc", "memory" \
423  ); \
424  _zzq_orig->nraddr = __addr; \
425  }
426
/* Asm-text fragment: non-redirected call through %rax. */
427 #define VALGRIND_CALL_NOREDIR_RAX \
428  __SPECIAL_INSTRUCTION_PREAMBLE \
429  /* call-noredir *%RAX */ \
430  "xchgq %%rdx,%%rdx\n\t"
431
432 #define VALGRIND_VEX_INJECT_IR() \
433  do { \
434  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
435  "xchgq %%rdi,%%rdi\n\t" \
436  : : : "cc", "memory" \
437  ); \
438  } while (0)
439
440 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
441 
442 /* ------------------------ ppc32-linux ------------------------ */
443 
444 #if defined(PLAT_ppc32_linux)
445
446 typedef
447  struct {
448  unsigned int nraddr; /* where's the code? */
449  }
450  OrigFn;
451
/* Four rlwinm rotations of r0 into itself (3+13+29+19 = 64 bits, a
   multiple of the 32-bit register width): a no-op on real hardware,
   magic marker for Valgrind's JIT. */
452 #define __SPECIAL_INSTRUCTION_PREAMBLE \
453  "rlwinm 0,0,3,0,31 ; rlwinm 0,0,13,0,31\n\t" \
454  "rlwinm 0,0,29,0,31 ; rlwinm 0,0,19,0,31\n\t"
455
/* Default goes into r3, args-block pointer into r4; marker "or 1,1,1"
   requests the client call; result is copied back out of r3. */
456 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
457  _zzq_default, _zzq_request, \
458  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
459  \
460  __extension__ \
461  ({ unsigned int _zzq_args[6]; \
462  unsigned int _zzq_result; \
463  unsigned int* _zzq_ptr; \
464  _zzq_args[0] = (unsigned int)(_zzq_request); \
465  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
466  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
467  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
468  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
469  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
470  _zzq_ptr = _zzq_args; \
471  __asm__ volatile("mr 3,%1\n\t" /*default*/ \
472  "mr 4,%2\n\t" /*ptr*/ \
473  __SPECIAL_INSTRUCTION_PREAMBLE \
474  /* %R3 = client_request ( %R4 ) */ \
475  "or 1,1,1\n\t" \
476  "mr %0,3" /*result*/ \
477  : "=b" (_zzq_result) \
478  : "b" (_zzq_default), "b" (_zzq_ptr) \
479  : "cc", "memory", "r3", "r4"); \
480  _zzq_result; \
481  })
482
/* Fetch the guest NRADDR pseudo-register (marker: or 2,2,2). */
483 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
484  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
485  unsigned int __addr; \
486  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
487  /* %R3 = guest_NRADDR */ \
488  "or 2,2,2\n\t" \
489  "mr %0,3" \
490  : "=b" (__addr) \
491  : \
492  : "cc", "memory", "r3" \
493  ); \
494  _zzq_orig->nraddr = __addr; \
495  }
496
/* Asm-text fragment: non-redirected branch-and-link through r11. */
497 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
498  __SPECIAL_INSTRUCTION_PREAMBLE \
499  /* branch-and-link-to-noredir *%R11 */ \
500  "or 3,3,3\n\t"
501
502 #define VALGRIND_VEX_INJECT_IR() \
503  do { \
504  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
505  "or 5,5,5\n\t" \
506  ); \
507  } while (0)
508
509 #endif /* PLAT_ppc32_linux */
510 
511 /* ------------------------ ppc64-linux ------------------------ */
512 
513 #if defined(PLAT_ppc64_linux)
514
515 typedef
516  struct {
517  unsigned long long int nraddr; /* where's the code? */
518  unsigned long long int r2; /* what tocptr do we need? */
519  }
520  OrigFn;
521
/* Four rotldi rotations of r0 (3+13+61+51 = 128 bits, a multiple of the
   64-bit register width): a no-op on real hardware, magic marker for
   Valgrind's JIT. */
522 #define __SPECIAL_INSTRUCTION_PREAMBLE \
523  "rotldi 0,0,3 ; rotldi 0,0,13\n\t" \
524  "rotldi 0,0,61 ; rotldi 0,0,51\n\t"
525
/* Same protocol as ppc32, with 64-bit words: default in r3, args-block
   pointer in r4, marker "or 1,1,1", result read back from r3. */
526 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
527  _zzq_default, _zzq_request, \
528  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
529  \
530  __extension__ \
531  ({ unsigned long long int _zzq_args[6]; \
532  unsigned long long int _zzq_result; \
533  unsigned long long int* _zzq_ptr; \
534  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
535  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
536  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
537  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
538  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
539  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
540  _zzq_ptr = _zzq_args; \
541  __asm__ volatile("mr 3,%1\n\t" /*default*/ \
542  "mr 4,%2\n\t" /*ptr*/ \
543  __SPECIAL_INSTRUCTION_PREAMBLE \
544  /* %R3 = client_request ( %R4 ) */ \
545  "or 1,1,1\n\t" \
546  "mr %0,3" /*result*/ \
547  : "=b" (_zzq_result) \
548  : "b" (_zzq_default), "b" (_zzq_ptr) \
549  : "cc", "memory", "r3", "r4"); \
550  _zzq_result; \
551  })
552
/* Fetch both NRADDR (marker: or 2,2,2) and the R2/TOC value at the
   divert point (marker: or 4,4,4) -- ppc64 wrappers need both to call
   the original function safely. */
553 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
554  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
555  unsigned long long int __addr; \
556  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
557  /* %R3 = guest_NRADDR */ \
558  "or 2,2,2\n\t" \
559  "mr %0,3" \
560  : "=b" (__addr) \
561  : \
562  : "cc", "memory", "r3" \
563  ); \
564  _zzq_orig->nraddr = __addr; \
565  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
566  /* %R3 = guest_NRADDR_GPR2 */ \
567  "or 4,4,4\n\t" \
568  "mr %0,3" \
569  : "=b" (__addr) \
570  : \
571  : "cc", "memory", "r3" \
572  ); \
573  _zzq_orig->r2 = __addr; \
574  }
575
/* Asm-text fragment: non-redirected branch-and-link through r11. */
576 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
577  __SPECIAL_INSTRUCTION_PREAMBLE \
578  /* branch-and-link-to-noredir *%R11 */ \
579  "or 3,3,3\n\t"
580
581 #define VALGRIND_VEX_INJECT_IR() \
582  do { \
583  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
584  "or 5,5,5\n\t" \
585  ); \
586  } while (0)
587
588 #endif /* PLAT_ppc64_linux */
589 
590 /* ------------------------- arm-linux ------------------------- */
591 
592 #if defined(PLAT_arm_linux)
593
594 typedef
595  struct {
596  unsigned int nraddr; /* where's the code? */
597  }
598  OrigFn;
599
/* Four rotates of r12 totalling 3+13+29+19 = 64 bits, a multiple of the
   32-bit register width: a no-op on real hardware, magic marker for
   Valgrind's JIT. */
600 #define __SPECIAL_INSTRUCTION_PREAMBLE \
601  "mov r12, r12, ror #3 ; mov r12, r12, ror #13 \n\t" \
602  "mov r12, r12, ror #29 ; mov r12, r12, ror #19 \n\t"
603
/* Default in r3, args-block pointer in r4, marker "orr r10,r10,r10";
   result read back from r3 (untouched default on a real CPU). */
604 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
605  _zzq_default, _zzq_request, \
606  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
607  \
608  __extension__ \
609  ({volatile unsigned int _zzq_args[6]; \
610  volatile unsigned int _zzq_result; \
611  _zzq_args[0] = (unsigned int)(_zzq_request); \
612  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
613  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
614  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
615  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
616  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
617  __asm__ volatile("mov r3, %1\n\t" /*default*/ \
618  "mov r4, %2\n\t" /*ptr*/ \
619  __SPECIAL_INSTRUCTION_PREAMBLE \
620  /* R3 = client_request ( R4 ) */ \
621  "orr r10, r10, r10\n\t" \
622  "mov %0, r3" /*result*/ \
623  : "=r" (_zzq_result) \
624  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
625  : "cc","memory", "r3", "r4"); \
626  _zzq_result; \
627  })
628
/* Fetch the guest NRADDR pseudo-register (marker: orr r11,r11,r11). */
629 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
630  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
631  unsigned int __addr; \
632  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
633  /* R3 = guest_NRADDR */ \
634  "orr r11, r11, r11\n\t" \
635  "mov %0, r3" \
636  : "=r" (__addr) \
637  : \
638  : "cc", "memory", "r3" \
639  ); \
640  _zzq_orig->nraddr = __addr; \
641  }
642
/* Asm-text fragment: non-redirected branch-and-link through r4. */
643 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
644  __SPECIAL_INSTRUCTION_PREAMBLE \
645  /* branch-and-link-to-noredir *%R4 */ \
646  "orr r12, r12, r12\n\t"
647
648 #define VALGRIND_VEX_INJECT_IR() \
649  do { \
650  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
651  "orr r9, r9, r9\n\t" \
652  : : : "cc", "memory" \
653  ); \
654  } while (0)
655
656 #endif /* PLAT_arm_linux */
657 
658 /* ------------------------ arm64-linux ------------------------- */
659 
660 #if defined(PLAT_arm64_linux)
661
662 typedef
663  struct {
664  unsigned long long int nraddr; /* where's the code? */
665  }
666  OrigFn;
667
/* Four rotates of x12 totalling 3+13+51+61 = 128 bits, a multiple of
   the 64-bit register width: a no-op on real hardware, magic marker for
   Valgrind's JIT. */
668 #define __SPECIAL_INSTRUCTION_PREAMBLE \
669  "ror x12, x12, #3 ; ror x12, x12, #13 \n\t" \
670  "ror x12, x12, #51 ; ror x12, x12, #61 \n\t"
671
/* Default in x3, args-block pointer in x4, marker "orr x10,x10,x10";
   result read back from x3 (untouched default on a real CPU). */
672 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
673  _zzq_default, _zzq_request, \
674  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
675  \
676  __extension__ \
677  ({volatile unsigned long long int _zzq_args[6]; \
678  volatile unsigned long long int _zzq_result; \
679  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
680  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
681  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
682  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
683  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
684  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
685  __asm__ volatile("mov x3, %1\n\t" /*default*/ \
686  "mov x4, %2\n\t" /*ptr*/ \
687  __SPECIAL_INSTRUCTION_PREAMBLE \
688  /* X3 = client_request ( X4 ) */ \
689  "orr x10, x10, x10\n\t" \
690  "mov %0, x3" /*result*/ \
691  : "=r" (_zzq_result) \
692  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
693  : "cc","memory", "x3", "x4"); \
694  _zzq_result; \
695  })
696
/* Fetch the guest NRADDR pseudo-register (marker: orr x11,x11,x11). */
697 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
698  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
699  unsigned long long int __addr; \
700  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
701  /* X3 = guest_NRADDR */ \
702  "orr x11, x11, x11\n\t" \
703  "mov %0, x3" \
704  : "=r" (__addr) \
705  : \
706  : "cc", "memory", "x3" \
707  ); \
708  _zzq_orig->nraddr = __addr; \
709  }
710
/* Asm-text fragment: non-redirected branch-and-link through x8. */
711 #define VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
712  __SPECIAL_INSTRUCTION_PREAMBLE \
713  /* branch-and-link-to-noredir X8 */ \
714  "orr x12, x12, x12\n\t"
715
716 #define VALGRIND_VEX_INJECT_IR() \
717  do { \
718  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
719  "orr x9, x9, x9\n\t" \
720  : : : "cc", "memory" \
721  ); \
722  } while (0)
723
724 #endif /* PLAT_arm64_linux */
725 
726 /* ------------------------ s390x-linux ------------------------ */
727 
728 #if defined(PLAT_s390x_linux)
729
730 typedef
731  struct {
732  unsigned long long int nraddr; /* where's the code? */
733  }
734  OrigFn;
735
736 /* __SPECIAL_INSTRUCTION_PREAMBLE will be used to identify Valgrind specific
737  * code. This detection is implemented in platform specific toIR.c
738  * (e.g. VEX/priv/guest_s390_decoder.c).
739  */
/* Four "lr r,r" register self-copies: no-ops on real hardware, magic
   marker for Valgrind's JIT. */
740 #define __SPECIAL_INSTRUCTION_PREAMBLE \
741  "lr 15,15\n\t" \
742  "lr 1,1\n\t" \
743  "lr 2,2\n\t" \
744  "lr 3,3\n\t"
745
/* The single "lr" instruction following the preamble selects which
   request type this is. */
746 #define __CLIENT_REQUEST_CODE "lr 2,2\n\t"
747 #define __GET_NR_CONTEXT_CODE "lr 3,3\n\t"
748 #define __CALL_NO_REDIR_CODE "lr 4,4\n\t"
749 #define __VEX_INJECT_IR_CODE "lr 5,5\n\t"
750
/* Args-block pointer in r2, default in r3; result read back from r3
   (untouched default on a real CPU). */
751 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
752  _zzq_default, _zzq_request, \
753  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
754  __extension__ \
755  ({volatile unsigned long long int _zzq_args[6]; \
756  volatile unsigned long long int _zzq_result; \
757  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
758  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
759  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
760  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
761  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
762  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
763  __asm__ volatile(/* r2 = args */ \
764  "lgr 2,%1\n\t" \
765  /* r3 = default */ \
766  "lgr 3,%2\n\t" \
767  __SPECIAL_INSTRUCTION_PREAMBLE \
768  __CLIENT_REQUEST_CODE \
769  /* results = r3 */ \
770  "lgr %0, 3\n\t" \
771  : "=d" (_zzq_result) \
772  : "a" (&_zzq_args[0]), "0" (_zzq_default) \
773  : "cc", "2", "3", "memory" \
774  ); \
775  _zzq_result; \
776  })
777
/* Fetch the guest NRADDR pseudo-register via __GET_NR_CONTEXT_CODE. */
778 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
779  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
780  volatile unsigned long long int __addr; \
781  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
782  __GET_NR_CONTEXT_CODE \
783  "lgr %0, 3\n\t" \
784  : "=a" (__addr) \
785  : \
786  : "cc", "3", "memory" \
787  ); \
788  _zzq_orig->nraddr = __addr; \
789  }
790
/* Asm-text fragment: non-redirected call through r1. */
791 #define VALGRIND_CALL_NOREDIR_R1 \
792  __SPECIAL_INSTRUCTION_PREAMBLE \
793  __CALL_NO_REDIR_CODE
794
795 #define VALGRIND_VEX_INJECT_IR() \
796  do { \
797  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
798  __VEX_INJECT_IR_CODE); \
799  } while (0)
800
801 #endif /* PLAT_s390x_linux */
802 
803 /* ------------------------- mips32-linux ---------------- */
804 
805 #if defined(PLAT_mips32_linux)
806
807 typedef
808  struct {
809  unsigned int nraddr; /* where's the code? */
810  }
811  OrigFn;
812
813 /* .word 0x342
814  * .word 0x742
815  * .word 0xC2
816  * .word 0x4C2*/
/* Four shifts of $0 (the hardwired zero register): architectural no-ops
   on real hardware, magic marker for Valgrind's JIT. */
817 #define __SPECIAL_INSTRUCTION_PREAMBLE \
818  "srl $0, $0, 13\n\t" \
819  "srl $0, $0, 29\n\t" \
820  "srl $0, $0, 3\n\t" \
821  "srl $0, $0, 19\n\t"
822
/* Default in $11 (t3), args-block pointer in $12 (t4), marker
   "or $13,$13,$13"; result read back from $11. */
823 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
824  _zzq_default, _zzq_request, \
825  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
826  __extension__ \
827  ({ volatile unsigned int _zzq_args[6]; \
828  volatile unsigned int _zzq_result; \
829  _zzq_args[0] = (unsigned int)(_zzq_request); \
830  _zzq_args[1] = (unsigned int)(_zzq_arg1); \
831  _zzq_args[2] = (unsigned int)(_zzq_arg2); \
832  _zzq_args[3] = (unsigned int)(_zzq_arg3); \
833  _zzq_args[4] = (unsigned int)(_zzq_arg4); \
834  _zzq_args[5] = (unsigned int)(_zzq_arg5); \
835  __asm__ volatile("move $11, %1\n\t" /*default*/ \
836  "move $12, %2\n\t" /*ptr*/ \
837  __SPECIAL_INSTRUCTION_PREAMBLE \
838  /* T3 = client_request ( T4 ) */ \
839  "or $13, $13, $13\n\t" \
840  "move %0, $11\n\t" /*result*/ \
841  : "=r" (_zzq_result) \
842  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
843  : "$11", "$12"); \
844  _zzq_result; \
845  })
846
/* Fetch the guest NRADDR pseudo-register (marker: or $14,$14,$14). */
847 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
848  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
849  volatile unsigned int __addr; \
850  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
851  /* %t9 = guest_NRADDR */ \
852  "or $14, $14, $14\n\t" \
853  "move %0, $11" /*result*/ \
854  : "=r" (__addr) \
855  : \
856  : "$11" \
857  ); \
858  _zzq_orig->nraddr = __addr; \
859  }
860
/* Asm-text fragment: non-redirected call through $t9. */
861 #define VALGRIND_CALL_NOREDIR_T9 \
862  __SPECIAL_INSTRUCTION_PREAMBLE \
863  /* call-noredir *%t9 */ \
864  "or $15, $15, $15\n\t"
865
866 #define VALGRIND_VEX_INJECT_IR() \
867  do { \
868  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
869  "or $11, $11, $11\n\t" \
870  ); \
871  } while (0)
872
873
874 #endif /* PLAT_mips32_linux */
875 
876 /* ------------------------- mips64-linux ---------------- */
877 
878 #if defined(PLAT_mips64_linux)
879
880 typedef
881  struct {
882  unsigned long long nraddr; /* where's the code? */
883  }
884  OrigFn;
885
886 /* dsll $0,$0, 3
887  * dsll $0,$0, 13
888  * dsll $0,$0, 29
889  * dsll $0,$0, 19*/
/* Four doubleword shifts of $0 (the hardwired zero register):
   architectural no-ops on real hardware, magic marker for Valgrind's
   JIT. */
890 #define __SPECIAL_INSTRUCTION_PREAMBLE \
891  "dsll $0,$0, 3 ; dsll $0,$0,13\n\t" \
892  "dsll $0,$0,29 ; dsll $0,$0,19\n\t"
893
/* 64-bit analogue of the mips32 version: default in $11, args-block
   pointer in $12, marker "or $13,$13,$13"; result read back from $11. */
894 #define VALGRIND_DO_CLIENT_REQUEST_EXPR( \
895  _zzq_default, _zzq_request, \
896  _zzq_arg1, _zzq_arg2, _zzq_arg3, _zzq_arg4, _zzq_arg5) \
897  __extension__ \
898  ({ volatile unsigned long long int _zzq_args[6]; \
899  volatile unsigned long long int _zzq_result; \
900  _zzq_args[0] = (unsigned long long int)(_zzq_request); \
901  _zzq_args[1] = (unsigned long long int)(_zzq_arg1); \
902  _zzq_args[2] = (unsigned long long int)(_zzq_arg2); \
903  _zzq_args[3] = (unsigned long long int)(_zzq_arg3); \
904  _zzq_args[4] = (unsigned long long int)(_zzq_arg4); \
905  _zzq_args[5] = (unsigned long long int)(_zzq_arg5); \
906  __asm__ volatile("move $11, %1\n\t" /*default*/ \
907  "move $12, %2\n\t" /*ptr*/ \
908  __SPECIAL_INSTRUCTION_PREAMBLE \
909  /* $11 = client_request ( $12 ) */ \
910  "or $13, $13, $13\n\t" \
911  "move %0, $11\n\t" /*result*/ \
912  : "=r" (_zzq_result) \
913  : "r" (_zzq_default), "r" (&_zzq_args[0]) \
914  : "$11", "$12"); \
915  _zzq_result; \
916  })
917
/* Fetch the guest NRADDR pseudo-register (marker: or $14,$14,$14). */
918 #define VALGRIND_GET_NR_CONTEXT(_zzq_rlval) \
919  { volatile OrigFn* _zzq_orig = &(_zzq_rlval); \
920  volatile unsigned long long int __addr; \
921  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
922  /* $11 = guest_NRADDR */ \
923  "or $14, $14, $14\n\t" \
924  "move %0, $11" /*result*/ \
925  : "=r" (__addr) \
926  : \
927  : "$11"); \
928  _zzq_orig->nraddr = __addr; \
929  }
930
/* Asm-text fragment: non-redirected call through $25 (t9). */
931 #define VALGRIND_CALL_NOREDIR_T9 \
932  __SPECIAL_INSTRUCTION_PREAMBLE \
933  /* call-noredir $25 */ \
934  "or $15, $15, $15\n\t"
935
936 #define VALGRIND_VEX_INJECT_IR() \
937  do { \
938  __asm__ volatile(__SPECIAL_INSTRUCTION_PREAMBLE \
939  "or $11, $11, $11\n\t" \
940  ); \
941  } while (0)
942
943 #endif /* PLAT_mips64_linux */
944 
945 /* Insert assembly code for other platforms here... */
946 
947 #endif /* NVALGRIND */
948 
949 
950 /* ------------------------------------------------------------------ */
951 /* PLATFORM SPECIFICS for FUNCTION WRAPPING. This is all very */
952 /* ugly. It's the least-worst tradeoff I can think of. */
953 /* ------------------------------------------------------------------ */
954 
955 /* This section defines magic (a.k.a appalling-hack) macros for doing
956  guaranteed-no-redirection macros, so as to get from function
957  wrappers to the functions they are wrapping. The whole point is to
958  construct standard call sequences, but to do the call itself with a
959  special no-redirect call pseudo-instruction that the JIT
960  understands and handles specially. This section is long and
961  repetitious, and I can't see a way to make it shorter.
962 
963  The naming scheme is as follows:
964 
965  CALL_FN_{W,v}_{v,W,WW,WWW,WWWW,5W,6W,7W,etc}
966 
967  'W' stands for "word" and 'v' for "void". Hence there are
968  different macros for calling arity 0, 1, 2, 3, 4, etc, functions,
969  and for each, the possibility of returning a word-typed result, or
970  no result.
971 */
972 
973 /* Use these to write the name of your wrapper. NOTE: duplicates
974  VG_WRAP_FUNCTION_Z{U,Z} in pub_tool_redir.h. NOTE also: inserts
 975  the default behaviour equivalence class tag "0000" into the name.
976  See pub_tool_redir.h for details -- normally you don't need to
977  think about this, though. */
978 
979 /* Use an extra level of macroisation so as to ensure the soname/fnname
980  args are fully macro-expanded before pasting them together. */
/* Token-paste four arguments AFTER they have been macro-expanded (the
   extra macro level above guarantees the expansion happens first). */
981 #define VG_CONCAT4(_aa,_bb,_cc,_dd) _aa##_bb##_cc##_dd
982 
/* Construct the mangled symbol name _vgw00000ZU_<soname>_<fnname> that
   marks a function as a Valgrind wrapper.  The ZU/ZZ variants differ
   only in the tag embedded in the name; see pub_tool_redir.h for the
   encoding semantics. */
983 #define I_WRAP_SONAME_FNNAME_ZU(soname,fnname) \
984  VG_CONCAT4(_vgw00000ZU_,soname,_,fnname)
985 
986 #define I_WRAP_SONAME_FNNAME_ZZ(soname,fnname) \
987  VG_CONCAT4(_vgw00000ZZ_,soname,_,fnname)
988 
989 /* Use this macro from within a wrapper function to collect the
990  context (address and possibly other info) of the original function.
991  Once you have that you can then use it in one of the CALL_FN_
992  macros. The type of the argument _lval is OrigFn. */
993 #define VALGRIND_GET_ORIG_FN(_lval) VALGRIND_GET_NR_CONTEXT(_lval)
994 
995 /* Also provide end-user facilities for function replacement, rather
996  than wrapping. A replacement function differs from a wrapper in
997  that it has no way to get hold of the original function being
998  called, and hence no way to call onwards to it. In a replacement
999  function, VALGRIND_GET_ORIG_FN always returns zero. */
1000 
/* Same naming scheme as the wrapper macros above, but with the _vgr
   prefix marking a replacement rather than a wrapper. */
1001 #define I_REPLACE_SONAME_FNNAME_ZU(soname,fnname) \
1002  VG_CONCAT4(_vgr00000ZU_,soname,_,fnname)
1003 
1004 #define I_REPLACE_SONAME_FNNAME_ZZ(soname,fnname) \
1005  VG_CONCAT4(_vgr00000ZZ_,soname,_,fnname)
1006 
1007 /* Derivatives of the main macros below, for calling functions
1008  returning void. */
1009 
/* void-returning call variants: each simply invokes the corresponding
   word-returning CALL_FN_W_* macro and discards the result into a
   volatile dummy (volatile so the compiler cannot elide the call). */
1010 #define CALL_FN_v_v(fnptr) \
1011  do { volatile unsigned long _junk; \
1012  CALL_FN_W_v(_junk,fnptr); } while (0)
1013 
1014 #define CALL_FN_v_W(fnptr, arg1) \
1015  do { volatile unsigned long _junk; \
1016  CALL_FN_W_W(_junk,fnptr,arg1); } while (0)
1017 
1018 #define CALL_FN_v_WW(fnptr, arg1,arg2) \
1019  do { volatile unsigned long _junk; \
1020  CALL_FN_W_WW(_junk,fnptr,arg1,arg2); } while (0)
1021 
1022 #define CALL_FN_v_WWW(fnptr, arg1,arg2,arg3) \
1023  do { volatile unsigned long _junk; \
1024  CALL_FN_W_WWW(_junk,fnptr,arg1,arg2,arg3); } while (0)
1025 
1026 #define CALL_FN_v_WWWW(fnptr, arg1,arg2,arg3,arg4) \
1027  do { volatile unsigned long _junk; \
1028  CALL_FN_W_WWWW(_junk,fnptr,arg1,arg2,arg3,arg4); } while (0)
1029 
1030 #define CALL_FN_v_5W(fnptr, arg1,arg2,arg3,arg4,arg5) \
1031  do { volatile unsigned long _junk; \
1032  CALL_FN_W_5W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5); } while (0)
1033 
1034 #define CALL_FN_v_6W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6) \
1035  do { volatile unsigned long _junk; \
1036  CALL_FN_W_6W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6); } while (0)
1037 
1038 #define CALL_FN_v_7W(fnptr, arg1,arg2,arg3,arg4,arg5,arg6,arg7) \
1039  do { volatile unsigned long _junk; \
1040  CALL_FN_W_7W(_junk,fnptr,arg1,arg2,arg3,arg4,arg5,arg6,arg7); } while (0)
1041 
1042 /* ------------------------- x86-{linux,darwin} ---------------- */
1043 
1044 #if defined(PLAT_x86_linux) || defined(PLAT_x86_darwin)
1045 
1046 /* These regs are trashed by the hidden call. No need to mention eax
1047  as gcc can already see that, plus causes gcc to bomb. */
/* eax is deliberately absent: it is the asm's "=a" output register in
   every CALL_FN_ macro below, so listing it would also upset gcc. */
1048 #define __CALLER_SAVED_REGS /*"eax"*/ "ecx", "edx"
1049 
1050 /* Macros to save and align the stack before making a function
1051  call and restore it afterwards as gcc may not keep the stack
1052  pointer aligned if it doesn't realise calls are being made
1053  to other functions. */
1054 
/* Save %esp in %edi (hence %edi appears in every trash list below),
   then round %esp down to a 16-byte boundary. */
1055 #define VALGRIND_ALIGN_STACK \
1056  "movl %%esp,%%edi\n\t" \
1057  "andl $0xfffffff0,%%esp\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the %esp saved in %edi. */
1058 #define VALGRIND_RESTORE_STACK \
1059  "movl %%edi,%%esp\n\t"
1060 
1061 /* These CALL_FN_ macros assume that on x86-linux, sizeof(unsigned
1062  long) == 4. */
1063 
/* x86: call the 0-arg function at _orig.nraddr without redirection and
   store its word-sized result in lval.  %eax carries &_argvec[0] in and
   the return value out ("=a"). */
1064 #define CALL_FN_W_v(lval, orig) \
1065  do { \
1066  volatile OrigFn _orig = (orig); \
1067  volatile unsigned long _argvec[1]; \
1068  volatile unsigned long _res; \
1069  _argvec[0] = (unsigned long)_orig.nraddr; \
1070  __asm__ volatile( \
1071  VALGRIND_ALIGN_STACK \
1072  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1073  VALGRIND_CALL_NOREDIR_EAX \
1074  VALGRIND_RESTORE_STACK \
1075  : /*out*/ "=a" (_res) \
1076  : /*in*/ "a" (&_argvec[0]) \
1077  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1078  ); \
1079  lval = (__typeof__(lval)) _res; \
1080  } while (0)
1081 
/* x86, 1 arg: the leading "subl $12" pads so that 12+4 pushed bytes
   keep %esp 16-byte aligned at the call. */
1082 #define CALL_FN_W_W(lval, orig, arg1) \
1083  do { \
1084  volatile OrigFn _orig = (orig); \
1085  volatile unsigned long _argvec[2]; \
1086  volatile unsigned long _res; \
1087  _argvec[0] = (unsigned long)_orig.nraddr; \
1088  _argvec[1] = (unsigned long)(arg1); \
1089  __asm__ volatile( \
1090  VALGRIND_ALIGN_STACK \
1091  "subl $12, %%esp\n\t" \
1092  "pushl 4(%%eax)\n\t" \
1093  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1094  VALGRIND_CALL_NOREDIR_EAX \
1095  VALGRIND_RESTORE_STACK \
1096  : /*out*/ "=a" (_res) \
1097  : /*in*/ "a" (&_argvec[0]) \
1098  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1099  ); \
1100  lval = (__typeof__(lval)) _res; \
1101  } while (0)
1102 
/* x86, 2 args pushed right-to-left; "subl $8" pads 8+8 pushed bytes to
   a 16-byte multiple. */
1103 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1104  do { \
1105  volatile OrigFn _orig = (orig); \
1106  volatile unsigned long _argvec[3]; \
1107  volatile unsigned long _res; \
1108  _argvec[0] = (unsigned long)_orig.nraddr; \
1109  _argvec[1] = (unsigned long)(arg1); \
1110  _argvec[2] = (unsigned long)(arg2); \
1111  __asm__ volatile( \
1112  VALGRIND_ALIGN_STACK \
1113  "subl $8, %%esp\n\t" \
1114  "pushl 8(%%eax)\n\t" \
1115  "pushl 4(%%eax)\n\t" \
1116  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1117  VALGRIND_CALL_NOREDIR_EAX \
1118  VALGRIND_RESTORE_STACK \
1119  : /*out*/ "=a" (_res) \
1120  : /*in*/ "a" (&_argvec[0]) \
1121  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1122  ); \
1123  lval = (__typeof__(lval)) _res; \
1124  } while (0)
1125 
/* x86, 3 args pushed right-to-left; "subl $4" pads 4+12 pushed bytes
   to a 16-byte multiple. */
1126 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1127  do { \
1128  volatile OrigFn _orig = (orig); \
1129  volatile unsigned long _argvec[4]; \
1130  volatile unsigned long _res; \
1131  _argvec[0] = (unsigned long)_orig.nraddr; \
1132  _argvec[1] = (unsigned long)(arg1); \
1133  _argvec[2] = (unsigned long)(arg2); \
1134  _argvec[3] = (unsigned long)(arg3); \
1135  __asm__ volatile( \
1136  VALGRIND_ALIGN_STACK \
1137  "subl $4, %%esp\n\t" \
1138  "pushl 12(%%eax)\n\t" \
1139  "pushl 8(%%eax)\n\t" \
1140  "pushl 4(%%eax)\n\t" \
1141  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1142  VALGRIND_CALL_NOREDIR_EAX \
1143  VALGRIND_RESTORE_STACK \
1144  : /*out*/ "=a" (_res) \
1145  : /*in*/ "a" (&_argvec[0]) \
1146  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1147  ); \
1148  lval = (__typeof__(lval)) _res; \
1149  } while (0)
1150 
/* x86, 4 args: 16 pushed bytes are already a 16-byte multiple, so no
   alignment pad is needed. */
1151 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1152  do { \
1153  volatile OrigFn _orig = (orig); \
1154  volatile unsigned long _argvec[5]; \
1155  volatile unsigned long _res; \
1156  _argvec[0] = (unsigned long)_orig.nraddr; \
1157  _argvec[1] = (unsigned long)(arg1); \
1158  _argvec[2] = (unsigned long)(arg2); \
1159  _argvec[3] = (unsigned long)(arg3); \
1160  _argvec[4] = (unsigned long)(arg4); \
1161  __asm__ volatile( \
1162  VALGRIND_ALIGN_STACK \
1163  "pushl 16(%%eax)\n\t" \
1164  "pushl 12(%%eax)\n\t" \
1165  "pushl 8(%%eax)\n\t" \
1166  "pushl 4(%%eax)\n\t" \
1167  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1168  VALGRIND_CALL_NOREDIR_EAX \
1169  VALGRIND_RESTORE_STACK \
1170  : /*out*/ "=a" (_res) \
1171  : /*in*/ "a" (&_argvec[0]) \
1172  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1173  ); \
1174  lval = (__typeof__(lval)) _res; \
1175  } while (0)
1176 
/* x86, 5 args; "subl $12" pads 12+20 pushed bytes to a 16-byte
   multiple. */
1177 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1178  do { \
1179  volatile OrigFn _orig = (orig); \
1180  volatile unsigned long _argvec[6]; \
1181  volatile unsigned long _res; \
1182  _argvec[0] = (unsigned long)_orig.nraddr; \
1183  _argvec[1] = (unsigned long)(arg1); \
1184  _argvec[2] = (unsigned long)(arg2); \
1185  _argvec[3] = (unsigned long)(arg3); \
1186  _argvec[4] = (unsigned long)(arg4); \
1187  _argvec[5] = (unsigned long)(arg5); \
1188  __asm__ volatile( \
1189  VALGRIND_ALIGN_STACK \
1190  "subl $12, %%esp\n\t" \
1191  "pushl 20(%%eax)\n\t" \
1192  "pushl 16(%%eax)\n\t" \
1193  "pushl 12(%%eax)\n\t" \
1194  "pushl 8(%%eax)\n\t" \
1195  "pushl 4(%%eax)\n\t" \
1196  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1197  VALGRIND_CALL_NOREDIR_EAX \
1198  VALGRIND_RESTORE_STACK \
1199  : /*out*/ "=a" (_res) \
1200  : /*in*/ "a" (&_argvec[0]) \
1201  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1202  ); \
1203  lval = (__typeof__(lval)) _res; \
1204  } while (0)
1205 
/* x86, 6 args; "subl $8" pads 8+24 pushed bytes to a 16-byte
   multiple. */
1206 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1207  do { \
1208  volatile OrigFn _orig = (orig); \
1209  volatile unsigned long _argvec[7]; \
1210  volatile unsigned long _res; \
1211  _argvec[0] = (unsigned long)_orig.nraddr; \
1212  _argvec[1] = (unsigned long)(arg1); \
1213  _argvec[2] = (unsigned long)(arg2); \
1214  _argvec[3] = (unsigned long)(arg3); \
1215  _argvec[4] = (unsigned long)(arg4); \
1216  _argvec[5] = (unsigned long)(arg5); \
1217  _argvec[6] = (unsigned long)(arg6); \
1218  __asm__ volatile( \
1219  VALGRIND_ALIGN_STACK \
1220  "subl $8, %%esp\n\t" \
1221  "pushl 24(%%eax)\n\t" \
1222  "pushl 20(%%eax)\n\t" \
1223  "pushl 16(%%eax)\n\t" \
1224  "pushl 12(%%eax)\n\t" \
1225  "pushl 8(%%eax)\n\t" \
1226  "pushl 4(%%eax)\n\t" \
1227  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1228  VALGRIND_CALL_NOREDIR_EAX \
1229  VALGRIND_RESTORE_STACK \
1230  : /*out*/ "=a" (_res) \
1231  : /*in*/ "a" (&_argvec[0]) \
1232  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1233  ); \
1234  lval = (__typeof__(lval)) _res; \
1235  } while (0)
1236 
/* x86, 7 args; "subl $4" pads 4+28 pushed bytes to a 16-byte
   multiple. */
1237 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1238  arg7) \
1239  do { \
1240  volatile OrigFn _orig = (orig); \
1241  volatile unsigned long _argvec[8]; \
1242  volatile unsigned long _res; \
1243  _argvec[0] = (unsigned long)_orig.nraddr; \
1244  _argvec[1] = (unsigned long)(arg1); \
1245  _argvec[2] = (unsigned long)(arg2); \
1246  _argvec[3] = (unsigned long)(arg3); \
1247  _argvec[4] = (unsigned long)(arg4); \
1248  _argvec[5] = (unsigned long)(arg5); \
1249  _argvec[6] = (unsigned long)(arg6); \
1250  _argvec[7] = (unsigned long)(arg7); \
1251  __asm__ volatile( \
1252  VALGRIND_ALIGN_STACK \
1253  "subl $4, %%esp\n\t" \
1254  "pushl 28(%%eax)\n\t" \
1255  "pushl 24(%%eax)\n\t" \
1256  "pushl 20(%%eax)\n\t" \
1257  "pushl 16(%%eax)\n\t" \
1258  "pushl 12(%%eax)\n\t" \
1259  "pushl 8(%%eax)\n\t" \
1260  "pushl 4(%%eax)\n\t" \
1261  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1262  VALGRIND_CALL_NOREDIR_EAX \
1263  VALGRIND_RESTORE_STACK \
1264  : /*out*/ "=a" (_res) \
1265  : /*in*/ "a" (&_argvec[0]) \
1266  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1267  ); \
1268  lval = (__typeof__(lval)) _res; \
1269  } while (0)
1270 
/* x86, 8 args: 32 pushed bytes are already a 16-byte multiple, so no
   pad is needed. */
1271 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1272  arg7,arg8) \
1273  do { \
1274  volatile OrigFn _orig = (orig); \
1275  volatile unsigned long _argvec[9]; \
1276  volatile unsigned long _res; \
1277  _argvec[0] = (unsigned long)_orig.nraddr; \
1278  _argvec[1] = (unsigned long)(arg1); \
1279  _argvec[2] = (unsigned long)(arg2); \
1280  _argvec[3] = (unsigned long)(arg3); \
1281  _argvec[4] = (unsigned long)(arg4); \
1282  _argvec[5] = (unsigned long)(arg5); \
1283  _argvec[6] = (unsigned long)(arg6); \
1284  _argvec[7] = (unsigned long)(arg7); \
1285  _argvec[8] = (unsigned long)(arg8); \
1286  __asm__ volatile( \
1287  VALGRIND_ALIGN_STACK \
1288  "pushl 32(%%eax)\n\t" \
1289  "pushl 28(%%eax)\n\t" \
1290  "pushl 24(%%eax)\n\t" \
1291  "pushl 20(%%eax)\n\t" \
1292  "pushl 16(%%eax)\n\t" \
1293  "pushl 12(%%eax)\n\t" \
1294  "pushl 8(%%eax)\n\t" \
1295  "pushl 4(%%eax)\n\t" \
1296  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1297  VALGRIND_CALL_NOREDIR_EAX \
1298  VALGRIND_RESTORE_STACK \
1299  : /*out*/ "=a" (_res) \
1300  : /*in*/ "a" (&_argvec[0]) \
1301  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1302  ); \
1303  lval = (__typeof__(lval)) _res; \
1304  } while (0)
1305 
/* x86, 9 args; "subl $12" pads 12+36 pushed bytes to a 16-byte
   multiple. */
1306 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1307  arg7,arg8,arg9) \
1308  do { \
1309  volatile OrigFn _orig = (orig); \
1310  volatile unsigned long _argvec[10]; \
1311  volatile unsigned long _res; \
1312  _argvec[0] = (unsigned long)_orig.nraddr; \
1313  _argvec[1] = (unsigned long)(arg1); \
1314  _argvec[2] = (unsigned long)(arg2); \
1315  _argvec[3] = (unsigned long)(arg3); \
1316  _argvec[4] = (unsigned long)(arg4); \
1317  _argvec[5] = (unsigned long)(arg5); \
1318  _argvec[6] = (unsigned long)(arg6); \
1319  _argvec[7] = (unsigned long)(arg7); \
1320  _argvec[8] = (unsigned long)(arg8); \
1321  _argvec[9] = (unsigned long)(arg9); \
1322  __asm__ volatile( \
1323  VALGRIND_ALIGN_STACK \
1324  "subl $12, %%esp\n\t" \
1325  "pushl 36(%%eax)\n\t" \
1326  "pushl 32(%%eax)\n\t" \
1327  "pushl 28(%%eax)\n\t" \
1328  "pushl 24(%%eax)\n\t" \
1329  "pushl 20(%%eax)\n\t" \
1330  "pushl 16(%%eax)\n\t" \
1331  "pushl 12(%%eax)\n\t" \
1332  "pushl 8(%%eax)\n\t" \
1333  "pushl 4(%%eax)\n\t" \
1334  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1335  VALGRIND_CALL_NOREDIR_EAX \
1336  VALGRIND_RESTORE_STACK \
1337  : /*out*/ "=a" (_res) \
1338  : /*in*/ "a" (&_argvec[0]) \
1339  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1340  ); \
1341  lval = (__typeof__(lval)) _res; \
1342  } while (0)
1343 
/* x86, 10 args; "subl $8" pads 8+40 pushed bytes to a 16-byte
   multiple. */
1344 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1345  arg7,arg8,arg9,arg10) \
1346  do { \
1347  volatile OrigFn _orig = (orig); \
1348  volatile unsigned long _argvec[11]; \
1349  volatile unsigned long _res; \
1350  _argvec[0] = (unsigned long)_orig.nraddr; \
1351  _argvec[1] = (unsigned long)(arg1); \
1352  _argvec[2] = (unsigned long)(arg2); \
1353  _argvec[3] = (unsigned long)(arg3); \
1354  _argvec[4] = (unsigned long)(arg4); \
1355  _argvec[5] = (unsigned long)(arg5); \
1356  _argvec[6] = (unsigned long)(arg6); \
1357  _argvec[7] = (unsigned long)(arg7); \
1358  _argvec[8] = (unsigned long)(arg8); \
1359  _argvec[9] = (unsigned long)(arg9); \
1360  _argvec[10] = (unsigned long)(arg10); \
1361  __asm__ volatile( \
1362  VALGRIND_ALIGN_STACK \
1363  "subl $8, %%esp\n\t" \
1364  "pushl 40(%%eax)\n\t" \
1365  "pushl 36(%%eax)\n\t" \
1366  "pushl 32(%%eax)\n\t" \
1367  "pushl 28(%%eax)\n\t" \
1368  "pushl 24(%%eax)\n\t" \
1369  "pushl 20(%%eax)\n\t" \
1370  "pushl 16(%%eax)\n\t" \
1371  "pushl 12(%%eax)\n\t" \
1372  "pushl 8(%%eax)\n\t" \
1373  "pushl 4(%%eax)\n\t" \
1374  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1375  VALGRIND_CALL_NOREDIR_EAX \
1376  VALGRIND_RESTORE_STACK \
1377  : /*out*/ "=a" (_res) \
1378  : /*in*/ "a" (&_argvec[0]) \
1379  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1380  ); \
1381  lval = (__typeof__(lval)) _res; \
1382  } while (0)
1383 
/* x86, 11 args; "subl $4" pads 4+44 pushed bytes to a 16-byte
   multiple. */
1384 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1385  arg6,arg7,arg8,arg9,arg10, \
1386  arg11) \
1387  do { \
1388  volatile OrigFn _orig = (orig); \
1389  volatile unsigned long _argvec[12]; \
1390  volatile unsigned long _res; \
1391  _argvec[0] = (unsigned long)_orig.nraddr; \
1392  _argvec[1] = (unsigned long)(arg1); \
1393  _argvec[2] = (unsigned long)(arg2); \
1394  _argvec[3] = (unsigned long)(arg3); \
1395  _argvec[4] = (unsigned long)(arg4); \
1396  _argvec[5] = (unsigned long)(arg5); \
1397  _argvec[6] = (unsigned long)(arg6); \
1398  _argvec[7] = (unsigned long)(arg7); \
1399  _argvec[8] = (unsigned long)(arg8); \
1400  _argvec[9] = (unsigned long)(arg9); \
1401  _argvec[10] = (unsigned long)(arg10); \
1402  _argvec[11] = (unsigned long)(arg11); \
1403  __asm__ volatile( \
1404  VALGRIND_ALIGN_STACK \
1405  "subl $4, %%esp\n\t" \
1406  "pushl 44(%%eax)\n\t" \
1407  "pushl 40(%%eax)\n\t" \
1408  "pushl 36(%%eax)\n\t" \
1409  "pushl 32(%%eax)\n\t" \
1410  "pushl 28(%%eax)\n\t" \
1411  "pushl 24(%%eax)\n\t" \
1412  "pushl 20(%%eax)\n\t" \
1413  "pushl 16(%%eax)\n\t" \
1414  "pushl 12(%%eax)\n\t" \
1415  "pushl 8(%%eax)\n\t" \
1416  "pushl 4(%%eax)\n\t" \
1417  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1418  VALGRIND_CALL_NOREDIR_EAX \
1419  VALGRIND_RESTORE_STACK \
1420  : /*out*/ "=a" (_res) \
1421  : /*in*/ "a" (&_argvec[0]) \
1422  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1423  ); \
1424  lval = (__typeof__(lval)) _res; \
1425  } while (0)
1426 
/* x86, 12 args: 48 pushed bytes are already a 16-byte multiple, so no
   pad is needed. */
1427 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
1428  arg6,arg7,arg8,arg9,arg10, \
1429  arg11,arg12) \
1430  do { \
1431  volatile OrigFn _orig = (orig); \
1432  volatile unsigned long _argvec[13]; \
1433  volatile unsigned long _res; \
1434  _argvec[0] = (unsigned long)_orig.nraddr; \
1435  _argvec[1] = (unsigned long)(arg1); \
1436  _argvec[2] = (unsigned long)(arg2); \
1437  _argvec[3] = (unsigned long)(arg3); \
1438  _argvec[4] = (unsigned long)(arg4); \
1439  _argvec[5] = (unsigned long)(arg5); \
1440  _argvec[6] = (unsigned long)(arg6); \
1441  _argvec[7] = (unsigned long)(arg7); \
1442  _argvec[8] = (unsigned long)(arg8); \
1443  _argvec[9] = (unsigned long)(arg9); \
1444  _argvec[10] = (unsigned long)(arg10); \
1445  _argvec[11] = (unsigned long)(arg11); \
1446  _argvec[12] = (unsigned long)(arg12); \
1447  __asm__ volatile( \
1448  VALGRIND_ALIGN_STACK \
1449  "pushl 48(%%eax)\n\t" \
1450  "pushl 44(%%eax)\n\t" \
1451  "pushl 40(%%eax)\n\t" \
1452  "pushl 36(%%eax)\n\t" \
1453  "pushl 32(%%eax)\n\t" \
1454  "pushl 28(%%eax)\n\t" \
1455  "pushl 24(%%eax)\n\t" \
1456  "pushl 20(%%eax)\n\t" \
1457  "pushl 16(%%eax)\n\t" \
1458  "pushl 12(%%eax)\n\t" \
1459  "pushl 8(%%eax)\n\t" \
1460  "pushl 4(%%eax)\n\t" \
1461  "movl (%%eax), %%eax\n\t" /* target->%eax */ \
1462  VALGRIND_CALL_NOREDIR_EAX \
1463  VALGRIND_RESTORE_STACK \
1464  : /*out*/ "=a" (_res) \
1465  : /*in*/ "a" (&_argvec[0]) \
1466  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "edi" \
1467  ); \
1468  lval = (__typeof__(lval)) _res; \
1469  } while (0)
1470 
1471 #endif /* PLAT_x86_linux || PLAT_x86_darwin */
1472 
1473 /* ------------------------ amd64-{linux,darwin} --------------- */
1474 
1475 #if defined(PLAT_amd64_linux) || defined(PLAT_amd64_darwin)
1476 
1477 /* ARGREGS: rdi rsi rdx rcx r8 r9 (the rest on stack in R-to-L order) */
1478 
1479 /* These regs are trashed by the hidden call. */
/* rax is omitted: it is the "=a" result register of every amd64
   CALL_FN_ macro below, so gcc already accounts for it. */
1480 #define __CALLER_SAVED_REGS /*"rax",*/ "rcx", "rdx", "rsi", \
1481  "rdi", "r8", "r9", "r10", "r11"
1482 
1483 /* This is all pretty complex. It's so as to make stack unwinding
1484  work reliably. See bug 243270. The basic problem is the sub and
1485  add of 128 of %rsp in all of the following macros. If gcc believes
1486  the CFA is in %rsp, then unwinding may fail, because what's at the
1487  CFA is not what gcc "expected" when it constructs the CFIs for the
1488  places where the macros are instantiated.
1489 
1490  But we can't just add a CFI annotation to increase the CFA offset
1491  by 128, to match the sub of 128 from %rsp, because we don't know
1492  whether gcc has chosen %rsp as the CFA at that point, or whether it
1493  has chosen some other register (eg, %rbp). In the latter case,
1494  adding a CFI annotation to change the CFA offset is simply wrong.
1495 
1496  So the solution is to get hold of the CFA using
1497  __builtin_dwarf_cfa(), put it in a known register, and add a
1498  CFI annotation to say what the register is. We choose %rbp for
1499  this (perhaps perversely), because:
1500 
1501  (1) %rbp is already subject to unwinding. If a new register was
1502  chosen then the unwinder would have to unwind it in all stack
1503  traces, which is expensive, and
1504 
1505  (2) %rbp is already subject to precise exception updates in the
1506  JIT. If a new register was chosen, we'd have to have precise
1507  exceptions for it too, which reduces performance of the
1508  generated code.
1509 
1510  However .. one extra complication. We can't just whack the result
1511  of __builtin_dwarf_cfa() into %rbp and then add %rbp to the
1512  list of trashed registers at the end of the inline assembly
1513  fragments; gcc won't allow %rbp to appear in that list. Hence
1514  instead we need to stash %rbp in %r15 for the duration of the asm,
1515  and say that %r15 is trashed instead. gcc seems happy to go with
1516  that.
1517 
1518  Oh .. and this all needs to be conditionalised so that it is
1519  unchanged from before this commit, when compiled with older gccs
1520  that don't support __builtin_dwarf_cfa. Furthermore, since
1521  this header file is freestanding, it has to be independent of
1522  config.h, and so the following conditionalisation cannot depend on
1523  configure time checks.
1524 
1525  Although it's not clear from
1526  'defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)',
1527  this expression excludes Darwin.
1528  .cfi directives in Darwin assembly appear to be completely
1529  different and I haven't investigated how they work.
1530 
1531  For even more entertainment value, note we have to use the
1532  completely undocumented __builtin_dwarf_cfa(), which appears to
1533  really compute the CFA, whereas __builtin_frame_address(0) claims
1534  to but actually doesn't. See
1535  https://bugs.kde.org/show_bug.cgi?id=243270#c47
1536 */
1537 #if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
/* Extra asm input (%2 in the CALL_FN_ macros): the CFA obtained from
   the undocumented __builtin_dwarf_cfa() -- see the long comment
   above. */
1538 # define __FRAME_POINTER \
1539  ,"r"(__builtin_dwarf_cfa())
/* Stash %rbp in %r15 (gcc refuses %rbp in the clobber list), load the
   CFA into %rbp, and tell the unwinder via .cfi directives that %rbp
   now holds the CFA. */
1540 # define VALGRIND_CFI_PROLOGUE \
1541  "movq %%rbp, %%r15\n\t" \
1542  "movq %2, %%rbp\n\t" \
1543  ".cfi_remember_state\n\t" \
1544  ".cfi_def_cfa rbp, 0\n\t"
/* Restore %rbp and the remembered CFI state. */
1545 # define VALGRIND_CFI_EPILOGUE \
1546  "movq %%r15, %%rbp\n\t" \
1547  ".cfi_restore_state\n\t"
1548 #else
/* No DWARF2 CFI assembler support (e.g. Darwin, older gccs): all three
   expand to nothing and unwinding is unassisted. */
1549 # define __FRAME_POINTER
1550 # define VALGRIND_CFI_PROLOGUE
1551 # define VALGRIND_CFI_EPILOGUE
1552 #endif
1553 
1554 /* Macros to save and align the stack before making a function
1555  call and restore it afterwards as gcc may not keep the stack
1556  pointer aligned if it doesn't realise calls are being made
1557  to other functions. */
1558 
/* Save %rsp in %r14 (hence %r14 in every trash list below), then round
   %rsp down to a 16-byte boundary. */
1559 #define VALGRIND_ALIGN_STACK \
1560  "movq %%rsp,%%r14\n\t" \
1561  "andq $0xfffffffffffffff0,%%rsp\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the %rsp saved in %r14. */
1562 #define VALGRIND_RESTORE_STACK \
1563  "movq %%r14,%%rsp\n\t"
1564 
1565 /* These CALL_FN_ macros assume that on amd64-linux, sizeof(unsigned
1566  long) == 8. */
1567 
1568 /* NB 9 Sept 07. There is a nasty kludge here in all these CALL_FN_
1569  macros. In order not to trash the stack redzone, we need to drop
1570  %rsp by 128 before the hidden call, and restore afterwards. The
 1571  nastiness is that it is only by luck that the stack still appears
1572  to be unwindable during the hidden call - since then the behaviour
1573  of any routine using this macro does not match what the CFI data
1574  says. Sigh.
1575 
1576  Why is this important? Imagine that a wrapper has a stack
1577  allocated local, and passes to the hidden call, a pointer to it.
1578  Because gcc does not know about the hidden call, it may allocate
1579  that local in the redzone. Unfortunately the hidden call may then
1580  trash it before it comes to use it. So we must step clear of the
1581  redzone, for the duration of the hidden call, to make it safe.
1582 
1583  Probably the same problem afflicts the other redzone-style ABIs too
1584  (ppc64-linux); but for those, the stack is
1585  self describing (none of this CFI nonsense) so at least messing
1586  with the stack pointer doesn't give a danger of non-unwindable
1587  stack. */
1588 
/* amd64: call the 0-arg function at _orig.nraddr without redirection;
   "subq $128" steps below the red zone (see comment above), %rax
   carries &_argvec[0] in and the result out. */
1589 #define CALL_FN_W_v(lval, orig) \
1590  do { \
1591  volatile OrigFn _orig = (orig); \
1592  volatile unsigned long _argvec[1]; \
1593  volatile unsigned long _res; \
1594  _argvec[0] = (unsigned long)_orig.nraddr; \
1595  __asm__ volatile( \
1596  VALGRIND_CFI_PROLOGUE \
1597  VALGRIND_ALIGN_STACK \
1598  "subq $128,%%rsp\n\t" \
1599  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1600  VALGRIND_CALL_NOREDIR_RAX \
1601  VALGRIND_RESTORE_STACK \
1602  VALGRIND_CFI_EPILOGUE \
1603  : /*out*/ "=a" (_res) \
1604  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1605  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1606  ); \
1607  lval = (__typeof__(lval)) _res; \
1608  } while (0)
1609 
/* amd64, 1 arg: arg1 goes in %rdi per the SysV register order noted
   above (rdi rsi rdx rcx r8 r9). */
1610 #define CALL_FN_W_W(lval, orig, arg1) \
1611  do { \
1612  volatile OrigFn _orig = (orig); \
1613  volatile unsigned long _argvec[2]; \
1614  volatile unsigned long _res; \
1615  _argvec[0] = (unsigned long)_orig.nraddr; \
1616  _argvec[1] = (unsigned long)(arg1); \
1617  __asm__ volatile( \
1618  VALGRIND_CFI_PROLOGUE \
1619  VALGRIND_ALIGN_STACK \
1620  "subq $128,%%rsp\n\t" \
1621  "movq 8(%%rax), %%rdi\n\t" \
1622  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1623  VALGRIND_CALL_NOREDIR_RAX \
1624  VALGRIND_RESTORE_STACK \
1625  VALGRIND_CFI_EPILOGUE \
1626  : /*out*/ "=a" (_res) \
1627  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1628  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1629  ); \
1630  lval = (__typeof__(lval)) _res; \
1631  } while (0)
1632 
/* amd64, 2 args: loaded into %rdi, %rsi; red-zone skip as above. */
1633 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
1634  do { \
1635  volatile OrigFn _orig = (orig); \
1636  volatile unsigned long _argvec[3]; \
1637  volatile unsigned long _res; \
1638  _argvec[0] = (unsigned long)_orig.nraddr; \
1639  _argvec[1] = (unsigned long)(arg1); \
1640  _argvec[2] = (unsigned long)(arg2); \
1641  __asm__ volatile( \
1642  VALGRIND_CFI_PROLOGUE \
1643  VALGRIND_ALIGN_STACK \
1644  "subq $128,%%rsp\n\t" \
1645  "movq 16(%%rax), %%rsi\n\t" \
1646  "movq 8(%%rax), %%rdi\n\t" \
1647  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1648  VALGRIND_CALL_NOREDIR_RAX \
1649  VALGRIND_RESTORE_STACK \
1650  VALGRIND_CFI_EPILOGUE \
1651  : /*out*/ "=a" (_res) \
1652  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1653  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1654  ); \
1655  lval = (__typeof__(lval)) _res; \
1656  } while (0)
1657 
/* amd64, 3 args: loaded into %rdi, %rsi, %rdx. */
1658 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
1659  do { \
1660  volatile OrigFn _orig = (orig); \
1661  volatile unsigned long _argvec[4]; \
1662  volatile unsigned long _res; \
1663  _argvec[0] = (unsigned long)_orig.nraddr; \
1664  _argvec[1] = (unsigned long)(arg1); \
1665  _argvec[2] = (unsigned long)(arg2); \
1666  _argvec[3] = (unsigned long)(arg3); \
1667  __asm__ volatile( \
1668  VALGRIND_CFI_PROLOGUE \
1669  VALGRIND_ALIGN_STACK \
1670  "subq $128,%%rsp\n\t" \
1671  "movq 24(%%rax), %%rdx\n\t" \
1672  "movq 16(%%rax), %%rsi\n\t" \
1673  "movq 8(%%rax), %%rdi\n\t" \
1674  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1675  VALGRIND_CALL_NOREDIR_RAX \
1676  VALGRIND_RESTORE_STACK \
1677  VALGRIND_CFI_EPILOGUE \
1678  : /*out*/ "=a" (_res) \
1679  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1680  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1681  ); \
1682  lval = (__typeof__(lval)) _res; \
1683  } while (0)
1684 
/* amd64, 4 args: loaded into %rdi, %rsi, %rdx, %rcx. */
1685 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
1686  do { \
1687  volatile OrigFn _orig = (orig); \
1688  volatile unsigned long _argvec[5]; \
1689  volatile unsigned long _res; \
1690  _argvec[0] = (unsigned long)_orig.nraddr; \
1691  _argvec[1] = (unsigned long)(arg1); \
1692  _argvec[2] = (unsigned long)(arg2); \
1693  _argvec[3] = (unsigned long)(arg3); \
1694  _argvec[4] = (unsigned long)(arg4); \
1695  __asm__ volatile( \
1696  VALGRIND_CFI_PROLOGUE \
1697  VALGRIND_ALIGN_STACK \
1698  "subq $128,%%rsp\n\t" \
1699  "movq 32(%%rax), %%rcx\n\t" \
1700  "movq 24(%%rax), %%rdx\n\t" \
1701  "movq 16(%%rax), %%rsi\n\t" \
1702  "movq 8(%%rax), %%rdi\n\t" \
1703  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1704  VALGRIND_CALL_NOREDIR_RAX \
1705  VALGRIND_RESTORE_STACK \
1706  VALGRIND_CFI_EPILOGUE \
1707  : /*out*/ "=a" (_res) \
1708  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1709  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1710  ); \
1711  lval = (__typeof__(lval)) _res; \
1712  } while (0)
1713 
/* amd64, 5 args: loaded into %rdi, %rsi, %rdx, %rcx, %r8. */
1714 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
1715  do { \
1716  volatile OrigFn _orig = (orig); \
1717  volatile unsigned long _argvec[6]; \
1718  volatile unsigned long _res; \
1719  _argvec[0] = (unsigned long)_orig.nraddr; \
1720  _argvec[1] = (unsigned long)(arg1); \
1721  _argvec[2] = (unsigned long)(arg2); \
1722  _argvec[3] = (unsigned long)(arg3); \
1723  _argvec[4] = (unsigned long)(arg4); \
1724  _argvec[5] = (unsigned long)(arg5); \
1725  __asm__ volatile( \
1726  VALGRIND_CFI_PROLOGUE \
1727  VALGRIND_ALIGN_STACK \
1728  "subq $128,%%rsp\n\t" \
1729  "movq 40(%%rax), %%r8\n\t" \
1730  "movq 32(%%rax), %%rcx\n\t" \
1731  "movq 24(%%rax), %%rdx\n\t" \
1732  "movq 16(%%rax), %%rsi\n\t" \
1733  "movq 8(%%rax), %%rdi\n\t" \
1734  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1735  VALGRIND_CALL_NOREDIR_RAX \
1736  VALGRIND_RESTORE_STACK \
1737  VALGRIND_CFI_EPILOGUE \
1738  : /*out*/ "=a" (_res) \
1739  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1740  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1741  ); \
1742  lval = (__typeof__(lval)) _res; \
1743  } while (0)
1744 
/* amd64, 6 args: all six argument registers used (%rdi ... %r9); the
   last arity with no stack-passed args. */
1745 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
1746  do { \
1747  volatile OrigFn _orig = (orig); \
1748  volatile unsigned long _argvec[7]; \
1749  volatile unsigned long _res; \
1750  _argvec[0] = (unsigned long)_orig.nraddr; \
1751  _argvec[1] = (unsigned long)(arg1); \
1752  _argvec[2] = (unsigned long)(arg2); \
1753  _argvec[3] = (unsigned long)(arg3); \
1754  _argvec[4] = (unsigned long)(arg4); \
1755  _argvec[5] = (unsigned long)(arg5); \
1756  _argvec[6] = (unsigned long)(arg6); \
1757  __asm__ volatile( \
1758  VALGRIND_CFI_PROLOGUE \
1759  VALGRIND_ALIGN_STACK \
1760  "subq $128,%%rsp\n\t" \
1761  "movq 48(%%rax), %%r9\n\t" \
1762  "movq 40(%%rax), %%r8\n\t" \
1763  "movq 32(%%rax), %%rcx\n\t" \
1764  "movq 24(%%rax), %%rdx\n\t" \
1765  "movq 16(%%rax), %%rsi\n\t" \
1766  "movq 8(%%rax), %%rdi\n\t" \
1767  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1768  VALGRIND_CALL_NOREDIR_RAX \
1769  VALGRIND_RESTORE_STACK \
1770  VALGRIND_CFI_EPILOGUE \
1771  : /*out*/ "=a" (_res) \
1772  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1773  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1774  ); \
1775  lval = (__typeof__(lval)) _res; \
1776  } while (0)
1777 
/* amd64, 7 args: six in registers, arg7 pushed on the stack.  The
   "subq $136" is 128 (red-zone skip) + 8, so after the single 8-byte
   pushq the total adjustment (144) keeps %rsp 16-byte aligned. */
1778 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
1779  arg7) \
1780  do { \
1781  volatile OrigFn _orig = (orig); \
1782  volatile unsigned long _argvec[8]; \
1783  volatile unsigned long _res; \
1784  _argvec[0] = (unsigned long)_orig.nraddr; \
1785  _argvec[1] = (unsigned long)(arg1); \
1786  _argvec[2] = (unsigned long)(arg2); \
1787  _argvec[3] = (unsigned long)(arg3); \
1788  _argvec[4] = (unsigned long)(arg4); \
1789  _argvec[5] = (unsigned long)(arg5); \
1790  _argvec[6] = (unsigned long)(arg6); \
1791  _argvec[7] = (unsigned long)(arg7); \
1792  __asm__ volatile( \
1793  VALGRIND_CFI_PROLOGUE \
1794  VALGRIND_ALIGN_STACK \
1795  "subq $136,%%rsp\n\t" \
1796  "pushq 56(%%rax)\n\t" \
1797  "movq 48(%%rax), %%r9\n\t" \
1798  "movq 40(%%rax), %%r8\n\t" \
1799  "movq 32(%%rax), %%rcx\n\t" \
1800  "movq 24(%%rax), %%rdx\n\t" \
1801  "movq 16(%%rax), %%rsi\n\t" \
1802  "movq 8(%%rax), %%rdi\n\t" \
1803  "movq (%%rax), %%rax\n\t" /* target->%rax */ \
1804  VALGRIND_CALL_NOREDIR_RAX \
1805  VALGRIND_RESTORE_STACK \
1806  VALGRIND_CFI_EPILOGUE \
1807  : /*out*/ "=a" (_res) \
1808  : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
1809  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
1810  ); \
1811  lval = (__typeof__(lval)) _res; \
1812  } while (0)
1813 
/* As CALL_FN_W_6W, plus arg7/arg8 pushed on the stack (last arg first,
   so arg7 ends up lowest).  $128 + two 8-byte pushes = 144, preserving
   16-byte rsp alignment. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 64(%%rax)\n\t"       /* arg8 -> stack */          \
         "pushq 56(%%rax)\n\t"       /* arg7 -> stack */          \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1851 
/* As CALL_FN_W_6W, plus args 7..9 pushed on the stack in reverse
   order.  $136 + three pushes = 160, preserving 16-byte alignment. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 72(%%rax)\n\t"       /* arg9 -> stack */          \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1891 
/* As CALL_FN_W_6W, plus args 7..10 pushed on the stack in reverse
   order.  $128 + four pushes = 160, preserving 16-byte alignment. */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 80(%%rax)\n\t"       /* arg10 -> stack */         \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1933 
/* As CALL_FN_W_6W, plus args 7..11 pushed on the stack in reverse
   order.  $136 + five pushes = 176, preserving 16-byte alignment. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $136,%%rsp\n\t"                                    \
         "pushq 88(%%rax)\n\t"       /* arg11 -> stack */         \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
1977 
/* As CALL_FN_W_6W, plus args 7..12 pushed on the stack in reverse
   order.  $128 + six pushes = 176, preserving 16-byte alignment. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)(arg1);                         \
      _argvec[2] = (unsigned long)(arg2);                         \
      _argvec[3] = (unsigned long)(arg3);                         \
      _argvec[4] = (unsigned long)(arg4);                         \
      _argvec[5] = (unsigned long)(arg5);                         \
      _argvec[6] = (unsigned long)(arg6);                         \
      _argvec[7] = (unsigned long)(arg7);                         \
      _argvec[8] = (unsigned long)(arg8);                         \
      _argvec[9] = (unsigned long)(arg9);                         \
      _argvec[10] = (unsigned long)(arg10);                       \
      _argvec[11] = (unsigned long)(arg11);                       \
      _argvec[12] = (unsigned long)(arg12);                       \
      __asm__ volatile(                                           \
         VALGRIND_CFI_PROLOGUE                                    \
         VALGRIND_ALIGN_STACK                                     \
         "subq $128,%%rsp\n\t"                                    \
         "pushq 96(%%rax)\n\t"       /* arg12 -> stack */         \
         "pushq 88(%%rax)\n\t"                                    \
         "pushq 80(%%rax)\n\t"                                    \
         "pushq 72(%%rax)\n\t"                                    \
         "pushq 64(%%rax)\n\t"                                    \
         "pushq 56(%%rax)\n\t"                                    \
         "movq 48(%%rax), %%r9\n\t"                               \
         "movq 40(%%rax), %%r8\n\t"                               \
         "movq 32(%%rax), %%rcx\n\t"                              \
         "movq 24(%%rax), %%rdx\n\t"                              \
         "movq 16(%%rax), %%rsi\n\t"                              \
         "movq 8(%%rax), %%rdi\n\t"                               \
         "movq (%%rax), %%rax\n\t"  /* target->%rax */            \
         VALGRIND_CALL_NOREDIR_RAX                                \
         VALGRIND_RESTORE_STACK                                   \
         VALGRIND_CFI_EPILOGUE                                    \
         : /*out*/   "=a" (_res)                                  \
         : /*in*/    "a" (&_argvec[0]) __FRAME_POINTER            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r14", "r15" \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2023 
2024 #endif /* PLAT_amd64_linux || PLAT_amd64_darwin */
2025 
2026 /* ------------------------ ppc32-linux ------------------------ */
2027 
2028 #if defined(PLAT_ppc32_linux)
2029 
2030 /* This is useful for finding out about the on-stack stuff:
2031 
2032  extern int f9 ( int,int,int,int,int,int,int,int,int );
2033  extern int f10 ( int,int,int,int,int,int,int,int,int,int );
2034  extern int f11 ( int,int,int,int,int,int,int,int,int,int,int );
2035  extern int f12 ( int,int,int,int,int,int,int,int,int,int,int,int );
2036 
2037  int g9 ( void ) {
2038  return f9(11,22,33,44,55,66,77,88,99);
2039  }
2040  int g10 ( void ) {
2041  return f10(11,22,33,44,55,66,77,88,99,110);
2042  }
2043  int g11 ( void ) {
2044  return f11(11,22,33,44,55,66,77,88,99,110,121);
2045  }
2046  int g12 ( void ) {
2047  return f12(11,22,33,44,55,66,77,88,99,110,121,132);
2048  }
2049 */
2050 
2051 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2052 
/* These regs are trashed by the hidden call: link/count/fixed-point
   exception registers, all eight condition-register fields, and GPRs
   r0 and r2-r13.  This list is spliced into the asm clobber list of
   every ppc32 CALL_FN_* macro below. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2059 
2060 /* Macros to save and align the stack before making a function
2061  call and restore it afterwards as gcc may not keep the stack
2062  pointer aligned if it doesn't realise calls are being made
2063  to other functions. */
2064 
/* Park the current stack pointer in r28, then clear the low 4 bits of
   r1 (rlwinm with mask bits 0..27) to force 16-byte alignment for the
   hidden call.  r28 therefore appears in every clobber list below. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rlwinm 1,1,0,0,27\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the original SP from r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2070 
2071 /* These CALL_FN_ macros assume that on ppc32-linux,
2072  sizeof(unsigned long) == 4. */
2073 
/* Call a non-redirected 0-argument word-returning function on
   ppc32-linux.  The target address sits in _argvec[0]; it is loaded
   into r11 and invoked via the branch-and-link thunk.  The result
   comes back in r3 and is copied to %0. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[1];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2093 
/* As CALL_FN_W_v, plus one argument loaded into r3 from _argvec[1]. */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[2];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2115 
/* As CALL_FN_W_v, with args 1..2 loaded into r3/r4 from _argvec[1..2]. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"   /* arg2->r4 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2139 
/* As CALL_FN_W_v, with args 1..3 loaded into r3-r5 from _argvec[1..3]. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[4];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2165 
/* As CALL_FN_W_v, with args 1..4 loaded into r3-r6 from _argvec[1..4]. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[5];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2193 
/* As CALL_FN_W_v, with args 1..5 loaded into r3-r7 from _argvec[1..5]. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5)        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[6];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2223 
/* As CALL_FN_W_v, with args 1..6 loaded into r3-r8 from _argvec[1..6]. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6)   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[7];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2255 
/* As CALL_FN_W_v, with args 1..7 loaded into r3-r9 from _argvec[1..7]. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7)                                        \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[8];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2290 
/* As CALL_FN_W_v, with args 1..8 loaded into r3-r10 from
   _argvec[1..8] — the full ppc32 register-argument set. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[9];                          \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2327 
/* As CALL_FN_W_8W, plus arg9 passed on the stack: a 16-byte frame is
   opened (addi 1,1,-16) and arg9 is stored at 8(r1), staged through
   r3 before r3 is finally loaded with arg1. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,   \
                     arg7,arg8,arg9)                              \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[10];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2370 
/* As CALL_FN_W_9W, plus arg10 stored at 12(r1) in the same 16-byte
   stack frame (stack args written highest-numbered first). */
#define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[11];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-16\n\t"                                       \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2417 
/* As CALL_FN_W_10W, plus arg11 at 16(r1); the frame grows to 32 bytes
   (addi 1,1,-32) to hold the third stack argument. */
#define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[12];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2468 
/* As CALL_FN_W_11W, plus arg12 at 20(r1) in the same 32-byte frame. */
#define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6,  \
                      arg7,arg8,arg9,arg10,arg11,arg12)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[13];                         \
      volatile unsigned long _res;                                \
      _argvec[0] = (unsigned long)_orig.nraddr;                   \
      _argvec[1] = (unsigned long)arg1;                           \
      _argvec[2] = (unsigned long)arg2;                           \
      _argvec[3] = (unsigned long)arg3;                           \
      _argvec[4] = (unsigned long)arg4;                           \
      _argvec[5] = (unsigned long)arg5;                           \
      _argvec[6] = (unsigned long)arg6;                           \
      _argvec[7] = (unsigned long)arg7;                           \
      _argvec[8] = (unsigned long)arg8;                           \
      _argvec[9] = (unsigned long)arg9;                           \
      _argvec[10] = (unsigned long)arg10;                         \
      _argvec[11] = (unsigned long)arg11;                         \
      _argvec[12] = (unsigned long)arg12;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "addi 1,1,-32\n\t"                                       \
         /* arg12 */                                              \
         "lwz 3,48(11)\n\t"                                       \
         "stw 3,20(1)\n\t"                                        \
         /* arg11 */                                              \
         "lwz 3,44(11)\n\t"                                       \
         "stw 3,16(1)\n\t"                                        \
         /* arg10 */                                              \
         "lwz 3,40(11)\n\t"                                       \
         "stw 3,12(1)\n\t"                                        \
         /* arg9 */                                               \
         "lwz 3,36(11)\n\t"                                       \
         "stw 3,8(1)\n\t"                                         \
         /* args1-8 */                                            \
         "lwz 3,4(11)\n\t"   /* arg1->r3 */                       \
         "lwz 4,8(11)\n\t"                                        \
         "lwz 5,12(11)\n\t"                                       \
         "lwz 6,16(11)\n\t"  /* arg4->r6 */                       \
         "lwz 7,20(11)\n\t"                                       \
         "lwz 8,24(11)\n\t"                                       \
         "lwz 9,28(11)\n\t"                                       \
         "lwz 10,32(11)\n\t" /* arg8->r10 */                      \
         "lwz 11,0(11)\n\t"  /* target->r11 */                    \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         VALGRIND_RESTORE_STACK                                   \
         "mr %0,3"                                                \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[0])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2523 
2524 #endif /* PLAT_ppc32_linux */
2525 
2526 /* ------------------------ ppc64-linux ------------------------ */
2527 
2528 #if defined(PLAT_ppc64_linux)
2529 
2530 /* ARGREGS: r3 r4 r5 r6 r7 r8 r9 r10 (the rest on stack somewhere) */
2531 
/* These regs are trashed by the hidden call: link/count/fixed-point
   exception registers, all eight condition-register fields, and GPRs
   r0 and r2-r13.  Spliced into the asm clobber list of every ppc64
   CALL_FN_* macro below. */
#define __CALLER_SAVED_REGS                                       \
   "lr", "ctr", "xer",                                            \
   "cr0", "cr1", "cr2", "cr3", "cr4", "cr5", "cr6", "cr7",        \
   "r0", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9", "r10",   \
   "r11", "r12", "r13"
2538 
2539 /* Macros to save and align the stack before making a function
2540  call and restore it afterwards as gcc may not keep the stack
2541  pointer aligned if it doesn't realise calls are being made
2542  to other functions. */
2543 
/* Park the current stack pointer in r28, then clear the low 4 bits of
   r1 (rldicr keeps bits 0..59) to force 16-byte alignment for the
   hidden call.  r28 therefore appears in every clobber list below. */
#define VALGRIND_ALIGN_STACK                                      \
   "mr 28,1\n\t"                                                  \
   "rldicr 1,1,0,59\n\t"
/* Undo VALGRIND_ALIGN_STACK: restore the original SP from r28. */
#define VALGRIND_RESTORE_STACK                                    \
   "mr 1,28\n\t"
2549 
2550 /* These CALL_FN_ macros assume that on ppc64-linux, sizeof(unsigned
2551  long) == 8. */
2552 
/* ppc64: call a non-redirected 0-argument function.  The TOC pointer
   (r2) must be swapped for the call: %1 points at _argvec[2], so
   -16(r11) (= _argvec[0]) stashes the caller's r2 across the call and
   -8(r11) (= _argvec[1]) supplies the callee's tocptr from _orig.r2.
   Result is returned in r3. */
#define CALL_FN_W_v(lval, orig)                                   \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+0];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1] = (unsigned long)_orig.r2;                       \
      _argvec[2] = (unsigned long)_orig.nraddr;                   \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2578 
/* As ppc64 CALL_FN_W_v, plus arg1 loaded into r3 from 8(r11)
   (= _argvec[3], since r11 points at _argvec[2]). */
#define CALL_FN_W_W(lval, orig, arg1)                             \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+1];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2606 
/* As ppc64 CALL_FN_W_v, with args 1..2 loaded into r3/r4 from
   8(r11)/16(r11). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2)                       \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+2];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2636 
/* As ppc64 CALL_FN_W_v, with args 1..3 loaded into r3-r5 from
   8/16/24(r11). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3)                 \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+3];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2668 
/* As ppc64 CALL_FN_W_v, with args 1..4 loaded into r3-r6 from
   8/16/24/32(r11). */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4)           \
   do {                                                           \
      volatile OrigFn        _orig = (orig);                      \
      volatile unsigned long _argvec[3+4];                        \
      volatile unsigned long _res;                                \
      /* _argvec[0] holds current r2 across the call */           \
      _argvec[1]   = (unsigned long)_orig.r2;                     \
      _argvec[2]   = (unsigned long)_orig.nraddr;                 \
      _argvec[2+1] = (unsigned long)arg1;                         \
      _argvec[2+2] = (unsigned long)arg2;                         \
      _argvec[2+3] = (unsigned long)arg3;                         \
      _argvec[2+4] = (unsigned long)arg4;                         \
      __asm__ volatile(                                           \
         VALGRIND_ALIGN_STACK                                     \
         "mr 11,%1\n\t"                                           \
         "std 2,-16(11)\n\t"  /* save tocptr */                   \
         "ld  2,-8(11)\n\t"   /* use nraddr's tocptr */           \
         "ld  3, 8(11)\n\t"   /* arg1->r3 */                      \
         "ld  4, 16(11)\n\t"  /* arg2->r4 */                      \
         "ld  5, 24(11)\n\t"  /* arg3->r5 */                      \
         "ld  6, 32(11)\n\t"  /* arg4->r6 */                      \
         "ld  11, 0(11)\n\t"  /* target->r11 */                   \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11                  \
         "mr 11,%1\n\t"                                           \
         "mr %0,3\n\t"                                            \
         "ld 2,-16(11)\n\t"   /* restore tocptr */                \
         VALGRIND_RESTORE_STACK                                   \
         : /*out*/   "=r" (_res)                                  \
         : /*in*/    "r" (&_argvec[2])                            \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28"   \
      );                                                          \
      lval = (__typeof__(lval)) _res;                             \
   } while (0)
2702 
/* ppc64: as CALL_FN_W_WWW above, but with 5 word args (arg5 goes in r7). */
2703 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
2704  do { \
2705  volatile OrigFn _orig = (orig); \
2706  volatile unsigned long _argvec[3+5]; \
2707  volatile unsigned long _res; \
2708  /* _argvec[0] holds current r2 across the call */ \
2709  _argvec[1] = (unsigned long)_orig.r2; \
2710  _argvec[2] = (unsigned long)_orig.nraddr; \
2711  _argvec[2+1] = (unsigned long)arg1; \
2712  _argvec[2+2] = (unsigned long)arg2; \
2713  _argvec[2+3] = (unsigned long)arg3; \
2714  _argvec[2+4] = (unsigned long)arg4; \
2715  _argvec[2+5] = (unsigned long)arg5; \
2716  __asm__ volatile( \
2717  VALGRIND_ALIGN_STACK \
2718  "mr 11,%1\n\t" \
2719  "std 2,-16(11)\n\t" /* save tocptr */ \
2720  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2721  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2722  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2723  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2724  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2725  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2726  "ld 11, 0(11)\n\t" /* target->r11 */ \
2727  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2728  "mr 11,%1\n\t" \
2729  "mr %0,3\n\t" \
2730  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2731  VALGRIND_RESTORE_STACK \
2732  : /*out*/ "=r" (_res) \
2733  : /*in*/ "r" (&_argvec[2]) \
2734  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2735  ); \
2736  lval = (__typeof__(lval)) _res; \
2737  } while (0)
2738 
/* ppc64: as CALL_FN_W_WWW above, but with 6 word args (arg6 goes in r8). */
2739 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
2740  do { \
2741  volatile OrigFn _orig = (orig); \
2742  volatile unsigned long _argvec[3+6]; \
2743  volatile unsigned long _res; \
2744  /* _argvec[0] holds current r2 across the call */ \
2745  _argvec[1] = (unsigned long)_orig.r2; \
2746  _argvec[2] = (unsigned long)_orig.nraddr; \
2747  _argvec[2+1] = (unsigned long)arg1; \
2748  _argvec[2+2] = (unsigned long)arg2; \
2749  _argvec[2+3] = (unsigned long)arg3; \
2750  _argvec[2+4] = (unsigned long)arg4; \
2751  _argvec[2+5] = (unsigned long)arg5; \
2752  _argvec[2+6] = (unsigned long)arg6; \
2753  __asm__ volatile( \
2754  VALGRIND_ALIGN_STACK \
2755  "mr 11,%1\n\t" \
2756  "std 2,-16(11)\n\t" /* save tocptr */ \
2757  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2758  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2759  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2760  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2761  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2762  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2763  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2764  "ld 11, 0(11)\n\t" /* target->r11 */ \
2765  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2766  "mr 11,%1\n\t" \
2767  "mr %0,3\n\t" \
2768  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2769  VALGRIND_RESTORE_STACK \
2770  : /*out*/ "=r" (_res) \
2771  : /*in*/ "r" (&_argvec[2]) \
2772  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2773  ); \
2774  lval = (__typeof__(lval)) _res; \
2775  } while (0)
2776 
/* ppc64: as CALL_FN_W_WWW above, but with 7 word args (arg7 goes in r9). */
2777 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2778  arg7) \
2779  do { \
2780  volatile OrigFn _orig = (orig); \
2781  volatile unsigned long _argvec[3+7]; \
2782  volatile unsigned long _res; \
2783  /* _argvec[0] holds current r2 across the call */ \
2784  _argvec[1] = (unsigned long)_orig.r2; \
2785  _argvec[2] = (unsigned long)_orig.nraddr; \
2786  _argvec[2+1] = (unsigned long)arg1; \
2787  _argvec[2+2] = (unsigned long)arg2; \
2788  _argvec[2+3] = (unsigned long)arg3; \
2789  _argvec[2+4] = (unsigned long)arg4; \
2790  _argvec[2+5] = (unsigned long)arg5; \
2791  _argvec[2+6] = (unsigned long)arg6; \
2792  _argvec[2+7] = (unsigned long)arg7; \
2793  __asm__ volatile( \
2794  VALGRIND_ALIGN_STACK \
2795  "mr 11,%1\n\t" \
2796  "std 2,-16(11)\n\t" /* save tocptr */ \
2797  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2798  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2799  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2800  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2801  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2802  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2803  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2804  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2805  "ld 11, 0(11)\n\t" /* target->r11 */ \
2806  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2807  "mr 11,%1\n\t" \
2808  "mr %0,3\n\t" \
2809  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2810  VALGRIND_RESTORE_STACK \
2811  : /*out*/ "=r" (_res) \
2812  : /*in*/ "r" (&_argvec[2]) \
2813  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2814  ); \
2815  lval = (__typeof__(lval)) _res; \
2816  } while (0)
2817 
/* ppc64: as CALL_FN_W_WWW above, but with 8 word args (arg8 goes in r10;
   this is the last variant whose args all fit in registers r3-r10). */
2818 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2819  arg7,arg8) \
2820  do { \
2821  volatile OrigFn _orig = (orig); \
2822  volatile unsigned long _argvec[3+8]; \
2823  volatile unsigned long _res; \
2824  /* _argvec[0] holds current r2 across the call */ \
2825  _argvec[1] = (unsigned long)_orig.r2; \
2826  _argvec[2] = (unsigned long)_orig.nraddr; \
2827  _argvec[2+1] = (unsigned long)arg1; \
2828  _argvec[2+2] = (unsigned long)arg2; \
2829  _argvec[2+3] = (unsigned long)arg3; \
2830  _argvec[2+4] = (unsigned long)arg4; \
2831  _argvec[2+5] = (unsigned long)arg5; \
2832  _argvec[2+6] = (unsigned long)arg6; \
2833  _argvec[2+7] = (unsigned long)arg7; \
2834  _argvec[2+8] = (unsigned long)arg8; \
2835  __asm__ volatile( \
2836  VALGRIND_ALIGN_STACK \
2837  "mr 11,%1\n\t" \
2838  "std 2,-16(11)\n\t" /* save tocptr */ \
2839  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2840  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2841  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2842  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2843  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2844  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2845  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2846  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2847  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2848  "ld 11, 0(11)\n\t" /* target->r11 */ \
2849  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2850  "mr 11,%1\n\t" \
2851  "mr %0,3\n\t" \
2852  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2853  VALGRIND_RESTORE_STACK \
2854  : /*out*/ "=r" (_res) \
2855  : /*in*/ "r" (&_argvec[2]) \
2856  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2857  ); \
2858  lval = (__typeof__(lval)) _res; \
2859  } while (0)
2860 
/* ppc64: as CALL_FN_W_8W above, but with 9 word args.  arg9 does not fit
   in a register: the frame is grown by 128 bytes and arg9 is stored at
   112(r1) (the parameter save area slot after the 48+64-byte header --
   NOTE(review): per the ppc64 ELF ABI; confirm against upstream).  The
   matching addi is unnecessary because VALGRIND_RESTORE_STACK (defined
   earlier in this file, outside this excerpt) reinstates the saved sp. */
2861 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2862  arg7,arg8,arg9) \
2863  do { \
2864  volatile OrigFn _orig = (orig); \
2865  volatile unsigned long _argvec[3+9]; \
2866  volatile unsigned long _res; \
2867  /* _argvec[0] holds current r2 across the call */ \
2868  _argvec[1] = (unsigned long)_orig.r2; \
2869  _argvec[2] = (unsigned long)_orig.nraddr; \
2870  _argvec[2+1] = (unsigned long)arg1; \
2871  _argvec[2+2] = (unsigned long)arg2; \
2872  _argvec[2+3] = (unsigned long)arg3; \
2873  _argvec[2+4] = (unsigned long)arg4; \
2874  _argvec[2+5] = (unsigned long)arg5; \
2875  _argvec[2+6] = (unsigned long)arg6; \
2876  _argvec[2+7] = (unsigned long)arg7; \
2877  _argvec[2+8] = (unsigned long)arg8; \
2878  _argvec[2+9] = (unsigned long)arg9; \
2879  __asm__ volatile( \
2880  VALGRIND_ALIGN_STACK \
2881  "mr 11,%1\n\t" \
2882  "std 2,-16(11)\n\t" /* save tocptr */ \
2883  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2884  "addi 1,1,-128\n\t" /* expand stack frame */ \
2885  /* arg9 */ \
2886  "ld 3,72(11)\n\t" \
2887  "std 3,112(1)\n\t" \
2888  /* args1-8 */ \
2889  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2890  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2891  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2892  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2893  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2894  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2895  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2896  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2897  "ld 11, 0(11)\n\t" /* target->r11 */ \
2898  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2899  "mr 11,%1\n\t" \
2900  "mr %0,3\n\t" \
2901  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2902  VALGRIND_RESTORE_STACK \
2903  : /*out*/ "=r" (_res) \
2904  : /*in*/ "r" (&_argvec[2]) \
2905  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2906  ); \
2907  lval = (__typeof__(lval)) _res; \
2908  } while (0)
2909 
/* ppc64: as CALL_FN_W_9W above, but with 10 word args.  arg10 and arg9
   are spilled to 120(r1) and 112(r1) of the expanded 128-byte frame;
   args 1-8 travel in r3-r10 as before. */
2910 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2911  arg7,arg8,arg9,arg10) \
2912  do { \
2913  volatile OrigFn _orig = (orig); \
2914  volatile unsigned long _argvec[3+10]; \
2915  volatile unsigned long _res; \
2916  /* _argvec[0] holds current r2 across the call */ \
2917  _argvec[1] = (unsigned long)_orig.r2; \
2918  _argvec[2] = (unsigned long)_orig.nraddr; \
2919  _argvec[2+1] = (unsigned long)arg1; \
2920  _argvec[2+2] = (unsigned long)arg2; \
2921  _argvec[2+3] = (unsigned long)arg3; \
2922  _argvec[2+4] = (unsigned long)arg4; \
2923  _argvec[2+5] = (unsigned long)arg5; \
2924  _argvec[2+6] = (unsigned long)arg6; \
2925  _argvec[2+7] = (unsigned long)arg7; \
2926  _argvec[2+8] = (unsigned long)arg8; \
2927  _argvec[2+9] = (unsigned long)arg9; \
2928  _argvec[2+10] = (unsigned long)arg10; \
2929  __asm__ volatile( \
2930  VALGRIND_ALIGN_STACK \
2931  "mr 11,%1\n\t" \
2932  "std 2,-16(11)\n\t" /* save tocptr */ \
2933  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2934  "addi 1,1,-128\n\t" /* expand stack frame */ \
2935  /* arg10 */ \
2936  "ld 3,80(11)\n\t" \
2937  "std 3,120(1)\n\t" \
2938  /* arg9 */ \
2939  "ld 3,72(11)\n\t" \
2940  "std 3,112(1)\n\t" \
2941  /* args1-8 */ \
2942  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
2943  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
2944  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
2945  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
2946  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
2947  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
2948  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
2949  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
2950  "ld 11, 0(11)\n\t" /* target->r11 */ \
2951  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
2952  "mr 11,%1\n\t" \
2953  "mr %0,3\n\t" \
2954  "ld 2,-16(11)\n\t" /* restore tocptr */ \
2955  VALGRIND_RESTORE_STACK \
2956  : /*out*/ "=r" (_res) \
2957  : /*in*/ "r" (&_argvec[2]) \
2958  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
2959  ); \
2960  lval = (__typeof__(lval)) _res; \
2961  } while (0)
2962 
/* ppc64: as CALL_FN_W_9W above, but with 11 word args.  The frame grows
   by 144 bytes (one more spill slot, rounded for alignment); arg11/arg10/
   arg9 land at 128/120/112(r1), args 1-8 in r3-r10. */
2963 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
2964  arg7,arg8,arg9,arg10,arg11) \
2965  do { \
2966  volatile OrigFn _orig = (orig); \
2967  volatile unsigned long _argvec[3+11]; \
2968  volatile unsigned long _res; \
2969  /* _argvec[0] holds current r2 across the call */ \
2970  _argvec[1] = (unsigned long)_orig.r2; \
2971  _argvec[2] = (unsigned long)_orig.nraddr; \
2972  _argvec[2+1] = (unsigned long)arg1; \
2973  _argvec[2+2] = (unsigned long)arg2; \
2974  _argvec[2+3] = (unsigned long)arg3; \
2975  _argvec[2+4] = (unsigned long)arg4; \
2976  _argvec[2+5] = (unsigned long)arg5; \
2977  _argvec[2+6] = (unsigned long)arg6; \
2978  _argvec[2+7] = (unsigned long)arg7; \
2979  _argvec[2+8] = (unsigned long)arg8; \
2980  _argvec[2+9] = (unsigned long)arg9; \
2981  _argvec[2+10] = (unsigned long)arg10; \
2982  _argvec[2+11] = (unsigned long)arg11; \
2983  __asm__ volatile( \
2984  VALGRIND_ALIGN_STACK \
2985  "mr 11,%1\n\t" \
2986  "std 2,-16(11)\n\t" /* save tocptr */ \
2987  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
2988  "addi 1,1,-144\n\t" /* expand stack frame */ \
2989  /* arg11 */ \
2990  "ld 3,88(11)\n\t" \
2991  "std 3,128(1)\n\t" \
2992  /* arg10 */ \
2993  "ld 3,80(11)\n\t" \
2994  "std 3,120(1)\n\t" \
2995  /* arg9 */ \
2996  "ld 3,72(11)\n\t" \
2997  "std 3,112(1)\n\t" \
2998  /* args1-8 */ \
2999  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3000  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3001  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3002  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3003  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3004  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3005  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3006  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3007  "ld 11, 0(11)\n\t" /* target->r11 */ \
3008  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3009  "mr 11,%1\n\t" \
3010  "mr %0,3\n\t" \
3011  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3012  VALGRIND_RESTORE_STACK \
3013  : /*out*/ "=r" (_res) \
3014  : /*in*/ "r" (&_argvec[2]) \
3015  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3016  ); \
3017  lval = (__typeof__(lval)) _res; \
3018  } while (0)
3019 
/* ppc64: as CALL_FN_W_11W above, but with 12 word args.  arg12..arg9 are
   spilled to 136/128/120/112(r1) of the 144-byte expanded frame; args 1-8
   in r3-r10. */
3020 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3021  arg7,arg8,arg9,arg10,arg11,arg12) \
3022  do { \
3023  volatile OrigFn _orig = (orig); \
3024  volatile unsigned long _argvec[3+12]; \
3025  volatile unsigned long _res; \
3026  /* _argvec[0] holds current r2 across the call */ \
3027  _argvec[1] = (unsigned long)_orig.r2; \
3028  _argvec[2] = (unsigned long)_orig.nraddr; \
3029  _argvec[2+1] = (unsigned long)arg1; \
3030  _argvec[2+2] = (unsigned long)arg2; \
3031  _argvec[2+3] = (unsigned long)arg3; \
3032  _argvec[2+4] = (unsigned long)arg4; \
3033  _argvec[2+5] = (unsigned long)arg5; \
3034  _argvec[2+6] = (unsigned long)arg6; \
3035  _argvec[2+7] = (unsigned long)arg7; \
3036  _argvec[2+8] = (unsigned long)arg8; \
3037  _argvec[2+9] = (unsigned long)arg9; \
3038  _argvec[2+10] = (unsigned long)arg10; \
3039  _argvec[2+11] = (unsigned long)arg11; \
3040  _argvec[2+12] = (unsigned long)arg12; \
3041  __asm__ volatile( \
3042  VALGRIND_ALIGN_STACK \
3043  "mr 11,%1\n\t" \
3044  "std 2,-16(11)\n\t" /* save tocptr */ \
3045  "ld 2,-8(11)\n\t" /* use nraddr's tocptr */ \
3046  "addi 1,1,-144\n\t" /* expand stack frame */ \
3047  /* arg12 */ \
3048  "ld 3,96(11)\n\t" \
3049  "std 3,136(1)\n\t" \
3050  /* arg11 */ \
3051  "ld 3,88(11)\n\t" \
3052  "std 3,128(1)\n\t" \
3053  /* arg10 */ \
3054  "ld 3,80(11)\n\t" \
3055  "std 3,120(1)\n\t" \
3056  /* arg9 */ \
3057  "ld 3,72(11)\n\t" \
3058  "std 3,112(1)\n\t" \
3059  /* args1-8 */ \
3060  "ld 3, 8(11)\n\t" /* arg1->r3 */ \
3061  "ld 4, 16(11)\n\t" /* arg2->r4 */ \
3062  "ld 5, 24(11)\n\t" /* arg3->r5 */ \
3063  "ld 6, 32(11)\n\t" /* arg4->r6 */ \
3064  "ld 7, 40(11)\n\t" /* arg5->r7 */ \
3065  "ld 8, 48(11)\n\t" /* arg6->r8 */ \
3066  "ld 9, 56(11)\n\t" /* arg7->r9 */ \
3067  "ld 10, 64(11)\n\t" /* arg8->r10 */ \
3068  "ld 11, 0(11)\n\t" /* target->r11 */ \
3069  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R11 \
3070  "mr 11,%1\n\t" \
3071  "mr %0,3\n\t" \
3072  "ld 2,-16(11)\n\t" /* restore tocptr */ \
3073  VALGRIND_RESTORE_STACK \
3074  : /*out*/ "=r" (_res) \
3075  : /*in*/ "r" (&_argvec[2]) \
3076  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r28" \
3077  ); \
3078  lval = (__typeof__(lval)) _res; \
3079  } while (0)
3080 
3081 #endif /* PLAT_ppc64_linux */
3082 
3083 /* ------------------------- arm-linux ------------------------- */
3084 
3085 #if defined(PLAT_arm_linux)
3086 
3087 /* These regs are trashed by the hidden call. */
/* r4 doubles as the call-target scratch register ("target->r4" in the
   CALL_FN_ macros below) and r14 is lr, clobbered by the branch-and-link. */
3088 #define __CALLER_SAVED_REGS "r0", "r1", "r2", "r3","r4","r14"
3089 
3090 /* Macros to save and align the stack before making a function
3091  call and restore it afterwards as gcc may not keep the stack
3092  pointer aligned if it doesn't realise calls are being made
3093  to other functions. */
3094 
3095 /* This is a bit tricky. We store the original stack pointer in r10
3096  as it is callee-saves. gcc doesn't allow the use of r11 for some
3097  reason. Also, we can't directly "bic" the stack pointer in thumb
3098  mode since r13 isn't an allowed register number in that context.
3099  So use r4 as a temporary, since that is about to get trashed
3100  anyway, just after each use of this macro. Side effect is we need
3101  to be very careful about any future changes, since
3102  VALGRIND_ALIGN_STACK simply assumes r4 is usable. */
/* Park the original sp in callee-saved r10, then round sp down to a
   multiple of 8 using r4 as scratch (sp can't be bic'd directly in thumb
   mode and r11 is unavailable -- see the comment above). */
3103 #define VALGRIND_ALIGN_STACK \
3104  "mov r10, sp\n\t" \
3105  "mov r4, sp\n\t" \
3106  "bic r4, r4, #7\n\t" \
3107  "mov sp, r4\n\t"
/* Undo VALGRIND_ALIGN_STACK: the pre-call sp was parked in r10. */
3108 #define VALGRIND_RESTORE_STACK \
3109  "mov sp, r10\n\t"
3110 
3111 /* These CALL_FN_ macros assume that on arm-linux, sizeof(unsigned
3112  long) == 4. */
3113 
/* arm: call the wrapped function with no args.  _argvec[0] holds the
   target address; it is loaded into r4 and the result is taken from r0.
   r10 is in the clobber list because VALGRIND_ALIGN_STACK parks sp there. */
3114 #define CALL_FN_W_v(lval, orig) \
3115  do { \
3116  volatile OrigFn _orig = (orig); \
3117  volatile unsigned long _argvec[1]; \
3118  volatile unsigned long _res; \
3119  _argvec[0] = (unsigned long)_orig.nraddr; \
3120  __asm__ volatile( \
3121  VALGRIND_ALIGN_STACK \
3122  "ldr r4, [%1] \n\t" /* target->r4 */ \
3123  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3124  VALGRIND_RESTORE_STACK \
3125  "mov %0, r0\n" \
3126  : /*out*/ "=r" (_res) \
3127  : /*in*/ "0" (&_argvec[0]) \
3128  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3129  ); \
3130  lval = (__typeof__(lval)) _res; \
3131  } while (0)
3132 
/* arm: one word arg, passed in r0; target loaded into r4 last since r4
   is also trashed as scratch.  Result from r0. */
3133 #define CALL_FN_W_W(lval, orig, arg1) \
3134  do { \
3135  volatile OrigFn _orig = (orig); \
3136  volatile unsigned long _argvec[2]; \
3137  volatile unsigned long _res; \
3138  _argvec[0] = (unsigned long)_orig.nraddr; \
3139  _argvec[1] = (unsigned long)(arg1); \
3140  __asm__ volatile( \
3141  VALGRIND_ALIGN_STACK \
3142  "ldr r0, [%1, #4] \n\t" \
3143  "ldr r4, [%1] \n\t" /* target->r4 */ \
3144  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3145  VALGRIND_RESTORE_STACK \
3146  "mov %0, r0\n" \
3147  : /*out*/ "=r" (_res) \
3148  : /*in*/ "0" (&_argvec[0]) \
3149  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3150  ); \
3151  lval = (__typeof__(lval)) _res; \
3152  } while (0)
3153 
/* arm: as CALL_FN_W_W above, but with 2 word args in r0/r1. */
3154 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
3155  do { \
3156  volatile OrigFn _orig = (orig); \
3157  volatile unsigned long _argvec[3]; \
3158  volatile unsigned long _res; \
3159  _argvec[0] = (unsigned long)_orig.nraddr; \
3160  _argvec[1] = (unsigned long)(arg1); \
3161  _argvec[2] = (unsigned long)(arg2); \
3162  __asm__ volatile( \
3163  VALGRIND_ALIGN_STACK \
3164  "ldr r0, [%1, #4] \n\t" \
3165  "ldr r1, [%1, #8] \n\t" \
3166  "ldr r4, [%1] \n\t" /* target->r4 */ \
3167  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3168  VALGRIND_RESTORE_STACK \
3169  "mov %0, r0\n" \
3170  : /*out*/ "=r" (_res) \
3171  : /*in*/ "0" (&_argvec[0]) \
3172  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3173  ); \
3174  lval = (__typeof__(lval)) _res; \
3175  } while (0)
3176 
/* arm: as CALL_FN_W_W above, but with 3 word args in r0-r2. */
3177 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
3178  do { \
3179  volatile OrigFn _orig = (orig); \
3180  volatile unsigned long _argvec[4]; \
3181  volatile unsigned long _res; \
3182  _argvec[0] = (unsigned long)_orig.nraddr; \
3183  _argvec[1] = (unsigned long)(arg1); \
3184  _argvec[2] = (unsigned long)(arg2); \
3185  _argvec[3] = (unsigned long)(arg3); \
3186  __asm__ volatile( \
3187  VALGRIND_ALIGN_STACK \
3188  "ldr r0, [%1, #4] \n\t" \
3189  "ldr r1, [%1, #8] \n\t" \
3190  "ldr r2, [%1, #12] \n\t" \
3191  "ldr r4, [%1] \n\t" /* target->r4 */ \
3192  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3193  VALGRIND_RESTORE_STACK \
3194  "mov %0, r0\n" \
3195  : /*out*/ "=r" (_res) \
3196  : /*in*/ "0" (&_argvec[0]) \
3197  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3198  ); \
3199  lval = (__typeof__(lval)) _res; \
3200  } while (0)
3201 
/* arm: as CALL_FN_W_W above, but with 4 word args in r0-r3 (the last
   variant whose args all fit in registers). */
3202 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
3203  do { \
3204  volatile OrigFn _orig = (orig); \
3205  volatile unsigned long _argvec[5]; \
3206  volatile unsigned long _res; \
3207  _argvec[0] = (unsigned long)_orig.nraddr; \
3208  _argvec[1] = (unsigned long)(arg1); \
3209  _argvec[2] = (unsigned long)(arg2); \
3210  _argvec[3] = (unsigned long)(arg3); \
3211  _argvec[4] = (unsigned long)(arg4); \
3212  __asm__ volatile( \
3213  VALGRIND_ALIGN_STACK \
3214  "ldr r0, [%1, #4] \n\t" \
3215  "ldr r1, [%1, #8] \n\t" \
3216  "ldr r2, [%1, #12] \n\t" \
3217  "ldr r3, [%1, #16] \n\t" \
3218  "ldr r4, [%1] \n\t" /* target->r4 */ \
3219  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3220  VALGRIND_RESTORE_STACK \
3221  "mov %0, r0" \
3222  : /*out*/ "=r" (_res) \
3223  : /*in*/ "0" (&_argvec[0]) \
3224  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3225  ); \
3226  lval = (__typeof__(lval)) _res; \
3227  } while (0)
3228 
/* arm: 5 word args: arg1-arg4 in r0-r3, arg5 pushed on the stack.  The
   "sub sp, #4" pad plus the single pushed word keeps the total stack
   adjustment a multiple of 8 (sp was 8-aligned by VALGRIND_ALIGN_STACK). */
3229 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
3230  do { \
3231  volatile OrigFn _orig = (orig); \
3232  volatile unsigned long _argvec[6]; \
3233  volatile unsigned long _res; \
3234  _argvec[0] = (unsigned long)_orig.nraddr; \
3235  _argvec[1] = (unsigned long)(arg1); \
3236  _argvec[2] = (unsigned long)(arg2); \
3237  _argvec[3] = (unsigned long)(arg3); \
3238  _argvec[4] = (unsigned long)(arg4); \
3239  _argvec[5] = (unsigned long)(arg5); \
3240  __asm__ volatile( \
3241  VALGRIND_ALIGN_STACK \
3242  "sub sp, sp, #4 \n\t" \
3243  "ldr r0, [%1, #20] \n\t" \
3244  "push {r0} \n\t" \
3245  "ldr r0, [%1, #4] \n\t" \
3246  "ldr r1, [%1, #8] \n\t" \
3247  "ldr r2, [%1, #12] \n\t" \
3248  "ldr r3, [%1, #16] \n\t" \
3249  "ldr r4, [%1] \n\t" /* target->r4 */ \
3250  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3251  VALGRIND_RESTORE_STACK \
3252  "mov %0, r0" \
3253  : /*out*/ "=r" (_res) \
3254  : /*in*/ "0" (&_argvec[0]) \
3255  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3256  ); \
3257  lval = (__typeof__(lval)) _res; \
3258  } while (0)
3259 
/* arm: 6 word args: arg1-arg4 in r0-r3, arg5/arg6 pushed as a pair (two
   words = 8 bytes, so no alignment pad is needed). */
3260 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
3261  do { \
3262  volatile OrigFn _orig = (orig); \
3263  volatile unsigned long _argvec[7]; \
3264  volatile unsigned long _res; \
3265  _argvec[0] = (unsigned long)_orig.nraddr; \
3266  _argvec[1] = (unsigned long)(arg1); \
3267  _argvec[2] = (unsigned long)(arg2); \
3268  _argvec[3] = (unsigned long)(arg3); \
3269  _argvec[4] = (unsigned long)(arg4); \
3270  _argvec[5] = (unsigned long)(arg5); \
3271  _argvec[6] = (unsigned long)(arg6); \
3272  __asm__ volatile( \
3273  VALGRIND_ALIGN_STACK \
3274  "ldr r0, [%1, #20] \n\t" \
3275  "ldr r1, [%1, #24] \n\t" \
3276  "push {r0, r1} \n\t" \
3277  "ldr r0, [%1, #4] \n\t" \
3278  "ldr r1, [%1, #8] \n\t" \
3279  "ldr r2, [%1, #12] \n\t" \
3280  "ldr r3, [%1, #16] \n\t" \
3281  "ldr r4, [%1] \n\t" /* target->r4 */ \
3282  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3283  VALGRIND_RESTORE_STACK \
3284  "mov %0, r0" \
3285  : /*out*/ "=r" (_res) \
3286  : /*in*/ "0" (&_argvec[0]) \
3287  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3288  ); \
3289  lval = (__typeof__(lval)) _res; \
3290  } while (0)
3291 
/* arm: 7 word args: arg1-arg4 in r0-r3, arg5-arg7 pushed (3 words plus a
   4-byte pad keeps the stack 8-byte aligned). */
3292 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3293  arg7) \
3294  do { \
3295  volatile OrigFn _orig = (orig); \
3296  volatile unsigned long _argvec[8]; \
3297  volatile unsigned long _res; \
3298  _argvec[0] = (unsigned long)_orig.nraddr; \
3299  _argvec[1] = (unsigned long)(arg1); \
3300  _argvec[2] = (unsigned long)(arg2); \
3301  _argvec[3] = (unsigned long)(arg3); \
3302  _argvec[4] = (unsigned long)(arg4); \
3303  _argvec[5] = (unsigned long)(arg5); \
3304  _argvec[6] = (unsigned long)(arg6); \
3305  _argvec[7] = (unsigned long)(arg7); \
3306  __asm__ volatile( \
3307  VALGRIND_ALIGN_STACK \
3308  "sub sp, sp, #4 \n\t" \
3309  "ldr r0, [%1, #20] \n\t" \
3310  "ldr r1, [%1, #24] \n\t" \
3311  "ldr r2, [%1, #28] \n\t" \
3312  "push {r0, r1, r2} \n\t" \
3313  "ldr r0, [%1, #4] \n\t" \
3314  "ldr r1, [%1, #8] \n\t" \
3315  "ldr r2, [%1, #12] \n\t" \
3316  "ldr r3, [%1, #16] \n\t" \
3317  "ldr r4, [%1] \n\t" /* target->r4 */ \
3318  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3319  VALGRIND_RESTORE_STACK \
3320  "mov %0, r0" \
3321  : /*out*/ "=r" (_res) \
3322  : /*in*/ "0" (&_argvec[0]) \
3323  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3324  ); \
3325  lval = (__typeof__(lval)) _res; \
3326  } while (0)
3327 
/* arm: 8 word args: arg1-arg4 in r0-r3, arg5-arg8 pushed as four words
   (16 bytes, so no alignment pad is needed). */
3328 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3329  arg7,arg8) \
3330  do { \
3331  volatile OrigFn _orig = (orig); \
3332  volatile unsigned long _argvec[9]; \
3333  volatile unsigned long _res; \
3334  _argvec[0] = (unsigned long)_orig.nraddr; \
3335  _argvec[1] = (unsigned long)(arg1); \
3336  _argvec[2] = (unsigned long)(arg2); \
3337  _argvec[3] = (unsigned long)(arg3); \
3338  _argvec[4] = (unsigned long)(arg4); \
3339  _argvec[5] = (unsigned long)(arg5); \
3340  _argvec[6] = (unsigned long)(arg6); \
3341  _argvec[7] = (unsigned long)(arg7); \
3342  _argvec[8] = (unsigned long)(arg8); \
3343  __asm__ volatile( \
3344  VALGRIND_ALIGN_STACK \
3345  "ldr r0, [%1, #20] \n\t" \
3346  "ldr r1, [%1, #24] \n\t" \
3347  "ldr r2, [%1, #28] \n\t" \
3348  "ldr r3, [%1, #32] \n\t" \
3349  "push {r0, r1, r2, r3} \n\t" \
3350  "ldr r0, [%1, #4] \n\t" \
3351  "ldr r1, [%1, #8] \n\t" \
3352  "ldr r2, [%1, #12] \n\t" \
3353  "ldr r3, [%1, #16] \n\t" \
3354  "ldr r4, [%1] \n\t" /* target->r4 */ \
3355  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3356  VALGRIND_RESTORE_STACK \
3357  "mov %0, r0" \
3358  : /*out*/ "=r" (_res) \
3359  : /*in*/ "0" (&_argvec[0]) \
3360  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3361  ); \
3362  lval = (__typeof__(lval)) _res; \
3363  } while (0)
3364 
/* arm: 9 word args: arg1-arg4 in r0-r3, arg5-arg9 pushed (5 words plus a
   4-byte pad keeps the stack 8-byte aligned).  r4 is reused for the stack
   shuffle before finally receiving the target address. */
3365 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3366  arg7,arg8,arg9) \
3367  do { \
3368  volatile OrigFn _orig = (orig); \
3369  volatile unsigned long _argvec[10]; \
3370  volatile unsigned long _res; \
3371  _argvec[0] = (unsigned long)_orig.nraddr; \
3372  _argvec[1] = (unsigned long)(arg1); \
3373  _argvec[2] = (unsigned long)(arg2); \
3374  _argvec[3] = (unsigned long)(arg3); \
3375  _argvec[4] = (unsigned long)(arg4); \
3376  _argvec[5] = (unsigned long)(arg5); \
3377  _argvec[6] = (unsigned long)(arg6); \
3378  _argvec[7] = (unsigned long)(arg7); \
3379  _argvec[8] = (unsigned long)(arg8); \
3380  _argvec[9] = (unsigned long)(arg9); \
3381  __asm__ volatile( \
3382  VALGRIND_ALIGN_STACK \
3383  "sub sp, sp, #4 \n\t" \
3384  "ldr r0, [%1, #20] \n\t" \
3385  "ldr r1, [%1, #24] \n\t" \
3386  "ldr r2, [%1, #28] \n\t" \
3387  "ldr r3, [%1, #32] \n\t" \
3388  "ldr r4, [%1, #36] \n\t" \
3389  "push {r0, r1, r2, r3, r4} \n\t" \
3390  "ldr r0, [%1, #4] \n\t" \
3391  "ldr r1, [%1, #8] \n\t" \
3392  "ldr r2, [%1, #12] \n\t" \
3393  "ldr r3, [%1, #16] \n\t" \
3394  "ldr r4, [%1] \n\t" /* target->r4 */ \
3395  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3396  VALGRIND_RESTORE_STACK \
3397  "mov %0, r0" \
3398  : /*out*/ "=r" (_res) \
3399  : /*in*/ "0" (&_argvec[0]) \
3400  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3401  ); \
3402  lval = (__typeof__(lval)) _res; \
3403  } while (0)
3404 
/* arm: 10 word args: arg1-arg4 in r0-r3, arg5-arg10 pushed in two steps
   (arg10 first so stack args end up in order; 6 words total = 24 bytes...
   combined with the aligned base this keeps sp 8-byte aligned). */
3405 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
3406  arg7,arg8,arg9,arg10) \
3407  do { \
3408  volatile OrigFn _orig = (orig); \
3409  volatile unsigned long _argvec[11]; \
3410  volatile unsigned long _res; \
3411  _argvec[0] = (unsigned long)_orig.nraddr; \
3412  _argvec[1] = (unsigned long)(arg1); \
3413  _argvec[2] = (unsigned long)(arg2); \
3414  _argvec[3] = (unsigned long)(arg3); \
3415  _argvec[4] = (unsigned long)(arg4); \
3416  _argvec[5] = (unsigned long)(arg5); \
3417  _argvec[6] = (unsigned long)(arg6); \
3418  _argvec[7] = (unsigned long)(arg7); \
3419  _argvec[8] = (unsigned long)(arg8); \
3420  _argvec[9] = (unsigned long)(arg9); \
3421  _argvec[10] = (unsigned long)(arg10); \
3422  __asm__ volatile( \
3423  VALGRIND_ALIGN_STACK \
3424  "ldr r0, [%1, #40] \n\t" \
3425  "push {r0} \n\t" \
3426  "ldr r0, [%1, #20] \n\t" \
3427  "ldr r1, [%1, #24] \n\t" \
3428  "ldr r2, [%1, #28] \n\t" \
3429  "ldr r3, [%1, #32] \n\t" \
3430  "ldr r4, [%1, #36] \n\t" \
3431  "push {r0, r1, r2, r3, r4} \n\t" \
3432  "ldr r0, [%1, #4] \n\t" \
3433  "ldr r1, [%1, #8] \n\t" \
3434  "ldr r2, [%1, #12] \n\t" \
3435  "ldr r3, [%1, #16] \n\t" \
3436  "ldr r4, [%1] \n\t" /* target->r4 */ \
3437  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3438  VALGRIND_RESTORE_STACK \
3439  "mov %0, r0" \
3440  : /*out*/ "=r" (_res) \
3441  : /*in*/ "0" (&_argvec[0]) \
3442  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3443  ); \
3444  lval = (__typeof__(lval)) _res; \
3445  } while (0)
3446 
/* arm: 11 word args: arg1-arg4 in r0-r3, arg5-arg11 pushed in two steps
   (highest-numbered args first, with a 4-byte pad for 8-byte alignment). */
3447 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3448  arg6,arg7,arg8,arg9,arg10, \
3449  arg11) \
3450  do { \
3451  volatile OrigFn _orig = (orig); \
3452  volatile unsigned long _argvec[12]; \
3453  volatile unsigned long _res; \
3454  _argvec[0] = (unsigned long)_orig.nraddr; \
3455  _argvec[1] = (unsigned long)(arg1); \
3456  _argvec[2] = (unsigned long)(arg2); \
3457  _argvec[3] = (unsigned long)(arg3); \
3458  _argvec[4] = (unsigned long)(arg4); \
3459  _argvec[5] = (unsigned long)(arg5); \
3460  _argvec[6] = (unsigned long)(arg6); \
3461  _argvec[7] = (unsigned long)(arg7); \
3462  _argvec[8] = (unsigned long)(arg8); \
3463  _argvec[9] = (unsigned long)(arg9); \
3464  _argvec[10] = (unsigned long)(arg10); \
3465  _argvec[11] = (unsigned long)(arg11); \
3466  __asm__ volatile( \
3467  VALGRIND_ALIGN_STACK \
3468  "sub sp, sp, #4 \n\t" \
3469  "ldr r0, [%1, #40] \n\t" \
3470  "ldr r1, [%1, #44] \n\t" \
3471  "push {r0, r1} \n\t" \
3472  "ldr r0, [%1, #20] \n\t" \
3473  "ldr r1, [%1, #24] \n\t" \
3474  "ldr r2, [%1, #28] \n\t" \
3475  "ldr r3, [%1, #32] \n\t" \
3476  "ldr r4, [%1, #36] \n\t" \
3477  "push {r0, r1, r2, r3, r4} \n\t" \
3478  "ldr r0, [%1, #4] \n\t" \
3479  "ldr r1, [%1, #8] \n\t" \
3480  "ldr r2, [%1, #12] \n\t" \
3481  "ldr r3, [%1, #16] \n\t" \
3482  "ldr r4, [%1] \n\t" /* target->r4 */ \
3483  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3484  VALGRIND_RESTORE_STACK \
3485  "mov %0, r0" \
3486  : /*out*/ "=r" (_res) \
3487  : /*in*/ "0" (&_argvec[0]) \
3488  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3489  ); \
3490  lval = (__typeof__(lval)) _res; \
3491  } while (0)
3492 
/* arm: 12 word args: arg1-arg4 in r0-r3, arg5-arg12 pushed in two steps
   (8 words = 32 bytes, so no alignment pad is needed). */
3493 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
3494  arg6,arg7,arg8,arg9,arg10, \
3495  arg11,arg12) \
3496  do { \
3497  volatile OrigFn _orig = (orig); \
3498  volatile unsigned long _argvec[13]; \
3499  volatile unsigned long _res; \
3500  _argvec[0] = (unsigned long)_orig.nraddr; \
3501  _argvec[1] = (unsigned long)(arg1); \
3502  _argvec[2] = (unsigned long)(arg2); \
3503  _argvec[3] = (unsigned long)(arg3); \
3504  _argvec[4] = (unsigned long)(arg4); \
3505  _argvec[5] = (unsigned long)(arg5); \
3506  _argvec[6] = (unsigned long)(arg6); \
3507  _argvec[7] = (unsigned long)(arg7); \
3508  _argvec[8] = (unsigned long)(arg8); \
3509  _argvec[9] = (unsigned long)(arg9); \
3510  _argvec[10] = (unsigned long)(arg10); \
3511  _argvec[11] = (unsigned long)(arg11); \
3512  _argvec[12] = (unsigned long)(arg12); \
3513  __asm__ volatile( \
3514  VALGRIND_ALIGN_STACK \
3515  "ldr r0, [%1, #40] \n\t" \
3516  "ldr r1, [%1, #44] \n\t" \
3517  "ldr r2, [%1, #48] \n\t" \
3518  "push {r0, r1, r2} \n\t" \
3519  "ldr r0, [%1, #20] \n\t" \
3520  "ldr r1, [%1, #24] \n\t" \
3521  "ldr r2, [%1, #28] \n\t" \
3522  "ldr r3, [%1, #32] \n\t" \
3523  "ldr r4, [%1, #36] \n\t" \
3524  "push {r0, r1, r2, r3, r4} \n\t" \
3525  "ldr r0, [%1, #4] \n\t" \
3526  "ldr r1, [%1, #8] \n\t" \
3527  "ldr r2, [%1, #12] \n\t" \
3528  "ldr r3, [%1, #16] \n\t" \
3529  "ldr r4, [%1] \n\t" /* target->r4 */ \
3530  VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_R4 \
3531  VALGRIND_RESTORE_STACK \
3532  "mov %0, r0" \
3533  : /*out*/ "=r" (_res) \
3534  : /*in*/ "0" (&_argvec[0]) \
3535  : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "r10" \
3536  ); \
3537  lval = (__typeof__(lval)) _res; \
3538  } while (0)
3539 
3540 #endif /* PLAT_arm_linux */
3541 
3542 /* ------------------------ arm64-linux ------------------------ */
3543 
3544 #if defined(PLAT_arm64_linux)
3545 
3546 /* These regs are trashed by the hidden call. */
/* x8 carries the call target ("target->x8" in the CALL_FN_ macros below)
   and x30 is lr, clobbered by the branch-and-link.  NOTE(review): the
   list is deliberately conservative (e.g. it includes some registers
   that the procedure-call standard treats as callee-saved) -- the hidden
   call is not guaranteed to preserve them; confirm against upstream. */
3547 #define __CALLER_SAVED_REGS \
3548  "x0", "x1", "x2", "x3","x4", "x5", "x6", "x7", "x8", "x9", \
3549  "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", \
3550  "x18", "x19", "x20", "x30", \
3551  "v0", "v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8", "v9", \
3552  "v10", "v11", "v12", "v13", "v14", "v15", "v16", "v17", \
3553  "v18", "v19", "v20", "v21", "v22", "v23", "v24", "v25", \
3554  "v26", "v27", "v28", "v29", "v30", "v31"
3555 
3556 /* x21 is callee-saved, so we can use it to save and restore SP around
3557  the hidden call. */
3558 #define VALGRIND_ALIGN_STACK \
3559  "mov x21, sp\n\t" \
3560  "bic sp, x21, #15\n\t"
3561 #define VALGRIND_RESTORE_STACK \
3562  "mov sp, x21\n\t"
3563 
3564 /* These CALL_FN_ macros assume that on arm64-linux,
3565  sizeof(unsigned long) == 8. */
3566 
/* Call a zero-argument function via the non-redirected path on
   arm64-linux.  The target address is loaded from _argvec[0] into x8,
   the magic branch sequence performs the call, and the word result is
   taken from x0.  x21 is clobbered by VALGRIND_ALIGN_STACK. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3585 
/* Call a one-argument function via the non-redirected path on
   arm64-linux.  arg1 is loaded from _argvec[1] into x0 (first AAPCS64
   integer argument register); target address goes into x8. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3606 
/* Call a two-argument function via the non-redirected path on
   arm64-linux.  arg1/arg2 are loaded into x0/x1; target address into
   x8.  FIX: the clobber list previously ended with
   __CALLER_SAVED_REGS followed by a commented-out "r10" — an arm32
   copy-paste leftover.  VALGRIND_ALIGN_STACK writes x21 (it saves the
   original SP there), so x21 must be declared clobbered, exactly as
   the sibling CALL_FN_W_v / CALL_FN_W_W macros do; otherwise the
   compiler may keep a live value in x21 across the asm and be
   silently corrupted. */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3629 
/* Call a three-argument function via the non-redirected path on
   arm64-linux.  arg1..arg3 are loaded into x0..x2; target address
   into x8.  FIX: declare x21 clobbered (VALGRIND_ALIGN_STACK saves SP
   in it); the old list carried a commented-out arm32 "r10" leftover
   and omitted x21, unlike CALL_FN_W_v / CALL_FN_W_W. */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3654 
/* Call a four-argument function via the non-redirected path on
   arm64-linux.  arg1..arg4 are loaded into x0..x3; target address
   into x8.  FIX: declare x21 clobbered (VALGRIND_ALIGN_STACK saves SP
   in it); the old list carried a commented-out arm32 "r10" leftover
   and omitted x21, unlike CALL_FN_W_v / CALL_FN_W_W. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         VALGRIND_ALIGN_STACK \
         "ldr x0, [%1, #8] \n\t" \
         "ldr x1, [%1, #16] \n\t" \
         "ldr x2, [%1, #24] \n\t" \
         "ldr x3, [%1, #32] \n\t" \
         "ldr x8, [%1] \n\t" /* target->x8 */ \
         VALGRIND_BRANCH_AND_LINK_TO_NOREDIR_X8 \
         VALGRIND_RESTORE_STACK \
         "mov %0, x0" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS, "x21" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3681 
3682 #endif /* PLAT_arm64_linux */
3683 
3684 /* ------------------------- s390x-linux ------------------------- */
3685 
3686 #if defined(PLAT_s390x_linux)
3687 
3688 /* Similar workaround as amd64 (see above), but we use r11 as frame
3689  pointer and save the old r11 in r7. r11 might be used for
3690  argvec, therefore we copy argvec in r1 since r1 is clobbered
3691  after the call anyway. */
/* When CFI asm is available, emit unwind directives so stack traces
   through the hidden call remain usable: r1 receives a copy of the
   argvec pointer (r11 may itself hold argvec and is about to be
   repurposed as frame pointer), the old r11 is stashed in r7, and
   r11 is pointed at the caller's CFA. */
#if defined(__GNUC__) && defined(__GCC_HAVE_DWARF2_CFI_ASM)
# define __FRAME_POINTER \
      ,"d"(__builtin_dwarf_cfa())
# define VALGRIND_CFI_PROLOGUE \
   ".cfi_remember_state\n\t" \
   "lgr 1,%1\n\t" /* copy the argvec pointer in r1 */ \
   "lgr 7,11\n\t" \
   "lgr 11,%2\n\t" \
   ".cfi_def_cfa r11, 0\n\t"
# define VALGRIND_CFI_EPILOGUE \
   "lgr 11, 7\n\t" \
   ".cfi_restore_state\n\t"
#else
/* No CFI support: still copy the argvec pointer into r1, which is
   what the CALL_FN_ bodies below index from. */
# define __FRAME_POINTER
# define VALGRIND_CFI_PROLOGUE \
   "lgr 1,%1\n\t"
# define VALGRIND_CFI_EPILOGUE
#endif

/* Nb: On s390 the stack pointer is properly aligned *at all times*
   according to the s390 GCC maintainer. (The ABI specification is not
   precise in this regard.) Therefore, VALGRIND_ALIGN_STACK and
   VALGRIND_RESTORE_STACK are not defined here. */

/* These regs are trashed by the hidden call. Note that we overwrite
   r14 in s390_irgen_noredir (VEX/priv/guest_s390_irgen.c) to give the
   function a proper return address. All others are ABI defined call
   clobbers. */
#define __CALLER_SAVED_REGS "0","1","2","3","4","5","14", \
                           "f0","f1","f2","f3","f4","f5","f6","f7"
3722 
3723 /* Nb: Although r11 is modified in the asm snippets below (inside
3724  VALGRIND_CFI_PROLOGUE) it is not listed in the clobber section, for
3725  two reasons:
3726  (1) r11 is restored in VALGRIND_CFI_EPILOGUE, so effectively it is not
3727  modified
3728  (2) GCC will complain that r11 cannot appear inside a clobber section,
3729  when compiled with -O -fno-omit-frame-pointer
3730  */
3731 
/* Call a zero-argument function via the non-redirected path on
   s390x-linux.  A 160-byte frame is carved out ("aghi 15,-160" —
   presumably the ABI register save area; confirm against the s390x
   ELF supplement), the target address is loaded from argvec[0] into
   r1, and the result is taken from r2.  r7 is clobbered because the
   CFI prologue stashes the old r11 there. */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 1, 0(1)\n\t" /* target->r1 */ \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "d" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3752 
/* The call abi has the arguments in r2-r6 and stack */
/* One-argument variant: arg1 is loaded from argvec[1] into r2, the
   first s390x argument register; the target goes into r1 last so the
   argvec base in r1 stays usable for the preceding loads. */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Two-argument variant: args go to r2/r3. */
#define CALL_FN_W_WW(lval, orig, arg1, arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3801 
/* Three-argument variant: args go to r2/r3/r4. */
#define CALL_FN_W_WWW(lval, orig, arg1, arg2, arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Four-argument variant: args go to r2..r5. */
#define CALL_FN_W_WWWW(lval, orig, arg1, arg2, arg3, arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3857 
/* Five-argument variant: args go to r2..r6; r6 is additionally
   clobbered (it is not in __CALLER_SAVED_REGS). */
#define CALL_FN_W_5W(lval, orig, arg1, arg2, arg3, arg4, arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-160\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,160\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Six-argument variant: first five args in r2..r6; arg6 is copied to
   the stack parameter slot at 160(r15) via mvc, and the frame grows
   by 8 bytes (168) to hold it. */
#define CALL_FN_W_6W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-168\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,168\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3922 
/* Seven-argument variant: r2..r6 plus two stack slots (frame 176). */
#define CALL_FN_W_7W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-176\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,176\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Eight-argument variant: r2..r6 plus three stack slots (frame 184). */
#define CALL_FN_W_8W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-184\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,184\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
3996 
/* Nine-argument variant: r2..r6 plus four stack slots (frame 192). */
#define CALL_FN_W_9W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                     arg6, arg7 ,arg8, arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-192\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,192\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Ten-argument variant: r2..r6 plus five stack slots (frame 200). */
#define CALL_FN_W_10W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[11]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-200\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,200\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4078 
/* Eleven-argument variant: r2..r6 plus six stack slots (frame 208). */
#define CALL_FN_W_11W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[12]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-208\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,208\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Twelve-argument variant: r2..r6 plus seven stack slots (frame 216). */
#define CALL_FN_W_12W(lval, orig, arg1, arg2, arg3, arg4, arg5, \
                      arg6, arg7 ,arg8, arg9, arg10, arg11, arg12)\
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[13]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)arg1; \
      _argvec[2] = (unsigned long)arg2; \
      _argvec[3] = (unsigned long)arg3; \
      _argvec[4] = (unsigned long)arg4; \
      _argvec[5] = (unsigned long)arg5; \
      _argvec[6] = (unsigned long)arg6; \
      _argvec[7] = (unsigned long)arg7; \
      _argvec[8] = (unsigned long)arg8; \
      _argvec[9] = (unsigned long)arg9; \
      _argvec[10] = (unsigned long)arg10; \
      _argvec[11] = (unsigned long)arg11; \
      _argvec[12] = (unsigned long)arg12; \
      __asm__ volatile( \
         VALGRIND_CFI_PROLOGUE \
         "aghi 15,-216\n\t" \
         "lg 2, 8(1)\n\t" \
         "lg 3,16(1)\n\t" \
         "lg 4,24(1)\n\t" \
         "lg 5,32(1)\n\t" \
         "lg 6,40(1)\n\t" \
         "mvc 160(8,15), 48(1)\n\t" \
         "mvc 168(8,15), 56(1)\n\t" \
         "mvc 176(8,15), 64(1)\n\t" \
         "mvc 184(8,15), 72(1)\n\t" \
         "mvc 192(8,15), 80(1)\n\t" \
         "mvc 200(8,15), 88(1)\n\t" \
         "mvc 208(8,15), 96(1)\n\t" \
         "lg 1, 0(1)\n\t" \
         VALGRIND_CALL_NOREDIR_R1 \
         "lgr %0, 2\n\t" \
         "aghi 15,216\n\t" \
         VALGRIND_CFI_EPILOGUE \
         : /*out*/ "=d" (_res) \
         : /*in*/ "a" (&_argvec[0]) __FRAME_POINTER \
         : /*trash*/ "cc", "memory", __CALLER_SAVED_REGS,"6","7" \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4168 
4169 
4170 #endif /* PLAT_s390x_linux */
4171 
4172 /* ------------------------- mips32-linux ----------------------- */
4173 
4174 #if defined(PLAT_mips32_linux)
4175 
4176 /* These regs are trashed by the hidden call. */
/* Registers trashed by the hidden call on mips32-linux: the o32
   argument/temporary registers, t8/t9 ($24/$25) and ra ($31). */
#define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
"$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
"$25", "$31"

/* These CALL_FN_ macros assume that on mips-linux, sizeof(unsigned
   long) == 4. */

/* Zero-argument call: save $gp/$ra on a small frame, carve out the
   16-byte o32 outgoing-argument area, load the target into $25 (t9,
   which o32 PIC callees expect to hold their own address), branch via
   the non-redirected sequence, then unwind and take the result from
   $2 (v0). */
#define CALL_FN_W_v(lval, orig) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[1]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16\n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4208 
/* One-argument call: arg1 from argvec[1] into $4 (a0). */
#define CALL_FN_W_W(lval, orig, arg1) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[2]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" /* arg1*/ \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Two-argument call: args into $4/$5 (a0/a1). */
#define CALL_FN_W_WW(lval, orig, arg1,arg2) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[3]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4264 
/* Three-argument call: args into $4/$5/$6 (a0..a2). */
#define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[4]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Four-argument call: args into $4..$7 (a0..a3), filling all o32
   register arguments. */
#define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[5]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "subu $29, $29, 16 \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 16 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4328 
/* Five-argument call: $4..$7 for args 1-4; arg5 is staged through $4
   into the stack slot at 16($29) before $4 is reloaded with arg1. */
#define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[6]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 24\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 24 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
/* Six-argument call: args 5 and 6 are staged through $4 into stack
   slots 16($29)/20($29).  NOTE(review): the "nop" between the lw and
   sw of arg6 looks like a load-delay-slot precaution present only in
   this variant — confirm against upstream before removing. */
#define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[7]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "nop\n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4403 
/* Seven-argument call: args 5-7 staged through $4 into stack slots
   16/20/24($29); frame grows by 32 bytes. */
#define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[8]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 32\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 32 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)

/* Eight-argument call: args 5-8 staged through $4 into stack slots
   16..28($29); frame grows by 40 bytes. */
#define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[9]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4492 
/* Nine-argument call: args 5-9 staged through $4 into stack slots
   16..32($29); frame grows by 40 bytes. */
#define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
                     arg7,arg8,arg9) \
   do { \
      volatile OrigFn _orig = (orig); \
      volatile unsigned long _argvec[10]; \
      volatile unsigned long _res; \
      _argvec[0] = (unsigned long)_orig.nraddr; \
      _argvec[1] = (unsigned long)(arg1); \
      _argvec[2] = (unsigned long)(arg2); \
      _argvec[3] = (unsigned long)(arg3); \
      _argvec[4] = (unsigned long)(arg4); \
      _argvec[5] = (unsigned long)(arg5); \
      _argvec[6] = (unsigned long)(arg6); \
      _argvec[7] = (unsigned long)(arg7); \
      _argvec[8] = (unsigned long)(arg8); \
      _argvec[9] = (unsigned long)(arg9); \
      __asm__ volatile( \
         "subu $29, $29, 8 \n\t" \
         "sw $28, 0($29) \n\t" \
         "sw $31, 4($29) \n\t" \
         "lw $4, 20(%1) \n\t" \
         "subu $29, $29, 40\n\t" \
         "sw $4, 16($29) \n\t" \
         "lw $4, 24(%1) \n\t" \
         "sw $4, 20($29) \n\t" \
         "lw $4, 28(%1) \n\t" \
         "sw $4, 24($29) \n\t" \
         "lw $4, 32(%1) \n\t" \
         "sw $4, 28($29) \n\t" \
         "lw $4, 36(%1) \n\t" \
         "sw $4, 32($29) \n\t" \
         "lw $4, 4(%1) \n\t" \
         "lw $5, 8(%1) \n\t" \
         "lw $6, 12(%1) \n\t" \
         "lw $7, 16(%1) \n\t" \
         "lw $25, 0(%1) \n\t" /* target->t9 */ \
         VALGRIND_CALL_NOREDIR_T9 \
         "addu $29, $29, 40 \n\t" \
         "lw $28, 0($29) \n\t" \
         "lw $31, 4($29) \n\t" \
         "addu $29, $29, 8 \n\t" \
         "move %0, $2\n" \
         : /*out*/ "=r" (_res) \
         : /*in*/ "0" (&_argvec[0]) \
         : /*trash*/ "memory", __CALLER_SAVED_REGS \
      ); \
      lval = (__typeof__(lval)) _res; \
   } while (0)
4541 
/* mips32 (o32): call a 10-argument, word-returning function.  Args 1-4 in
   $4-$7, args 5-10 on the stack (48-byte frame, slots 16..36); gp/ra saved
   around the call, target in $25 (t9). */
4542 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4543  arg7,arg8,arg9,arg10) \
4544  do { \
4545  volatile OrigFn _orig = (orig); \
4546  volatile unsigned long _argvec[11]; \
4547  volatile unsigned long _res; \
4548  _argvec[0] = (unsigned long)_orig.nraddr; \
4549  _argvec[1] = (unsigned long)(arg1); \
4550  _argvec[2] = (unsigned long)(arg2); \
4551  _argvec[3] = (unsigned long)(arg3); \
4552  _argvec[4] = (unsigned long)(arg4); \
4553  _argvec[5] = (unsigned long)(arg5); \
4554  _argvec[6] = (unsigned long)(arg6); \
4555  _argvec[7] = (unsigned long)(arg7); \
4556  _argvec[8] = (unsigned long)(arg8); \
4557  _argvec[9] = (unsigned long)(arg9); \
4558  _argvec[10] = (unsigned long)(arg10); \
4559  __asm__ volatile( \
4560  "subu $29, $29, 8 \n\t" \
4561  "sw $28, 0($29) \n\t" \
4562  "sw $31, 4($29) \n\t" \
4563  "lw $4, 20(%1) \n\t" \
4564  "subu $29, $29, 48\n\t" \
4565  "sw $4, 16($29) \n\t" \
4566  "lw $4, 24(%1) \n\t" \
4567  "sw $4, 20($29) \n\t" \
4568  "lw $4, 28(%1) \n\t" \
4569  "sw $4, 24($29) \n\t" \
4570  "lw $4, 32(%1) \n\t" \
4571  "sw $4, 28($29) \n\t" \
4572  "lw $4, 36(%1) \n\t" \
4573  "sw $4, 32($29) \n\t" \
4574  "lw $4, 40(%1) \n\t" \
4575  "sw $4, 36($29) \n\t" \
4576  "lw $4, 4(%1) \n\t" \
4577  "lw $5, 8(%1) \n\t" \
4578  "lw $6, 12(%1) \n\t" \
4579  "lw $7, 16(%1) \n\t" \
4580  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4581  VALGRIND_CALL_NOREDIR_T9 \
4582  "addu $29, $29, 48 \n\t" \
4583  "lw $28, 0($29) \n\t" \
4584  "lw $31, 4($29) \n\t" \
4585  "addu $29, $29, 8 \n\t" \
4586  "move %0, $2\n" \
4587  : /*out*/ "=r" (_res) \
4588  : /*in*/ "0" (&_argvec[0]) \
4589  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4590  ); \
4591  lval = (__typeof__(lval)) _res; \
4592  } while (0)
4593 
/* mips32 (o32): call an 11-argument, word-returning function.  Args 1-4 in
   $4-$7, args 5-11 on the stack (48-byte frame, slots 16..40); gp/ra saved
   around the call, target in $25 (t9). */
4594 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4595  arg6,arg7,arg8,arg9,arg10, \
4596  arg11) \
4597  do { \
4598  volatile OrigFn _orig = (orig); \
4599  volatile unsigned long _argvec[12]; \
4600  volatile unsigned long _res; \
4601  _argvec[0] = (unsigned long)_orig.nraddr; \
4602  _argvec[1] = (unsigned long)(arg1); \
4603  _argvec[2] = (unsigned long)(arg2); \
4604  _argvec[3] = (unsigned long)(arg3); \
4605  _argvec[4] = (unsigned long)(arg4); \
4606  _argvec[5] = (unsigned long)(arg5); \
4607  _argvec[6] = (unsigned long)(arg6); \
4608  _argvec[7] = (unsigned long)(arg7); \
4609  _argvec[8] = (unsigned long)(arg8); \
4610  _argvec[9] = (unsigned long)(arg9); \
4611  _argvec[10] = (unsigned long)(arg10); \
4612  _argvec[11] = (unsigned long)(arg11); \
4613  __asm__ volatile( \
4614  "subu $29, $29, 8 \n\t" \
4615  "sw $28, 0($29) \n\t" \
4616  "sw $31, 4($29) \n\t" \
4617  "lw $4, 20(%1) \n\t" \
4618  "subu $29, $29, 48\n\t" \
4619  "sw $4, 16($29) \n\t" \
4620  "lw $4, 24(%1) \n\t" \
4621  "sw $4, 20($29) \n\t" \
4622  "lw $4, 28(%1) \n\t" \
4623  "sw $4, 24($29) \n\t" \
4624  "lw $4, 32(%1) \n\t" \
4625  "sw $4, 28($29) \n\t" \
4626  "lw $4, 36(%1) \n\t" \
4627  "sw $4, 32($29) \n\t" \
4628  "lw $4, 40(%1) \n\t" \
4629  "sw $4, 36($29) \n\t" \
4630  "lw $4, 44(%1) \n\t" \
4631  "sw $4, 40($29) \n\t" \
4632  "lw $4, 4(%1) \n\t" \
4633  "lw $5, 8(%1) \n\t" \
4634  "lw $6, 12(%1) \n\t" \
4635  "lw $7, 16(%1) \n\t" \
4636  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4637  VALGRIND_CALL_NOREDIR_T9 \
4638  "addu $29, $29, 48 \n\t" \
4639  "lw $28, 0($29) \n\t" \
4640  "lw $31, 4($29) \n\t" \
4641  "addu $29, $29, 8 \n\t" \
4642  "move %0, $2\n" \
4643  : /*out*/ "=r" (_res) \
4644  : /*in*/ "0" (&_argvec[0]) \
4645  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4646  ); \
4647  lval = (__typeof__(lval)) _res; \
4648  } while (0)
4649 
/* mips32 (o32): call a 12-argument, word-returning function.  Args 1-4 in
   $4-$7, args 5-12 on the stack (56-byte frame, slots 16..44); gp/ra saved
   around the call, target in $25 (t9).
   NOTE(review): unlike its mips32 siblings this variant uses the "r" input
   constraint instead of "0" (tie to output) for &_argvec[0]; both appear
   workable since %0 is only written last -- confirm against upstream
   valgrind.h before normalizing. */
4650 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
4651  arg6,arg7,arg8,arg9,arg10, \
4652  arg11,arg12) \
4653  do { \
4654  volatile OrigFn _orig = (orig); \
4655  volatile unsigned long _argvec[13]; \
4656  volatile unsigned long _res; \
4657  _argvec[0] = (unsigned long)_orig.nraddr; \
4658  _argvec[1] = (unsigned long)(arg1); \
4659  _argvec[2] = (unsigned long)(arg2); \
4660  _argvec[3] = (unsigned long)(arg3); \
4661  _argvec[4] = (unsigned long)(arg4); \
4662  _argvec[5] = (unsigned long)(arg5); \
4663  _argvec[6] = (unsigned long)(arg6); \
4664  _argvec[7] = (unsigned long)(arg7); \
4665  _argvec[8] = (unsigned long)(arg8); \
4666  _argvec[9] = (unsigned long)(arg9); \
4667  _argvec[10] = (unsigned long)(arg10); \
4668  _argvec[11] = (unsigned long)(arg11); \
4669  _argvec[12] = (unsigned long)(arg12); \
4670  __asm__ volatile( \
4671  "subu $29, $29, 8 \n\t" \
4672  "sw $28, 0($29) \n\t" \
4673  "sw $31, 4($29) \n\t" \
4674  "lw $4, 20(%1) \n\t" \
4675  "subu $29, $29, 56\n\t" \
4676  "sw $4, 16($29) \n\t" \
4677  "lw $4, 24(%1) \n\t" \
4678  "sw $4, 20($29) \n\t" \
4679  "lw $4, 28(%1) \n\t" \
4680  "sw $4, 24($29) \n\t" \
4681  "lw $4, 32(%1) \n\t" \
4682  "sw $4, 28($29) \n\t" \
4683  "lw $4, 36(%1) \n\t" \
4684  "sw $4, 32($29) \n\t" \
4685  "lw $4, 40(%1) \n\t" \
4686  "sw $4, 36($29) \n\t" \
4687  "lw $4, 44(%1) \n\t" \
4688  "sw $4, 40($29) \n\t" \
4689  "lw $4, 48(%1) \n\t" \
4690  "sw $4, 44($29) \n\t" \
4691  "lw $4, 4(%1) \n\t" \
4692  "lw $5, 8(%1) \n\t" \
4693  "lw $6, 12(%1) \n\t" \
4694  "lw $7, 16(%1) \n\t" \
4695  "lw $25, 0(%1) \n\t" /* target->t9 */ \
4696  VALGRIND_CALL_NOREDIR_T9 \
4697  "addu $29, $29, 56 \n\t" \
4698  "lw $28, 0($29) \n\t" \
4699  "lw $31, 4($29) \n\t" \
4700  "addu $29, $29, 8 \n\t" \
4701  "move %0, $2\n" \
4702  : /*out*/ "=r" (_res) \
4703  : /*in*/ "r" (&_argvec[0]) \
4704  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4705  ); \
4706  lval = (__typeof__(lval)) _res; \
4707  } while (0)
4708 
4709 #endif /* PLAT_mips32_linux */
4710 
4711 /* ------------------------- mips64-linux ------------------------- */
4712 
4713 #if defined(PLAT_mips64_linux)
4714 
4715 /* These regs are trashed by the hidden call.  $2/$3 are the result
   registers, $4-$11 carry arguments, $12-$15 and $24 are temporaries,
   $25 is t9 (call target) and $31 is ra -- none survive the call. */
4716 #define __CALLER_SAVED_REGS "$2", "$3", "$4", "$5", "$6", \
4717 "$7", "$8", "$9", "$10", "$11", "$12", "$13", "$14", "$15", "$24", \
4718 "$25", "$31"
4719 
4720 /* These CALL_FN_ macros assume that on mips64-linux, sizeof(unsigned
4721  long) == 8. */
4722 
/* mips64 (n64): call a zero-argument, word-returning function.  The target
   address is loaded into $25 (t9) and invoked via the non-redirected-call
   gadget; the result is taken from $2 (v0). */
4723 #define CALL_FN_W_v(lval, orig) \
4724  do { \
4725  volatile OrigFn _orig = (orig); \
4726  volatile unsigned long _argvec[1]; \
4727  volatile unsigned long _res; \
4728  _argvec[0] = (unsigned long)_orig.nraddr; \
4729  __asm__ volatile( \
4730  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4731  VALGRIND_CALL_NOREDIR_T9 \
4732  "move %0, $2\n" \
4733  : /*out*/ "=r" (_res) \
4734  : /*in*/ "0" (&_argvec[0]) \
4735  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4736  ); \
4737  lval = (__typeof__(lval)) _res; \
4738  } while (0)
4739 
/* mips64 (n64): call a 1-argument, word-returning function; arg1 in $4,
   target in $25 (t9), result from $2 (v0). */
4740 #define CALL_FN_W_W(lval, orig, arg1) \
4741  do { \
4742  volatile OrigFn _orig = (orig); \
4743  volatile unsigned long _argvec[2]; \
4744  volatile unsigned long _res; \
4745  _argvec[0] = (unsigned long)_orig.nraddr; \
4746  _argvec[1] = (unsigned long)(arg1); \
4747  __asm__ volatile( \
4748  "ld $4, 8(%1)\n\t" /* arg1*/ \
4749  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4750  VALGRIND_CALL_NOREDIR_T9 \
4751  "move %0, $2\n" \
4752  : /*out*/ "=r" (_res) \
4753  : /*in*/ "r" (&_argvec[0]) \
4754  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4755  ); \
4756  lval = (__typeof__(lval)) _res; \
4757  } while (0)
4758 
/* mips64 (n64): call a 2-argument, word-returning function; args in $4-$5,
   target in $25 (t9). */
4759 #define CALL_FN_W_WW(lval, orig, arg1,arg2) \
4760  do { \
4761  volatile OrigFn _orig = (orig); \
4762  volatile unsigned long _argvec[3]; \
4763  volatile unsigned long _res; \
4764  _argvec[0] = (unsigned long)_orig.nraddr; \
4765  _argvec[1] = (unsigned long)(arg1); \
4766  _argvec[2] = (unsigned long)(arg2); \
4767  __asm__ volatile( \
4768  "ld $4, 8(%1)\n\t" \
4769  "ld $5, 16(%1)\n\t" \
4770  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4771  VALGRIND_CALL_NOREDIR_T9 \
4772  "move %0, $2\n" \
4773  : /*out*/ "=r" (_res) \
4774  : /*in*/ "r" (&_argvec[0]) \
4775  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4776  ); \
4777  lval = (__typeof__(lval)) _res; \
4778  } while (0)
4779 
/* mips64 (n64): call a 3-argument, word-returning function; args in $4-$6,
   target in $25 (t9). */
4780 #define CALL_FN_W_WWW(lval, orig, arg1,arg2,arg3) \
4781  do { \
4782  volatile OrigFn _orig = (orig); \
4783  volatile unsigned long _argvec[4]; \
4784  volatile unsigned long _res; \
4785  _argvec[0] = (unsigned long)_orig.nraddr; \
4786  _argvec[1] = (unsigned long)(arg1); \
4787  _argvec[2] = (unsigned long)(arg2); \
4788  _argvec[3] = (unsigned long)(arg3); \
4789  __asm__ volatile( \
4790  "ld $4, 8(%1)\n\t" \
4791  "ld $5, 16(%1)\n\t" \
4792  "ld $6, 24(%1)\n\t" \
4793  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4794  VALGRIND_CALL_NOREDIR_T9 \
4795  "move %0, $2\n" \
4796  : /*out*/ "=r" (_res) \
4797  : /*in*/ "r" (&_argvec[0]) \
4798  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4799  ); \
4800  lval = (__typeof__(lval)) _res; \
4801  } while (0)
4802 
/* mips64 (n64): call a 4-argument, word-returning function; args in $4-$7,
   target in $25 (t9). */
4803 #define CALL_FN_W_WWWW(lval, orig, arg1,arg2,arg3,arg4) \
4804  do { \
4805  volatile OrigFn _orig = (orig); \
4806  volatile unsigned long _argvec[5]; \
4807  volatile unsigned long _res; \
4808  _argvec[0] = (unsigned long)_orig.nraddr; \
4809  _argvec[1] = (unsigned long)(arg1); \
4810  _argvec[2] = (unsigned long)(arg2); \
4811  _argvec[3] = (unsigned long)(arg3); \
4812  _argvec[4] = (unsigned long)(arg4); \
4813  __asm__ volatile( \
4814  "ld $4, 8(%1)\n\t" \
4815  "ld $5, 16(%1)\n\t" \
4816  "ld $6, 24(%1)\n\t" \
4817  "ld $7, 32(%1)\n\t" \
4818  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4819  VALGRIND_CALL_NOREDIR_T9 \
4820  "move %0, $2\n" \
4821  : /*out*/ "=r" (_res) \
4822  : /*in*/ "r" (&_argvec[0]) \
4823  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4824  ); \
4825  lval = (__typeof__(lval)) _res; \
4826  } while (0)
4827 
/* mips64 (n64): call a 5-argument, word-returning function; args in $4-$8,
   target in $25 (t9). */
4828 #define CALL_FN_W_5W(lval, orig, arg1,arg2,arg3,arg4,arg5) \
4829  do { \
4830  volatile OrigFn _orig = (orig); \
4831  volatile unsigned long _argvec[6]; \
4832  volatile unsigned long _res; \
4833  _argvec[0] = (unsigned long)_orig.nraddr; \
4834  _argvec[1] = (unsigned long)(arg1); \
4835  _argvec[2] = (unsigned long)(arg2); \
4836  _argvec[3] = (unsigned long)(arg3); \
4837  _argvec[4] = (unsigned long)(arg4); \
4838  _argvec[5] = (unsigned long)(arg5); \
4839  __asm__ volatile( \
4840  "ld $4, 8(%1)\n\t" \
4841  "ld $5, 16(%1)\n\t" \
4842  "ld $6, 24(%1)\n\t" \
4843  "ld $7, 32(%1)\n\t" \
4844  "ld $8, 40(%1)\n\t" \
4845  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4846  VALGRIND_CALL_NOREDIR_T9 \
4847  "move %0, $2\n" \
4848  : /*out*/ "=r" (_res) \
4849  : /*in*/ "r" (&_argvec[0]) \
4850  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4851  ); \
4852  lval = (__typeof__(lval)) _res; \
4853  } while (0)
4854 
/* mips64 (n64): call a 6-argument, word-returning function; args in $4-$9,
   target in $25 (t9). */
4855 #define CALL_FN_W_6W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6) \
4856  do { \
4857  volatile OrigFn _orig = (orig); \
4858  volatile unsigned long _argvec[7]; \
4859  volatile unsigned long _res; \
4860  _argvec[0] = (unsigned long)_orig.nraddr; \
4861  _argvec[1] = (unsigned long)(arg1); \
4862  _argvec[2] = (unsigned long)(arg2); \
4863  _argvec[3] = (unsigned long)(arg3); \
4864  _argvec[4] = (unsigned long)(arg4); \
4865  _argvec[5] = (unsigned long)(arg5); \
4866  _argvec[6] = (unsigned long)(arg6); \
4867  __asm__ volatile( \
4868  "ld $4, 8(%1)\n\t" \
4869  "ld $5, 16(%1)\n\t" \
4870  "ld $6, 24(%1)\n\t" \
4871  "ld $7, 32(%1)\n\t" \
4872  "ld $8, 40(%1)\n\t" \
4873  "ld $9, 48(%1)\n\t" \
4874  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4875  VALGRIND_CALL_NOREDIR_T9 \
4876  "move %0, $2\n" \
4877  : /*out*/ "=r" (_res) \
4878  : /*in*/ "r" (&_argvec[0]) \
4879  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4880  ); \
4881  lval = (__typeof__(lval)) _res; \
4882  } while (0)
4883 
/* mips64 (n64): call a 7-argument, word-returning function; args in $4-$10,
   target in $25 (t9). */
4884 #define CALL_FN_W_7W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4885  arg7) \
4886  do { \
4887  volatile OrigFn _orig = (orig); \
4888  volatile unsigned long _argvec[8]; \
4889  volatile unsigned long _res; \
4890  _argvec[0] = (unsigned long)_orig.nraddr; \
4891  _argvec[1] = (unsigned long)(arg1); \
4892  _argvec[2] = (unsigned long)(arg2); \
4893  _argvec[3] = (unsigned long)(arg3); \
4894  _argvec[4] = (unsigned long)(arg4); \
4895  _argvec[5] = (unsigned long)(arg5); \
4896  _argvec[6] = (unsigned long)(arg6); \
4897  _argvec[7] = (unsigned long)(arg7); \
4898  __asm__ volatile( \
4899  "ld $4, 8(%1)\n\t" \
4900  "ld $5, 16(%1)\n\t" \
4901  "ld $6, 24(%1)\n\t" \
4902  "ld $7, 32(%1)\n\t" \
4903  "ld $8, 40(%1)\n\t" \
4904  "ld $9, 48(%1)\n\t" \
4905  "ld $10, 56(%1)\n\t" \
4906  "ld $25, 0(%1) \n\t" /* target->t9 */ \
4907  VALGRIND_CALL_NOREDIR_T9 \
4908  "move %0, $2\n" \
4909  : /*out*/ "=r" (_res) \
4910  : /*in*/ "r" (&_argvec[0]) \
4911  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4912  ); \
4913  lval = (__typeof__(lval)) _res; \
4914  } while (0)
4915 
/* mips64 (n64): call an 8-argument, word-returning function; all eight args
   fit in registers $4-$11, target in $25 (t9), so no stack traffic. */
4916 #define CALL_FN_W_8W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4917  arg7,arg8) \
4918  do { \
4919  volatile OrigFn _orig = (orig); \
4920  volatile unsigned long _argvec[9]; \
4921  volatile unsigned long _res; \
4922  _argvec[0] = (unsigned long)_orig.nraddr; \
4923  _argvec[1] = (unsigned long)(arg1); \
4924  _argvec[2] = (unsigned long)(arg2); \
4925  _argvec[3] = (unsigned long)(arg3); \
4926  _argvec[4] = (unsigned long)(arg4); \
4927  _argvec[5] = (unsigned long)(arg5); \
4928  _argvec[6] = (unsigned long)(arg6); \
4929  _argvec[7] = (unsigned long)(arg7); \
4930  _argvec[8] = (unsigned long)(arg8); \
4931  __asm__ volatile( \
4932  "ld $4, 8(%1)\n\t" \
4933  "ld $5, 16(%1)\n\t" \
4934  "ld $6, 24(%1)\n\t" \
4935  "ld $7, 32(%1)\n\t" \
4936  "ld $8, 40(%1)\n\t" \
4937  "ld $9, 48(%1)\n\t" \
4938  "ld $10, 56(%1)\n\t" \
4939  "ld $11, 64(%1)\n\t" \
4940  "ld $25, 0(%1) \n\t" /* target->t9 */ \
4941  VALGRIND_CALL_NOREDIR_T9 \
4942  "move %0, $2\n" \
4943  : /*out*/ "=r" (_res) \
4944  : /*in*/ "r" (&_argvec[0]) \
4945  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4946  ); \
4947  lval = (__typeof__(lval)) _res; \
4948  } while (0)
4949 
/* mips64 (n64): call a 9-argument, word-returning function.  Args 1-8 in
   $4-$11; arg9 is spilled to an 8-byte stack slot below $29, which is
   restored after the call. */
4950 #define CALL_FN_W_9W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4951  arg7,arg8,arg9) \
4952  do { \
4953  volatile OrigFn _orig = (orig); \
4954  volatile unsigned long _argvec[10]; \
4955  volatile unsigned long _res; \
4956  _argvec[0] = (unsigned long)_orig.nraddr; \
4957  _argvec[1] = (unsigned long)(arg1); \
4958  _argvec[2] = (unsigned long)(arg2); \
4959  _argvec[3] = (unsigned long)(arg3); \
4960  _argvec[4] = (unsigned long)(arg4); \
4961  _argvec[5] = (unsigned long)(arg5); \
4962  _argvec[6] = (unsigned long)(arg6); \
4963  _argvec[7] = (unsigned long)(arg7); \
4964  _argvec[8] = (unsigned long)(arg8); \
4965  _argvec[9] = (unsigned long)(arg9); \
4966  __asm__ volatile( \
4967  "dsubu $29, $29, 8\n\t" \
4968  "ld $4, 72(%1)\n\t" \
4969  "sd $4, 0($29)\n\t" \
4970  "ld $4, 8(%1)\n\t" \
4971  "ld $5, 16(%1)\n\t" \
4972  "ld $6, 24(%1)\n\t" \
4973  "ld $7, 32(%1)\n\t" \
4974  "ld $8, 40(%1)\n\t" \
4975  "ld $9, 48(%1)\n\t" \
4976  "ld $10, 56(%1)\n\t" \
4977  "ld $11, 64(%1)\n\t" \
4978  "ld $25, 0(%1)\n\t" /* target->t9 */ \
4979  VALGRIND_CALL_NOREDIR_T9 \
4980  "daddu $29, $29, 8\n\t" \
4981  "move %0, $2\n" \
4982  : /*out*/ "=r" (_res) \
4983  : /*in*/ "r" (&_argvec[0]) \
4984  : /*trash*/ "memory", __CALLER_SAVED_REGS \
4985  ); \
4986  lval = (__typeof__(lval)) _res; \
4987  } while (0)
4988 
/* mips64 (n64): call a 10-argument, word-returning function.  Args 1-8 in
   $4-$11; args 9-10 go in a 16-byte stack area below $29, restored after
   the call. */
4989 #define CALL_FN_W_10W(lval, orig, arg1,arg2,arg3,arg4,arg5,arg6, \
4990  arg7,arg8,arg9,arg10) \
4991  do { \
4992  volatile OrigFn _orig = (orig); \
4993  volatile unsigned long _argvec[11]; \
4994  volatile unsigned long _res; \
4995  _argvec[0] = (unsigned long)_orig.nraddr; \
4996  _argvec[1] = (unsigned long)(arg1); \
4997  _argvec[2] = (unsigned long)(arg2); \
4998  _argvec[3] = (unsigned long)(arg3); \
4999  _argvec[4] = (unsigned long)(arg4); \
5000  _argvec[5] = (unsigned long)(arg5); \
5001  _argvec[6] = (unsigned long)(arg6); \
5002  _argvec[7] = (unsigned long)(arg7); \
5003  _argvec[8] = (unsigned long)(arg8); \
5004  _argvec[9] = (unsigned long)(arg9); \
5005  _argvec[10] = (unsigned long)(arg10); \
5006  __asm__ volatile( \
5007  "dsubu $29, $29, 16\n\t" \
5008  "ld $4, 72(%1)\n\t" \
5009  "sd $4, 0($29)\n\t" \
5010  "ld $4, 80(%1)\n\t" \
5011  "sd $4, 8($29)\n\t" \
5012  "ld $4, 8(%1)\n\t" \
5013  "ld $5, 16(%1)\n\t" \
5014  "ld $6, 24(%1)\n\t" \
5015  "ld $7, 32(%1)\n\t" \
5016  "ld $8, 40(%1)\n\t" \
5017  "ld $9, 48(%1)\n\t" \
5018  "ld $10, 56(%1)\n\t" \
5019  "ld $11, 64(%1)\n\t" \
5020  "ld $25, 0(%1)\n\t" /* target->t9 */ \
5021  VALGRIND_CALL_NOREDIR_T9 \
5022  "daddu $29, $29, 16\n\t" \
5023  "move %0, $2\n" \
5024  : /*out*/ "=r" (_res) \
5025  : /*in*/ "r" (&_argvec[0]) \
5026  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5027  ); \
5028  lval = (__typeof__(lval)) _res; \
5029  } while (0)
5030 
/* mips64 (n64): call an 11-argument, word-returning function.  Args 1-8 in
   $4-$11; args 9-11 go in a 24-byte stack area below $29, restored after
   the call. */
5031 #define CALL_FN_W_11W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5032  arg6,arg7,arg8,arg9,arg10, \
5033  arg11) \
5034  do { \
5035  volatile OrigFn _orig = (orig); \
5036  volatile unsigned long _argvec[12]; \
5037  volatile unsigned long _res; \
5038  _argvec[0] = (unsigned long)_orig.nraddr; \
5039  _argvec[1] = (unsigned long)(arg1); \
5040  _argvec[2] = (unsigned long)(arg2); \
5041  _argvec[3] = (unsigned long)(arg3); \
5042  _argvec[4] = (unsigned long)(arg4); \
5043  _argvec[5] = (unsigned long)(arg5); \
5044  _argvec[6] = (unsigned long)(arg6); \
5045  _argvec[7] = (unsigned long)(arg7); \
5046  _argvec[8] = (unsigned long)(arg8); \
5047  _argvec[9] = (unsigned long)(arg9); \
5048  _argvec[10] = (unsigned long)(arg10); \
5049  _argvec[11] = (unsigned long)(arg11); \
5050  __asm__ volatile( \
5051  "dsubu $29, $29, 24\n\t" \
5052  "ld $4, 72(%1)\n\t" \
5053  "sd $4, 0($29)\n\t" \
5054  "ld $4, 80(%1)\n\t" \
5055  "sd $4, 8($29)\n\t" \
5056  "ld $4, 88(%1)\n\t" \
5057  "sd $4, 16($29)\n\t" \
5058  "ld $4, 8(%1)\n\t" \
5059  "ld $5, 16(%1)\n\t" \
5060  "ld $6, 24(%1)\n\t" \
5061  "ld $7, 32(%1)\n\t" \
5062  "ld $8, 40(%1)\n\t" \
5063  "ld $9, 48(%1)\n\t" \
5064  "ld $10, 56(%1)\n\t" \
5065  "ld $11, 64(%1)\n\t" \
5066  "ld $25, 0(%1)\n\t" /* target->t9 */ \
5067  VALGRIND_CALL_NOREDIR_T9 \
5068  "daddu $29, $29, 24\n\t" \
5069  "move %0, $2\n" \
5070  : /*out*/ "=r" (_res) \
5071  : /*in*/ "r" (&_argvec[0]) \
5072  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5073  ); \
5074  lval = (__typeof__(lval)) _res; \
5075  } while (0)
5076 
/* mips64 (n64): call a 12-argument, word-returning function.  Args 1-8 in
   $4-$11; args 9-12 go in a 32-byte stack area below $29, restored after
   the call. */
5077 #define CALL_FN_W_12W(lval, orig, arg1,arg2,arg3,arg4,arg5, \
5078  arg6,arg7,arg8,arg9,arg10, \
5079  arg11,arg12) \
5080  do { \
5081  volatile OrigFn _orig = (orig); \
5082  volatile unsigned long _argvec[13]; \
5083  volatile unsigned long _res; \
5084  _argvec[0] = (unsigned long)_orig.nraddr; \
5085  _argvec[1] = (unsigned long)(arg1); \
5086  _argvec[2] = (unsigned long)(arg2); \
5087  _argvec[3] = (unsigned long)(arg3); \
5088  _argvec[4] = (unsigned long)(arg4); \
5089  _argvec[5] = (unsigned long)(arg5); \
5090  _argvec[6] = (unsigned long)(arg6); \
5091  _argvec[7] = (unsigned long)(arg7); \
5092  _argvec[8] = (unsigned long)(arg8); \
5093  _argvec[9] = (unsigned long)(arg9); \
5094  _argvec[10] = (unsigned long)(arg10); \
5095  _argvec[11] = (unsigned long)(arg11); \
5096  _argvec[12] = (unsigned long)(arg12); \
5097  __asm__ volatile( \
5098  "dsubu $29, $29, 32\n\t" \
5099  "ld $4, 72(%1)\n\t" \
5100  "sd $4, 0($29)\n\t" \
5101  "ld $4, 80(%1)\n\t" \
5102  "sd $4, 8($29)\n\t" \
5103  "ld $4, 88(%1)\n\t" \
5104  "sd $4, 16($29)\n\t" \
5105  "ld $4, 96(%1)\n\t" \
5106  "sd $4, 24($29)\n\t" \
5107  "ld $4, 8(%1)\n\t" \
5108  "ld $5, 16(%1)\n\t" \
5109  "ld $6, 24(%1)\n\t" \
5110  "ld $7, 32(%1)\n\t" \
5111  "ld $8, 40(%1)\n\t" \
5112  "ld $9, 48(%1)\n\t" \
5113  "ld $10, 56(%1)\n\t" \
5114  "ld $11, 64(%1)\n\t" \
5115  "ld $25, 0(%1)\n\t" /* target->t9 */ \
5116  VALGRIND_CALL_NOREDIR_T9 \
5117  "daddu $29, $29, 32\n\t" \
5118  "move %0, $2\n" \
5119  : /*out*/ "=r" (_res) \
5120  : /*in*/ "r" (&_argvec[0]) \
5121  : /*trash*/ "memory", __CALLER_SAVED_REGS \
5122  ); \
5123  lval = (__typeof__(lval)) _res; \
5124  } while (0)
5125 
5126 #endif /* PLAT_mips64_linux */
5127 
5128 
5129 /* ------------------------------------------------------------------ */
5130 /* ARCHITECTURE INDEPENDENT MACROS for CLIENT REQUESTS. */
5131 /* */
5132 /* ------------------------------------------------------------------ */
5133 
5134 /* Some request codes. There are many more of these, but most are not
5135  exposed to end-user view. These are the public ones, all of the
5136  form 0x1000 + small_number.
5137 
5138  Core ones are in the range 0x00000000--0x0000ffff. The non-public
5139  ones start at 0x2000.
5140 */
5141 
5142 /* These macros are used by tools -- they must be public, but don't
5143  embed them into other programs. */
/* Build a tool-specific client-request base code from the tool's
   two-character prefix: byte 'a' in bits 31-24, byte 'b' in bits 23-16. */
5144 #define VG_USERREQ_TOOL_BASE(a,b) \
5145  ((unsigned int)(((a)&0xff) << 24 | ((b)&0xff) << 16))
/* True iff request code 'v' belongs to the tool with two-character prefix
   (a,b), i.e. the top 16 bits of 'v' match that tool's base code. */
5146 #define VG_IS_TOOL_USERREQ(a, b, v) \
5147  (VG_USERREQ_TOOL_BASE(a,b) == ((v) & 0xffff0000))
5148 
5149 /* !! ABIWARNING !! ABIWARNING !! ABIWARNING !! ABIWARNING !!
5150  This enum comprises an ABI exported by Valgrind to programs
5151  which use client requests. DO NOT CHANGE THE ORDER OF THESE
5152  ENTRIES, NOR DELETE ANY -- add new ones at the end. */
5153 typedef
5156 
5157  /* These allow any function to be called from the simulated
5158  CPU but run on the real CPU. Nb: the first arg passed to
5159  the function is always the ThreadId of the running
5160  thread! So CLIENT_CALL0 actually requires a 1 arg
5161  function, etc. */
5166 
5167  /* Can be useful in regression testing suites -- eg. can
5168  send Valgrind's output to /dev/null and still count
5169  errors. */
5171 
5172  /* Allows the client program and/or gdbserver to execute a monitor
5173  command. */
5175 
5176  /* These are useful and can be interpreted by any tool that
5177  tracks malloc() et al, by using vg_replace_malloc.c. */
5181  /* Memory pool support. */
5190 
5191  /* Allow printfs to valgrind log. */
5192  /* The first two pass the va_list argument by value, which
5193  assumes it is the same size as or smaller than a UWord,
5194  which generally isn't the case. Hence are deprecated.
5195  The second two pass the vargs by reference and so are
5196  immune to this problem. */
5197  /* both :: char* fmt, va_list vargs (DEPRECATED) */
5200  /* both :: char* fmt, va_list* vargs */
5203 
5204  /* Stack support. */
5208 
5209  /* Wine support */
5211 
5212  /* Querying of debug info. */
5214 
5215  /* Disable/enable error reporting level. Takes a single
5216  Word arg which is the delta to this thread's error
5217  disablement indicator. Hence 1 disables or further
5218  disables errors, and -1 moves back towards enablement.
5219  Other values are not allowed. */
5221 
5222  /* Initialise IR injection */
5224  } Vg_ClientRequest;
5225 
5226 #if !defined(__GNUC__)
5227 # define __extension__ /* */
5228 #endif
5229 
5230 
5231 /* Returns the number of Valgrinds this code is running under. That
5232  is, 0 if running natively, 1 if running under Valgrind, 2 if
5233  running under Valgrind which is running under another Valgrind,
5234  etc. */
/* Expression-form client request; the 0 default makes it evaluate to 0
   (and be harmless) when the program runs natively. */
5235 #define RUNNING_ON_VALGRIND \
5236  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* if not */, \
5237  VG_USERREQ__RUNNING_ON_VALGRIND, \
5238  0, 0, 0, 0, 0) \
5239 
5240 
5241 /* Discard translation of code in the range [_qzz_addr .. _qzz_addr +
5242  _qzz_len - 1]. Useful if you are debugging a JITter or some such,
5243  since it provides a way to make sure valgrind will retranslate the
5244  invalidated area. Returns no value. */
/* Statement-form client request (no result): passes the address and length
   of the range whose translations should be discarded. */
5245 #define VALGRIND_DISCARD_TRANSLATIONS(_qzz_addr,_qzz_len) \
5246  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DISCARD_TRANSLATIONS, \
5247  _qzz_addr, _qzz_len, 0, 0, 0)
5248 
5249 
5250 /* These requests are for getting Valgrind itself to print something.
5251  Possibly with a backtrace. This is a really ugly hack. The return value
5252  is the number of characters printed, excluding the "**<pid>** " part at the
5253  start and the backtrace (if present). */
5254 
5255 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5256 /* Modern GCC will optimize the static routine out if unused,
5257  and unused attribute will shut down warnings about it. */
5258 static int VALGRIND_PRINTF(const char *format, ...)
5259  __attribute__((format(__printf__, 1, 2), __unused__));
5260 #endif
5261 static int
5262 #if defined(_MSC_VER)
5263 __inline
5264 #endif
5265 VALGRIND_PRINTF(const char *format, ...)
5266 {
5267 #if defined(NVALGRIND)
5268  return 0;
5269 #else /* NVALGRIND */
5270 #if defined(_MSC_VER) || defined(__MINGW64__)
5271  uintptr_t _qzz_res;
5272 #else
5273  unsigned long _qzz_res;
5274 #endif
5275  va_list vargs;
5276  va_start(vargs, format);
5277 #if defined(_MSC_VER) || defined(__MINGW64__)
5278  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5280  (uintptr_t)format,
5281  (uintptr_t)&vargs,
5282  0, 0, 0);
5283 #else
5284  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5286  (unsigned long)format,
5287  (unsigned long)&vargs,
5288  0, 0, 0);
5289 #endif
5290  va_end(vargs);
5291  return (int)_qzz_res;
5292 #endif /* NVALGRIND */
5293 }
5294 
5295 #if defined(__GNUC__) || defined(__INTEL_COMPILER) && !defined(_MSC_VER)
5296 static int VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5297  __attribute__((format(__printf__, 1, 2), __unused__));
5298 #endif
5299 static int
5300 #if defined(_MSC_VER)
5301 __inline
5302 #endif
5303 VALGRIND_PRINTF_BACKTRACE(const char *format, ...)
5304 {
5305 #if defined(NVALGRIND)
5306  return 0;
5307 #else /* NVALGRIND */
5308 #if defined(_MSC_VER) || defined(__MINGW64__)
5309  uintptr_t _qzz_res;
5310 #else
5311  unsigned long _qzz_res;
5312 #endif
5313  va_list vargs;
5314  va_start(vargs, format);
5315 #if defined(_MSC_VER) || defined(__MINGW64__)
5316  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5318  (uintptr_t)format,
5319  (uintptr_t)&vargs,
5320  0, 0, 0);
5321 #else
5322  _qzz_res = VALGRIND_DO_CLIENT_REQUEST_EXPR(0,
5324  (unsigned long)format,
5325  (unsigned long)&vargs,
5326  0, 0, 0);
5327 #endif
5328  va_end(vargs);
5329  return (int)_qzz_res;
5330 #endif /* NVALGRIND */
5331 }
5332 
5333 
5334 /* These requests allow control to move from the simulated CPU to the
5335  real CPU, calling an arbitrary function.
5336 
5337  Note that the current ThreadId is inserted as the first argument.
5338  So this call:
5339 
5340  VALGRIND_NON_SIMD_CALL2(f, arg1, arg2)
5341 
5342  requires f to have this signature:
5343 
5344  Word f(Word tid, Word arg1, Word arg2)
5345 
5346  where "Word" is a word-sized type.
5347 
5348  Note that these client requests are not entirely reliable. For example,
5349  if you call a function with them that subsequently calls printf(),
5350  there's a high chance Valgrind will crash. Generally, your prospects of
5351  these working are made higher if the called function does not refer to
5352  any global variables, and does not refer to any libc or other functions
5353  (printf et al). Any kind of entanglement with libc or dynamic linking is
5354  likely to have a bad outcome, for tricky reasons which we've grappled
5355  with a lot in the past.
5356 */
/* Run _qyy_fn on the real CPU.  Valgrind prepends the ThreadId, so _qyy_fn
   must actually take one argument (see the comment block above). */
5357 #define VALGRIND_NON_SIMD_CALL0(_qyy_fn) \
5358  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5359  VG_USERREQ__CLIENT_CALL0, \
5360  _qyy_fn, \
5361  0, 0, 0, 0)
5362 
/* Run _qyy_fn(tid, _qyy_arg1) on the real CPU (ThreadId prepended). */
5363 #define VALGRIND_NON_SIMD_CALL1(_qyy_fn, _qyy_arg1) \
5364  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5365  VG_USERREQ__CLIENT_CALL1, \
5366  _qyy_fn, \
5367  _qyy_arg1, 0, 0, 0)
5368 
/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2) on the real CPU. */
5369 #define VALGRIND_NON_SIMD_CALL2(_qyy_fn, _qyy_arg1, _qyy_arg2) \
5370  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5371  VG_USERREQ__CLIENT_CALL2, \
5372  _qyy_fn, \
5373  _qyy_arg1, _qyy_arg2, 0, 0)
5374 
/* Run _qyy_fn(tid, _qyy_arg1, _qyy_arg2, _qyy_arg3) on the real CPU. */
5375 #define VALGRIND_NON_SIMD_CALL3(_qyy_fn, _qyy_arg1, _qyy_arg2, _qyy_arg3) \
5376  VALGRIND_DO_CLIENT_REQUEST_EXPR(0 /* default return */, \
5377  VG_USERREQ__CLIENT_CALL3, \
5378  _qyy_fn, \
5379  _qyy_arg1, _qyy_arg2, \
5380  _qyy_arg3, 0)
5381 
5382 
5383 /* Counts the number of errors that have been recorded by a tool. Nb:
5384  the tool must record the errors with VG_(maybe_record_error)() or
5385  VG_(unique_error)() for them to be counted. */
/* Expression-form request; yields 0 when running natively. */
5386 #define VALGRIND_COUNT_ERRORS \
5387  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR( \
5388  0 /* default return */, \
5389  VG_USERREQ__COUNT_ERRORS, \
5390  0, 0, 0, 0, 0)
5391 
5392 /* Several Valgrind tools (Memcheck, Massif, Helgrind, DRD) rely on knowing
5393  when heap blocks are allocated in order to give accurate results. This
5394  happens automatically for the standard allocator functions such as
5395  malloc(), calloc(), realloc(), memalign(), new, new[], free(), delete,
5396  delete[], etc.
5397 
5398  But if your program uses a custom allocator, this doesn't automatically
5399  happen, and Valgrind will not do as well. For example, if you allocate
5400  superblocks with mmap() and then allocate chunks of the superblocks, all
5401  Valgrind's observations will be at the mmap() level and it won't know that
5402  the chunks should be considered separate entities. In Memcheck's case,
5403  that means you probably won't get heap block overrun detection (because
5404  there won't be redzones marked as unaddressable) and you definitely won't
5405  get any leak detection.
5406 
5407  The following client requests allow a custom allocator to be annotated so
5408  that it can be handled accurately by Valgrind.
5409 
5410  VALGRIND_MALLOCLIKE_BLOCK marks a region of memory as having been allocated
5411  by a malloc()-like function. For Memcheck (an illustrative case), this
5412  does two things:
5413 
5414  - It records that the block has been allocated. This means any addresses
5415  within the block mentioned in error messages will be
5416  identified as belonging to the block. It also means that if the block
5417  isn't freed it will be detected by the leak checker.
5418 
5419  - It marks the block as being addressable and undefined (if 'is_zeroed' is
5420  not set), or addressable and defined (if 'is_zeroed' is set). This
5421  controls how accesses to the block by the program are handled.
5422 
5423  'addr' is the start of the usable block (ie. after any
5424  redzone), 'sizeB' is its size. 'rzB' is the redzone size if the allocator
5425  can apply redzones -- these are blocks of padding at the start and end of
5426  each block. Adding redzones is recommended as it makes it much more likely
5427  Valgrind will spot block overruns. `is_zeroed' indicates if the memory is
5428  zeroed (or filled with another predictable value), as is the case for
5429  calloc().
5430 
5431  VALGRIND_MALLOCLIKE_BLOCK should be put immediately after the point where a
5432  heap block -- that will be used by the client program -- is allocated.
5433  It's best to put it at the outermost level of the allocator if possible;
5434  for example, if you have a function my_alloc() which calls
5435  internal_alloc(), and the client request is put inside internal_alloc(),
5436  stack traces relating to the heap block will contain entries for both
5437  my_alloc() and internal_alloc(), which is probably not what you want.
5438 
5439  For Memcheck users: if you use VALGRIND_MALLOCLIKE_BLOCK to carve out
5440  custom blocks from within a heap block, B, that has been allocated with
5441  malloc/calloc/new/etc, then block B will be *ignored* during leak-checking
5442  -- the custom blocks will take precedence.
5443 
5444  VALGRIND_FREELIKE_BLOCK is the partner to VALGRIND_MALLOCLIKE_BLOCK. For
5445  Memcheck, it does two things:
5446 
5447  - It records that the block has been deallocated. This assumes that the
5448  block was annotated as having been allocated via
5449  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5450 
5451  - It marks the block as being unaddressable.
5452 
5453  VALGRIND_FREELIKE_BLOCK should be put immediately after the point where a
5454  heap block is deallocated.
5455 
5456  VALGRIND_RESIZEINPLACE_BLOCK informs a tool about reallocation. For
5457  Memcheck, it does four things:
5458 
5459  - It records that the size of a block has been changed. This assumes that
5460  the block was annotated as having been allocated via
5461  VALGRIND_MALLOCLIKE_BLOCK. Otherwise, an error will be issued.
5462 
5463  - If the block shrunk, it marks the freed memory as being unaddressable.
5464 
5465  - If the block grew, it marks the new area as undefined and defines a red
5466  zone past the end of the new block.
5467 
5468  - The V-bits of the overlap between the old and the new block are preserved.
5469 
5470  VALGRIND_RESIZEINPLACE_BLOCK should be put after allocation of the new block
5471  and before deallocation of the old block.
5472 
5473  In many cases, these three client requests will not be enough to get your
5474  allocator working well with Memcheck. More specifically, if your allocator
5475  writes to freed blocks in any way then a VALGRIND_MAKE_MEM_UNDEFINED call
5476  will be necessary to mark the memory as addressable just before the zeroing
5477  occurs, otherwise you'll get a lot of invalid write errors. For example,
5478  you'll need to do this if your allocator recycles freed blocks, but it
5479  zeroes them before handing them back out (via VALGRIND_MALLOCLIKE_BLOCK).
5480  Alternatively, if your allocator reuses freed blocks for allocator-internal
5481  data structures, VALGRIND_MAKE_MEM_UNDEFINED calls will also be necessary.
5482 
5483  Really, what's happening is a blurring of the lines between the client
5484  program and the allocator... after VALGRIND_FREELIKE_BLOCK is called, the
5485  memory should be considered unaddressable to the client program, but the
5486  allocator knows more than the rest of the client program and so may be able
5487  to safely access it. Extra client requests are necessary for Valgrind to
5488  understand the distinction between the allocator and the rest of the
5489  program.
5490 
5491  Ignored if addr == 0.
5492 */
/* Mark the block of 'sizeB' usable bytes starting at 'addr' as having been
   allocated by a custom allocator, with redzones of 'rzB' bytes; see the
   long VALGRIND_MALLOCLIKE_BLOCK discussion above. Ignored if addr == 0. */
5493 #define VALGRIND_MALLOCLIKE_BLOCK(addr, sizeB, rzB, is_zeroed) \
5494  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MALLOCLIKE_BLOCK, \
5495  addr, sizeB, rzB, is_zeroed, 0)
5496 
5497 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   'addr' is the start of the block, 'oldSizeB' and 'newSizeB' are its usable
   sizes before and after the in-place resize, 'rzB' is the redzone size.
5498  Ignored if addr == 0.
5499 */
5500 #define VALGRIND_RESIZEINPLACE_BLOCK(addr, oldSizeB, newSizeB, rzB) \
5501  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__RESIZEINPLACE_BLOCK, \
5502  addr, oldSizeB, newSizeB, rzB, 0)
5503 
5504 /* See the comment for VALGRIND_MALLOCLIKE_BLOCK for details.
   Records that the block at 'addr' (annotated earlier with redzone size
   'rzB') has been deallocated, and marks it unaddressable.
5505  Ignored if addr == 0.
5506 */
5507 #define VALGRIND_FREELIKE_BLOCK(addr, rzB) \
5508  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__FREELIKE_BLOCK, \
5509  addr, rzB, 0, 0, 0)
5510 
5511 /* Create a memory pool anchored at 'pool', with redzone size 'rzB';
   'is_zeroed' indicates whether chunks handed out are zero-filled. */
5512 #define VALGRIND_CREATE_MEMPOOL(pool, rzB, is_zeroed) \
5513  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__CREATE_MEMPOOL, \
5514  pool, rzB, is_zeroed, 0, 0)
5515 
5516 /* Destroy the memory pool anchored at 'pool'. */
5517 #define VALGRIND_DESTROY_MEMPOOL(pool) \
5518  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__DESTROY_MEMPOOL, \
5519  pool, 0, 0, 0, 0)
5520 
5521 /* Associate the 'size'-byte piece of memory at 'addr' with the memory
   pool 'pool'. */
5522 #define VALGRIND_MEMPOOL_ALLOC(pool, addr, size) \
5523  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_ALLOC, \
5524  pool, addr, size, 0, 0)
5525 
5526 /* Disassociate the piece of memory at 'addr' from the memory pool
   'pool'. */
5527 #define VALGRIND_MEMPOOL_FREE(pool, addr) \
5528  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_FREE, \
5529  pool, addr, 0, 0, 0)
5530 
5531 /* Disassociate from 'pool' any pieces lying outside the range of 'size'
   bytes starting at 'addr'. */
5532 #define VALGRIND_MEMPOOL_TRIM(pool, addr, size) \
5533  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_TRIM, \
5534  pool, addr, size, 0, 0)
5535 
5536 /* Move a memory pool: inform the tool that the pool previously anchored
   at address 'poolA' has moved to anchor address 'poolB'. (The previous
   comment here was a copy-paste of VALGRIND_MEMPOOL_CHANGE's and described
   the wrong request.) */
5537 #define VALGRIND_MOVE_MEMPOOL(poolA, poolB) \
5538  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MOVE_MEMPOOL, \
5539  poolA, poolB, 0, 0, 0)
5540 
5541 /* Resize and/or move a piece associated with a memory pool: the piece
   previously at 'addrA' in 'pool' is now at 'addrB' with size 'size'. */
5542 #define VALGRIND_MEMPOOL_CHANGE(pool, addrA, addrB, size) \
5543  VALGRIND_DO_CLIENT_REQUEST_STMT(VG_USERREQ__MEMPOOL_CHANGE, \
5544  pool, addrA, addrB, size, 0)
5545 
5546 /* Return 1 if a mempool anchored at 'pool' exists, else 0. Evaluates to
   the default 0 when not running under Valgrind. */
5547 #define VALGRIND_MEMPOOL_EXISTS(pool) \
5548  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5549  VG_USERREQ__MEMPOOL_EXISTS, \
5550  pool, 0, 0, 0, 0)
5551 
5552 /* Mark the piece of memory between 'start' and 'end' as being a stack.
   Returns a stack id; evaluates to the default 0 when not running under
   Valgrind. */
5553 #define VALGRIND_STACK_REGISTER(start, end) \
5554  (unsigned)VALGRIND_DO_CLIENT_REQUEST_EXPR(0, \
5555  VG_USERREQ__STACK_REGISTER, \
5556  start, end, 0, 0, 0)
5557 
5558 /* Unmark the piece of memory associated with a stack id as being a
5559