/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Macro to call vDSO functions
 *
 * On powerpc, vDSO functions follow the syscall convention: an error is
 * flagged in the summary-overflow (SO) bit of CR0 rather than returned
 * as a negative value, so calls must go through inline assembly that
 * converts the result to the usual -errno form.  Other architectures
 * can call vDSO functions directly.
 *
 * Copyright (C) 2024 Christophe Leroy <christophe.leroy@csgroup.eu>, CS GROUP France
 */
#ifndef __VDSO_CALL_H__
#define __VDSO_CALL_H__

#ifdef __powerpc__

/*
 * Load the function pointer into r0 and its arguments into r3 onwards,
 * matching what the inline assembly in VDSO_CALL() below expects.
 */
#define LOADARGS_1(fn, __arg1) do {					\
	_r0 = fn;							\
	_r3 = (long)__arg1;						\
} while (0)

#define LOADARGS_2(fn, __arg1, __arg2) do {				\
	_r0 = fn;							\
	_r3 = (long)__arg1;						\
	_r4 = (long)__arg2;						\
} while (0)

#define LOADARGS_3(fn, __arg1, __arg2, __arg3) do {			\
	_r0 = fn;							\
	_r3 = (long)__arg1;						\
	_r4 = (long)__arg2;						\
	_r5 = (long)__arg3;						\
} while (0)

#define LOADARGS_5(fn, __arg1, __arg2, __arg3, __arg4, __arg5) do {	\
	_r0 = fn;							\
	_r3 = (long)__arg1;						\
	_r4 = (long)__arg2;						\
	_r5 = (long)__arg3;						\
	_r6 = (long)__arg4;						\
	_r7 = (long)__arg5;						\
} while (0)

#define VDSO_CALL(fn, nr, args...) ({					\
	/* fn goes in r0, arguments in r3-r7, return value in r3 */	\
	register void *_r0 asm ("r0");					\
	register long _r3 asm ("r3");					\
	register long _r4 asm ("r4");					\
	register long _r5 asm ("r5");					\
	register long _r6 asm ("r6");					\
	register long _r7 asm ("r7");					\
	register long _r8 asm ("r8");					\
	register long _rval asm ("r3");					\
									\
	LOADARGS_##nr(fn, args);					\
									\
	/*								\
	 * Branch to fn through CTR.  On return, CR0[SO] set means	\
	 * the call failed, so negate r3 to hand callers -errno as	\
	 * on other architectures.					\
	 */								\
	asm volatile(							\
		"	mtctr %0\n"					\
		"	bctrl\n"					\
		"	bns+	1f\n"					\
		"	neg	3, 3\n"					\
		"1:"							\
		: "+r" (_r0), "=r" (_r3), "+r" (_r4), "+r" (_r5),	\
		  "+r" (_r6), "+r" (_r7), "+r" (_r8)			\
		: "r" (_rval)						\
		: "r9", "r10", "r11", "r12", "cr0", "cr1", "cr5",	\
		  "cr6", "cr7", "xer", "lr", "ctr", "memory"		\
	);								\
	_rval;								\
})

#else
/*
 * Other architectures follow the standard calling convention and their
 * vDSO functions report errors as a negative return value already, so
 * they can be called directly.
 */
#define VDSO_CALL(fn, nr, args...)	fn(args)
#endif
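
/*
 * Example usage (a sketch, not compiled as part of this header):
 * resolving a clock_gettime implementation with the parse_vdso helpers
 * that live next to this header and invoking it through VDSO_CALL().
 * The helper names (vdso_init_from_sysinfo_ehdr, vdso_sym) are taken
 * from parse_vdso.h; the version string and symbol name vary per
 * architecture (e.g. "LINUX_2.6.15"/__kernel_clock_gettime on powerpc,
 * "LINUX_2.6"/__vdso_clock_gettime on x86_64).
 *
 *	#include <sys/auxv.h>
 *	#include <time.h>
 *	#include "parse_vdso.h"
 *	#include "vdso_call.h"
 *
 *	typedef int (*gettime_t)(clockid_t clk, struct timespec *ts);
 *
 *	int vdso_clock_gettime(clockid_t clk, struct timespec *ts)
 *	{
 *		gettime_t fn;
 *
 *		vdso_init_from_sysinfo_ehdr(getauxval(AT_SYSINFO_EHDR));
 *		fn = (gettime_t)vdso_sym("LINUX_2.6.15", "__kernel_clock_gettime");
 *		if (!fn)
 *			return -1;
 *
 *		return VDSO_CALL(fn, 2, clk, ts);	two args => nr = 2; -errno on error
 *	}
 */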

#endif /* __VDSO_CALL_H__ */