xref: /linux/tools/testing/selftests/vDSO/vdso_call.h (revision 4b81e2eb9e4db8f6094c077d0c8b27c264901c1b)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Macro to call vDSO functions
4  *
5  * Copyright (C) 2024 Christophe Leroy <christophe.leroy@csgroup.eu>, CS GROUP France
6  */
7 #ifndef __VDSO_CALL_H__
8 #define __VDSO_CALL_H__
9 
10 #ifdef __powerpc__
11 
/*
 * Stage a one-argument vDSO call: the target goes in the dummy r0
 * variable and the single argument in r3, as read by VDSO_CALL().
 */
#define LOADARGS_1(fn, __arg1) do {					\
	_r3 = (long)__arg1;						\
	_r0 = fn;							\
} while (0)
16 
/*
 * Stage a two-argument vDSO call: target in r0, arguments in r3/r4,
 * as read by VDSO_CALL().
 */
#define LOADARGS_2(fn, __arg1, __arg2) do {				\
	_r4 = (long)__arg2;						\
	_r3 = (long)__arg1;						\
	_r0 = fn;							\
} while (0)
22 
/*
 * Stage a three-argument vDSO call: target in r0, arguments in r3-r5,
 * as read by VDSO_CALL().
 */
#define LOADARGS_3(fn, __arg1, __arg2, __arg3) do {			\
	_r5 = (long)__arg3;						\
	_r4 = (long)__arg2;						\
	_r3 = (long)__arg1;						\
	_r0 = fn;							\
} while (0)
29 
/*
 * Stage a four-argument vDSO call: target in r0, arguments in r3-r6,
 * as read by VDSO_CALL().
 *
 * Fills the gap in the LOADARGS_<n> family so VDSO_CALL(fn, 4, ...)
 * works; VDSO_CALL() already binds r6/r7, so no other change is needed.
 */
#define LOADARGS_4(fn, __arg1, __arg2, __arg3, __arg4) do {		\
	_r0 = fn;							\
	_r3 = (long)__arg1;						\
	_r4 = (long)__arg2;						\
	_r5 = (long)__arg3;						\
	_r6 = (long)__arg4;						\
} while (0)

/*
 * Stage a five-argument vDSO call: target in r0, arguments in r3-r7,
 * as read by VDSO_CALL().
 */
#define LOADARGS_5(fn, __arg1, __arg2, __arg3, __arg4, __arg5) do {	\
	_r0 = fn;							\
	_r3 = (long)__arg1;						\
	_r4 = (long)__arg2;						\
	_r5 = (long)__arg3;						\
	_r6 = (long)__arg4;						\
	_r7 = (long)__arg5;						\
} while (0)
38 
/*
 * Invoke a powerpc vDSO entry point and evaluate to its return value.
 *
 * @fn:   pointer to the vDSO function to call
 * @nr:   number of arguments; selects the matching LOADARGS_<nr> helper
 * @args: the arguments themselves
 *
 * The LOADARGS_<nr> macro pins @fn and the arguments into the local
 * register variables below, the target is moved into CTR (mtctr) and
 * called with bctrl.  Powerpc vDSO entries report failure the same way
 * syscalls do: via the summary-overflow (SO) bit of cr0.  "bns+ 1f"
 * skips the fixup when SO is clear (success); otherwise "neg 3, 3"
 * turns the positive error code in r3 into the usual negative-errno
 * return value.  The clobber list covers the remaining volatile GPRs
 * (r9-r12), the volatile CR fields, xer, lr, ctr and memory.
 */
#define VDSO_CALL(fn, nr, args...) ({					\
	register void *_r0 asm ("r0");					\
	register long _r3 asm ("r3");					\
	register long _r4 asm ("r4");					\
	register long _r5 asm ("r5");					\
	register long _r6 asm ("r6");					\
	register long _r7 asm ("r7");					\
	register long _r8 asm ("r8");					\
									\
	LOADARGS_##nr(fn, args);					\
									\
	asm volatile(							\
		"	mtctr %0\n"					\
		"	bctrl\n"					\
		"	bns+	1f\n"					\
		"	neg	3, 3\n"					\
		"1:"							\
		: "+r" (_r0), "+r" (_r3), "+r" (_r4), "+r" (_r5),	\
		  "+r" (_r6), "+r" (_r7), "+r" (_r8)			\
		:							\
		: "r9", "r10", "r11", "r12", "cr0", "cr1", "cr5",	\
		  "cr6", "cr7", "xer", "lr", "ctr", "memory"		\
	);								\
	_r3;								\
})
64 
#else
/*
 * On every other architecture the vDSO entry follows the standard C
 * calling convention, so it can simply be called directly; @nr is
 * unused here and only needed by the powerpc register-loading path.
 */
#define VDSO_CALL(fn, nr, args...)	fn(args)
#endif
68 
69 #endif
70