xref: /linux/arch/x86/um/shared/sysdep/stub_32.h (revision be239684b18e1cdcafcf8c7face4a2f562c745ad)
1 /*
2  * Copyright (C) 2004 Jeff Dike (jdike@addtoit.com)
3  * Licensed under the GPL
4  */
5 
6 #ifndef __SYSDEP_STUB_H
7 #define __SYSDEP_STUB_H
8 
9 #include <asm/ptrace.h>
10 #include <generated/asm-offsets.h>
11 
12 #define STUB_MMAP_NR __NR_mmap2
13 #define MMAP_OFFSET(o) ((o) >> UM_KERN_PAGE_SHIFT)
14 
/*
 * Invoke a system call with no arguments via the legacy "int $0x80" gate.
 *
 * i386 syscall ABI: the syscall number goes in %eax (the "0" constraint
 * ties the input to the "=a" output); the kernel's return value comes
 * back in %eax.  "memory" is clobbered since the kernel may read or
 * write user memory on the caller's behalf.
 *
 * __always_inline: presumably required so the stub code stays
 * self-contained (no out-of-line calls) — confirm against stub usage.
 */
static __always_inline long stub_syscall0(long syscall)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall)
			: "memory");

	return ret;
}
24 
/*
 * One-argument "int $0x80" syscall: number in %eax, arg1 in %ebx,
 * return value in %eax (i386 syscall ABI).
 */
static __always_inline long stub_syscall1(long syscall, long arg1)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1)
			: "memory");

	return ret;
}
34 
/*
 * Two-argument "int $0x80" syscall: number in %eax, args in
 * %ebx/%ecx, return value in %eax (i386 syscall ABI).
 */
static __always_inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2)
			: "memory");

	return ret;
}
45 
/*
 * Three-argument "int $0x80" syscall: number in %eax, args in
 * %ebx/%ecx/%edx, return value in %eax (i386 syscall ABI).
 */
static __always_inline long stub_syscall3(long syscall, long arg1, long arg2,
					  long arg3)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3)
			: "memory");

	return ret;
}
57 
/*
 * Four-argument "int $0x80" syscall: number in %eax, args in
 * %ebx/%ecx/%edx/%esi, return value in %eax (i386 syscall ABI).
 */
static __always_inline long stub_syscall4(long syscall, long arg1, long arg2,
					  long arg3, long arg4)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3), "S" (arg4)
			: "memory");

	return ret;
}
69 
/*
 * Five-argument "int $0x80" syscall: number in %eax, args in
 * %ebx/%ecx/%edx/%esi/%edi, return value in %eax (i386 syscall ABI).
 */
static __always_inline long stub_syscall5(long syscall, long arg1, long arg2,
					  long arg3, long arg4, long arg5)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
			: "memory");

	return ret;
}
81 
82 static __always_inline void trap_myself(void)
83 {
84 	__asm("int3");
85 }
86 
/*
 * Remap the stub data area in place via mmap2 and then trap to the
 * tracer.
 *
 * Register setup for mmap2 (i386 syscall ABI, args in
 * %ebx/%ecx/%edx/%esi/%edi/%ebp):
 *   %ebx = addr:   current %esp masked down to a
 *                  STUB_DATA_PAGES * UM_KERN_PAGE_SIZE boundary, i.e.
 *                  the base of the stub data area containing the stack
 *   %eax = __NR_mmap2 (STUB_MMAP_NR)
 *   %edi = fd:     loaded from the stub data word at UML_STUB_FIELD_FD
 *   %ebp = pgoff:  loaded from the stub data word at
 *                  UML_STUB_FIELD_OFFSET
 *   %ecx = length: STUB_DATA_PAGES * UM_KERN_PAGE_SIZE
 *   %edx = prot:   PROT_READ | PROT_WRITE
 *   %esi = flags:  MAP_FIXED | MAP_SHARED
 *
 * After "int $0x80", the mmap2 return value in %eax is stored into the
 * stub data word at UML_STUB_FIELD_CHILD_ERR, then "int $3" returns
 * control to the tracing process.
 *
 * NOTE(review): this is a single asm block presumably because the
 * stack itself lies inside the region being remapped, so no
 * compiler-generated stack access may occur between the mmap and the
 * trap — do not split it up.
 */
static __always_inline void remap_stack_and_trap(void)
{
	__asm__ volatile (
		"movl %%esp,%%ebx ;"
		"andl %0,%%ebx ;"
		"movl %1,%%eax ;"
		"movl %%ebx,%%edi ; addl %2,%%edi ; movl (%%edi),%%edi ;"
		"movl %%ebx,%%ebp ; addl %3,%%ebp ; movl (%%ebp),%%ebp ;"
		"int $0x80 ;"
		"addl %4,%%ebx ; movl %%eax, (%%ebx) ;"
		"int $3"
		: :
		"g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)),
		"g" (STUB_MMAP_NR),
		"g" (UML_STUB_FIELD_FD),
		"g" (UML_STUB_FIELD_OFFSET),
		"g" (UML_STUB_FIELD_CHILD_ERR),
		"c" (STUB_DATA_PAGES * UM_KERN_PAGE_SIZE),
		"d" (PROT_READ | PROT_WRITE),
		"S" (MAP_FIXED | MAP_SHARED)
		:
		"memory");
}
110 
/*
 * Return the base address of the stub data area.
 *
 * The stub's stack pointer lives inside the stub data pages, so masking
 * %esp down to a STUB_DATA_PAGES * UM_KERN_PAGE_SIZE boundary yields
 * the start of the area.  Asm is needed because %esp is not directly
 * readable from C.
 */
static __always_inline void *get_stub_data(void)
{
	unsigned long ret;

	asm volatile (
		"movl %%esp,%0 ;"
		"andl %1,%0"
		: "=a" (ret)
		: "g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)));

	return (void *)ret;
}
123 #endif
124