/*
 * Copyright (C) 2004 Jeff Dike (jdike@addtoit.com)
 * Licensed under the GPL
 */

#ifndef __SYSDEP_STUB_H
#define __SYSDEP_STUB_H

#include <sysdep/ptrace_user.h>
#include <generated/asm-offsets.h>
#include <linux/stddef.h>

#define STUB_MMAP_NR __NR_mmap
#define MMAP_OFFSET(o) (o)

/* The syscall instruction itself clobbers %rcx and %r11. */
#define __syscall_clobber "r11","rcx","memory"
#define __syscall "syscall"

/*
 * Raw system calls for the userspace stub: the syscall number goes in
 * %rax and the arguments in %rdi, %rsi, %rdx, %r10 and %r8, following
 * the x86_64 syscall ABI.
 */
static inline long stub_syscall0(long syscall)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall) : __syscall_clobber );

	return ret;
}

static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2) : __syscall_clobber );

	return ret;
}

static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
	long ret;

	__asm__ volatile (__syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3)
		: __syscall_clobber );

	return ret;
}

static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
				 long arg4)
{
	long ret;

	__asm__ volatile ("movq %5,%%r10 ; " __syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
		  "g" (arg4)
		: __syscall_clobber, "r10" );

	return ret;
}

static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
				 long arg4, long arg5)
{
	long ret;

	__asm__ volatile ("movq %5,%%r10 ; movq %6,%%r8 ; " __syscall
		: "=a" (ret)
		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
		  "g" (arg4), "g" (arg5)
		: __syscall_clobber, "r10", "r8" );

	return ret;
}

/* Raise SIGTRAP so the tracing UML kernel regains control. */
static inline void trap_myself(void)
{
	__asm("int3");
}

/*
 * mmap() the shared stub data over its fixed, size-aligned region (which
 * also holds the stack this code runs on): derive the base address by
 * masking %rsp, load the fd and offset stored in the stub data, issue the
 * syscall, store its result in the child_err field and trap back to the
 * UML kernel.
 */
static inline void remap_stack_and_trap(void)
{
	__asm__ volatile (
		"movq %0,%%rax ;"
		"movq %%rsp,%%rdi ;"
		"andq %1,%%rdi ;"
		"movq %2,%%r10 ;"
		"movq %%rdi,%%r8 ; addq %3,%%r8 ; movq (%%r8),%%r8 ;"
		"movq %%rdi,%%r9 ; addq %4,%%r9 ; movq (%%r9),%%r9 ;"
		__syscall ";"
		"movq %%rsp,%%rdi ; andq %1,%%rdi ;"
		"addq %5,%%rdi ; movq %%rax, (%%rdi) ;"
		"int3"
		: :
		"g" (STUB_MMAP_NR),
		"g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)),
		"g" (MAP_FIXED | MAP_SHARED),
		"g" (UML_STUB_FIELD_FD),
		"g" (UML_STUB_FIELD_OFFSET),
		"g" (UML_STUB_FIELD_CHILD_ERR),
		"S" (STUB_DATA_PAGES * UM_KERN_PAGE_SIZE),
		"d" (PROT_READ | PROT_WRITE)
		:
		__syscall_clobber, "r10", "r8", "r9");
}

/* The stub data area is aligned to its size, so masking %rsp yields its base. */
static __always_inline void *get_stub_data(void)
{
	unsigned long ret;

	asm volatile (
		"movq %%rsp,%0 ;"
		"andq %1,%0"
		: "=a" (ret)
		: "g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)));

	return (void *)ret;
}
#endif
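
/*
 * Illustrative usage sketch, not part of the upstream header: roughly how
 * stub code might drive the helpers above to change page protections and
 * hand control back to the UML kernel.  The function name
 * stub_example_mprotect() is made up for this example, and __NR_mprotect
 * and PROT_READ are assumed to come from host headers already pulled in
 * by the including code (just as PROT_READ/PROT_WRITE are used above).
 */
#if 0	/* example only -- never compiled */
static inline void stub_example_mprotect(void)
{
	/* Base of the size-aligned stub data area, derived from %rsp. */
	unsigned char *data = get_stub_data();
	long err;

	/* mprotect(data, STUB_DATA_PAGES * UM_KERN_PAGE_SIZE, PROT_READ) */
	err = stub_syscall3(__NR_mprotect, (long) data,
			    STUB_DATA_PAGES * UM_KERN_PAGE_SIZE, PROT_READ);

	/* Report the result in the child_err field, then trap to the tracer. */
	*((long *) (data + UML_STUB_FIELD_CHILD_ERR)) = err;
	trap_myself();
}
#endif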