/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * LoongArch specific definitions for NOLIBC
 * Copyright (C) 2023 Loongson Technology Corporation Limited
 */

#ifndef _NOLIBC_ARCH_LOONGARCH_H
#define _NOLIBC_ARCH_LOONGARCH_H

/* Syscalls for LoongArch :
 *   - stack is 16-byte aligned
 *   - syscall number is passed in a7
 *   - arguments are in a0, a1, a2, a3, a4, a5
 *   - the system call is performed by calling "syscall 0"
 *   - syscall return comes in a0
 *   - the arguments are cast to long and assigned into the target
 *     registers which are then simply passed as registers to the asm code,
 *     so that we don't have to experience issues with register constraints.
 *
 * On LoongArch, select() is not implemented so we have to use pselect6().
 */
#define __ARCH_WANT_SYS_PSELECT6

#define my_syscall0(num) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0"); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "=r"(_arg1) \
		: "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

#define my_syscall1(num, arg1) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

#define my_syscall2(num, arg1, arg2) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), \
		  "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

#define my_syscall3(num, arg1, arg2, arg3) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), \
		  "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

#define my_syscall4(num, arg1, arg2, arg3, arg4) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	register long _arg4 __asm__ ("a3") = (long)(arg4); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), \
		  "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	register long _arg4 __asm__ ("a3") = (long)(arg4); \
	register long _arg5 __asm__ ("a4") = (long)(arg5); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6) \
({ \
	register long _num __asm__ ("a7") = (num); \
	register long _arg1 __asm__ ("a0") = (long)(arg1); \
	register long _arg2 __asm__ ("a1") = (long)(arg2); \
	register long _arg3 __asm__ ("a2") = (long)(arg3); \
	register long _arg4 __asm__ ("a3") = (long)(arg4); \
	register long _arg5 __asm__ ("a4") = (long)(arg5); \
	register long _arg6 __asm__ ("a5") = (long)(arg6); \
	\
	__asm__ volatile ( \
		"syscall 0\n" \
		: "+r"(_arg1) \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), "r"(_arg6), \
		  "r"(_num) \
		: "memory", "$t0", "$t1", "$t2", "$t3", \
		  "$t4", "$t5", "$t6", "$t7", "$t8" \
	); \
	_arg1; \
})

char **environ __attribute__((weak));
const unsigned long *_auxv __attribute__((weak));

#if __loongarch_grlen == 32
#define LONGLOG      "2"
#define SZREG        "4"
#define REG_L        "ld.w"
#define LONG_S       "st.w"
#define LONG_ADD     "add.w"
#define LONG_ADDI    "addi.w"
#define LONG_SLL     "slli.w"
#define LONG_BSTRINS "bstrins.w"
#else // __loongarch_grlen == 64
#define LONGLOG      "3"
#define SZREG        "8"
#define REG_L        "ld.d"
#define LONG_S       "st.d"
#define LONG_ADD     "add.d"
#define LONG_ADDI    "addi.d"
#define LONG_SLL     "slli.d"
#define LONG_BSTRINS "bstrins.d"
#endif

/* startup code */
void __attribute__((weak,noreturn,optimize("omit-frame-pointer"))) _start(void)
{
	__asm__ volatile (
		REG_L        " $a0, $sp, 0\n"         // argc (a0) was in the stack
		LONG_ADDI    " $a1, $sp, "SZREG"\n"   // argv (a1) = sp + SZREG
		LONG_SLL     " $a2, $a0, "LONGLOG"\n" // envp (a2) = SZREG*argc ...
		LONG_ADDI    " $a2, $a2, "SZREG"\n"   //             + SZREG (skip null)
		LONG_ADD     " $a2, $a2, $a1\n"       //             + argv

		"move $a3, $a2\n"                     // iterate a3 over envp to find auxv (after NULL)
		"0:\n"                                // do {
		REG_L        " $a4, $a3, 0\n"         //   a4 = *a3;
		LONG_ADDI    " $a3, $a3, "SZREG"\n"   //   a3 += sizeof(void*);
		"bne $a4, $zero, 0b\n"                // } while (a4);
		"la.pcrel $a4, _auxv\n"               // a4 = &_auxv
		LONG_S       " $a3, $a4, 0\n"         // store a3 into _auxv

		"la.pcrel $a3, environ\n"             // a3 = &environ
		LONG_S       " $a2, $a3, 0\n"         // store envp(a2) into environ
		LONG_BSTRINS " $sp, $zero, 3, 0\n"    // sp must be 16-byte aligned
		"bl main\n"                           // main() returns the status code, we'll exit with it.
		"li.w $a7, 93\n"                      // NR_exit == 93
		"syscall 0\n"
	);
	__builtin_unreachable();
}

#endif // _NOLIBC_ARCH_LOONGARCH_H