/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * LoongArch specific definitions for NOLIBC
 * Copyright (C) 2023 Loongson Technology Corporation Limited
 */

#ifndef _NOLIBC_ARCH_LOONGARCH_H
#define _NOLIBC_ARCH_LOONGARCH_H

#include "compiler.h"
#include "crt.h"

/* Syscalls for LoongArch :
 * - stack is 16-byte aligned
 * - syscall number is passed in a7
 * - arguments are in a0, a1, a2, a3, a4, a5
 * - the system call is performed by calling "syscall 0"
 * - syscall return comes in a0
 * - the arguments are cast to long and assigned into the target
 *   registers which are then simply passed as registers to the asm code,
 *   so that we don't have to experience issues with register constraints.
 */

/* Common clobber list for every my_syscallN below: listing $t0-$t8 tells
 * the compiler it must not keep live values in the temporary registers
 * across the "syscall 0" instruction (the kernel is not expected to
 * preserve them), and "memory" orders the asm against surrounding memory
 * accesses so the kernel sees/produces up-to-date data.
 */
#define _NOLIBC_SYSCALL_CLOBBERLIST \
	"memory", "$t0", "$t1", "$t2", "$t3", "$t4", "$t5", "$t6", "$t7", "$t8"

/* my_syscallN(num, arg1..argN): invoke system call <num> with N arguments.
 * Each argument is cast to long and pinned to its ABI register (a0..a5)
 * through a register-asm local; the syscall number is pinned to a7. The
 * expression's value is whatever the kernel leaves in a0. In my_syscall0
 * a0 is a pure output ("=r"); in the others it carries arg1 in and the
 * return value out, hence the read-write "+r" constraint.
 */
#define my_syscall0(num)                                                      \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0");                                   \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "=r"(_arg1)                                                 \
		: "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

#define my_syscall1(num, arg1)                                                \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "+r"(_arg1)                                                 \
		: "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

#define my_syscall2(num, arg1, arg2)                                          \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2),                                                 \
		  "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

#define my_syscall3(num, arg1, arg2, arg3)                                    \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3),                                     \
		  "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

#define my_syscall4(num, arg1, arg2, arg3, arg4)                              \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("a3") = (long)(arg4);                    \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4),                         \
		  "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5)                        \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("a3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("a4") = (long)(arg5);                    \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5),             \
		  "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6)                  \
({                                                                            \
	register long _num  __asm__ ("a7") = (num);                           \
	register long _arg1 __asm__ ("a0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("a1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("a2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("a3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("a4") = (long)(arg5);                    \
	register long _arg6 __asm__ ("a5") = (long)(arg6);                    \
									      \
	__asm__ volatile (                                                    \
		"syscall 0\n"                                                 \
		: "+r"(_arg1)                                                 \
		: "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), "r"(_arg6), \
		  "r"(_num)                                                   \
		: _NOLIBC_SYSCALL_CLOBBERLIST                                 \
	);                                                                    \
	_arg1;                                                                \
})

/* Pick the bit-string-insert mnemonic matching the general-register width
 * (__loongarch_grlen): bstrins.w on LA32, bstrins.d on LA64. Used below to
 * clear the low bits of $sp.
 */
#if __loongarch_grlen == 32
#define LONG_BSTRINS "bstrins.w"
#else /* __loongarch_grlen == 64 */
#define LONG_BSTRINS "bstrins.d"
#endif

/* startup code */
/* Process entry point: hand the original stack pointer (which addresses the
 * kernel-provided argc/argv/envp block) to _start_c as its first argument,
 * after forcing 16-byte stack alignment by inserting $zero into bits 3..0
 * of $sp. Declared weak so a program may provide its own _start, and
 * noreturn since _start_c never comes back; __nolibc_entrypoint_epilogue()
 * closes the function for the compiler.
 */
void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector _start(void)
{
	__asm__ volatile (
		"move          $a0, $sp\n"          /* save stack pointer to $a0, as arg1 of _start_c */
		LONG_BSTRINS " $sp, $zero, 3, 0\n"  /* $sp must be 16-byte aligned */
		"bl            _start_c\n"          /* transfer to c runtime */
	);
	__nolibc_entrypoint_epilogue();
}

#endif /* _NOLIBC_ARCH_LOONGARCH_H */