/* SPDX-License-Identifier: GPL-2.0 */

/*
 * vDSO-visible definition of cpu_relax() for SPARC.
 *
 * This header is shared between normal kernel code and the vDSO build
 * (selected by BUILD_VDSO below), which is why the two variants exist.
 */

#ifndef _ASM_SPARC_VDSO_PROCESSOR_H
#define _ASM_SPARC_VDSO_PROCESSOR_H

#include <linux/compiler.h>

/* __arch64__ is predefined by the compiler when targeting 64-bit SPARC. */
#if defined(__arch64__)

/* Please see the commentary in asm/backoff.h for a description of
 * what these instructions are doing and how they have been chosen.
 * To make a long story short, we are trying to yield the current cpu
 * strand during busy loops.
 *
 * The three "rd %ccr, %g0" reads are a benign delay sequence; on CPUs
 * with the PAUSE instruction the kernel patches them at boot (via the
 * .pause_3insn_patch records emitted below) into a real pause.
 */
#ifdef BUILD_VDSO
/*
 * vDSO variant: plain three-read delay only.  The vDSO copy carries no
 * .pause_3insn_patch record, so it is never rewritten by the kernel's
 * boot-time patching — presumably because the vDSO text is mapped into
 * userspace rather than patched in place (NOTE(review): confirm).
 */
#define cpu_relax() asm volatile("\n99:\n\t" \
				 "rd %%ccr, %%g0\n\t" \
				 "rd %%ccr, %%g0\n\t" \
				 "rd %%ccr, %%g0\n\t" \
				 ::: "memory")
#else /* ! BUILD_VDSO */
/*
 * Kernel variant: identical three-read sequence, plus a patch-site
 * record in the .pause_3insn_patch section.  The record holds the
 * address of label 99 (".word 99b") followed by the three replacement
 * instructions: "wr %g0, 128, %asr27" (write to ASR 27, the pause
 * register — see asm/backoff.h) and two nops to fill the slot.
 */
#define cpu_relax() asm volatile("\n99:\n\t" \
				 "rd %%ccr, %%g0\n\t" \
				 "rd %%ccr, %%g0\n\t" \
				 "rd %%ccr, %%g0\n\t" \
				 ".section .pause_3insn_patch,\"ax\"\n\t"\
				 ".word 99b\n\t" \
				 "wr %%g0, 128, %%asr27\n\t" \
				 "nop\n\t" \
				 "nop\n\t" \
				 ".previous" \
				 ::: "memory")
#endif /* BUILD_VDSO */

#else /* ! __arch64__ */

/* 32-bit SPARC has no strand-yield instruction: just a compiler barrier. */
#define cpu_relax() barrier()

#endif /* __arch64__ */

#endif /* _ASM_SPARC_VDSO_PROCESSOR_H */