/*===--------------- x86gprintrin.h - X86 GPR intrinsics ------------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __X86GPRINTRIN_H
#define __X86GPRINTRIN_H

#if !(defined(_MSC_VER) || defined(__SCE__)) || __has_feature(modules) ||     \
    defined(__HRESET__)
#include <hresetintrin.h>
#endif

#if !(defined(_MSC_VER) || defined(__SCE__)) || __has_feature(modules) ||     \
    defined(__UINTR__)
#include <uintrintrin.h>
#endif

#if !(defined(_MSC_VER) || defined(__SCE__)) || __has_feature(modules) ||     \
    defined(__CRC32__)
#include <crc32intrin.h>
#endif

#if defined(__i386__)
#define __FULLBX "ebx"
#define __TMPGPR "eax"
#else
// On 64-bit targets, an instruction with 32-bit operands produces a 32-bit
// result that is zero-extended into the destination 64-bit general-purpose
// register. This means a "mov ..., %ebx" clobbers the upper 32 bits of rbx,
// so we must save and restore the full 64-bit register rbx.
#define __FULLBX "rbx"
#define __TMPGPR "rax"
#endif

#define __MOVEGPR(__r1, __r2) "mov {%%"__r1 ", %%"__r2 "|"__r2 ", "__r1"};"

#define __SAVE_GPRBX __MOVEGPR(__FULLBX, __TMPGPR)
#define __RESTORE_GPRBX __MOVEGPR(__TMPGPR, __FULLBX)

#define __SSC_MARK(__Tag)                                                     \
  __asm__ __volatile__( __SAVE_GPRBX                                          \
                        "mov {%0, %%ebx|ebx, %0}; "                           \
                        ".byte 0x64, 0x67, 0x90; "                            \
                        __RESTORE_GPRBX                                       \
                        ::"i"(__Tag)                                          \
                        : __TMPGPR );

#endif /* __X86GPRINTRIN_H */
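
/* Usage sketch (illustrative, not part of the upstream header): __SSC_MARK
 * loads the immediate tag into EBX and emits the marker byte sequence
 * 0x64 0x67 0x90 (a prefixed NOP recognized by some Intel analysis and
 * simulation tools), while saving and restoring the full RBX/EBX around it.
 * The tag values below (0x111 to open a region, 0x222 to close it) are a
 * common tool convention and are an assumption here, not mandated by this
 * header:
 *
 *   __SSC_MARK(0x111);   // begin region of interest
 *   // ... code under measurement ...
 *   __SSC_MARK(0x222);   // end region of interest
 */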