/*===--------------- x86gprintrin.h - X86 GPR intrinsics ------------------===
 *
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 *===-----------------------------------------------------------------------===
 */

#ifndef __X86GPRINTRIN_H
#define __X86GPRINTRIN_H

#if !(defined(_MSC_VER) || defined(__SCE__)) || __has_feature(modules) ||     \
    defined(__HRESET__)
#include <hresetintrin.h>
#endif

#if !(defined(_MSC_VER) || defined(__SCE__)) || __has_feature(modules) ||     \
    defined(__UINTR__)
#include <uintrintrin.h>
#endif

#if !(defined(_MSC_VER) || defined(__SCE__)) || __has_feature(modules) ||     \
    defined(__CRC32__)
#include <crc32intrin.h>
#endif

#if defined(__i386__)
#define __FULLBX "ebx"
#define __TMPGPR "eax"
#else
// On a 64-bit target, a 32-bit operation produces a 32-bit result that is
// zero-extended into the full 64-bit destination general-purpose register.
// This means a "mov" into %ebx clobbers the upper 32 bits of rbx, so the
// full 64-bit rbx register must be saved and restored.
#define __FULLBX "rbx"
#define __TMPGPR "rax"
#endif

#define __MOVEGPR(__r1, __r2) "mov {%%"__r1 ", %%"__r2 "|"__r2 ", "__r1"};"

#define __SAVE_GPRBX __MOVEGPR(__FULLBX, __TMPGPR)
#define __RESTORE_GPRBX __MOVEGPR(__TMPGPR, __FULLBX)

#define __SSC_MARK(__Tag)                                                      \
  __asm__ __volatile__(__SAVE_GPRBX                                            \
                       "mov {%0, %%ebx|ebx, %0}; "                             \
                       ".byte 0x64, 0x67, 0x90; "                              \
                       __RESTORE_GPRBX                                         \
                       ::"i"(__Tag)                                            \
                       : __TMPGPR);

#endif /* __X86GPRINTRIN_H */
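
/* Usage sketch (illustrative only, not part of the header): __SSC_MARK loads
 * a tool-defined tag into %ebx and emits the prefixed-NOP byte sequence
 * 0x64 0x67 0x90, which runs as a no-op on real hardware but can be
 * recognized by simulators and analysis tools as a region marker. The tag
 * values 0x111/0x222 and the function name compute_kernel() below are
 * assumptions for illustration (0x111/0x222 are commonly used as start/stop
 * markers with Intel SDE's region-of-interest options); consult the analysis
 * tool's documentation for the values it expects.
 *
 *   __SSC_MARK(0x111);  // begin marked region
 *   compute_kernel();   // hypothetical code under analysis
 *   __SSC_MARK(0x222);  // end marked region
 */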