1//===-- X86InstrInfo.td - Main X86 Instruction Definition --*- tablegen -*-===// 2// 3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4// See https://llvm.org/LICENSE.txt for license information. 5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6// 7//===----------------------------------------------------------------------===// 8// 9// This file describes the X86 instruction set, defining the instructions, and 10// properties of the instructions which are needed for code generation, machine 11// code emission, and analysis. 12// 13//===----------------------------------------------------------------------===// 14 15//===----------------------------------------------------------------------===// 16// X86 specific DAG Nodes. 17// 18 19def SDTX86CmpTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisInt<1>, 20 SDTCisSameAs<1, 2>]>; 21def SDTX86FCmp : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisFP<1>, 22 SDTCisSameAs<1, 2>]>; 23 24def SDTX86Cmov : SDTypeProfile<1, 4, 25 [SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>, 26 SDTCisVT<3, i8>, SDTCisVT<4, i32>]>; 27 28// Unary and binary operator instructions that set EFLAGS as a side-effect. 29def SDTUnaryArithWithFlags : SDTypeProfile<2, 1, 30 [SDTCisSameAs<0, 2>, 31 SDTCisInt<0>, SDTCisVT<1, i32>]>; 32 33def SDTBinaryArithWithFlags : SDTypeProfile<2, 2, 34 [SDTCisSameAs<0, 2>, 35 SDTCisSameAs<0, 3>, 36 SDTCisInt<0>, SDTCisVT<1, i32>]>; 37 38// SDTBinaryArithWithFlagsInOut - RES1, EFLAGS = op LHS, RHS, EFLAGS 39def SDTBinaryArithWithFlagsInOut : SDTypeProfile<2, 3, 40 [SDTCisSameAs<0, 2>, 41 SDTCisSameAs<0, 3>, 42 SDTCisInt<0>, 43 SDTCisVT<1, i32>, 44 SDTCisVT<4, i32>]>; 45// RES1, RES2, FLAGS = op LHS, RHS 46def SDT2ResultBinaryArithWithFlags : SDTypeProfile<3, 2, 47 [SDTCisSameAs<0, 1>, 48 SDTCisSameAs<0, 2>, 49 SDTCisSameAs<0, 3>, 50 SDTCisInt<0>, SDTCisVT<1, i32>]>; 51def SDTX86BrCond : SDTypeProfile<0, 3, 52 [SDTCisVT<0, OtherVT>, 53 SDTCisVT<1, i8>, SDTCisVT<2, i32>]>; 54 55def SDTX86SetCC : SDTypeProfile<1, 2, 56 [SDTCisVT<0, i8>, 57 SDTCisVT<1, i8>, SDTCisVT<2, i32>]>; 58def SDTX86SetCC_C : SDTypeProfile<1, 2, 59 [SDTCisInt<0>, 60 SDTCisVT<1, i8>, SDTCisVT<2, i32>]>; 61 62def SDTX86sahf : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVT<1, i8>]>; 63 64def SDTX86rdrand : SDTypeProfile<2, 0, [SDTCisInt<0>, SDTCisVT<1, i32>]>; 65 66def SDTX86rdpkru : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVT<1, i32>]>; 67def SDTX86wrpkru : SDTypeProfile<0, 3, [SDTCisVT<0, i32>, SDTCisVT<1, i32>, 68 SDTCisVT<2, i32>]>; 69 70def SDTX86cas : SDTypeProfile<0, 3, [SDTCisPtrTy<0>, SDTCisInt<1>, 71 SDTCisVT<2, i8>]>; 72def SDTX86cas8pair : SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>; 73def SDTX86cas16pair : SDTypeProfile<0, 2, [SDTCisPtrTy<0>, SDTCisVT<1, i64>]>; 74 75def SDTLockBinaryArithWithFlags : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, 76 SDTCisPtrTy<1>, 77 SDTCisInt<2>]>; 78 79def SDTLockUnaryArithWithFlags : SDTypeProfile<1, 1, [SDTCisVT<0, i32>, 80 SDTCisPtrTy<1>]>; 81 82def SDTX86Ret : SDTypeProfile<0, -1, [SDTCisVT<0, i32>]>; 83 84def SDT_X86CallSeqStart : SDCallSeqStart<[SDTCisVT<0, i32>, 85 SDTCisVT<1, i32>]>; 86def SDT_X86CallSeqEnd : SDCallSeqEnd<[SDTCisVT<0, i32>, 87 SDTCisVT<1, i32>]>; 88 89def SDT_X86Call : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>; 90 91def SDT_X86NtBrind : SDTypeProfile<0, -1, [SDTCisVT<0, iPTR>]>; 92 93def SDT_X86VASTART_SAVE_XMM_REGS : SDTypeProfile<0, -1, [SDTCisVT<0, i8>, 94 SDTCisVT<1, iPTR>, 95 SDTCisVT<2, iPTR>]>; 96 97def SDT_X86VAARG : SDTypeProfile<1, -1, [SDTCisPtrTy<0>, 98 SDTCisPtrTy<1>, 99 SDTCisVT<2, i32>, 100 SDTCisVT<3, i8>, 101 SDTCisVT<4, i32>]>; 102 103def SDTX86RepStr : SDTypeProfile<0, 1, [SDTCisVT<0, OtherVT>]>; 104 105def SDTX86Void : SDTypeProfile<0, 0, []>; 106 107def SDTX86Wrapper : SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>, SDTCisPtrTy<0>]>; 108 109def SDT_X86TLSADDR : SDTypeProfile<0, 1, [SDTCisInt<0>]>; 110 111def SDT_X86TLSBASEADDR : SDTypeProfile<0, 1, [SDTCisInt<0>]>; 112 113def SDT_X86TLSCALL : SDTypeProfile<0, 1, [SDTCisInt<0>]>; 114 115def SDT_X86WIN_ALLOCA : SDTypeProfile<0, 1, [SDTCisVT<0, iPTR>]>; 116 117def SDT_X86SEG_ALLOCA : SDTypeProfile<1, 1, [SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>; 118 119def SDT_X86PROBED_ALLOCA : SDTypeProfile<1, 1, [SDTCisVT<0, iPTR>, SDTCisVT<1, iPTR>]>; 120 121def SDT_X86EHRET : SDTypeProfile<0, 1, [SDTCisInt<0>]>; 122 123def SDT_X86TCRET : SDTypeProfile<0, 2, [SDTCisPtrTy<0>, SDTCisVT<1, i32>]>; 124 125def SDT_X86MEMBARRIER : SDTypeProfile<0, 0, []>; 126 127def SDT_X86ENQCMD : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, 128 SDTCisPtrTy<1>, SDTCisSameAs<1, 2>]>; 129 130def SDT_X86AESENCDECKL : SDTypeProfile<2, 2, [SDTCisVT<0, v2i64>, 131 SDTCisVT<1, i32>, 132 SDTCisVT<2, v2i64>, 133 SDTCisPtrTy<3>]>; 134 135def X86MemBarrier : SDNode<"X86ISD::MEMBARRIER", SDT_X86MEMBARRIER, 136 [SDNPHasChain,SDNPSideEffect]>; 137def X86MFence : SDNode<"X86ISD::MFENCE", SDT_X86MEMBARRIER, 138 [SDNPHasChain]>; 139 140 141def X86bsf : SDNode<"X86ISD::BSF", SDTUnaryArithWithFlags>; 142def X86bsr : SDNode<"X86ISD::BSR", SDTUnaryArithWithFlags>; 143def X86fshl : SDNode<"X86ISD::FSHL", SDTIntShiftDOp>; 144def X86fshr : SDNode<"X86ISD::FSHR", SDTIntShiftDOp>; 145 146def X86cmp : SDNode<"X86ISD::CMP" , SDTX86CmpTest>; 147def X86fcmp : SDNode<"X86ISD::FCMP", SDTX86FCmp>; 148def X86strict_fcmp : SDNode<"X86ISD::STRICT_FCMP", SDTX86FCmp, [SDNPHasChain]>; 149def X86strict_fcmps : SDNode<"X86ISD::STRICT_FCMPS", SDTX86FCmp, [SDNPHasChain]>; 150def X86bt : SDNode<"X86ISD::BT", SDTX86CmpTest>; 151 152def X86cmov : SDNode<"X86ISD::CMOV", SDTX86Cmov>; 153def X86brcond : SDNode<"X86ISD::BRCOND", SDTX86BrCond, 154 [SDNPHasChain]>; 155def X86setcc : SDNode<"X86ISD::SETCC", SDTX86SetCC>; 156def X86setcc_c : SDNode<"X86ISD::SETCC_CARRY", SDTX86SetCC_C>; 157 158def X86rdrand : SDNode<"X86ISD::RDRAND", SDTX86rdrand, 159 [SDNPHasChain, SDNPSideEffect]>; 160 161def X86rdseed : SDNode<"X86ISD::RDSEED", SDTX86rdrand, 162 [SDNPHasChain, SDNPSideEffect]>; 163 164def X86rdpkru : SDNode<"X86ISD::RDPKRU", SDTX86rdpkru, 165 [SDNPHasChain, SDNPSideEffect]>; 166def X86wrpkru : SDNode<"X86ISD::WRPKRU", SDTX86wrpkru, 167 [SDNPHasChain, SDNPSideEffect]>; 168 169def X86cas : SDNode<"X86ISD::LCMPXCHG_DAG", SDTX86cas, 170 [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore, 171 SDNPMayLoad, SDNPMemOperand]>; 172def X86cas8 : SDNode<"X86ISD::LCMPXCHG8_DAG", SDTX86cas8pair, 173 [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore, 174 SDNPMayLoad, SDNPMemOperand]>; 175def X86cas16 : SDNode<"X86ISD::LCMPXCHG16_DAG", SDTX86cas16pair, 176 [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore, 177 SDNPMayLoad, SDNPMemOperand]>; 178 179def X86retflag : SDNode<"X86ISD::RET_FLAG", SDTX86Ret, 180 [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>; 181def X86iret : SDNode<"X86ISD::IRET", SDTX86Ret, 182 [SDNPHasChain, SDNPOptInGlue]>; 183 184def X86vastart_save_xmm_regs : 185 SDNode<"X86ISD::VASTART_SAVE_XMM_REGS", 186 SDT_X86VASTART_SAVE_XMM_REGS, 187 [SDNPHasChain, SDNPVariadic]>; 188def X86vaarg64 : 189 SDNode<"X86ISD::VAARG_64", SDT_X86VAARG, 190 [SDNPHasChain, SDNPMayLoad, SDNPMayStore, 191 SDNPMemOperand]>; 192def X86vaargx32 : 193 SDNode<"X86ISD::VAARG_X32", SDT_X86VAARG, 194 [SDNPHasChain, SDNPMayLoad, SDNPMayStore, 195 SDNPMemOperand]>; 196def X86callseq_start : 197 SDNode<"ISD::CALLSEQ_START", SDT_X86CallSeqStart, 198 [SDNPHasChain, SDNPOutGlue]>; 199def X86callseq_end : 200 SDNode<"ISD::CALLSEQ_END", SDT_X86CallSeqEnd, 201 [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>; 202 203def X86call : SDNode<"X86ISD::CALL", SDT_X86Call, 204 [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue, 205 SDNPVariadic]>; 206 207def X86NoTrackCall : SDNode<"X86ISD::NT_CALL", SDT_X86Call, 208 [SDNPHasChain, SDNPOutGlue, SDNPOptInGlue, 209 SDNPVariadic]>; 210def X86NoTrackBrind : SDNode<"X86ISD::NT_BRIND", SDT_X86NtBrind, 211 [SDNPHasChain]>; 212 213def X86rep_stos: SDNode<"X86ISD::REP_STOS", SDTX86RepStr, 214 [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore]>; 215def X86rep_movs: SDNode<"X86ISD::REP_MOVS", SDTX86RepStr, 216 [SDNPHasChain, SDNPInGlue, SDNPOutGlue, SDNPMayStore, 217 SDNPMayLoad]>; 218 219def X86Wrapper : SDNode<"X86ISD::Wrapper", SDTX86Wrapper>; 220def X86WrapperRIP : SDNode<"X86ISD::WrapperRIP", SDTX86Wrapper>; 221 222def X86RecoverFrameAlloc : SDNode<"ISD::LOCAL_RECOVER", 223 SDTypeProfile<1, 1, [SDTCisSameAs<0, 1>, 224 SDTCisInt<1>]>>; 225 226def X86tlsaddr : SDNode<"X86ISD::TLSADDR", SDT_X86TLSADDR, 227 [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>; 228 229def X86tlsbaseaddr : SDNode<"X86ISD::TLSBASEADDR", SDT_X86TLSBASEADDR, 230 [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>; 231 232def X86ehret : SDNode<"X86ISD::EH_RETURN", SDT_X86EHRET, 233 [SDNPHasChain]>; 234 235def X86eh_sjlj_setjmp : SDNode<"X86ISD::EH_SJLJ_SETJMP", 236 SDTypeProfile<1, 1, [SDTCisInt<0>, 237 SDTCisPtrTy<1>]>, 238 [SDNPHasChain, SDNPSideEffect]>; 239def X86eh_sjlj_longjmp : SDNode<"X86ISD::EH_SJLJ_LONGJMP", 240 SDTypeProfile<0, 1, [SDTCisPtrTy<0>]>, 241 [SDNPHasChain, SDNPSideEffect]>; 242def X86eh_sjlj_setup_dispatch : SDNode<"X86ISD::EH_SJLJ_SETUP_DISPATCH", 243 SDTypeProfile<0, 0, []>, 244 [SDNPHasChain, SDNPSideEffect]>; 245 246def X86tcret : SDNode<"X86ISD::TC_RETURN", SDT_X86TCRET, 247 [SDNPHasChain, SDNPOptInGlue, SDNPVariadic]>; 248 249def X86add_flag : SDNode<"X86ISD::ADD", SDTBinaryArithWithFlags, 250 [SDNPCommutative]>; 251def X86sub_flag : SDNode<"X86ISD::SUB", SDTBinaryArithWithFlags>; 252def X86smul_flag : SDNode<"X86ISD::SMUL", SDTBinaryArithWithFlags, 253 [SDNPCommutative]>; 254def X86umul_flag : SDNode<"X86ISD::UMUL", SDT2ResultBinaryArithWithFlags, 255 [SDNPCommutative]>; 256def X86adc_flag : SDNode<"X86ISD::ADC", SDTBinaryArithWithFlagsInOut>; 257def X86sbb_flag : SDNode<"X86ISD::SBB", SDTBinaryArithWithFlagsInOut>; 258 259def X86or_flag : SDNode<"X86ISD::OR", SDTBinaryArithWithFlags, 260 [SDNPCommutative]>; 261def X86xor_flag : SDNode<"X86ISD::XOR", SDTBinaryArithWithFlags, 262 [SDNPCommutative]>; 263def X86and_flag : SDNode<"X86ISD::AND", SDTBinaryArithWithFlags, 264 [SDNPCommutative]>; 265 266def X86lock_add : SDNode<"X86ISD::LADD", SDTLockBinaryArithWithFlags, 267 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, 268 SDNPMemOperand]>; 269def X86lock_sub : SDNode<"X86ISD::LSUB", SDTLockBinaryArithWithFlags, 270 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, 271 SDNPMemOperand]>; 272def X86lock_or : SDNode<"X86ISD::LOR", SDTLockBinaryArithWithFlags, 273 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, 274 SDNPMemOperand]>; 275def X86lock_xor : SDNode<"X86ISD::LXOR", SDTLockBinaryArithWithFlags, 276 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, 277 SDNPMemOperand]>; 278def X86lock_and : SDNode<"X86ISD::LAND", SDTLockBinaryArithWithFlags, 279 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, 280 SDNPMemOperand]>; 281 282def X86bextr : SDNode<"X86ISD::BEXTR", SDTIntBinOp>; 283def X86bextri : SDNode<"X86ISD::BEXTRI", SDTIntBinOp>; 284 285def X86bzhi : SDNode<"X86ISD::BZHI", SDTIntBinOp>; 286 287def X86pdep : SDNode<"X86ISD::PDEP", SDTIntBinOp>; 288def X86pext : SDNode<"X86ISD::PEXT", SDTIntBinOp>; 289 290def X86mul_imm : SDNode<"X86ISD::MUL_IMM", SDTIntBinOp>; 291 292def X86WinAlloca : SDNode<"X86ISD::WIN_ALLOCA", SDT_X86WIN_ALLOCA, 293 [SDNPHasChain, SDNPOutGlue]>; 294 295def X86SegAlloca : SDNode<"X86ISD::SEG_ALLOCA", SDT_X86SEG_ALLOCA, 296 [SDNPHasChain]>; 297 298def X86ProbedAlloca : SDNode<"X86ISD::PROBED_ALLOCA", SDT_X86PROBED_ALLOCA, 299 [SDNPHasChain]>; 300 301def X86TLSCall : SDNode<"X86ISD::TLSCALL", SDT_X86TLSCALL, 302 [SDNPHasChain, SDNPOptInGlue, SDNPOutGlue]>; 303 304def X86lwpins : SDNode<"X86ISD::LWPINS", 305 SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>, 306 SDTCisVT<2, i32>, SDTCisVT<3, i32>]>, 307 [SDNPHasChain, SDNPMayStore, SDNPMayLoad, SDNPSideEffect]>; 308 309def X86umwait : SDNode<"X86ISD::UMWAIT", 310 SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>, 311 SDTCisVT<2, i32>, SDTCisVT<3, i32>]>, 312 [SDNPHasChain, SDNPSideEffect]>; 313 314def X86tpause : SDNode<"X86ISD::TPAUSE", 315 SDTypeProfile<1, 3, [SDTCisVT<0, i32>, SDTCisInt<1>, 316 SDTCisVT<2, i32>, SDTCisVT<3, i32>]>, 317 [SDNPHasChain, SDNPSideEffect]>; 318 319def X86enqcmd : SDNode<"X86ISD::ENQCMD", SDT_X86ENQCMD, 320 [SDNPHasChain, SDNPSideEffect]>; 321def X86enqcmds : SDNode<"X86ISD::ENQCMDS", SDT_X86ENQCMD, 322 [SDNPHasChain, SDNPSideEffect]>; 323def X86testui : SDNode<"X86ISD::TESTUI", 324 SDTypeProfile<1, 0, [SDTCisVT<0, i32>]>, 325 [SDNPHasChain, SDNPSideEffect]>; 326 327def X86aesenc128kl : SDNode<"X86ISD::AESENC128KL", SDT_X86AESENCDECKL, 328 [SDNPHasChain, SDNPMayLoad, SDNPSideEffect, 329 SDNPMemOperand]>; 330def X86aesdec128kl : SDNode<"X86ISD::AESDEC128KL", SDT_X86AESENCDECKL, 331 [SDNPHasChain, SDNPMayLoad, SDNPSideEffect, 332 SDNPMemOperand]>; 333def X86aesenc256kl : SDNode<"X86ISD::AESENC256KL", SDT_X86AESENCDECKL, 334 [SDNPHasChain, SDNPMayLoad, SDNPSideEffect, 335 SDNPMemOperand]>; 336def X86aesdec256kl : SDNode<"X86ISD::AESDEC256KL", SDT_X86AESENCDECKL, 337 [SDNPHasChain, SDNPMayLoad, SDNPSideEffect, 338 SDNPMemOperand]>; 339 340//===----------------------------------------------------------------------===// 341// X86 Operand Definitions. 342// 343 344// A version of ptr_rc which excludes SP, ESP, and RSP. This is used for 345// the index operand of an address, to conform to x86 encoding restrictions. 346def ptr_rc_nosp : PointerLikeRegClass<1>; 347 348// *mem - Operand definitions for the funky X86 addressing mode operands. 349// 350def X86MemAsmOperand : AsmOperandClass { 351 let Name = "Mem"; 352} 353let RenderMethod = "addMemOperands", SuperClasses = [X86MemAsmOperand] in { 354 def X86Mem8AsmOperand : AsmOperandClass { let Name = "Mem8"; } 355 def X86Mem16AsmOperand : AsmOperandClass { let Name = "Mem16"; } 356 def X86Mem32AsmOperand : AsmOperandClass { let Name = "Mem32"; } 357 def X86Mem64AsmOperand : AsmOperandClass { let Name = "Mem64"; } 358 def X86Mem80AsmOperand : AsmOperandClass { let Name = "Mem80"; } 359 def X86Mem128AsmOperand : AsmOperandClass { let Name = "Mem128"; } 360 def X86Mem256AsmOperand : AsmOperandClass { let Name = "Mem256"; } 361 def X86Mem512AsmOperand : AsmOperandClass { let Name = "Mem512"; } 362 // Gather mem operands 363 def X86Mem64_RC128Operand : AsmOperandClass { let Name = "Mem64_RC128"; } 364 def X86Mem128_RC128Operand : AsmOperandClass { let Name = "Mem128_RC128"; } 365 def X86Mem256_RC128Operand : AsmOperandClass { let Name = "Mem256_RC128"; } 366 def X86Mem128_RC256Operand : AsmOperandClass { let Name = "Mem128_RC256"; } 367 def X86Mem256_RC256Operand : AsmOperandClass { let Name = "Mem256_RC256"; } 368 369 def X86Mem64_RC128XOperand : AsmOperandClass { let Name = "Mem64_RC128X"; } 370 def X86Mem128_RC128XOperand : AsmOperandClass { let Name = "Mem128_RC128X"; } 371 def X86Mem256_RC128XOperand : AsmOperandClass { let Name = "Mem256_RC128X"; } 372 def X86Mem128_RC256XOperand : AsmOperandClass { let Name = "Mem128_RC256X"; } 373 def X86Mem256_RC256XOperand : AsmOperandClass { let Name = "Mem256_RC256X"; } 374 def X86Mem512_RC256XOperand : AsmOperandClass { let Name = "Mem512_RC256X"; } 375 def X86Mem256_RC512Operand : AsmOperandClass { let Name = "Mem256_RC512"; } 376 def X86Mem512_RC512Operand : AsmOperandClass { let Name = "Mem512_RC512"; } 377 378 def X86SibMemOperand : AsmOperandClass { let Name = "SibMem"; } 379} 380 381def X86AbsMemAsmOperand : AsmOperandClass { 382 let Name = "AbsMem"; 383 let SuperClasses = [X86MemAsmOperand]; 384} 385 386class X86MemOperand<string printMethod, 387 AsmOperandClass parserMatchClass = X86MemAsmOperand> : Operand<iPTR> { 388 let PrintMethod = printMethod; 389 let MIOperandInfo = (ops ptr_rc, i8imm, ptr_rc_nosp, i32imm, SEGMENT_REG); 390 let ParserMatchClass = parserMatchClass; 391 let OperandType = "OPERAND_MEMORY"; 392} 393 394// Gather mem operands 395class X86VMemOperand<RegisterClass RC, string printMethod, 396 AsmOperandClass parserMatchClass> 397 : X86MemOperand<printMethod, parserMatchClass> { 398 let MIOperandInfo = (ops ptr_rc, i8imm, RC, i32imm, SEGMENT_REG); 399} 400 401def anymem : X86MemOperand<"printMemReference">; 402def X86any_fcmp : PatFrags<(ops node:$lhs, node:$rhs), 403 [(X86strict_fcmp node:$lhs, node:$rhs), 404 (X86fcmp node:$lhs, node:$rhs)]>; 405 406// FIXME: Right now we allow any size during parsing, but we might want to 407// restrict to only unsized memory. 408def opaquemem : X86MemOperand<"printMemReference">; 409 410def sibmem: X86MemOperand<"printMemReference", X86SibMemOperand>; 411 412def i8mem : X86MemOperand<"printbytemem", X86Mem8AsmOperand>; 413def i16mem : X86MemOperand<"printwordmem", X86Mem16AsmOperand>; 414def i32mem : X86MemOperand<"printdwordmem", X86Mem32AsmOperand>; 415def i64mem : X86MemOperand<"printqwordmem", X86Mem64AsmOperand>; 416def i128mem : X86MemOperand<"printxmmwordmem", X86Mem128AsmOperand>; 417def i256mem : X86MemOperand<"printymmwordmem", X86Mem256AsmOperand>; 418def i512mem : X86MemOperand<"printzmmwordmem", X86Mem512AsmOperand>; 419def f32mem : X86MemOperand<"printdwordmem", X86Mem32AsmOperand>; 420def f64mem : X86MemOperand<"printqwordmem", X86Mem64AsmOperand>; 421def f80mem : X86MemOperand<"printtbytemem", X86Mem80AsmOperand>; 422def f128mem : X86MemOperand<"printxmmwordmem", X86Mem128AsmOperand>; 423def f256mem : X86MemOperand<"printymmwordmem", X86Mem256AsmOperand>; 424def f512mem : X86MemOperand<"printzmmwordmem", X86Mem512AsmOperand>; 425 426// Gather mem operands 427def vx64mem : X86VMemOperand<VR128, "printqwordmem", X86Mem64_RC128Operand>; 428def vx128mem : X86VMemOperand<VR128, "printxmmwordmem", X86Mem128_RC128Operand>; 429def vx256mem : X86VMemOperand<VR128, "printymmwordmem", X86Mem256_RC128Operand>; 430def vy128mem : X86VMemOperand<VR256, "printxmmwordmem", X86Mem128_RC256Operand>; 431def vy256mem : X86VMemOperand<VR256, "printymmwordmem", X86Mem256_RC256Operand>; 432 433def vx64xmem : X86VMemOperand<VR128X, "printqwordmem", X86Mem64_RC128XOperand>; 434def vx128xmem : X86VMemOperand<VR128X, "printxmmwordmem", X86Mem128_RC128XOperand>; 435def vx256xmem : X86VMemOperand<VR128X, "printymmwordmem", X86Mem256_RC128XOperand>; 436def vy128xmem : X86VMemOperand<VR256X, "printxmmwordmem", X86Mem128_RC256XOperand>; 437def vy256xmem : X86VMemOperand<VR256X, "printymmwordmem", X86Mem256_RC256XOperand>; 438def vy512xmem : X86VMemOperand<VR256X, "printzmmwordmem", X86Mem512_RC256XOperand>; 439def vz256mem : X86VMemOperand<VR512, "printymmwordmem", X86Mem256_RC512Operand>; 440def vz512mem : X86VMemOperand<VR512, "printzmmwordmem", X86Mem512_RC512Operand>; 441 442// A version of i8mem for use on x86-64 and x32 that uses a NOREX GPR instead 443// of a plain GPR, so that it doesn't potentially require a REX prefix. 444def ptr_rc_norex : PointerLikeRegClass<2>; 445def ptr_rc_norex_nosp : PointerLikeRegClass<3>; 446 447def i8mem_NOREX : Operand<iPTR> { 448 let PrintMethod = "printbytemem"; 449 let MIOperandInfo = (ops ptr_rc_norex, i8imm, ptr_rc_norex_nosp, i32imm, 450 SEGMENT_REG); 451 let ParserMatchClass = X86Mem8AsmOperand; 452 let OperandType = "OPERAND_MEMORY"; 453} 454 455// GPRs available for tailcall. 456// It represents GR32_TC, GR64_TC or GR64_TCW64. 457def ptr_rc_tailcall : PointerLikeRegClass<4>; 458 459// Special i32mem for addresses of load folding tail calls. These are not 460// allowed to use callee-saved registers since they must be scheduled 461// after callee-saved register are popped. 462def i32mem_TC : Operand<i32> { 463 let PrintMethod = "printdwordmem"; 464 let MIOperandInfo = (ops ptr_rc_tailcall, i8imm, ptr_rc_tailcall, 465 i32imm, SEGMENT_REG); 466 let ParserMatchClass = X86Mem32AsmOperand; 467 let OperandType = "OPERAND_MEMORY"; 468} 469 470// Special i64mem for addresses of load folding tail calls. These are not 471// allowed to use callee-saved registers since they must be scheduled 472// after callee-saved register are popped. 473def i64mem_TC : Operand<i64> { 474 let PrintMethod = "printqwordmem"; 475 let MIOperandInfo = (ops ptr_rc_tailcall, i8imm, 476 ptr_rc_tailcall, i32imm, SEGMENT_REG); 477 let ParserMatchClass = X86Mem64AsmOperand; 478 let OperandType = "OPERAND_MEMORY"; 479} 480 481// Special parser to detect 16-bit mode to select 16-bit displacement. 482def X86AbsMem16AsmOperand : AsmOperandClass { 483 let Name = "AbsMem16"; 484 let RenderMethod = "addAbsMemOperands"; 485 let SuperClasses = [X86AbsMemAsmOperand]; 486} 487 488// Branch targets print as pc-relative values. 489class BranchTargetOperand<ValueType ty> : Operand<ty> { 490 let OperandType = "OPERAND_PCREL"; 491 let PrintMethod = "printPCRelImm"; 492 let ParserMatchClass = X86AbsMemAsmOperand; 493} 494 495def i32imm_brtarget : BranchTargetOperand<i32>; 496def i16imm_brtarget : BranchTargetOperand<i16>; 497 498// 64-bits but only 32 bits are significant, and those bits are treated as being 499// pc relative. 500def i64i32imm_brtarget : BranchTargetOperand<i64>; 501 502def brtarget : BranchTargetOperand<OtherVT>; 503def brtarget8 : BranchTargetOperand<OtherVT>; 504def brtarget16 : BranchTargetOperand<OtherVT> { 505 let ParserMatchClass = X86AbsMem16AsmOperand; 506} 507def brtarget32 : BranchTargetOperand<OtherVT>; 508 509let RenderMethod = "addSrcIdxOperands" in { 510 def X86SrcIdx8Operand : AsmOperandClass { 511 let Name = "SrcIdx8"; 512 let SuperClasses = [X86Mem8AsmOperand]; 513 } 514 def X86SrcIdx16Operand : AsmOperandClass { 515 let Name = "SrcIdx16"; 516 let SuperClasses = [X86Mem16AsmOperand]; 517 } 518 def X86SrcIdx32Operand : AsmOperandClass { 519 let Name = "SrcIdx32"; 520 let SuperClasses = [X86Mem32AsmOperand]; 521 } 522 def X86SrcIdx64Operand : AsmOperandClass { 523 let Name = "SrcIdx64"; 524 let SuperClasses = [X86Mem64AsmOperand]; 525 } 526} // RenderMethod = "addSrcIdxOperands" 527 528let RenderMethod = "addDstIdxOperands" in { 529 def X86DstIdx8Operand : AsmOperandClass { 530 let Name = "DstIdx8"; 531 let SuperClasses = [X86Mem8AsmOperand]; 532 } 533 def X86DstIdx16Operand : AsmOperandClass { 534 let Name = "DstIdx16"; 535 let SuperClasses = [X86Mem16AsmOperand]; 536 } 537 def X86DstIdx32Operand : AsmOperandClass { 538 let Name = "DstIdx32"; 539 let SuperClasses = [X86Mem32AsmOperand]; 540 } 541 def X86DstIdx64Operand : AsmOperandClass { 542 let Name = "DstIdx64"; 543 let SuperClasses = [X86Mem64AsmOperand]; 544 } 545} // RenderMethod = "addDstIdxOperands" 546 547let RenderMethod = "addMemOffsOperands" in { 548 def X86MemOffs16_8AsmOperand : AsmOperandClass { 549 let Name = "MemOffs16_8"; 550 let SuperClasses = [X86Mem8AsmOperand]; 551 } 552 def X86MemOffs16_16AsmOperand : AsmOperandClass { 553 let Name = "MemOffs16_16"; 554 let SuperClasses = [X86Mem16AsmOperand]; 555 } 556 def X86MemOffs16_32AsmOperand : AsmOperandClass { 557 let Name = "MemOffs16_32"; 558 let SuperClasses = [X86Mem32AsmOperand]; 559 } 560 def X86MemOffs32_8AsmOperand : AsmOperandClass { 561 let Name = "MemOffs32_8"; 562 let SuperClasses = [X86Mem8AsmOperand]; 563 } 564 def X86MemOffs32_16AsmOperand : AsmOperandClass { 565 let Name = "MemOffs32_16"; 566 let SuperClasses = [X86Mem16AsmOperand]; 567 } 568 def X86MemOffs32_32AsmOperand : AsmOperandClass { 569 let Name = "MemOffs32_32"; 570 let SuperClasses = [X86Mem32AsmOperand]; 571 } 572 def X86MemOffs32_64AsmOperand : AsmOperandClass { 573 let Name = "MemOffs32_64"; 574 let SuperClasses = [X86Mem64AsmOperand]; 575 } 576 def X86MemOffs64_8AsmOperand : AsmOperandClass { 577 let Name = "MemOffs64_8"; 578 let SuperClasses = [X86Mem8AsmOperand]; 579 } 580 def X86MemOffs64_16AsmOperand : AsmOperandClass { 581 let Name = "MemOffs64_16"; 582 let SuperClasses = [X86Mem16AsmOperand]; 583 } 584 def X86MemOffs64_32AsmOperand : AsmOperandClass { 585 let Name = "MemOffs64_32"; 586 let SuperClasses = [X86Mem32AsmOperand]; 587 } 588 def X86MemOffs64_64AsmOperand : AsmOperandClass { 589 let Name = "MemOffs64_64"; 590 let SuperClasses = [X86Mem64AsmOperand]; 591 } 592} // RenderMethod = "addMemOffsOperands" 593 594class X86SrcIdxOperand<string printMethod, AsmOperandClass parserMatchClass> 595 : X86MemOperand<printMethod, parserMatchClass> { 596 let MIOperandInfo = (ops ptr_rc, SEGMENT_REG); 597} 598 599class X86DstIdxOperand<string printMethod, AsmOperandClass parserMatchClass> 600 : X86MemOperand<printMethod, parserMatchClass> { 601 let MIOperandInfo = (ops ptr_rc); 602} 603 604def srcidx8 : X86SrcIdxOperand<"printSrcIdx8", X86SrcIdx8Operand>; 605def srcidx16 : X86SrcIdxOperand<"printSrcIdx16", X86SrcIdx16Operand>; 606def srcidx32 : X86SrcIdxOperand<"printSrcIdx32", X86SrcIdx32Operand>; 607def srcidx64 : X86SrcIdxOperand<"printSrcIdx64", X86SrcIdx64Operand>; 608def dstidx8 : X86DstIdxOperand<"printDstIdx8", X86DstIdx8Operand>; 609def dstidx16 : X86DstIdxOperand<"printDstIdx16", X86DstIdx16Operand>; 610def dstidx32 : X86DstIdxOperand<"printDstIdx32", X86DstIdx32Operand>; 611def dstidx64 : X86DstIdxOperand<"printDstIdx64", X86DstIdx64Operand>; 612 613class X86MemOffsOperand<Operand immOperand, string printMethod, 614 AsmOperandClass parserMatchClass> 615 : X86MemOperand<printMethod, parserMatchClass> { 616 let MIOperandInfo = (ops immOperand, SEGMENT_REG); 617} 618 619def offset16_8 : X86MemOffsOperand<i16imm, "printMemOffs8", 620 X86MemOffs16_8AsmOperand>; 621def offset16_16 : X86MemOffsOperand<i16imm, "printMemOffs16", 622 X86MemOffs16_16AsmOperand>; 623def offset16_32 : X86MemOffsOperand<i16imm, "printMemOffs32", 624 X86MemOffs16_32AsmOperand>; 625def offset32_8 : X86MemOffsOperand<i32imm, "printMemOffs8", 626 X86MemOffs32_8AsmOperand>; 627def offset32_16 : X86MemOffsOperand<i32imm, "printMemOffs16", 628 X86MemOffs32_16AsmOperand>; 629def offset32_32 : X86MemOffsOperand<i32imm, "printMemOffs32", 630 X86MemOffs32_32AsmOperand>; 631def offset32_64 : X86MemOffsOperand<i32imm, "printMemOffs64", 632 X86MemOffs32_64AsmOperand>; 633def offset64_8 : X86MemOffsOperand<i64imm, "printMemOffs8", 634 X86MemOffs64_8AsmOperand>; 635def offset64_16 : X86MemOffsOperand<i64imm, "printMemOffs16", 636 X86MemOffs64_16AsmOperand>; 637def offset64_32 : X86MemOffsOperand<i64imm, "printMemOffs32", 638 X86MemOffs64_32AsmOperand>; 639def offset64_64 : X86MemOffsOperand<i64imm, "printMemOffs64", 640 X86MemOffs64_64AsmOperand>; 641 642def ccode : Operand<i8> { 643 let PrintMethod = "printCondCode"; 644 let OperandNamespace = "X86"; 645 let OperandType = "OPERAND_COND_CODE"; 646} 647 648class ImmSExtAsmOperandClass : AsmOperandClass { 649 let SuperClasses = [ImmAsmOperand]; 650 let RenderMethod = "addImmOperands"; 651} 652 653def X86GR32orGR64AsmOperand : AsmOperandClass { 654 let Name = "GR32orGR64"; 655} 656def GR32orGR64 : RegisterOperand<GR32> { 657 let ParserMatchClass = X86GR32orGR64AsmOperand; 658} 659 660def X86GR16orGR32orGR64AsmOperand : AsmOperandClass { 661 let Name = "GR16orGR32orGR64"; 662} 663def GR16orGR32orGR64 : RegisterOperand<GR16> { 664 let ParserMatchClass = X86GR16orGR32orGR64AsmOperand; 665} 666 667def AVX512RCOperand : AsmOperandClass { 668 let Name = "AVX512RC"; 669} 670def AVX512RC : Operand<i32> { 671 let PrintMethod = "printRoundingControl"; 672 let OperandNamespace = "X86"; 673 let OperandType = "OPERAND_ROUNDING_CONTROL"; 674 let ParserMatchClass = AVX512RCOperand; 675} 676 677// Sign-extended immediate classes. We don't need to define the full lattice 678// here because there is no instruction with an ambiguity between ImmSExti64i32 679// and ImmSExti32i8. 680// 681// The strange ranges come from the fact that the assembler always works with 682// 64-bit immediates, but for a 16-bit target value we want to accept both "-1" 683// (which will be a -1ULL), and "0xFF" (-1 in 16-bits). 684 685// [0, 0x7FFFFFFF] | 686// [0xFFFFFFFF80000000, 0xFFFFFFFFFFFFFFFF] 687def ImmSExti64i32AsmOperand : ImmSExtAsmOperandClass { 688 let Name = "ImmSExti64i32"; 689} 690 691// [0, 0x0000007F] | [0x000000000000FF80, 0x000000000000FFFF] | 692// [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF] 693def ImmSExti16i8AsmOperand : ImmSExtAsmOperandClass { 694 let Name = "ImmSExti16i8"; 695 let SuperClasses = [ImmSExti64i32AsmOperand]; 696} 697 698// [0, 0x0000007F] | [0x00000000FFFFFF80, 0x00000000FFFFFFFF] | 699// [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF] 700def ImmSExti32i8AsmOperand : ImmSExtAsmOperandClass { 701 let Name = "ImmSExti32i8"; 702} 703 704// [0, 0x0000007F] | 705// [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF] 706def ImmSExti64i8AsmOperand : ImmSExtAsmOperandClass { 707 let Name = "ImmSExti64i8"; 708 let SuperClasses = [ImmSExti16i8AsmOperand, ImmSExti32i8AsmOperand, 709 ImmSExti64i32AsmOperand]; 710} 711 712// 4-bit immediate used by some XOP instructions 713// [0, 0xF] 714def ImmUnsignedi4AsmOperand : AsmOperandClass { 715 let Name = "ImmUnsignedi4"; 716 let RenderMethod = "addImmOperands"; 717 let DiagnosticType = "InvalidImmUnsignedi4"; 718} 719 720// Unsigned immediate used by SSE/AVX instructions 721// [0, 0xFF] 722// [0xFFFFFFFFFFFFFF80, 0xFFFFFFFFFFFFFFFF] 723def ImmUnsignedi8AsmOperand : AsmOperandClass { 724 let Name = "ImmUnsignedi8"; 725 let RenderMethod = "addImmOperands"; 726} 727 728// A couple of more descriptive operand definitions. 729// 16-bits but only 8 bits are significant. 730def i16i8imm : Operand<i16> { 731 let ParserMatchClass = ImmSExti16i8AsmOperand; 732 let OperandType = "OPERAND_IMMEDIATE"; 733} 734// 32-bits but only 8 bits are significant. 735def i32i8imm : Operand<i32> { 736 let ParserMatchClass = ImmSExti32i8AsmOperand; 737 let OperandType = "OPERAND_IMMEDIATE"; 738} 739 740// 64-bits but only 32 bits are significant. 741def i64i32imm : Operand<i64> { 742 let ParserMatchClass = ImmSExti64i32AsmOperand; 743 let OperandType = "OPERAND_IMMEDIATE"; 744} 745 746// 64-bits but only 8 bits are significant. 747def i64i8imm : Operand<i64> { 748 let ParserMatchClass = ImmSExti64i8AsmOperand; 749 let OperandType = "OPERAND_IMMEDIATE"; 750} 751 752// Unsigned 4-bit immediate used by some XOP instructions. 753def u4imm : Operand<i8> { 754 let PrintMethod = "printU8Imm"; 755 let ParserMatchClass = ImmUnsignedi4AsmOperand; 756 let OperandType = "OPERAND_IMMEDIATE"; 757} 758 759// Unsigned 8-bit immediate used by SSE/AVX instructions. 760def u8imm : Operand<i8> { 761 let PrintMethod = "printU8Imm"; 762 let ParserMatchClass = ImmUnsignedi8AsmOperand; 763 let OperandType = "OPERAND_IMMEDIATE"; 764} 765 766// 16-bit immediate but only 8-bits are significant and they are unsigned. 767// Used by BT instructions. 768def i16u8imm : Operand<i16> { 769 let PrintMethod = "printU8Imm"; 770 let ParserMatchClass = ImmUnsignedi8AsmOperand; 771 let OperandType = "OPERAND_IMMEDIATE"; 772} 773 774// 32-bit immediate but only 8-bits are significant and they are unsigned. 775// Used by some SSE/AVX instructions that use intrinsics. 776def i32u8imm : Operand<i32> { 777 let PrintMethod = "printU8Imm"; 778 let ParserMatchClass = ImmUnsignedi8AsmOperand; 779 let OperandType = "OPERAND_IMMEDIATE"; 780} 781 782// 64-bit immediate but only 8-bits are significant and they are unsigned. 783// Used by BT instructions. 784def i64u8imm : Operand<i64> { 785 let PrintMethod = "printU8Imm"; 786 let ParserMatchClass = ImmUnsignedi8AsmOperand; 787 let OperandType = "OPERAND_IMMEDIATE"; 788} 789 790def lea64_32mem : Operand<i32> { 791 let PrintMethod = "printMemReference"; 792 let MIOperandInfo = (ops GR64, i8imm, GR64_NOSP, i32imm, SEGMENT_REG); 793 let ParserMatchClass = X86MemAsmOperand; 794} 795 796// Memory operands that use 64-bit pointers in both ILP32 and LP64. 797def lea64mem : Operand<i64> { 798 let PrintMethod = "printMemReference"; 799 let MIOperandInfo = (ops GR64, i8imm, GR64_NOSP, i32imm, SEGMENT_REG); 800 let ParserMatchClass = X86MemAsmOperand; 801} 802 803let RenderMethod = "addMaskPairOperands" in { 804 def VK1PairAsmOperand : AsmOperandClass { let Name = "VK1Pair"; } 805 def VK2PairAsmOperand : AsmOperandClass { let Name = "VK2Pair"; } 806 def VK4PairAsmOperand : AsmOperandClass { let Name = "VK4Pair"; } 807 def VK8PairAsmOperand : AsmOperandClass { let Name = "VK8Pair"; } 808 def VK16PairAsmOperand : AsmOperandClass { let Name = "VK16Pair"; } 809} 810 811def VK1Pair : RegisterOperand<VK1PAIR, "printVKPair"> { 812 let ParserMatchClass = VK1PairAsmOperand; 813} 814 815def VK2Pair : RegisterOperand<VK2PAIR, "printVKPair"> { 816 let ParserMatchClass = VK2PairAsmOperand; 817} 818 819def VK4Pair : RegisterOperand<VK4PAIR, "printVKPair"> { 820 let ParserMatchClass = VK4PairAsmOperand; 821} 822 823def VK8Pair : RegisterOperand<VK8PAIR, "printVKPair"> { 824 let ParserMatchClass = VK8PairAsmOperand; 825} 826 827def VK16Pair : RegisterOperand<VK16PAIR, "printVKPair"> { 828 let ParserMatchClass = VK16PairAsmOperand; 829} 830 831//===----------------------------------------------------------------------===// 832// X86 Complex Pattern Definitions. 833// 834 835// Define X86-specific addressing mode. 836def addr : ComplexPattern<iPTR, 5, "selectAddr", [], [SDNPWantParent]>; 837def lea32addr : ComplexPattern<i32, 5, "selectLEAAddr", 838 [add, sub, mul, X86mul_imm, shl, or, frameindex], 839 []>; 840// In 64-bit mode 32-bit LEAs can use RIP-relative addressing. 841def lea64_32addr : ComplexPattern<i32, 5, "selectLEA64_32Addr", 842 [add, sub, mul, X86mul_imm, shl, or, 843 frameindex, X86WrapperRIP], 844 []>; 845 846def tls32addr : ComplexPattern<i32, 5, "selectTLSADDRAddr", 847 [tglobaltlsaddr], []>; 848 849def tls32baseaddr : ComplexPattern<i32, 5, "selectTLSADDRAddr", 850 [tglobaltlsaddr], []>; 851 852def lea64addr : ComplexPattern<i64, 5, "selectLEAAddr", 853 [add, sub, mul, X86mul_imm, shl, or, frameindex, 854 X86WrapperRIP], []>; 855 856def tls64addr : ComplexPattern<i64, 5, "selectTLSADDRAddr", 857 [tglobaltlsaddr], []>; 858 859def tls64baseaddr : ComplexPattern<i64, 5, "selectTLSADDRAddr", 860 [tglobaltlsaddr], []>; 861 862def vectoraddr : ComplexPattern<iPTR, 5, "selectVectorAddr", [],[SDNPWantParent]>; 863 864// A relocatable immediate is an operand that can be relocated by the linker to 865// an immediate, such as a regular symbol in non-PIC code. 866def relocImm : ComplexPattern<iAny, 1, "selectRelocImm", 867 [X86Wrapper], [], 0>; 868 869//===----------------------------------------------------------------------===// 870// X86 Instruction Predicate Definitions. 871def TruePredicate : Predicate<"true">; 872 873def HasCMov : Predicate<"Subtarget->hasCMov()">; 874def NoCMov : Predicate<"!Subtarget->hasCMov()">; 875 876def HasMMX : Predicate<"Subtarget->hasMMX()">; 877def Has3DNow : Predicate<"Subtarget->has3DNow()">; 878def Has3DNowA : Predicate<"Subtarget->has3DNowA()">; 879def HasSSE1 : Predicate<"Subtarget->hasSSE1()">; 880def UseSSE1 : Predicate<"Subtarget->hasSSE1() && !Subtarget->hasAVX()">; 881def HasSSE2 : Predicate<"Subtarget->hasSSE2()">; 882def UseSSE2 : Predicate<"Subtarget->hasSSE2() && !Subtarget->hasAVX()">; 883def HasSSE3 : Predicate<"Subtarget->hasSSE3()">; 884def UseSSE3 : Predicate<"Subtarget->hasSSE3() && !Subtarget->hasAVX()">; 885def HasSSSE3 : Predicate<"Subtarget->hasSSSE3()">; 886def UseSSSE3 : Predicate<"Subtarget->hasSSSE3() && !Subtarget->hasAVX()">; 887def HasSSE41 : Predicate<"Subtarget->hasSSE41()">; 888def NoSSE41 : Predicate<"!Subtarget->hasSSE41()">; 889def UseSSE41 : Predicate<"Subtarget->hasSSE41() && !Subtarget->hasAVX()">; 890def HasSSE42 : Predicate<"Subtarget->hasSSE42()">; 891def UseSSE42 : Predicate<"Subtarget->hasSSE42() && !Subtarget->hasAVX()">; 892def HasSSE4A : Predicate<"Subtarget->hasSSE4A()">; 893def NoAVX : Predicate<"!Subtarget->hasAVX()">; 894def HasAVX : Predicate<"Subtarget->hasAVX()">; 895def HasAVX2 : Predicate<"Subtarget->hasAVX2()">; 896def HasAVX1Only : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX2()">; 897def HasAVX512 : Predicate<"Subtarget->hasAVX512()">; 898def UseAVX : Predicate<"Subtarget->hasAVX() && !Subtarget->hasAVX512()">; 899def UseAVX2 : Predicate<"Subtarget->hasAVX2() && !Subtarget->hasAVX512()">; 900def NoAVX512 : Predicate<"!Subtarget->hasAVX512()">; 901def HasCDI : Predicate<"Subtarget->hasCDI()">; 902def HasVPOPCNTDQ : Predicate<"Subtarget->hasVPOPCNTDQ()">; 903def HasPFI : Predicate<"Subtarget->hasPFI()">; 904def HasERI : Predicate<"Subtarget->hasERI()">; 905def HasDQI : Predicate<"Subtarget->hasDQI()">; 906def NoDQI : Predicate<"!Subtarget->hasDQI()">; 907def HasBWI : Predicate<"Subtarget->hasBWI()">; 908def NoBWI : Predicate<"!Subtarget->hasBWI()">; 909def HasVLX : Predicate<"Subtarget->hasVLX()">; 910def NoVLX : Predicate<"!Subtarget->hasVLX()">; 911def NoVLX_Or_NoBWI : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasBWI()">; 912def NoVLX_Or_NoDQI : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasDQI()">; 913def PKU : Predicate<"Subtarget->hasPKU()">; 914def HasVNNI : Predicate<"Subtarget->hasVNNI()">; 915def HasVP2INTERSECT : Predicate<"Subtarget->hasVP2INTERSECT()">; 916def HasBF16 : Predicate<"Subtarget->hasBF16()">; 917def HasAVXVNNI : Predicate <"Subtarget->hasAVXVNNI()">; 918def NoVLX_Or_NoVNNI : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasVNNI()">; 919 920def HasBITALG : Predicate<"Subtarget->hasBITALG()">; 921def HasPOPCNT : Predicate<"Subtarget->hasPOPCNT()">; 922def HasAES : Predicate<"Subtarget->hasAES()">; 923def HasVAES : Predicate<"Subtarget->hasVAES()">; 924def NoVLX_Or_NoVAES : Predicate<"!Subtarget->hasVLX() || !Subtarget->hasVAES()">; 925def HasFXSR : Predicate<"Subtarget->hasFXSR()">; 926def HasXSAVE : Predicate<"Subtarget->hasXSAVE()">; 927def HasXSAVEOPT : Predicate<"Subtarget->hasXSAVEOPT()">; 928def HasXSAVEC : Predicate<"Subtarget->hasXSAVEC()">; 929def HasXSAVES : Predicate<"Subtarget->hasXSAVES()">; 930def HasPCLMUL : Predicate<"Subtarget->hasPCLMUL()">; 931def NoVLX_Or_NoVPCLMULQDQ : 932 Predicate<"!Subtarget->hasVLX() || !Subtarget->hasVPCLMULQDQ()">; 933def HasVPCLMULQDQ : Predicate<"Subtarget->hasVPCLMULQDQ()">; 934def HasGFNI : Predicate<"Subtarget->hasGFNI()">; 935def HasFMA : Predicate<"Subtarget->hasFMA()">; 936def HasFMA4 : Predicate<"Subtarget->hasFMA4()">; 937def NoFMA4 : Predicate<"!Subtarget->hasFMA4()">; 938def HasXOP : Predicate<"Subtarget->hasXOP()">; 939def HasTBM : Predicate<"Subtarget->hasTBM()">; 940def NoTBM : Predicate<"!Subtarget->hasTBM()">; 941def HasLWP : Predicate<"Subtarget->hasLWP()">; 942def HasMOVBE : Predicate<"Subtarget->hasMOVBE()">; 943def HasRDRAND : Predicate<"Subtarget->hasRDRAND()">; 944def HasF16C : Predicate<"Subtarget->hasF16C()">; 945def HasFSGSBase : Predicate<"Subtarget->hasFSGSBase()">; 946def HasLZCNT : Predicate<"Subtarget->hasLZCNT()">; 947def HasBMI : Predicate<"Subtarget->hasBMI()">; 948def HasBMI2 : Predicate<"Subtarget->hasBMI2()">; 949def NoBMI2 : Predicate<"!Subtarget->hasBMI2()">; 950def HasVBMI : Predicate<"Subtarget->hasVBMI()">; 951def HasVBMI2 : Predicate<"Subtarget->hasVBMI2()">; 952def HasIFMA : Predicate<"Subtarget->hasIFMA()">; 953def HasRTM : Predicate<"Subtarget->hasRTM()">; 954def HasADX : Predicate<"Subtarget->hasADX()">; 955def HasSHA : Predicate<"Subtarget->hasSHA()">; 956def HasSGX : Predicate<"Subtarget->hasSGX()">; 957def HasRDSEED : Predicate<"Subtarget->hasRDSEED()">; 958def HasSSEPrefetch : Predicate<"Subtarget->hasSSEPrefetch()">; 959def NoSSEPrefetch : Predicate<"!Subtarget->hasSSEPrefetch()">; 960def HasPrefetchW : Predicate<"Subtarget->hasPrefetchW()">; 961def HasPREFETCHWT1 : Predicate<"Subtarget->hasPREFETCHWT1()">; 962def HasLAHFSAHF : Predicate<"Subtarget->hasLAHFSAHF()">; 963def HasMWAITX : Predicate<"Subtarget->hasMWAITX()">; 964def HasCLZERO : Predicate<"Subtarget->hasCLZERO()">; 965def HasCLDEMOTE : Predicate<"Subtarget->hasCLDEMOTE()">; 966def HasMOVDIRI : Predicate<"Subtarget->hasMOVDIRI()">; 967def HasMOVDIR64B : Predicate<"Subtarget->hasMOVDIR64B()">; 968def HasPTWRITE : Predicate<"Subtarget->hasPTWRITE()">; 969def FPStackf32 : Predicate<"!Subtarget->hasSSE1()">; 970def FPStackf64 : Predicate<"!Subtarget->hasSSE2()">; 971def HasSHSTK : Predicate<"Subtarget->hasSHSTK()">; 972def HasCLFLUSHOPT : Predicate<"Subtarget->hasCLFLUSHOPT()">; 973def HasCLWB : Predicate<"Subtarget->hasCLWB()">; 974def HasWBNOINVD : Predicate<"Subtarget->hasWBNOINVD()">; 975def HasRDPID : Predicate<"Subtarget->hasRDPID()">; 976def HasWAITPKG : Predicate<"Subtarget->hasWAITPKG()">; 977def HasINVPCID : Predicate<"Subtarget->hasINVPCID()">; 978def HasCmpxchg8b : Predicate<"Subtarget->hasCmpxchg8b()">; 979def HasCmpxchg16b: Predicate<"Subtarget->hasCmpxchg16b()">; 980def HasPCONFIG : Predicate<"Subtarget->hasPCONFIG()">; 981def HasENQCMD : Predicate<"Subtarget->hasENQCMD()">; 982def HasKL : Predicate<"Subtarget->hasKL()">; 983def HasWIDEKL : Predicate<"Subtarget->hasWIDEKL()">; 984def HasHRESET : Predicate<"Subtarget->hasHRESET()">; 985def HasSERIALIZE : Predicate<"Subtarget->hasSERIALIZE()">; 986def HasTSXLDTRK : Predicate<"Subtarget->hasTSXLDTRK()">; 987def HasAMXTILE : Predicate<"Subtarget->hasAMXTILE()">; 988def HasAMXBF16 : Predicate<"Subtarget->hasAMXBF16()">; 989def HasAMXINT8 : Predicate<"Subtarget->hasAMXINT8()">; 990def HasUINTR : Predicate<"Subtarget->hasUINTR()">; 991def Not64BitMode : Predicate<"!Subtarget->is64Bit()">, 992 AssemblerPredicate<(all_of (not Mode64Bit)), "Not 64-bit mode">; 993def In64BitMode : Predicate<"Subtarget->is64Bit()">, 994 AssemblerPredicate<(all_of Mode64Bit), "64-bit mode">; 995def IsLP64 : Predicate<"Subtarget->isTarget64BitLP64()">; 996def NotLP64 : Predicate<"!Subtarget->isTarget64BitLP64()">; 997def In16BitMode : Predicate<"Subtarget->is16Bit()">, 998 AssemblerPredicate<(all_of Mode16Bit), "16-bit mode">; 999def Not16BitMode : Predicate<"!Subtarget->is16Bit()">, 1000 AssemblerPredicate<(all_of (not Mode16Bit)), "Not 16-bit mode">; 1001def In32BitMode : Predicate<"Subtarget->is32Bit()">, 1002 AssemblerPredicate<(all_of Mode32Bit), "32-bit mode">; 1003def IsWin64 : Predicate<"Subtarget->isTargetWin64()">; 1004def NotWin64 : Predicate<"!Subtarget->isTargetWin64()">; 1005def NotWin64WithoutFP : Predicate<"!Subtarget->isTargetWin64() ||" 1006 "Subtarget->getFrameLowering()->hasFP(*MF)"> { 1007 let RecomputePerFunction = 1; 1008} 1009def IsPS4 : Predicate<"Subtarget->isTargetPS4()">; 1010def NotPS4 : Predicate<"!Subtarget->isTargetPS4()">; 1011def IsNaCl : Predicate<"Subtarget->isTargetNaCl()">; 1012def NotNaCl : Predicate<"!Subtarget->isTargetNaCl()">; 1013def SmallCode : Predicate<"TM.getCodeModel() == CodeModel::Small">; 1014def KernelCode : Predicate<"TM.getCodeModel() == CodeModel::Kernel">; 1015def NearData : Predicate<"TM.getCodeModel() == CodeModel::Small ||" 1016 "TM.getCodeModel() == CodeModel::Kernel">; 1017def IsNotPIC : Predicate<"!TM.isPositionIndependent()">; 1018 1019// We could compute these on a per-module basis but doing so requires accessing 1020// the Function object through the <Target>Subtarget and objections were raised 1021// to that (see post-commit review comments for r301750). 1022let RecomputePerFunction = 1 in { 1023 def OptForSize : Predicate<"shouldOptForSize(MF)">; 1024 def OptForMinSize : Predicate<"MF->getFunction().hasMinSize()">; 1025 def OptForSpeed : Predicate<"!shouldOptForSize(MF)">; 1026 def UseIncDec : Predicate<"!Subtarget->slowIncDec() || " 1027 "shouldOptForSize(MF)">; 1028 def NoSSE41_Or_OptForSize : Predicate<"shouldOptForSize(MF) || " 1029 "!Subtarget->hasSSE41()">; 1030} 1031 1032def CallImmAddr : Predicate<"Subtarget->isLegalToCallImmediateAddr()">; 1033def FavorMemIndirectCall : Predicate<"!Subtarget->slowTwoMemOps()">; 1034def HasFastMem32 : Predicate<"!Subtarget->isUnalignedMem32Slow()">; 1035def HasFastLZCNT : Predicate<"Subtarget->hasFastLZCNT()">; 1036def HasFastSHLDRotate : Predicate<"Subtarget->hasFastSHLDRotate()">; 1037def HasERMSB : Predicate<"Subtarget->hasERMSB()">; 1038def HasFSRM : Predicate<"Subtarget->hasFSRM()">; 1039def HasMFence : Predicate<"Subtarget->hasMFence()">; 1040def UseIndirectThunkCalls : Predicate<"Subtarget->useIndirectThunkCalls()">; 1041def NotUseIndirectThunkCalls : Predicate<"!Subtarget->useIndirectThunkCalls()">; 1042 1043//===----------------------------------------------------------------------===// 1044// X86 Instruction Format Definitions. 1045// 1046 1047include "X86InstrFormats.td" 1048 1049//===----------------------------------------------------------------------===// 1050// Pattern fragments. 1051// 1052 1053// X86 specific condition code. These correspond to CondCode in 1054// X86InstrInfo.h. They must be kept in synch. 1055def X86_COND_O : PatLeaf<(i8 0)>; 1056def X86_COND_NO : PatLeaf<(i8 1)>; 1057def X86_COND_B : PatLeaf<(i8 2)>; // alt. COND_C 1058def X86_COND_AE : PatLeaf<(i8 3)>; // alt. COND_NC 1059def X86_COND_E : PatLeaf<(i8 4)>; // alt. COND_Z 1060def X86_COND_NE : PatLeaf<(i8 5)>; // alt. COND_NZ 1061def X86_COND_BE : PatLeaf<(i8 6)>; // alt. COND_NA 1062def X86_COND_A : PatLeaf<(i8 7)>; // alt. COND_NBE 1063def X86_COND_S : PatLeaf<(i8 8)>; 1064def X86_COND_NS : PatLeaf<(i8 9)>; 1065def X86_COND_P : PatLeaf<(i8 10)>; // alt. COND_PE 1066def X86_COND_NP : PatLeaf<(i8 11)>; // alt. COND_PO 1067def X86_COND_L : PatLeaf<(i8 12)>; // alt. COND_NGE 1068def X86_COND_GE : PatLeaf<(i8 13)>; // alt. COND_NL 1069def X86_COND_LE : PatLeaf<(i8 14)>; // alt. COND_NG 1070def X86_COND_G : PatLeaf<(i8 15)>; // alt. COND_NLE 1071 1072def i16immSExt8 : ImmLeaf<i16, [{ return isInt<8>(Imm); }]>; 1073def i32immSExt8 : ImmLeaf<i32, [{ return isInt<8>(Imm); }]>; 1074def i64immSExt8 : ImmLeaf<i64, [{ return isInt<8>(Imm); }]>; 1075def i64immSExt32 : ImmLeaf<i64, [{ return isInt<32>(Imm); }]>; 1076def i64timmSExt32 : TImmLeaf<i64, [{ return isInt<32>(Imm); }]>; 1077 1078def i16relocImmSExt8 : PatLeaf<(i16 relocImm), [{ 1079 return isSExtAbsoluteSymbolRef(8, N); 1080}]>; 1081def i32relocImmSExt8 : PatLeaf<(i32 relocImm), [{ 1082 return isSExtAbsoluteSymbolRef(8, N); 1083}]>; 1084def i64relocImmSExt8 : PatLeaf<(i64 relocImm), [{ 1085 return isSExtAbsoluteSymbolRef(8, N); 1086}]>; 1087def i64relocImmSExt32 : PatLeaf<(i64 relocImm), [{ 1088 return isSExtAbsoluteSymbolRef(32, N); 1089}]>; 1090 1091// If we have multiple users of an immediate, it's much smaller to reuse 1092// the register, rather than encode the immediate in every instruction. 1093// This has the risk of increasing register pressure from stretched live 1094// ranges, however, the immediates should be trivial to rematerialize by 1095// the RA in the event of high register pressure. 1096// TODO : This is currently enabled for stores and binary ops. There are more 1097// cases for which this can be enabled, though this catches the bulk of the 1098// issues. 1099// TODO2 : This should really also be enabled under O2, but there's currently 1100// an issue with RA where we don't pull the constants into their users 1101// when we rematerialize them. I'll follow-up on enabling O2 after we fix that 1102// issue. 1103// TODO3 : This is currently limited to single basic blocks (DAG creation 1104// pulls block immediates to the top and merges them if necessary). 1105// Eventually, it would be nice to allow ConstantHoisting to merge constants 1106// globally for potentially added savings. 1107// 1108def imm_su : PatLeaf<(imm), [{ 1109 return !shouldAvoidImmediateInstFormsForSize(N); 1110}]>; 1111def i64immSExt32_su : PatLeaf<(i64immSExt32), [{ 1112 return !shouldAvoidImmediateInstFormsForSize(N); 1113}]>; 1114 1115def relocImm8_su : PatLeaf<(i8 relocImm), [{ 1116 return !shouldAvoidImmediateInstFormsForSize(N); 1117}]>; 1118def relocImm16_su : PatLeaf<(i16 relocImm), [{ 1119 return !shouldAvoidImmediateInstFormsForSize(N); 1120}]>; 1121def relocImm32_su : PatLeaf<(i32 relocImm), [{ 1122 return !shouldAvoidImmediateInstFormsForSize(N); 1123}]>; 1124 1125def i16relocImmSExt8_su : PatLeaf<(i16relocImmSExt8), [{ 1126 return !shouldAvoidImmediateInstFormsForSize(N); 1127}]>; 1128def i32relocImmSExt8_su : PatLeaf<(i32relocImmSExt8), [{ 1129 return !shouldAvoidImmediateInstFormsForSize(N); 1130}]>; 1131def i64relocImmSExt8_su : PatLeaf<(i64relocImmSExt8), [{ 1132 return !shouldAvoidImmediateInstFormsForSize(N); 1133}]>; 1134def i64relocImmSExt32_su : PatLeaf<(i64relocImmSExt32), [{ 1135 return !shouldAvoidImmediateInstFormsForSize(N); 1136}]>; 1137 1138def i16immSExt8_su : PatLeaf<(i16immSExt8), [{ 1139 return !shouldAvoidImmediateInstFormsForSize(N); 1140}]>; 1141def i32immSExt8_su : PatLeaf<(i32immSExt8), [{ 1142 return !shouldAvoidImmediateInstFormsForSize(N); 1143}]>; 1144def i64immSExt8_su : PatLeaf<(i64immSExt8), [{ 1145 return !shouldAvoidImmediateInstFormsForSize(N); 1146}]>; 1147 1148// i64immZExt32 predicate - True if the 64-bit immediate fits in a 32-bit 1149// unsigned field. 1150def i64immZExt32 : ImmLeaf<i64, [{ return isUInt<32>(Imm); }]>; 1151 1152def i64immZExt32SExt8 : ImmLeaf<i64, [{ 1153 return isUInt<32>(Imm) && isInt<8>(static_cast<int32_t>(Imm)); 1154}]>; 1155 1156// Helper fragments for loads. 1157 1158// It's safe to fold a zextload/extload from i1 as a regular i8 load. The 1159// upper bits are guaranteed to be zero and we were going to emit a MOV8rm 1160// which might get folded during peephole anyway. 1161def loadi8 : PatFrag<(ops node:$ptr), (i8 (unindexedload node:$ptr)), [{ 1162 LoadSDNode *LD = cast<LoadSDNode>(N); 1163 ISD::LoadExtType ExtType = LD->getExtensionType(); 1164 return ExtType == ISD::NON_EXTLOAD || ExtType == ISD::EXTLOAD || 1165 ExtType == ISD::ZEXTLOAD; 1166}]>; 1167 1168// It's always safe to treat a anyext i16 load as a i32 load if the i16 is 1169// known to be 32-bit aligned or better. Ditto for i8 to i16. 1170def loadi16 : PatFrag<(ops node:$ptr), (i16 (unindexedload node:$ptr)), [{ 1171 LoadSDNode *LD = cast<LoadSDNode>(N); 1172 ISD::LoadExtType ExtType = LD->getExtensionType(); 1173 if (ExtType == ISD::NON_EXTLOAD) 1174 return true; 1175 if (ExtType == ISD::EXTLOAD && EnablePromoteAnyextLoad) 1176 return LD->getAlignment() >= 2 && LD->isSimple(); 1177 return false; 1178}]>; 1179 1180def loadi32 : PatFrag<(ops node:$ptr), (i32 (unindexedload node:$ptr)), [{ 1181 LoadSDNode *LD = cast<LoadSDNode>(N); 1182 ISD::LoadExtType ExtType = LD->getExtensionType(); 1183 if (ExtType == ISD::NON_EXTLOAD) 1184 return true; 1185 if (ExtType == ISD::EXTLOAD && EnablePromoteAnyextLoad) 1186 return LD->getAlignment() >= 4 && LD->isSimple(); 1187 return false; 1188}]>; 1189 1190def loadi64 : PatFrag<(ops node:$ptr), (i64 (load node:$ptr))>; 1191def loadf32 : PatFrag<(ops node:$ptr), (f32 (load node:$ptr))>; 1192def loadf64 : PatFrag<(ops node:$ptr), (f64 (load node:$ptr))>; 1193def loadf80 : PatFrag<(ops node:$ptr), (f80 (load node:$ptr))>; 1194def loadf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr))>; 1195def alignedloadf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr)), [{ 1196 LoadSDNode *Ld = cast<LoadSDNode>(N); 1197 return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize(); 1198}]>; 1199def memopf128 : PatFrag<(ops node:$ptr), (f128 (load node:$ptr)), [{ 1200 LoadSDNode *Ld = cast<LoadSDNode>(N); 1201 return Subtarget->hasSSEUnalignedMem() || 1202 Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize(); 1203}]>; 1204 1205def sextloadi16i8 : PatFrag<(ops node:$ptr), (i16 (sextloadi8 node:$ptr))>; 1206def sextloadi32i8 : PatFrag<(ops node:$ptr), (i32 (sextloadi8 node:$ptr))>; 1207def sextloadi32i16 : PatFrag<(ops node:$ptr), (i32 (sextloadi16 node:$ptr))>; 1208def sextloadi64i8 : PatFrag<(ops node:$ptr), (i64 (sextloadi8 node:$ptr))>; 1209def sextloadi64i16 : PatFrag<(ops node:$ptr), (i64 (sextloadi16 node:$ptr))>; 1210def sextloadi64i32 : PatFrag<(ops node:$ptr), (i64 (sextloadi32 node:$ptr))>; 1211 1212def zextloadi8i1 : PatFrag<(ops node:$ptr), (i8 (zextloadi1 node:$ptr))>; 1213def zextloadi16i1 : PatFrag<(ops node:$ptr), (i16 (zextloadi1 node:$ptr))>; 1214def zextloadi32i1 : PatFrag<(ops node:$ptr), (i32 (zextloadi1 node:$ptr))>; 1215def zextloadi16i8 : PatFrag<(ops node:$ptr), (i16 (zextloadi8 node:$ptr))>; 1216def zextloadi32i8 : PatFrag<(ops node:$ptr), (i32 (zextloadi8 node:$ptr))>; 1217def zextloadi32i16 : PatFrag<(ops node:$ptr), (i32 (zextloadi16 node:$ptr))>; 1218def zextloadi64i1 : PatFrag<(ops node:$ptr), (i64 (zextloadi1 node:$ptr))>; 1219def zextloadi64i8 : PatFrag<(ops node:$ptr), (i64 (zextloadi8 node:$ptr))>; 1220def zextloadi64i16 : PatFrag<(ops node:$ptr), (i64 (zextloadi16 node:$ptr))>; 1221def zextloadi64i32 : PatFrag<(ops node:$ptr), (i64 (zextloadi32 node:$ptr))>; 1222 1223def extloadi8i1 : PatFrag<(ops node:$ptr), (i8 (extloadi1 node:$ptr))>; 1224def extloadi16i1 : PatFrag<(ops node:$ptr), (i16 (extloadi1 node:$ptr))>; 1225def extloadi32i1 : PatFrag<(ops node:$ptr), (i32 (extloadi1 node:$ptr))>; 1226def extloadi16i8 : PatFrag<(ops node:$ptr), (i16 (extloadi8 node:$ptr))>; 1227def extloadi32i8 : PatFrag<(ops node:$ptr), (i32 (extloadi8 node:$ptr))>; 1228def extloadi32i16 : PatFrag<(ops node:$ptr), (i32 (extloadi16 node:$ptr))>; 1229def extloadi64i1 : PatFrag<(ops node:$ptr), (i64 (extloadi1 node:$ptr))>; 1230def extloadi64i8 : PatFrag<(ops node:$ptr), (i64 (extloadi8 node:$ptr))>; 1231def extloadi64i16 : PatFrag<(ops node:$ptr), (i64 (extloadi16 node:$ptr))>; 1232 1233// We can treat an i8/i16 extending load to i64 as a 32 bit load if its known 1234// to be 4 byte aligned or better. 1235def extloadi64i32 : PatFrag<(ops node:$ptr), (i64 (unindexedload node:$ptr)), [{ 1236 LoadSDNode *LD = cast<LoadSDNode>(N); 1237 ISD::LoadExtType ExtType = LD->getExtensionType(); 1238 if (ExtType != ISD::EXTLOAD) 1239 return false; 1240 if (LD->getMemoryVT() == MVT::i32) 1241 return true; 1242 1243 return LD->getAlignment() >= 4 && LD->isSimple(); 1244}]>; 1245 1246 1247// An 'and' node with a single use. 1248def and_su : PatFrag<(ops node:$lhs, node:$rhs), (and node:$lhs, node:$rhs), [{ 1249 return N->hasOneUse(); 1250}]>; 1251// An 'srl' node with a single use. 1252def srl_su : PatFrag<(ops node:$lhs, node:$rhs), (srl node:$lhs, node:$rhs), [{ 1253 return N->hasOneUse(); 1254}]>; 1255// An 'trunc' node with a single use. 1256def trunc_su : PatFrag<(ops node:$src), (trunc node:$src), [{ 1257 return N->hasOneUse(); 1258}]>; 1259 1260//===----------------------------------------------------------------------===// 1261// Instruction list. 1262// 1263 1264// Nop 1265let hasSideEffects = 0, SchedRW = [WriteNop] in { 1266 def NOOP : I<0x90, RawFrm, (outs), (ins), "nop", []>; 1267 def NOOPW : I<0x1f, MRMXm, (outs), (ins i16mem:$zero), 1268 "nop{w}\t$zero", []>, TB, OpSize16, NotMemoryFoldable; 1269 def NOOPL : I<0x1f, MRMXm, (outs), (ins i32mem:$zero), 1270 "nop{l}\t$zero", []>, TB, OpSize32, NotMemoryFoldable; 1271 def NOOPQ : RI<0x1f, MRMXm, (outs), (ins i64mem:$zero), 1272 "nop{q}\t$zero", []>, TB, NotMemoryFoldable, 1273 Requires<[In64BitMode]>; 1274 // Also allow register so we can assemble/disassemble 1275 def NOOPWr : I<0x1f, MRMXr, (outs), (ins GR16:$zero), 1276 "nop{w}\t$zero", []>, TB, OpSize16, NotMemoryFoldable; 1277 def NOOPLr : I<0x1f, MRMXr, (outs), (ins GR32:$zero), 1278 "nop{l}\t$zero", []>, TB, OpSize32, NotMemoryFoldable; 1279 def NOOPQr : RI<0x1f, MRMXr, (outs), (ins GR64:$zero), 1280 "nop{q}\t$zero", []>, TB, NotMemoryFoldable, 1281 Requires<[In64BitMode]>; 1282} 1283 1284 1285// Constructing a stack frame. 1286def ENTER : Ii16<0xC8, RawFrmImm8, (outs), (ins i16imm:$len, i8imm:$lvl), 1287 "enter\t$len, $lvl", []>, Sched<[WriteMicrocoded]>; 1288 1289let SchedRW = [WriteALU] in { 1290let Defs = [EBP, ESP], Uses = [EBP, ESP], mayLoad = 1, hasSideEffects=0 in 1291def LEAVE : I<0xC9, RawFrm, (outs), (ins), "leave", []>, 1292 Requires<[Not64BitMode]>; 1293 1294let Defs = [RBP,RSP], Uses = [RBP,RSP], mayLoad = 1, hasSideEffects = 0 in 1295def LEAVE64 : I<0xC9, RawFrm, (outs), (ins), "leave", []>, 1296 Requires<[In64BitMode]>; 1297} // SchedRW 1298 1299//===----------------------------------------------------------------------===// 1300// Miscellaneous Instructions. 1301// 1302 1303let isBarrier = 1, hasSideEffects = 1, usesCustomInserter = 1, 1304 SchedRW = [WriteSystem] in 1305 def Int_eh_sjlj_setup_dispatch 1306 : PseudoI<(outs), (ins), [(X86eh_sjlj_setup_dispatch)]>; 1307 1308let Defs = [ESP], Uses = [ESP], hasSideEffects=0 in { 1309let mayLoad = 1, SchedRW = [WriteLoad] in { 1310def POP16r : I<0x58, AddRegFrm, (outs GR16:$reg), (ins), "pop{w}\t$reg", []>, 1311 OpSize16; 1312def POP32r : I<0x58, AddRegFrm, (outs GR32:$reg), (ins), "pop{l}\t$reg", []>, 1313 OpSize32, Requires<[Not64BitMode]>; 1314// Long form for the disassembler. 1315let isCodeGenOnly = 1, ForceDisassemble = 1 in { 1316def POP16rmr: I<0x8F, MRM0r, (outs GR16:$reg), (ins), "pop{w}\t$reg", []>, 1317 OpSize16, NotMemoryFoldable; 1318def POP32rmr: I<0x8F, MRM0r, (outs GR32:$reg), (ins), "pop{l}\t$reg", []>, 1319 OpSize32, Requires<[Not64BitMode]>, NotMemoryFoldable; 1320} // isCodeGenOnly = 1, ForceDisassemble = 1 1321} // mayLoad, SchedRW 1322let mayStore = 1, mayLoad = 1, SchedRW = [WriteCopy] in { 1323def POP16rmm: I<0x8F, MRM0m, (outs), (ins i16mem:$dst), "pop{w}\t$dst", []>, 1324 OpSize16; 1325def POP32rmm: I<0x8F, MRM0m, (outs), (ins i32mem:$dst), "pop{l}\t$dst", []>, 1326 OpSize32, Requires<[Not64BitMode]>; 1327} // mayStore, mayLoad, SchedRW 1328 1329let mayStore = 1, SchedRW = [WriteStore] in { 1330def PUSH16r : I<0x50, AddRegFrm, (outs), (ins GR16:$reg), "push{w}\t$reg",[]>, 1331 OpSize16; 1332def PUSH32r : I<0x50, AddRegFrm, (outs), (ins GR32:$reg), "push{l}\t$reg",[]>, 1333 OpSize32, Requires<[Not64BitMode]>; 1334// Long form for the disassembler. 1335let isCodeGenOnly = 1, ForceDisassemble = 1 in { 1336def PUSH16rmr: I<0xFF, MRM6r, (outs), (ins GR16:$reg), "push{w}\t$reg",[]>, 1337 OpSize16, NotMemoryFoldable; 1338def PUSH32rmr: I<0xFF, MRM6r, (outs), (ins GR32:$reg), "push{l}\t$reg",[]>, 1339 OpSize32, Requires<[Not64BitMode]>, NotMemoryFoldable; 1340} // isCodeGenOnly = 1, ForceDisassemble = 1 1341 1342def PUSH16i8 : Ii8<0x6a, RawFrm, (outs), (ins i16i8imm:$imm), 1343 "push{w}\t$imm", []>, OpSize16; 1344def PUSHi16 : Ii16<0x68, RawFrm, (outs), (ins i16imm:$imm), 1345 "push{w}\t$imm", []>, OpSize16; 1346 1347def PUSH32i8 : Ii8<0x6a, RawFrm, (outs), (ins i32i8imm:$imm), 1348 "push{l}\t$imm", []>, OpSize32, 1349 Requires<[Not64BitMode]>; 1350def PUSHi32 : Ii32<0x68, RawFrm, (outs), (ins i32imm:$imm), 1351 "push{l}\t$imm", []>, OpSize32, 1352 Requires<[Not64BitMode]>; 1353} // mayStore, SchedRW 1354 1355let mayLoad = 1, mayStore = 1, SchedRW = [WriteCopy] in { 1356def PUSH16rmm: I<0xFF, MRM6m, (outs), (ins i16mem:$src), "push{w}\t$src", []>, 1357 OpSize16; 1358def PUSH32rmm: I<0xFF, MRM6m, (outs), (ins i32mem:$src), "push{l}\t$src", []>, 1359 OpSize32, Requires<[Not64BitMode]>; 1360} // mayLoad, mayStore, SchedRW 1361 1362} 1363 1364let mayLoad = 1, mayStore = 1, usesCustomInserter = 1, 1365 SchedRW = [WriteRMW], Defs = [ESP] in { 1366 let Uses = [ESP] in 1367 def RDFLAGS32 : PseudoI<(outs GR32:$dst), (ins), 1368 [(set GR32:$dst, (int_x86_flags_read_u32))]>, 1369 Requires<[Not64BitMode]>; 1370 1371 let Uses = [RSP] in 1372 def RDFLAGS64 : PseudoI<(outs GR64:$dst), (ins), 1373 [(set GR64:$dst, (int_x86_flags_read_u64))]>, 1374 Requires<[In64BitMode]>; 1375} 1376 1377let mayLoad = 1, mayStore = 1, usesCustomInserter = 1, 1378 SchedRW = [WriteRMW] in { 1379 let Defs = [ESP, EFLAGS, DF], Uses = [ESP] in 1380 def WRFLAGS32 : PseudoI<(outs), (ins GR32:$src), 1381 [(int_x86_flags_write_u32 GR32:$src)]>, 1382 Requires<[Not64BitMode]>; 1383 1384 let Defs = [RSP, EFLAGS, DF], Uses = [RSP] in 1385 def WRFLAGS64 : PseudoI<(outs), (ins GR64:$src), 1386 [(int_x86_flags_write_u64 GR64:$src)]>, 1387 Requires<[In64BitMode]>; 1388} 1389 1390let Defs = [ESP, EFLAGS, DF], Uses = [ESP], mayLoad = 1, hasSideEffects=0, 1391 SchedRW = [WriteLoad] in { 1392def POPF16 : I<0x9D, RawFrm, (outs), (ins), "popf{w}", []>, OpSize16; 1393def POPF32 : I<0x9D, RawFrm, (outs), (ins), "popf{l|d}", []>, OpSize32, 1394 Requires<[Not64BitMode]>; 1395} 1396 1397let Defs = [ESP], Uses = [ESP, EFLAGS, DF], mayStore = 1, hasSideEffects=0, 1398 SchedRW = [WriteStore] in { 1399def PUSHF16 : I<0x9C, RawFrm, (outs), (ins), "pushf{w}", []>, OpSize16; 1400def PUSHF32 : I<0x9C, RawFrm, (outs), (ins), "pushf{l|d}", []>, OpSize32, 1401 Requires<[Not64BitMode]>; 1402} 1403 1404let Defs = [RSP], Uses = [RSP], hasSideEffects=0 in { 1405let mayLoad = 1, SchedRW = [WriteLoad] in { 1406def POP64r : I<0x58, AddRegFrm, (outs GR64:$reg), (ins), "pop{q}\t$reg", []>, 1407 OpSize32, Requires<[In64BitMode]>; 1408// Long form for the disassembler. 1409let isCodeGenOnly = 1, ForceDisassemble = 1 in { 1410def POP64rmr: I<0x8F, MRM0r, (outs GR64:$reg), (ins), "pop{q}\t$reg", []>, 1411 OpSize32, Requires<[In64BitMode]>, NotMemoryFoldable; 1412} // isCodeGenOnly = 1, ForceDisassemble = 1 1413} // mayLoad, SchedRW 1414let mayLoad = 1, mayStore = 1, SchedRW = [WriteCopy] in 1415def POP64rmm: I<0x8F, MRM0m, (outs), (ins i64mem:$dst), "pop{q}\t$dst", []>, 1416 OpSize32, Requires<[In64BitMode]>; 1417let mayStore = 1, SchedRW = [WriteStore] in { 1418def PUSH64r : I<0x50, AddRegFrm, (outs), (ins GR64:$reg), "push{q}\t$reg", []>, 1419 OpSize32, Requires<[In64BitMode]>; 1420// Long form for the disassembler. 1421let isCodeGenOnly = 1, ForceDisassemble = 1 in { 1422def PUSH64rmr: I<0xFF, MRM6r, (outs), (ins GR64:$reg), "push{q}\t$reg", []>, 1423 OpSize32, Requires<[In64BitMode]>, NotMemoryFoldable; 1424} // isCodeGenOnly = 1, ForceDisassemble = 1 1425} // mayStore, SchedRW 1426let mayLoad = 1, mayStore = 1, SchedRW = [WriteCopy] in { 1427def PUSH64rmm: I<0xFF, MRM6m, (outs), (ins i64mem:$src), "push{q}\t$src", []>, 1428 OpSize32, Requires<[In64BitMode]>; 1429} // mayLoad, mayStore, SchedRW 1430} 1431 1432let Defs = [RSP], Uses = [RSP], hasSideEffects = 0, mayStore = 1, 1433 SchedRW = [WriteStore] in { 1434def PUSH64i8 : Ii8<0x6a, RawFrm, (outs), (ins i64i8imm:$imm), 1435 "push{q}\t$imm", []>, OpSize32, 1436 Requires<[In64BitMode]>; 1437def PUSH64i32 : Ii32S<0x68, RawFrm, (outs), (ins i64i32imm:$imm), 1438 "push{q}\t$imm", []>, OpSize32, 1439 Requires<[In64BitMode]>; 1440} 1441 1442let Defs = [RSP, EFLAGS, DF], Uses = [RSP], mayLoad = 1, hasSideEffects=0 in 1443def POPF64 : I<0x9D, RawFrm, (outs), (ins), "popfq", []>, 1444 OpSize32, Requires<[In64BitMode]>, Sched<[WriteLoad]>; 1445let Defs = [RSP], Uses = [RSP, EFLAGS, DF], mayStore = 1, hasSideEffects=0 in 1446def PUSHF64 : I<0x9C, RawFrm, (outs), (ins), "pushfq", []>, 1447 OpSize32, Requires<[In64BitMode]>, Sched<[WriteStore]>; 1448 1449let Defs = [EDI, ESI, EBP, EBX, EDX, ECX, EAX, ESP], Uses = [ESP], 1450 mayLoad = 1, hasSideEffects = 0, SchedRW = [WriteLoad] in { 1451def POPA32 : I<0x61, RawFrm, (outs), (ins), "popal", []>, 1452 OpSize32, Requires<[Not64BitMode]>; 1453def POPA16 : I<0x61, RawFrm, (outs), (ins), "popaw", []>, 1454 OpSize16, Requires<[Not64BitMode]>; 1455} 1456let Defs = [ESP], Uses = [EDI, ESI, EBP, EBX, EDX, ECX, EAX, ESP], 1457 mayStore = 1, hasSideEffects = 0, SchedRW = [WriteStore] in { 1458def PUSHA32 : I<0x60, RawFrm, (outs), (ins), "pushal", []>, 1459 OpSize32, Requires<[Not64BitMode]>; 1460def PUSHA16 : I<0x60, RawFrm, (outs), (ins), "pushaw", []>, 1461 OpSize16, Requires<[Not64BitMode]>; 1462} 1463 1464let Constraints = "$src = $dst", SchedRW = [WriteBSWAP32] in { 1465// This instruction is a consequence of BSWAP32r observing operand size. The 1466// encoding is valid, but the behavior is undefined. 1467let isCodeGenOnly = 1, ForceDisassemble = 1, hasSideEffects = 0 in 1468def BSWAP16r_BAD : I<0xC8, AddRegFrm, (outs GR16:$dst), (ins GR16:$src), 1469 "bswap{w}\t$dst", []>, OpSize16, TB; 1470// GR32 = bswap GR32 1471def BSWAP32r : I<0xC8, AddRegFrm, (outs GR32:$dst), (ins GR32:$src), 1472 "bswap{l}\t$dst", 1473 [(set GR32:$dst, (bswap GR32:$src))]>, OpSize32, TB; 1474 1475let SchedRW = [WriteBSWAP64] in 1476def BSWAP64r : RI<0xC8, AddRegFrm, (outs GR64:$dst), (ins GR64:$src), 1477 "bswap{q}\t$dst", 1478 [(set GR64:$dst, (bswap GR64:$src))]>, TB; 1479} // Constraints = "$src = $dst", SchedRW 1480 1481// Bit scan instructions. 1482let Defs = [EFLAGS] in { 1483def BSF16rr : I<0xBC, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src), 1484 "bsf{w}\t{$src, $dst|$dst, $src}", 1485 [(set GR16:$dst, EFLAGS, (X86bsf GR16:$src))]>, 1486 PS, OpSize16, Sched<[WriteBSF]>; 1487def BSF16rm : I<0xBC, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 1488 "bsf{w}\t{$src, $dst|$dst, $src}", 1489 [(set GR16:$dst, EFLAGS, (X86bsf (loadi16 addr:$src)))]>, 1490 PS, OpSize16, Sched<[WriteBSFLd]>; 1491def BSF32rr : I<0xBC, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src), 1492 "bsf{l}\t{$src, $dst|$dst, $src}", 1493 [(set GR32:$dst, EFLAGS, (X86bsf GR32:$src))]>, 1494 PS, OpSize32, Sched<[WriteBSF]>; 1495def BSF32rm : I<0xBC, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 1496 "bsf{l}\t{$src, $dst|$dst, $src}", 1497 [(set GR32:$dst, EFLAGS, (X86bsf (loadi32 addr:$src)))]>, 1498 PS, OpSize32, Sched<[WriteBSFLd]>; 1499def BSF64rr : RI<0xBC, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src), 1500 "bsf{q}\t{$src, $dst|$dst, $src}", 1501 [(set GR64:$dst, EFLAGS, (X86bsf GR64:$src))]>, 1502 PS, Sched<[WriteBSF]>; 1503def BSF64rm : RI<0xBC, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src), 1504 "bsf{q}\t{$src, $dst|$dst, $src}", 1505 [(set GR64:$dst, EFLAGS, (X86bsf (loadi64 addr:$src)))]>, 1506 PS, Sched<[WriteBSFLd]>; 1507 1508def BSR16rr : I<0xBD, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src), 1509 "bsr{w}\t{$src, $dst|$dst, $src}", 1510 [(set GR16:$dst, EFLAGS, (X86bsr GR16:$src))]>, 1511 PS, OpSize16, Sched<[WriteBSR]>; 1512def BSR16rm : I<0xBD, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 1513 "bsr{w}\t{$src, $dst|$dst, $src}", 1514 [(set GR16:$dst, EFLAGS, (X86bsr (loadi16 addr:$src)))]>, 1515 PS, OpSize16, Sched<[WriteBSRLd]>; 1516def BSR32rr : I<0xBD, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src), 1517 "bsr{l}\t{$src, $dst|$dst, $src}", 1518 [(set GR32:$dst, EFLAGS, (X86bsr GR32:$src))]>, 1519 PS, OpSize32, Sched<[WriteBSR]>; 1520def BSR32rm : I<0xBD, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 1521 "bsr{l}\t{$src, $dst|$dst, $src}", 1522 [(set GR32:$dst, EFLAGS, (X86bsr (loadi32 addr:$src)))]>, 1523 PS, OpSize32, Sched<[WriteBSRLd]>; 1524def BSR64rr : RI<0xBD, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src), 1525 "bsr{q}\t{$src, $dst|$dst, $src}", 1526 [(set GR64:$dst, EFLAGS, (X86bsr GR64:$src))]>, 1527 PS, Sched<[WriteBSR]>; 1528def BSR64rm : RI<0xBD, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src), 1529 "bsr{q}\t{$src, $dst|$dst, $src}", 1530 [(set GR64:$dst, EFLAGS, (X86bsr (loadi64 addr:$src)))]>, 1531 PS, Sched<[WriteBSRLd]>; 1532} // Defs = [EFLAGS] 1533 1534let SchedRW = [WriteMicrocoded] in { 1535let Defs = [EDI,ESI], Uses = [EDI,ESI,DF] in { 1536def MOVSB : I<0xA4, RawFrmDstSrc, (outs), (ins dstidx8:$dst, srcidx8:$src), 1537 "movsb\t{$src, $dst|$dst, $src}", []>; 1538def MOVSW : I<0xA5, RawFrmDstSrc, (outs), (ins dstidx16:$dst, srcidx16:$src), 1539 "movsw\t{$src, $dst|$dst, $src}", []>, OpSize16; 1540def MOVSL : I<0xA5, RawFrmDstSrc, (outs), (ins dstidx32:$dst, srcidx32:$src), 1541 "movs{l|d}\t{$src, $dst|$dst, $src}", []>, OpSize32; 1542def MOVSQ : RI<0xA5, RawFrmDstSrc, (outs), (ins dstidx64:$dst, srcidx64:$src), 1543 "movsq\t{$src, $dst|$dst, $src}", []>, 1544 Requires<[In64BitMode]>; 1545} 1546 1547let Defs = [EDI], Uses = [AL,EDI,DF] in 1548def STOSB : I<0xAA, RawFrmDst, (outs), (ins dstidx8:$dst), 1549 "stosb\t{%al, $dst|$dst, al}", []>; 1550let Defs = [EDI], Uses = [AX,EDI,DF] in 1551def STOSW : I<0xAB, RawFrmDst, (outs), (ins dstidx16:$dst), 1552 "stosw\t{%ax, $dst|$dst, ax}", []>, OpSize16; 1553let Defs = [EDI], Uses = [EAX,EDI,DF] in 1554def STOSL : I<0xAB, RawFrmDst, (outs), (ins dstidx32:$dst), 1555 "stos{l|d}\t{%eax, $dst|$dst, eax}", []>, OpSize32; 1556let Defs = [RDI], Uses = [RAX,RDI,DF] in 1557def STOSQ : RI<0xAB, RawFrmDst, (outs), (ins dstidx64:$dst), 1558 "stosq\t{%rax, $dst|$dst, rax}", []>, 1559 Requires<[In64BitMode]>; 1560 1561let Defs = [EDI,EFLAGS], Uses = [AL,EDI,DF] in 1562def SCASB : I<0xAE, RawFrmDst, (outs), (ins dstidx8:$dst), 1563 "scasb\t{$dst, %al|al, $dst}", []>; 1564let Defs = [EDI,EFLAGS], Uses = [AX,EDI,DF] in 1565def SCASW : I<0xAF, RawFrmDst, (outs), (ins dstidx16:$dst), 1566 "scasw\t{$dst, %ax|ax, $dst}", []>, OpSize16; 1567let Defs = [EDI,EFLAGS], Uses = [EAX,EDI,DF] in 1568def SCASL : I<0xAF, RawFrmDst, (outs), (ins dstidx32:$dst), 1569 "scas{l|d}\t{$dst, %eax|eax, $dst}", []>, OpSize32; 1570let Defs = [EDI,EFLAGS], Uses = [RAX,EDI,DF] in 1571def SCASQ : RI<0xAF, RawFrmDst, (outs), (ins dstidx64:$dst), 1572 "scasq\t{$dst, %rax|rax, $dst}", []>, 1573 Requires<[In64BitMode]>; 1574 1575let Defs = [EDI,ESI,EFLAGS], Uses = [EDI,ESI,DF] in { 1576def CMPSB : I<0xA6, RawFrmDstSrc, (outs), (ins dstidx8:$dst, srcidx8:$src), 1577 "cmpsb\t{$dst, $src|$src, $dst}", []>; 1578def CMPSW : I<0xA7, RawFrmDstSrc, (outs), (ins dstidx16:$dst, srcidx16:$src), 1579 "cmpsw\t{$dst, $src|$src, $dst}", []>, OpSize16; 1580def CMPSL : I<0xA7, RawFrmDstSrc, (outs), (ins dstidx32:$dst, srcidx32:$src), 1581 "cmps{l|d}\t{$dst, $src|$src, $dst}", []>, OpSize32; 1582def CMPSQ : RI<0xA7, RawFrmDstSrc, (outs), (ins dstidx64:$dst, srcidx64:$src), 1583 "cmpsq\t{$dst, $src|$src, $dst}", []>, 1584 Requires<[In64BitMode]>; 1585} 1586} // SchedRW 1587 1588//===----------------------------------------------------------------------===// 1589// Move Instructions. 1590// 1591let SchedRW = [WriteMove] in { 1592let hasSideEffects = 0, isMoveReg = 1 in { 1593def MOV8rr : I<0x88, MRMDestReg, (outs GR8 :$dst), (ins GR8 :$src), 1594 "mov{b}\t{$src, $dst|$dst, $src}", []>; 1595def MOV16rr : I<0x89, MRMDestReg, (outs GR16:$dst), (ins GR16:$src), 1596 "mov{w}\t{$src, $dst|$dst, $src}", []>, OpSize16; 1597def MOV32rr : I<0x89, MRMDestReg, (outs GR32:$dst), (ins GR32:$src), 1598 "mov{l}\t{$src, $dst|$dst, $src}", []>, OpSize32; 1599def MOV64rr : RI<0x89, MRMDestReg, (outs GR64:$dst), (ins GR64:$src), 1600 "mov{q}\t{$src, $dst|$dst, $src}", []>; 1601} 1602 1603let isReMaterializable = 1, isAsCheapAsAMove = 1, isMoveImm = 1 in { 1604def MOV8ri : Ii8 <0xB0, AddRegFrm, (outs GR8 :$dst), (ins i8imm :$src), 1605 "mov{b}\t{$src, $dst|$dst, $src}", 1606 [(set GR8:$dst, imm:$src)]>; 1607def MOV16ri : Ii16<0xB8, AddRegFrm, (outs GR16:$dst), (ins i16imm:$src), 1608 "mov{w}\t{$src, $dst|$dst, $src}", 1609 [(set GR16:$dst, imm:$src)]>, OpSize16; 1610def MOV32ri : Ii32<0xB8, AddRegFrm, (outs GR32:$dst), (ins i32imm:$src), 1611 "mov{l}\t{$src, $dst|$dst, $src}", 1612 [(set GR32:$dst, imm:$src)]>, OpSize32; 1613def MOV64ri32 : RIi32S<0xC7, MRM0r, (outs GR64:$dst), (ins i64i32imm:$src), 1614 "mov{q}\t{$src, $dst|$dst, $src}", 1615 [(set GR64:$dst, i64immSExt32:$src)]>; 1616} 1617let isReMaterializable = 1, isMoveImm = 1 in { 1618def MOV64ri : RIi64<0xB8, AddRegFrm, (outs GR64:$dst), (ins i64imm:$src), 1619 "movabs{q}\t{$src, $dst|$dst, $src}", 1620 [(set GR64:$dst, imm:$src)]>; 1621} 1622 1623// Longer forms that use a ModR/M byte. Needed for disassembler 1624let isCodeGenOnly = 1, ForceDisassemble = 1, hasSideEffects = 0 in { 1625def MOV8ri_alt : Ii8 <0xC6, MRM0r, (outs GR8 :$dst), (ins i8imm :$src), 1626 "mov{b}\t{$src, $dst|$dst, $src}", []>, 1627 FoldGenData<"MOV8ri">; 1628def MOV16ri_alt : Ii16<0xC7, MRM0r, (outs GR16:$dst), (ins i16imm:$src), 1629 "mov{w}\t{$src, $dst|$dst, $src}", []>, OpSize16, 1630 FoldGenData<"MOV16ri">; 1631def MOV32ri_alt : Ii32<0xC7, MRM0r, (outs GR32:$dst), (ins i32imm:$src), 1632 "mov{l}\t{$src, $dst|$dst, $src}", []>, OpSize32, 1633 FoldGenData<"MOV32ri">; 1634} 1635} // SchedRW 1636 1637let SchedRW = [WriteStore] in { 1638def MOV8mi : Ii8 <0xC6, MRM0m, (outs), (ins i8mem :$dst, i8imm :$src), 1639 "mov{b}\t{$src, $dst|$dst, $src}", 1640 [(store (i8 imm_su:$src), addr:$dst)]>; 1641def MOV16mi : Ii16<0xC7, MRM0m, (outs), (ins i16mem:$dst, i16imm:$src), 1642 "mov{w}\t{$src, $dst|$dst, $src}", 1643 [(store (i16 imm_su:$src), addr:$dst)]>, OpSize16; 1644def MOV32mi : Ii32<0xC7, MRM0m, (outs), (ins i32mem:$dst, i32imm:$src), 1645 "mov{l}\t{$src, $dst|$dst, $src}", 1646 [(store (i32 imm_su:$src), addr:$dst)]>, OpSize32; 1647def MOV64mi32 : RIi32S<0xC7, MRM0m, (outs), (ins i64mem:$dst, i64i32imm:$src), 1648 "mov{q}\t{$src, $dst|$dst, $src}", 1649 [(store i64immSExt32_su:$src, addr:$dst)]>, 1650 Requires<[In64BitMode]>; 1651} // SchedRW 1652 1653def : Pat<(i32 relocImm:$src), (MOV32ri relocImm:$src)>; 1654def : Pat<(i64 relocImm:$src), (MOV64ri relocImm:$src)>; 1655 1656def : Pat<(store (i8 relocImm8_su:$src), addr:$dst), 1657 (MOV8mi addr:$dst, relocImm8_su:$src)>; 1658def : Pat<(store (i16 relocImm16_su:$src), addr:$dst), 1659 (MOV16mi addr:$dst, relocImm16_su:$src)>; 1660def : Pat<(store (i32 relocImm32_su:$src), addr:$dst), 1661 (MOV32mi addr:$dst, relocImm32_su:$src)>; 1662def : Pat<(store (i64 i64relocImmSExt32_su:$src), addr:$dst), 1663 (MOV64mi32 addr:$dst, i64immSExt32_su:$src)>; 1664 1665let hasSideEffects = 0 in { 1666 1667/// Memory offset versions of moves. The immediate is an address mode sized 1668/// offset from the segment base. 1669let SchedRW = [WriteALU] in { 1670let mayLoad = 1 in { 1671let Defs = [AL] in 1672def MOV8ao32 : Ii32<0xA0, RawFrmMemOffs, (outs), (ins offset32_8:$src), 1673 "mov{b}\t{$src, %al|al, $src}", []>, 1674 AdSize32; 1675let Defs = [AX] in 1676def MOV16ao32 : Ii32<0xA1, RawFrmMemOffs, (outs), (ins offset32_16:$src), 1677 "mov{w}\t{$src, %ax|ax, $src}", []>, 1678 OpSize16, AdSize32; 1679let Defs = [EAX] in 1680def MOV32ao32 : Ii32<0xA1, RawFrmMemOffs, (outs), (ins offset32_32:$src), 1681 "mov{l}\t{$src, %eax|eax, $src}", []>, 1682 OpSize32, AdSize32; 1683let Defs = [RAX] in 1684def MOV64ao32 : RIi32<0xA1, RawFrmMemOffs, (outs), (ins offset32_64:$src), 1685 "mov{q}\t{$src, %rax|rax, $src}", []>, 1686 AdSize32; 1687 1688let Defs = [AL] in 1689def MOV8ao16 : Ii16<0xA0, RawFrmMemOffs, (outs), (ins offset16_8:$src), 1690 "mov{b}\t{$src, %al|al, $src}", []>, AdSize16; 1691let Defs = [AX] in 1692def MOV16ao16 : Ii16<0xA1, RawFrmMemOffs, (outs), (ins offset16_16:$src), 1693 "mov{w}\t{$src, %ax|ax, $src}", []>, 1694 OpSize16, AdSize16; 1695let Defs = [EAX] in 1696def MOV32ao16 : Ii16<0xA1, RawFrmMemOffs, (outs), (ins offset16_32:$src), 1697 "mov{l}\t{$src, %eax|eax, $src}", []>, 1698 AdSize16, OpSize32; 1699} // mayLoad 1700let mayStore = 1 in { 1701let Uses = [AL] in 1702def MOV8o32a : Ii32<0xA2, RawFrmMemOffs, (outs), (ins offset32_8:$dst), 1703 "mov{b}\t{%al, $dst|$dst, al}", []>, AdSize32; 1704let Uses = [AX] in 1705def MOV16o32a : Ii32<0xA3, RawFrmMemOffs, (outs), (ins offset32_16:$dst), 1706 "mov{w}\t{%ax, $dst|$dst, ax}", []>, 1707 OpSize16, AdSize32; 1708let Uses = [EAX] in 1709def MOV32o32a : Ii32<0xA3, RawFrmMemOffs, (outs), (ins offset32_32:$dst), 1710 "mov{l}\t{%eax, $dst|$dst, eax}", []>, 1711 OpSize32, AdSize32; 1712let Uses = [RAX] in 1713def MOV64o32a : RIi32<0xA3, RawFrmMemOffs, (outs), (ins offset32_64:$dst), 1714 "mov{q}\t{%rax, $dst|$dst, rax}", []>, 1715 AdSize32; 1716 1717let Uses = [AL] in 1718def MOV8o16a : Ii16<0xA2, RawFrmMemOffs, (outs), (ins offset16_8:$dst), 1719 "mov{b}\t{%al, $dst|$dst, al}", []>, AdSize16; 1720let Uses = [AX] in 1721def MOV16o16a : Ii16<0xA3, RawFrmMemOffs, (outs), (ins offset16_16:$dst), 1722 "mov{w}\t{%ax, $dst|$dst, ax}", []>, 1723 OpSize16, AdSize16; 1724let Uses = [EAX] in 1725def MOV32o16a : Ii16<0xA3, RawFrmMemOffs, (outs), (ins offset16_32:$dst), 1726 "mov{l}\t{%eax, $dst|$dst, eax}", []>, 1727 OpSize32, AdSize16; 1728} // mayStore 1729 1730// These forms all have full 64-bit absolute addresses in their instructions 1731// and use the movabs mnemonic to indicate this specific form. 1732let mayLoad = 1 in { 1733let Defs = [AL] in 1734def MOV8ao64 : Ii64<0xA0, RawFrmMemOffs, (outs), (ins offset64_8:$src), 1735 "movabs{b}\t{$src, %al|al, $src}", []>, 1736 AdSize64; 1737let Defs = [AX] in 1738def MOV16ao64 : Ii64<0xA1, RawFrmMemOffs, (outs), (ins offset64_16:$src), 1739 "movabs{w}\t{$src, %ax|ax, $src}", []>, 1740 OpSize16, AdSize64; 1741let Defs = [EAX] in 1742def MOV32ao64 : Ii64<0xA1, RawFrmMemOffs, (outs), (ins offset64_32:$src), 1743 "movabs{l}\t{$src, %eax|eax, $src}", []>, 1744 OpSize32, AdSize64; 1745let Defs = [RAX] in 1746def MOV64ao64 : RIi64<0xA1, RawFrmMemOffs, (outs), (ins offset64_64:$src), 1747 "movabs{q}\t{$src, %rax|rax, $src}", []>, 1748 AdSize64; 1749} // mayLoad 1750 1751let mayStore = 1 in { 1752let Uses = [AL] in 1753def MOV8o64a : Ii64<0xA2, RawFrmMemOffs, (outs), (ins offset64_8:$dst), 1754 "movabs{b}\t{%al, $dst|$dst, al}", []>, 1755 AdSize64; 1756let Uses = [AX] in 1757def MOV16o64a : Ii64<0xA3, RawFrmMemOffs, (outs), (ins offset64_16:$dst), 1758 "movabs{w}\t{%ax, $dst|$dst, ax}", []>, 1759 OpSize16, AdSize64; 1760let Uses = [EAX] in 1761def MOV32o64a : Ii64<0xA3, RawFrmMemOffs, (outs), (ins offset64_32:$dst), 1762 "movabs{l}\t{%eax, $dst|$dst, eax}", []>, 1763 OpSize32, AdSize64; 1764let Uses = [RAX] in 1765def MOV64o64a : RIi64<0xA3, RawFrmMemOffs, (outs), (ins offset64_64:$dst), 1766 "movabs{q}\t{%rax, $dst|$dst, rax}", []>, 1767 AdSize64; 1768} // mayStore 1769} // SchedRW 1770} // hasSideEffects = 0 1771 1772let isCodeGenOnly = 1, ForceDisassemble = 1, hasSideEffects = 0, 1773 SchedRW = [WriteMove], isMoveReg = 1 in { 1774def MOV8rr_REV : I<0x8A, MRMSrcReg, (outs GR8:$dst), (ins GR8:$src), 1775 "mov{b}\t{$src, $dst|$dst, $src}", []>, 1776 FoldGenData<"MOV8rr">; 1777def MOV16rr_REV : I<0x8B, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src), 1778 "mov{w}\t{$src, $dst|$dst, $src}", []>, OpSize16, 1779 FoldGenData<"MOV16rr">; 1780def MOV32rr_REV : I<0x8B, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src), 1781 "mov{l}\t{$src, $dst|$dst, $src}", []>, OpSize32, 1782 FoldGenData<"MOV32rr">; 1783def MOV64rr_REV : RI<0x8B, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src), 1784 "mov{q}\t{$src, $dst|$dst, $src}", []>, 1785 FoldGenData<"MOV64rr">; 1786} 1787 1788// Reversed version with ".s" suffix for GAS compatibility. 1789def : InstAlias<"mov{b}.s\t{$src, $dst|$dst, $src}", 1790 (MOV8rr_REV GR8:$dst, GR8:$src), 0>; 1791def : InstAlias<"mov{w}.s\t{$src, $dst|$dst, $src}", 1792 (MOV16rr_REV GR16:$dst, GR16:$src), 0>; 1793def : InstAlias<"mov{l}.s\t{$src, $dst|$dst, $src}", 1794 (MOV32rr_REV GR32:$dst, GR32:$src), 0>; 1795def : InstAlias<"mov{q}.s\t{$src, $dst|$dst, $src}", 1796 (MOV64rr_REV GR64:$dst, GR64:$src), 0>; 1797def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}", 1798 (MOV8rr_REV GR8:$dst, GR8:$src), 0, "att">; 1799def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}", 1800 (MOV16rr_REV GR16:$dst, GR16:$src), 0, "att">; 1801def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}", 1802 (MOV32rr_REV GR32:$dst, GR32:$src), 0, "att">; 1803def : InstAlias<"mov.s\t{$src, $dst|$dst, $src}", 1804 (MOV64rr_REV GR64:$dst, GR64:$src), 0, "att">; 1805 1806let canFoldAsLoad = 1, isReMaterializable = 1, SchedRW = [WriteLoad] in { 1807def MOV8rm : I<0x8A, MRMSrcMem, (outs GR8 :$dst), (ins i8mem :$src), 1808 "mov{b}\t{$src, $dst|$dst, $src}", 1809 [(set GR8:$dst, (loadi8 addr:$src))]>; 1810def MOV16rm : I<0x8B, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 1811 "mov{w}\t{$src, $dst|$dst, $src}", 1812 [(set GR16:$dst, (loadi16 addr:$src))]>, OpSize16; 1813def MOV32rm : I<0x8B, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 1814 "mov{l}\t{$src, $dst|$dst, $src}", 1815 [(set GR32:$dst, (loadi32 addr:$src))]>, OpSize32; 1816def MOV64rm : RI<0x8B, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src), 1817 "mov{q}\t{$src, $dst|$dst, $src}", 1818 [(set GR64:$dst, (load addr:$src))]>; 1819} 1820 1821let SchedRW = [WriteStore] in { 1822def MOV8mr : I<0x88, MRMDestMem, (outs), (ins i8mem :$dst, GR8 :$src), 1823 "mov{b}\t{$src, $dst|$dst, $src}", 1824 [(store GR8:$src, addr:$dst)]>; 1825def MOV16mr : I<0x89, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src), 1826 "mov{w}\t{$src, $dst|$dst, $src}", 1827 [(store GR16:$src, addr:$dst)]>, OpSize16; 1828def MOV32mr : I<0x89, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src), 1829 "mov{l}\t{$src, $dst|$dst, $src}", 1830 [(store GR32:$src, addr:$dst)]>, OpSize32; 1831def MOV64mr : RI<0x89, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src), 1832 "mov{q}\t{$src, $dst|$dst, $src}", 1833 [(store GR64:$src, addr:$dst)]>; 1834} // SchedRW 1835 1836// Versions of MOV8rr, MOV8mr, and MOV8rm that use i8mem_NOREX and GR8_NOREX so 1837// that they can be used for copying and storing h registers, which can't be 1838// encoded when a REX prefix is present. 1839let isCodeGenOnly = 1 in { 1840let hasSideEffects = 0, isMoveReg = 1 in 1841def MOV8rr_NOREX : I<0x88, MRMDestReg, 1842 (outs GR8_NOREX:$dst), (ins GR8_NOREX:$src), 1843 "mov{b}\t{$src, $dst|$dst, $src}", []>, 1844 Sched<[WriteMove]>; 1845let mayStore = 1, hasSideEffects = 0 in 1846def MOV8mr_NOREX : I<0x88, MRMDestMem, 1847 (outs), (ins i8mem_NOREX:$dst, GR8_NOREX:$src), 1848 "mov{b}\t{$src, $dst|$dst, $src}", []>, 1849 Sched<[WriteStore]>; 1850let mayLoad = 1, hasSideEffects = 0, 1851 canFoldAsLoad = 1, isReMaterializable = 1 in 1852def MOV8rm_NOREX : I<0x8A, MRMSrcMem, 1853 (outs GR8_NOREX:$dst), (ins i8mem_NOREX:$src), 1854 "mov{b}\t{$src, $dst|$dst, $src}", []>, 1855 Sched<[WriteLoad]>; 1856} 1857 1858 1859// Condition code ops, incl. set if equal/not equal/... 1860let SchedRW = [WriteLAHFSAHF] in { 1861let Defs = [EFLAGS], Uses = [AH], hasSideEffects = 0 in 1862def SAHF : I<0x9E, RawFrm, (outs), (ins), "sahf", []>, // flags = AH 1863 Requires<[HasLAHFSAHF]>; 1864let Defs = [AH], Uses = [EFLAGS], hasSideEffects = 0 in 1865def LAHF : I<0x9F, RawFrm, (outs), (ins), "lahf", []>, // AH = flags 1866 Requires<[HasLAHFSAHF]>; 1867} // SchedRW 1868 1869//===----------------------------------------------------------------------===// 1870// Bit tests instructions: BT, BTS, BTR, BTC. 1871 1872let Defs = [EFLAGS] in { 1873let SchedRW = [WriteBitTest] in { 1874def BT16rr : I<0xA3, MRMDestReg, (outs), (ins GR16:$src1, GR16:$src2), 1875 "bt{w}\t{$src2, $src1|$src1, $src2}", 1876 [(set EFLAGS, (X86bt GR16:$src1, GR16:$src2))]>, 1877 OpSize16, TB, NotMemoryFoldable; 1878def BT32rr : I<0xA3, MRMDestReg, (outs), (ins GR32:$src1, GR32:$src2), 1879 "bt{l}\t{$src2, $src1|$src1, $src2}", 1880 [(set EFLAGS, (X86bt GR32:$src1, GR32:$src2))]>, 1881 OpSize32, TB, NotMemoryFoldable; 1882def BT64rr : RI<0xA3, MRMDestReg, (outs), (ins GR64:$src1, GR64:$src2), 1883 "bt{q}\t{$src2, $src1|$src1, $src2}", 1884 [(set EFLAGS, (X86bt GR64:$src1, GR64:$src2))]>, TB, 1885 NotMemoryFoldable; 1886} // SchedRW 1887 1888// Unlike with the register+register form, the memory+register form of the 1889// bt instruction does not ignore the high bits of the index. From ISel's 1890// perspective, this is pretty bizarre. Make these instructions disassembly 1891// only for now. These instructions are also slow on modern CPUs so that's 1892// another reason to avoid generating them. 1893 1894let mayLoad = 1, hasSideEffects = 0, SchedRW = [WriteBitTestRegLd] in { 1895 def BT16mr : I<0xA3, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2), 1896 "bt{w}\t{$src2, $src1|$src1, $src2}", 1897 []>, OpSize16, TB, NotMemoryFoldable; 1898 def BT32mr : I<0xA3, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2), 1899 "bt{l}\t{$src2, $src1|$src1, $src2}", 1900 []>, OpSize32, TB, NotMemoryFoldable; 1901 def BT64mr : RI<0xA3, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2), 1902 "bt{q}\t{$src2, $src1|$src1, $src2}", 1903 []>, TB, NotMemoryFoldable; 1904} 1905 1906let SchedRW = [WriteBitTest] in { 1907def BT16ri8 : Ii8<0xBA, MRM4r, (outs), (ins GR16:$src1, i16u8imm:$src2), 1908 "bt{w}\t{$src2, $src1|$src1, $src2}", 1909 [(set EFLAGS, (X86bt GR16:$src1, imm:$src2))]>, 1910 OpSize16, TB; 1911def BT32ri8 : Ii8<0xBA, MRM4r, (outs), (ins GR32:$src1, i32u8imm:$src2), 1912 "bt{l}\t{$src2, $src1|$src1, $src2}", 1913 [(set EFLAGS, (X86bt GR32:$src1, imm:$src2))]>, 1914 OpSize32, TB; 1915def BT64ri8 : RIi8<0xBA, MRM4r, (outs), (ins GR64:$src1, i64u8imm:$src2), 1916 "bt{q}\t{$src2, $src1|$src1, $src2}", 1917 [(set EFLAGS, (X86bt GR64:$src1, imm:$src2))]>, TB; 1918} // SchedRW 1919 1920// Note that these instructions aren't slow because that only applies when the 1921// other operand is in a register. When it's an immediate, bt is still fast. 1922let SchedRW = [WriteBitTestImmLd] in { 1923def BT16mi8 : Ii8<0xBA, MRM4m, (outs), (ins i16mem:$src1, i16u8imm:$src2), 1924 "bt{w}\t{$src2, $src1|$src1, $src2}", 1925 [(set EFLAGS, (X86bt (loadi16 addr:$src1), 1926 imm:$src2))]>, 1927 OpSize16, TB; 1928def BT32mi8 : Ii8<0xBA, MRM4m, (outs), (ins i32mem:$src1, i32u8imm:$src2), 1929 "bt{l}\t{$src2, $src1|$src1, $src2}", 1930 [(set EFLAGS, (X86bt (loadi32 addr:$src1), 1931 imm:$src2))]>, 1932 OpSize32, TB; 1933def BT64mi8 : RIi8<0xBA, MRM4m, (outs), (ins i64mem:$src1, i64u8imm:$src2), 1934 "bt{q}\t{$src2, $src1|$src1, $src2}", 1935 [(set EFLAGS, (X86bt (loadi64 addr:$src1), 1936 imm:$src2))]>, TB, 1937 Requires<[In64BitMode]>; 1938} // SchedRW 1939 1940let hasSideEffects = 0 in { 1941let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in { 1942def BTC16rr : I<0xBB, MRMDestReg, (outs GR16:$dst), (ins GR16:$src1, GR16:$src2), 1943 "btc{w}\t{$src2, $src1|$src1, $src2}", []>, 1944 OpSize16, TB, NotMemoryFoldable; 1945def BTC32rr : I<0xBB, MRMDestReg, (outs GR32:$dst), (ins GR32:$src1, GR32:$src2), 1946 "btc{l}\t{$src2, $src1|$src1, $src2}", []>, 1947 OpSize32, TB, NotMemoryFoldable; 1948def BTC64rr : RI<0xBB, MRMDestReg, (outs GR64:$dst), (ins GR64:$src1, GR64:$src2), 1949 "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 1950 NotMemoryFoldable; 1951} // SchedRW 1952 1953let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetRegRMW] in { 1954def BTC16mr : I<0xBB, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2), 1955 "btc{w}\t{$src2, $src1|$src1, $src2}", []>, 1956 OpSize16, TB, NotMemoryFoldable; 1957def BTC32mr : I<0xBB, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2), 1958 "btc{l}\t{$src2, $src1|$src1, $src2}", []>, 1959 OpSize32, TB, NotMemoryFoldable; 1960def BTC64mr : RI<0xBB, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2), 1961 "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 1962 NotMemoryFoldable; 1963} 1964 1965let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in { 1966def BTC16ri8 : Ii8<0xBA, MRM7r, (outs GR16:$dst), (ins GR16:$src1, i16u8imm:$src2), 1967 "btc{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB; 1968def BTC32ri8 : Ii8<0xBA, MRM7r, (outs GR32:$dst), (ins GR32:$src1, i32u8imm:$src2), 1969 "btc{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB; 1970def BTC64ri8 : RIi8<0xBA, MRM7r, (outs GR64:$dst), (ins GR64:$src1, i64u8imm:$src2), 1971 "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB; 1972} // SchedRW 1973 1974let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetImmRMW] in { 1975def BTC16mi8 : Ii8<0xBA, MRM7m, (outs), (ins i16mem:$src1, i16u8imm:$src2), 1976 "btc{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB; 1977def BTC32mi8 : Ii8<0xBA, MRM7m, (outs), (ins i32mem:$src1, i32u8imm:$src2), 1978 "btc{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB; 1979def BTC64mi8 : RIi8<0xBA, MRM7m, (outs), (ins i64mem:$src1, i64u8imm:$src2), 1980 "btc{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 1981 Requires<[In64BitMode]>; 1982} 1983 1984let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in { 1985def BTR16rr : I<0xB3, MRMDestReg, (outs GR16:$dst), (ins GR16:$src1, GR16:$src2), 1986 "btr{w}\t{$src2, $src1|$src1, $src2}", []>, 1987 OpSize16, TB, NotMemoryFoldable; 1988def BTR32rr : I<0xB3, MRMDestReg, (outs GR32:$dst), (ins GR32:$src1, GR32:$src2), 1989 "btr{l}\t{$src2, $src1|$src1, $src2}", []>, 1990 OpSize32, TB, NotMemoryFoldable; 1991def BTR64rr : RI<0xB3, MRMDestReg, (outs GR64:$dst), (ins GR64:$src1, GR64:$src2), 1992 "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 1993 NotMemoryFoldable; 1994} // SchedRW 1995 1996let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetRegRMW] in { 1997def BTR16mr : I<0xB3, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2), 1998 "btr{w}\t{$src2, $src1|$src1, $src2}", []>, 1999 OpSize16, TB, NotMemoryFoldable; 2000def BTR32mr : I<0xB3, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2), 2001 "btr{l}\t{$src2, $src1|$src1, $src2}", []>, 2002 OpSize32, TB, NotMemoryFoldable; 2003def BTR64mr : RI<0xB3, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2), 2004 "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 2005 NotMemoryFoldable; 2006} 2007 2008let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in { 2009def BTR16ri8 : Ii8<0xBA, MRM6r, (outs GR16:$dst), (ins GR16:$src1, i16u8imm:$src2), 2010 "btr{w}\t{$src2, $src1|$src1, $src2}", []>, 2011 OpSize16, TB; 2012def BTR32ri8 : Ii8<0xBA, MRM6r, (outs GR32:$dst), (ins GR32:$src1, i32u8imm:$src2), 2013 "btr{l}\t{$src2, $src1|$src1, $src2}", []>, 2014 OpSize32, TB; 2015def BTR64ri8 : RIi8<0xBA, MRM6r, (outs GR64:$dst), (ins GR64:$src1, i64u8imm:$src2), 2016 "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB; 2017} // SchedRW 2018 2019let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetImmRMW] in { 2020def BTR16mi8 : Ii8<0xBA, MRM6m, (outs), (ins i16mem:$src1, i16u8imm:$src2), 2021 "btr{w}\t{$src2, $src1|$src1, $src2}", []>, 2022 OpSize16, TB; 2023def BTR32mi8 : Ii8<0xBA, MRM6m, (outs), (ins i32mem:$src1, i32u8imm:$src2), 2024 "btr{l}\t{$src2, $src1|$src1, $src2}", []>, 2025 OpSize32, TB; 2026def BTR64mi8 : RIi8<0xBA, MRM6m, (outs), (ins i64mem:$src1, i64u8imm:$src2), 2027 "btr{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 2028 Requires<[In64BitMode]>; 2029} 2030 2031let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in { 2032def BTS16rr : I<0xAB, MRMDestReg, (outs GR16:$dst), (ins GR16:$src1, GR16:$src2), 2033 "bts{w}\t{$src2, $src1|$src1, $src2}", []>, 2034 OpSize16, TB, NotMemoryFoldable; 2035def BTS32rr : I<0xAB, MRMDestReg, (outs GR32:$dst), (ins GR32:$src1, GR32:$src2), 2036 "bts{l}\t{$src2, $src1|$src1, $src2}", []>, 2037 OpSize32, TB, NotMemoryFoldable; 2038def BTS64rr : RI<0xAB, MRMDestReg, (outs GR64:$dst), (ins GR64:$src1, GR64:$src2), 2039 "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 2040 NotMemoryFoldable; 2041} // SchedRW 2042 2043let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetRegRMW] in { 2044def BTS16mr : I<0xAB, MRMDestMem, (outs), (ins i16mem:$src1, GR16:$src2), 2045 "bts{w}\t{$src2, $src1|$src1, $src2}", []>, 2046 OpSize16, TB, NotMemoryFoldable; 2047def BTS32mr : I<0xAB, MRMDestMem, (outs), (ins i32mem:$src1, GR32:$src2), 2048 "bts{l}\t{$src2, $src1|$src1, $src2}", []>, 2049 OpSize32, TB, NotMemoryFoldable; 2050def BTS64mr : RI<0xAB, MRMDestMem, (outs), (ins i64mem:$src1, GR64:$src2), 2051 "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 2052 NotMemoryFoldable; 2053} 2054 2055let SchedRW = [WriteBitTestSet], Constraints = "$src1 = $dst" in { 2056def BTS16ri8 : Ii8<0xBA, MRM5r, (outs GR16:$dst), (ins GR16:$src1, i16u8imm:$src2), 2057 "bts{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB; 2058def BTS32ri8 : Ii8<0xBA, MRM5r, (outs GR32:$dst), (ins GR32:$src1, i32u8imm:$src2), 2059 "bts{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB; 2060def BTS64ri8 : RIi8<0xBA, MRM5r, (outs GR64:$dst), (ins GR64:$src1, i64u8imm:$src2), 2061 "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB; 2062} // SchedRW 2063 2064let mayLoad = 1, mayStore = 1, SchedRW = [WriteBitTestSetImmRMW] in { 2065def BTS16mi8 : Ii8<0xBA, MRM5m, (outs), (ins i16mem:$src1, i16u8imm:$src2), 2066 "bts{w}\t{$src2, $src1|$src1, $src2}", []>, OpSize16, TB; 2067def BTS32mi8 : Ii8<0xBA, MRM5m, (outs), (ins i32mem:$src1, i32u8imm:$src2), 2068 "bts{l}\t{$src2, $src1|$src1, $src2}", []>, OpSize32, TB; 2069def BTS64mi8 : RIi8<0xBA, MRM5m, (outs), (ins i64mem:$src1, i64u8imm:$src2), 2070 "bts{q}\t{$src2, $src1|$src1, $src2}", []>, TB, 2071 Requires<[In64BitMode]>; 2072} 2073} // hasSideEffects = 0 2074} // Defs = [EFLAGS] 2075 2076 2077//===----------------------------------------------------------------------===// 2078// Atomic support 2079// 2080 2081// Atomic swap. These are just normal xchg instructions. But since a memory 2082// operand is referenced, the atomicity is ensured. 2083multiclass ATOMIC_SWAP<bits<8> opc8, bits<8> opc, string mnemonic, string frag> { 2084 let Constraints = "$val = $dst", SchedRW = [WriteALULd, WriteRMW] in { 2085 def NAME#8rm : I<opc8, MRMSrcMem, (outs GR8:$dst), 2086 (ins GR8:$val, i8mem:$ptr), 2087 !strconcat(mnemonic, "{b}\t{$val, $ptr|$ptr, $val}"), 2088 [(set 2089 GR8:$dst, 2090 (!cast<PatFrag>(frag # "_8") addr:$ptr, GR8:$val))]>; 2091 def NAME#16rm : I<opc, MRMSrcMem, (outs GR16:$dst), 2092 (ins GR16:$val, i16mem:$ptr), 2093 !strconcat(mnemonic, "{w}\t{$val, $ptr|$ptr, $val}"), 2094 [(set 2095 GR16:$dst, 2096 (!cast<PatFrag>(frag # "_16") addr:$ptr, GR16:$val))]>, 2097 OpSize16; 2098 def NAME#32rm : I<opc, MRMSrcMem, (outs GR32:$dst), 2099 (ins GR32:$val, i32mem:$ptr), 2100 !strconcat(mnemonic, "{l}\t{$val, $ptr|$ptr, $val}"), 2101 [(set 2102 GR32:$dst, 2103 (!cast<PatFrag>(frag # "_32") addr:$ptr, GR32:$val))]>, 2104 OpSize32; 2105 def NAME#64rm : RI<opc, MRMSrcMem, (outs GR64:$dst), 2106 (ins GR64:$val, i64mem:$ptr), 2107 !strconcat(mnemonic, "{q}\t{$val, $ptr|$ptr, $val}"), 2108 [(set 2109 GR64:$dst, 2110 (!cast<PatFrag>(frag # "_64") addr:$ptr, GR64:$val))]>; 2111 } 2112} 2113 2114defm XCHG : ATOMIC_SWAP<0x86, 0x87, "xchg", "atomic_swap">, NotMemoryFoldable; 2115 2116// Swap between registers. 2117let SchedRW = [WriteXCHG] in { 2118let Constraints = "$src1 = $dst1, $src2 = $dst2", hasSideEffects = 0 in { 2119def XCHG8rr : I<0x86, MRMSrcReg, (outs GR8:$dst1, GR8:$dst2), 2120 (ins GR8:$src1, GR8:$src2), 2121 "xchg{b}\t{$src2, $src1|$src1, $src2}", []>, NotMemoryFoldable; 2122def XCHG16rr : I<0x87, MRMSrcReg, (outs GR16:$dst1, GR16:$dst2), 2123 (ins GR16:$src1, GR16:$src2), 2124 "xchg{w}\t{$src2, $src1|$src1, $src2}", []>, 2125 OpSize16, NotMemoryFoldable; 2126def XCHG32rr : I<0x87, MRMSrcReg, (outs GR32:$dst1, GR32:$dst2), 2127 (ins GR32:$src1, GR32:$src2), 2128 "xchg{l}\t{$src2, $src1|$src1, $src2}", []>, 2129 OpSize32, NotMemoryFoldable; 2130def XCHG64rr : RI<0x87, MRMSrcReg, (outs GR64:$dst1, GR64:$dst2), 2131 (ins GR64:$src1 ,GR64:$src2), 2132 "xchg{q}\t{$src2, $src1|$src1, $src2}", []>, NotMemoryFoldable; 2133} 2134 2135// Swap between EAX and other registers. 2136let Constraints = "$src = $dst", hasSideEffects = 0 in { 2137let Uses = [AX], Defs = [AX] in 2138def XCHG16ar : I<0x90, AddRegFrm, (outs GR16:$dst), (ins GR16:$src), 2139 "xchg{w}\t{$src, %ax|ax, $src}", []>, OpSize16; 2140let Uses = [EAX], Defs = [EAX] in 2141def XCHG32ar : I<0x90, AddRegFrm, (outs GR32:$dst), (ins GR32:$src), 2142 "xchg{l}\t{$src, %eax|eax, $src}", []>, OpSize32; 2143let Uses = [RAX], Defs = [RAX] in 2144def XCHG64ar : RI<0x90, AddRegFrm, (outs GR64:$dst), (ins GR64:$src), 2145 "xchg{q}\t{$src, %rax|rax, $src}", []>; 2146} 2147} // SchedRW 2148 2149let hasSideEffects = 0, Constraints = "$src1 = $dst1, $src2 = $dst2", 2150 Defs = [EFLAGS], SchedRW = [WriteXCHG] in { 2151def XADD8rr : I<0xC0, MRMDestReg, (outs GR8:$dst1, GR8:$dst2), 2152 (ins GR8:$src1, GR8:$src2), 2153 "xadd{b}\t{$src2, $src1|$src1, $src2}", []>, TB; 2154def XADD16rr : I<0xC1, MRMDestReg, (outs GR16:$dst1, GR16:$dst2), 2155 (ins GR16:$src1, GR16:$src2), 2156 "xadd{w}\t{$src2, $src1|$src1, $src2}", []>, TB, OpSize16; 2157def XADD32rr : I<0xC1, MRMDestReg, (outs GR32:$dst1, GR32:$dst2), 2158 (ins GR32:$src1, GR32:$src2), 2159 "xadd{l}\t{$src2, $src1|$src1, $src2}", []>, TB, OpSize32; 2160def XADD64rr : RI<0xC1, MRMDestReg, (outs GR64:$dst1, GR64:$dst2), 2161 (ins GR64:$src1, GR64:$src2), 2162 "xadd{q}\t{$src2, $src1|$src1, $src2}", []>, TB; 2163} // SchedRW 2164 2165let mayLoad = 1, mayStore = 1, hasSideEffects = 0, Constraints = "$val = $dst", 2166 Defs = [EFLAGS], SchedRW = [WriteALULd, WriteRMW] in { 2167def XADD8rm : I<0xC0, MRMSrcMem, (outs GR8:$dst), 2168 (ins GR8:$val, i8mem:$ptr), 2169 "xadd{b}\t{$val, $ptr|$ptr, $val}", []>, TB; 2170def XADD16rm : I<0xC1, MRMSrcMem, (outs GR16:$dst), 2171 (ins GR16:$val, i16mem:$ptr), 2172 "xadd{w}\t{$val, $ptr|$ptr, $val}", []>, TB, 2173 OpSize16; 2174def XADD32rm : I<0xC1, MRMSrcMem, (outs GR32:$dst), 2175 (ins GR32:$val, i32mem:$ptr), 2176 "xadd{l}\t{$val, $ptr|$ptr, $val}", []>, TB, 2177 OpSize32; 2178def XADD64rm : RI<0xC1, MRMSrcMem, (outs GR64:$dst), 2179 (ins GR64:$val, i64mem:$ptr), 2180 "xadd{q}\t{$val, $ptr|$ptr, $val}", []>, TB; 2181 2182} 2183 2184let SchedRW = [WriteCMPXCHG], hasSideEffects = 0 in { 2185let Defs = [AL, EFLAGS], Uses = [AL] in 2186def CMPXCHG8rr : I<0xB0, MRMDestReg, (outs GR8:$dst), (ins GR8:$src), 2187 "cmpxchg{b}\t{$src, $dst|$dst, $src}", []>, TB, 2188 NotMemoryFoldable; 2189let Defs = [AX, EFLAGS], Uses = [AX] in 2190def CMPXCHG16rr : I<0xB1, MRMDestReg, (outs GR16:$dst), (ins GR16:$src), 2191 "cmpxchg{w}\t{$src, $dst|$dst, $src}", []>, TB, OpSize16, 2192 NotMemoryFoldable; 2193let Defs = [EAX, EFLAGS], Uses = [EAX] in 2194def CMPXCHG32rr : I<0xB1, MRMDestReg, (outs GR32:$dst), (ins GR32:$src), 2195 "cmpxchg{l}\t{$src, $dst|$dst, $src}", []>, TB, OpSize32, 2196 NotMemoryFoldable; 2197let Defs = [RAX, EFLAGS], Uses = [RAX] in 2198def CMPXCHG64rr : RI<0xB1, MRMDestReg, (outs GR64:$dst), (ins GR64:$src), 2199 "cmpxchg{q}\t{$src, $dst|$dst, $src}", []>, TB, 2200 NotMemoryFoldable; 2201} // SchedRW, hasSideEffects 2202 2203let SchedRW = [WriteCMPXCHGRMW], mayLoad = 1, mayStore = 1, 2204 hasSideEffects = 0 in { 2205let Defs = [AL, EFLAGS], Uses = [AL] in 2206def CMPXCHG8rm : I<0xB0, MRMDestMem, (outs), (ins i8mem:$dst, GR8:$src), 2207 "cmpxchg{b}\t{$src, $dst|$dst, $src}", []>, TB, 2208 NotMemoryFoldable; 2209let Defs = [AX, EFLAGS], Uses = [AX] in 2210def CMPXCHG16rm : I<0xB1, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src), 2211 "cmpxchg{w}\t{$src, $dst|$dst, $src}", []>, TB, OpSize16, 2212 NotMemoryFoldable; 2213let Defs = [EAX, EFLAGS], Uses = [EAX] in 2214def CMPXCHG32rm : I<0xB1, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src), 2215 "cmpxchg{l}\t{$src, $dst|$dst, $src}", []>, TB, OpSize32, 2216 NotMemoryFoldable; 2217let Defs = [RAX, EFLAGS], Uses = [RAX] in 2218def CMPXCHG64rm : RI<0xB1, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src), 2219 "cmpxchg{q}\t{$src, $dst|$dst, $src}", []>, TB, 2220 NotMemoryFoldable; 2221 2222let Defs = [EAX, EDX, EFLAGS], Uses = [EAX, EBX, ECX, EDX] in 2223def CMPXCHG8B : I<0xC7, MRM1m, (outs), (ins i64mem:$dst), 2224 "cmpxchg8b\t$dst", []>, TB, Requires<[HasCmpxchg8b]>; 2225 2226let Defs = [RAX, RDX, EFLAGS], Uses = [RAX, RBX, RCX, RDX] in 2227// NOTE: In64BitMode check needed for the AssemblerPredicate. 2228def CMPXCHG16B : RI<0xC7, MRM1m, (outs), (ins i128mem:$dst), 2229 "cmpxchg16b\t$dst", []>, 2230 TB, Requires<[HasCmpxchg16b,In64BitMode]>; 2231} // SchedRW, mayLoad, mayStore, hasSideEffects 2232 2233 2234// Lock instruction prefix 2235let SchedRW = [WriteMicrocoded] in 2236def LOCK_PREFIX : I<0xF0, PrefixByte, (outs), (ins), "lock", []>; 2237 2238let SchedRW = [WriteNop] in { 2239 2240// Rex64 instruction prefix 2241def REX64_PREFIX : I<0x48, PrefixByte, (outs), (ins), "rex64", []>, 2242 Requires<[In64BitMode]>; 2243 2244// Data16 instruction prefix 2245def DATA16_PREFIX : I<0x66, PrefixByte, (outs), (ins), "data16", []>; 2246} // SchedRW 2247 2248// Repeat string operation instruction prefixes 2249let Defs = [ECX], Uses = [ECX,DF], SchedRW = [WriteMicrocoded] in { 2250// Repeat (used with INS, OUTS, MOVS, LODS and STOS) 2251def REP_PREFIX : I<0xF3, PrefixByte, (outs), (ins), "rep", []>; 2252// Repeat while not equal (used with CMPS and SCAS) 2253def REPNE_PREFIX : I<0xF2, PrefixByte, (outs), (ins), "repne", []>; 2254} 2255 2256// String manipulation instructions 2257let SchedRW = [WriteMicrocoded] in { 2258let Defs = [AL,ESI], Uses = [ESI,DF] in 2259def LODSB : I<0xAC, RawFrmSrc, (outs), (ins srcidx8:$src), 2260 "lodsb\t{$src, %al|al, $src}", []>; 2261let Defs = [AX,ESI], Uses = [ESI,DF] in 2262def LODSW : I<0xAD, RawFrmSrc, (outs), (ins srcidx16:$src), 2263 "lodsw\t{$src, %ax|ax, $src}", []>, OpSize16; 2264let Defs = [EAX,ESI], Uses = [ESI,DF] in 2265def LODSL : I<0xAD, RawFrmSrc, (outs), (ins srcidx32:$src), 2266 "lods{l|d}\t{$src, %eax|eax, $src}", []>, OpSize32; 2267let Defs = [RAX,ESI], Uses = [ESI,DF] in 2268def LODSQ : RI<0xAD, RawFrmSrc, (outs), (ins srcidx64:$src), 2269 "lodsq\t{$src, %rax|rax, $src}", []>, 2270 Requires<[In64BitMode]>; 2271} 2272 2273let SchedRW = [WriteSystem] in { 2274let Defs = [ESI], Uses = [DX,ESI,DF] in { 2275def OUTSB : I<0x6E, RawFrmSrc, (outs), (ins srcidx8:$src), 2276 "outsb\t{$src, %dx|dx, $src}", []>; 2277def OUTSW : I<0x6F, RawFrmSrc, (outs), (ins srcidx16:$src), 2278 "outsw\t{$src, %dx|dx, $src}", []>, OpSize16; 2279def OUTSL : I<0x6F, RawFrmSrc, (outs), (ins srcidx32:$src), 2280 "outs{l|d}\t{$src, %dx|dx, $src}", []>, OpSize32; 2281} 2282 2283let Defs = [EDI], Uses = [DX,EDI,DF] in { 2284def INSB : I<0x6C, RawFrmDst, (outs), (ins dstidx8:$dst), 2285 "insb\t{%dx, $dst|$dst, dx}", []>; 2286def INSW : I<0x6D, RawFrmDst, (outs), (ins dstidx16:$dst), 2287 "insw\t{%dx, $dst|$dst, dx}", []>, OpSize16; 2288def INSL : I<0x6D, RawFrmDst, (outs), (ins dstidx32:$dst), 2289 "ins{l|d}\t{%dx, $dst|$dst, dx}", []>, OpSize32; 2290} 2291} 2292 2293// EFLAGS management instructions. 2294let SchedRW = [WriteALU], Defs = [EFLAGS], Uses = [EFLAGS] in { 2295def CLC : I<0xF8, RawFrm, (outs), (ins), "clc", []>; 2296def STC : I<0xF9, RawFrm, (outs), (ins), "stc", []>; 2297def CMC : I<0xF5, RawFrm, (outs), (ins), "cmc", []>; 2298} 2299 2300// DF management instructions. 2301let SchedRW = [WriteALU], Defs = [DF] in { 2302def CLD : I<0xFC, RawFrm, (outs), (ins), "cld", []>; 2303def STD : I<0xFD, RawFrm, (outs), (ins), "std", []>; 2304} 2305 2306// Table lookup instructions 2307let Uses = [AL,EBX], Defs = [AL], hasSideEffects = 0, mayLoad = 1 in 2308def XLAT : I<0xD7, RawFrm, (outs), (ins), "xlatb", []>, Sched<[WriteLoad]>; 2309 2310let SchedRW = [WriteMicrocoded] in { 2311// ASCII Adjust After Addition 2312let Uses = [AL,EFLAGS], Defs = [AX,EFLAGS], hasSideEffects = 0 in 2313def AAA : I<0x37, RawFrm, (outs), (ins), "aaa", []>, 2314 Requires<[Not64BitMode]>; 2315 2316// ASCII Adjust AX Before Division 2317let Uses = [AX], Defs = [AX,EFLAGS], hasSideEffects = 0 in 2318def AAD8i8 : Ii8<0xD5, RawFrm, (outs), (ins i8imm:$src), 2319 "aad\t$src", []>, Requires<[Not64BitMode]>; 2320 2321// ASCII Adjust AX After Multiply 2322let Uses = [AL], Defs = [AX,EFLAGS], hasSideEffects = 0 in 2323def AAM8i8 : Ii8<0xD4, RawFrm, (outs), (ins i8imm:$src), 2324 "aam\t$src", []>, Requires<[Not64BitMode]>; 2325 2326// ASCII Adjust AL After Subtraction - sets 2327let Uses = [AL,EFLAGS], Defs = [AX,EFLAGS], hasSideEffects = 0 in 2328def AAS : I<0x3F, RawFrm, (outs), (ins), "aas", []>, 2329 Requires<[Not64BitMode]>; 2330 2331// Decimal Adjust AL after Addition 2332let Uses = [AL,EFLAGS], Defs = [AL,EFLAGS], hasSideEffects = 0 in 2333def DAA : I<0x27, RawFrm, (outs), (ins), "daa", []>, 2334 Requires<[Not64BitMode]>; 2335 2336// Decimal Adjust AL after Subtraction 2337let Uses = [AL,EFLAGS], Defs = [AL,EFLAGS], hasSideEffects = 0 in 2338def DAS : I<0x2F, RawFrm, (outs), (ins), "das", []>, 2339 Requires<[Not64BitMode]>; 2340} // SchedRW 2341 2342let SchedRW = [WriteSystem] in { 2343// Check Array Index Against Bounds 2344// Note: "bound" does not have reversed operands in at&t syntax. 2345def BOUNDS16rm : I<0x62, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 2346 "bound\t$dst, $src", []>, OpSize16, 2347 Requires<[Not64BitMode]>; 2348def BOUNDS32rm : I<0x62, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 2349 "bound\t$dst, $src", []>, OpSize32, 2350 Requires<[Not64BitMode]>; 2351 2352// Adjust RPL Field of Segment Selector 2353def ARPL16rr : I<0x63, MRMDestReg, (outs GR16:$dst), (ins GR16:$src), 2354 "arpl\t{$src, $dst|$dst, $src}", []>, 2355 Requires<[Not64BitMode]>, NotMemoryFoldable; 2356let mayStore = 1 in 2357def ARPL16mr : I<0x63, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src), 2358 "arpl\t{$src, $dst|$dst, $src}", []>, 2359 Requires<[Not64BitMode]>, NotMemoryFoldable; 2360} // SchedRW 2361 2362//===----------------------------------------------------------------------===// 2363// MOVBE Instructions 2364// 2365let Predicates = [HasMOVBE] in { 2366 let SchedRW = [WriteALULd] in { 2367 def MOVBE16rm : I<0xF0, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 2368 "movbe{w}\t{$src, $dst|$dst, $src}", 2369 [(set GR16:$dst, (bswap (loadi16 addr:$src)))]>, 2370 OpSize16, T8PS; 2371 def MOVBE32rm : I<0xF0, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 2372 "movbe{l}\t{$src, $dst|$dst, $src}", 2373 [(set GR32:$dst, (bswap (loadi32 addr:$src)))]>, 2374 OpSize32, T8PS; 2375 def MOVBE64rm : RI<0xF0, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src), 2376 "movbe{q}\t{$src, $dst|$dst, $src}", 2377 [(set GR64:$dst, (bswap (loadi64 addr:$src)))]>, 2378 T8PS; 2379 } 2380 let SchedRW = [WriteStore] in { 2381 def MOVBE16mr : I<0xF1, MRMDestMem, (outs), (ins i16mem:$dst, GR16:$src), 2382 "movbe{w}\t{$src, $dst|$dst, $src}", 2383 [(store (bswap GR16:$src), addr:$dst)]>, 2384 OpSize16, T8PS; 2385 def MOVBE32mr : I<0xF1, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src), 2386 "movbe{l}\t{$src, $dst|$dst, $src}", 2387 [(store (bswap GR32:$src), addr:$dst)]>, 2388 OpSize32, T8PS; 2389 def MOVBE64mr : RI<0xF1, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src), 2390 "movbe{q}\t{$src, $dst|$dst, $src}", 2391 [(store (bswap GR64:$src), addr:$dst)]>, 2392 T8PS; 2393 } 2394} 2395 2396//===----------------------------------------------------------------------===// 2397// RDRAND Instruction 2398// 2399let Predicates = [HasRDRAND], Defs = [EFLAGS], SchedRW = [WriteSystem] in { 2400 def RDRAND16r : I<0xC7, MRM6r, (outs GR16:$dst), (ins), 2401 "rdrand{w}\t$dst", [(set GR16:$dst, EFLAGS, (X86rdrand))]>, 2402 OpSize16, PS; 2403 def RDRAND32r : I<0xC7, MRM6r, (outs GR32:$dst), (ins), 2404 "rdrand{l}\t$dst", [(set GR32:$dst, EFLAGS, (X86rdrand))]>, 2405 OpSize32, PS; 2406 def RDRAND64r : RI<0xC7, MRM6r, (outs GR64:$dst), (ins), 2407 "rdrand{q}\t$dst", [(set GR64:$dst, EFLAGS, (X86rdrand))]>, 2408 PS; 2409} 2410 2411//===----------------------------------------------------------------------===// 2412// RDSEED Instruction 2413// 2414let Predicates = [HasRDSEED], Defs = [EFLAGS], SchedRW = [WriteSystem] in { 2415 def RDSEED16r : I<0xC7, MRM7r, (outs GR16:$dst), (ins), "rdseed{w}\t$dst", 2416 [(set GR16:$dst, EFLAGS, (X86rdseed))]>, OpSize16, PS; 2417 def RDSEED32r : I<0xC7, MRM7r, (outs GR32:$dst), (ins), "rdseed{l}\t$dst", 2418 [(set GR32:$dst, EFLAGS, (X86rdseed))]>, OpSize32, PS; 2419 def RDSEED64r : RI<0xC7, MRM7r, (outs GR64:$dst), (ins), "rdseed{q}\t$dst", 2420 [(set GR64:$dst, EFLAGS, (X86rdseed))]>, PS; 2421} 2422 2423//===----------------------------------------------------------------------===// 2424// LZCNT Instruction 2425// 2426let Predicates = [HasLZCNT], Defs = [EFLAGS] in { 2427 def LZCNT16rr : I<0xBD, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src), 2428 "lzcnt{w}\t{$src, $dst|$dst, $src}", 2429 [(set GR16:$dst, (ctlz GR16:$src)), (implicit EFLAGS)]>, 2430 XS, OpSize16, Sched<[WriteLZCNT]>; 2431 def LZCNT16rm : I<0xBD, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 2432 "lzcnt{w}\t{$src, $dst|$dst, $src}", 2433 [(set GR16:$dst, (ctlz (loadi16 addr:$src))), 2434 (implicit EFLAGS)]>, XS, OpSize16, Sched<[WriteLZCNTLd]>; 2435 2436 def LZCNT32rr : I<0xBD, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src), 2437 "lzcnt{l}\t{$src, $dst|$dst, $src}", 2438 [(set GR32:$dst, (ctlz GR32:$src)), (implicit EFLAGS)]>, 2439 XS, OpSize32, Sched<[WriteLZCNT]>; 2440 def LZCNT32rm : I<0xBD, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 2441 "lzcnt{l}\t{$src, $dst|$dst, $src}", 2442 [(set GR32:$dst, (ctlz (loadi32 addr:$src))), 2443 (implicit EFLAGS)]>, XS, OpSize32, Sched<[WriteLZCNTLd]>; 2444 2445 def LZCNT64rr : RI<0xBD, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src), 2446 "lzcnt{q}\t{$src, $dst|$dst, $src}", 2447 [(set GR64:$dst, (ctlz GR64:$src)), (implicit EFLAGS)]>, 2448 XS, Sched<[WriteLZCNT]>; 2449 def LZCNT64rm : RI<0xBD, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src), 2450 "lzcnt{q}\t{$src, $dst|$dst, $src}", 2451 [(set GR64:$dst, (ctlz (loadi64 addr:$src))), 2452 (implicit EFLAGS)]>, XS, Sched<[WriteLZCNTLd]>; 2453} 2454 2455//===----------------------------------------------------------------------===// 2456// BMI Instructions 2457// 2458let Predicates = [HasBMI], Defs = [EFLAGS] in { 2459 def TZCNT16rr : I<0xBC, MRMSrcReg, (outs GR16:$dst), (ins GR16:$src), 2460 "tzcnt{w}\t{$src, $dst|$dst, $src}", 2461 [(set GR16:$dst, (cttz GR16:$src)), (implicit EFLAGS)]>, 2462 XS, OpSize16, Sched<[WriteTZCNT]>; 2463 def TZCNT16rm : I<0xBC, MRMSrcMem, (outs GR16:$dst), (ins i16mem:$src), 2464 "tzcnt{w}\t{$src, $dst|$dst, $src}", 2465 [(set GR16:$dst, (cttz (loadi16 addr:$src))), 2466 (implicit EFLAGS)]>, XS, OpSize16, Sched<[WriteTZCNTLd]>; 2467 2468 def TZCNT32rr : I<0xBC, MRMSrcReg, (outs GR32:$dst), (ins GR32:$src), 2469 "tzcnt{l}\t{$src, $dst|$dst, $src}", 2470 [(set GR32:$dst, (cttz GR32:$src)), (implicit EFLAGS)]>, 2471 XS, OpSize32, Sched<[WriteTZCNT]>; 2472 def TZCNT32rm : I<0xBC, MRMSrcMem, (outs GR32:$dst), (ins i32mem:$src), 2473 "tzcnt{l}\t{$src, $dst|$dst, $src}", 2474 [(set GR32:$dst, (cttz (loadi32 addr:$src))), 2475 (implicit EFLAGS)]>, XS, OpSize32, Sched<[WriteTZCNTLd]>; 2476 2477 def TZCNT64rr : RI<0xBC, MRMSrcReg, (outs GR64:$dst), (ins GR64:$src), 2478 "tzcnt{q}\t{$src, $dst|$dst, $src}", 2479 [(set GR64:$dst, (cttz GR64:$src)), (implicit EFLAGS)]>, 2480 XS, Sched<[WriteTZCNT]>; 2481 def TZCNT64rm : RI<0xBC, MRMSrcMem, (outs GR64:$dst), (ins i64mem:$src), 2482 "tzcnt{q}\t{$src, $dst|$dst, $src}", 2483 [(set GR64:$dst, (cttz (loadi64 addr:$src))), 2484 (implicit EFLAGS)]>, XS, Sched<[WriteTZCNTLd]>; 2485} 2486 2487multiclass bmi_bls<string mnemonic, Format RegMRM, Format MemMRM, 2488 RegisterClass RC, X86MemOperand x86memop, 2489 X86FoldableSchedWrite sched> { 2490let hasSideEffects = 0 in { 2491 def rr : I<0xF3, RegMRM, (outs RC:$dst), (ins RC:$src), 2492 !strconcat(mnemonic, "\t{$src, $dst|$dst, $src}"), []>, 2493 T8PS, VEX_4V, Sched<[sched]>; 2494 let mayLoad = 1 in 2495 def rm : I<0xF3, MemMRM, (outs RC:$dst), (ins x86memop:$src), 2496 !strconcat(mnemonic, "\t{$src, $dst|$dst, $src}"), []>, 2497 T8PS, VEX_4V, Sched<[sched.Folded]>; 2498} 2499} 2500 2501let Predicates = [HasBMI], Defs = [EFLAGS] in { 2502 defm BLSR32 : bmi_bls<"blsr{l}", MRM1r, MRM1m, GR32, i32mem, WriteBLS>; 2503 defm BLSR64 : bmi_bls<"blsr{q}", MRM1r, MRM1m, GR64, i64mem, WriteBLS>, VEX_W; 2504 defm BLSMSK32 : bmi_bls<"blsmsk{l}", MRM2r, MRM2m, GR32, i32mem, WriteBLS>; 2505 defm BLSMSK64 : bmi_bls<"blsmsk{q}", MRM2r, MRM2m, GR64, i64mem, WriteBLS>, VEX_W; 2506 defm BLSI32 : bmi_bls<"blsi{l}", MRM3r, MRM3m, GR32, i32mem, WriteBLS>; 2507 defm BLSI64 : bmi_bls<"blsi{q}", MRM3r, MRM3m, GR64, i64mem, WriteBLS>, VEX_W; 2508} 2509 2510//===----------------------------------------------------------------------===// 2511// Pattern fragments to auto generate BMI instructions. 2512//===----------------------------------------------------------------------===// 2513 2514def or_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs), 2515 (X86or_flag node:$lhs, node:$rhs), [{ 2516 return hasNoCarryFlagUses(SDValue(N, 1)); 2517}]>; 2518 2519def xor_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs), 2520 (X86xor_flag node:$lhs, node:$rhs), [{ 2521 return hasNoCarryFlagUses(SDValue(N, 1)); 2522}]>; 2523 2524def and_flag_nocf : PatFrag<(ops node:$lhs, node:$rhs), 2525 (X86and_flag node:$lhs, node:$rhs), [{ 2526 return hasNoCarryFlagUses(SDValue(N, 1)); 2527}]>; 2528 2529let Predicates = [HasBMI] in { 2530 // FIXME: patterns for the load versions are not implemented 2531 def : Pat<(and GR32:$src, (add GR32:$src, -1)), 2532 (BLSR32rr GR32:$src)>; 2533 def : Pat<(and GR64:$src, (add GR64:$src, -1)), 2534 (BLSR64rr GR64:$src)>; 2535 2536 def : Pat<(xor GR32:$src, (add GR32:$src, -1)), 2537 (BLSMSK32rr GR32:$src)>; 2538 def : Pat<(xor GR64:$src, (add GR64:$src, -1)), 2539 (BLSMSK64rr GR64:$src)>; 2540 2541 def : Pat<(and GR32:$src, (ineg GR32:$src)), 2542 (BLSI32rr GR32:$src)>; 2543 def : Pat<(and GR64:$src, (ineg GR64:$src)), 2544 (BLSI64rr GR64:$src)>; 2545 2546 // Versions to match flag producing ops. 2547 def : Pat<(and_flag_nocf GR32:$src, (add GR32:$src, -1)), 2548 (BLSR32rr GR32:$src)>; 2549 def : Pat<(and_flag_nocf GR64:$src, (add GR64:$src, -1)), 2550 (BLSR64rr GR64:$src)>; 2551 2552 def : Pat<(xor_flag_nocf GR32:$src, (add GR32:$src, -1)), 2553 (BLSMSK32rr GR32:$src)>; 2554 def : Pat<(xor_flag_nocf GR64:$src, (add GR64:$src, -1)), 2555 (BLSMSK64rr GR64:$src)>; 2556 2557 def : Pat<(and_flag_nocf GR32:$src, (ineg GR32:$src)), 2558 (BLSI32rr GR32:$src)>; 2559 def : Pat<(and_flag_nocf GR64:$src, (ineg GR64:$src)), 2560 (BLSI64rr GR64:$src)>; 2561} 2562 2563multiclass bmi_bextr<bits<8> opc, string mnemonic, RegisterClass RC, 2564 X86MemOperand x86memop, SDNode OpNode, 2565 PatFrag ld_frag, X86FoldableSchedWrite Sched> { 2566 def rr : I<opc, MRMSrcReg4VOp3, (outs RC:$dst), (ins RC:$src1, RC:$src2), 2567 !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"), 2568 [(set RC:$dst, (OpNode RC:$src1, RC:$src2)), (implicit EFLAGS)]>, 2569 T8PS, VEX, Sched<[Sched]>; 2570 def rm : I<opc, MRMSrcMem4VOp3, (outs RC:$dst), (ins x86memop:$src1, RC:$src2), 2571 !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"), 2572 [(set RC:$dst, (OpNode (ld_frag addr:$src1), RC:$src2)), 2573 (implicit EFLAGS)]>, T8PS, VEX, 2574 Sched<[Sched.Folded, 2575 // x86memop:$src1 2576 ReadDefault, ReadDefault, ReadDefault, ReadDefault, 2577 ReadDefault, 2578 // RC:$src2 2579 Sched.ReadAfterFold]>; 2580} 2581 2582let Predicates = [HasBMI], Defs = [EFLAGS] in { 2583 defm BEXTR32 : bmi_bextr<0xF7, "bextr{l}", GR32, i32mem, 2584 X86bextr, loadi32, WriteBEXTR>; 2585 defm BEXTR64 : bmi_bextr<0xF7, "bextr{q}", GR64, i64mem, 2586 X86bextr, loadi64, WriteBEXTR>, VEX_W; 2587} 2588 2589multiclass bmi_bzhi<bits<8> opc, string mnemonic, RegisterClass RC, 2590 X86MemOperand x86memop, Intrinsic Int, 2591 PatFrag ld_frag, X86FoldableSchedWrite Sched> { 2592 def rr : I<opc, MRMSrcReg4VOp3, (outs RC:$dst), (ins RC:$src1, RC:$src2), 2593 !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"), 2594 [(set RC:$dst, (Int RC:$src1, RC:$src2)), (implicit EFLAGS)]>, 2595 T8PS, VEX, Sched<[Sched]>; 2596 def rm : I<opc, MRMSrcMem4VOp3, (outs RC:$dst), (ins x86memop:$src1, RC:$src2), 2597 !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"), 2598 [(set RC:$dst, (Int (ld_frag addr:$src1), RC:$src2)), 2599 (implicit EFLAGS)]>, T8PS, VEX, 2600 Sched<[Sched.Folded, 2601 // x86memop:$src1 2602 ReadDefault, ReadDefault, ReadDefault, ReadDefault, 2603 ReadDefault, 2604 // RC:$src2 2605 Sched.ReadAfterFold]>; 2606} 2607 2608let Predicates = [HasBMI2], Defs = [EFLAGS] in { 2609 defm BZHI32 : bmi_bzhi<0xF5, "bzhi{l}", GR32, i32mem, 2610 X86bzhi, loadi32, WriteBZHI>; 2611 defm BZHI64 : bmi_bzhi<0xF5, "bzhi{q}", GR64, i64mem, 2612 X86bzhi, loadi64, WriteBZHI>, VEX_W; 2613} 2614 2615def CountTrailingOnes : SDNodeXForm<imm, [{ 2616 // Count the trailing ones in the immediate. 2617 return getI8Imm(countTrailingOnes(N->getZExtValue()), SDLoc(N)); 2618}]>; 2619 2620def BEXTRMaskXForm : SDNodeXForm<imm, [{ 2621 unsigned Length = countTrailingOnes(N->getZExtValue()); 2622 return getI32Imm(Length << 8, SDLoc(N)); 2623}]>; 2624 2625def AndMask64 : ImmLeaf<i64, [{ 2626 return isMask_64(Imm) && !isUInt<32>(Imm); 2627}]>; 2628 2629// Use BEXTR for 64-bit 'and' with large immediate 'mask'. 2630let Predicates = [HasBMI, NoBMI2, NoTBM] in { 2631 def : Pat<(and GR64:$src, AndMask64:$mask), 2632 (BEXTR64rr GR64:$src, 2633 (SUBREG_TO_REG (i64 0), 2634 (MOV32ri (BEXTRMaskXForm imm:$mask)), sub_32bit))>; 2635 def : Pat<(and (loadi64 addr:$src), AndMask64:$mask), 2636 (BEXTR64rm addr:$src, 2637 (SUBREG_TO_REG (i64 0), 2638 (MOV32ri (BEXTRMaskXForm imm:$mask)), sub_32bit))>; 2639} 2640 2641// Use BZHI for 64-bit 'and' with large immediate 'mask'. 2642let Predicates = [HasBMI2, NoTBM] in { 2643 def : Pat<(and GR64:$src, AndMask64:$mask), 2644 (BZHI64rr GR64:$src, 2645 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), 2646 (MOV8ri (CountTrailingOnes imm:$mask)), sub_8bit))>; 2647 def : Pat<(and (loadi64 addr:$src), AndMask64:$mask), 2648 (BZHI64rm addr:$src, 2649 (INSERT_SUBREG (i64 (IMPLICIT_DEF)), 2650 (MOV8ri (CountTrailingOnes imm:$mask)), sub_8bit))>; 2651} 2652 2653multiclass bmi_pdep_pext<string mnemonic, RegisterClass RC, 2654 X86MemOperand x86memop, SDNode OpNode, 2655 PatFrag ld_frag> { 2656 def rr : I<0xF5, MRMSrcReg, (outs RC:$dst), (ins RC:$src1, RC:$src2), 2657 !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"), 2658 [(set RC:$dst, (OpNode RC:$src1, RC:$src2))]>, 2659 VEX_4V, Sched<[WriteALU]>; 2660 def rm : I<0xF5, MRMSrcMem, (outs RC:$dst), (ins RC:$src1, x86memop:$src2), 2661 !strconcat(mnemonic, "\t{$src2, $src1, $dst|$dst, $src1, $src2}"), 2662 [(set RC:$dst, (OpNode RC:$src1, (ld_frag addr:$src2)))]>, 2663 VEX_4V, Sched<[WriteALU.Folded, WriteALU.ReadAfterFold]>; 2664} 2665 2666let Predicates = [HasBMI2] in { 2667 defm PDEP32 : bmi_pdep_pext<"pdep{l}", GR32, i32mem, 2668 X86pdep, loadi32>, T8XD; 2669 defm PDEP64 : bmi_pdep_pext<"pdep{q}", GR64, i64mem, 2670 X86pdep, loadi64>, T8XD, VEX_W; 2671 defm PEXT32 : bmi_pdep_pext<"pext{l}", GR32, i32mem, 2672 X86pext, loadi32>, T8XS; 2673 defm PEXT64 : bmi_pdep_pext<"pext{q}", GR64, i64mem, 2674 X86pext, loadi64>, T8XS, VEX_W; 2675} 2676 2677//===----------------------------------------------------------------------===// 2678// TBM Instructions 2679// 2680let Predicates = [HasTBM], Defs = [EFLAGS] in { 2681 2682multiclass tbm_bextri<bits<8> opc, RegisterClass RC, string OpcodeStr, 2683 X86MemOperand x86memop, PatFrag ld_frag, 2684 SDNode OpNode, Operand immtype, 2685 SDPatternOperator immoperator, 2686 X86FoldableSchedWrite Sched> { 2687 def ri : Ii32<opc, MRMSrcReg, (outs RC:$dst), (ins RC:$src1, immtype:$cntl), 2688 !strconcat(OpcodeStr, 2689 "\t{$cntl, $src1, $dst|$dst, $src1, $cntl}"), 2690 [(set RC:$dst, (OpNode RC:$src1, immoperator:$cntl))]>, 2691 XOP, XOPA, Sched<[Sched]>; 2692 def mi : Ii32<opc, MRMSrcMem, (outs RC:$dst), 2693 (ins x86memop:$src1, immtype:$cntl), 2694 !strconcat(OpcodeStr, 2695 "\t{$cntl, $src1, $dst|$dst, $src1, $cntl}"), 2696 [(set RC:$dst, (OpNode (ld_frag addr:$src1), immoperator:$cntl))]>, 2697 XOP, XOPA, Sched<[Sched.Folded]>; 2698} 2699 2700defm BEXTRI32 : tbm_bextri<0x10, GR32, "bextr{l}", i32mem, loadi32, 2701 X86bextri, i32imm, timm, WriteBEXTR>; 2702let ImmT = Imm32S in 2703defm BEXTRI64 : tbm_bextri<0x10, GR64, "bextr{q}", i64mem, loadi64, 2704 X86bextri, i64i32imm, 2705 i64timmSExt32, WriteBEXTR>, VEX_W; 2706 2707multiclass tbm_binary_rm<bits<8> opc, Format FormReg, Format FormMem, 2708 RegisterClass RC, string OpcodeStr, 2709 X86MemOperand x86memop, X86FoldableSchedWrite Sched> { 2710let hasSideEffects = 0 in { 2711 def rr : I<opc, FormReg, (outs RC:$dst), (ins RC:$src), 2712 !strconcat(OpcodeStr,"\t{$src, $dst|$dst, $src}"), []>, 2713 XOP_4V, XOP9, Sched<[Sched]>; 2714 let mayLoad = 1 in 2715 def rm : I<opc, FormMem, (outs RC:$dst), (ins x86memop:$src), 2716 !strconcat(OpcodeStr,"\t{$src, $dst|$dst, $src}"), []>, 2717 XOP_4V, XOP9, Sched<[Sched.Folded]>; 2718} 2719} 2720 2721multiclass tbm_binary_intr<bits<8> opc, string OpcodeStr, 2722 X86FoldableSchedWrite Sched, 2723 Format FormReg, Format FormMem> { 2724 defm NAME#32 : tbm_binary_rm<opc, FormReg, FormMem, GR32, OpcodeStr#"{l}", 2725 i32mem, Sched>; 2726 defm NAME#64 : tbm_binary_rm<opc, FormReg, FormMem, GR64, OpcodeStr#"{q}", 2727 i64mem, Sched>, VEX_W; 2728} 2729 2730defm BLCFILL : tbm_binary_intr<0x01, "blcfill", WriteALU, MRM1r, MRM1m>; 2731defm BLCI : tbm_binary_intr<0x02, "blci", WriteALU, MRM6r, MRM6m>; 2732defm BLCIC : tbm_binary_intr<0x01, "blcic", WriteALU, MRM5r, MRM5m>; 2733defm BLCMSK : tbm_binary_intr<0x02, "blcmsk", WriteALU, MRM1r, MRM1m>; 2734defm BLCS : tbm_binary_intr<0x01, "blcs", WriteALU, MRM3r, MRM3m>; 2735defm BLSFILL : tbm_binary_intr<0x01, "blsfill", WriteALU, MRM2r, MRM2m>; 2736defm BLSIC : tbm_binary_intr<0x01, "blsic", WriteALU, MRM6r, MRM6m>; 2737defm T1MSKC : tbm_binary_intr<0x01, "t1mskc", WriteALU, MRM7r, MRM7m>; 2738defm TZMSK : tbm_binary_intr<0x01, "tzmsk", WriteALU, MRM4r, MRM4m>; 2739} // HasTBM, EFLAGS 2740 2741// Use BEXTRI for 64-bit 'and' with large immediate 'mask'. 2742let Predicates = [HasTBM] in { 2743 def : Pat<(and GR64:$src, AndMask64:$mask), 2744 (BEXTRI64ri GR64:$src, (BEXTRMaskXForm imm:$mask))>; 2745 2746 def : Pat<(and (loadi64 addr:$src), AndMask64:$mask), 2747 (BEXTRI64mi addr:$src, (BEXTRMaskXForm imm:$mask))>; 2748} 2749 2750//===----------------------------------------------------------------------===// 2751// Lightweight Profiling Instructions 2752 2753let Predicates = [HasLWP], SchedRW = [WriteSystem] in { 2754 2755def LLWPCB : I<0x12, MRM0r, (outs), (ins GR32:$src), "llwpcb\t$src", 2756 [(int_x86_llwpcb GR32:$src)]>, XOP, XOP9; 2757def SLWPCB : I<0x12, MRM1r, (outs GR32:$dst), (ins), "slwpcb\t$dst", 2758 [(set GR32:$dst, (int_x86_slwpcb))]>, XOP, XOP9; 2759 2760def LLWPCB64 : I<0x12, MRM0r, (outs), (ins GR64:$src), "llwpcb\t$src", 2761 [(int_x86_llwpcb GR64:$src)]>, XOP, XOP9, VEX_W; 2762def SLWPCB64 : I<0x12, MRM1r, (outs GR64:$dst), (ins), "slwpcb\t$dst", 2763 [(set GR64:$dst, (int_x86_slwpcb))]>, XOP, XOP9, VEX_W; 2764 2765multiclass lwpins_intr<RegisterClass RC> { 2766 def rri : Ii32<0x12, MRM0r, (outs), (ins RC:$src0, GR32:$src1, i32imm:$cntl), 2767 "lwpins\t{$cntl, $src1, $src0|$src0, $src1, $cntl}", 2768 [(set EFLAGS, (X86lwpins RC:$src0, GR32:$src1, timm:$cntl))]>, 2769 XOP_4V, XOPA; 2770 let mayLoad = 1 in 2771 def rmi : Ii32<0x12, MRM0m, (outs), (ins RC:$src0, i32mem:$src1, i32imm:$cntl), 2772 "lwpins\t{$cntl, $src1, $src0|$src0, $src1, $cntl}", 2773 [(set EFLAGS, (X86lwpins RC:$src0, (loadi32 addr:$src1), timm:$cntl))]>, 2774 XOP_4V, XOPA; 2775} 2776 2777let Defs = [EFLAGS] in { 2778 defm LWPINS32 : lwpins_intr<GR32>; 2779 defm LWPINS64 : lwpins_intr<GR64>, VEX_W; 2780} // EFLAGS 2781 2782multiclass lwpval_intr<RegisterClass RC, Intrinsic Int> { 2783 def rri : Ii32<0x12, MRM1r, (outs), (ins RC:$src0, GR32:$src1, i32imm:$cntl), 2784 "lwpval\t{$cntl, $src1, $src0|$src0, $src1, $cntl}", 2785 [(Int RC:$src0, GR32:$src1, timm:$cntl)]>, XOP_4V, XOPA; 2786 let mayLoad = 1 in 2787 def rmi : Ii32<0x12, MRM1m, (outs), (ins RC:$src0, i32mem:$src1, i32imm:$cntl), 2788 "lwpval\t{$cntl, $src1, $src0|$src0, $src1, $cntl}", 2789 [(Int RC:$src0, (loadi32 addr:$src1), timm:$cntl)]>, 2790 XOP_4V, XOPA; 2791} 2792 2793defm LWPVAL32 : lwpval_intr<GR32, int_x86_lwpval32>; 2794defm LWPVAL64 : lwpval_intr<GR64, int_x86_lwpval64>, VEX_W; 2795 2796} // HasLWP, SchedRW 2797 2798//===----------------------------------------------------------------------===// 2799// MONITORX/MWAITX Instructions 2800// 2801let SchedRW = [ WriteSystem ] in { 2802 let Uses = [ EAX, ECX, EDX ] in 2803 def MONITORX32rrr : I<0x01, MRM_FA, (outs), (ins), "monitorx", []>, 2804 TB, Requires<[ HasMWAITX, Not64BitMode ]>; 2805 let Uses = [ RAX, ECX, EDX ] in 2806 def MONITORX64rrr : I<0x01, MRM_FA, (outs), (ins), "monitorx", []>, 2807 TB, Requires<[ HasMWAITX, In64BitMode ]>; 2808 2809 let Uses = [ ECX, EAX, EBX ] in { 2810 def MWAITXrrr : I<0x01, MRM_FB, (outs), (ins), "mwaitx", 2811 []>, TB, Requires<[ HasMWAITX ]>; 2812 } 2813} // SchedRW 2814 2815def : InstAlias<"mwaitx\t{%eax, %ecx, %ebx|ebx, ecx, eax}", (MWAITXrrr)>, 2816 Requires<[ Not64BitMode ]>; 2817def : InstAlias<"mwaitx\t{%rax, %rcx, %rbx|rbx, rcx, rax}", (MWAITXrrr)>, 2818 Requires<[ In64BitMode ]>; 2819 2820def : InstAlias<"monitorx\t{%eax, %ecx, %edx|edx, ecx, eax}", (MONITORX32rrr)>, 2821 Requires<[ Not64BitMode ]>; 2822def : InstAlias<"monitorx\t{%rax, %rcx, %rdx|rdx, rcx, rax}", (MONITORX64rrr)>, 2823 Requires<[ In64BitMode ]>; 2824 2825//===----------------------------------------------------------------------===// 2826// WAITPKG Instructions 2827// 2828let SchedRW = [WriteSystem] in { 2829 def UMONITOR16 : I<0xAE, MRM6r, (outs), (ins GR16:$src), 2830 "umonitor\t$src", [(int_x86_umonitor GR16:$src)]>, 2831 XS, AdSize16, Requires<[HasWAITPKG, Not64BitMode]>; 2832 def UMONITOR32 : I<0xAE, MRM6r, (outs), (ins GR32:$src), 2833 "umonitor\t$src", [(int_x86_umonitor GR32:$src)]>, 2834 XS, AdSize32, Requires<[HasWAITPKG]>; 2835 def UMONITOR64 : I<0xAE, MRM6r, (outs), (ins GR64:$src), 2836 "umonitor\t$src", [(int_x86_umonitor GR64:$src)]>, 2837 XS, AdSize64, Requires<[HasWAITPKG, In64BitMode]>; 2838 let Uses = [EAX, EDX], Defs = [EFLAGS] in { 2839 def UMWAIT : I<0xAE, MRM6r, 2840 (outs), (ins GR32orGR64:$src), "umwait\t$src", 2841 [(set EFLAGS, (X86umwait GR32orGR64:$src, EDX, EAX))]>, 2842 XD, Requires<[HasWAITPKG]>; 2843 def TPAUSE : I<0xAE, MRM6r, 2844 (outs), (ins GR32orGR64:$src), "tpause\t$src", 2845 [(set EFLAGS, (X86tpause GR32orGR64:$src, EDX, EAX))]>, 2846 PD, Requires<[HasWAITPKG]>, NotMemoryFoldable; 2847 } 2848} // SchedRW 2849 2850//===----------------------------------------------------------------------===// 2851// MOVDIRI - Move doubleword/quadword as direct store 2852// 2853let SchedRW = [WriteStore] in { 2854def MOVDIRI32 : I<0xF9, MRMDestMem, (outs), (ins i32mem:$dst, GR32:$src), 2855 "movdiri\t{$src, $dst|$dst, $src}", 2856 [(int_x86_directstore32 addr:$dst, GR32:$src)]>, 2857 T8PS, Requires<[HasMOVDIRI]>; 2858def MOVDIRI64 : RI<0xF9, MRMDestMem, (outs), (ins i64mem:$dst, GR64:$src), 2859 "movdiri\t{$src, $dst|$dst, $src}", 2860 [(int_x86_directstore64 addr:$dst, GR64:$src)]>, 2861 T8PS, Requires<[In64BitMode, HasMOVDIRI]>; 2862} // SchedRW 2863 2864//===----------------------------------------------------------------------===// 2865// MOVDIR64B - Move 64 bytes as direct store 2866// 2867let SchedRW = [WriteStore] in { 2868def MOVDIR64B16 : I<0xF8, MRMSrcMem, (outs), (ins GR16:$dst, i512mem:$src), 2869 "movdir64b\t{$src, $dst|$dst, $src}", []>, 2870 T8PD, AdSize16, Requires<[HasMOVDIR64B, Not64BitMode]>; 2871def MOVDIR64B32 : I<0xF8, MRMSrcMem, (outs), (ins GR32:$dst, i512mem:$src), 2872 "movdir64b\t{$src, $dst|$dst, $src}", 2873 [(int_x86_movdir64b GR32:$dst, addr:$src)]>, 2874 T8PD, AdSize32, Requires<[HasMOVDIR64B]>; 2875def MOVDIR64B64 : I<0xF8, MRMSrcMem, (outs), (ins GR64:$dst, i512mem:$src), 2876 "movdir64b\t{$src, $dst|$dst, $src}", 2877 [(int_x86_movdir64b GR64:$dst, addr:$src)]>, 2878 T8PD, AdSize64, Requires<[HasMOVDIR64B, In64BitMode]>; 2879} // SchedRW 2880 2881//===----------------------------------------------------------------------===// 2882// ENQCMD/S - Enqueue 64-byte command as user with 64-byte write atomicity 2883// 2884let SchedRW = [WriteStore], Defs = [EFLAGS] in { 2885 def ENQCMD16 : I<0xF8, MRMSrcMem, (outs), (ins GR16:$dst, i512mem:$src), 2886 "enqcmd\t{$src, $dst|$dst, $src}", 2887 [(set EFLAGS, (X86enqcmd GR16:$dst, addr:$src))]>, 2888 T8XD, AdSize16, Requires<[HasENQCMD, Not64BitMode]>; 2889 def ENQCMD32 : I<0xF8, MRMSrcMem, (outs), (ins GR32:$dst, i512mem:$src), 2890 "enqcmd\t{$src, $dst|$dst, $src}", 2891 [(set EFLAGS, (X86enqcmd GR32:$dst, addr:$src))]>, 2892 T8XD, AdSize32, Requires<[HasENQCMD]>; 2893 def ENQCMD64 : I<0xF8, MRMSrcMem, (outs), (ins GR64:$dst, i512mem:$src), 2894 "enqcmd\t{$src, $dst|$dst, $src}", 2895 [(set EFLAGS, (X86enqcmd GR64:$dst, addr:$src))]>, 2896 T8XD, AdSize64, Requires<[HasENQCMD, In64BitMode]>; 2897 2898 def ENQCMDS16 : I<0xF8, MRMSrcMem, (outs), (ins GR16:$dst, i512mem:$src), 2899 "enqcmds\t{$src, $dst|$dst, $src}", 2900 [(set EFLAGS, (X86enqcmds GR16:$dst, addr:$src))]>, 2901 T8XS, AdSize16, Requires<[HasENQCMD, Not64BitMode]>; 2902 def ENQCMDS32 : I<0xF8, MRMSrcMem, (outs), (ins GR32:$dst, i512mem:$src), 2903 "enqcmds\t{$src, $dst|$dst, $src}", 2904 [(set EFLAGS, (X86enqcmds GR32:$dst, addr:$src))]>, 2905 T8XS, AdSize32, Requires<[HasENQCMD]>; 2906 def ENQCMDS64 : I<0xF8, MRMSrcMem, (outs), (ins GR64:$dst, i512mem:$src), 2907 "enqcmds\t{$src, $dst|$dst, $src}", 2908 [(set EFLAGS, (X86enqcmds GR64:$dst, addr:$src))]>, 2909 T8XS, AdSize64, Requires<[HasENQCMD, In64BitMode]>; 2910} 2911 2912//===----------------------------------------------------------------------===// 2913// CLZERO Instruction 2914// 2915let SchedRW = [WriteLoad] in { 2916 let Uses = [EAX] in 2917 def CLZERO32r : I<0x01, MRM_FC, (outs), (ins), "clzero", []>, 2918 TB, Requires<[HasCLZERO, Not64BitMode]>; 2919 let Uses = [RAX] in 2920 def CLZERO64r : I<0x01, MRM_FC, (outs), (ins), "clzero", []>, 2921 TB, Requires<[HasCLZERO, In64BitMode]>; 2922} // SchedRW 2923 2924def : InstAlias<"clzero\t{%eax|eax}", (CLZERO32r)>, Requires<[Not64BitMode]>; 2925def : InstAlias<"clzero\t{%rax|rax}", (CLZERO64r)>, Requires<[In64BitMode]>; 2926 2927//===----------------------------------------------------------------------===// 2928// INVLPGB Instruction 2929// OPCODE 0F 01 FE 2930// 2931let SchedRW = [WriteSystem] in { 2932 let Uses = [EAX, EDX] in 2933 def INVLPGB32 : I<0x01, MRM_FE, (outs), (ins), 2934 "invlpgb}", []>, 2935 PS, Requires<[Not64BitMode]>; 2936 let Uses = [RAX, EDX] in 2937 def INVLPGB64 : I<0x01, MRM_FE, (outs), (ins), 2938 "invlpgb", []>, 2939 PS, Requires<[In64BitMode]>; 2940} // SchedRW 2941 2942def : InstAlias<"invlpgb\t{%eax, %edx|eax, edx}", (INVLPGB32)>, Requires<[Not64BitMode]>; 2943def : InstAlias<"invlpgb\t{%rax, %edx|rax, edx}", (INVLPGB64)>, Requires<[In64BitMode]>; 2944 2945//===----------------------------------------------------------------------===// 2946// TLBSYNC Instruction 2947// OPCODE 0F 01 FF 2948// 2949let SchedRW = [WriteSystem] in { 2950 def TLBSYNC : I<0x01, MRM_FF, (outs), (ins), 2951 "tlbsync", []>, 2952 PS, Requires<[]>; 2953} // SchedRW 2954 2955//===----------------------------------------------------------------------===// 2956// HRESET Instruction 2957// 2958let Uses = [EAX], SchedRW = [WriteSystem] in 2959 def HRESET : Ii8<0xF0, MRM_C0, (outs), (ins i32u8imm:$imm), "hreset\t$imm", []>, 2960 Requires<[HasHRESET]>, TAXS; 2961 2962//===----------------------------------------------------------------------===// 2963// SERIALIZE Instruction 2964// 2965def SERIALIZE : I<0x01, MRM_E8, (outs), (ins), "serialize", 2966 [(int_x86_serialize)]>, PS, 2967 Requires<[HasSERIALIZE]>; 2968 2969//===----------------------------------------------------------------------===// 2970// TSXLDTRK - TSX Suspend Load Address Tracking 2971// 2972let Predicates = [HasTSXLDTRK] in { 2973 def XSUSLDTRK : I<0x01, MRM_E8, (outs), (ins), "xsusldtrk", 2974 [(int_x86_xsusldtrk)]>, XD; 2975 def XRESLDTRK : I<0x01, MRM_E9, (outs), (ins), "xresldtrk", 2976 [(int_x86_xresldtrk)]>, XD; 2977} 2978 2979//===----------------------------------------------------------------------===// 2980// UINTR Instructions 2981// 2982let Predicates = [HasUINTR, In64BitMode] in { 2983 def UIRET : I<0x01, MRM_EC, (outs), (ins), "uiret", 2984 []>, XS; 2985 def CLUI : I<0x01, MRM_EE, (outs), (ins), "clui", 2986 [(int_x86_clui)]>, XS; 2987 def STUI : I<0x01, MRM_EF, (outs), (ins), "stui", 2988 [(int_x86_stui)]>, XS; 2989 2990 def SENDUIPI : I<0xC7, MRM6r, (outs), (ins GR64:$arg), "senduipi\t$arg", 2991 [(int_x86_senduipi GR64:$arg)]>, XS; 2992 2993 let Defs = [EFLAGS] in 2994 def TESTUI : I<0x01, MRM_ED, (outs), (ins), "testui", 2995 [(set EFLAGS, (X86testui))]>, XS; 2996} 2997 2998//===----------------------------------------------------------------------===// 2999// Pattern fragments to auto generate TBM instructions. 3000//===----------------------------------------------------------------------===// 3001 3002let Predicates = [HasTBM] in { 3003 // FIXME: patterns for the load versions are not implemented 3004 def : Pat<(and GR32:$src, (add GR32:$src, 1)), 3005 (BLCFILL32rr GR32:$src)>; 3006 def : Pat<(and GR64:$src, (add GR64:$src, 1)), 3007 (BLCFILL64rr GR64:$src)>; 3008 3009 def : Pat<(or GR32:$src, (not (add GR32:$src, 1))), 3010 (BLCI32rr GR32:$src)>; 3011 def : Pat<(or GR64:$src, (not (add GR64:$src, 1))), 3012 (BLCI64rr GR64:$src)>; 3013 3014 // Extra patterns because opt can optimize the above patterns to this. 3015 def : Pat<(or GR32:$src, (sub -2, GR32:$src)), 3016 (BLCI32rr GR32:$src)>; 3017 def : Pat<(or GR64:$src, (sub -2, GR64:$src)), 3018 (BLCI64rr GR64:$src)>; 3019 3020 def : Pat<(and (not GR32:$src), (add GR32:$src, 1)), 3021 (BLCIC32rr GR32:$src)>; 3022 def : Pat<(and (not GR64:$src), (add GR64:$src, 1)), 3023 (BLCIC64rr GR64:$src)>; 3024 3025 def : Pat<(xor GR32:$src, (add GR32:$src, 1)), 3026 (BLCMSK32rr GR32:$src)>; 3027 def : Pat<(xor GR64:$src, (add GR64:$src, 1)), 3028 (BLCMSK64rr GR64:$src)>; 3029 3030 def : Pat<(or GR32:$src, (add GR32:$src, 1)), 3031 (BLCS32rr GR32:$src)>; 3032 def : Pat<(or GR64:$src, (add GR64:$src, 1)), 3033 (BLCS64rr GR64:$src)>; 3034 3035 def : Pat<(or GR32:$src, (add GR32:$src, -1)), 3036 (BLSFILL32rr GR32:$src)>; 3037 def : Pat<(or GR64:$src, (add GR64:$src, -1)), 3038 (BLSFILL64rr GR64:$src)>; 3039 3040 def : Pat<(or (not GR32:$src), (add GR32:$src, -1)), 3041 (BLSIC32rr GR32:$src)>; 3042 def : Pat<(or (not GR64:$src), (add GR64:$src, -1)), 3043 (BLSIC64rr GR64:$src)>; 3044 3045 def : Pat<(or (not GR32:$src), (add GR32:$src, 1)), 3046 (T1MSKC32rr GR32:$src)>; 3047 def : Pat<(or (not GR64:$src), (add GR64:$src, 1)), 3048 (T1MSKC64rr GR64:$src)>; 3049 3050 def : Pat<(and (not GR32:$src), (add GR32:$src, -1)), 3051 (TZMSK32rr GR32:$src)>; 3052 def : Pat<(and (not GR64:$src), (add GR64:$src, -1)), 3053 (TZMSK64rr GR64:$src)>; 3054 3055 // Patterns to match flag producing ops. 3056 def : Pat<(and_flag_nocf GR32:$src, (add GR32:$src, 1)), 3057 (BLCFILL32rr GR32:$src)>; 3058 def : Pat<(and_flag_nocf GR64:$src, (add GR64:$src, 1)), 3059 (BLCFILL64rr GR64:$src)>; 3060 3061 def : Pat<(or_flag_nocf GR32:$src, (not (add GR32:$src, 1))), 3062 (BLCI32rr GR32:$src)>; 3063 def : Pat<(or_flag_nocf GR64:$src, (not (add GR64:$src, 1))), 3064 (BLCI64rr GR64:$src)>; 3065 3066 // Extra patterns because opt can optimize the above patterns to this. 3067 def : Pat<(or_flag_nocf GR32:$src, (sub -2, GR32:$src)), 3068 (BLCI32rr GR32:$src)>; 3069 def : Pat<(or_flag_nocf GR64:$src, (sub -2, GR64:$src)), 3070 (BLCI64rr GR64:$src)>; 3071 3072 def : Pat<(and_flag_nocf (not GR32:$src), (add GR32:$src, 1)), 3073 (BLCIC32rr GR32:$src)>; 3074 def : Pat<(and_flag_nocf (not GR64:$src), (add GR64:$src, 1)), 3075 (BLCIC64rr GR64:$src)>; 3076 3077 def : Pat<(xor_flag_nocf GR32:$src, (add GR32:$src, 1)), 3078 (BLCMSK32rr GR32:$src)>; 3079 def : Pat<(xor_flag_nocf GR64:$src, (add GR64:$src, 1)), 3080 (BLCMSK64rr GR64:$src)>; 3081 3082 def : Pat<(or_flag_nocf GR32:$src, (add GR32:$src, 1)), 3083 (BLCS32rr GR32:$src)>; 3084 def : Pat<(or_flag_nocf GR64:$src, (add GR64:$src, 1)), 3085 (BLCS64rr GR64:$src)>; 3086 3087 def : Pat<(or_flag_nocf GR32:$src, (add GR32:$src, -1)), 3088 (BLSFILL32rr GR32:$src)>; 3089 def : Pat<(or_flag_nocf GR64:$src, (add GR64:$src, -1)), 3090 (BLSFILL64rr GR64:$src)>; 3091 3092 def : Pat<(or_flag_nocf (not GR32:$src), (add GR32:$src, -1)), 3093 (BLSIC32rr GR32:$src)>; 3094 def : Pat<(or_flag_nocf (not GR64:$src), (add GR64:$src, -1)), 3095 (BLSIC64rr GR64:$src)>; 3096 3097 def : Pat<(or_flag_nocf (not GR32:$src), (add GR32:$src, 1)), 3098 (T1MSKC32rr GR32:$src)>; 3099 def : Pat<(or_flag_nocf (not GR64:$src), (add GR64:$src, 1)), 3100 (T1MSKC64rr GR64:$src)>; 3101 3102 def : Pat<(and_flag_nocf (not GR32:$src), (add GR32:$src, -1)), 3103 (TZMSK32rr GR32:$src)>; 3104 def : Pat<(and_flag_nocf (not GR64:$src), (add GR64:$src, -1)), 3105 (TZMSK64rr GR64:$src)>; 3106} // HasTBM 3107 3108//===----------------------------------------------------------------------===// 3109// Memory Instructions 3110// 3111 3112let Predicates = [HasCLFLUSHOPT], SchedRW = [WriteLoad] in 3113def CLFLUSHOPT : I<0xAE, MRM7m, (outs), (ins i8mem:$src), 3114 "clflushopt\t$src", [(int_x86_clflushopt addr:$src)]>, PD; 3115 3116let Predicates = [HasCLWB], SchedRW = [WriteLoad] in 3117def CLWB : I<0xAE, MRM6m, (outs), (ins i8mem:$src), "clwb\t$src", 3118 [(int_x86_clwb addr:$src)]>, PD, NotMemoryFoldable; 3119 3120let Predicates = [HasCLDEMOTE], SchedRW = [WriteLoad] in 3121def CLDEMOTE : I<0x1C, MRM0m, (outs), (ins i8mem:$src), "cldemote\t$src", 3122 [(int_x86_cldemote addr:$src)]>, PS; 3123 3124//===----------------------------------------------------------------------===// 3125// Subsystems. 3126//===----------------------------------------------------------------------===// 3127 3128include "X86InstrArithmetic.td" 3129include "X86InstrCMovSetCC.td" 3130include "X86InstrExtension.td" 3131include "X86InstrControl.td" 3132include "X86InstrShiftRotate.td" 3133 3134// X87 Floating Point Stack. 3135include "X86InstrFPStack.td" 3136 3137// SIMD support (SSE, MMX and AVX) 3138include "X86InstrFragmentsSIMD.td" 3139 3140// FMA - Fused Multiply-Add support (requires FMA) 3141include "X86InstrFMA.td" 3142 3143// XOP 3144include "X86InstrXOP.td" 3145 3146// SSE, MMX and 3DNow! vector support. 3147include "X86InstrSSE.td" 3148include "X86InstrAVX512.td" 3149include "X86InstrMMX.td" 3150include "X86Instr3DNow.td" 3151 3152// MPX instructions 3153include "X86InstrMPX.td" 3154 3155include "X86InstrVMX.td" 3156include "X86InstrSVM.td" 3157include "X86InstrSNP.td" 3158 3159include "X86InstrTSX.td" 3160include "X86InstrSGX.td" 3161 3162include "X86InstrTDX.td" 3163 3164// Key Locker instructions 3165include "X86InstrKL.td" 3166 3167// AMX instructions 3168include "X86InstrAMX.td" 3169 3170// System instructions. 3171include "X86InstrSystem.td" 3172 3173// Compiler Pseudo Instructions and Pat Patterns 3174include "X86InstrCompiler.td" 3175include "X86InstrVecCompiler.td" 3176 3177//===----------------------------------------------------------------------===// 3178// Assembler Mnemonic Aliases 3179//===----------------------------------------------------------------------===// 3180 3181def : MnemonicAlias<"call", "callw", "att">, Requires<[In16BitMode]>; 3182def : MnemonicAlias<"call", "calll", "att">, Requires<[In32BitMode]>; 3183def : MnemonicAlias<"call", "callq", "att">, Requires<[In64BitMode]>; 3184 3185def : MnemonicAlias<"cbw", "cbtw", "att">; 3186def : MnemonicAlias<"cwde", "cwtl", "att">; 3187def : MnemonicAlias<"cwd", "cwtd", "att">; 3188def : MnemonicAlias<"cdq", "cltd", "att">; 3189def : MnemonicAlias<"cdqe", "cltq", "att">; 3190def : MnemonicAlias<"cqo", "cqto", "att">; 3191 3192// In 64-bit mode lret maps to lretl; it is not ambiguous with lretq. 3193def : MnemonicAlias<"lret", "lretw", "att">, Requires<[In16BitMode]>; 3194def : MnemonicAlias<"lret", "lretl", "att">, Requires<[Not16BitMode]>; 3195 3196def : MnemonicAlias<"leavel", "leave", "att">, Requires<[Not64BitMode]>; 3197def : MnemonicAlias<"leaveq", "leave", "att">, Requires<[In64BitMode]>; 3198 3199def : MnemonicAlias<"loopz", "loope">; 3200def : MnemonicAlias<"loopnz", "loopne">; 3201 3202def : MnemonicAlias<"pop", "popw", "att">, Requires<[In16BitMode]>; 3203def : MnemonicAlias<"pop", "popl", "att">, Requires<[In32BitMode]>; 3204def : MnemonicAlias<"pop", "popq", "att">, Requires<[In64BitMode]>; 3205def : MnemonicAlias<"popf", "popfw", "att">, Requires<[In16BitMode]>; 3206def : MnemonicAlias<"popf", "popfl", "att">, Requires<[In32BitMode]>; 3207def : MnemonicAlias<"popf", "popfq", "att">, Requires<[In64BitMode]>; 3208def : MnemonicAlias<"popf", "popfq", "intel">, Requires<[In64BitMode]>; 3209def : MnemonicAlias<"popfd", "popfl", "att">; 3210def : MnemonicAlias<"popfw", "popf", "intel">, Requires<[In32BitMode]>; 3211def : MnemonicAlias<"popfw", "popf", "intel">, Requires<[In64BitMode]>; 3212 3213// FIXME: This is wrong for "push reg". "push %bx" should turn into pushw in 3214// all modes. However: "push (addr)" and "push $42" should default to 3215// pushl/pushq depending on the current mode. Similar for "pop %bx" 3216def : MnemonicAlias<"push", "pushw", "att">, Requires<[In16BitMode]>; 3217def : MnemonicAlias<"push", "pushl", "att">, Requires<[In32BitMode]>; 3218def : MnemonicAlias<"push", "pushq", "att">, Requires<[In64BitMode]>; 3219def : MnemonicAlias<"pushf", "pushfw", "att">, Requires<[In16BitMode]>; 3220def : MnemonicAlias<"pushf", "pushfl", "att">, Requires<[In32BitMode]>; 3221def : MnemonicAlias<"pushf", "pushfq", "att">, Requires<[In64BitMode]>; 3222def : MnemonicAlias<"pushf", "pushfq", "intel">, Requires<[In64BitMode]>; 3223def : MnemonicAlias<"pushfd", "pushfl", "att">; 3224def : MnemonicAlias<"pushfw", "pushf", "intel">, Requires<[In32BitMode]>; 3225def : MnemonicAlias<"pushfw", "pushf", "intel">, Requires<[In64BitMode]>; 3226 3227def : MnemonicAlias<"popad", "popal", "intel">, Requires<[Not64BitMode]>; 3228def : MnemonicAlias<"pushad", "pushal", "intel">, Requires<[Not64BitMode]>; 3229def : MnemonicAlias<"popa", "popaw", "intel">, Requires<[In16BitMode]>; 3230def : MnemonicAlias<"pusha", "pushaw", "intel">, Requires<[In16BitMode]>; 3231def : MnemonicAlias<"popa", "popal", "intel">, Requires<[In32BitMode]>; 3232def : MnemonicAlias<"pusha", "pushal", "intel">, Requires<[In32BitMode]>; 3233 3234def : MnemonicAlias<"popa", "popaw", "att">, Requires<[In16BitMode]>; 3235def : MnemonicAlias<"pusha", "pushaw", "att">, Requires<[In16BitMode]>; 3236def : MnemonicAlias<"popa", "popal", "att">, Requires<[In32BitMode]>; 3237def : MnemonicAlias<"pusha", "pushal", "att">, Requires<[In32BitMode]>; 3238 3239def : MnemonicAlias<"repe", "rep">; 3240def : MnemonicAlias<"repz", "rep">; 3241def : MnemonicAlias<"repnz", "repne">; 3242 3243def : MnemonicAlias<"ret", "retw", "att">, Requires<[In16BitMode]>; 3244def : MnemonicAlias<"ret", "retl", "att">, Requires<[In32BitMode]>; 3245def : MnemonicAlias<"ret", "retq", "att">, Requires<[In64BitMode]>; 3246 3247// Apply 'ret' behavior to 'retn' 3248def : MnemonicAlias<"retn", "retw", "att">, Requires<[In16BitMode]>; 3249def : MnemonicAlias<"retn", "retl", "att">, Requires<[In32BitMode]>; 3250def : MnemonicAlias<"retn", "retq", "att">, Requires<[In64BitMode]>; 3251def : MnemonicAlias<"retn", "ret", "intel">; 3252 3253def : MnemonicAlias<"sal", "shl", "intel">; 3254def : MnemonicAlias<"salb", "shlb", "att">; 3255def : MnemonicAlias<"salw", "shlw", "att">; 3256def : MnemonicAlias<"sall", "shll", "att">; 3257def : MnemonicAlias<"salq", "shlq", "att">; 3258 3259def : MnemonicAlias<"smovb", "movsb", "att">; 3260def : MnemonicAlias<"smovw", "movsw", "att">; 3261def : MnemonicAlias<"smovl", "movsl", "att">; 3262def : MnemonicAlias<"smovq", "movsq", "att">; 3263 3264def : MnemonicAlias<"ud2a", "ud2", "att">; 3265def : MnemonicAlias<"ud2bw", "ud1w", "att">; 3266def : MnemonicAlias<"ud2bl", "ud1l", "att">; 3267def : MnemonicAlias<"ud2bq", "ud1q", "att">; 3268def : MnemonicAlias<"verrw", "verr", "att">; 3269 3270// MS recognizes 'xacquire'/'xrelease' as 'acquire'/'release' 3271def : MnemonicAlias<"acquire", "xacquire", "intel">; 3272def : MnemonicAlias<"release", "xrelease", "intel">; 3273 3274// System instruction aliases. 3275def : MnemonicAlias<"iret", "iretw", "att">, Requires<[In16BitMode]>; 3276def : MnemonicAlias<"iret", "iretl", "att">, Requires<[Not16BitMode]>; 3277def : MnemonicAlias<"sysret", "sysretl", "att">; 3278def : MnemonicAlias<"sysexit", "sysexitl", "att">; 3279 3280def : MnemonicAlias<"lgdt", "lgdtw", "att">, Requires<[In16BitMode]>; 3281def : MnemonicAlias<"lgdt", "lgdtl", "att">, Requires<[In32BitMode]>; 3282def : MnemonicAlias<"lgdt", "lgdtq", "att">, Requires<[In64BitMode]>; 3283def : MnemonicAlias<"lidt", "lidtw", "att">, Requires<[In16BitMode]>; 3284def : MnemonicAlias<"lidt", "lidtl", "att">, Requires<[In32BitMode]>; 3285def : MnemonicAlias<"lidt", "lidtq", "att">, Requires<[In64BitMode]>; 3286def : MnemonicAlias<"sgdt", "sgdtw", "att">, Requires<[In16BitMode]>; 3287def : MnemonicAlias<"sgdt", "sgdtl", "att">, Requires<[In32BitMode]>; 3288def : MnemonicAlias<"sgdt", "sgdtq", "att">, Requires<[In64BitMode]>; 3289def : MnemonicAlias<"sidt", "sidtw", "att">, Requires<[In16BitMode]>; 3290def : MnemonicAlias<"sidt", "sidtl", "att">, Requires<[In32BitMode]>; 3291def : MnemonicAlias<"sidt", "sidtq", "att">, Requires<[In64BitMode]>; 3292def : MnemonicAlias<"lgdt", "lgdtw", "intel">, Requires<[In16BitMode]>; 3293def : MnemonicAlias<"lgdt", "lgdtd", "intel">, Requires<[In32BitMode]>; 3294def : MnemonicAlias<"lidt", "lidtw", "intel">, Requires<[In16BitMode]>; 3295def : MnemonicAlias<"lidt", "lidtd", "intel">, Requires<[In32BitMode]>; 3296def : MnemonicAlias<"sgdt", "sgdtw", "intel">, Requires<[In16BitMode]>; 3297def : MnemonicAlias<"sgdt", "sgdtd", "intel">, Requires<[In32BitMode]>; 3298def : MnemonicAlias<"sidt", "sidtw", "intel">, Requires<[In16BitMode]>; 3299def : MnemonicAlias<"sidt", "sidtd", "intel">, Requires<[In32BitMode]>; 3300 3301 3302// Floating point stack aliases. 3303def : MnemonicAlias<"fcmovz", "fcmove", "att">; 3304def : MnemonicAlias<"fcmova", "fcmovnbe", "att">; 3305def : MnemonicAlias<"fcmovnae", "fcmovb", "att">; 3306def : MnemonicAlias<"fcmovna", "fcmovbe", "att">; 3307def : MnemonicAlias<"fcmovae", "fcmovnb", "att">; 3308def : MnemonicAlias<"fcomip", "fcompi">; 3309def : MnemonicAlias<"fildq", "fildll", "att">; 3310def : MnemonicAlias<"fistpq", "fistpll", "att">; 3311def : MnemonicAlias<"fisttpq", "fisttpll", "att">; 3312def : MnemonicAlias<"fldcww", "fldcw", "att">; 3313def : MnemonicAlias<"fnstcww", "fnstcw", "att">; 3314def : MnemonicAlias<"fnstsww", "fnstsw", "att">; 3315def : MnemonicAlias<"fucomip", "fucompi">; 3316def : MnemonicAlias<"fwait", "wait">; 3317 3318def : MnemonicAlias<"fxsaveq", "fxsave64", "att">; 3319def : MnemonicAlias<"fxrstorq", "fxrstor64", "att">; 3320def : MnemonicAlias<"xsaveq", "xsave64", "att">; 3321def : MnemonicAlias<"xrstorq", "xrstor64", "att">; 3322def : MnemonicAlias<"xsaveoptq", "xsaveopt64", "att">; 3323def : MnemonicAlias<"xrstorsq", "xrstors64", "att">; 3324def : MnemonicAlias<"xsavecq", "xsavec64", "att">; 3325def : MnemonicAlias<"xsavesq", "xsaves64", "att">; 3326 3327class CondCodeAlias<string Prefix,string Suffix, string OldCond, string NewCond, 3328 string VariantName> 3329 : MnemonicAlias<!strconcat(Prefix, OldCond, Suffix), 3330 !strconcat(Prefix, NewCond, Suffix), VariantName>; 3331 3332/// IntegerCondCodeMnemonicAlias - This multiclass defines a bunch of 3333/// MnemonicAlias's that canonicalize the condition code in a mnemonic, for 3334/// example "setz" -> "sete". 3335multiclass IntegerCondCodeMnemonicAlias<string Prefix, string Suffix, 3336 string V = ""> { 3337 def C : CondCodeAlias<Prefix, Suffix, "c", "b", V>; // setc -> setb 3338 def Z : CondCodeAlias<Prefix, Suffix, "z" , "e", V>; // setz -> sete 3339 def NA : CondCodeAlias<Prefix, Suffix, "na", "be", V>; // setna -> setbe 3340 def NB : CondCodeAlias<Prefix, Suffix, "nb", "ae", V>; // setnb -> setae 3341 def NC : CondCodeAlias<Prefix, Suffix, "nc", "ae", V>; // setnc -> setae 3342 def NG : CondCodeAlias<Prefix, Suffix, "ng", "le", V>; // setng -> setle 3343 def NL : CondCodeAlias<Prefix, Suffix, "nl", "ge", V>; // setnl -> setge 3344 def NZ : CondCodeAlias<Prefix, Suffix, "nz", "ne", V>; // setnz -> setne 3345 def PE : CondCodeAlias<Prefix, Suffix, "pe", "p", V>; // setpe -> setp 3346 def PO : CondCodeAlias<Prefix, Suffix, "po", "np", V>; // setpo -> setnp 3347 3348 def NAE : CondCodeAlias<Prefix, Suffix, "nae", "b", V>; // setnae -> setb 3349 def NBE : CondCodeAlias<Prefix, Suffix, "nbe", "a", V>; // setnbe -> seta 3350 def NGE : CondCodeAlias<Prefix, Suffix, "nge", "l", V>; // setnge -> setl 3351 def NLE : CondCodeAlias<Prefix, Suffix, "nle", "g", V>; // setnle -> setg 3352} 3353 3354// Aliases for set<CC> 3355defm : IntegerCondCodeMnemonicAlias<"set", "">; 3356// Aliases for j<CC> 3357defm : IntegerCondCodeMnemonicAlias<"j", "">; 3358// Aliases for cmov<CC>{w,l,q} 3359defm : IntegerCondCodeMnemonicAlias<"cmov", "w", "att">; 3360defm : IntegerCondCodeMnemonicAlias<"cmov", "l", "att">; 3361defm : IntegerCondCodeMnemonicAlias<"cmov", "q", "att">; 3362// No size suffix for intel-style asm. 3363defm : IntegerCondCodeMnemonicAlias<"cmov", "", "intel">; 3364 3365 3366//===----------------------------------------------------------------------===// 3367// Assembler Instruction Aliases 3368//===----------------------------------------------------------------------===// 3369 3370// aad/aam default to base 10 if no operand is specified. 3371def : InstAlias<"aad", (AAD8i8 10)>, Requires<[Not64BitMode]>; 3372def : InstAlias<"aam", (AAM8i8 10)>, Requires<[Not64BitMode]>; 3373 3374// Disambiguate the mem/imm form of bt-without-a-suffix as btl. 3375// Likewise for btc/btr/bts. 3376def : InstAlias<"bt\t{$imm, $mem|$mem, $imm}", 3377 (BT32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">; 3378def : InstAlias<"btc\t{$imm, $mem|$mem, $imm}", 3379 (BTC32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">; 3380def : InstAlias<"btr\t{$imm, $mem|$mem, $imm}", 3381 (BTR32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">; 3382def : InstAlias<"bts\t{$imm, $mem|$mem, $imm}", 3383 (BTS32mi8 i32mem:$mem, i32u8imm:$imm), 0, "att">; 3384 3385// clr aliases. 3386def : InstAlias<"clr{b}\t$reg", (XOR8rr GR8 :$reg, GR8 :$reg), 0>; 3387def : InstAlias<"clr{w}\t$reg", (XOR16rr GR16:$reg, GR16:$reg), 0>; 3388def : InstAlias<"clr{l}\t$reg", (XOR32rr GR32:$reg, GR32:$reg), 0>; 3389def : InstAlias<"clr{q}\t$reg", (XOR64rr GR64:$reg, GR64:$reg), 0>; 3390 3391// lods aliases. Accept the destination being omitted because it's implicit 3392// in the mnemonic, or the mnemonic suffix being omitted because it's implicit 3393// in the destination. 3394def : InstAlias<"lodsb\t$src", (LODSB srcidx8:$src), 0>; 3395def : InstAlias<"lodsw\t$src", (LODSW srcidx16:$src), 0>; 3396def : InstAlias<"lods{l|d}\t$src", (LODSL srcidx32:$src), 0>; 3397def : InstAlias<"lodsq\t$src", (LODSQ srcidx64:$src), 0>, Requires<[In64BitMode]>; 3398def : InstAlias<"lods\t{$src, %al|al, $src}", (LODSB srcidx8:$src), 0>; 3399def : InstAlias<"lods\t{$src, %ax|ax, $src}", (LODSW srcidx16:$src), 0>; 3400def : InstAlias<"lods\t{$src, %eax|eax, $src}", (LODSL srcidx32:$src), 0>; 3401def : InstAlias<"lods\t{$src, %rax|rax, $src}", (LODSQ srcidx64:$src), 0>, Requires<[In64BitMode]>; 3402def : InstAlias<"lods\t$src", (LODSB srcidx8:$src), 0, "intel">; 3403def : InstAlias<"lods\t$src", (LODSW srcidx16:$src), 0, "intel">; 3404def : InstAlias<"lods\t$src", (LODSL srcidx32:$src), 0, "intel">; 3405def : InstAlias<"lods\t$src", (LODSQ srcidx64:$src), 0, "intel">, Requires<[In64BitMode]>; 3406 3407 3408// stos aliases. Accept the source being omitted because it's implicit in 3409// the mnemonic, or the mnemonic suffix being omitted because it's implicit 3410// in the source. 3411def : InstAlias<"stosb\t$dst", (STOSB dstidx8:$dst), 0>; 3412def : InstAlias<"stosw\t$dst", (STOSW dstidx16:$dst), 0>; 3413def : InstAlias<"stos{l|d}\t$dst", (STOSL dstidx32:$dst), 0>; 3414def : InstAlias<"stosq\t$dst", (STOSQ dstidx64:$dst), 0>, Requires<[In64BitMode]>; 3415def : InstAlias<"stos\t{%al, $dst|$dst, al}", (STOSB dstidx8:$dst), 0>; 3416def : InstAlias<"stos\t{%ax, $dst|$dst, ax}", (STOSW dstidx16:$dst), 0>; 3417def : InstAlias<"stos\t{%eax, $dst|$dst, eax}", (STOSL dstidx32:$dst), 0>; 3418def : InstAlias<"stos\t{%rax, $dst|$dst, rax}", (STOSQ dstidx64:$dst), 0>, Requires<[In64BitMode]>; 3419def : InstAlias<"stos\t$dst", (STOSB dstidx8:$dst), 0, "intel">; 3420def : InstAlias<"stos\t$dst", (STOSW dstidx16:$dst), 0, "intel">; 3421def : InstAlias<"stos\t$dst", (STOSL dstidx32:$dst), 0, "intel">; 3422def : InstAlias<"stos\t$dst", (STOSQ dstidx64:$dst), 0, "intel">, Requires<[In64BitMode]>; 3423 3424 3425// scas aliases. Accept the destination being omitted because it's implicit 3426// in the mnemonic, or the mnemonic suffix being omitted because it's implicit 3427// in the destination. 3428def : InstAlias<"scasb\t$dst", (SCASB dstidx8:$dst), 0>; 3429def : InstAlias<"scasw\t$dst", (SCASW dstidx16:$dst), 0>; 3430def : InstAlias<"scas{l|d}\t$dst", (SCASL dstidx32:$dst), 0>; 3431def : InstAlias<"scasq\t$dst", (SCASQ dstidx64:$dst), 0>, Requires<[In64BitMode]>; 3432def : InstAlias<"scas\t{$dst, %al|al, $dst}", (SCASB dstidx8:$dst), 0>; 3433def : InstAlias<"scas\t{$dst, %ax|ax, $dst}", (SCASW dstidx16:$dst), 0>; 3434def : InstAlias<"scas\t{$dst, %eax|eax, $dst}", (SCASL dstidx32:$dst), 0>; 3435def : InstAlias<"scas\t{$dst, %rax|rax, $dst}", (SCASQ dstidx64:$dst), 0>, Requires<[In64BitMode]>; 3436def : InstAlias<"scas\t$dst", (SCASB dstidx8:$dst), 0, "intel">; 3437def : InstAlias<"scas\t$dst", (SCASW dstidx16:$dst), 0, "intel">; 3438def : InstAlias<"scas\t$dst", (SCASL dstidx32:$dst), 0, "intel">; 3439def : InstAlias<"scas\t$dst", (SCASQ dstidx64:$dst), 0, "intel">, Requires<[In64BitMode]>; 3440 3441// cmps aliases. Mnemonic suffix being omitted because it's implicit 3442// in the destination. 3443def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSB dstidx8:$dst, srcidx8:$src), 0, "intel">; 3444def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSW dstidx16:$dst, srcidx16:$src), 0, "intel">; 3445def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSL dstidx32:$dst, srcidx32:$src), 0, "intel">; 3446def : InstAlias<"cmps\t{$dst, $src|$src, $dst}", (CMPSQ dstidx64:$dst, srcidx64:$src), 0, "intel">, Requires<[In64BitMode]>; 3447 3448// movs aliases. Mnemonic suffix being omitted because it's implicit 3449// in the destination. 3450def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSB dstidx8:$dst, srcidx8:$src), 0, "intel">; 3451def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSW dstidx16:$dst, srcidx16:$src), 0, "intel">; 3452def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSL dstidx32:$dst, srcidx32:$src), 0, "intel">; 3453def : InstAlias<"movs\t{$src, $dst|$dst, $src}", (MOVSQ dstidx64:$dst, srcidx64:$src), 0, "intel">, Requires<[In64BitMode]>; 3454 3455// div and idiv aliases for explicit A register. 3456def : InstAlias<"div{b}\t{$src, %al|al, $src}", (DIV8r GR8 :$src)>; 3457def : InstAlias<"div{w}\t{$src, %ax|ax, $src}", (DIV16r GR16:$src)>; 3458def : InstAlias<"div{l}\t{$src, %eax|eax, $src}", (DIV32r GR32:$src)>; 3459def : InstAlias<"div{q}\t{$src, %rax|rax, $src}", (DIV64r GR64:$src)>; 3460def : InstAlias<"div{b}\t{$src, %al|al, $src}", (DIV8m i8mem :$src)>; 3461def : InstAlias<"div{w}\t{$src, %ax|ax, $src}", (DIV16m i16mem:$src)>; 3462def : InstAlias<"div{l}\t{$src, %eax|eax, $src}", (DIV32m i32mem:$src)>; 3463def : InstAlias<"div{q}\t{$src, %rax|rax, $src}", (DIV64m i64mem:$src)>; 3464def : InstAlias<"idiv{b}\t{$src, %al|al, $src}", (IDIV8r GR8 :$src)>; 3465def : InstAlias<"idiv{w}\t{$src, %ax|ax, $src}", (IDIV16r GR16:$src)>; 3466def : InstAlias<"idiv{l}\t{$src, %eax|eax, $src}", (IDIV32r GR32:$src)>; 3467def : InstAlias<"idiv{q}\t{$src, %rax|rax, $src}", (IDIV64r GR64:$src)>; 3468def : InstAlias<"idiv{b}\t{$src, %al|al, $src}", (IDIV8m i8mem :$src)>; 3469def : InstAlias<"idiv{w}\t{$src, %ax|ax, $src}", (IDIV16m i16mem:$src)>; 3470def : InstAlias<"idiv{l}\t{$src, %eax|eax, $src}", (IDIV32m i32mem:$src)>; 3471def : InstAlias<"idiv{q}\t{$src, %rax|rax, $src}", (IDIV64m i64mem:$src)>; 3472 3473 3474 3475// Various unary fpstack operations default to operating on ST1. 3476// For example, "fxch" -> "fxch %st(1)" 3477def : InstAlias<"faddp", (ADD_FPrST0 ST1), 0>; 3478def: InstAlias<"fadd", (ADD_FPrST0 ST1), 0>; 3479def : InstAlias<"fsub{|r}p", (SUBR_FPrST0 ST1), 0>; 3480def : InstAlias<"fsub{r|}p", (SUB_FPrST0 ST1), 0>; 3481def : InstAlias<"fmul", (MUL_FPrST0 ST1), 0>; 3482def : InstAlias<"fmulp", (MUL_FPrST0 ST1), 0>; 3483def : InstAlias<"fdiv{|r}p", (DIVR_FPrST0 ST1), 0>; 3484def : InstAlias<"fdiv{r|}p", (DIV_FPrST0 ST1), 0>; 3485def : InstAlias<"fxch", (XCH_F ST1), 0>; 3486def : InstAlias<"fcom", (COM_FST0r ST1), 0>; 3487def : InstAlias<"fcomp", (COMP_FST0r ST1), 0>; 3488def : InstAlias<"fcomi", (COM_FIr ST1), 0>; 3489def : InstAlias<"fcompi", (COM_FIPr ST1), 0>; 3490def : InstAlias<"fucom", (UCOM_Fr ST1), 0>; 3491def : InstAlias<"fucomp", (UCOM_FPr ST1), 0>; 3492def : InstAlias<"fucomi", (UCOM_FIr ST1), 0>; 3493def : InstAlias<"fucompi", (UCOM_FIPr ST1), 0>; 3494 3495// Handle fmul/fadd/fsub/fdiv instructions with explicitly written st(0) op. 3496// For example, "fadd %st(4), %st(0)" -> "fadd %st(4)". We also disambiguate 3497// instructions like "fadd %st(0), %st(0)" as "fadd %st(0)" for consistency with 3498// gas. 3499multiclass FpUnaryAlias<string Mnemonic, Instruction Inst, bit EmitAlias = 1> { 3500 def : InstAlias<!strconcat(Mnemonic, "\t$op"), 3501 (Inst RSTi:$op), EmitAlias>; 3502 def : InstAlias<!strconcat(Mnemonic, "\t{%st, %st|st, st}"), 3503 (Inst ST0), EmitAlias>; 3504} 3505 3506defm : FpUnaryAlias<"fadd", ADD_FST0r, 0>; 3507defm : FpUnaryAlias<"faddp", ADD_FPrST0, 0>; 3508defm : FpUnaryAlias<"fsub", SUB_FST0r, 0>; 3509defm : FpUnaryAlias<"fsub{|r}p", SUBR_FPrST0, 0>; 3510defm : FpUnaryAlias<"fsubr", SUBR_FST0r, 0>; 3511defm : FpUnaryAlias<"fsub{r|}p", SUB_FPrST0, 0>; 3512defm : FpUnaryAlias<"fmul", MUL_FST0r, 0>; 3513defm : FpUnaryAlias<"fmulp", MUL_FPrST0, 0>; 3514defm : FpUnaryAlias<"fdiv", DIV_FST0r, 0>; 3515defm : FpUnaryAlias<"fdiv{|r}p", DIVR_FPrST0, 0>; 3516defm : FpUnaryAlias<"fdivr", DIVR_FST0r, 0>; 3517defm : FpUnaryAlias<"fdiv{r|}p", DIV_FPrST0, 0>; 3518defm : FpUnaryAlias<"fcomi", COM_FIr, 0>; 3519defm : FpUnaryAlias<"fucomi", UCOM_FIr, 0>; 3520defm : FpUnaryAlias<"fcompi", COM_FIPr, 0>; 3521defm : FpUnaryAlias<"fucompi", UCOM_FIPr, 0>; 3522 3523 3524// Handle "f{mulp,addp} $op, %st(0)" the same as "f{mulp,addp} $op", since they 3525// commute. We also allow fdiv[r]p/fsubrp even though they don't commute, 3526// solely because gas supports it. 3527def : InstAlias<"faddp\t{$op, %st|st, $op}", (ADD_FPrST0 RSTi:$op), 0>; 3528def : InstAlias<"fmulp\t{$op, %st|st, $op}", (MUL_FPrST0 RSTi:$op), 0>; 3529def : InstAlias<"fsub{|r}p\t{$op, %st|st, $op}", (SUBR_FPrST0 RSTi:$op), 0>; 3530def : InstAlias<"fsub{r|}p\t{$op, %st|st, $op}", (SUB_FPrST0 RSTi:$op), 0>; 3531def : InstAlias<"fdiv{|r}p\t{$op, %st|st, $op}", (DIVR_FPrST0 RSTi:$op), 0>; 3532def : InstAlias<"fdiv{r|}p\t{$op, %st|st, $op}", (DIV_FPrST0 RSTi:$op), 0>; 3533 3534def : InstAlias<"fnstsw" , (FNSTSW16r), 0>; 3535 3536// lcall and ljmp aliases. This seems to be an odd mapping in 64-bit mode, but 3537// this is compatible with what GAS does. 3538def : InstAlias<"lcall\t$seg, $off", (FARCALL32i i32imm:$off, i16imm:$seg), 0>, Requires<[In32BitMode]>; 3539def : InstAlias<"ljmp\t$seg, $off", (FARJMP32i i32imm:$off, i16imm:$seg), 0>, Requires<[In32BitMode]>; 3540def : InstAlias<"lcall\t{*}$dst", (FARCALL32m opaquemem:$dst), 0>, Requires<[Not16BitMode]>; 3541def : InstAlias<"ljmp\t{*}$dst", (FARJMP32m opaquemem:$dst), 0>, Requires<[Not16BitMode]>; 3542def : InstAlias<"lcall\t$seg, $off", (FARCALL16i i16imm:$off, i16imm:$seg), 0>, Requires<[In16BitMode]>; 3543def : InstAlias<"ljmp\t$seg, $off", (FARJMP16i i16imm:$off, i16imm:$seg), 0>, Requires<[In16BitMode]>; 3544def : InstAlias<"lcall\t{*}$dst", (FARCALL16m opaquemem:$dst), 0>, Requires<[In16BitMode]>; 3545def : InstAlias<"ljmp\t{*}$dst", (FARJMP16m opaquemem:$dst), 0>, Requires<[In16BitMode]>; 3546 3547def : InstAlias<"jmp\t{*}$dst", (JMP64m i64mem:$dst), 0, "att">, Requires<[In64BitMode]>; 3548def : InstAlias<"jmp\t{*}$dst", (JMP32m i32mem:$dst), 0, "att">, Requires<[In32BitMode]>; 3549def : InstAlias<"jmp\t{*}$dst", (JMP16m i16mem:$dst), 0, "att">, Requires<[In16BitMode]>; 3550 3551 3552// "imul <imm>, B" is an alias for "imul <imm>, B, B". 3553def : InstAlias<"imul{w}\t{$imm, $r|$r, $imm}", (IMUL16rri GR16:$r, GR16:$r, i16imm:$imm), 0>; 3554def : InstAlias<"imul{w}\t{$imm, $r|$r, $imm}", (IMUL16rri8 GR16:$r, GR16:$r, i16i8imm:$imm), 0>; 3555def : InstAlias<"imul{l}\t{$imm, $r|$r, $imm}", (IMUL32rri GR32:$r, GR32:$r, i32imm:$imm), 0>; 3556def : InstAlias<"imul{l}\t{$imm, $r|$r, $imm}", (IMUL32rri8 GR32:$r, GR32:$r, i32i8imm:$imm), 0>; 3557def : InstAlias<"imul{q}\t{$imm, $r|$r, $imm}", (IMUL64rri32 GR64:$r, GR64:$r, i64i32imm:$imm), 0>; 3558def : InstAlias<"imul{q}\t{$imm, $r|$r, $imm}", (IMUL64rri8 GR64:$r, GR64:$r, i64i8imm:$imm), 0>; 3559 3560// ins aliases. Accept the mnemonic suffix being omitted because it's implicit 3561// in the destination. 3562def : InstAlias<"ins\t{%dx, $dst|$dst, dx}", (INSB dstidx8:$dst), 0, "intel">; 3563def : InstAlias<"ins\t{%dx, $dst|$dst, dx}", (INSW dstidx16:$dst), 0, "intel">; 3564def : InstAlias<"ins\t{%dx, $dst|$dst, dx}", (INSL dstidx32:$dst), 0, "intel">; 3565 3566// outs aliases. Accept the mnemonic suffix being omitted because it's implicit 3567// in the source. 3568def : InstAlias<"outs\t{$src, %dx|dx, $src}", (OUTSB srcidx8:$src), 0, "intel">; 3569def : InstAlias<"outs\t{$src, %dx|dx, $src}", (OUTSW srcidx16:$src), 0, "intel">; 3570def : InstAlias<"outs\t{$src, %dx|dx, $src}", (OUTSL srcidx32:$src), 0, "intel">; 3571 3572// inb %dx -> inb %al, %dx 3573def : InstAlias<"inb\t{%dx|dx}", (IN8rr), 0>; 3574def : InstAlias<"inw\t{%dx|dx}", (IN16rr), 0>; 3575def : InstAlias<"inl\t{%dx|dx}", (IN32rr), 0>; 3576def : InstAlias<"inb\t$port", (IN8ri u8imm:$port), 0>; 3577def : InstAlias<"inw\t$port", (IN16ri u8imm:$port), 0>; 3578def : InstAlias<"inl\t$port", (IN32ri u8imm:$port), 0>; 3579 3580 3581// jmp and call aliases for lcall and ljmp. jmp $42,$5 -> ljmp 3582def : InstAlias<"call\t$seg, $off", (FARCALL16i i16imm:$off, i16imm:$seg)>, Requires<[In16BitMode]>; 3583def : InstAlias<"jmp\t$seg, $off", (FARJMP16i i16imm:$off, i16imm:$seg)>, Requires<[In16BitMode]>; 3584def : InstAlias<"call\t$seg, $off", (FARCALL32i i32imm:$off, i16imm:$seg)>, Requires<[In32BitMode]>; 3585def : InstAlias<"jmp\t$seg, $off", (FARJMP32i i32imm:$off, i16imm:$seg)>, Requires<[In32BitMode]>; 3586def : InstAlias<"callw\t$seg, $off", (FARCALL16i i16imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>; 3587def : InstAlias<"jmpw\t$seg, $off", (FARJMP16i i16imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>; 3588def : InstAlias<"calll\t$seg, $off", (FARCALL32i i32imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>; 3589def : InstAlias<"jmpl\t$seg, $off", (FARJMP32i i32imm:$off, i16imm:$seg)>, Requires<[Not64BitMode]>; 3590 3591// Match 'movq <largeimm>, <reg>' as an alias for movabsq. 3592def : InstAlias<"mov{q}\t{$imm, $reg|$reg, $imm}", (MOV64ri GR64:$reg, i64imm:$imm), 0>; 3593 3594// Match 'movd GR64, MMX' as an alias for movq to be compatible with gas, 3595// which supports this due to an old AMD documentation bug when 64-bit mode was 3596// created. 3597def : InstAlias<"movd\t{$src, $dst|$dst, $src}", 3598 (MMX_MOVD64to64rr VR64:$dst, GR64:$src), 0>; 3599def : InstAlias<"movd\t{$src, $dst|$dst, $src}", 3600 (MMX_MOVD64from64rr GR64:$dst, VR64:$src), 0>; 3601 3602// movsx aliases 3603def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX16rr8 GR16:$dst, GR8:$src), 0, "att">; 3604def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX16rm8 GR16:$dst, i8mem:$src), 0, "att">; 3605def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX32rr8 GR32:$dst, GR8:$src), 0, "att">; 3606def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX32rr16 GR32:$dst, GR16:$src), 0, "att">; 3607def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX64rr8 GR64:$dst, GR8:$src), 0, "att">; 3608def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX64rr16 GR64:$dst, GR16:$src), 0, "att">; 3609def : InstAlias<"movsx\t{$src, $dst|$dst, $src}", (MOVSX64rr32 GR64:$dst, GR32:$src), 0, "att">; 3610 3611// movzx aliases 3612def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX16rr8 GR16:$dst, GR8:$src), 0, "att">; 3613def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX16rm8 GR16:$dst, i8mem:$src), 0, "att">; 3614def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX32rr8 GR32:$dst, GR8:$src), 0, "att">; 3615def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX32rr16 GR32:$dst, GR16:$src), 0, "att">; 3616def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX64rr8 GR64:$dst, GR8:$src), 0, "att">; 3617def : InstAlias<"movzx\t{$src, $dst|$dst, $src}", (MOVZX64rr16 GR64:$dst, GR16:$src), 0, "att">; 3618// Note: No GR32->GR64 movzx form. 3619 3620// outb %dx -> outb %al, %dx 3621def : InstAlias<"outb\t{%dx|dx}", (OUT8rr), 0>; 3622def : InstAlias<"outw\t{%dx|dx}", (OUT16rr), 0>; 3623def : InstAlias<"outl\t{%dx|dx}", (OUT32rr), 0>; 3624def : InstAlias<"outb\t$port", (OUT8ir u8imm:$port), 0>; 3625def : InstAlias<"outw\t$port", (OUT16ir u8imm:$port), 0>; 3626def : InstAlias<"outl\t$port", (OUT32ir u8imm:$port), 0>; 3627 3628// 'sldt <mem>' can be encoded with either sldtw or sldtq with the same 3629// effect (both store to a 16-bit mem). Force to sldtw to avoid ambiguity 3630// errors, since its encoding is the most compact. 3631def : InstAlias<"sldt $mem", (SLDT16m i16mem:$mem), 0>; 3632 3633// shld/shrd op,op -> shld op, op, CL 3634def : InstAlias<"shld{w}\t{$r2, $r1|$r1, $r2}", (SHLD16rrCL GR16:$r1, GR16:$r2), 0>; 3635def : InstAlias<"shld{l}\t{$r2, $r1|$r1, $r2}", (SHLD32rrCL GR32:$r1, GR32:$r2), 0>; 3636def : InstAlias<"shld{q}\t{$r2, $r1|$r1, $r2}", (SHLD64rrCL GR64:$r1, GR64:$r2), 0>; 3637def : InstAlias<"shrd{w}\t{$r2, $r1|$r1, $r2}", (SHRD16rrCL GR16:$r1, GR16:$r2), 0>; 3638def : InstAlias<"shrd{l}\t{$r2, $r1|$r1, $r2}", (SHRD32rrCL GR32:$r1, GR32:$r2), 0>; 3639def : InstAlias<"shrd{q}\t{$r2, $r1|$r1, $r2}", (SHRD64rrCL GR64:$r1, GR64:$r2), 0>; 3640 3641def : InstAlias<"shld{w}\t{$reg, $mem|$mem, $reg}", (SHLD16mrCL i16mem:$mem, GR16:$reg), 0>; 3642def : InstAlias<"shld{l}\t{$reg, $mem|$mem, $reg}", (SHLD32mrCL i32mem:$mem, GR32:$reg), 0>; 3643def : InstAlias<"shld{q}\t{$reg, $mem|$mem, $reg}", (SHLD64mrCL i64mem:$mem, GR64:$reg), 0>; 3644def : InstAlias<"shrd{w}\t{$reg, $mem|$mem, $reg}", (SHRD16mrCL i16mem:$mem, GR16:$reg), 0>; 3645def : InstAlias<"shrd{l}\t{$reg, $mem|$mem, $reg}", (SHRD32mrCL i32mem:$mem, GR32:$reg), 0>; 3646def : InstAlias<"shrd{q}\t{$reg, $mem|$mem, $reg}", (SHRD64mrCL i64mem:$mem, GR64:$reg), 0>; 3647 3648/* FIXME: This is disabled because the asm matcher is currently incapable of 3649 * matching a fixed immediate like $1. 3650// "shl X, $1" is an alias for "shl X". 3651multiclass ShiftRotateByOneAlias<string Mnemonic, string Opc> { 3652 def : InstAlias<!strconcat(Mnemonic, "b $op, $$1"), 3653 (!cast<Instruction>(!strconcat(Opc, "8r1")) GR8:$op)>; 3654 def : InstAlias<!strconcat(Mnemonic, "w $op, $$1"), 3655 (!cast<Instruction>(!strconcat(Opc, "16r1")) GR16:$op)>; 3656 def : InstAlias<!strconcat(Mnemonic, "l $op, $$1"), 3657 (!cast<Instruction>(!strconcat(Opc, "32r1")) GR32:$op)>; 3658 def : InstAlias<!strconcat(Mnemonic, "q $op, $$1"), 3659 (!cast<Instruction>(!strconcat(Opc, "64r1")) GR64:$op)>; 3660 def : InstAlias<!strconcat(Mnemonic, "b $op, $$1"), 3661 (!cast<Instruction>(!strconcat(Opc, "8m1")) i8mem:$op)>; 3662 def : InstAlias<!strconcat(Mnemonic, "w $op, $$1"), 3663 (!cast<Instruction>(!strconcat(Opc, "16m1")) i16mem:$op)>; 3664 def : InstAlias<!strconcat(Mnemonic, "l $op, $$1"), 3665 (!cast<Instruction>(!strconcat(Opc, "32m1")) i32mem:$op)>; 3666 def : InstAlias<!strconcat(Mnemonic, "q $op, $$1"), 3667 (!cast<Instruction>(!strconcat(Opc, "64m1")) i64mem:$op)>; 3668} 3669 3670defm : ShiftRotateByOneAlias<"rcl", "RCL">; 3671defm : ShiftRotateByOneAlias<"rcr", "RCR">; 3672defm : ShiftRotateByOneAlias<"rol", "ROL">; 3673defm : ShiftRotateByOneAlias<"ror", "ROR">; 3674FIXME */ 3675 3676// test: We accept "testX <reg>, <mem>" and "testX <mem>, <reg>" as synonyms. 3677def : InstAlias<"test{b}\t{$mem, $val|$val, $mem}", 3678 (TEST8mr i8mem :$mem, GR8 :$val), 0>; 3679def : InstAlias<"test{w}\t{$mem, $val|$val, $mem}", 3680 (TEST16mr i16mem:$mem, GR16:$val), 0>; 3681def : InstAlias<"test{l}\t{$mem, $val|$val, $mem}", 3682 (TEST32mr i32mem:$mem, GR32:$val), 0>; 3683def : InstAlias<"test{q}\t{$mem, $val|$val, $mem}", 3684 (TEST64mr i64mem:$mem, GR64:$val), 0>; 3685 3686// xchg: We accept "xchgX <reg>, <mem>" and "xchgX <mem>, <reg>" as synonyms. 3687def : InstAlias<"xchg{b}\t{$mem, $val|$val, $mem}", 3688 (XCHG8rm GR8 :$val, i8mem :$mem), 0>; 3689def : InstAlias<"xchg{w}\t{$mem, $val|$val, $mem}", 3690 (XCHG16rm GR16:$val, i16mem:$mem), 0>; 3691def : InstAlias<"xchg{l}\t{$mem, $val|$val, $mem}", 3692 (XCHG32rm GR32:$val, i32mem:$mem), 0>; 3693def : InstAlias<"xchg{q}\t{$mem, $val|$val, $mem}", 3694 (XCHG64rm GR64:$val, i64mem:$mem), 0>; 3695 3696// xchg: We accept "xchgX <reg>, %eax" and "xchgX %eax, <reg>" as synonyms. 3697def : InstAlias<"xchg{w}\t{%ax, $src|$src, ax}", (XCHG16ar GR16:$src), 0>; 3698def : InstAlias<"xchg{l}\t{%eax, $src|$src, eax}", (XCHG32ar GR32:$src), 0>; 3699def : InstAlias<"xchg{q}\t{%rax, $src|$src, rax}", (XCHG64ar GR64:$src), 0>; 3700 3701// In 64-bit mode, xchg %eax, %eax can't be encoded with the 0x90 opcode we 3702// would get by default because it's defined as NOP. But xchg %eax, %eax implies 3703// implicit zeroing of the upper 32 bits. So alias to the longer encoding. 3704def : InstAlias<"xchg{l}\t{%eax, %eax|eax, eax}", 3705 (XCHG32rr EAX, EAX), 0>, Requires<[In64BitMode]>; 3706 3707// xchg %rax, %rax is a nop in x86-64 and can be encoded as such. Without this 3708// we emit an unneeded REX.w prefix. 3709def : InstAlias<"xchg{q}\t{%rax, %rax|rax, rax}", (NOOP), 0>; 3710 3711// These aliases exist to get the parser to prioritize matching 8-bit 3712// immediate encodings over matching the implicit ax/eax/rax encodings. By 3713// explicitly mentioning the A register here, these entries will be ordered 3714// first due to the more explicit immediate type. 3715def : InstAlias<"adc{w}\t{$imm, %ax|ax, $imm}", (ADC16ri8 AX, i16i8imm:$imm), 0>; 3716def : InstAlias<"add{w}\t{$imm, %ax|ax, $imm}", (ADD16ri8 AX, i16i8imm:$imm), 0>; 3717def : InstAlias<"and{w}\t{$imm, %ax|ax, $imm}", (AND16ri8 AX, i16i8imm:$imm), 0>; 3718def : InstAlias<"cmp{w}\t{$imm, %ax|ax, $imm}", (CMP16ri8 AX, i16i8imm:$imm), 0>; 3719def : InstAlias<"or{w}\t{$imm, %ax|ax, $imm}", (OR16ri8 AX, i16i8imm:$imm), 0>; 3720def : InstAlias<"sbb{w}\t{$imm, %ax|ax, $imm}", (SBB16ri8 AX, i16i8imm:$imm), 0>; 3721def : InstAlias<"sub{w}\t{$imm, %ax|ax, $imm}", (SUB16ri8 AX, i16i8imm:$imm), 0>; 3722def : InstAlias<"xor{w}\t{$imm, %ax|ax, $imm}", (XOR16ri8 AX, i16i8imm:$imm), 0>; 3723 3724def : InstAlias<"adc{l}\t{$imm, %eax|eax, $imm}", (ADC32ri8 EAX, i32i8imm:$imm), 0>; 3725def : InstAlias<"add{l}\t{$imm, %eax|eax, $imm}", (ADD32ri8 EAX, i32i8imm:$imm), 0>; 3726def : InstAlias<"and{l}\t{$imm, %eax|eax, $imm}", (AND32ri8 EAX, i32i8imm:$imm), 0>; 3727def : InstAlias<"cmp{l}\t{$imm, %eax|eax, $imm}", (CMP32ri8 EAX, i32i8imm:$imm), 0>; 3728def : InstAlias<"or{l}\t{$imm, %eax|eax, $imm}", (OR32ri8 EAX, i32i8imm:$imm), 0>; 3729def : InstAlias<"sbb{l}\t{$imm, %eax|eax, $imm}", (SBB32ri8 EAX, i32i8imm:$imm), 0>; 3730def : InstAlias<"sub{l}\t{$imm, %eax|eax, $imm}", (SUB32ri8 EAX, i32i8imm:$imm), 0>; 3731def : InstAlias<"xor{l}\t{$imm, %eax|eax, $imm}", (XOR32ri8 EAX, i32i8imm:$imm), 0>; 3732 3733def : InstAlias<"adc{q}\t{$imm, %rax|rax, $imm}", (ADC64ri8 RAX, i64i8imm:$imm), 0>; 3734def : InstAlias<"add{q}\t{$imm, %rax|rax, $imm}", (ADD64ri8 RAX, i64i8imm:$imm), 0>; 3735def : InstAlias<"and{q}\t{$imm, %rax|rax, $imm}", (AND64ri8 RAX, i64i8imm:$imm), 0>; 3736def : InstAlias<"cmp{q}\t{$imm, %rax|rax, $imm}", (CMP64ri8 RAX, i64i8imm:$imm), 0>; 3737def : InstAlias<"or{q}\t{$imm, %rax|rax, $imm}", (OR64ri8 RAX, i64i8imm:$imm), 0>; 3738def : InstAlias<"sbb{q}\t{$imm, %rax|rax, $imm}", (SBB64ri8 RAX, i64i8imm:$imm), 0>; 3739def : InstAlias<"sub{q}\t{$imm, %rax|rax, $imm}", (SUB64ri8 RAX, i64i8imm:$imm), 0>; 3740def : InstAlias<"xor{q}\t{$imm, %rax|rax, $imm}", (XOR64ri8 RAX, i64i8imm:$imm), 0>; 3741