//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class. Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// type pointer to empty struct { }*. To access the real item of interest the
// cast instruction needs to be stripped away.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/IntrinsicInst.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Statepoint.h"
#include <optional>

using namespace llvm;

bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}

//===----------------------------------------------------------------------===//
/// DbgVariableIntrinsic - This is the common base class for debug info
/// intrinsics for variables.
///

iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}
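
// For illustration only (metadata IDs and value names below are placeholders),
// the raw location handled above takes one of three forms in the IR:
//   call void @llvm.dbg.value(metadata i32 %x, metadata !1, metadata !2)
//     - a single ValueAsMetadata location operand
//   call void @llvm.dbg.value(metadata !DIArgList(i32 %x, i32 %y), ...)
//     - a DIArgList carrying several location operands
//   - or an empty metadata tuple (an MDNode with no operands) standing in for
//     a location that has been deleted.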

iterator_range<location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  return getWrappedLocation().location_ops();
}

Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  return getWrappedLocation().getVariableLocationOp(OpIdx);
}

Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return AL->getArgs()[OpIdx]->getValue();
  if (isa<MDNode>(MD))
    return nullptr;
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}

static ValueAsMetadata *getAsMetadata(Value *V) {
  return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
                                       cast<MetadataAsValue>(V)->getMetadata())
                                 : ValueAsMetadata::get(V);
}

void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue,
                                                     bool AllowEmpty) {
  // If OldValue is used as the address part of a dbg.assign intrinsic replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  (void)DbgAssignAddrReplaced;

  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  if (OldIt == Locations.end()) {
    if (AllowEmpty || DbgAssignAddrReplaced)
      return;
    assert(DbgAssignAddrReplaced &&
           "OldValue must be dbg.assign addr if unused in DIArgList");
    return;
  }

  assert(OldIt != Locations.end() && "OldValue must be a current location");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Idx == OpIdx ? NewOperand
                               : getAsMetadata(getVariableLocationOp(Idx)));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
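
// For illustration only (value names are placeholders): when the intrinsic
// carries a DIArgList, replacing %a with %b in
//   @llvm.dbg.value(metadata !DIArgList(i32 %a, i32 %c), ...)
// rebuilds the whole list as !DIArgList(i32 %b, i32 %c) and re-attaches it as
// operand 0; with a single location the operand is swapped directly.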

void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(getAsMetadata(VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}

Value *DbgAssignIntrinsic::getAddress() const {
  auto *MD = getRawAddress();
  if (auto *V = dyn_cast<ValueAsMetadata>(MD))
    return V->getValue();

  // When the value goes to null, it gets replaced by an empty MDNode.
  assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
  return nullptr;
}

void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
  setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
}

void DbgAssignIntrinsic::setAddress(Value *V) {
  setOperand(OpAddress,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

void DbgAssignIntrinsic::setKillAddress() {
  if (isKillAddress())
    return;
  setAddress(UndefValue::get(getAddress()->getType()));
}

bool DbgAssignIntrinsic::isKillAddress() const {
  Value *Addr = getAddress();
  return !Addr || isa<UndefValue>(Addr);
}

void DbgAssignIntrinsic::setValue(Value *V) {
  setOperand(OpValue,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.starts_with("llvm.") && "Unexpected intrinsic prefix");

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.starts_with(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}
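
// For illustration only (hypothetical table contents): if NameTable contains
// the entry "llvm.memcpy", a query for the mangled name
// "llvm.memcpy.p0.p0.i64" returns that entry's index because the matched
// prefix is followed by '.', whereas "llvm.memcpyfoo" returns -1.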

ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}

ConstantInt *InstrProfCntrInstBase::getIndex() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}

Value *InstrProfIncrementInst::getStep() const {
  if (InstrProfIncrementInstStep::classof(this)) {
    return const_cast<Value *>(getArgOperand(4));
  }
  const Module *M = getModule();
  LLVMContext &Context = M->getContext();
  return ConstantInt::get(Type::getInt64Ty(Context), 1);
}

Value *InstrProfCallsite::getCallee() const {
  if (isa<InstrProfCallsite>(this))
    return getArgOperand(4);
  return nullptr;
}

std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToRoundingMode(cast<MDString>(MD)->getString());
}

std::optional<fp::ExceptionBehavior>
ConstrainedFPIntrinsic::getExceptionBehavior() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
}

bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
  std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
  if (Except) {
    if (*Except != fp::ebIgnore)
      return false;
  }

  std::optional<RoundingMode> Rounding = getRoundingMode();
  if (Rounding) {
    if (*Rounding != RoundingMode::NearestTiesToEven)
      return false;
  }

  return true;
}
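
// For reference, a sketch of the operand layout the accessors above assume
// (value names are placeholders):
//   %r = call float @llvm.experimental.constrained.fadd.f32(
//            float %a, float %b,
//            metadata !"round.dynamic", metadata !"fpexcept.strict")
// The rounding mode, when present, is the second-to-last operand and the
// exception behavior is the last operand.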

static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("oeq", FCmpInst::FCMP_OEQ)
      .Case("ogt", FCmpInst::FCMP_OGT)
      .Case("oge", FCmpInst::FCMP_OGE)
      .Case("olt", FCmpInst::FCMP_OLT)
      .Case("ole", FCmpInst::FCMP_OLE)
      .Case("one", FCmpInst::FCMP_ONE)
      .Case("ord", FCmpInst::FCMP_ORD)
      .Case("uno", FCmpInst::FCMP_UNO)
      .Case("ueq", FCmpInst::FCMP_UEQ)
      .Case("ugt", FCmpInst::FCMP_UGT)
      .Case("uge", FCmpInst::FCMP_UGE)
      .Case("ult", FCmpInst::FCMP_ULT)
      .Case("ule", FCmpInst::FCMP_ULE)
      .Case("une", FCmpInst::FCMP_UNE)
      .Default(FCmpInst::BAD_FCMP_PREDICATE);
}

FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}

unsigned ConstrainedFPIntrinsic::getNonMetadataArgCount() const {
  // All constrained fp intrinsics have "fpexcept" metadata.
  unsigned NumArgs = arg_size() - 1;

  // Some intrinsics have "round" metadata.
  if (Intrinsic::hasConstrainedFPRoundingModeOperand(getIntrinsicID()))
    NumArgs -= 1;

  // Compare intrinsics take their predicate as metadata.
  if (isa<ConstrainedFPCmpIntrinsic>(this))
    NumArgs -= 1;

  return NumArgs;
}

bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  return Intrinsic::isConstrainedFPIntrinsic(I->getIntrinsicID());
}

ElementCount VPIntrinsic::getStaticVectorLength() const {
  auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
    const auto *VT = cast<VectorType>(T);
    auto ElemCount = VT->getElementCount();
    return ElemCount;
  };

  Value *VPMask = getMaskParam();
  if (!VPMask) {
    assert((getIntrinsicID() == Intrinsic::vp_merge ||
            getIntrinsicID() == Intrinsic::vp_select) &&
           "Unexpected VP intrinsic without mask operand");
    return GetVectorLengthOfType(getType());
  }
  return GetVectorLengthOfType(VPMask->getType());
}

Value *VPIntrinsic::getMaskParam() const {
  if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
    return getArgOperand(*MaskPos);
  return nullptr;
}

void VPIntrinsic::setMaskParam(Value *NewMask) {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  setArgOperand(*MaskPos, NewMask);
}

Value *VPIntrinsic::getVectorLengthParam() const {
  if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
    return getArgOperand(*EVLPos);
  return nullptr;
}

void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  setArgOperand(*EVLPos, NewEVL);
}

std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
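
// For reference, a sketch of the operand layout the accessors above assume
// (positions come from VPIntrinsics.def; value names are placeholders):
//   %r = call <4 x i32> @llvm.vp.add.v4i32(<4 x i32> %a, <4 x i32> %b,
//                                          <4 x i1> %mask, i32 %evl)
// Here the mask is operand 2 and the explicit vector length is operand 3.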

/// \return the alignment of the pointer used by this load/store/gather or
/// scatter.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  std::optional<unsigned> PtrParamOpt =
      getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(*PtrParamOpt);
}

/// \return The pointer operand of this load, store, gather or scatter.
Value *VPIntrinsic::getMemoryPointerParam() const {
  if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
    return getArgOperand(*PtrParamOpt);
  return nullptr;
}

std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

/// \return The data (payload) operand of this store or scatter.
Value *VPIntrinsic::getMemoryDataParam() const {
  auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
  if (!DataParamOpt)
    return nullptr;
  return getArgOperand(*DataParamOpt);
}

std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
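
// For reference, a sketch of a memory VP operation these helpers decode
// (value names are placeholders):
//   call void @llvm.vp.store.v4i32.p0(<4 x i32> %val, ptr %p,
//                                     <4 x i1> %mask, i32 %evl)
// For vp.store the data operand is at position 0 and the pointer at
// position 1, as recorded by VP_PROPERTY_MEMOP in VPIntrinsics.def.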

constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  return ::isVPIntrinsic(ID);
}

// Equivalent non-predicated opcode
constexpr static std::optional<unsigned>
getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  return ::getFunctionalOpcodeForVP(ID);
}

// Equivalent non-predicated intrinsic ID
constexpr static std::optional<Intrinsic::ID>
getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

std::optional<Intrinsic::ID>
VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  return ::getFunctionalIntrinsicIDForVP(ID);
}

constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_NO_FUNCTIONAL return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
// defined, or be marked that they don't have one.
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...)                                 \
  static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) ||           \
                getFunctionalOpcodeForVP(Intrinsic::VPID) ||                   \
                getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
#include "llvm/IR/VPIntrinsics.def"

// Equivalent non-predicated constrained intrinsic
std::optional<Intrinsic::ID>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}

bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(VLParam, m_Mul(m_VScale(), m_ConstantInt(VScaleFactor))))
      return VScaleFactor >= EC.getKnownMinValue();
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale());
  }

  // standard SIMD operation
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}
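
// For illustration only (value names are placeholders): for a fixed-width
// operation such as
//   call <4 x i32> @llvm.vp.add.v4i32(..., <4 x i1> %m, i32 4)
// the EVL of 4 covers every lane, so it can be ignored; an EVL computed as
// vscale * 2 plays the same role for a <vscale x 2 x i32> operation.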

Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
                                               Type *ReturnType,
                                               ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();

    VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
    break;
  }
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
  case Intrinsic::vp_lrint:
  case Intrinsic::vp_llrint:
  case Intrinsic::vp_cttz_elts:
    VPFunc =
        Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_is_fpclass:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[0]->getType()});
    break;
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_splat:
    VPFunc = Intrinsic::getDeclaration(M, VPID, ReturnType);
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}
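
// A minimal usage sketch (M, LHS, RHS, Mask and EVL are placeholder values
// created by a caller): declaring the vp.add overload that matches the
// operand types before emitting the call.
//   Function *VPAdd = VPIntrinsic::getDeclarationForParams(
//       M, Intrinsic::vp_add, LHS->getType(), {LHS, RHS, Mask, EVL});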

bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_BINARYOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("eq", ICmpInst::ICMP_EQ)
      .Case("ne", ICmpInst::ICMP_NE)
      .Case("ugt", ICmpInst::ICMP_UGT)
      .Case("uge", ICmpInst::ICMP_UGE)
      .Case("ult", ICmpInst::ICMP_ULT)
      .Case("ule", ICmpInst::ICMP_ULE)
      .Case("sgt", ICmpInst::ICMP_SGT)
      .Case("sge", ICmpInst::ICMP_SGE)
      .Case("slt", ICmpInst::ICMP_SLT)
      .Case("sle", ICmpInst::ICMP_SLE)
      .Default(ICmpInst::BAD_ICMP_PREDICATE);
}

CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}
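
// For reference, a sketch of the comparison form decoded above (value names
// are placeholders); the condition code is carried as a metadata string:
//   %c = call <4 x i1> @llvm.vp.icmp.v4i32(<4 x i32> %a, <4 x i32> %b,
//                                          metadata !"slt",
//                                          <4 x i1> %mask, i32 %evl)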

unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}

unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}

std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}

bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}

unsigned BinaryOpIntrinsic::getNoWrapKind() const {
  if (isSigned())
    return OverflowingBinaryOperator::NoSignedWrap;
  else
    return OverflowingBinaryOperator::NoUnsignedWrap;
}

const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // Treat none token as if it was undef here
  if (isa<ConstantTokenNone>(Token))
    return UndefValue::get(Token->getType());

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
      cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}

Value *GCRelocateInst::getBasePtr() const {
  auto Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getBasePtrIndex());
  return *(GCInst->arg_begin() + getBasePtrIndex());
}

Value *GCRelocateInst::getDerivedPtr() const {
  auto *Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  return *(GCInst->arg_begin() + getDerivedPtrIndex());
}