xref: /freebsd/contrib/llvm-project/llvm/lib/IR/IntrinsicInst.cpp (revision 0ad011ececb978e22a9bff2acf76633b094f1ff6)
1 //===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements methods that make it really easy to deal with intrinsic
10 // functions.
11 //
12 // All intrinsic function calls are instances of the call instruction, so these
13 // are all subclasses of the CallInst class.  Note that none of these classes
14 // has state or virtual methods, which is an important part of this gross/neat
15 // hack working.
16 //
17 // In some cases, arguments to intrinsics need to be generic and are defined as
18 // type pointer to empty struct { }*.  To access the real item of interest the
19 // cast instruction needs to be stripped away.
20 //
21 //===----------------------------------------------------------------------===//
22 
23 #include "llvm/IR/IntrinsicInst.h"
24 #include "llvm/ADT/StringSwitch.h"
25 #include "llvm/IR/Constants.h"
26 #include "llvm/IR/DebugInfoMetadata.h"
27 #include "llvm/IR/Metadata.h"
28 #include "llvm/IR/Module.h"
29 #include "llvm/IR/Operator.h"
30 #include "llvm/IR/PatternMatch.h"
31 #include "llvm/IR/Statepoint.h"
32 #include <optional>
33 
34 using namespace llvm;
35 
36 bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
37   switch (IID) {
38   case Intrinsic::objc_autorelease:
39   case Intrinsic::objc_autoreleasePoolPop:
40   case Intrinsic::objc_autoreleasePoolPush:
41   case Intrinsic::objc_autoreleaseReturnValue:
42   case Intrinsic::objc_copyWeak:
43   case Intrinsic::objc_destroyWeak:
44   case Intrinsic::objc_initWeak:
45   case Intrinsic::objc_loadWeak:
46   case Intrinsic::objc_loadWeakRetained:
47   case Intrinsic::objc_moveWeak:
48   case Intrinsic::objc_release:
49   case Intrinsic::objc_retain:
50   case Intrinsic::objc_retainAutorelease:
51   case Intrinsic::objc_retainAutoreleaseReturnValue:
52   case Intrinsic::objc_retainAutoreleasedReturnValue:
53   case Intrinsic::objc_retainBlock:
54   case Intrinsic::objc_storeStrong:
55   case Intrinsic::objc_storeWeak:
56   case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
57   case Intrinsic::objc_retainedObject:
58   case Intrinsic::objc_unretainedObject:
59   case Intrinsic::objc_unretainedPointer:
60   case Intrinsic::objc_retain_autorelease:
61   case Intrinsic::objc_sync_enter:
62   case Intrinsic::objc_sync_exit:
63     return true;
64   default:
65     return false;
66   }
67 }
68 
69 //===----------------------------------------------------------------------===//
70 /// DbgVariableIntrinsic - This is the common base class for debug info
71 /// intrinsics for variables.
72 ///
73 
74 iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
75   Metadata *MD = getRawLocation();
76   assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
77   // If operand is ValueAsMetadata, return a range over just that operand.
78   if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
79     return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
80   }
81   // If operand is DIArgList, return a range over its args.
82   if (auto *AL = dyn_cast<DIArgList>(MD))
83     return {location_op_iterator(AL->args_begin()),
84             location_op_iterator(AL->args_end())};
85   // Operand must be an empty metadata tuple, so return empty iterator.
86   return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
87           location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
88 }
89 
// Forward to the raw-location wrapper, which knows how to iterate all three
// encodings (single ValueAsMetadata, DIArgList, empty tuple).
iterator_range<location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  return getWrappedLocation().location_ops();
}
94 
// Forward to the raw-location wrapper; returns the OpIdx'th location value,
// or nullptr for an empty ("killed") location.
Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  return getWrappedLocation().getVariableLocationOp(OpIdx);
}
98 
99 Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
100   Metadata *MD = getRawLocation();
101   assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
102   if (auto *AL = dyn_cast<DIArgList>(MD))
103     return AL->getArgs()[OpIdx]->getValue();
104   if (isa<MDNode>(MD))
105     return nullptr;
106   assert(
107       isa<ValueAsMetadata>(MD) &&
108       "Attempted to get location operand from DbgVariableIntrinsic with none.");
109   auto *V = cast<ValueAsMetadata>(MD);
110   assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
111                        "single location operand.");
112   return V->getValue();
113 }
114 
115 static ValueAsMetadata *getAsMetadata(Value *V) {
116   return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
117                                        cast<MetadataAsValue>(V)->getMetadata())
118                                  : ValueAsMetadata::get(V);
119 }
120 
121 void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
122                                                      Value *NewValue) {
123   // If OldValue is used as the address part of a dbg.assign intrinsic replace
124   // it with NewValue and return true.
125   auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
126     auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
127     if (!DAI || OldValue != DAI->getAddress())
128       return false;
129     DAI->setAddress(NewValue);
130     return true;
131   };
132   bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
133   (void)DbgAssignAddrReplaced;
134 
135   assert(NewValue && "Values must be non-null");
136   auto Locations = location_ops();
137   auto OldIt = find(Locations, OldValue);
138   if (OldIt == Locations.end()) {
139     assert(DbgAssignAddrReplaced &&
140            "OldValue must be dbg.assign addr if unused in DIArgList");
141     return;
142   }
143 
144   assert(OldIt != Locations.end() && "OldValue must be a current location");
145   if (!hasArgList()) {
146     Value *NewOperand = isa<MetadataAsValue>(NewValue)
147                             ? NewValue
148                             : MetadataAsValue::get(
149                                   getContext(), ValueAsMetadata::get(NewValue));
150     return setArgOperand(0, NewOperand);
151   }
152   SmallVector<ValueAsMetadata *, 4> MDs;
153   ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
154   for (auto *VMD : Locations)
155     MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
156   setArgOperand(
157       0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
158 }
159 void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
160                                                      Value *NewValue) {
161   assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
162   if (!hasArgList()) {
163     Value *NewOperand = isa<MetadataAsValue>(NewValue)
164                             ? NewValue
165                             : MetadataAsValue::get(
166                                   getContext(), ValueAsMetadata::get(NewValue));
167     return setArgOperand(0, NewOperand);
168   }
169   SmallVector<ValueAsMetadata *, 4> MDs;
170   ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
171   for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
172     MDs.push_back(Idx == OpIdx ? NewOperand
173                                : getAsMetadata(getVariableLocationOp(Idx)));
174   setArgOperand(
175       0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
176 }
177 
178 void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
179                                                   DIExpression *NewExpr) {
180   assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
181                                     NewValues.size()) &&
182          "NewExpr for debug variable intrinsic does not reference every "
183          "location operand.");
184   assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
185   setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
186   SmallVector<ValueAsMetadata *, 4> MDs;
187   for (auto *VMD : location_ops())
188     MDs.push_back(getAsMetadata(VMD));
189   for (auto *VMD : NewValues)
190     MDs.push_back(getAsMetadata(VMD));
191   setArgOperand(
192       0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
193 }
194 
195 std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
196   if (auto Fragment = getExpression()->getFragmentInfo())
197     return Fragment->SizeInBits;
198   return getVariable()->getSizeInBits();
199 }
200 
201 Value *DbgAssignIntrinsic::getAddress() const {
202   auto *MD = getRawAddress();
203   if (auto *V = dyn_cast<ValueAsMetadata>(MD))
204     return V->getValue();
205 
206   // When the value goes to null, it gets replaced by an empty MDNode.
207   assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
208   return nullptr;
209 }
210 
211 void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
212   setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
213 }
214 
215 void DbgAssignIntrinsic::setAddress(Value *V) {
216   setOperand(OpAddress,
217              MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
218 }
219 
220 void DbgAssignIntrinsic::setKillAddress() {
221   if (isKillAddress())
222     return;
223   setAddress(UndefValue::get(getAddress()->getType()));
224 }
225 
226 bool DbgAssignIntrinsic::isKillAddress() const {
227   Value *Addr = getAddress();
228   return !Addr || isa<UndefValue>(Addr);
229 }
230 
231 void DbgAssignIntrinsic::setValue(Value *V) {
232   setOperand(OpValue,
233              MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
234 }
235 
/// Binary-search \p NameTable (sorted, each entry starting with "llvm.") for
/// the entry matching \p Name, allowing \p Name to carry extra
/// type-mangling suffix components. Returns the table index, or -1.
int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.startswith("llvm."));

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    // Extend the comparison window to the next '.' (or to the end of Name).
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    // Remember the last non-empty range so we can fall back to it if a later
    // (mangling-suffix) component empties the range.
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  // Accept an exact match, or a match followed by a '.'-separated suffix
  // (the overloaded intrinsic's type mangling).
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.startswith(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}
272 
273 ConstantInt *InstrProfInstBase::getNumCounters() const {
274   if (InstrProfValueProfileInst::classof(this))
275     llvm_unreachable("InstrProfValueProfileInst does not have counters!");
276   return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
277 }
278 
279 ConstantInt *InstrProfInstBase::getIndex() const {
280   if (InstrProfValueProfileInst::classof(this))
281     llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
282   return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
283 }
284 
285 Value *InstrProfIncrementInst::getStep() const {
286   if (InstrProfIncrementInstStep::classof(this)) {
287     return const_cast<Value *>(getArgOperand(4));
288   }
289   const Module *M = getModule();
290   LLVMContext &Context = M->getContext();
291   return ConstantInt::get(Type::getInt64Ty(Context), 1);
292 }
293 
294 std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
295   unsigned NumOperands = arg_size();
296   Metadata *MD = nullptr;
297   auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
298   if (MAV)
299     MD = MAV->getMetadata();
300   if (!MD || !isa<MDString>(MD))
301     return std::nullopt;
302   return convertStrToRoundingMode(cast<MDString>(MD)->getString());
303 }
304 
305 std::optional<fp::ExceptionBehavior>
306 ConstrainedFPIntrinsic::getExceptionBehavior() const {
307   unsigned NumOperands = arg_size();
308   Metadata *MD = nullptr;
309   auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
310   if (MAV)
311     MD = MAV->getMetadata();
312   if (!MD || !isa<MDString>(MD))
313     return std::nullopt;
314   return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
315 }
316 
317 bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
318   std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
319   if (Except) {
320     if (*Except != fp::ebIgnore)
321       return false;
322   }
323 
324   std::optional<RoundingMode> Rounding = getRoundingMode();
325   if (Rounding) {
326     if (*Rounding != RoundingMode::NearestTiesToEven)
327       return false;
328   }
329 
330   return true;
331 }
332 
333 static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
334   Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
335   if (!MD || !isa<MDString>(MD))
336     return FCmpInst::BAD_FCMP_PREDICATE;
337   return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
338       .Case("oeq", FCmpInst::FCMP_OEQ)
339       .Case("ogt", FCmpInst::FCMP_OGT)
340       .Case("oge", FCmpInst::FCMP_OGE)
341       .Case("olt", FCmpInst::FCMP_OLT)
342       .Case("ole", FCmpInst::FCMP_OLE)
343       .Case("one", FCmpInst::FCMP_ONE)
344       .Case("ord", FCmpInst::FCMP_ORD)
345       .Case("uno", FCmpInst::FCMP_UNO)
346       .Case("ueq", FCmpInst::FCMP_UEQ)
347       .Case("ugt", FCmpInst::FCMP_UGT)
348       .Case("uge", FCmpInst::FCMP_UGE)
349       .Case("ult", FCmpInst::FCMP_ULT)
350       .Case("ule", FCmpInst::FCMP_ULE)
351       .Case("une", FCmpInst::FCMP_UNE)
352       .Default(FCmpInst::BAD_FCMP_PREDICATE);
353 }
354 
// The comparison predicate of a constrained fcmp is carried as metadata in
// operand 2; decode it (BAD_FCMP_PREDICATE if unrecognized).
FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}
358 
// Generated from ConstrainedOps.def: a constrained op is unary iff its
// registered argument count (NARG) is 1.
bool ConstrainedFPIntrinsic::isUnaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 1;
#include "llvm/IR/ConstrainedOps.def"
  }
}
369 
// Generated from ConstrainedOps.def: a constrained op is ternary iff its
// registered argument count (NARG) is 3.
bool ConstrainedFPIntrinsic::isTernaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 3;
#include "llvm/IR/ConstrainedOps.def"
  }
}
380 
// RTTI support: an IntrinsicInst is a ConstrainedFPIntrinsic iff its ID is
// one of those registered in ConstrainedOps.def.
bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  switch (I->getIntrinsicID()) {
#define INSTRUCTION(NAME, NARGS, ROUND_MODE, INTRINSIC)                        \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
  default:
    return false;
  }
}
391 
392 ElementCount VPIntrinsic::getStaticVectorLength() const {
393   auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
394     const auto *VT = cast<VectorType>(T);
395     auto ElemCount = VT->getElementCount();
396     return ElemCount;
397   };
398 
399   Value *VPMask = getMaskParam();
400   if (!VPMask) {
401     assert((getIntrinsicID() == Intrinsic::vp_merge ||
402             getIntrinsicID() == Intrinsic::vp_select) &&
403            "Unexpected VP intrinsic without mask operand");
404     return GetVectorLengthOfType(getType());
405   }
406   return GetVectorLengthOfType(VPMask->getType());
407 }
408 
409 Value *VPIntrinsic::getMaskParam() const {
410   if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
411     return getArgOperand(*MaskPos);
412   return nullptr;
413 }
414 
415 void VPIntrinsic::setMaskParam(Value *NewMask) {
416   auto MaskPos = getMaskParamPos(getIntrinsicID());
417   setArgOperand(*MaskPos, NewMask);
418 }
419 
420 Value *VPIntrinsic::getVectorLengthParam() const {
421   if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
422     return getArgOperand(*EVLPos);
423   return nullptr;
424 }
425 
426 void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
427   auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
428   setArgOperand(*EVLPos, NewEVL);
429 }
430 
// Position of the mask operand, generated from VPIntrinsics.def; nullopt for
// IDs that are not VP intrinsics (or have no registered mask position).
std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
443 
// Position of the explicit-vector-length operand, generated from
// VPIntrinsics.def; nullopt for IDs that are not VP intrinsics.
std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
456 
457 /// \return the alignment of the pointer used by this load/store/gather or
458 /// scatter.
459 MaybeAlign VPIntrinsic::getPointerAlignment() const {
460   std::optional<unsigned> PtrParamOpt =
461       getMemoryPointerParamPos(getIntrinsicID());
462   assert(PtrParamOpt && "no pointer argument!");
463   return getParamAlign(*PtrParamOpt);
464 }
465 
466 /// \return The pointer operand of this load,store, gather or scatter.
467 Value *VPIntrinsic::getMemoryPointerParam() const {
468   if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
469     return getArgOperand(*PtrParamOpt);
470   return nullptr;
471 }
472 
// Position of the pointer operand of a VP memory op; generated from the
// VP_PROPERTY_MEMOP entries in VPIntrinsics.def. nullopt for non-memory ops.
std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
485 
486 /// \return The data (payload) operand of this store or scatter.
487 Value *VPIntrinsic::getMemoryDataParam() const {
488   auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
489   if (!DataParamOpt)
490     return nullptr;
491   return getArgOperand(*DataParamOpt);
492 }
493 
// Position of the data (payload) operand of a VP store/scatter; generated
// from the VP_PROPERTY_MEMOP entries in VPIntrinsics.def.
std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
505 
// True iff \p ID is one of the vector-predicated intrinsics registered in
// VPIntrinsics.def.
bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
517 
// Equivalent non-predicated opcode: maps a VP intrinsic to the plain
// Instruction opcode it predicates (from VP_PROPERTY_FUNCTIONAL_OPC entries);
// nullopt when there is no single functional opcode.
std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
531 
// Equivalent non-predicated constrained intrinsic: maps a VP intrinsic to its
// constrained-FP counterpart (from VP_PROPERTY_CONSTRAINEDFP entries);
// nullopt when there is none.
std::optional<unsigned>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
545 
// Inverse of getFunctionalOpcodeForVP: map a plain Instruction opcode to the
// VP intrinsic that predicates it, or not_intrinsic when none exists.
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
558 
// Returns true when the explicit vector length (EVL) operand provably covers
// every lane of the operation, so it masks nothing off and may be ignored.
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns
    uint64_t VScaleFactor;
    // EVL == C * vscale with C >= minimum element count covers all lanes.
    if (match(VLParam, m_c_Mul(m_ConstantInt(VScaleFactor), m_VScale())))
      return VScaleFactor >= EC.getKnownMinValue();
    // EVL == vscale only covers all lanes when the minimum count is 1.
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale());
  }

  // standard SIMD operation
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  // A constant EVL at least as large as the (fixed) element count is a no-op.
  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}
594 
595 Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
596                                                Type *ReturnType,
597                                                ArrayRef<Value *> Params) {
598   assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
599   Function *VPFunc;
600   switch (VPID) {
601   default: {
602     Type *OverloadTy = Params[0]->getType();
603     if (VPReductionIntrinsic::isVPReduction(VPID))
604       OverloadTy =
605           Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();
606 
607     VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
608     break;
609   }
610   case Intrinsic::vp_trunc:
611   case Intrinsic::vp_sext:
612   case Intrinsic::vp_zext:
613   case Intrinsic::vp_fptoui:
614   case Intrinsic::vp_fptosi:
615   case Intrinsic::vp_uitofp:
616   case Intrinsic::vp_sitofp:
617   case Intrinsic::vp_fptrunc:
618   case Intrinsic::vp_fpext:
619   case Intrinsic::vp_ptrtoint:
620   case Intrinsic::vp_inttoptr:
621     VPFunc =
622         Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
623     break;
624   case Intrinsic::vp_merge:
625   case Intrinsic::vp_select:
626     VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
627     break;
628   case Intrinsic::vp_load:
629     VPFunc = Intrinsic::getDeclaration(
630         M, VPID, {ReturnType, Params[0]->getType()});
631     break;
632   case Intrinsic::experimental_vp_strided_load:
633     VPFunc = Intrinsic::getDeclaration(
634         M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
635     break;
636   case Intrinsic::vp_gather:
637     VPFunc = Intrinsic::getDeclaration(
638         M, VPID, {ReturnType, Params[0]->getType()});
639     break;
640   case Intrinsic::vp_store:
641     VPFunc = Intrinsic::getDeclaration(
642         M, VPID, {Params[0]->getType(), Params[1]->getType()});
643     break;
644   case Intrinsic::experimental_vp_strided_store:
645     VPFunc = Intrinsic::getDeclaration(
646         M, VPID,
647         {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
648     break;
649   case Intrinsic::vp_scatter:
650     VPFunc = Intrinsic::getDeclaration(
651         M, VPID, {Params[0]->getType(), Params[1]->getType()});
652     break;
653   }
654   assert(VPFunc && "Could not declare VP intrinsic");
655   return VPFunc;
656 }
657 
// True iff \p ID is a VP reduction, i.e. carries a VP_PROPERTY_REDUCTION
// entry in VPIntrinsics.def.
bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
669 
// True iff \p ID is a VP cast, i.e. carries a VP_PROPERTY_CASTOP entry in
// VPIntrinsics.def.
bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
681 
// True iff \p ID is a VP comparison, i.e. carries a VP_PROPERTY_CMP entry in
// VPIntrinsics.def.
bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
693 
694 static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
695   Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
696   if (!MD || !isa<MDString>(MD))
697     return ICmpInst::BAD_ICMP_PREDICATE;
698   return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
699       .Case("eq", ICmpInst::ICMP_EQ)
700       .Case("ne", ICmpInst::ICMP_NE)
701       .Case("ugt", ICmpInst::ICMP_UGT)
702       .Case("uge", ICmpInst::ICMP_UGE)
703       .Case("ult", ICmpInst::ICMP_ULT)
704       .Case("ule", ICmpInst::ICMP_ULE)
705       .Case("sgt", ICmpInst::ICMP_SGT)
706       .Case("sge", ICmpInst::ICMP_SGE)
707       .Case("slt", ICmpInst::ICMP_SLT)
708       .Case("sle", ICmpInst::ICMP_SLE)
709       .Default(ICmpInst::BAD_ICMP_PREDICATE);
710 }
711 
// Decode the condition-code operand of a VP comparison. Its position and
// whether the comparison is FP come from the VP_PROPERTY_CMP entries in
// VPIntrinsics.def.
CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}
730 
// Convenience overload: this is a known reduction, so the static lookup
// always yields a value and the optional can be dereferenced directly.
unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}
734 
// Convenience overload: this is a known reduction, so the static lookup
// always yields a value and the optional can be dereferenced directly.
unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}
738 
// Position of the vector operand of a VP reduction (VP_PROPERTY_REDUCTION
// entries in VPIntrinsics.def); nullopt for non-reduction IDs.
std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
751 
// Position of the start-value operand of a VP reduction
// (VP_PROPERTY_REDUCTION entries in VPIntrinsics.def); nullopt otherwise.
std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
764 
// Map an overflow/saturating arithmetic intrinsic to the plain binary opcode
// it corresponds to. Calling this on any other intrinsic is a bug.
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  // Note: there is no saturating multiply intrinsic.
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}
784 
// True for the signed ("s"-prefixed) overflow/saturating intrinsics; false
// for their unsigned counterparts.
bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}
797 
798 unsigned BinaryOpIntrinsic::getNoWrapKind() const {
799   if (isSigned())
800     return OverflowingBinaryOperator::NoSignedWrap;
801   else
802     return OverflowingBinaryOperator::NoUnsignedWrap;
803 }
804 
// Find the statepoint this projection (gc.relocate/gc.result) belongs to.
// Returns the undef token unchanged if the statepoint was optimized away.
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
    cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  // The invoke terminating the unique predecessor is the statepoint.
  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}
825 
826 Value *GCRelocateInst::getBasePtr() const {
827   auto Statepoint = getStatepoint();
828   if (isa<UndefValue>(Statepoint))
829     return UndefValue::get(Statepoint->getType());
830 
831   auto *GCInst = cast<GCStatepointInst>(Statepoint);
832   if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
833     return *(Opt->Inputs.begin() + getBasePtrIndex());
834   return *(GCInst->arg_begin() + getBasePtrIndex());
835 }
836 
837 Value *GCRelocateInst::getDerivedPtr() const {
838   auto *Statepoint = getStatepoint();
839   if (isa<UndefValue>(Statepoint))
840     return UndefValue::get(Statepoint->getType());
841 
842   auto *GCInst = cast<GCStatepointInst>(Statepoint);
843   if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
844     return *(Opt->Inputs.begin() + getDerivedPtrIndex());
845   return *(GCInst->arg_begin() + getDerivedPtrIndex());
846 }
847