//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class.  Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// type pointer to empty struct { }*.  To access the real item of interest the
// cast instruction needs to be stripped away.
//
//===----------------------------------------------------------------------===//
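//
// A minimal usage sketch (client code, shown here only as a comment): passes
// typically recognize an intrinsic call by dyn_cast'ing to IntrinsicInst and
// switching on its ID, e.g.
//
//   if (const auto *II = dyn_cast<IntrinsicInst>(&I))
//     switch (II->getIntrinsicID()) {
//     case Intrinsic::dbg_value: /* handle llvm.dbg.value */ break;
//     default: break;
//     }
//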

#include "llvm/IR/IntrinsicInst.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Statepoint.h"
#include <optional>

using namespace llvm;

bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}

//===----------------------------------------------------------------------===//
/// DbgVariableIntrinsic - This is the common base class for debug info
/// intrinsics for variables.
///

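// The first (location) operand of these intrinsics takes one of three shapes
// in textual IR (illustrative examples):
//   call void @llvm.dbg.value(metadata i32 %x, ...)          ; single value
//   call void @llvm.dbg.value(metadata !DIArgList(i32 %a, i32 %b), ...)
//                                                             ; variadic list
//   call void @llvm.dbg.value(metadata !{}, ...)              ; killed/empty
// The helpers below iterate over, read, and rewrite that operand in all three
// forms.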
iterator_range<DbgVariableIntrinsic::location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  auto *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");

  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}

Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  auto *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return AL->getArgs()[OpIdx]->getValue();
  if (isa<MDNode>(MD))
    return nullptr;
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}

static ValueAsMetadata *getAsMetadata(Value *V) {
  return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
                                       cast<MetadataAsValue>(V)->getMetadata())
                                 : ValueAsMetadata::get(V);
}

void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue) {
  // If OldValue is used as the address part of a dbg.assign intrinsic, replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  (void)DbgAssignAddrReplaced;

  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  assert((OldIt != Locations.end() || DbgAssignAddrReplaced) &&
         "OldValue must be a current location");
  if (!hasArgList()) {
    // Additional check necessary to avoid unconditionally replacing this
    // operand when a dbg.assign address is replaced (DbgAssignAddrReplaced is
    // true).
    if (OldValue != getVariableLocationOp(0))
      return;
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Idx == OpIdx ? NewOperand
                               : getAsMetadata(getVariableLocationOp(Idx)));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(getAsMetadata(VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

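// A variable may be described piecewise: expressions carrying
// DW_OP_LLVM_fragment report the fragment size (e.g., an illustrative
// !DIExpression(DW_OP_LLVM_fragment, 0, 32) covers bits [0, 32)); otherwise
// the variable's own size is returned.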
std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}

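// For orientation, a dbg.assign call carries six metadata operands in the
// order (value, variable, expression, DIAssignID, address, address
// expression); the accessors below read and rewrite the address and value
// slots. Illustrative IR shape (operand values are made up):
//   call void @llvm.dbg.assign(metadata i32 %v, metadata !10,
//                              metadata !DIExpression(), metadata !11,
//                              metadata ptr %dest, metadata !DIExpression())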
Value *DbgAssignIntrinsic::getAddress() const {
  auto *MD = getRawAddress();
  if (auto *V = dyn_cast<ValueAsMetadata>(MD))
    return V->getValue();

  // When the value goes to null, it gets replaced by an empty MDNode.
  assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
  return nullptr;
}

void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
  setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
}

void DbgAssignIntrinsic::setAddress(Value *V) {
  assert(V->getType()->isPointerTy() &&
         "Destination Component must be a pointer type");
  setOperand(OpAddress,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

void DbgAssignIntrinsic::setKillAddress() {
  if (isKillAddress())
    return;
  setAddress(UndefValue::get(getAddress()->getType()));
}

bool DbgAssignIntrinsic::isKillAddress() const {
  Value *Addr = getAddress();
  return !Addr || isa<UndefValue>(Addr);
}

void DbgAssignIntrinsic::setValue(Value *V) {
  setOperand(OpValue,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.startswith("llvm."));

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
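  // For example (hypothetical query), looking up "llvm.memcpy.p0.p0.i64"
  // narrows the range component by component and settles on the table entry
  // "llvm.memcpy"; the final check below accepts it because the query
  // continues with a '.' after the matched name, i.e. the remainder is an
  // overload suffix rather than a different intrinsic.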
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.startswith(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}

ConstantInt *InstrProfInstBase::getNumCounters() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}

ConstantInt *InstrProfInstBase::getIndex() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}

Value *InstrProfIncrementInst::getStep() const {
  if (InstrProfIncrementInstStep::classof(this)) {
    return const_cast<Value *>(getArgOperand(4));
  }
  const Module *M = getModule();
  LLVMContext &Context = M->getContext();
  return ConstantInt::get(Type::getInt64Ty(Context), 1);
}

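// Both accessors below read trailing metadata-string operands; for operations
// that take them, the rounding mode is the second-to-last operand and the
// exception behavior the last one, e.g. (illustrative):
//   call double @llvm.experimental.constrained.fadd.f64(
//            double %a, double %b,
//            metadata !"round.dynamic", metadata !"fpexcept.strict")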
std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToRoundingMode(cast<MDString>(MD)->getString());
}

std::optional<fp::ExceptionBehavior>
ConstrainedFPIntrinsic::getExceptionBehavior() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
}

bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
  std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
  if (Except) {
    if (*Except != fp::ebIgnore)
      return false;
  }

  std::optional<RoundingMode> Rounding = getRoundingMode();
  if (Rounding) {
    if (*Rounding != RoundingMode::NearestTiesToEven)
      return false;
  }

  return true;
}

static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("oeq", FCmpInst::FCMP_OEQ)
      .Case("ogt", FCmpInst::FCMP_OGT)
      .Case("oge", FCmpInst::FCMP_OGE)
      .Case("olt", FCmpInst::FCMP_OLT)
      .Case("ole", FCmpInst::FCMP_OLE)
      .Case("one", FCmpInst::FCMP_ONE)
      .Case("ord", FCmpInst::FCMP_ORD)
      .Case("uno", FCmpInst::FCMP_UNO)
      .Case("ueq", FCmpInst::FCMP_UEQ)
      .Case("ugt", FCmpInst::FCMP_UGT)
      .Case("uge", FCmpInst::FCMP_UGE)
      .Case("ult", FCmpInst::FCMP_ULT)
      .Case("ule", FCmpInst::FCMP_ULE)
      .Case("une", FCmpInst::FCMP_UNE)
      .Default(FCmpInst::BAD_FCMP_PREDICATE);
}

FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}

bool ConstrainedFPIntrinsic::isUnaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 1;
#include "llvm/IR/ConstrainedOps.def"
  }
}

bool ConstrainedFPIntrinsic::isTernaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 3;
#include "llvm/IR/ConstrainedOps.def"
  }
}

bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  switch (I->getIntrinsicID()) {
#define INSTRUCTION(NAME, NARGS, ROUND_MODE, INTRINSIC)                        \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
  default:
    return false;
  }
}

ElementCount VPIntrinsic::getStaticVectorLength() const {
  auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
    const auto *VT = cast<VectorType>(T);
    auto ElemCount = VT->getElementCount();
    return ElemCount;
  };

  Value *VPMask = getMaskParam();
  if (!VPMask) {
    assert((getIntrinsicID() == Intrinsic::vp_merge ||
            getIntrinsicID() == Intrinsic::vp_select) &&
           "Unexpected VP intrinsic without mask operand");
    return GetVectorLengthOfType(getType());
  }
  return GetVectorLengthOfType(VPMask->getType());
}

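// For example (illustrative), in
//   call <4 x i32> @llvm.vp.add.v4i32(<4 x i32> %x, <4 x i32> %y,
//                                     <4 x i1> %m, i32 %evl)
// the mask %m is operand 2 and the explicit vector length (EVL) %evl is
// operand 3; the accessors and position queries below are driven by the
// VPIntrinsics.def table.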
Value *VPIntrinsic::getMaskParam() const {
  if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
    return getArgOperand(*MaskPos);
  return nullptr;
}

void VPIntrinsic::setMaskParam(Value *NewMask) {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  setArgOperand(*MaskPos, NewMask);
}

Value *VPIntrinsic::getVectorLengthParam() const {
  if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
    return getArgOperand(*EVLPos);
  return nullptr;
}

void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  setArgOperand(*EVLPos, NewEVL);
}

std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

/// \return the alignment of the pointer used by this load/store/gather or
/// scatter.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  std::optional<unsigned> PtrParamOpt =
      getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(*PtrParamOpt);
}

/// \return The pointer operand of this load, store, gather or scatter.
Value *VPIntrinsic::getMemoryPointerParam() const {
  if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
    return getArgOperand(*PtrParamOpt);
  return nullptr;
}

std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

/// \return The data (payload) operand of this store or scatter.
Value *VPIntrinsic::getMemoryDataParam() const {
  auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
  if (!DataParamOpt)
    return nullptr;
  return getArgOperand(*DataParamOpt);
}

std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

// Equivalent non-predicated opcode
std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
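// The two mappings above are inverses where both are defined; e.g.
// (illustrative) getFunctionalOpcodeForVP(Intrinsic::vp_add) is
// Instruction::Add and getForOpcode(Instruction::Add) is Intrinsic::vp_add,
// while opcodes without a VP counterpart yield Intrinsic::not_intrinsic.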

bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
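  // (For example, with <vscale x 4 x i32> an EVL computed as
  //  "mul i32 %vscale, 4", where %vscale comes from @llvm.vscale, covers every
  //  lane; the multiply is matched commutatively below. Illustrative pattern
  //  only.)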
  if (EC.isScalable()) {
    // Undig the DL
    const auto *ParMod = this->getModule();
    if (!ParMod)
      return false;
    const auto &DL = ParMod->getDataLayout();

    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(VLParam, m_c_Mul(m_ConstantInt(VScaleFactor), m_VScale(DL))))
      return VScaleFactor >= EC.getKnownMinValue();
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale(DL));
  }

  // standard SIMD operation
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}

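// A minimal caller sketch (hypothetical values X, Y, Mask, EVL and VecTy):
//   VPIntrinsic::getDeclarationForParams(M, Intrinsic::vp_add, VecTy,
//                                        {X, Y, Mask, EVL})
// declares (or reuses) "llvm.vp.add" overloaded on the first parameter's type,
// as in the default case below; the explicit cases pick per-intrinsic overload
// types instead.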
Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
                                               Type *ReturnType,
                                               ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();

    VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
    break;
  }
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
    VPFunc =
        Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}

bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("eq", ICmpInst::ICMP_EQ)
      .Case("ne", ICmpInst::ICMP_NE)
      .Case("ugt", ICmpInst::ICMP_UGT)
      .Case("uge", ICmpInst::ICMP_UGE)
      .Case("ult", ICmpInst::ICMP_ULT)
      .Case("ule", ICmpInst::ICMP_ULE)
      .Case("sgt", ICmpInst::ICMP_SGT)
      .Case("sge", ICmpInst::ICMP_SGE)
      .Case("slt", ICmpInst::ICMP_SLT)
      .Case("sle", ICmpInst::ICMP_SLE)
      .Default(ICmpInst::BAD_ICMP_PREDICATE);
}

CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}

unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}

unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}

std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}

bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}

unsigned BinaryOpIntrinsic::getNoWrapKind() const {
  if (isSigned())
    return OverflowingBinaryOperator::NoSignedWrap;
  else
    return OverflowingBinaryOperator::NoUnsignedWrap;
}

const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // This takes care both of relocates for call statepoints and relocates
  // on the normal path of an invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on the exceptional path of an invoke statepoint
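  // (Illustrative shape of that path, with mangled suffixes elided:
  //    %tok = invoke token @llvm.experimental.gc.statepoint.<...>(...)
  //               to label %normal unwind label %lpad
  //  lpad:
  //    %lp = landingpad token cleanup
  //    %p  = call ptr @llvm.experimental.gc.relocate.<...>(token %lp, ...)
  //  The landing pad's unique predecessor terminates in the statepoint invoke,
  //  which is what this function returns.)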
  const BasicBlock *InvokeBB =
    cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}

Value *GCRelocateInst::getBasePtr() const {
  auto Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getBasePtrIndex());
  return *(GCInst->arg_begin() + getBasePtrIndex());
}

Value *GCRelocateInst::getDerivedPtr() const {
  auto *Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  return *(GCInst->arg_begin() + getDerivedPtrIndex());
}
834