xref: /freebsd/contrib/llvm-project/llvm/lib/IR/IntrinsicInst.cpp (revision e1e636193db45630c7881246d25902e57c43d24e)
1 //===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements methods that make it really easy to deal with intrinsic
10 // functions.
11 //
12 // All intrinsic function calls are instances of the call instruction, so these
13 // are all subclasses of the CallInst class.  Note that none of these classes
14 // has state or virtual methods, which is an important part of this gross/neat
15 // hack working.
16 //
17 // In some cases, arguments to intrinsics need to be generic and are defined as
18 // type pointer to empty struct { }*.  To access the real item of interest the
19 // cast instruction needs to be stripped away.
20 //
21 //===----------------------------------------------------------------------===//
22 
23 #include "llvm/IR/IntrinsicInst.h"
24 #include "llvm/ADT/StringSwitch.h"
25 #include "llvm/IR/Constants.h"
26 #include "llvm/IR/DebugInfoMetadata.h"
27 #include "llvm/IR/Metadata.h"
28 #include "llvm/IR/Module.h"
29 #include "llvm/IR/Operator.h"
30 #include "llvm/IR/PatternMatch.h"
31 #include "llvm/IR/Statepoint.h"
32 #include <optional>
33 
34 using namespace llvm;
35 
/// Return true if the given intrinsic may be lowered by the backend to an
/// actual function call (the ObjC ARC / runtime entry points below), so
/// passes must treat it as a potential call rather than a simple intrinsic.
bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}
68 
69 //===----------------------------------------------------------------------===//
70 /// DbgVariableIntrinsic - This is the common base class for debug info
71 /// intrinsics for variables.
72 ///
73 
74 iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
75   Metadata *MD = getRawLocation();
76   assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
77   // If operand is ValueAsMetadata, return a range over just that operand.
78   if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
79     return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
80   }
81   // If operand is DIArgList, return a range over its args.
82   if (auto *AL = dyn_cast<DIArgList>(MD))
83     return {location_op_iterator(AL->args_begin()),
84             location_op_iterator(AL->args_end())};
85   // Operand must be an empty metadata tuple, so return empty iterator.
86   return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
87           location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
88 }
89 
90 iterator_range<location_op_iterator>
91 DbgVariableIntrinsic::location_ops() const {
92   return getWrappedLocation().location_ops();
93 }
94 
95 Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
96   return getWrappedLocation().getVariableLocationOp(OpIdx);
97 }
98 
99 Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
100   Metadata *MD = getRawLocation();
101   assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
102   if (auto *AL = dyn_cast<DIArgList>(MD))
103     return AL->getArgs()[OpIdx]->getValue();
104   if (isa<MDNode>(MD))
105     return nullptr;
106   assert(
107       isa<ValueAsMetadata>(MD) &&
108       "Attempted to get location operand from DbgVariableIntrinsic with none.");
109   auto *V = cast<ValueAsMetadata>(MD);
110   assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
111                        "single location operand.");
112   return V->getValue();
113 }
114 
115 static ValueAsMetadata *getAsMetadata(Value *V) {
116   return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
117                                        cast<MetadataAsValue>(V)->getMetadata())
118                                  : ValueAsMetadata::get(V);
119 }
120 
121 void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
122                                                      Value *NewValue) {
123   // If OldValue is used as the address part of a dbg.assign intrinsic replace
124   // it with NewValue and return true.
125   auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
126     auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
127     if (!DAI || OldValue != DAI->getAddress())
128       return false;
129     DAI->setAddress(NewValue);
130     return true;
131   };
132   bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
133   (void)DbgAssignAddrReplaced;
134 
135   assert(NewValue && "Values must be non-null");
136   auto Locations = location_ops();
137   auto OldIt = find(Locations, OldValue);
138   if (OldIt == Locations.end()) {
139     assert(DbgAssignAddrReplaced &&
140            "OldValue must be dbg.assign addr if unused in DIArgList");
141     return;
142   }
143 
144   assert(OldIt != Locations.end() && "OldValue must be a current location");
145   if (!hasArgList()) {
146     Value *NewOperand = isa<MetadataAsValue>(NewValue)
147                             ? NewValue
148                             : MetadataAsValue::get(
149                                   getContext(), ValueAsMetadata::get(NewValue));
150     return setArgOperand(0, NewOperand);
151   }
152   SmallVector<ValueAsMetadata *, 4> MDs;
153   ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
154   for (auto *VMD : Locations)
155     MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
156   setArgOperand(
157       0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
158 }
159 void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
160                                                      Value *NewValue) {
161   assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
162   if (!hasArgList()) {
163     Value *NewOperand = isa<MetadataAsValue>(NewValue)
164                             ? NewValue
165                             : MetadataAsValue::get(
166                                   getContext(), ValueAsMetadata::get(NewValue));
167     return setArgOperand(0, NewOperand);
168   }
169   SmallVector<ValueAsMetadata *, 4> MDs;
170   ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
171   for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
172     MDs.push_back(Idx == OpIdx ? NewOperand
173                                : getAsMetadata(getVariableLocationOp(Idx)));
174   setArgOperand(
175       0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
176 }
177 
178 void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
179                                                   DIExpression *NewExpr) {
180   assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
181                                     NewValues.size()) &&
182          "NewExpr for debug variable intrinsic does not reference every "
183          "location operand.");
184   assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
185   setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
186   SmallVector<ValueAsMetadata *, 4> MDs;
187   for (auto *VMD : location_ops())
188     MDs.push_back(getAsMetadata(VMD));
189   for (auto *VMD : NewValues)
190     MDs.push_back(getAsMetadata(VMD));
191   setArgOperand(
192       0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
193 }
194 
195 std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
196   if (auto Fragment = getExpression()->getFragmentInfo())
197     return Fragment->SizeInBits;
198   return getVariable()->getSizeInBits();
199 }
200 
201 Value *DbgAssignIntrinsic::getAddress() const {
202   auto *MD = getRawAddress();
203   if (auto *V = dyn_cast<ValueAsMetadata>(MD))
204     return V->getValue();
205 
206   // When the value goes to null, it gets replaced by an empty MDNode.
207   assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
208   return nullptr;
209 }
210 
211 void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
212   setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
213 }
214 
215 void DbgAssignIntrinsic::setAddress(Value *V) {
216   setOperand(OpAddress,
217              MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
218 }
219 
220 void DbgAssignIntrinsic::setKillAddress() {
221   if (isKillAddress())
222     return;
223   setAddress(UndefValue::get(getAddress()->getType()));
224 }
225 
226 bool DbgAssignIntrinsic::isKillAddress() const {
227   Value *Addr = getAddress();
228   return !Addr || isa<UndefValue>(Addr);
229 }
230 
231 void DbgAssignIntrinsic::setValue(Value *V) {
232   setOperand(OpValue,
233              MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
234 }
235 
/// Binary-search \p NameTable (sorted intrinsic names) for \p Name, matching
/// either exactly or as an overloaded-intrinsic prefix. Returns the table
/// index, or -1 when nothing matches.
int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.starts_with("llvm.") && "Unexpected intrinsic prefix");

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    // Extend the compared slice to the next '.' (or the end of Name).
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    // Compare only [CmpStart, CmpEnd); earlier components already matched.
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    // Remember the last non-empty range so a query with extra trailing
    // components can still resolve to its longest matching table entry.
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  // Accept an exact match, or a prefix match where the next character of
  // Name is the '.' separator (overloaded intrinsic suffix).
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.starts_with(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}
272 
273 ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
274   if (InstrProfValueProfileInst::classof(this))
275     llvm_unreachable("InstrProfValueProfileInst does not have counters!");
276   return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
277 }
278 
279 ConstantInt *InstrProfCntrInstBase::getIndex() const {
280   if (InstrProfValueProfileInst::classof(this))
281     llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
282   return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
283 }
284 
285 Value *InstrProfIncrementInst::getStep() const {
286   if (InstrProfIncrementInstStep::classof(this)) {
287     return const_cast<Value *>(getArgOperand(4));
288   }
289   const Module *M = getModule();
290   LLVMContext &Context = M->getContext();
291   return ConstantInt::get(Type::getInt64Ty(Context), 1);
292 }
293 
294 std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
295   unsigned NumOperands = arg_size();
296   Metadata *MD = nullptr;
297   auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
298   if (MAV)
299     MD = MAV->getMetadata();
300   if (!MD || !isa<MDString>(MD))
301     return std::nullopt;
302   return convertStrToRoundingMode(cast<MDString>(MD)->getString());
303 }
304 
305 std::optional<fp::ExceptionBehavior>
306 ConstrainedFPIntrinsic::getExceptionBehavior() const {
307   unsigned NumOperands = arg_size();
308   Metadata *MD = nullptr;
309   auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
310   if (MAV)
311     MD = MAV->getMetadata();
312   if (!MD || !isa<MDString>(MD))
313     return std::nullopt;
314   return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
315 }
316 
317 bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
318   std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
319   if (Except) {
320     if (*Except != fp::ebIgnore)
321       return false;
322   }
323 
324   std::optional<RoundingMode> Rounding = getRoundingMode();
325   if (Rounding) {
326     if (*Rounding != RoundingMode::NearestTiesToEven)
327       return false;
328   }
329 
330   return true;
331 }
332 
333 static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
334   Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
335   if (!MD || !isa<MDString>(MD))
336     return FCmpInst::BAD_FCMP_PREDICATE;
337   return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
338       .Case("oeq", FCmpInst::FCMP_OEQ)
339       .Case("ogt", FCmpInst::FCMP_OGT)
340       .Case("oge", FCmpInst::FCMP_OGE)
341       .Case("olt", FCmpInst::FCMP_OLT)
342       .Case("ole", FCmpInst::FCMP_OLE)
343       .Case("one", FCmpInst::FCMP_ONE)
344       .Case("ord", FCmpInst::FCMP_ORD)
345       .Case("uno", FCmpInst::FCMP_UNO)
346       .Case("ueq", FCmpInst::FCMP_UEQ)
347       .Case("ugt", FCmpInst::FCMP_UGT)
348       .Case("uge", FCmpInst::FCMP_UGE)
349       .Case("ult", FCmpInst::FCMP_ULT)
350       .Case("ule", FCmpInst::FCMP_ULE)
351       .Case("une", FCmpInst::FCMP_UNE)
352       .Default(FCmpInst::BAD_FCMP_PREDICATE);
353 }
354 
355 FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
356   return getFPPredicateFromMD(getArgOperand(2));
357 }
358 
/// Return true if this constrained intrinsic takes exactly one non-metadata
/// operand. The table is generated from ConstrainedOps.def.
bool ConstrainedFPIntrinsic::isUnaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 1;
#include "llvm/IR/ConstrainedOps.def"
  }
}
369 
/// Return true if this constrained intrinsic takes exactly three non-metadata
/// operands. The table is generated from ConstrainedOps.def.
bool ConstrainedFPIntrinsic::isTernaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 3;
#include "llvm/IR/ConstrainedOps.def"
  }
}
380 
/// RTTI support: true for any intrinsic listed in ConstrainedOps.def.
bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  switch (I->getIntrinsicID()) {
#define INSTRUCTION(NAME, NARGS, ROUND_MODE, INTRINSIC)                        \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
  default:
    return false;
  }
}
391 
392 ElementCount VPIntrinsic::getStaticVectorLength() const {
393   auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
394     const auto *VT = cast<VectorType>(T);
395     auto ElemCount = VT->getElementCount();
396     return ElemCount;
397   };
398 
399   Value *VPMask = getMaskParam();
400   if (!VPMask) {
401     assert((getIntrinsicID() == Intrinsic::vp_merge ||
402             getIntrinsicID() == Intrinsic::vp_select) &&
403            "Unexpected VP intrinsic without mask operand");
404     return GetVectorLengthOfType(getType());
405   }
406   return GetVectorLengthOfType(VPMask->getType());
407 }
408 
409 Value *VPIntrinsic::getMaskParam() const {
410   if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
411     return getArgOperand(*MaskPos);
412   return nullptr;
413 }
414 
415 void VPIntrinsic::setMaskParam(Value *NewMask) {
416   auto MaskPos = getMaskParamPos(getIntrinsicID());
417   setArgOperand(*MaskPos, NewMask);
418 }
419 
420 Value *VPIntrinsic::getVectorLengthParam() const {
421   if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
422     return getArgOperand(*EVLPos);
423   return nullptr;
424 }
425 
426 void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
427   auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
428   setArgOperand(*EVLPos, NewEVL);
429 }
430 
/// Table generated from VPIntrinsics.def: operand index of the mask for each
/// VP intrinsic, or std::nullopt for non-VP IDs.
std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
443 
/// Table generated from VPIntrinsics.def: operand index of the explicit
/// vector length for each VP intrinsic, or std::nullopt for non-VP IDs.
std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
456 
457 /// \return the alignment of the pointer used by this load/store/gather or
458 /// scatter.
459 MaybeAlign VPIntrinsic::getPointerAlignment() const {
460   std::optional<unsigned> PtrParamOpt =
461       getMemoryPointerParamPos(getIntrinsicID());
462   assert(PtrParamOpt && "no pointer argument!");
463   return getParamAlign(*PtrParamOpt);
464 }
465 
466 /// \return The pointer operand of this load,store, gather or scatter.
467 Value *VPIntrinsic::getMemoryPointerParam() const {
468   if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
469     return getArgOperand(*PtrParamOpt);
470   return nullptr;
471 }
472 
/// Table generated from VPIntrinsics.def: pointer-operand index for memory
/// VP intrinsics, std::nullopt otherwise.
std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
485 
486 /// \return The data (payload) operand of this store or scatter.
487 Value *VPIntrinsic::getMemoryDataParam() const {
488   auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
489   if (!DataParamOpt)
490     return nullptr;
491   return getArgOperand(*DataParamOpt);
492 }
493 
/// Table generated from VPIntrinsics.def: data-operand index for store-like
/// VP intrinsics, std::nullopt otherwise.
std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
505 
/// File-local constexpr: true iff \p ID is registered in VPIntrinsics.def.
/// constexpr so it can feed the static_asserts further below.
constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
517 
/// Public wrapper around the file-local constexpr table lookup above.
bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  return ::isVPIntrinsic(ID);
}
521 
// Equivalent non-predicated opcode, as registered via
// VP_PROPERTY_FUNCTIONAL_OPC in VPIntrinsics.def; std::nullopt when the VP
// intrinsic has no IR-opcode counterpart. constexpr for the static_asserts
// below.
constexpr static std::optional<unsigned>
getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
535 
/// Public wrapper around the file-local constexpr lookup above.
std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  return ::getFunctionalOpcodeForVP(ID);
}
540 
// Equivalent non-predicated intrinsic ID, as registered via
// VP_PROPERTY_FUNCTIONAL_INTRINSIC in VPIntrinsics.def; std::nullopt when
// there is none. constexpr for the static_asserts below.
constexpr static std::optional<Intrinsic::ID>
getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
554 
/// Public wrapper around the file-local constexpr lookup above.
std::optional<Intrinsic::ID>
VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  return ::getFunctionalIntrinsicIDForVP(ID);
}
559 
/// True iff \p ID is explicitly marked VP_PROPERTY_NO_FUNCTIONAL in
/// VPIntrinsics.def, i.e. it intentionally has no non-VP equivalent.
constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_NO_FUNCTIONAL return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
571 
// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
// defined, or be marked that they don't have one. Enforced at compile time:
// one static_assert is instantiated per VP intrinsic in the .def file, using
// the constexpr query helpers above.
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...)                                 \
  static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) ||           \
                getFunctionalOpcodeForVP(Intrinsic::VPID) ||                   \
                getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
#include "llvm/IR/VPIntrinsics.def"
579 
// Equivalent non-predicated constrained intrinsic, as registered via
// VP_PROPERTY_CONSTRAINEDFP in VPIntrinsics.def; std::nullopt when there is
// none.
std::optional<Intrinsic::ID>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
593 
/// Inverse mapping: the VP intrinsic corresponding to IR opcode \p IROPC, or
/// not_intrinsic if no VP counterpart is registered. Note the macro shape:
/// the opcode case labels fall through to the END_REGISTER return of their
/// enclosing VP registration.
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
606 
/// Return true when the EVL operand provably masks off no lanes, i.e. it is
/// statically known to cover the whole vector, so it can be ignored.
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns: either "C * vscale" with C >= known-min, or a
    // bare "vscale" when the known-min element count is 1.
    uint64_t VScaleFactor;
    if (match(VLParam, m_c_Mul(m_ConstantInt(VScaleFactor), m_VScale())))
      return VScaleFactor >= EC.getKnownMinValue();
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale());
  }

  // standard SIMD operation: only a constant EVL can be proven to cover the
  // fixed-width vector.
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}
642 
643 Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
644                                                Type *ReturnType,
645                                                ArrayRef<Value *> Params) {
646   assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
647   Function *VPFunc;
648   switch (VPID) {
649   default: {
650     Type *OverloadTy = Params[0]->getType();
651     if (VPReductionIntrinsic::isVPReduction(VPID))
652       OverloadTy =
653           Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();
654 
655     VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
656     break;
657   }
658   case Intrinsic::vp_trunc:
659   case Intrinsic::vp_sext:
660   case Intrinsic::vp_zext:
661   case Intrinsic::vp_fptoui:
662   case Intrinsic::vp_fptosi:
663   case Intrinsic::vp_uitofp:
664   case Intrinsic::vp_sitofp:
665   case Intrinsic::vp_fptrunc:
666   case Intrinsic::vp_fpext:
667   case Intrinsic::vp_ptrtoint:
668   case Intrinsic::vp_inttoptr:
669     VPFunc =
670         Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
671     break;
672   case Intrinsic::vp_is_fpclass:
673     VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[0]->getType()});
674     break;
675   case Intrinsic::vp_merge:
676   case Intrinsic::vp_select:
677     VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
678     break;
679   case Intrinsic::vp_load:
680     VPFunc = Intrinsic::getDeclaration(
681         M, VPID, {ReturnType, Params[0]->getType()});
682     break;
683   case Intrinsic::experimental_vp_strided_load:
684     VPFunc = Intrinsic::getDeclaration(
685         M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
686     break;
687   case Intrinsic::vp_gather:
688     VPFunc = Intrinsic::getDeclaration(
689         M, VPID, {ReturnType, Params[0]->getType()});
690     break;
691   case Intrinsic::vp_store:
692     VPFunc = Intrinsic::getDeclaration(
693         M, VPID, {Params[0]->getType(), Params[1]->getType()});
694     break;
695   case Intrinsic::experimental_vp_strided_store:
696     VPFunc = Intrinsic::getDeclaration(
697         M, VPID,
698         {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
699     break;
700   case Intrinsic::vp_scatter:
701     VPFunc = Intrinsic::getDeclaration(
702         M, VPID, {Params[0]->getType(), Params[1]->getType()});
703     break;
704   }
705   assert(VPFunc && "Could not declare VP intrinsic");
706   return VPFunc;
707 }
708 
/// True iff \p ID is registered with VP_PROPERTY_REDUCTION in
/// VPIntrinsics.def.
bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
720 
/// True iff \p ID is registered with VP_PROPERTY_CASTOP in VPIntrinsics.def.
bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
732 
/// True iff \p ID is registered with VP_PROPERTY_CMP in VPIntrinsics.def.
bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
744 
/// True iff \p ID is registered with VP_PROPERTY_BINARYOP in
/// VPIntrinsics.def.
bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_BINARYOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
756 
757 static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
758   Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
759   if (!MD || !isa<MDString>(MD))
760     return ICmpInst::BAD_ICMP_PREDICATE;
761   return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
762       .Case("eq", ICmpInst::ICMP_EQ)
763       .Case("ne", ICmpInst::ICMP_NE)
764       .Case("ugt", ICmpInst::ICMP_UGT)
765       .Case("uge", ICmpInst::ICMP_UGE)
766       .Case("ult", ICmpInst::ICMP_ULT)
767       .Case("ule", ICmpInst::ICMP_ULE)
768       .Case("sgt", ICmpInst::ICMP_SGT)
769       .Case("sge", ICmpInst::ICMP_SGE)
770       .Case("slt", ICmpInst::ICMP_SLT)
771       .Case("sle", ICmpInst::ICMP_SLE)
772       .Default(ICmpInst::BAD_ICMP_PREDICATE);
773 }
774 
/// Return the comparison predicate of this VP compare. The condition-code
/// operand position and FP-vs-integer flavor come from VPIntrinsics.def.
CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  // Decode the MDString operand with the matching predicate parser.
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}
793 
/// Operand index of this reduction's vector operand (must exist).
unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}
797 
/// Operand index of this reduction's start-value operand (must exist).
unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}
801 
/// Table generated from VPIntrinsics.def: vector-operand index for VP
/// reductions, std::nullopt for everything else.
std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
814 
/// Table generated from VPIntrinsics.def: start-value operand index for VP
/// reductions, std::nullopt for everything else.
std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
827 
/// Map this overflow/saturating intrinsic to its underlying IR binary
/// opcode (Add/Sub/Mul). Any other intrinsic ID is a caller bug.
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}
847 
/// True for the signed ("s"-prefixed) overflow/saturating variants.
bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}
860 
861 unsigned BinaryOpIntrinsic::getNoWrapKind() const {
862   if (isSigned())
863     return OverflowingBinaryOperator::NoSignedWrap;
864   else
865     return OverflowingBinaryOperator::NoUnsignedWrap;
866 }
867 
/// Return the statepoint this projection (gc.relocate/gc.result) refers to.
/// The token operand may be undef or "none" during transforms; those cases
/// are passed through as undef rather than a statepoint.
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // Treat none token as if it was undef here
  if (isa<ConstantTokenNone>(Token))
    return UndefValue::get(Token->getType());

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
    cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  // The invoke's terminator in the unique predecessor is the statepoint.
  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}
892 
893 Value *GCRelocateInst::getBasePtr() const {
894   auto Statepoint = getStatepoint();
895   if (isa<UndefValue>(Statepoint))
896     return UndefValue::get(Statepoint->getType());
897 
898   auto *GCInst = cast<GCStatepointInst>(Statepoint);
899   if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
900     return *(Opt->Inputs.begin() + getBasePtrIndex());
901   return *(GCInst->arg_begin() + getBasePtrIndex());
902 }
903 
904 Value *GCRelocateInst::getDerivedPtr() const {
905   auto *Statepoint = getStatepoint();
906   if (isa<UndefValue>(Statepoint))
907     return UndefValue::get(Statepoint->getType());
908 
909   auto *GCInst = cast<GCStatepointInst>(Statepoint);
910   if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
911     return *(Opt->Inputs.begin() + getDerivedPtrIndex());
912   return *(GCInst->arg_begin() + getDerivedPtrIndex());
913 }
914