xref: /freebsd/contrib/llvm-project/llvm/lib/Transforms/Vectorize/VPlanConstruction.cpp (revision e64bea71c21eb42e97aa615188ba91f6cce0d36d)
1700637cbSDimitry Andric //===-- VPlanConstruction.cpp - Transforms for initial VPlan construction -===//
2700637cbSDimitry Andric //
3700637cbSDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4700637cbSDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
5700637cbSDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6700637cbSDimitry Andric //
7700637cbSDimitry Andric //===----------------------------------------------------------------------===//
8700637cbSDimitry Andric ///
9700637cbSDimitry Andric /// \file
10700637cbSDimitry Andric /// This file implements transforms for initial VPlan construction.
11700637cbSDimitry Andric ///
12700637cbSDimitry Andric //===----------------------------------------------------------------------===//
13700637cbSDimitry Andric 
14700637cbSDimitry Andric #include "LoopVectorizationPlanner.h"
15700637cbSDimitry Andric #include "VPlan.h"
16700637cbSDimitry Andric #include "VPlanCFG.h"
17700637cbSDimitry Andric #include "VPlanDominatorTree.h"
18700637cbSDimitry Andric #include "VPlanPatternMatch.h"
19700637cbSDimitry Andric #include "VPlanTransforms.h"
20700637cbSDimitry Andric #include "llvm/Analysis/LoopInfo.h"
21700637cbSDimitry Andric #include "llvm/Analysis/LoopIterator.h"
22700637cbSDimitry Andric #include "llvm/Analysis/ScalarEvolution.h"
23700637cbSDimitry Andric #include "llvm/IR/MDBuilder.h"
24700637cbSDimitry Andric 
25700637cbSDimitry Andric #define DEBUG_TYPE "vplan"
26700637cbSDimitry Andric 
27700637cbSDimitry Andric using namespace llvm;
28700637cbSDimitry Andric using namespace VPlanPatternMatch;
29700637cbSDimitry Andric 
30700637cbSDimitry Andric namespace {
31700637cbSDimitry Andric // Class that is used to build the plain CFG for the incoming IR.
32700637cbSDimitry Andric class PlainCFGBuilder {
33700637cbSDimitry Andric   // The outermost loop of the input loop nest considered for vectorization.
34700637cbSDimitry Andric   Loop *TheLoop;
35700637cbSDimitry Andric 
36700637cbSDimitry Andric   // Loop Info analysis.
37700637cbSDimitry Andric   LoopInfo *LI;
38700637cbSDimitry Andric 
39700637cbSDimitry Andric   // Vectorization plan that we are working on.
40700637cbSDimitry Andric   std::unique_ptr<VPlan> Plan;
41700637cbSDimitry Andric 
42700637cbSDimitry Andric   // Builder of the VPlan instruction-level representation.
43700637cbSDimitry Andric   VPBuilder VPIRBuilder;
44700637cbSDimitry Andric 
45700637cbSDimitry Andric   // NOTE: The following maps are intentionally destroyed after the plain CFG
46700637cbSDimitry Andric   // construction because subsequent VPlan-to-VPlan transformation may
47700637cbSDimitry Andric   // invalidate them.
48700637cbSDimitry Andric   // Map incoming BasicBlocks to their newly-created VPBasicBlocks.
49700637cbSDimitry Andric   DenseMap<BasicBlock *, VPBasicBlock *> BB2VPBB;
50700637cbSDimitry Andric   // Map incoming Value definitions to their newly-created VPValues.
51700637cbSDimitry Andric   DenseMap<Value *, VPValue *> IRDef2VPValue;
52700637cbSDimitry Andric 
53700637cbSDimitry Andric   // Hold phi node's that need to be fixed once the plain CFG has been built.
54700637cbSDimitry Andric   SmallVector<PHINode *, 8> PhisToFix;
55700637cbSDimitry Andric 
56700637cbSDimitry Andric   // Utility functions.
57700637cbSDimitry Andric   void setVPBBPredsFromBB(VPBasicBlock *VPBB, BasicBlock *BB);
58700637cbSDimitry Andric   void fixHeaderPhis();
59700637cbSDimitry Andric   VPBasicBlock *getOrCreateVPBB(BasicBlock *BB);
60700637cbSDimitry Andric #ifndef NDEBUG
61700637cbSDimitry Andric   bool isExternalDef(Value *Val);
62700637cbSDimitry Andric #endif
63700637cbSDimitry Andric   VPValue *getOrCreateVPOperand(Value *IRVal);
64700637cbSDimitry Andric   void createVPInstructionsForVPBB(VPBasicBlock *VPBB, BasicBlock *BB);
65700637cbSDimitry Andric 
66700637cbSDimitry Andric public:
PlainCFGBuilder(Loop * Lp,LoopInfo * LI)67700637cbSDimitry Andric   PlainCFGBuilder(Loop *Lp, LoopInfo *LI)
68700637cbSDimitry Andric       : TheLoop(Lp), LI(LI), Plan(std::make_unique<VPlan>(Lp)) {}
69700637cbSDimitry Andric 
70700637cbSDimitry Andric   /// Build plain CFG for TheLoop and connect it to Plan's entry.
71700637cbSDimitry Andric   std::unique_ptr<VPlan> buildPlainCFG();
72700637cbSDimitry Andric };
73700637cbSDimitry Andric } // anonymous namespace
74700637cbSDimitry Andric 
75700637cbSDimitry Andric // Set predecessors of \p VPBB in the same order as they are in \p BB. \p VPBB
76700637cbSDimitry Andric // must have no predecessors.
setVPBBPredsFromBB(VPBasicBlock * VPBB,BasicBlock * BB)77700637cbSDimitry Andric void PlainCFGBuilder::setVPBBPredsFromBB(VPBasicBlock *VPBB, BasicBlock *BB) {
78700637cbSDimitry Andric   // Collect VPBB predecessors.
79700637cbSDimitry Andric   SmallVector<VPBlockBase *, 2> VPBBPreds;
80700637cbSDimitry Andric   for (BasicBlock *Pred : predecessors(BB))
81700637cbSDimitry Andric     VPBBPreds.push_back(getOrCreateVPBB(Pred));
82700637cbSDimitry Andric   VPBB->setPredecessors(VPBBPreds);
83700637cbSDimitry Andric }
84700637cbSDimitry Andric 
isHeaderBB(BasicBlock * BB,Loop * L)85700637cbSDimitry Andric static bool isHeaderBB(BasicBlock *BB, Loop *L) {
86700637cbSDimitry Andric   return L && BB == L->getHeader();
87700637cbSDimitry Andric }
88700637cbSDimitry Andric 
89700637cbSDimitry Andric // Add operands to VPInstructions representing phi nodes from the input IR.
fixHeaderPhis()90700637cbSDimitry Andric void PlainCFGBuilder::fixHeaderPhis() {
91700637cbSDimitry Andric   for (auto *Phi : PhisToFix) {
92700637cbSDimitry Andric     assert(IRDef2VPValue.count(Phi) && "Missing VPInstruction for PHINode.");
93700637cbSDimitry Andric     VPValue *VPVal = IRDef2VPValue[Phi];
94700637cbSDimitry Andric     assert(isa<VPWidenPHIRecipe>(VPVal) &&
95700637cbSDimitry Andric            "Expected WidenPHIRecipe for phi node.");
96700637cbSDimitry Andric     auto *VPPhi = cast<VPWidenPHIRecipe>(VPVal);
97700637cbSDimitry Andric     assert(VPPhi->getNumOperands() == 0 &&
98700637cbSDimitry Andric            "Expected VPInstruction with no operands.");
99700637cbSDimitry Andric     assert(isHeaderBB(Phi->getParent(), LI->getLoopFor(Phi->getParent())) &&
100700637cbSDimitry Andric            "Expected Phi in header block.");
101700637cbSDimitry Andric     assert(Phi->getNumOperands() == 2 &&
102700637cbSDimitry Andric            "header phi must have exactly 2 operands");
103700637cbSDimitry Andric     for (BasicBlock *Pred : predecessors(Phi->getParent()))
104700637cbSDimitry Andric       VPPhi->addOperand(
105700637cbSDimitry Andric           getOrCreateVPOperand(Phi->getIncomingValueForBlock(Pred)));
106700637cbSDimitry Andric   }
107700637cbSDimitry Andric }
108700637cbSDimitry Andric 
109700637cbSDimitry Andric // Create a new empty VPBasicBlock for an incoming BasicBlock or retrieve an
110700637cbSDimitry Andric // existing one if it was already created.
getOrCreateVPBB(BasicBlock * BB)111700637cbSDimitry Andric VPBasicBlock *PlainCFGBuilder::getOrCreateVPBB(BasicBlock *BB) {
112700637cbSDimitry Andric   if (auto *VPBB = BB2VPBB.lookup(BB)) {
113700637cbSDimitry Andric     // Retrieve existing VPBB.
114700637cbSDimitry Andric     return VPBB;
115700637cbSDimitry Andric   }
116700637cbSDimitry Andric 
117700637cbSDimitry Andric   // Create new VPBB.
118700637cbSDimitry Andric   StringRef Name = BB->getName();
119700637cbSDimitry Andric   LLVM_DEBUG(dbgs() << "Creating VPBasicBlock for " << Name << "\n");
120700637cbSDimitry Andric   VPBasicBlock *VPBB = Plan->createVPBasicBlock(Name);
121700637cbSDimitry Andric   BB2VPBB[BB] = VPBB;
122700637cbSDimitry Andric   return VPBB;
123700637cbSDimitry Andric }
124700637cbSDimitry Andric 
125700637cbSDimitry Andric #ifndef NDEBUG
126700637cbSDimitry Andric // Return true if \p Val is considered an external definition. An external
127700637cbSDimitry Andric // definition is either:
128700637cbSDimitry Andric // 1. A Value that is not an Instruction. This will be refined in the future.
129700637cbSDimitry Andric // 2. An Instruction that is outside of the IR region represented in VPlan,
130700637cbSDimitry Andric // i.e., is not part of the loop nest.
isExternalDef(Value * Val)131700637cbSDimitry Andric bool PlainCFGBuilder::isExternalDef(Value *Val) {
132700637cbSDimitry Andric   // All the Values that are not Instructions are considered external
133700637cbSDimitry Andric   // definitions for now.
134700637cbSDimitry Andric   Instruction *Inst = dyn_cast<Instruction>(Val);
135700637cbSDimitry Andric   if (!Inst)
136700637cbSDimitry Andric     return true;
137700637cbSDimitry Andric 
138700637cbSDimitry Andric   // Check whether Instruction definition is in loop body.
139700637cbSDimitry Andric   return !TheLoop->contains(Inst);
140700637cbSDimitry Andric }
141700637cbSDimitry Andric #endif
142700637cbSDimitry Andric 
143700637cbSDimitry Andric // Create a new VPValue or retrieve an existing one for the Instruction's
144700637cbSDimitry Andric // operand \p IRVal. This function must only be used to create/retrieve VPValues
145700637cbSDimitry Andric // for *Instruction's operands* and not to create regular VPInstruction's. For
146700637cbSDimitry Andric // the latter, please, look at 'createVPInstructionsForVPBB'.
getOrCreateVPOperand(Value * IRVal)147700637cbSDimitry Andric VPValue *PlainCFGBuilder::getOrCreateVPOperand(Value *IRVal) {
148700637cbSDimitry Andric   auto VPValIt = IRDef2VPValue.find(IRVal);
149700637cbSDimitry Andric   if (VPValIt != IRDef2VPValue.end())
150700637cbSDimitry Andric     // Operand has an associated VPInstruction or VPValue that was previously
151700637cbSDimitry Andric     // created.
152700637cbSDimitry Andric     return VPValIt->second;
153700637cbSDimitry Andric 
154700637cbSDimitry Andric   // Operand doesn't have a previously created VPInstruction/VPValue. This
155700637cbSDimitry Andric   // means that operand is:
156700637cbSDimitry Andric   //   A) a definition external to VPlan,
157700637cbSDimitry Andric   //   B) any other Value without specific representation in VPlan.
158700637cbSDimitry Andric   // For now, we use VPValue to represent A and B and classify both as external
159700637cbSDimitry Andric   // definitions. We may introduce specific VPValue subclasses for them in the
160700637cbSDimitry Andric   // future.
161700637cbSDimitry Andric   assert(isExternalDef(IRVal) && "Expected external definition as operand.");
162700637cbSDimitry Andric 
163700637cbSDimitry Andric   // A and B: Create VPValue and add it to the pool of external definitions and
164700637cbSDimitry Andric   // to the Value->VPValue map.
165700637cbSDimitry Andric   VPValue *NewVPVal = Plan->getOrAddLiveIn(IRVal);
166700637cbSDimitry Andric   IRDef2VPValue[IRVal] = NewVPVal;
167700637cbSDimitry Andric   return NewVPVal;
168700637cbSDimitry Andric }
169700637cbSDimitry Andric 
170700637cbSDimitry Andric // Create new VPInstructions in a VPBasicBlock, given its BasicBlock
171700637cbSDimitry Andric // counterpart. This function must be invoked in RPO so that the operands of a
172700637cbSDimitry Andric // VPInstruction in \p BB have been visited before (except for Phi nodes).
createVPInstructionsForVPBB(VPBasicBlock * VPBB,BasicBlock * BB)173700637cbSDimitry Andric void PlainCFGBuilder::createVPInstructionsForVPBB(VPBasicBlock *VPBB,
174700637cbSDimitry Andric                                                   BasicBlock *BB) {
175700637cbSDimitry Andric   VPIRBuilder.setInsertPoint(VPBB);
176700637cbSDimitry Andric   // TODO: Model and preserve debug intrinsics in VPlan.
177700637cbSDimitry Andric   for (Instruction &InstRef : BB->instructionsWithoutDebug(false)) {
178700637cbSDimitry Andric     Instruction *Inst = &InstRef;
179700637cbSDimitry Andric 
180700637cbSDimitry Andric     // There shouldn't be any VPValue for Inst at this point. Otherwise, we
181700637cbSDimitry Andric     // visited Inst when we shouldn't, breaking the RPO traversal order.
182700637cbSDimitry Andric     assert(!IRDef2VPValue.count(Inst) &&
183700637cbSDimitry Andric            "Instruction shouldn't have been visited.");
184700637cbSDimitry Andric 
185700637cbSDimitry Andric     if (auto *Br = dyn_cast<BranchInst>(Inst)) {
186700637cbSDimitry Andric       // Conditional branch instruction are represented using BranchOnCond
187700637cbSDimitry Andric       // recipes.
188700637cbSDimitry Andric       if (Br->isConditional()) {
189700637cbSDimitry Andric         VPValue *Cond = getOrCreateVPOperand(Br->getCondition());
190700637cbSDimitry Andric         VPIRBuilder.createNaryOp(VPInstruction::BranchOnCond, {Cond}, Inst);
191700637cbSDimitry Andric       }
192700637cbSDimitry Andric 
193700637cbSDimitry Andric       // Skip the rest of the Instruction processing for Branch instructions.
194700637cbSDimitry Andric       continue;
195700637cbSDimitry Andric     }
196700637cbSDimitry Andric 
197700637cbSDimitry Andric     if (auto *SI = dyn_cast<SwitchInst>(Inst)) {
198700637cbSDimitry Andric       SmallVector<VPValue *> Ops = {getOrCreateVPOperand(SI->getCondition())};
199700637cbSDimitry Andric       for (auto Case : SI->cases())
200700637cbSDimitry Andric         Ops.push_back(getOrCreateVPOperand(Case.getCaseValue()));
201700637cbSDimitry Andric       VPIRBuilder.createNaryOp(Instruction::Switch, Ops, Inst);
202700637cbSDimitry Andric       continue;
203700637cbSDimitry Andric     }
204700637cbSDimitry Andric 
205700637cbSDimitry Andric     VPSingleDefRecipe *NewR;
206700637cbSDimitry Andric     if (auto *Phi = dyn_cast<PHINode>(Inst)) {
207700637cbSDimitry Andric       // Phi node's operands may have not been visited at this point. We create
208700637cbSDimitry Andric       // an empty VPInstruction that we will fix once the whole plain CFG has
209700637cbSDimitry Andric       // been built.
210700637cbSDimitry Andric       NewR = new VPWidenPHIRecipe(Phi, nullptr, Phi->getDebugLoc(), "vec.phi");
211700637cbSDimitry Andric       VPBB->appendRecipe(NewR);
212700637cbSDimitry Andric       if (isHeaderBB(Phi->getParent(), LI->getLoopFor(Phi->getParent()))) {
213700637cbSDimitry Andric         // Header phis need to be fixed after the VPBB for the latch has been
214700637cbSDimitry Andric         // created.
215700637cbSDimitry Andric         PhisToFix.push_back(Phi);
216700637cbSDimitry Andric       } else {
217700637cbSDimitry Andric         // Add operands for VPPhi in the order matching its predecessors in
218700637cbSDimitry Andric         // VPlan.
219700637cbSDimitry Andric         DenseMap<const VPBasicBlock *, VPValue *> VPPredToIncomingValue;
220700637cbSDimitry Andric         for (unsigned I = 0; I != Phi->getNumOperands(); ++I) {
221700637cbSDimitry Andric           VPPredToIncomingValue[BB2VPBB[Phi->getIncomingBlock(I)]] =
222700637cbSDimitry Andric               getOrCreateVPOperand(Phi->getIncomingValue(I));
223700637cbSDimitry Andric         }
224700637cbSDimitry Andric         for (VPBlockBase *Pred : VPBB->getPredecessors())
225700637cbSDimitry Andric           NewR->addOperand(
226700637cbSDimitry Andric               VPPredToIncomingValue.lookup(Pred->getExitingBasicBlock()));
227700637cbSDimitry Andric       }
228700637cbSDimitry Andric     } else {
229700637cbSDimitry Andric       // Translate LLVM-IR operands into VPValue operands and set them in the
230700637cbSDimitry Andric       // new VPInstruction.
231700637cbSDimitry Andric       SmallVector<VPValue *, 4> VPOperands;
232700637cbSDimitry Andric       for (Value *Op : Inst->operands())
233700637cbSDimitry Andric         VPOperands.push_back(getOrCreateVPOperand(Op));
234700637cbSDimitry Andric 
235700637cbSDimitry Andric       // Build VPInstruction for any arbitrary Instruction without specific
236700637cbSDimitry Andric       // representation in VPlan.
237700637cbSDimitry Andric       NewR = cast<VPInstruction>(
238700637cbSDimitry Andric           VPIRBuilder.createNaryOp(Inst->getOpcode(), VPOperands, Inst));
239700637cbSDimitry Andric     }
240700637cbSDimitry Andric 
241700637cbSDimitry Andric     IRDef2VPValue[Inst] = NewR;
242700637cbSDimitry Andric   }
243700637cbSDimitry Andric }
244700637cbSDimitry Andric 
245700637cbSDimitry Andric // Main interface to build the plain CFG.
buildPlainCFG()246700637cbSDimitry Andric std::unique_ptr<VPlan> PlainCFGBuilder::buildPlainCFG() {
247700637cbSDimitry Andric   VPIRBasicBlock *Entry = cast<VPIRBasicBlock>(Plan->getEntry());
248700637cbSDimitry Andric   BB2VPBB[Entry->getIRBasicBlock()] = Entry;
249700637cbSDimitry Andric   for (VPIRBasicBlock *ExitVPBB : Plan->getExitBlocks())
250700637cbSDimitry Andric     BB2VPBB[ExitVPBB->getIRBasicBlock()] = ExitVPBB;
251700637cbSDimitry Andric 
252700637cbSDimitry Andric   // 1. Scan the body of the loop in a topological order to visit each basic
253700637cbSDimitry Andric   // block after having visited its predecessor basic blocks. Create a VPBB for
254700637cbSDimitry Andric   // each BB and link it to its successor and predecessor VPBBs. Note that
255700637cbSDimitry Andric   // predecessors must be set in the same order as they are in the incomming IR.
256700637cbSDimitry Andric   // Otherwise, there might be problems with existing phi nodes and algorithm
257700637cbSDimitry Andric   // based on predecessors traversal.
258700637cbSDimitry Andric 
259700637cbSDimitry Andric   // Loop PH needs to be explicitly visited since it's not taken into account by
260700637cbSDimitry Andric   // LoopBlocksDFS.
261700637cbSDimitry Andric   BasicBlock *ThePreheaderBB = TheLoop->getLoopPreheader();
262700637cbSDimitry Andric   assert((ThePreheaderBB->getTerminator()->getNumSuccessors() == 1) &&
263700637cbSDimitry Andric          "Unexpected loop preheader");
264700637cbSDimitry Andric   for (auto &I : *ThePreheaderBB) {
265700637cbSDimitry Andric     if (I.getType()->isVoidTy())
266700637cbSDimitry Andric       continue;
267700637cbSDimitry Andric     IRDef2VPValue[&I] = Plan->getOrAddLiveIn(&I);
268700637cbSDimitry Andric   }
269700637cbSDimitry Andric 
270700637cbSDimitry Andric   LoopBlocksRPO RPO(TheLoop);
271700637cbSDimitry Andric   RPO.perform(LI);
272700637cbSDimitry Andric 
273700637cbSDimitry Andric   for (BasicBlock *BB : RPO) {
274700637cbSDimitry Andric     // Create or retrieve the VPBasicBlock for this BB.
275700637cbSDimitry Andric     VPBasicBlock *VPBB = getOrCreateVPBB(BB);
276700637cbSDimitry Andric     // Set VPBB predecessors in the same order as they are in the incoming BB.
277700637cbSDimitry Andric     setVPBBPredsFromBB(VPBB, BB);
278700637cbSDimitry Andric 
279700637cbSDimitry Andric     // Create VPInstructions for BB.
280700637cbSDimitry Andric     createVPInstructionsForVPBB(VPBB, BB);
281700637cbSDimitry Andric 
282700637cbSDimitry Andric     // Set VPBB successors. We create empty VPBBs for successors if they don't
283700637cbSDimitry Andric     // exist already. Recipes will be created when the successor is visited
284700637cbSDimitry Andric     // during the RPO traversal.
285700637cbSDimitry Andric     if (auto *SI = dyn_cast<SwitchInst>(BB->getTerminator())) {
286700637cbSDimitry Andric       SmallVector<VPBlockBase *> Succs = {
287700637cbSDimitry Andric           getOrCreateVPBB(SI->getDefaultDest())};
288700637cbSDimitry Andric       for (auto Case : SI->cases())
289700637cbSDimitry Andric         Succs.push_back(getOrCreateVPBB(Case.getCaseSuccessor()));
290700637cbSDimitry Andric       VPBB->setSuccessors(Succs);
291700637cbSDimitry Andric       continue;
292700637cbSDimitry Andric     }
293700637cbSDimitry Andric     auto *BI = cast<BranchInst>(BB->getTerminator());
294700637cbSDimitry Andric     unsigned NumSuccs = succ_size(BB);
295700637cbSDimitry Andric     if (NumSuccs == 1) {
296700637cbSDimitry Andric       VPBB->setOneSuccessor(getOrCreateVPBB(BB->getSingleSuccessor()));
297700637cbSDimitry Andric       continue;
298700637cbSDimitry Andric     }
299700637cbSDimitry Andric     assert(BI->isConditional() && NumSuccs == 2 && BI->isConditional() &&
300700637cbSDimitry Andric            "block must have conditional branch with 2 successors");
301700637cbSDimitry Andric 
302700637cbSDimitry Andric     BasicBlock *IRSucc0 = BI->getSuccessor(0);
303700637cbSDimitry Andric     BasicBlock *IRSucc1 = BI->getSuccessor(1);
304700637cbSDimitry Andric     VPBasicBlock *Successor0 = getOrCreateVPBB(IRSucc0);
305700637cbSDimitry Andric     VPBasicBlock *Successor1 = getOrCreateVPBB(IRSucc1);
306700637cbSDimitry Andric     VPBB->setTwoSuccessors(Successor0, Successor1);
307700637cbSDimitry Andric   }
308700637cbSDimitry Andric 
309700637cbSDimitry Andric   for (auto *EB : Plan->getExitBlocks())
310700637cbSDimitry Andric     setVPBBPredsFromBB(EB, EB->getIRBasicBlock());
311700637cbSDimitry Andric 
312700637cbSDimitry Andric   // 2. The whole CFG has been built at this point so all the input Values must
313700637cbSDimitry Andric   // have a VPlan counterpart. Fix VPlan header phi by adding their
314700637cbSDimitry Andric   // corresponding VPlan operands.
315700637cbSDimitry Andric   fixHeaderPhis();
316700637cbSDimitry Andric 
317700637cbSDimitry Andric   Plan->getEntry()->setOneSuccessor(getOrCreateVPBB(TheLoop->getHeader()));
318700637cbSDimitry Andric   Plan->getEntry()->setPlan(&*Plan);
319700637cbSDimitry Andric 
320700637cbSDimitry Andric   // Fix VPlan loop-closed-ssa exit phi's by adding incoming operands to the
321700637cbSDimitry Andric   // VPIRInstructions wrapping them.
322700637cbSDimitry Andric   // // Note that the operand order corresponds to IR predecessor order, and may
323700637cbSDimitry Andric   // need adjusting when VPlan predecessors are added, if an exit block has
324700637cbSDimitry Andric   // multiple predecessor.
325700637cbSDimitry Andric   for (auto *EB : Plan->getExitBlocks()) {
326700637cbSDimitry Andric     for (VPRecipeBase &R : EB->phis()) {
327700637cbSDimitry Andric       auto *PhiR = cast<VPIRPhi>(&R);
328700637cbSDimitry Andric       PHINode &Phi = PhiR->getIRPhi();
329700637cbSDimitry Andric       assert(PhiR->getNumOperands() == 0 &&
330700637cbSDimitry Andric              "no phi operands should be added yet");
331700637cbSDimitry Andric       for (BasicBlock *Pred : predecessors(EB->getIRBasicBlock()))
332700637cbSDimitry Andric         PhiR->addOperand(
333700637cbSDimitry Andric             getOrCreateVPOperand(Phi.getIncomingValueForBlock(Pred)));
334700637cbSDimitry Andric     }
335700637cbSDimitry Andric   }
336700637cbSDimitry Andric 
337700637cbSDimitry Andric   LLVM_DEBUG(Plan->setName("Plain CFG\n"); dbgs() << *Plan);
338700637cbSDimitry Andric   return std::move(Plan);
339700637cbSDimitry Andric }
340700637cbSDimitry Andric 
buildPlainCFG(Loop * TheLoop,LoopInfo & LI)341700637cbSDimitry Andric std::unique_ptr<VPlan> VPlanTransforms::buildPlainCFG(Loop *TheLoop,
342700637cbSDimitry Andric                                                       LoopInfo &LI) {
343700637cbSDimitry Andric   PlainCFGBuilder Builder(TheLoop, &LI);
344700637cbSDimitry Andric   return Builder.buildPlainCFG();
345700637cbSDimitry Andric }
346700637cbSDimitry Andric 
347700637cbSDimitry Andric /// Checks if \p HeaderVPB is a loop header block in the plain CFG; that is, it
348700637cbSDimitry Andric /// has exactly 2 predecessors (preheader and latch), where the block
349700637cbSDimitry Andric /// dominates the latch and the preheader dominates the block. If it is a
350700637cbSDimitry Andric /// header block return true and canonicalize the predecessors of the header
351700637cbSDimitry Andric /// (making sure the preheader appears first and the latch second) and the
352700637cbSDimitry Andric /// successors of the latch (making sure the loop exit comes first). Otherwise
353700637cbSDimitry Andric /// return false.
canonicalHeaderAndLatch(VPBlockBase * HeaderVPB,const VPDominatorTree & VPDT)354700637cbSDimitry Andric static bool canonicalHeaderAndLatch(VPBlockBase *HeaderVPB,
355700637cbSDimitry Andric                                     const VPDominatorTree &VPDT) {
356700637cbSDimitry Andric   ArrayRef<VPBlockBase *> Preds = HeaderVPB->getPredecessors();
357700637cbSDimitry Andric   if (Preds.size() != 2)
358700637cbSDimitry Andric     return false;
359700637cbSDimitry Andric 
360700637cbSDimitry Andric   auto *PreheaderVPBB = Preds[0];
361700637cbSDimitry Andric   auto *LatchVPBB = Preds[1];
362700637cbSDimitry Andric   if (!VPDT.dominates(PreheaderVPBB, HeaderVPB) ||
363700637cbSDimitry Andric       !VPDT.dominates(HeaderVPB, LatchVPBB)) {
364700637cbSDimitry Andric     std::swap(PreheaderVPBB, LatchVPBB);
365700637cbSDimitry Andric 
366700637cbSDimitry Andric     if (!VPDT.dominates(PreheaderVPBB, HeaderVPB) ||
367700637cbSDimitry Andric         !VPDT.dominates(HeaderVPB, LatchVPBB))
368700637cbSDimitry Andric       return false;
369700637cbSDimitry Andric 
370700637cbSDimitry Andric     // Canonicalize predecessors of header so that preheader is first and
371700637cbSDimitry Andric     // latch second.
372700637cbSDimitry Andric     HeaderVPB->swapPredecessors();
373700637cbSDimitry Andric     for (VPRecipeBase &R : cast<VPBasicBlock>(HeaderVPB)->phis())
374700637cbSDimitry Andric       R.swapOperands();
375700637cbSDimitry Andric   }
376700637cbSDimitry Andric 
377700637cbSDimitry Andric   // The two successors of conditional branch match the condition, with the
378700637cbSDimitry Andric   // first successor corresponding to true and the second to false. We
379700637cbSDimitry Andric   // canonicalize the successors of the latch when introducing the region, such
380700637cbSDimitry Andric   // that the latch exits the region when its condition is true; invert the
381700637cbSDimitry Andric   // original condition if the original CFG branches to the header on true.
382700637cbSDimitry Andric   // Note that the exit edge is not yet connected for top-level loops.
383700637cbSDimitry Andric   if (LatchVPBB->getSingleSuccessor() ||
384700637cbSDimitry Andric       LatchVPBB->getSuccessors()[0] != HeaderVPB)
385700637cbSDimitry Andric     return true;
386700637cbSDimitry Andric 
387700637cbSDimitry Andric   assert(LatchVPBB->getNumSuccessors() == 2 && "Must have 2 successors");
388700637cbSDimitry Andric   auto *Term = cast<VPBasicBlock>(LatchVPBB)->getTerminator();
389700637cbSDimitry Andric   assert(cast<VPInstruction>(Term)->getOpcode() ==
390700637cbSDimitry Andric              VPInstruction::BranchOnCond &&
391700637cbSDimitry Andric          "terminator must be a BranchOnCond");
392700637cbSDimitry Andric   auto *Not = new VPInstruction(VPInstruction::Not, {Term->getOperand(0)});
393700637cbSDimitry Andric   Not->insertBefore(Term);
394700637cbSDimitry Andric   Term->setOperand(0, Not);
395700637cbSDimitry Andric   LatchVPBB->swapSuccessors();
396700637cbSDimitry Andric 
397700637cbSDimitry Andric   return true;
398700637cbSDimitry Andric }
399700637cbSDimitry Andric 
400700637cbSDimitry Andric /// Create a new VPRegionBlock for the loop starting at \p HeaderVPB.
createLoopRegion(VPlan & Plan,VPBlockBase * HeaderVPB)401700637cbSDimitry Andric static void createLoopRegion(VPlan &Plan, VPBlockBase *HeaderVPB) {
402700637cbSDimitry Andric   auto *PreheaderVPBB = HeaderVPB->getPredecessors()[0];
403700637cbSDimitry Andric   auto *LatchVPBB = HeaderVPB->getPredecessors()[1];
404700637cbSDimitry Andric 
405700637cbSDimitry Andric   VPBlockUtils::disconnectBlocks(PreheaderVPBB, HeaderVPB);
406700637cbSDimitry Andric   VPBlockUtils::disconnectBlocks(LatchVPBB, HeaderVPB);
407700637cbSDimitry Andric   VPBlockBase *LatchExitVPB = LatchVPBB->getSingleSuccessor();
408700637cbSDimitry Andric   assert(LatchExitVPB && "Latch expected to be left with a single successor");
409700637cbSDimitry Andric 
410700637cbSDimitry Andric   // Create an empty region first and insert it between PreheaderVPBB and
411700637cbSDimitry Andric   // LatchExitVPB, taking care to preserve the original predecessor & successor
412700637cbSDimitry Andric   // order of blocks. Set region entry and exiting after both HeaderVPB and
413700637cbSDimitry Andric   // LatchVPBB have been disconnected from their predecessors/successors.
414700637cbSDimitry Andric   auto *R = Plan.createVPRegionBlock("", false /*isReplicator*/);
415700637cbSDimitry Andric   VPBlockUtils::insertOnEdge(LatchVPBB, LatchExitVPB, R);
416700637cbSDimitry Andric   VPBlockUtils::disconnectBlocks(LatchVPBB, R);
417700637cbSDimitry Andric   VPBlockUtils::connectBlocks(PreheaderVPBB, R);
418700637cbSDimitry Andric   R->setEntry(HeaderVPB);
419700637cbSDimitry Andric   R->setExiting(LatchVPBB);
420700637cbSDimitry Andric 
421700637cbSDimitry Andric   // All VPBB's reachable shallowly from HeaderVPB belong to the current region.
422700637cbSDimitry Andric   for (VPBlockBase *VPBB : vp_depth_first_shallow(HeaderVPB))
423700637cbSDimitry Andric     VPBB->setParent(R);
424700637cbSDimitry Andric }
425700637cbSDimitry Andric 
426700637cbSDimitry Andric // Add the necessary canonical IV and branch recipes required to control the
427700637cbSDimitry Andric // loop.
addCanonicalIVRecipes(VPlan & Plan,VPBasicBlock * HeaderVPBB,VPBasicBlock * LatchVPBB,Type * IdxTy,DebugLoc DL)428700637cbSDimitry Andric static void addCanonicalIVRecipes(VPlan &Plan, VPBasicBlock *HeaderVPBB,
429700637cbSDimitry Andric                                   VPBasicBlock *LatchVPBB, Type *IdxTy,
430700637cbSDimitry Andric                                   DebugLoc DL) {
431700637cbSDimitry Andric   Value *StartIdx = ConstantInt::get(IdxTy, 0);
432700637cbSDimitry Andric   auto *StartV = Plan.getOrAddLiveIn(StartIdx);
433700637cbSDimitry Andric 
434700637cbSDimitry Andric   // Add a VPCanonicalIVPHIRecipe starting at 0 to the header.
435700637cbSDimitry Andric   auto *CanonicalIVPHI = new VPCanonicalIVPHIRecipe(StartV, DL);
436700637cbSDimitry Andric   HeaderVPBB->insert(CanonicalIVPHI, HeaderVPBB->begin());
437700637cbSDimitry Andric 
438700637cbSDimitry Andric   // We are about to replace the branch to exit the region. Remove the original
439700637cbSDimitry Andric   // BranchOnCond, if there is any.
440700637cbSDimitry Andric   if (!LatchVPBB->empty() &&
441700637cbSDimitry Andric       match(&LatchVPBB->back(), m_BranchOnCond(m_VPValue())))
442700637cbSDimitry Andric     LatchVPBB->getTerminator()->eraseFromParent();
443700637cbSDimitry Andric 
444700637cbSDimitry Andric   VPBuilder Builder(LatchVPBB);
445700637cbSDimitry Andric   // Add a VPInstruction to increment the scalar canonical IV by VF * UF.
446700637cbSDimitry Andric   // Initially the induction increment is guaranteed to not wrap, but that may
447700637cbSDimitry Andric   // change later, e.g. when tail-folding, when the flags need to be dropped.
448700637cbSDimitry Andric   auto *CanonicalIVIncrement = Builder.createOverflowingOp(
449700637cbSDimitry Andric       Instruction::Add, {CanonicalIVPHI, &Plan.getVFxUF()}, {true, false}, DL,
450700637cbSDimitry Andric       "index.next");
451700637cbSDimitry Andric   CanonicalIVPHI->addOperand(CanonicalIVIncrement);
452700637cbSDimitry Andric 
453700637cbSDimitry Andric   // Add the BranchOnCount VPInstruction to the latch.
454700637cbSDimitry Andric   Builder.createNaryOp(VPInstruction::BranchOnCount,
455700637cbSDimitry Andric                        {CanonicalIVIncrement, &Plan.getVectorTripCount()}, DL);
456700637cbSDimitry Andric }
457700637cbSDimitry Andric 
// Prepare the plain-CFG \p Plan for vectorization: canonicalize the top-level
// loop, create the vector preheader, middle block and scalar preheader,
// introduce the canonical IV recipes, disconnect early exits (fusing at most
// one uncountable early exit into the latch exit), expand the trip count to a
// VPValue, and terminate the middle block with a branch selecting between the
// exit block and the scalar preheader.
void VPlanTransforms::prepareForVectorization(
    VPlan &Plan, Type *InductionTy, PredicatedScalarEvolution &PSE,
    bool RequiresScalarEpilogueCheck, bool TailFolded, Loop *TheLoop,
    DebugLoc IVDL, bool HasUncountableEarlyExit, VFRange &Range) {
  VPDominatorTree VPDT;
  VPDT.recalculate(Plan);

  // The entry's single successor is the header of the top-level loop. After
  // canonicalization, the latch is the header's second predecessor.
  VPBlockBase *HeaderVPB = Plan.getEntry()->getSingleSuccessor();
  canonicalHeaderAndLatch(HeaderVPB, VPDT);
  VPBlockBase *LatchVPB = HeaderVPB->getPredecessors()[1];

  VPBasicBlock *VecPreheader = Plan.createVPBasicBlock("vector.ph");
  VPBlockUtils::insertBlockAfter(VecPreheader, Plan.getEntry());

  VPBasicBlock *MiddleVPBB = Plan.createVPBasicBlock("middle.block");
  // The canonical LatchVPB has the header block as last successor. If it has
  // another successor, this successor is an exit block - insert middle block on
  // its edge. Otherwise, add middle block as another successor retaining header
  // as last.
  if (LatchVPB->getNumSuccessors() == 2) {
    VPBlockBase *LatchExitVPB = LatchVPB->getSuccessors()[0];
    VPBlockUtils::insertOnEdge(LatchVPB, LatchExitVPB, MiddleVPBB);
  } else {
    VPBlockUtils::connectBlocks(LatchVPB, MiddleVPBB);
    LatchVPB->swapSuccessors();
  }

  addCanonicalIVRecipes(Plan, cast<VPBasicBlock>(HeaderVPB),
                        cast<VPBasicBlock>(LatchVPB), InductionTy, IVDL);

  [[maybe_unused]] bool HandledUncountableEarlyExit = false;
  // Disconnect all early exits from the loop leaving it with a single exit from
  // the latch. Early exits that are countable are left for a scalar epilog. The
  // condition of uncountable early exits (currently at most one is supported)
  // is fused into the latch exit, and used to branch from middle block to the
  // early exit destination.
  for (VPIRBasicBlock *EB : Plan.getExitBlocks()) {
    for (VPBlockBase *Pred : to_vector(EB->getPredecessors())) {
      if (Pred == MiddleVPBB)
        continue;
      if (HasUncountableEarlyExit) {
        assert(!HandledUncountableEarlyExit &&
               "can handle exactly one uncountable early exit");
        handleUncountableEarlyExit(cast<VPBasicBlock>(Pred), EB, Plan,
                                   cast<VPBasicBlock>(HeaderVPB),
                                   cast<VPBasicBlock>(LatchVPB), Range);
        HandledUncountableEarlyExit = true;
      } else {
        // Countable early exit: drop the incoming values for the edge being
        // removed from the exit block's phis.
        for (VPRecipeBase &R : EB->phis())
          cast<VPIRPhi>(&R)->removeIncomingValueFor(Pred);
      }
      // Remove the exiting branch and the edge to the exit block.
      cast<VPBasicBlock>(Pred)->getTerminator()->eraseFromParent();
      VPBlockUtils::disconnectBlocks(Pred, EB);
    }
  }

  assert((!HasUncountableEarlyExit || HandledUncountableEarlyExit) &&
         "missed an uncountable exit that must be handled");

  // Create SCEV and VPValue for the trip count.
  // We use the symbolic max backedge-taken-count, which works also when
  // vectorizing loops with uncountable early exits.
  const SCEV *BackedgeTakenCountSCEV = PSE.getSymbolicMaxBackedgeTakenCount();
  assert(!isa<SCEVCouldNotCompute>(BackedgeTakenCountSCEV) &&
         "Invalid loop count");
  ScalarEvolution &SE = *PSE.getSE();
  const SCEV *TripCount = SE.getTripCountFromExitCount(BackedgeTakenCountSCEV,
                                                       InductionTy, TheLoop);
  Plan.setTripCount(
      vputils::getOrCreateVPValueForSCEVExpr(Plan, TripCount, SE));

  VPBasicBlock *ScalarPH = Plan.createVPBasicBlock("scalar.ph");
  VPBlockUtils::connectBlocks(ScalarPH, Plan.getScalarHeader());

  // The connection order corresponds to the operands of the conditional branch,
  // with the middle block already connected to the exit block.
  VPBlockUtils::connectBlocks(MiddleVPBB, ScalarPH);
  // Also connect the entry block to the scalar preheader.
  // TODO: Also introduce a branch recipe together with the minimum trip count
  // check.
  VPBlockUtils::connectBlocks(Plan.getEntry(), ScalarPH);
  Plan.getEntry()->swapSuccessors();

  // If MiddleVPBB has a single successor then the original loop does not exit
  // via the latch and the single successor must be the scalar preheader.
  // There's no need to add a runtime check to MiddleVPBB.
  if (MiddleVPBB->getNumSuccessors() == 1) {
    assert(MiddleVPBB->getSingleSuccessor() == ScalarPH &&
           "must have ScalarPH as single successor");
    return;
  }

  assert(MiddleVPBB->getNumSuccessors() == 2 && "must have 2 successors");

  // Add a check in the middle block to see if we have completed all of the
  // iterations in the first vector loop.
  //
  // Three cases:
  // 1) If we require a scalar epilogue, the scalar ph must execute. Set the
  //    condition to false.
  // 2) If (N - N%VF) == N, then we *don't* need to run the
  //    remainder. Thus if tail is to be folded, we know we don't need to run
  //    the remainder and we can set the condition to true.
  // 3) Otherwise, construct a runtime check.

  // We use the same DebugLoc as the scalar loop latch terminator instead of
  // the corresponding compare because they may have ended up with different
  // line numbers and we want to avoid awkward line stepping while debugging.
  // E.g., if the compare has got a line number inside the loop.
  DebugLoc LatchDL = TheLoop->getLoopLatch()->getTerminator()->getDebugLoc();
  VPBuilder Builder(MiddleVPBB);
  VPValue *Cmp;
  if (!RequiresScalarEpilogueCheck)
    Cmp = Plan.getOrAddLiveIn(ConstantInt::getFalse(
        IntegerType::getInt1Ty(TripCount->getType()->getContext())));
  else if (TailFolded)
    Cmp = Plan.getOrAddLiveIn(ConstantInt::getTrue(
        IntegerType::getInt1Ty(TripCount->getType()->getContext())));
  else
    Cmp = Builder.createICmp(CmpInst::ICMP_EQ, Plan.getTripCount(),
                             &Plan.getVectorTripCount(), LatchDL, "cmp.n");
  Builder.createNaryOp(VPInstruction::BranchOnCond, {Cmp}, LatchDL);
}
581700637cbSDimitry Andric 
createLoopRegions(VPlan & Plan)582700637cbSDimitry Andric void VPlanTransforms::createLoopRegions(VPlan &Plan) {
583700637cbSDimitry Andric   VPDominatorTree VPDT;
584700637cbSDimitry Andric   VPDT.recalculate(Plan);
585700637cbSDimitry Andric   for (VPBlockBase *HeaderVPB : vp_post_order_shallow(Plan.getEntry()))
586700637cbSDimitry Andric     if (canonicalHeaderAndLatch(HeaderVPB, VPDT))
587700637cbSDimitry Andric       createLoopRegion(Plan, HeaderVPB);
588700637cbSDimitry Andric 
589700637cbSDimitry Andric   VPRegionBlock *TopRegion = Plan.getVectorLoopRegion();
590700637cbSDimitry Andric   TopRegion->setName("vector loop");
591700637cbSDimitry Andric   TopRegion->getEntryBasicBlock()->setName("vector.body");
592700637cbSDimitry Andric }
593700637cbSDimitry Andric 
// Likelihood of bypassing the vectorized loop due to a runtime check block,
// including memory overlap checks block and wrapping/unit-stride checks block.
static constexpr uint32_t CheckBypassWeights[] = {1, 127};
597700637cbSDimitry Andric 
attachCheckBlock(VPlan & Plan,Value * Cond,BasicBlock * CheckBlock,bool AddBranchWeights)598700637cbSDimitry Andric void VPlanTransforms::attachCheckBlock(VPlan &Plan, Value *Cond,
599700637cbSDimitry Andric                                        BasicBlock *CheckBlock,
600700637cbSDimitry Andric                                        bool AddBranchWeights) {
601700637cbSDimitry Andric   VPValue *CondVPV = Plan.getOrAddLiveIn(Cond);
602700637cbSDimitry Andric   VPBasicBlock *CheckBlockVPBB = Plan.createVPIRBasicBlock(CheckBlock);
603700637cbSDimitry Andric   VPBlockBase *VectorPH = Plan.getVectorPreheader();
604700637cbSDimitry Andric   VPBlockBase *ScalarPH = Plan.getScalarPreheader();
605700637cbSDimitry Andric   VPBlockBase *PreVectorPH = VectorPH->getSinglePredecessor();
606700637cbSDimitry Andric   VPBlockUtils::insertOnEdge(PreVectorPH, VectorPH, CheckBlockVPBB);
607700637cbSDimitry Andric   VPBlockUtils::connectBlocks(CheckBlockVPBB, ScalarPH);
608700637cbSDimitry Andric   CheckBlockVPBB->swapSuccessors();
609700637cbSDimitry Andric 
610700637cbSDimitry Andric   // We just connected a new block to the scalar preheader. Update all
611700637cbSDimitry Andric   // VPPhis by adding an incoming value for it, replicating the last value.
612700637cbSDimitry Andric   unsigned NumPredecessors = ScalarPH->getNumPredecessors();
613700637cbSDimitry Andric   for (VPRecipeBase &R : cast<VPBasicBlock>(ScalarPH)->phis()) {
614700637cbSDimitry Andric     assert(isa<VPPhi>(&R) && "Phi expected to be VPPhi");
615700637cbSDimitry Andric     assert(cast<VPPhi>(&R)->getNumIncoming() == NumPredecessors - 1 &&
616700637cbSDimitry Andric            "must have incoming values for all operands");
617700637cbSDimitry Andric     R.addOperand(R.getOperand(NumPredecessors - 2));
618700637cbSDimitry Andric   }
619700637cbSDimitry Andric 
620700637cbSDimitry Andric   VPIRMetadata VPBranchWeights;
621700637cbSDimitry Andric   auto *Term = VPBuilder(CheckBlockVPBB)
622700637cbSDimitry Andric                    .createNaryOp(VPInstruction::BranchOnCond, {CondVPV},
623700637cbSDimitry Andric                                  Plan.getCanonicalIV()->getDebugLoc());
624700637cbSDimitry Andric   if (AddBranchWeights) {
625700637cbSDimitry Andric     MDBuilder MDB(Plan.getScalarHeader()->getIRBasicBlock()->getContext());
626700637cbSDimitry Andric     MDNode *BranchWeights =
627700637cbSDimitry Andric         MDB.createBranchWeights(CheckBypassWeights, /*IsExpected=*/false);
628700637cbSDimitry Andric     Term->addMetadata(LLVMContext::MD_prof, BranchWeights);
629700637cbSDimitry Andric   }
630700637cbSDimitry Andric }
631*e64bea71SDimitry Andric 
// Handle a loop with a single FMaxNum/FMinNum reduction by rewriting the
// vector loop to also exit as soon as any lane of the min/max input is NaN;
// the last vector iteration is then re-executed by the scalar loop with the
// reduction result taken from the phi at the start of that iteration.
// Returns true if the plan needed no change or was updated successfully;
// returns false if it cannot be handled — the plan may then be left partially
// modified and must be abandoned by the caller.
bool VPlanTransforms::handleMaxMinNumReductions(VPlan &Plan) {
  // Return the operand of the reduction phi's backedge min/max intrinsic that
  // is not the phi itself, or nullptr if the backedge value is not defined by
  // a widened or replicated intrinsic recipe.
  auto GetMinMaxCompareValue = [](VPReductionPHIRecipe *RedPhiR) -> VPValue * {
    auto *MinMaxR = dyn_cast<VPRecipeWithIRFlags>(
        RedPhiR->getBackedgeValue()->getDefiningRecipe());
    if (!MinMaxR)
      return nullptr;

    auto *RepR = dyn_cast<VPReplicateRecipe>(MinMaxR);
    if (!isa<VPWidenIntrinsicRecipe>(MinMaxR) &&
        !(RepR && isa<IntrinsicInst>(RepR->getUnderlyingInstr())))
      return nullptr;

#ifndef NDEBUG
    // NOTE(review): due to &&/|| precedence the message string binds only to
    // the second disjunct; the asserted condition is unaffected since the
    // string literal is truthy, but the parenthesization is worth confirming.
    Intrinsic::ID RdxIntrinsicId =
        RedPhiR->getRecurrenceKind() == RecurKind::FMaxNum ? Intrinsic::maxnum
                                                           : Intrinsic::minnum;
    assert((isa<VPWidenIntrinsicRecipe>(MinMaxR) &&
            cast<VPWidenIntrinsicRecipe>(MinMaxR)->getVectorIntrinsicID() ==
                RdxIntrinsicId) ||
           (RepR &&
            cast<IntrinsicInst>(RepR->getUnderlyingInstr())->getIntrinsicID() ==
                RdxIntrinsicId) &&
               "Intrinsic did not match recurrence kind");
#endif

    // The reduction phi must be one of the two intrinsic operands; return the
    // other one.
    if (MinMaxR->getOperand(0) == RedPhiR)
      return MinMaxR->getOperand(1);

    assert(MinMaxR->getOperand(1) == RedPhiR &&
           "Reduction phi operand expected");
    return MinMaxR->getOperand(0);
  };

  // Scan the header phis for a single FMaxNum/FMinNum reduction phi, noting
  // whether any other, unsupported, header phi is present.
  VPRegionBlock *LoopRegion = Plan.getVectorLoopRegion();
  VPReductionPHIRecipe *RedPhiR = nullptr;
  bool HasUnsupportedPhi = false;
  for (auto &R : LoopRegion->getEntryBasicBlock()->phis()) {
    if (isa<VPCanonicalIVPHIRecipe, VPWidenIntOrFpInductionRecipe>(&R))
      continue;
    auto *Cur = dyn_cast<VPReductionPHIRecipe>(&R);
    if (!Cur) {
      // TODO: Also support fixed-order recurrence phis.
      HasUnsupportedPhi = true;
      continue;
    }
    // For now, only a single reduction is supported.
    // TODO: Support multiple MaxNum/MinNum reductions and other reductions.
    if (RedPhiR)
      return false;
    if (Cur->getRecurrenceKind() != RecurKind::FMaxNum &&
        Cur->getRecurrenceKind() != RecurKind::FMinNum) {
      HasUnsupportedPhi = true;
      continue;
    }
    RedPhiR = Cur;
  }

  // No FMaxNum/FMinNum reduction present: nothing to do.
  if (!RedPhiR)
    return true;

  // We won't be able to resume execution in the scalar tail, if there are
  // unsupported header phis or there is no scalar tail at all, due to
  // tail-folding.
  if (HasUnsupportedPhi || !Plan.hasScalarTail())
    return false;

  VPValue *MinMaxOp = GetMinMaxCompareValue(RedPhiR);
  if (!MinMaxOp)
    return false;

  RecurKind RedPhiRK = RedPhiR->getRecurrenceKind();
  assert((RedPhiRK == RecurKind::FMaxNum || RedPhiRK == RecurKind::FMinNum) &&
         "unsupported reduction");

  // Check if the vector loop of \p Plan can early exit and restart
  // execution of last vector iteration in the scalar loop. This requires all
  // recipes up to early exit point be side-effect free as they are
  // re-executed. Currently we check that the loop is free of any recipe that
  // may write to memory. Expected to operate on an early VPlan w/o nested
  // regions.
  for (VPBlockBase *VPB : vp_depth_first_shallow(
           Plan.getVectorLoopRegion()->getEntryBasicBlock())) {
    auto *VPBB = cast<VPBasicBlock>(VPB);
    for (auto &R : *VPBB) {
      if (R.mayWriteToMemory() &&
          !match(&R, m_BranchOnCount(m_VPValue(), m_VPValue())))
        return false;
    }
  }

  // Replace the latch's BranchOnCount with a conditional branch that exits
  // either when the trip count is reached or when any lane of the min/max
  // operand is NaN.
  VPBasicBlock *LatchVPBB = LoopRegion->getExitingBasicBlock();
  VPBuilder Builder(LatchVPBB->getTerminator());
  auto *LatchExitingBranch = cast<VPInstruction>(LatchVPBB->getTerminator());
  assert(LatchExitingBranch->getOpcode() == VPInstruction::BranchOnCount &&
         "Unexpected terminator");
  auto *IsLatchExitTaken =
      Builder.createICmp(CmpInst::ICMP_EQ, LatchExitingBranch->getOperand(0),
                         LatchExitingBranch->getOperand(1));

  // "fcmp uno x, x" is true iff x is NaN; AnyOf reduces the per-lane results
  // to a single condition.
  VPValue *IsNaN = Builder.createFCmp(CmpInst::FCMP_UNO, MinMaxOp, MinMaxOp);
  VPValue *AnyNaN = Builder.createNaryOp(VPInstruction::AnyOf, {IsNaN});
  auto *AnyExitTaken =
      Builder.createNaryOp(Instruction::Or, {AnyNaN, IsLatchExitTaken});
  Builder.createNaryOp(VPInstruction::BranchOnCond, AnyExitTaken);
  LatchExitingBranch->eraseFromParent();

  // If we exit early due to NaNs, compute the final reduction result based on
  // the reduction phi at the beginning of the last vector iteration.
  // Locate the single ComputeReductionResult user of the reduction phi.
  auto *RdxResult = find_singleton<VPSingleDefRecipe>(
      RedPhiR->users(), [](VPUser *U, bool) -> VPSingleDefRecipe * {
        auto *VPI = dyn_cast<VPInstruction>(U);
        if (VPI && VPI->getOpcode() == VPInstruction::ComputeReductionResult)
          return VPI;
        return nullptr;
      });

  // Select between the phi and the original reduced value at the top of the
  // middle block, and feed that into the reduction-result computation.
  auto *MiddleVPBB = Plan.getMiddleBlock();
  Builder.setInsertPoint(MiddleVPBB, MiddleVPBB->begin());
  auto *NewSel =
      Builder.createSelect(AnyNaN, RedPhiR, RdxResult->getOperand(1));
  RdxResult->setOperand(1, NewSel);

  auto *ScalarPH = Plan.getScalarPreheader();
  // Update resume phis for inductions in the scalar preheader. If AnyNaN is
  // true, resume from the start of the last vector iteration via the
  // canonical IV, otherwise from the original value.
  for (auto &R : ScalarPH->phis()) {
    auto *ResumeR = cast<VPPhi>(&R);
    VPValue *VecV = ResumeR->getOperand(0);
    if (VecV == RdxResult)
      continue;
    if (auto *DerivedIV = dyn_cast<VPDerivedIVRecipe>(VecV)) {
      if (DerivedIV->getNumUsers() == 1 &&
          DerivedIV->getOperand(1) == &Plan.getVectorTripCount()) {
        // Re-derive the IV from either the canonical IV (early NaN exit) or
        // the vector trip count (normal exit).
        auto *NewSel = Builder.createSelect(AnyNaN, Plan.getCanonicalIV(),
                                            &Plan.getVectorTripCount());
        DerivedIV->moveAfter(&*Builder.getInsertPoint());
        DerivedIV->setOperand(1, NewSel);
        continue;
      }
    }
    // Bail out and abandon the current, partially modified, VPlan if we
    // encounter resume phi that cannot be updated yet.
    if (VecV != &Plan.getVectorTripCount()) {
      LLVM_DEBUG(dbgs() << "Found resume phi we cannot update for VPlan with "
                           "FMaxNum/FMinNum reduction.\n");
      return false;
    }
    auto *NewSel = Builder.createSelect(AnyNaN, Plan.getCanonicalIV(), VecV);
    ResumeR->setOperand(0, NewSel);
  }

  // And the middle block's exit condition with !AnyNaN, so that on an early
  // NaN exit the middle block presumably branches to the scalar preheader and
  // the last vector iteration is redone in the scalar loop (consistent with
  // the resume-phi updates above).
  auto *MiddleTerm = MiddleVPBB->getTerminator();
  Builder.setInsertPoint(MiddleTerm);
  VPValue *MiddleCond = MiddleTerm->getOperand(0);
  VPValue *NewCond = Builder.createAnd(MiddleCond, Builder.createNot(AnyNaN));
  MiddleTerm->setOperand(0, NewCond);
  return true;
}
791