//===- AMDGPUUnifyDivergentExitNodes.cpp ----------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This is a variant of the UnifyDivergentExitNodes pass. Rather than ensuring
// there is at most one ret and one unreachable instruction, it ensures there is
// at most one divergent exiting block.
//
// StructurizeCFG can't deal with multi-exit regions formed by branches to
// multiple return nodes. It is not desirable to structurize regions with
// uniform branches, so unifying those to the same return block as divergent
// branches inhibits use of scalar branching. It still can't deal with the case
// where one branch goes to return, and one unreachable. Replace unreachable in
// this case with a return.
190b57cec5SDimitry Andric // 200b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 210b57cec5SDimitry Andric 220b57cec5SDimitry Andric #include "AMDGPU.h" 230b57cec5SDimitry Andric #include "llvm/ADT/ArrayRef.h" 240b57cec5SDimitry Andric #include "llvm/ADT/SmallPtrSet.h" 250b57cec5SDimitry Andric #include "llvm/ADT/SmallVector.h" 260b57cec5SDimitry Andric #include "llvm/ADT/StringRef.h" 270b57cec5SDimitry Andric #include "llvm/Analysis/LegacyDivergenceAnalysis.h" 280b57cec5SDimitry Andric #include "llvm/Analysis/PostDominators.h" 290b57cec5SDimitry Andric #include "llvm/Analysis/TargetTransformInfo.h" 300b57cec5SDimitry Andric #include "llvm/IR/BasicBlock.h" 310b57cec5SDimitry Andric #include "llvm/IR/CFG.h" 320b57cec5SDimitry Andric #include "llvm/IR/Constants.h" 330b57cec5SDimitry Andric #include "llvm/IR/Function.h" 340b57cec5SDimitry Andric #include "llvm/IR/InstrTypes.h" 350b57cec5SDimitry Andric #include "llvm/IR/Instructions.h" 360b57cec5SDimitry Andric #include "llvm/IR/Intrinsics.h" 370b57cec5SDimitry Andric #include "llvm/IR/Type.h" 38*480093f4SDimitry Andric #include "llvm/InitializePasses.h" 390b57cec5SDimitry Andric #include "llvm/Pass.h" 400b57cec5SDimitry Andric #include "llvm/Support/Casting.h" 410b57cec5SDimitry Andric #include "llvm/Transforms/Scalar.h" 420b57cec5SDimitry Andric #include "llvm/Transforms/Utils.h" 43*480093f4SDimitry Andric #include "llvm/Transforms/Utils/Local.h" 440b57cec5SDimitry Andric 450b57cec5SDimitry Andric using namespace llvm; 460b57cec5SDimitry Andric 470b57cec5SDimitry Andric #define DEBUG_TYPE "amdgpu-unify-divergent-exit-nodes" 480b57cec5SDimitry Andric 490b57cec5SDimitry Andric namespace { 500b57cec5SDimitry Andric 510b57cec5SDimitry Andric class AMDGPUUnifyDivergentExitNodes : public FunctionPass { 520b57cec5SDimitry Andric public: 530b57cec5SDimitry Andric static char ID; // Pass identification, replacement for typeid 540b57cec5SDimitry Andric 550b57cec5SDimitry 
Andric AMDGPUUnifyDivergentExitNodes() : FunctionPass(ID) { 560b57cec5SDimitry Andric initializeAMDGPUUnifyDivergentExitNodesPass(*PassRegistry::getPassRegistry()); 570b57cec5SDimitry Andric } 580b57cec5SDimitry Andric 590b57cec5SDimitry Andric // We can preserve non-critical-edgeness when we unify function exit nodes 600b57cec5SDimitry Andric void getAnalysisUsage(AnalysisUsage &AU) const override; 610b57cec5SDimitry Andric bool runOnFunction(Function &F) override; 620b57cec5SDimitry Andric }; 630b57cec5SDimitry Andric 640b57cec5SDimitry Andric } // end anonymous namespace 650b57cec5SDimitry Andric 660b57cec5SDimitry Andric char AMDGPUUnifyDivergentExitNodes::ID = 0; 670b57cec5SDimitry Andric 680b57cec5SDimitry Andric char &llvm::AMDGPUUnifyDivergentExitNodesID = AMDGPUUnifyDivergentExitNodes::ID; 690b57cec5SDimitry Andric 700b57cec5SDimitry Andric INITIALIZE_PASS_BEGIN(AMDGPUUnifyDivergentExitNodes, DEBUG_TYPE, 710b57cec5SDimitry Andric "Unify divergent function exit nodes", false, false) 720b57cec5SDimitry Andric INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass) 730b57cec5SDimitry Andric INITIALIZE_PASS_DEPENDENCY(LegacyDivergenceAnalysis) 740b57cec5SDimitry Andric INITIALIZE_PASS_END(AMDGPUUnifyDivergentExitNodes, DEBUG_TYPE, 750b57cec5SDimitry Andric "Unify divergent function exit nodes", false, false) 760b57cec5SDimitry Andric 770b57cec5SDimitry Andric void AMDGPUUnifyDivergentExitNodes::getAnalysisUsage(AnalysisUsage &AU) const{ 780b57cec5SDimitry Andric // TODO: Preserve dominator tree. 790b57cec5SDimitry Andric AU.addRequired<PostDominatorTreeWrapperPass>(); 800b57cec5SDimitry Andric 810b57cec5SDimitry Andric AU.addRequired<LegacyDivergenceAnalysis>(); 820b57cec5SDimitry Andric 830b57cec5SDimitry Andric // No divergent values are changed, only blocks and branch edges. 
840b57cec5SDimitry Andric AU.addPreserved<LegacyDivergenceAnalysis>(); 850b57cec5SDimitry Andric 860b57cec5SDimitry Andric // We preserve the non-critical-edgeness property 870b57cec5SDimitry Andric AU.addPreservedID(BreakCriticalEdgesID); 880b57cec5SDimitry Andric 890b57cec5SDimitry Andric // This is a cluster of orthogonal Transforms 900b57cec5SDimitry Andric AU.addPreservedID(LowerSwitchID); 910b57cec5SDimitry Andric FunctionPass::getAnalysisUsage(AU); 920b57cec5SDimitry Andric 930b57cec5SDimitry Andric AU.addRequired<TargetTransformInfoWrapperPass>(); 940b57cec5SDimitry Andric } 950b57cec5SDimitry Andric 960b57cec5SDimitry Andric /// \returns true if \p BB is reachable through only uniform branches. 970b57cec5SDimitry Andric /// XXX - Is there a more efficient way to find this? 980b57cec5SDimitry Andric static bool isUniformlyReached(const LegacyDivergenceAnalysis &DA, 990b57cec5SDimitry Andric BasicBlock &BB) { 1000b57cec5SDimitry Andric SmallVector<BasicBlock *, 8> Stack; 1010b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 8> Visited; 1020b57cec5SDimitry Andric 1030b57cec5SDimitry Andric for (BasicBlock *Pred : predecessors(&BB)) 1040b57cec5SDimitry Andric Stack.push_back(Pred); 1050b57cec5SDimitry Andric 1060b57cec5SDimitry Andric while (!Stack.empty()) { 1070b57cec5SDimitry Andric BasicBlock *Top = Stack.pop_back_val(); 1080b57cec5SDimitry Andric if (!DA.isUniform(Top->getTerminator())) 1090b57cec5SDimitry Andric return false; 1100b57cec5SDimitry Andric 1110b57cec5SDimitry Andric for (BasicBlock *Pred : predecessors(Top)) { 1120b57cec5SDimitry Andric if (Visited.insert(Pred).second) 1130b57cec5SDimitry Andric Stack.push_back(Pred); 1140b57cec5SDimitry Andric } 1150b57cec5SDimitry Andric } 1160b57cec5SDimitry Andric 1170b57cec5SDimitry Andric return true; 1180b57cec5SDimitry Andric } 1190b57cec5SDimitry Andric 1200b57cec5SDimitry Andric static BasicBlock *unifyReturnBlockSet(Function &F, 1210b57cec5SDimitry Andric ArrayRef<BasicBlock *> ReturningBlocks, 
1220b57cec5SDimitry Andric const TargetTransformInfo &TTI, 1230b57cec5SDimitry Andric StringRef Name) { 1240b57cec5SDimitry Andric // Otherwise, we need to insert a new basic block into the function, add a PHI 1250b57cec5SDimitry Andric // nodes (if the function returns values), and convert all of the return 1260b57cec5SDimitry Andric // instructions into unconditional branches. 1270b57cec5SDimitry Andric BasicBlock *NewRetBlock = BasicBlock::Create(F.getContext(), Name, &F); 1280b57cec5SDimitry Andric 1290b57cec5SDimitry Andric PHINode *PN = nullptr; 1300b57cec5SDimitry Andric if (F.getReturnType()->isVoidTy()) { 1310b57cec5SDimitry Andric ReturnInst::Create(F.getContext(), nullptr, NewRetBlock); 1320b57cec5SDimitry Andric } else { 1330b57cec5SDimitry Andric // If the function doesn't return void... add a PHI node to the block... 1340b57cec5SDimitry Andric PN = PHINode::Create(F.getReturnType(), ReturningBlocks.size(), 1350b57cec5SDimitry Andric "UnifiedRetVal"); 1360b57cec5SDimitry Andric NewRetBlock->getInstList().push_back(PN); 1370b57cec5SDimitry Andric ReturnInst::Create(F.getContext(), PN, NewRetBlock); 1380b57cec5SDimitry Andric } 1390b57cec5SDimitry Andric 1400b57cec5SDimitry Andric // Loop over all of the blocks, replacing the return instruction with an 1410b57cec5SDimitry Andric // unconditional branch. 1420b57cec5SDimitry Andric for (BasicBlock *BB : ReturningBlocks) { 1430b57cec5SDimitry Andric // Add an incoming element to the PHI node for every return instruction that 1440b57cec5SDimitry Andric // is merging into this new block... 1450b57cec5SDimitry Andric if (PN) 1460b57cec5SDimitry Andric PN->addIncoming(BB->getTerminator()->getOperand(0), BB); 1470b57cec5SDimitry Andric 1480b57cec5SDimitry Andric // Remove and delete the return inst. 
1490b57cec5SDimitry Andric BB->getTerminator()->eraseFromParent(); 1500b57cec5SDimitry Andric BranchInst::Create(NewRetBlock, BB); 1510b57cec5SDimitry Andric } 1520b57cec5SDimitry Andric 1530b57cec5SDimitry Andric for (BasicBlock *BB : ReturningBlocks) { 1540b57cec5SDimitry Andric // Cleanup possible branch to unconditional branch to the return. 1550b57cec5SDimitry Andric simplifyCFG(BB, TTI, {2}); 1560b57cec5SDimitry Andric } 1570b57cec5SDimitry Andric 1580b57cec5SDimitry Andric return NewRetBlock; 1590b57cec5SDimitry Andric } 1600b57cec5SDimitry Andric 1610b57cec5SDimitry Andric bool AMDGPUUnifyDivergentExitNodes::runOnFunction(Function &F) { 1620b57cec5SDimitry Andric auto &PDT = getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree(); 1630b57cec5SDimitry Andric if (PDT.getRoots().size() <= 1) 1640b57cec5SDimitry Andric return false; 1650b57cec5SDimitry Andric 1660b57cec5SDimitry Andric LegacyDivergenceAnalysis &DA = getAnalysis<LegacyDivergenceAnalysis>(); 1670b57cec5SDimitry Andric 1680b57cec5SDimitry Andric // Loop over all of the blocks in a function, tracking all of the blocks that 1690b57cec5SDimitry Andric // return. 1700b57cec5SDimitry Andric SmallVector<BasicBlock *, 4> ReturningBlocks; 1710b57cec5SDimitry Andric SmallVector<BasicBlock *, 4> UnreachableBlocks; 1720b57cec5SDimitry Andric 1730b57cec5SDimitry Andric // Dummy return block for infinite loop. 
1740b57cec5SDimitry Andric BasicBlock *DummyReturnBB = nullptr; 1750b57cec5SDimitry Andric 1760b57cec5SDimitry Andric for (BasicBlock *BB : PDT.getRoots()) { 1770b57cec5SDimitry Andric if (isa<ReturnInst>(BB->getTerminator())) { 1780b57cec5SDimitry Andric if (!isUniformlyReached(DA, *BB)) 1790b57cec5SDimitry Andric ReturningBlocks.push_back(BB); 1800b57cec5SDimitry Andric } else if (isa<UnreachableInst>(BB->getTerminator())) { 1810b57cec5SDimitry Andric if (!isUniformlyReached(DA, *BB)) 1820b57cec5SDimitry Andric UnreachableBlocks.push_back(BB); 1830b57cec5SDimitry Andric } else if (BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator())) { 1840b57cec5SDimitry Andric 1850b57cec5SDimitry Andric ConstantInt *BoolTrue = ConstantInt::getTrue(F.getContext()); 1860b57cec5SDimitry Andric if (DummyReturnBB == nullptr) { 1870b57cec5SDimitry Andric DummyReturnBB = BasicBlock::Create(F.getContext(), 1880b57cec5SDimitry Andric "DummyReturnBlock", &F); 1890b57cec5SDimitry Andric Type *RetTy = F.getReturnType(); 1900b57cec5SDimitry Andric Value *RetVal = RetTy->isVoidTy() ? nullptr : UndefValue::get(RetTy); 1910b57cec5SDimitry Andric ReturnInst::Create(F.getContext(), RetVal, DummyReturnBB); 1920b57cec5SDimitry Andric ReturningBlocks.push_back(DummyReturnBB); 1930b57cec5SDimitry Andric } 1940b57cec5SDimitry Andric 1950b57cec5SDimitry Andric if (BI->isUnconditional()) { 1960b57cec5SDimitry Andric BasicBlock *LoopHeaderBB = BI->getSuccessor(0); 1970b57cec5SDimitry Andric BI->eraseFromParent(); // Delete the unconditional branch. 1980b57cec5SDimitry Andric // Add a new conditional branch with a dummy edge to the return block. 1990b57cec5SDimitry Andric BranchInst::Create(LoopHeaderBB, DummyReturnBB, BoolTrue, BB); 2000b57cec5SDimitry Andric } else { // Conditional branch. 2010b57cec5SDimitry Andric // Create a new transition block to hold the conditional branch. 
2020b57cec5SDimitry Andric BasicBlock *TransitionBB = BB->splitBasicBlock(BI, "TransitionBlock"); 2030b57cec5SDimitry Andric 2040b57cec5SDimitry Andric // Create a branch that will always branch to the transition block and 2050b57cec5SDimitry Andric // references DummyReturnBB. 2060b57cec5SDimitry Andric BB->getTerminator()->eraseFromParent(); 2070b57cec5SDimitry Andric BranchInst::Create(TransitionBB, DummyReturnBB, BoolTrue, BB); 2080b57cec5SDimitry Andric } 2090b57cec5SDimitry Andric } 2100b57cec5SDimitry Andric } 2110b57cec5SDimitry Andric 2120b57cec5SDimitry Andric if (!UnreachableBlocks.empty()) { 2130b57cec5SDimitry Andric BasicBlock *UnreachableBlock = nullptr; 2140b57cec5SDimitry Andric 2150b57cec5SDimitry Andric if (UnreachableBlocks.size() == 1) { 2160b57cec5SDimitry Andric UnreachableBlock = UnreachableBlocks.front(); 2170b57cec5SDimitry Andric } else { 2180b57cec5SDimitry Andric UnreachableBlock = BasicBlock::Create(F.getContext(), 2190b57cec5SDimitry Andric "UnifiedUnreachableBlock", &F); 2200b57cec5SDimitry Andric new UnreachableInst(F.getContext(), UnreachableBlock); 2210b57cec5SDimitry Andric 2220b57cec5SDimitry Andric for (BasicBlock *BB : UnreachableBlocks) { 2230b57cec5SDimitry Andric // Remove and delete the unreachable inst. 2240b57cec5SDimitry Andric BB->getTerminator()->eraseFromParent(); 2250b57cec5SDimitry Andric BranchInst::Create(UnreachableBlock, BB); 2260b57cec5SDimitry Andric } 2270b57cec5SDimitry Andric } 2280b57cec5SDimitry Andric 2290b57cec5SDimitry Andric if (!ReturningBlocks.empty()) { 2300b57cec5SDimitry Andric // Don't create a new unreachable inst if we have a return. The 2310b57cec5SDimitry Andric // structurizer/annotator can't handle the multiple exits 2320b57cec5SDimitry Andric 2330b57cec5SDimitry Andric Type *RetTy = F.getReturnType(); 2340b57cec5SDimitry Andric Value *RetVal = RetTy->isVoidTy() ? nullptr : UndefValue::get(RetTy); 2350b57cec5SDimitry Andric // Remove and delete the unreachable inst. 
2360b57cec5SDimitry Andric UnreachableBlock->getTerminator()->eraseFromParent(); 2370b57cec5SDimitry Andric 2380b57cec5SDimitry Andric Function *UnreachableIntrin = 2390b57cec5SDimitry Andric Intrinsic::getDeclaration(F.getParent(), Intrinsic::amdgcn_unreachable); 2400b57cec5SDimitry Andric 2410b57cec5SDimitry Andric // Insert a call to an intrinsic tracking that this is an unreachable 2420b57cec5SDimitry Andric // point, in case we want to kill the active lanes or something later. 2430b57cec5SDimitry Andric CallInst::Create(UnreachableIntrin, {}, "", UnreachableBlock); 2440b57cec5SDimitry Andric 2450b57cec5SDimitry Andric // Don't create a scalar trap. We would only want to trap if this code was 2460b57cec5SDimitry Andric // really reached, but a scalar trap would happen even if no lanes 2470b57cec5SDimitry Andric // actually reached here. 2480b57cec5SDimitry Andric ReturnInst::Create(F.getContext(), RetVal, UnreachableBlock); 2490b57cec5SDimitry Andric ReturningBlocks.push_back(UnreachableBlock); 2500b57cec5SDimitry Andric } 2510b57cec5SDimitry Andric } 2520b57cec5SDimitry Andric 2530b57cec5SDimitry Andric // Now handle return blocks. 2540b57cec5SDimitry Andric if (ReturningBlocks.empty()) 2550b57cec5SDimitry Andric return false; // No blocks return 2560b57cec5SDimitry Andric 2570b57cec5SDimitry Andric if (ReturningBlocks.size() == 1) 2580b57cec5SDimitry Andric return false; // Already has a single return block 2590b57cec5SDimitry Andric 2600b57cec5SDimitry Andric const TargetTransformInfo &TTI 2610b57cec5SDimitry Andric = getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F); 2620b57cec5SDimitry Andric 2630b57cec5SDimitry Andric unifyReturnBlockSet(F, ReturningBlocks, TTI, "UnifiedReturnBlock"); 2640b57cec5SDimitry Andric return true; 2650b57cec5SDimitry Andric } 266