10b57cec5SDimitry Andric //===-- MemorySSAUpdater.cpp - Memory SSA Updater--------------------===// 20b57cec5SDimitry Andric // 30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information. 50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 60b57cec5SDimitry Andric // 70b57cec5SDimitry Andric //===----------------------------------------------------------------===// 80b57cec5SDimitry Andric // 90b57cec5SDimitry Andric // This file implements the MemorySSAUpdater class. 100b57cec5SDimitry Andric // 110b57cec5SDimitry Andric //===----------------------------------------------------------------===// 120b57cec5SDimitry Andric #include "llvm/Analysis/MemorySSAUpdater.h" 130b57cec5SDimitry Andric #include "llvm/ADT/STLExtras.h" 140b57cec5SDimitry Andric #include "llvm/ADT/SetVector.h" 150b57cec5SDimitry Andric #include "llvm/ADT/SmallPtrSet.h" 160b57cec5SDimitry Andric #include "llvm/Analysis/IteratedDominanceFrontier.h" 170b57cec5SDimitry Andric #include "llvm/Analysis/MemorySSA.h" 180b57cec5SDimitry Andric #include "llvm/IR/DataLayout.h" 190b57cec5SDimitry Andric #include "llvm/IR/Dominators.h" 200b57cec5SDimitry Andric #include "llvm/IR/GlobalVariable.h" 210b57cec5SDimitry Andric #include "llvm/IR/IRBuilder.h" 220b57cec5SDimitry Andric #include "llvm/IR/LLVMContext.h" 230b57cec5SDimitry Andric #include "llvm/IR/Metadata.h" 240b57cec5SDimitry Andric #include "llvm/IR/Module.h" 250b57cec5SDimitry Andric #include "llvm/Support/Debug.h" 260b57cec5SDimitry Andric #include "llvm/Support/FormattedStream.h" 270b57cec5SDimitry Andric #include <algorithm> 280b57cec5SDimitry Andric 290b57cec5SDimitry Andric #define DEBUG_TYPE "memoryssa" 300b57cec5SDimitry Andric using namespace llvm; 310b57cec5SDimitry Andric 320b57cec5SDimitry Andric // This is the marker algorithm from "Simple and Efficient Construction of 
330b57cec5SDimitry Andric // Static Single Assignment Form" 340b57cec5SDimitry Andric // The simple, non-marker algorithm places phi nodes at any join 350b57cec5SDimitry Andric // Here, we place markers, and only place phi nodes if they end up necessary. 360b57cec5SDimitry Andric // They are only necessary if they break a cycle (IE we recursively visit 370b57cec5SDimitry Andric // ourselves again), or we discover, while getting the value of the operands, 380b57cec5SDimitry Andric // that there are two or more definitions needing to be merged. 390b57cec5SDimitry Andric // This still will leave non-minimal form in the case of irreducible control 400b57cec5SDimitry Andric // flow, where phi nodes may be in cycles with themselves, but unnecessary. 410b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive( 420b57cec5SDimitry Andric BasicBlock *BB, 430b57cec5SDimitry Andric DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) { 440b57cec5SDimitry Andric // First, do a cache lookup. Without this cache, certain CFG structures 450b57cec5SDimitry Andric // (like a series of if statements) take exponential time to visit. 460b57cec5SDimitry Andric auto Cached = CachedPreviousDef.find(BB); 47*8bcb0991SDimitry Andric if (Cached != CachedPreviousDef.end()) 480b57cec5SDimitry Andric return Cached->second; 490b57cec5SDimitry Andric 50*8bcb0991SDimitry Andric // If this method is called from an unreachable block, return LoE. 51*8bcb0991SDimitry Andric if (!MSSA->DT->isReachableFromEntry(BB)) 52*8bcb0991SDimitry Andric return MSSA->getLiveOnEntryDef(); 53*8bcb0991SDimitry Andric 54*8bcb0991SDimitry Andric if (BasicBlock *Pred = BB->getUniquePredecessor()) { 55*8bcb0991SDimitry Andric VisitedBlocks.insert(BB); 560b57cec5SDimitry Andric // Single predecessor case, just recurse, we can only have one definition. 
570b57cec5SDimitry Andric MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef); 580b57cec5SDimitry Andric CachedPreviousDef.insert({BB, Result}); 590b57cec5SDimitry Andric return Result; 600b57cec5SDimitry Andric } 610b57cec5SDimitry Andric 620b57cec5SDimitry Andric if (VisitedBlocks.count(BB)) { 630b57cec5SDimitry Andric // We hit our node again, meaning we had a cycle, we must insert a phi 640b57cec5SDimitry Andric // node to break it so we have an operand. The only case this will 650b57cec5SDimitry Andric // insert useless phis is if we have irreducible control flow. 660b57cec5SDimitry Andric MemoryAccess *Result = MSSA->createMemoryPhi(BB); 670b57cec5SDimitry Andric CachedPreviousDef.insert({BB, Result}); 680b57cec5SDimitry Andric return Result; 690b57cec5SDimitry Andric } 700b57cec5SDimitry Andric 710b57cec5SDimitry Andric if (VisitedBlocks.insert(BB).second) { 720b57cec5SDimitry Andric // Mark us visited so we can detect a cycle 730b57cec5SDimitry Andric SmallVector<TrackingVH<MemoryAccess>, 8> PhiOps; 740b57cec5SDimitry Andric 750b57cec5SDimitry Andric // Recurse to get the values in our predecessors for placement of a 760b57cec5SDimitry Andric // potential phi node. This will insert phi nodes if we cycle in order to 770b57cec5SDimitry Andric // break the cycle and have an operand. 
78*8bcb0991SDimitry Andric bool UniqueIncomingAccess = true; 79*8bcb0991SDimitry Andric MemoryAccess *SingleAccess = nullptr; 80*8bcb0991SDimitry Andric for (auto *Pred : predecessors(BB)) { 81*8bcb0991SDimitry Andric if (MSSA->DT->isReachableFromEntry(Pred)) { 82*8bcb0991SDimitry Andric auto *IncomingAccess = getPreviousDefFromEnd(Pred, CachedPreviousDef); 83*8bcb0991SDimitry Andric if (!SingleAccess) 84*8bcb0991SDimitry Andric SingleAccess = IncomingAccess; 85*8bcb0991SDimitry Andric else if (IncomingAccess != SingleAccess) 86*8bcb0991SDimitry Andric UniqueIncomingAccess = false; 87*8bcb0991SDimitry Andric PhiOps.push_back(IncomingAccess); 88*8bcb0991SDimitry Andric } else 890b57cec5SDimitry Andric PhiOps.push_back(MSSA->getLiveOnEntryDef()); 90*8bcb0991SDimitry Andric } 910b57cec5SDimitry Andric 920b57cec5SDimitry Andric // Now try to simplify the ops to avoid placing a phi. 930b57cec5SDimitry Andric // This may return null if we never created a phi yet, that's okay 940b57cec5SDimitry Andric MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MSSA->getMemoryAccess(BB)); 950b57cec5SDimitry Andric 960b57cec5SDimitry Andric // See if we can avoid the phi by simplifying it. 970b57cec5SDimitry Andric auto *Result = tryRemoveTrivialPhi(Phi, PhiOps); 980b57cec5SDimitry Andric // If we couldn't simplify, we may have to create a phi 99*8bcb0991SDimitry Andric if (Result == Phi && UniqueIncomingAccess && SingleAccess) { 100*8bcb0991SDimitry Andric // A concrete Phi only exists if we created an empty one to break a cycle. 
101*8bcb0991SDimitry Andric if (Phi) { 102*8bcb0991SDimitry Andric assert(Phi->operands().empty() && "Expected empty Phi"); 103*8bcb0991SDimitry Andric Phi->replaceAllUsesWith(SingleAccess); 104*8bcb0991SDimitry Andric removeMemoryAccess(Phi); 105*8bcb0991SDimitry Andric } 106*8bcb0991SDimitry Andric Result = SingleAccess; 107*8bcb0991SDimitry Andric } else if (Result == Phi && !(UniqueIncomingAccess && SingleAccess)) { 1080b57cec5SDimitry Andric if (!Phi) 1090b57cec5SDimitry Andric Phi = MSSA->createMemoryPhi(BB); 1100b57cec5SDimitry Andric 1110b57cec5SDimitry Andric // See if the existing phi operands match what we need. 1120b57cec5SDimitry Andric // Unlike normal SSA, we only allow one phi node per block, so we can't just 1130b57cec5SDimitry Andric // create a new one. 1140b57cec5SDimitry Andric if (Phi->getNumOperands() != 0) { 1150b57cec5SDimitry Andric // FIXME: Figure out whether this is dead code and if so remove it. 1160b57cec5SDimitry Andric if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) { 1170b57cec5SDimitry Andric // These will have been filled in by the recursive read we did above. 1180b57cec5SDimitry Andric llvm::copy(PhiOps, Phi->op_begin()); 1190b57cec5SDimitry Andric std::copy(pred_begin(BB), pred_end(BB), Phi->block_begin()); 1200b57cec5SDimitry Andric } 1210b57cec5SDimitry Andric } else { 1220b57cec5SDimitry Andric unsigned i = 0; 1230b57cec5SDimitry Andric for (auto *Pred : predecessors(BB)) 1240b57cec5SDimitry Andric Phi->addIncoming(&*PhiOps[i++], Pred); 1250b57cec5SDimitry Andric InsertedPHIs.push_back(Phi); 1260b57cec5SDimitry Andric } 1270b57cec5SDimitry Andric Result = Phi; 1280b57cec5SDimitry Andric } 1290b57cec5SDimitry Andric 1300b57cec5SDimitry Andric // Set ourselves up for the next variable by resetting visited state. 
1310b57cec5SDimitry Andric VisitedBlocks.erase(BB); 1320b57cec5SDimitry Andric CachedPreviousDef.insert({BB, Result}); 1330b57cec5SDimitry Andric return Result; 1340b57cec5SDimitry Andric } 1350b57cec5SDimitry Andric llvm_unreachable("Should have hit one of the three cases above"); 1360b57cec5SDimitry Andric } 1370b57cec5SDimitry Andric 1380b57cec5SDimitry Andric // This starts at the memory access, and goes backwards in the block to find the 1390b57cec5SDimitry Andric // previous definition. If a definition is not found the block of the access, 1400b57cec5SDimitry Andric // it continues globally, creating phi nodes to ensure we have a single 1410b57cec5SDimitry Andric // definition. 1420b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::getPreviousDef(MemoryAccess *MA) { 1430b57cec5SDimitry Andric if (auto *LocalResult = getPreviousDefInBlock(MA)) 1440b57cec5SDimitry Andric return LocalResult; 1450b57cec5SDimitry Andric DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef; 1460b57cec5SDimitry Andric return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef); 1470b57cec5SDimitry Andric } 1480b57cec5SDimitry Andric 1490b57cec5SDimitry Andric // This starts at the memory access, and goes backwards in the block to the find 1500b57cec5SDimitry Andric // the previous definition. If the definition is not found in the block of the 1510b57cec5SDimitry Andric // access, it returns nullptr. 1520b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::getPreviousDefInBlock(MemoryAccess *MA) { 1530b57cec5SDimitry Andric auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock()); 1540b57cec5SDimitry Andric 1550b57cec5SDimitry Andric // It's possible there are no defs, or we got handed the first def to start. 1560b57cec5SDimitry Andric if (Defs) { 1570b57cec5SDimitry Andric // If this is a def, we can just use the def iterators. 
1580b57cec5SDimitry Andric if (!isa<MemoryUse>(MA)) { 1590b57cec5SDimitry Andric auto Iter = MA->getReverseDefsIterator(); 1600b57cec5SDimitry Andric ++Iter; 1610b57cec5SDimitry Andric if (Iter != Defs->rend()) 1620b57cec5SDimitry Andric return &*Iter; 1630b57cec5SDimitry Andric } else { 1640b57cec5SDimitry Andric // Otherwise, have to walk the all access iterator. 1650b57cec5SDimitry Andric auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend(); 1660b57cec5SDimitry Andric for (auto &U : make_range(++MA->getReverseIterator(), End)) 1670b57cec5SDimitry Andric if (!isa<MemoryUse>(U)) 1680b57cec5SDimitry Andric return cast<MemoryAccess>(&U); 1690b57cec5SDimitry Andric // Note that if MA comes before Defs->begin(), we won't hit a def. 1700b57cec5SDimitry Andric return nullptr; 1710b57cec5SDimitry Andric } 1720b57cec5SDimitry Andric } 1730b57cec5SDimitry Andric return nullptr; 1740b57cec5SDimitry Andric } 1750b57cec5SDimitry Andric 1760b57cec5SDimitry Andric // This starts at the end of block 1770b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd( 1780b57cec5SDimitry Andric BasicBlock *BB, 1790b57cec5SDimitry Andric DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) { 1800b57cec5SDimitry Andric auto *Defs = MSSA->getWritableBlockDefs(BB); 1810b57cec5SDimitry Andric 1820b57cec5SDimitry Andric if (Defs) { 1830b57cec5SDimitry Andric CachedPreviousDef.insert({BB, &*Defs->rbegin()}); 1840b57cec5SDimitry Andric return &*Defs->rbegin(); 1850b57cec5SDimitry Andric } 1860b57cec5SDimitry Andric 1870b57cec5SDimitry Andric return getPreviousDefRecursive(BB, CachedPreviousDef); 1880b57cec5SDimitry Andric } 1890b57cec5SDimitry Andric // Recurse over a set of phi uses to eliminate the trivial ones 1900b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::recursePhi(MemoryAccess *Phi) { 1910b57cec5SDimitry Andric if (!Phi) 1920b57cec5SDimitry Andric return nullptr; 1930b57cec5SDimitry Andric TrackingVH<MemoryAccess> Res(Phi); 
1940b57cec5SDimitry Andric SmallVector<TrackingVH<Value>, 8> Uses; 1950b57cec5SDimitry Andric std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses)); 196*8bcb0991SDimitry Andric for (auto &U : Uses) 197*8bcb0991SDimitry Andric if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U)) 198*8bcb0991SDimitry Andric tryRemoveTrivialPhi(UsePhi); 1990b57cec5SDimitry Andric return Res; 2000b57cec5SDimitry Andric } 2010b57cec5SDimitry Andric 2020b57cec5SDimitry Andric // Eliminate trivial phis 2030b57cec5SDimitry Andric // Phis are trivial if they are defined either by themselves, or all the same 2040b57cec5SDimitry Andric // argument. 2050b57cec5SDimitry Andric // IE phi(a, a) or b = phi(a, b) or c = phi(a, a, c) 2060b57cec5SDimitry Andric // We recursively try to remove them. 207*8bcb0991SDimitry Andric MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi) { 208*8bcb0991SDimitry Andric assert(Phi && "Can only remove concrete Phi."); 209*8bcb0991SDimitry Andric auto OperRange = Phi->operands(); 210*8bcb0991SDimitry Andric return tryRemoveTrivialPhi(Phi, OperRange); 211*8bcb0991SDimitry Andric } 2120b57cec5SDimitry Andric template <class RangeType> 2130b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi, 2140b57cec5SDimitry Andric RangeType &Operands) { 2150b57cec5SDimitry Andric // Bail out on non-opt Phis. 
2160b57cec5SDimitry Andric if (NonOptPhis.count(Phi)) 2170b57cec5SDimitry Andric return Phi; 2180b57cec5SDimitry Andric 2190b57cec5SDimitry Andric // Detect equal or self arguments 2200b57cec5SDimitry Andric MemoryAccess *Same = nullptr; 2210b57cec5SDimitry Andric for (auto &Op : Operands) { 2220b57cec5SDimitry Andric // If the same or self, good so far 2230b57cec5SDimitry Andric if (Op == Phi || Op == Same) 2240b57cec5SDimitry Andric continue; 2250b57cec5SDimitry Andric // not the same, return the phi since it's not eliminatable by us 2260b57cec5SDimitry Andric if (Same) 2270b57cec5SDimitry Andric return Phi; 2280b57cec5SDimitry Andric Same = cast<MemoryAccess>(&*Op); 2290b57cec5SDimitry Andric } 2300b57cec5SDimitry Andric // Never found a non-self reference, the phi is undef 2310b57cec5SDimitry Andric if (Same == nullptr) 2320b57cec5SDimitry Andric return MSSA->getLiveOnEntryDef(); 2330b57cec5SDimitry Andric if (Phi) { 2340b57cec5SDimitry Andric Phi->replaceAllUsesWith(Same); 2350b57cec5SDimitry Andric removeMemoryAccess(Phi); 2360b57cec5SDimitry Andric } 2370b57cec5SDimitry Andric 2380b57cec5SDimitry Andric // We should only end up recursing in case we replaced something, in which 2390b57cec5SDimitry Andric // case, we may have made other Phis trivial. 2400b57cec5SDimitry Andric return recursePhi(Same); 2410b57cec5SDimitry Andric } 2420b57cec5SDimitry Andric 243*8bcb0991SDimitry Andric void MemorySSAUpdater::insertUse(MemoryUse *MU, bool RenameUses) { 2440b57cec5SDimitry Andric InsertedPHIs.clear(); 2450b57cec5SDimitry Andric MU->setDefiningAccess(getPreviousDef(MU)); 246*8bcb0991SDimitry Andric 247*8bcb0991SDimitry Andric // In cases without unreachable blocks, because uses do not create new 248*8bcb0991SDimitry Andric // may-defs, there are only two cases: 2490b57cec5SDimitry Andric // 1. There was a def already below us, and therefore, we should not have 2500b57cec5SDimitry Andric // created a phi node because it was already needed for the def. 
2510b57cec5SDimitry Andric // 2520b57cec5SDimitry Andric // 2. There is no def below us, and therefore, there is no extra renaming work 2530b57cec5SDimitry Andric // to do. 254*8bcb0991SDimitry Andric 255*8bcb0991SDimitry Andric // In cases with unreachable blocks, where the unnecessary Phis were 256*8bcb0991SDimitry Andric // optimized out, adding the Use may re-insert those Phis. Hence, when 257*8bcb0991SDimitry Andric // inserting Uses outside of the MSSA creation process, and new Phis were 258*8bcb0991SDimitry Andric // added, rename all uses if we are asked. 259*8bcb0991SDimitry Andric 260*8bcb0991SDimitry Andric if (!RenameUses && !InsertedPHIs.empty()) { 261*8bcb0991SDimitry Andric auto *Defs = MSSA->getBlockDefs(MU->getBlock()); 262*8bcb0991SDimitry Andric (void)Defs; 263*8bcb0991SDimitry Andric assert((!Defs || (++Defs->begin() == Defs->end())) && 264*8bcb0991SDimitry Andric "Block may have only a Phi or no defs"); 265*8bcb0991SDimitry Andric } 266*8bcb0991SDimitry Andric 267*8bcb0991SDimitry Andric if (RenameUses && InsertedPHIs.size()) { 268*8bcb0991SDimitry Andric SmallPtrSet<BasicBlock *, 16> Visited; 269*8bcb0991SDimitry Andric BasicBlock *StartBlock = MU->getBlock(); 270*8bcb0991SDimitry Andric 271*8bcb0991SDimitry Andric if (auto *Defs = MSSA->getWritableBlockDefs(StartBlock)) { 272*8bcb0991SDimitry Andric MemoryAccess *FirstDef = &*Defs->begin(); 273*8bcb0991SDimitry Andric // Convert to incoming value if it's a memorydef. A phi *is* already an 274*8bcb0991SDimitry Andric // incoming value. 275*8bcb0991SDimitry Andric if (auto *MD = dyn_cast<MemoryDef>(FirstDef)) 276*8bcb0991SDimitry Andric FirstDef = MD->getDefiningAccess(); 277*8bcb0991SDimitry Andric 278*8bcb0991SDimitry Andric MSSA->renamePass(MU->getBlock(), FirstDef, Visited); 279*8bcb0991SDimitry Andric } 280*8bcb0991SDimitry Andric // We just inserted a phi into this block, so the incoming value will 281*8bcb0991SDimitry Andric // become the phi anyway, so it does not matter what we pass. 
282*8bcb0991SDimitry Andric for (auto &MP : InsertedPHIs) 283*8bcb0991SDimitry Andric if (MemoryPhi *Phi = cast_or_null<MemoryPhi>(MP)) 284*8bcb0991SDimitry Andric MSSA->renamePass(Phi->getBlock(), nullptr, Visited); 285*8bcb0991SDimitry Andric } 2860b57cec5SDimitry Andric } 2870b57cec5SDimitry Andric 2880b57cec5SDimitry Andric // Set every incoming edge {BB, MP->getBlock()} of MemoryPhi MP to NewDef. 2890b57cec5SDimitry Andric static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB, 2900b57cec5SDimitry Andric MemoryAccess *NewDef) { 2910b57cec5SDimitry Andric // Replace any operand with us an incoming block with the new defining 2920b57cec5SDimitry Andric // access. 2930b57cec5SDimitry Andric int i = MP->getBasicBlockIndex(BB); 2940b57cec5SDimitry Andric assert(i != -1 && "Should have found the basic block in the phi"); 2950b57cec5SDimitry Andric // We can't just compare i against getNumOperands since one is signed and the 2960b57cec5SDimitry Andric // other not. So use it to index into the block iterator. 2970b57cec5SDimitry Andric for (auto BBIter = MP->block_begin() + i; BBIter != MP->block_end(); 2980b57cec5SDimitry Andric ++BBIter) { 2990b57cec5SDimitry Andric if (*BBIter != BB) 3000b57cec5SDimitry Andric break; 3010b57cec5SDimitry Andric MP->setIncomingValue(i, NewDef); 3020b57cec5SDimitry Andric ++i; 3030b57cec5SDimitry Andric } 3040b57cec5SDimitry Andric } 3050b57cec5SDimitry Andric 3060b57cec5SDimitry Andric // A brief description of the algorithm: 3070b57cec5SDimitry Andric // First, we compute what should define the new def, using the SSA 3080b57cec5SDimitry Andric // construction algorithm. 3090b57cec5SDimitry Andric // Then, we update the defs below us (and any new phi nodes) in the graph to 3100b57cec5SDimitry Andric // point to the correct new defs, to ensure we only have one variable, and no 3110b57cec5SDimitry Andric // disconnected stores. 
3120b57cec5SDimitry Andric void MemorySSAUpdater::insertDef(MemoryDef *MD, bool RenameUses) { 3130b57cec5SDimitry Andric InsertedPHIs.clear(); 3140b57cec5SDimitry Andric 3150b57cec5SDimitry Andric // See if we had a local def, and if not, go hunting. 3160b57cec5SDimitry Andric MemoryAccess *DefBefore = getPreviousDef(MD); 317*8bcb0991SDimitry Andric bool DefBeforeSameBlock = false; 318*8bcb0991SDimitry Andric if (DefBefore->getBlock() == MD->getBlock() && 319*8bcb0991SDimitry Andric !(isa<MemoryPhi>(DefBefore) && 320*8bcb0991SDimitry Andric std::find(InsertedPHIs.begin(), InsertedPHIs.end(), DefBefore) != 321*8bcb0991SDimitry Andric InsertedPHIs.end())) 322*8bcb0991SDimitry Andric DefBeforeSameBlock = true; 3230b57cec5SDimitry Andric 3240b57cec5SDimitry Andric // There is a def before us, which means we can replace any store/phi uses 3250b57cec5SDimitry Andric // of that thing with us, since we are in the way of whatever was there 3260b57cec5SDimitry Andric // before. 3270b57cec5SDimitry Andric // We now define that def's memorydefs and memoryphis 3280b57cec5SDimitry Andric if (DefBeforeSameBlock) { 329*8bcb0991SDimitry Andric DefBefore->replaceUsesWithIf(MD, [MD](Use &U) { 3300b57cec5SDimitry Andric // Leave the MemoryUses alone. 3310b57cec5SDimitry Andric // Also make sure we skip ourselves to avoid self references. 332*8bcb0991SDimitry Andric User *Usr = U.getUser(); 333*8bcb0991SDimitry Andric return !isa<MemoryUse>(Usr) && Usr != MD; 3340b57cec5SDimitry Andric // Defs are automatically unoptimized when the user is set to MD below, 3350b57cec5SDimitry Andric // because the isOptimized() call will fail to find the same ID. 336*8bcb0991SDimitry Andric }); 3370b57cec5SDimitry Andric } 3380b57cec5SDimitry Andric 3390b57cec5SDimitry Andric // and that def is now our defining access. 
3400b57cec5SDimitry Andric MD->setDefiningAccess(DefBefore); 3410b57cec5SDimitry Andric 3420b57cec5SDimitry Andric SmallVector<WeakVH, 8> FixupList(InsertedPHIs.begin(), InsertedPHIs.end()); 343*8bcb0991SDimitry Andric 344*8bcb0991SDimitry Andric // Remember the index where we may insert new phis. 345*8bcb0991SDimitry Andric unsigned NewPhiIndex = InsertedPHIs.size(); 3460b57cec5SDimitry Andric if (!DefBeforeSameBlock) { 3470b57cec5SDimitry Andric // If there was a local def before us, we must have the same effect it 3480b57cec5SDimitry Andric // did. Because every may-def is the same, any phis/etc we would create, it 3490b57cec5SDimitry Andric // would also have created. If there was no local def before us, we 3500b57cec5SDimitry Andric // performed a global update, and have to search all successors and make 3510b57cec5SDimitry Andric // sure we update the first def in each of them (following all paths until 3520b57cec5SDimitry Andric // we hit the first def along each path). This may also insert phi nodes. 3530b57cec5SDimitry Andric // TODO: There are other cases we can skip this work, such as when we have a 3540b57cec5SDimitry Andric // single successor, and only used a straight line of single pred blocks 3550b57cec5SDimitry Andric // backwards to find the def. To make that work, we'd have to track whether 3560b57cec5SDimitry Andric // getDefRecursive only ever used the single predecessor case. These types 3570b57cec5SDimitry Andric // of paths also only exist in between CFG simplifications. 3580b57cec5SDimitry Andric 3590b57cec5SDimitry Andric // If this is the first def in the block and this insert is in an arbitrary 3600b57cec5SDimitry Andric // place, compute IDF and place phis. 
361*8bcb0991SDimitry Andric SmallPtrSet<BasicBlock *, 2> DefiningBlocks; 362*8bcb0991SDimitry Andric 363*8bcb0991SDimitry Andric // If this is the last Def in the block, also compute IDF based on MD, since 364*8bcb0991SDimitry Andric // this may a new Def added, and we may need additional Phis. 3650b57cec5SDimitry Andric auto Iter = MD->getDefsIterator(); 3660b57cec5SDimitry Andric ++Iter; 3670b57cec5SDimitry Andric auto IterEnd = MSSA->getBlockDefs(MD->getBlock())->end(); 368*8bcb0991SDimitry Andric if (Iter == IterEnd) 369*8bcb0991SDimitry Andric DefiningBlocks.insert(MD->getBlock()); 370*8bcb0991SDimitry Andric 371*8bcb0991SDimitry Andric for (const auto &VH : InsertedPHIs) 372*8bcb0991SDimitry Andric if (const auto *RealPHI = cast_or_null<MemoryPhi>(VH)) 373*8bcb0991SDimitry Andric DefiningBlocks.insert(RealPHI->getBlock()); 3740b57cec5SDimitry Andric ForwardIDFCalculator IDFs(*MSSA->DT); 3750b57cec5SDimitry Andric SmallVector<BasicBlock *, 32> IDFBlocks; 3760b57cec5SDimitry Andric IDFs.setDefiningBlocks(DefiningBlocks); 3770b57cec5SDimitry Andric IDFs.calculate(IDFBlocks); 3780b57cec5SDimitry Andric SmallVector<AssertingVH<MemoryPhi>, 4> NewInsertedPHIs; 379*8bcb0991SDimitry Andric for (auto *BBIDF : IDFBlocks) { 380*8bcb0991SDimitry Andric auto *MPhi = MSSA->getMemoryAccess(BBIDF); 381*8bcb0991SDimitry Andric if (!MPhi) { 382*8bcb0991SDimitry Andric MPhi = MSSA->createMemoryPhi(BBIDF); 3830b57cec5SDimitry Andric NewInsertedPHIs.push_back(MPhi); 384*8bcb0991SDimitry Andric } 385*8bcb0991SDimitry Andric // Add the phis created into the IDF blocks to NonOptPhis, so they are not 386*8bcb0991SDimitry Andric // optimized out as trivial by the call to getPreviousDefFromEnd below. 387*8bcb0991SDimitry Andric // Once they are complete, all these Phis are added to the FixupList, and 388*8bcb0991SDimitry Andric // removed from NonOptPhis inside fixupDefs(). 
Existing Phis in IDF may 389*8bcb0991SDimitry Andric // need fixing as well, and potentially be trivial before this insertion, 390*8bcb0991SDimitry Andric // hence add all IDF Phis. See PR43044. 3910b57cec5SDimitry Andric NonOptPhis.insert(MPhi); 3920b57cec5SDimitry Andric } 3930b57cec5SDimitry Andric for (auto &MPhi : NewInsertedPHIs) { 3940b57cec5SDimitry Andric auto *BBIDF = MPhi->getBlock(); 3950b57cec5SDimitry Andric for (auto *Pred : predecessors(BBIDF)) { 3960b57cec5SDimitry Andric DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef; 397*8bcb0991SDimitry Andric MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef), Pred); 3980b57cec5SDimitry Andric } 3990b57cec5SDimitry Andric } 4000b57cec5SDimitry Andric 401*8bcb0991SDimitry Andric // Re-take the index where we're adding the new phis, because the above call 402*8bcb0991SDimitry Andric // to getPreviousDefFromEnd, may have inserted into InsertedPHIs. 4030b57cec5SDimitry Andric NewPhiIndex = InsertedPHIs.size(); 4040b57cec5SDimitry Andric for (auto &MPhi : NewInsertedPHIs) { 4050b57cec5SDimitry Andric InsertedPHIs.push_back(&*MPhi); 4060b57cec5SDimitry Andric FixupList.push_back(&*MPhi); 4070b57cec5SDimitry Andric } 4080b57cec5SDimitry Andric 4090b57cec5SDimitry Andric FixupList.push_back(MD); 4100b57cec5SDimitry Andric } 4110b57cec5SDimitry Andric 4120b57cec5SDimitry Andric // Remember the index where we stopped inserting new phis above, since the 4130b57cec5SDimitry Andric // fixupDefs call in the loop below may insert more, that are already minimal. 
4140b57cec5SDimitry Andric unsigned NewPhiIndexEnd = InsertedPHIs.size(); 4150b57cec5SDimitry Andric 4160b57cec5SDimitry Andric while (!FixupList.empty()) { 4170b57cec5SDimitry Andric unsigned StartingPHISize = InsertedPHIs.size(); 4180b57cec5SDimitry Andric fixupDefs(FixupList); 4190b57cec5SDimitry Andric FixupList.clear(); 4200b57cec5SDimitry Andric // Put any new phis on the fixup list, and process them 4210b57cec5SDimitry Andric FixupList.append(InsertedPHIs.begin() + StartingPHISize, InsertedPHIs.end()); 4220b57cec5SDimitry Andric } 4230b57cec5SDimitry Andric 4240b57cec5SDimitry Andric // Optimize potentially non-minimal phis added in this method. 4250b57cec5SDimitry Andric unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex; 4260b57cec5SDimitry Andric if (NewPhiSize) 4270b57cec5SDimitry Andric tryRemoveTrivialPhis(ArrayRef<WeakVH>(&InsertedPHIs[NewPhiIndex], NewPhiSize)); 4280b57cec5SDimitry Andric 4290b57cec5SDimitry Andric // Now that all fixups are done, rename all uses if we are asked. 4300b57cec5SDimitry Andric if (RenameUses) { 4310b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 16> Visited; 4320b57cec5SDimitry Andric BasicBlock *StartBlock = MD->getBlock(); 4330b57cec5SDimitry Andric // We are guaranteed there is a def in the block, because we just got it 4340b57cec5SDimitry Andric // handed to us in this function. 4350b57cec5SDimitry Andric MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin(); 4360b57cec5SDimitry Andric // Convert to incoming value if it's a memorydef. A phi *is* already an 4370b57cec5SDimitry Andric // incoming value. 
4380b57cec5SDimitry Andric if (auto *MD = dyn_cast<MemoryDef>(FirstDef)) 4390b57cec5SDimitry Andric FirstDef = MD->getDefiningAccess(); 4400b57cec5SDimitry Andric 4410b57cec5SDimitry Andric MSSA->renamePass(MD->getBlock(), FirstDef, Visited); 4420b57cec5SDimitry Andric // We just inserted a phi into this block, so the incoming value will become 4430b57cec5SDimitry Andric // the phi anyway, so it does not matter what we pass. 4440b57cec5SDimitry Andric for (auto &MP : InsertedPHIs) { 4450b57cec5SDimitry Andric MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP); 4460b57cec5SDimitry Andric if (Phi) 4470b57cec5SDimitry Andric MSSA->renamePass(Phi->getBlock(), nullptr, Visited); 4480b57cec5SDimitry Andric } 4490b57cec5SDimitry Andric } 4500b57cec5SDimitry Andric } 4510b57cec5SDimitry Andric 4520b57cec5SDimitry Andric void MemorySSAUpdater::fixupDefs(const SmallVectorImpl<WeakVH> &Vars) { 4530b57cec5SDimitry Andric SmallPtrSet<const BasicBlock *, 8> Seen; 4540b57cec5SDimitry Andric SmallVector<const BasicBlock *, 16> Worklist; 4550b57cec5SDimitry Andric for (auto &Var : Vars) { 4560b57cec5SDimitry Andric MemoryAccess *NewDef = dyn_cast_or_null<MemoryAccess>(Var); 4570b57cec5SDimitry Andric if (!NewDef) 4580b57cec5SDimitry Andric continue; 4590b57cec5SDimitry Andric // First, see if there is a local def after the operand. 4600b57cec5SDimitry Andric auto *Defs = MSSA->getWritableBlockDefs(NewDef->getBlock()); 4610b57cec5SDimitry Andric auto DefIter = NewDef->getDefsIterator(); 4620b57cec5SDimitry Andric 4630b57cec5SDimitry Andric // The temporary Phi is being fixed, unmark it for not to optimize. 4640b57cec5SDimitry Andric if (MemoryPhi *Phi = dyn_cast<MemoryPhi>(NewDef)) 4650b57cec5SDimitry Andric NonOptPhis.erase(Phi); 4660b57cec5SDimitry Andric 4670b57cec5SDimitry Andric // If there is a local def after us, we only have to rename that. 
4680b57cec5SDimitry Andric if (++DefIter != Defs->end()) { 4690b57cec5SDimitry Andric cast<MemoryDef>(DefIter)->setDefiningAccess(NewDef); 4700b57cec5SDimitry Andric continue; 4710b57cec5SDimitry Andric } 4720b57cec5SDimitry Andric 4730b57cec5SDimitry Andric // Otherwise, we need to search down through the CFG. 4740b57cec5SDimitry Andric // For each of our successors, handle it directly if their is a phi, or 4750b57cec5SDimitry Andric // place on the fixup worklist. 4760b57cec5SDimitry Andric for (const auto *S : successors(NewDef->getBlock())) { 4770b57cec5SDimitry Andric if (auto *MP = MSSA->getMemoryAccess(S)) 4780b57cec5SDimitry Andric setMemoryPhiValueForBlock(MP, NewDef->getBlock(), NewDef); 4790b57cec5SDimitry Andric else 4800b57cec5SDimitry Andric Worklist.push_back(S); 4810b57cec5SDimitry Andric } 4820b57cec5SDimitry Andric 4830b57cec5SDimitry Andric while (!Worklist.empty()) { 4840b57cec5SDimitry Andric const BasicBlock *FixupBlock = Worklist.back(); 4850b57cec5SDimitry Andric Worklist.pop_back(); 4860b57cec5SDimitry Andric 4870b57cec5SDimitry Andric // Get the first def in the block that isn't a phi node. 
4880b57cec5SDimitry Andric if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) { 4890b57cec5SDimitry Andric auto *FirstDef = &*Defs->begin(); 4900b57cec5SDimitry Andric // The loop above and below should have taken care of phi nodes 4910b57cec5SDimitry Andric assert(!isa<MemoryPhi>(FirstDef) && 4920b57cec5SDimitry Andric "Should have already handled phi nodes!"); 4930b57cec5SDimitry Andric // We are now this def's defining access, make sure we actually dominate 4940b57cec5SDimitry Andric // it 4950b57cec5SDimitry Andric assert(MSSA->dominates(NewDef, FirstDef) && 4960b57cec5SDimitry Andric "Should have dominated the new access"); 4970b57cec5SDimitry Andric 4980b57cec5SDimitry Andric // This may insert new phi nodes, because we are not guaranteed the 4990b57cec5SDimitry Andric // block we are processing has a single pred, and depending where the 5000b57cec5SDimitry Andric // store was inserted, it may require phi nodes below it. 5010b57cec5SDimitry Andric cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef)); 5020b57cec5SDimitry Andric return; 5030b57cec5SDimitry Andric } 5040b57cec5SDimitry Andric // We didn't find a def, so we must continue. 5050b57cec5SDimitry Andric for (const auto *S : successors(FixupBlock)) { 5060b57cec5SDimitry Andric // If there is a phi node, handle it. 5070b57cec5SDimitry Andric // Otherwise, put the block on the worklist 5080b57cec5SDimitry Andric if (auto *MP = MSSA->getMemoryAccess(S)) 5090b57cec5SDimitry Andric setMemoryPhiValueForBlock(MP, FixupBlock, NewDef); 5100b57cec5SDimitry Andric else { 5110b57cec5SDimitry Andric // If we cycle, we should have ended up at a phi node that we already 5120b57cec5SDimitry Andric // processed. 
FIXME: Double check this 5130b57cec5SDimitry Andric if (!Seen.insert(S).second) 5140b57cec5SDimitry Andric continue; 5150b57cec5SDimitry Andric Worklist.push_back(S); 5160b57cec5SDimitry Andric } 5170b57cec5SDimitry Andric } 5180b57cec5SDimitry Andric } 5190b57cec5SDimitry Andric } 5200b57cec5SDimitry Andric } 5210b57cec5SDimitry Andric 5220b57cec5SDimitry Andric void MemorySSAUpdater::removeEdge(BasicBlock *From, BasicBlock *To) { 5230b57cec5SDimitry Andric if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) { 5240b57cec5SDimitry Andric MPhi->unorderedDeleteIncomingBlock(From); 525*8bcb0991SDimitry Andric tryRemoveTrivialPhi(MPhi); 5260b57cec5SDimitry Andric } 5270b57cec5SDimitry Andric } 5280b57cec5SDimitry Andric 5290b57cec5SDimitry Andric void MemorySSAUpdater::removeDuplicatePhiEdgesBetween(const BasicBlock *From, 5300b57cec5SDimitry Andric const BasicBlock *To) { 5310b57cec5SDimitry Andric if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) { 5320b57cec5SDimitry Andric bool Found = false; 5330b57cec5SDimitry Andric MPhi->unorderedDeleteIncomingIf([&](const MemoryAccess *, BasicBlock *B) { 5340b57cec5SDimitry Andric if (From != B) 5350b57cec5SDimitry Andric return false; 5360b57cec5SDimitry Andric if (Found) 5370b57cec5SDimitry Andric return true; 5380b57cec5SDimitry Andric Found = true; 5390b57cec5SDimitry Andric return false; 5400b57cec5SDimitry Andric }); 541*8bcb0991SDimitry Andric tryRemoveTrivialPhi(MPhi); 5420b57cec5SDimitry Andric } 5430b57cec5SDimitry Andric } 5440b57cec5SDimitry Andric 545*8bcb0991SDimitry Andric static MemoryAccess *getNewDefiningAccessForClone(MemoryAccess *MA, 5460b57cec5SDimitry Andric const ValueToValueMapTy &VMap, 5470b57cec5SDimitry Andric PhiToDefMap &MPhiMap, 548*8bcb0991SDimitry Andric bool CloneWasSimplified, 549*8bcb0991SDimitry Andric MemorySSA *MSSA) { 5500b57cec5SDimitry Andric MemoryAccess *InsnDefining = MA; 551*8bcb0991SDimitry Andric if (MemoryDef *DefMUD = dyn_cast<MemoryDef>(InsnDefining)) { 5520b57cec5SDimitry Andric 
if (!MSSA->isLiveOnEntryDef(DefMUD)) { 5530b57cec5SDimitry Andric Instruction *DefMUDI = DefMUD->getMemoryInst(); 5540b57cec5SDimitry Andric assert(DefMUDI && "Found MemoryUseOrDef with no Instruction."); 5550b57cec5SDimitry Andric if (Instruction *NewDefMUDI = 556*8bcb0991SDimitry Andric cast_or_null<Instruction>(VMap.lookup(DefMUDI))) { 5570b57cec5SDimitry Andric InsnDefining = MSSA->getMemoryAccess(NewDefMUDI); 558*8bcb0991SDimitry Andric if (!CloneWasSimplified) 559*8bcb0991SDimitry Andric assert(InsnDefining && "Defining instruction cannot be nullptr."); 560*8bcb0991SDimitry Andric else if (!InsnDefining || isa<MemoryUse>(InsnDefining)) { 561*8bcb0991SDimitry Andric // The clone was simplified, it's no longer a MemoryDef, look up. 562*8bcb0991SDimitry Andric auto DefIt = DefMUD->getDefsIterator(); 563*8bcb0991SDimitry Andric // Since simplified clones only occur in single block cloning, a 564*8bcb0991SDimitry Andric // previous definition must exist, otherwise NewDefMUDI would not 565*8bcb0991SDimitry Andric // have been found in VMap. 
566*8bcb0991SDimitry Andric assert(DefIt != MSSA->getBlockDefs(DefMUD->getBlock())->begin() && 567*8bcb0991SDimitry Andric "Previous def must exist"); 568*8bcb0991SDimitry Andric InsnDefining = getNewDefiningAccessForClone( 569*8bcb0991SDimitry Andric &*(--DefIt), VMap, MPhiMap, CloneWasSimplified, MSSA); 570*8bcb0991SDimitry Andric } 571*8bcb0991SDimitry Andric } 5720b57cec5SDimitry Andric } 5730b57cec5SDimitry Andric } else { 5740b57cec5SDimitry Andric MemoryPhi *DefPhi = cast<MemoryPhi>(InsnDefining); 5750b57cec5SDimitry Andric if (MemoryAccess *NewDefPhi = MPhiMap.lookup(DefPhi)) 5760b57cec5SDimitry Andric InsnDefining = NewDefPhi; 5770b57cec5SDimitry Andric } 5780b57cec5SDimitry Andric assert(InsnDefining && "Defining instruction cannot be nullptr."); 5790b57cec5SDimitry Andric return InsnDefining; 580*8bcb0991SDimitry Andric } 5810b57cec5SDimitry Andric 582*8bcb0991SDimitry Andric void MemorySSAUpdater::cloneUsesAndDefs(BasicBlock *BB, BasicBlock *NewBB, 583*8bcb0991SDimitry Andric const ValueToValueMapTy &VMap, 584*8bcb0991SDimitry Andric PhiToDefMap &MPhiMap, 585*8bcb0991SDimitry Andric bool CloneWasSimplified) { 5860b57cec5SDimitry Andric const MemorySSA::AccessList *Acc = MSSA->getBlockAccesses(BB); 5870b57cec5SDimitry Andric if (!Acc) 5880b57cec5SDimitry Andric return; 5890b57cec5SDimitry Andric for (const MemoryAccess &MA : *Acc) { 5900b57cec5SDimitry Andric if (const MemoryUseOrDef *MUD = dyn_cast<MemoryUseOrDef>(&MA)) { 5910b57cec5SDimitry Andric Instruction *Insn = MUD->getMemoryInst(); 5920b57cec5SDimitry Andric // Entry does not exist if the clone of the block did not clone all 5930b57cec5SDimitry Andric // instructions. This occurs in LoopRotate when cloning instructions 5940b57cec5SDimitry Andric // from the old header to the old preheader. The cloned instruction may 5950b57cec5SDimitry Andric // also be a simplified Value, not an Instruction (see LoopRotate). 
5960b57cec5SDimitry Andric // Also in LoopRotate, even when it's an instruction, due to it being 5970b57cec5SDimitry Andric // simplified, it may be a Use rather than a Def, so we cannot use MUD as 5980b57cec5SDimitry Andric // template. Calls coming from updateForClonedBlockIntoPred, ensure this. 5990b57cec5SDimitry Andric if (Instruction *NewInsn = 6000b57cec5SDimitry Andric dyn_cast_or_null<Instruction>(VMap.lookup(Insn))) { 6010b57cec5SDimitry Andric MemoryAccess *NewUseOrDef = MSSA->createDefinedAccess( 602*8bcb0991SDimitry Andric NewInsn, 603*8bcb0991SDimitry Andric getNewDefiningAccessForClone(MUD->getDefiningAccess(), VMap, 604*8bcb0991SDimitry Andric MPhiMap, CloneWasSimplified, MSSA), 605*8bcb0991SDimitry Andric /*Template=*/CloneWasSimplified ? nullptr : MUD, 606*8bcb0991SDimitry Andric /*CreationMustSucceed=*/CloneWasSimplified ? false : true); 607*8bcb0991SDimitry Andric if (NewUseOrDef) 6080b57cec5SDimitry Andric MSSA->insertIntoListsForBlock(NewUseOrDef, NewBB, MemorySSA::End); 6090b57cec5SDimitry Andric } 6100b57cec5SDimitry Andric } 6110b57cec5SDimitry Andric } 6120b57cec5SDimitry Andric } 6130b57cec5SDimitry Andric 6140b57cec5SDimitry Andric void MemorySSAUpdater::updatePhisWhenInsertingUniqueBackedgeBlock( 6150b57cec5SDimitry Andric BasicBlock *Header, BasicBlock *Preheader, BasicBlock *BEBlock) { 6160b57cec5SDimitry Andric auto *MPhi = MSSA->getMemoryAccess(Header); 6170b57cec5SDimitry Andric if (!MPhi) 6180b57cec5SDimitry Andric return; 6190b57cec5SDimitry Andric 6200b57cec5SDimitry Andric // Create phi node in the backedge block and populate it with the same 6210b57cec5SDimitry Andric // incoming values as MPhi. Skip incoming values coming from Preheader. 
6220b57cec5SDimitry Andric auto *NewMPhi = MSSA->createMemoryPhi(BEBlock); 6230b57cec5SDimitry Andric bool HasUniqueIncomingValue = true; 6240b57cec5SDimitry Andric MemoryAccess *UniqueValue = nullptr; 6250b57cec5SDimitry Andric for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) { 6260b57cec5SDimitry Andric BasicBlock *IBB = MPhi->getIncomingBlock(I); 6270b57cec5SDimitry Andric MemoryAccess *IV = MPhi->getIncomingValue(I); 6280b57cec5SDimitry Andric if (IBB != Preheader) { 6290b57cec5SDimitry Andric NewMPhi->addIncoming(IV, IBB); 6300b57cec5SDimitry Andric if (HasUniqueIncomingValue) { 6310b57cec5SDimitry Andric if (!UniqueValue) 6320b57cec5SDimitry Andric UniqueValue = IV; 6330b57cec5SDimitry Andric else if (UniqueValue != IV) 6340b57cec5SDimitry Andric HasUniqueIncomingValue = false; 6350b57cec5SDimitry Andric } 6360b57cec5SDimitry Andric } 6370b57cec5SDimitry Andric } 6380b57cec5SDimitry Andric 6390b57cec5SDimitry Andric // Update incoming edges into MPhi. Remove all but the incoming edge from 6400b57cec5SDimitry Andric // Preheader. Add an edge from NewMPhi 6410b57cec5SDimitry Andric auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader); 6420b57cec5SDimitry Andric MPhi->setIncomingValue(0, AccFromPreheader); 6430b57cec5SDimitry Andric MPhi->setIncomingBlock(0, Preheader); 6440b57cec5SDimitry Andric for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I) 6450b57cec5SDimitry Andric MPhi->unorderedDeleteIncoming(I); 6460b57cec5SDimitry Andric MPhi->addIncoming(NewMPhi, BEBlock); 6470b57cec5SDimitry Andric 6480b57cec5SDimitry Andric // If NewMPhi is a trivial phi, remove it. Its use in the header MPhi will be 6490b57cec5SDimitry Andric // replaced with the unique value. 
650*8bcb0991SDimitry Andric tryRemoveTrivialPhi(NewMPhi); 6510b57cec5SDimitry Andric } 6520b57cec5SDimitry Andric 6530b57cec5SDimitry Andric void MemorySSAUpdater::updateForClonedLoop(const LoopBlocksRPO &LoopBlocks, 6540b57cec5SDimitry Andric ArrayRef<BasicBlock *> ExitBlocks, 6550b57cec5SDimitry Andric const ValueToValueMapTy &VMap, 6560b57cec5SDimitry Andric bool IgnoreIncomingWithNoClones) { 6570b57cec5SDimitry Andric PhiToDefMap MPhiMap; 6580b57cec5SDimitry Andric 6590b57cec5SDimitry Andric auto FixPhiIncomingValues = [&](MemoryPhi *Phi, MemoryPhi *NewPhi) { 6600b57cec5SDimitry Andric assert(Phi && NewPhi && "Invalid Phi nodes."); 6610b57cec5SDimitry Andric BasicBlock *NewPhiBB = NewPhi->getBlock(); 6620b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 4> NewPhiBBPreds(pred_begin(NewPhiBB), 6630b57cec5SDimitry Andric pred_end(NewPhiBB)); 6640b57cec5SDimitry Andric for (unsigned It = 0, E = Phi->getNumIncomingValues(); It < E; ++It) { 6650b57cec5SDimitry Andric MemoryAccess *IncomingAccess = Phi->getIncomingValue(It); 6660b57cec5SDimitry Andric BasicBlock *IncBB = Phi->getIncomingBlock(It); 6670b57cec5SDimitry Andric 6680b57cec5SDimitry Andric if (BasicBlock *NewIncBB = cast_or_null<BasicBlock>(VMap.lookup(IncBB))) 6690b57cec5SDimitry Andric IncBB = NewIncBB; 6700b57cec5SDimitry Andric else if (IgnoreIncomingWithNoClones) 6710b57cec5SDimitry Andric continue; 6720b57cec5SDimitry Andric 6730b57cec5SDimitry Andric // Now we have IncBB, and will need to add incoming from it to NewPhi. 6740b57cec5SDimitry Andric 6750b57cec5SDimitry Andric // If IncBB is not a predecessor of NewPhiBB, then do not add it. 6760b57cec5SDimitry Andric // NewPhiBB was cloned without that edge. 6770b57cec5SDimitry Andric if (!NewPhiBBPreds.count(IncBB)) 6780b57cec5SDimitry Andric continue; 6790b57cec5SDimitry Andric 6800b57cec5SDimitry Andric // Determine incoming value and add it as incoming from IncBB. 
6810b57cec5SDimitry Andric if (MemoryUseOrDef *IncMUD = dyn_cast<MemoryUseOrDef>(IncomingAccess)) { 6820b57cec5SDimitry Andric if (!MSSA->isLiveOnEntryDef(IncMUD)) { 6830b57cec5SDimitry Andric Instruction *IncI = IncMUD->getMemoryInst(); 6840b57cec5SDimitry Andric assert(IncI && "Found MemoryUseOrDef with no Instruction."); 6850b57cec5SDimitry Andric if (Instruction *NewIncI = 6860b57cec5SDimitry Andric cast_or_null<Instruction>(VMap.lookup(IncI))) { 6870b57cec5SDimitry Andric IncMUD = MSSA->getMemoryAccess(NewIncI); 6880b57cec5SDimitry Andric assert(IncMUD && 6890b57cec5SDimitry Andric "MemoryUseOrDef cannot be null, all preds processed."); 6900b57cec5SDimitry Andric } 6910b57cec5SDimitry Andric } 6920b57cec5SDimitry Andric NewPhi->addIncoming(IncMUD, IncBB); 6930b57cec5SDimitry Andric } else { 6940b57cec5SDimitry Andric MemoryPhi *IncPhi = cast<MemoryPhi>(IncomingAccess); 6950b57cec5SDimitry Andric if (MemoryAccess *NewDefPhi = MPhiMap.lookup(IncPhi)) 6960b57cec5SDimitry Andric NewPhi->addIncoming(NewDefPhi, IncBB); 6970b57cec5SDimitry Andric else 6980b57cec5SDimitry Andric NewPhi->addIncoming(IncPhi, IncBB); 6990b57cec5SDimitry Andric } 7000b57cec5SDimitry Andric } 7010b57cec5SDimitry Andric }; 7020b57cec5SDimitry Andric 7030b57cec5SDimitry Andric auto ProcessBlock = [&](BasicBlock *BB) { 7040b57cec5SDimitry Andric BasicBlock *NewBlock = cast_or_null<BasicBlock>(VMap.lookup(BB)); 7050b57cec5SDimitry Andric if (!NewBlock) 7060b57cec5SDimitry Andric return; 7070b57cec5SDimitry Andric 7080b57cec5SDimitry Andric assert(!MSSA->getWritableBlockAccesses(NewBlock) && 7090b57cec5SDimitry Andric "Cloned block should have no accesses"); 7100b57cec5SDimitry Andric 7110b57cec5SDimitry Andric // Add MemoryPhi. 
7120b57cec5SDimitry Andric if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) { 7130b57cec5SDimitry Andric MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock); 7140b57cec5SDimitry Andric MPhiMap[MPhi] = NewPhi; 7150b57cec5SDimitry Andric } 7160b57cec5SDimitry Andric // Update Uses and Defs. 7170b57cec5SDimitry Andric cloneUsesAndDefs(BB, NewBlock, VMap, MPhiMap); 7180b57cec5SDimitry Andric }; 7190b57cec5SDimitry Andric 7200b57cec5SDimitry Andric for (auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks)) 7210b57cec5SDimitry Andric ProcessBlock(BB); 7220b57cec5SDimitry Andric 7230b57cec5SDimitry Andric for (auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks)) 7240b57cec5SDimitry Andric if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) 7250b57cec5SDimitry Andric if (MemoryAccess *NewPhi = MPhiMap.lookup(MPhi)) 7260b57cec5SDimitry Andric FixPhiIncomingValues(MPhi, cast<MemoryPhi>(NewPhi)); 7270b57cec5SDimitry Andric } 7280b57cec5SDimitry Andric 7290b57cec5SDimitry Andric void MemorySSAUpdater::updateForClonedBlockIntoPred( 7300b57cec5SDimitry Andric BasicBlock *BB, BasicBlock *P1, const ValueToValueMapTy &VM) { 7310b57cec5SDimitry Andric // All defs/phis from outside BB that are used in BB, are valid uses in P1. 7320b57cec5SDimitry Andric // Since those defs/phis must have dominated BB, and also dominate P1. 7330b57cec5SDimitry Andric // Defs from BB being used in BB will be replaced with the cloned defs from 7340b57cec5SDimitry Andric // VM. The uses of BB's Phi (if it exists) in BB will be replaced by the 7350b57cec5SDimitry Andric // incoming def into the Phi from P1. 7360b57cec5SDimitry Andric // Instructions cloned into the predecessor are in practice sometimes 7370b57cec5SDimitry Andric // simplified, so disable the use of the template, and create an access from 7380b57cec5SDimitry Andric // scratch. 
7390b57cec5SDimitry Andric PhiToDefMap MPhiMap; 7400b57cec5SDimitry Andric if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) 7410b57cec5SDimitry Andric MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1); 7420b57cec5SDimitry Andric cloneUsesAndDefs(BB, P1, VM, MPhiMap, /*CloneWasSimplified=*/true); 7430b57cec5SDimitry Andric } 7440b57cec5SDimitry Andric 7450b57cec5SDimitry Andric template <typename Iter> 7460b57cec5SDimitry Andric void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop( 7470b57cec5SDimitry Andric ArrayRef<BasicBlock *> ExitBlocks, Iter ValuesBegin, Iter ValuesEnd, 7480b57cec5SDimitry Andric DominatorTree &DT) { 7490b57cec5SDimitry Andric SmallVector<CFGUpdate, 4> Updates; 7500b57cec5SDimitry Andric // Update/insert phis in all successors of exit blocks. 7510b57cec5SDimitry Andric for (auto *Exit : ExitBlocks) 7520b57cec5SDimitry Andric for (const ValueToValueMapTy *VMap : make_range(ValuesBegin, ValuesEnd)) 7530b57cec5SDimitry Andric if (BasicBlock *NewExit = cast_or_null<BasicBlock>(VMap->lookup(Exit))) { 7540b57cec5SDimitry Andric BasicBlock *ExitSucc = NewExit->getTerminator()->getSuccessor(0); 7550b57cec5SDimitry Andric Updates.push_back({DT.Insert, NewExit, ExitSucc}); 7560b57cec5SDimitry Andric } 7570b57cec5SDimitry Andric applyInsertUpdates(Updates, DT); 7580b57cec5SDimitry Andric } 7590b57cec5SDimitry Andric 7600b57cec5SDimitry Andric void MemorySSAUpdater::updateExitBlocksForClonedLoop( 7610b57cec5SDimitry Andric ArrayRef<BasicBlock *> ExitBlocks, const ValueToValueMapTy &VMap, 7620b57cec5SDimitry Andric DominatorTree &DT) { 7630b57cec5SDimitry Andric const ValueToValueMapTy *const Arr[] = {&VMap}; 7640b57cec5SDimitry Andric privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr), 7650b57cec5SDimitry Andric std::end(Arr), DT); 7660b57cec5SDimitry Andric } 7670b57cec5SDimitry Andric 7680b57cec5SDimitry Andric void MemorySSAUpdater::updateExitBlocksForClonedLoop( 7690b57cec5SDimitry Andric ArrayRef<BasicBlock *> ExitBlocks, 
7700b57cec5SDimitry Andric ArrayRef<std::unique_ptr<ValueToValueMapTy>> VMaps, DominatorTree &DT) { 7710b57cec5SDimitry Andric auto GetPtr = [&](const std::unique_ptr<ValueToValueMapTy> &I) { 7720b57cec5SDimitry Andric return I.get(); 7730b57cec5SDimitry Andric }; 7740b57cec5SDimitry Andric using MappedIteratorType = 7750b57cec5SDimitry Andric mapped_iterator<const std::unique_ptr<ValueToValueMapTy> *, 7760b57cec5SDimitry Andric decltype(GetPtr)>; 7770b57cec5SDimitry Andric auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr); 7780b57cec5SDimitry Andric auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr); 7790b57cec5SDimitry Andric privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT); 7800b57cec5SDimitry Andric } 7810b57cec5SDimitry Andric 7820b57cec5SDimitry Andric void MemorySSAUpdater::applyUpdates(ArrayRef<CFGUpdate> Updates, 7830b57cec5SDimitry Andric DominatorTree &DT) { 7840b57cec5SDimitry Andric SmallVector<CFGUpdate, 4> RevDeleteUpdates; 7850b57cec5SDimitry Andric SmallVector<CFGUpdate, 4> InsertUpdates; 7860b57cec5SDimitry Andric for (auto &Update : Updates) { 7870b57cec5SDimitry Andric if (Update.getKind() == DT.Insert) 7880b57cec5SDimitry Andric InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()}); 7890b57cec5SDimitry Andric else 7900b57cec5SDimitry Andric RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()}); 7910b57cec5SDimitry Andric } 7920b57cec5SDimitry Andric 7930b57cec5SDimitry Andric if (!RevDeleteUpdates.empty()) { 7940b57cec5SDimitry Andric // Update for inserted edges: use newDT and snapshot CFG as if deletes had 7950b57cec5SDimitry Andric // not occurred. 7960b57cec5SDimitry Andric // FIXME: This creates a new DT, so it's more expensive to do mix 7970b57cec5SDimitry Andric // delete/inserts vs just inserts. We can do an incremental update on the DT 7980b57cec5SDimitry Andric // to revert deletes, than re-delete the edges. 
Teaching DT to do this, is 7990b57cec5SDimitry Andric // part of a pending cleanup. 8000b57cec5SDimitry Andric DominatorTree NewDT(DT, RevDeleteUpdates); 8010b57cec5SDimitry Andric GraphDiff<BasicBlock *> GD(RevDeleteUpdates); 8020b57cec5SDimitry Andric applyInsertUpdates(InsertUpdates, NewDT, &GD); 8030b57cec5SDimitry Andric } else { 8040b57cec5SDimitry Andric GraphDiff<BasicBlock *> GD; 8050b57cec5SDimitry Andric applyInsertUpdates(InsertUpdates, DT, &GD); 8060b57cec5SDimitry Andric } 8070b57cec5SDimitry Andric 8080b57cec5SDimitry Andric // Update for deleted edges 8090b57cec5SDimitry Andric for (auto &Update : RevDeleteUpdates) 8100b57cec5SDimitry Andric removeEdge(Update.getFrom(), Update.getTo()); 8110b57cec5SDimitry Andric } 8120b57cec5SDimitry Andric 8130b57cec5SDimitry Andric void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates, 8140b57cec5SDimitry Andric DominatorTree &DT) { 8150b57cec5SDimitry Andric GraphDiff<BasicBlock *> GD; 8160b57cec5SDimitry Andric applyInsertUpdates(Updates, DT, &GD); 8170b57cec5SDimitry Andric } 8180b57cec5SDimitry Andric 8190b57cec5SDimitry Andric void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates, 8200b57cec5SDimitry Andric DominatorTree &DT, 8210b57cec5SDimitry Andric const GraphDiff<BasicBlock *> *GD) { 8220b57cec5SDimitry Andric // Get recursive last Def, assuming well formed MSSA and updated DT. 8230b57cec5SDimitry Andric auto GetLastDef = [&](BasicBlock *BB) -> MemoryAccess * { 8240b57cec5SDimitry Andric while (true) { 8250b57cec5SDimitry Andric MemorySSA::DefsList *Defs = MSSA->getWritableBlockDefs(BB); 8260b57cec5SDimitry Andric // Return last Def or Phi in BB, if it exists. 8270b57cec5SDimitry Andric if (Defs) 8280b57cec5SDimitry Andric return &*(--Defs->end()); 8290b57cec5SDimitry Andric 8300b57cec5SDimitry Andric // Check number of predecessors, we only care if there's more than one. 
8310b57cec5SDimitry Andric unsigned Count = 0; 8320b57cec5SDimitry Andric BasicBlock *Pred = nullptr; 8330b57cec5SDimitry Andric for (auto &Pair : children<GraphDiffInvBBPair>({GD, BB})) { 8340b57cec5SDimitry Andric Pred = Pair.second; 8350b57cec5SDimitry Andric Count++; 8360b57cec5SDimitry Andric if (Count == 2) 8370b57cec5SDimitry Andric break; 8380b57cec5SDimitry Andric } 8390b57cec5SDimitry Andric 8400b57cec5SDimitry Andric // If BB has multiple predecessors, get last definition from IDom. 8410b57cec5SDimitry Andric if (Count != 1) { 8420b57cec5SDimitry Andric // [SimpleLoopUnswitch] If BB is a dead block, about to be deleted, its 8430b57cec5SDimitry Andric // DT is invalidated. Return LoE as its last def. This will be added to 8440b57cec5SDimitry Andric // MemoryPhi node, and later deleted when the block is deleted. 8450b57cec5SDimitry Andric if (!DT.getNode(BB)) 8460b57cec5SDimitry Andric return MSSA->getLiveOnEntryDef(); 8470b57cec5SDimitry Andric if (auto *IDom = DT.getNode(BB)->getIDom()) 8480b57cec5SDimitry Andric if (IDom->getBlock() != BB) { 8490b57cec5SDimitry Andric BB = IDom->getBlock(); 8500b57cec5SDimitry Andric continue; 8510b57cec5SDimitry Andric } 8520b57cec5SDimitry Andric return MSSA->getLiveOnEntryDef(); 8530b57cec5SDimitry Andric } else { 8540b57cec5SDimitry Andric // Single predecessor, BB cannot be dead. GetLastDef of Pred. 8550b57cec5SDimitry Andric assert(Count == 1 && Pred && "Single predecessor expected."); 856*8bcb0991SDimitry Andric // BB can be unreachable though, return LoE if that is the case. 857*8bcb0991SDimitry Andric if (!DT.getNode(BB)) 858*8bcb0991SDimitry Andric return MSSA->getLiveOnEntryDef(); 8590b57cec5SDimitry Andric BB = Pred; 8600b57cec5SDimitry Andric } 8610b57cec5SDimitry Andric }; 8620b57cec5SDimitry Andric llvm_unreachable("Unable to get last definition."); 8630b57cec5SDimitry Andric }; 8640b57cec5SDimitry Andric 8650b57cec5SDimitry Andric // Get nearest IDom given a set of blocks. 
8660b57cec5SDimitry Andric // TODO: this can be optimized by starting the search at the node with the 8670b57cec5SDimitry Andric // lowest level (highest in the tree). 8680b57cec5SDimitry Andric auto FindNearestCommonDominator = 8690b57cec5SDimitry Andric [&](const SmallSetVector<BasicBlock *, 2> &BBSet) -> BasicBlock * { 8700b57cec5SDimitry Andric BasicBlock *PrevIDom = *BBSet.begin(); 8710b57cec5SDimitry Andric for (auto *BB : BBSet) 8720b57cec5SDimitry Andric PrevIDom = DT.findNearestCommonDominator(PrevIDom, BB); 8730b57cec5SDimitry Andric return PrevIDom; 8740b57cec5SDimitry Andric }; 8750b57cec5SDimitry Andric 8760b57cec5SDimitry Andric // Get all blocks that dominate PrevIDom, stop when reaching CurrIDom. Do not 8770b57cec5SDimitry Andric // include CurrIDom. 8780b57cec5SDimitry Andric auto GetNoLongerDomBlocks = 8790b57cec5SDimitry Andric [&](BasicBlock *PrevIDom, BasicBlock *CurrIDom, 8800b57cec5SDimitry Andric SmallVectorImpl<BasicBlock *> &BlocksPrevDom) { 8810b57cec5SDimitry Andric if (PrevIDom == CurrIDom) 8820b57cec5SDimitry Andric return; 8830b57cec5SDimitry Andric BlocksPrevDom.push_back(PrevIDom); 8840b57cec5SDimitry Andric BasicBlock *NextIDom = PrevIDom; 8850b57cec5SDimitry Andric while (BasicBlock *UpIDom = 8860b57cec5SDimitry Andric DT.getNode(NextIDom)->getIDom()->getBlock()) { 8870b57cec5SDimitry Andric if (UpIDom == CurrIDom) 8880b57cec5SDimitry Andric break; 8890b57cec5SDimitry Andric BlocksPrevDom.push_back(UpIDom); 8900b57cec5SDimitry Andric NextIDom = UpIDom; 8910b57cec5SDimitry Andric } 8920b57cec5SDimitry Andric }; 8930b57cec5SDimitry Andric 8940b57cec5SDimitry Andric // Map a BB to its predecessors: added + previously existing. To get a 8950b57cec5SDimitry Andric // deterministic order, store predecessors as SetVectors. The order in each 8960b57cec5SDimitry Andric // will be defined by the order in Updates (fixed) and the order given by 8970b57cec5SDimitry Andric // children<> (also fixed). 
Since we further iterate over these ordered sets, 8980b57cec5SDimitry Andric // we lose the information of multiple edges possibly existing between two 8990b57cec5SDimitry Andric // blocks, so we'll keep and EdgeCount map for that. 9000b57cec5SDimitry Andric // An alternate implementation could keep unordered set for the predecessors, 9010b57cec5SDimitry Andric // traverse either Updates or children<> each time to get the deterministic 9020b57cec5SDimitry Andric // order, and drop the usage of EdgeCount. This alternate approach would still 9030b57cec5SDimitry Andric // require querying the maps for each predecessor, and children<> call has 9040b57cec5SDimitry Andric // additional computation inside for creating the snapshot-graph predecessors. 9050b57cec5SDimitry Andric // As such, we favor using a little additional storage and less compute time. 9060b57cec5SDimitry Andric // This decision can be revisited if we find the alternative more favorable. 9070b57cec5SDimitry Andric 9080b57cec5SDimitry Andric struct PredInfo { 9090b57cec5SDimitry Andric SmallSetVector<BasicBlock *, 2> Added; 9100b57cec5SDimitry Andric SmallSetVector<BasicBlock *, 2> Prev; 9110b57cec5SDimitry Andric }; 9120b57cec5SDimitry Andric SmallDenseMap<BasicBlock *, PredInfo> PredMap; 9130b57cec5SDimitry Andric 9140b57cec5SDimitry Andric for (auto &Edge : Updates) { 9150b57cec5SDimitry Andric BasicBlock *BB = Edge.getTo(); 9160b57cec5SDimitry Andric auto &AddedBlockSet = PredMap[BB].Added; 9170b57cec5SDimitry Andric AddedBlockSet.insert(Edge.getFrom()); 9180b57cec5SDimitry Andric } 9190b57cec5SDimitry Andric 9200b57cec5SDimitry Andric // Store all existing predecessor for each BB, at least one must exist. 
9210b57cec5SDimitry Andric SmallDenseMap<std::pair<BasicBlock *, BasicBlock *>, int> EdgeCountMap; 9220b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 2> NewBlocks; 9230b57cec5SDimitry Andric for (auto &BBPredPair : PredMap) { 9240b57cec5SDimitry Andric auto *BB = BBPredPair.first; 9250b57cec5SDimitry Andric const auto &AddedBlockSet = BBPredPair.second.Added; 9260b57cec5SDimitry Andric auto &PrevBlockSet = BBPredPair.second.Prev; 9270b57cec5SDimitry Andric for (auto &Pair : children<GraphDiffInvBBPair>({GD, BB})) { 9280b57cec5SDimitry Andric BasicBlock *Pi = Pair.second; 9290b57cec5SDimitry Andric if (!AddedBlockSet.count(Pi)) 9300b57cec5SDimitry Andric PrevBlockSet.insert(Pi); 9310b57cec5SDimitry Andric EdgeCountMap[{Pi, BB}]++; 9320b57cec5SDimitry Andric } 9330b57cec5SDimitry Andric 9340b57cec5SDimitry Andric if (PrevBlockSet.empty()) { 9350b57cec5SDimitry Andric assert(pred_size(BB) == AddedBlockSet.size() && "Duplicate edges added."); 9360b57cec5SDimitry Andric LLVM_DEBUG( 9370b57cec5SDimitry Andric dbgs() 9380b57cec5SDimitry Andric << "Adding a predecessor to a block with no predecessors. " 9390b57cec5SDimitry Andric "This must be an edge added to a new, likely cloned, block. " 9400b57cec5SDimitry Andric "Its memory accesses must be already correct, assuming completed " 9410b57cec5SDimitry Andric "via the updateExitBlocksForClonedLoop API. " 9420b57cec5SDimitry Andric "Assert a single such edge is added so no phi addition or " 9430b57cec5SDimitry Andric "additional processing is required.\n"); 9440b57cec5SDimitry Andric assert(AddedBlockSet.size() == 1 && 9450b57cec5SDimitry Andric "Can only handle adding one predecessor to a new block."); 9460b57cec5SDimitry Andric // Need to remove new blocks from PredMap. Remove below to not invalidate 9470b57cec5SDimitry Andric // iterator here. 9480b57cec5SDimitry Andric NewBlocks.insert(BB); 9490b57cec5SDimitry Andric } 9500b57cec5SDimitry Andric } 9510b57cec5SDimitry Andric // Nothing to process for new/cloned blocks. 
9520b57cec5SDimitry Andric for (auto *BB : NewBlocks) 9530b57cec5SDimitry Andric PredMap.erase(BB); 9540b57cec5SDimitry Andric 9550b57cec5SDimitry Andric SmallVector<BasicBlock *, 16> BlocksWithDefsToReplace; 9560b57cec5SDimitry Andric SmallVector<WeakVH, 8> InsertedPhis; 9570b57cec5SDimitry Andric 9580b57cec5SDimitry Andric // First create MemoryPhis in all blocks that don't have one. Create in the 9590b57cec5SDimitry Andric // order found in Updates, not in PredMap, to get deterministic numbering. 9600b57cec5SDimitry Andric for (auto &Edge : Updates) { 9610b57cec5SDimitry Andric BasicBlock *BB = Edge.getTo(); 9620b57cec5SDimitry Andric if (PredMap.count(BB) && !MSSA->getMemoryAccess(BB)) 9630b57cec5SDimitry Andric InsertedPhis.push_back(MSSA->createMemoryPhi(BB)); 9640b57cec5SDimitry Andric } 9650b57cec5SDimitry Andric 9660b57cec5SDimitry Andric // Now we'll fill in the MemoryPhis with the right incoming values. 9670b57cec5SDimitry Andric for (auto &BBPredPair : PredMap) { 9680b57cec5SDimitry Andric auto *BB = BBPredPair.first; 9690b57cec5SDimitry Andric const auto &PrevBlockSet = BBPredPair.second.Prev; 9700b57cec5SDimitry Andric const auto &AddedBlockSet = BBPredPair.second.Added; 9710b57cec5SDimitry Andric assert(!PrevBlockSet.empty() && 9720b57cec5SDimitry Andric "At least one previous predecessor must exist."); 9730b57cec5SDimitry Andric 9740b57cec5SDimitry Andric // TODO: if this becomes a bottleneck, we can save on GetLastDef calls by 9750b57cec5SDimitry Andric // keeping this map before the loop. We can reuse already populated entries 9760b57cec5SDimitry Andric // if an edge is added from the same predecessor to two different blocks, 9770b57cec5SDimitry Andric // and this does happen in rotate. Note that the map needs to be updated 9780b57cec5SDimitry Andric // when deleting non-necessary phis below, if the phi is in the map by 9790b57cec5SDimitry Andric // replacing the value with DefP1. 
9800b57cec5SDimitry Andric SmallDenseMap<BasicBlock *, MemoryAccess *> LastDefAddedPred; 9810b57cec5SDimitry Andric for (auto *AddedPred : AddedBlockSet) { 9820b57cec5SDimitry Andric auto *DefPn = GetLastDef(AddedPred); 9830b57cec5SDimitry Andric assert(DefPn != nullptr && "Unable to find last definition."); 9840b57cec5SDimitry Andric LastDefAddedPred[AddedPred] = DefPn; 9850b57cec5SDimitry Andric } 9860b57cec5SDimitry Andric 9870b57cec5SDimitry Andric MemoryPhi *NewPhi = MSSA->getMemoryAccess(BB); 9880b57cec5SDimitry Andric // If Phi is not empty, add an incoming edge from each added pred. Must 9890b57cec5SDimitry Andric // still compute blocks with defs to replace for this block below. 9900b57cec5SDimitry Andric if (NewPhi->getNumOperands()) { 9910b57cec5SDimitry Andric for (auto *Pred : AddedBlockSet) { 9920b57cec5SDimitry Andric auto *LastDefForPred = LastDefAddedPred[Pred]; 9930b57cec5SDimitry Andric for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I) 9940b57cec5SDimitry Andric NewPhi->addIncoming(LastDefForPred, Pred); 9950b57cec5SDimitry Andric } 9960b57cec5SDimitry Andric } else { 9970b57cec5SDimitry Andric // Pick any existing predecessor and get its definition. All other 9980b57cec5SDimitry Andric // existing predecessors should have the same one, since no phi existed. 9990b57cec5SDimitry Andric auto *P1 = *PrevBlockSet.begin(); 10000b57cec5SDimitry Andric MemoryAccess *DefP1 = GetLastDef(P1); 10010b57cec5SDimitry Andric 10020b57cec5SDimitry Andric // Check DefP1 against all Defs in LastDefPredPair. If all the same, 10030b57cec5SDimitry Andric // nothing to add. 
10040b57cec5SDimitry Andric bool InsertPhi = false; 10050b57cec5SDimitry Andric for (auto LastDefPredPair : LastDefAddedPred) 10060b57cec5SDimitry Andric if (DefP1 != LastDefPredPair.second) { 10070b57cec5SDimitry Andric InsertPhi = true; 10080b57cec5SDimitry Andric break; 10090b57cec5SDimitry Andric } 10100b57cec5SDimitry Andric if (!InsertPhi) { 10110b57cec5SDimitry Andric // Since NewPhi may be used in other newly added Phis, replace all uses 10120b57cec5SDimitry Andric // of NewPhi with the definition coming from all predecessors (DefP1), 10130b57cec5SDimitry Andric // before deleting it. 10140b57cec5SDimitry Andric NewPhi->replaceAllUsesWith(DefP1); 10150b57cec5SDimitry Andric removeMemoryAccess(NewPhi); 10160b57cec5SDimitry Andric continue; 10170b57cec5SDimitry Andric } 10180b57cec5SDimitry Andric 10190b57cec5SDimitry Andric // Update Phi with new values for new predecessors and old value for all 10200b57cec5SDimitry Andric // other predecessors. Since AddedBlockSet and PrevBlockSet are ordered 10210b57cec5SDimitry Andric // sets, the order of entries in NewPhi is deterministic. 10220b57cec5SDimitry Andric for (auto *Pred : AddedBlockSet) { 10230b57cec5SDimitry Andric auto *LastDefForPred = LastDefAddedPred[Pred]; 10240b57cec5SDimitry Andric for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I) 10250b57cec5SDimitry Andric NewPhi->addIncoming(LastDefForPred, Pred); 10260b57cec5SDimitry Andric } 10270b57cec5SDimitry Andric for (auto *Pred : PrevBlockSet) 10280b57cec5SDimitry Andric for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I) 10290b57cec5SDimitry Andric NewPhi->addIncoming(DefP1, Pred); 10300b57cec5SDimitry Andric } 10310b57cec5SDimitry Andric 10320b57cec5SDimitry Andric // Get all blocks that used to dominate BB and no longer do after adding 10330b57cec5SDimitry Andric // AddedBlockSet, where PrevBlockSet are the previously known predecessors. 
10340b57cec5SDimitry Andric assert(DT.getNode(BB)->getIDom() && "BB does not have valid idom"); 10350b57cec5SDimitry Andric BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet); 10360b57cec5SDimitry Andric assert(PrevIDom && "Previous IDom should exists"); 10370b57cec5SDimitry Andric BasicBlock *NewIDom = DT.getNode(BB)->getIDom()->getBlock(); 10380b57cec5SDimitry Andric assert(NewIDom && "BB should have a new valid idom"); 10390b57cec5SDimitry Andric assert(DT.dominates(NewIDom, PrevIDom) && 10400b57cec5SDimitry Andric "New idom should dominate old idom"); 10410b57cec5SDimitry Andric GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace); 10420b57cec5SDimitry Andric } 10430b57cec5SDimitry Andric 10440b57cec5SDimitry Andric tryRemoveTrivialPhis(InsertedPhis); 10450b57cec5SDimitry Andric // Create the set of blocks that now have a definition. We'll use this to 10460b57cec5SDimitry Andric // compute IDF and add Phis there next. 10470b57cec5SDimitry Andric SmallVector<BasicBlock *, 8> BlocksToProcess; 10480b57cec5SDimitry Andric for (auto &VH : InsertedPhis) 10490b57cec5SDimitry Andric if (auto *MPhi = cast_or_null<MemoryPhi>(VH)) 10500b57cec5SDimitry Andric BlocksToProcess.push_back(MPhi->getBlock()); 10510b57cec5SDimitry Andric 10520b57cec5SDimitry Andric // Compute IDF and add Phis in all IDF blocks that do not have one. 10530b57cec5SDimitry Andric SmallVector<BasicBlock *, 32> IDFBlocks; 10540b57cec5SDimitry Andric if (!BlocksToProcess.empty()) { 10550b57cec5SDimitry Andric ForwardIDFCalculator IDFs(DT, GD); 10560b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 16> DefiningBlocks(BlocksToProcess.begin(), 10570b57cec5SDimitry Andric BlocksToProcess.end()); 10580b57cec5SDimitry Andric IDFs.setDefiningBlocks(DefiningBlocks); 10590b57cec5SDimitry Andric IDFs.calculate(IDFBlocks); 10600b57cec5SDimitry Andric 10610b57cec5SDimitry Andric SmallSetVector<MemoryPhi *, 4> PhisToFill; 10620b57cec5SDimitry Andric // First create all needed Phis. 
10630b57cec5SDimitry Andric for (auto *BBIDF : IDFBlocks) 10640b57cec5SDimitry Andric if (!MSSA->getMemoryAccess(BBIDF)) { 10650b57cec5SDimitry Andric auto *IDFPhi = MSSA->createMemoryPhi(BBIDF); 10660b57cec5SDimitry Andric InsertedPhis.push_back(IDFPhi); 10670b57cec5SDimitry Andric PhisToFill.insert(IDFPhi); 10680b57cec5SDimitry Andric } 10690b57cec5SDimitry Andric // Then update or insert their correct incoming values. 10700b57cec5SDimitry Andric for (auto *BBIDF : IDFBlocks) { 10710b57cec5SDimitry Andric auto *IDFPhi = MSSA->getMemoryAccess(BBIDF); 10720b57cec5SDimitry Andric assert(IDFPhi && "Phi must exist"); 10730b57cec5SDimitry Andric if (!PhisToFill.count(IDFPhi)) { 10740b57cec5SDimitry Andric // Update existing Phi. 10750b57cec5SDimitry Andric // FIXME: some updates may be redundant, try to optimize and skip some. 10760b57cec5SDimitry Andric for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I) 10770b57cec5SDimitry Andric IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I))); 10780b57cec5SDimitry Andric } else { 10790b57cec5SDimitry Andric for (auto &Pair : children<GraphDiffInvBBPair>({GD, BBIDF})) { 10800b57cec5SDimitry Andric BasicBlock *Pi = Pair.second; 10810b57cec5SDimitry Andric IDFPhi->addIncoming(GetLastDef(Pi), Pi); 10820b57cec5SDimitry Andric } 10830b57cec5SDimitry Andric } 10840b57cec5SDimitry Andric } 10850b57cec5SDimitry Andric } 10860b57cec5SDimitry Andric 10870b57cec5SDimitry Andric // Now for all defs in BlocksWithDefsToReplace, if there are uses they no 10880b57cec5SDimitry Andric // longer dominate, replace those with the closest dominating def. 10890b57cec5SDimitry Andric // This will also update optimized accesses, as they're also uses. 
10900b57cec5SDimitry Andric for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) { 10910b57cec5SDimitry Andric if (auto DefsList = MSSA->getWritableBlockDefs(BlockWithDefsToReplace)) { 10920b57cec5SDimitry Andric for (auto &DefToReplaceUses : *DefsList) { 10930b57cec5SDimitry Andric BasicBlock *DominatingBlock = DefToReplaceUses.getBlock(); 10940b57cec5SDimitry Andric Value::use_iterator UI = DefToReplaceUses.use_begin(), 10950b57cec5SDimitry Andric E = DefToReplaceUses.use_end(); 10960b57cec5SDimitry Andric for (; UI != E;) { 10970b57cec5SDimitry Andric Use &U = *UI; 10980b57cec5SDimitry Andric ++UI; 1099*8bcb0991SDimitry Andric MemoryAccess *Usr = cast<MemoryAccess>(U.getUser()); 11000b57cec5SDimitry Andric if (MemoryPhi *UsrPhi = dyn_cast<MemoryPhi>(Usr)) { 11010b57cec5SDimitry Andric BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U); 11020b57cec5SDimitry Andric if (!DT.dominates(DominatingBlock, DominatedBlock)) 11030b57cec5SDimitry Andric U.set(GetLastDef(DominatedBlock)); 11040b57cec5SDimitry Andric } else { 11050b57cec5SDimitry Andric BasicBlock *DominatedBlock = Usr->getBlock(); 11060b57cec5SDimitry Andric if (!DT.dominates(DominatingBlock, DominatedBlock)) { 11070b57cec5SDimitry Andric if (auto *DomBlPhi = MSSA->getMemoryAccess(DominatedBlock)) 11080b57cec5SDimitry Andric U.set(DomBlPhi); 11090b57cec5SDimitry Andric else { 11100b57cec5SDimitry Andric auto *IDom = DT.getNode(DominatedBlock)->getIDom(); 11110b57cec5SDimitry Andric assert(IDom && "Block must have a valid IDom."); 11120b57cec5SDimitry Andric U.set(GetLastDef(IDom->getBlock())); 11130b57cec5SDimitry Andric } 11140b57cec5SDimitry Andric cast<MemoryUseOrDef>(Usr)->resetOptimized(); 11150b57cec5SDimitry Andric } 11160b57cec5SDimitry Andric } 11170b57cec5SDimitry Andric } 11180b57cec5SDimitry Andric } 11190b57cec5SDimitry Andric } 11200b57cec5SDimitry Andric } 11210b57cec5SDimitry Andric tryRemoveTrivialPhis(InsertedPhis); 11220b57cec5SDimitry Andric } 11230b57cec5SDimitry Andric 
// Move What before Where in the MemorySSA IR.
//
// The move is done in three steps: (1) conservatively mark every MemoryPhi
// that uses What as not-to-be-optimized, (2) detach What by forwarding its
// uses to its defining access and letting MemorySSA relocate it in the
// block lists, and (3) re-insert it via insertDef/insertUse, which recompute
// the defining access and rename uses at the new position.
template <class WhereType>
void MemorySSAUpdater::moveTo(MemoryUseOrDef *What, BasicBlock *BB,
                              WhereType Where) {
  // Mark MemoryPhi users of What not to be optimized.
  for (auto *U : What->users())
    if (MemoryPhi *PhiUser = dyn_cast<MemoryPhi>(U))
      NonOptPhis.insert(PhiUser);

  // Replace all our users with our defining access.
  // This must happen before the move: once What is relocated it may no
  // longer dominate its current users.
  What->replaceAllUsesWith(What->getDefiningAccess());

  // Let MemorySSA take care of moving it around in the lists.
  MSSA->moveTo(What, BB, Where);

  // Now reinsert it into the IR and do whatever fixups needed.
  if (auto *MD = dyn_cast<MemoryDef>(What))
    insertDef(MD, /*RenameUses=*/true);
  else
    insertUse(cast<MemoryUse>(What), /*RenameUses=*/true);

  // Clear dangling pointers. We added all MemoryPhi users, but not all
  // of them are removed by fixupDefs().
  NonOptPhis.clear();
}

// Move What before Where in the MemorySSA IR.
// Convenience wrapper around moveTo using Where's block and iterator.
void MemorySSAUpdater::moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), Where->getIterator());
}

// Move What after Where in the MemorySSA IR.
// Convenience wrapper around moveTo: the increment past Where's iterator
// places What immediately after it.
void MemorySSAUpdater::moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
  moveTo(What, Where->getBlock(), ++Where->getIterator());
}

// Move What to the given insertion place (Beginning/End) of block BB.
void MemorySSAUpdater::moveToPlace(MemoryUseOrDef *What, BasicBlock *BB,
                                   MemorySSA::InsertionPlace Where) {
  return moveTo(What, BB, Where);
}

// All accesses in To used to be in From. Move to end and update access lists.
void MemorySSAUpdater::moveAllAccesses(BasicBlock *From, BasicBlock *To,
                                       Instruction *Start) {

  MemorySSA::AccessList *Accs = MSSA->getWritableBlockAccesses(From);
  if (!Accs)
    return;

  assert(Start->getParent() == To && "Incorrect Start instruction");
  // Find the first instruction at or after Start that still has a memory
  // access registered; everything from there to the end of From's access
  // list needs to move.
  MemoryAccess *FirstInNew = nullptr;
  for (Instruction &I : make_range(Start->getIterator(), To->end()))
    if ((FirstInNew = MSSA->getMemoryAccess(&I)))
      break;
  if (FirstInNew) {
    auto *MUD = cast<MemoryUseOrDef>(FirstInNew);
    do {
      // Capture the successor before moving MUD, since moveTo unlinks it.
      auto NextIt = ++MUD->getIterator();
      MemoryUseOrDef *NextMUD = (!Accs || NextIt == Accs->end())
                                    ? nullptr
                                    : cast<MemoryUseOrDef>(&*NextIt);
      MSSA->moveTo(MUD, To, MemorySSA::End);
      // Moving MUD from Accs in the moveTo above, may delete Accs, so we need
      // to retrieve it again.
      Accs = MSSA->getWritableBlockAccesses(From);
      MUD = NextMUD;
    } while (MUD);
  }

  // If all accesses were moved and only a trivial Phi remains, we try to remove
  // that Phi. This is needed when From is going to be deleted.
  auto *Defs = MSSA->getWritableBlockDefs(From);
  if (Defs && !Defs->empty())
    if (auto *Phi = dyn_cast<MemoryPhi>(&*Defs->begin()))
      tryRemoveTrivialPhi(Phi);
}

// Splice case: To takes over From's role at the top of the region, so any
// MemoryPhi in From's successors that referenced From must now reference To.
void MemorySSAUpdater::moveAllAfterSpliceBlocks(BasicBlock *From,
                                                BasicBlock *To,
                                                Instruction *Start) {
  assert(MSSA->getBlockAccesses(To) == nullptr &&
         "To block is expected to be free of MemoryAccesses.");
  moveAllAccesses(From, To, Start);
  for (BasicBlock *Succ : successors(To))
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
      MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
}

// Merge case: From is folded into its unique predecessor To; successor phis
// that listed From as an incoming block are retargeted to To.
void MemorySSAUpdater::moveAllAfterMergeBlocks(BasicBlock *From, BasicBlock *To,
                                               Instruction *Start) {
  assert(From->getUniquePredecessor() == To &&
         "From block is expected to have a single predecessor (To).");
  moveAllAccesses(From, To, Start);
  for (BasicBlock *Succ : successors(From))
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
      MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
}

/// If all arguments of a MemoryPHI are defined by the same incoming
/// argument, return that argument.
static MemoryAccess *onlySingleValue(MemoryPhi *MP) {
  MemoryAccess *MA = nullptr;

  for (auto &Arg : MP->operands()) {
    if (!MA)
      MA = cast<MemoryAccess>(Arg);
    else if (MA != Arg)
      // Two distinct incoming values: the phi is not trivial.
      return nullptr;
  }
  // Note: returns nullptr for a phi with no operands as well.
  return MA;
}

// New was inserted as an immediate predecessor of Old, taking over the edges
// from Preds. Rewire Old's MemoryPhi accordingly: either move it wholesale
// (when all predecessors moved) or split it into a new phi in New.
// IdenticalEdgesWereMerged indicates duplicate edges from one block were
// collapsed into a single phi entry.
void MemorySSAUpdater::wireOldPredecessorsToNewImmediatePredecessor(
    BasicBlock *Old, BasicBlock *New, ArrayRef<BasicBlock *> Preds,
    bool IdenticalEdgesWereMerged) {
  assert(!MSSA->getWritableBlockAccesses(New) &&
         "Access list should be null for a new block.");
  MemoryPhi *Phi = MSSA->getMemoryAccess(Old);
  if (!Phi)
    return;
  if (Old->hasNPredecessors(1)) {
    // All of Old's predecessors now feed New; the phi simply relocates.
    assert(pred_size(New) == Preds.size() &&
           "Should have moved all predecessors.");
    MSSA->moveTo(Phi, New, MemorySSA::Beginning);
  } else {
    assert(!Preds.empty() && "Must be moving at least one predecessor to the "
                             "new immediate predecessor.");
    MemoryPhi *NewPhi = MSSA->createMemoryPhi(New);
    SmallPtrSet<BasicBlock *, 16> PredsSet(Preds.begin(), Preds.end());
    // Currently only support the case of removing a single incoming edge when
    // identical edges were not merged.
    if (!IdenticalEdgesWereMerged)
      assert(PredsSet.size() == Preds.size() &&
             "If identical edges were not merged, we cannot have duplicate "
             "blocks in the predecessors");
    // Transfer the entries for moved predecessors from Old's phi to NewPhi.
    Phi->unorderedDeleteIncomingIf([&](MemoryAccess *MA, BasicBlock *B) {
      if (PredsSet.count(B)) {
        NewPhi->addIncoming(MA, B);
        if (!IdenticalEdgesWereMerged)
          // Erase so a second phi entry for the same block (a duplicate
          // edge) is not also deleted when edges were not merged.
          PredsSet.erase(B);
        return true;
      }
      return false;
    });
    Phi->addIncoming(NewPhi, New);
    // NewPhi may be trivial (all moved entries carried the same value).
    tryRemoveTrivialPhi(NewPhi);
  }
}

void MemorySSAUpdater::removeMemoryAccess(MemoryAccess *MA, bool OptimizePhis) {
  assert(!MSSA->isLiveOnEntryDef(MA) &&
         "Trying to remove the live on entry def");
  // We can only delete phi nodes if they have no uses, or we can replace all
  // uses with a single definition.
  MemoryAccess *NewDefTarget = nullptr;
  if (MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) {
    // Note that it is sufficient to know that all edges of the phi node have
    // the same argument. If they do, by the definition of dominance frontiers
    // (which we used to place this phi), that argument must dominate this phi,
    // and thus, must dominate the phi's uses, and so we will not hit the assert
    // below.
    NewDefTarget = onlySingleValue(MP);
    assert((NewDefTarget || MP->use_empty()) &&
           "We can't delete this memory phi");
  } else {
    // For a use/def, redirect users to whatever this access was reading.
    NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess();
  }

  SmallSetVector<MemoryPhi *, 4> PhisToCheck;

  // Re-point the uses at our defining access
  if (!isa<MemoryUse>(MA) && !MA->use_empty()) {
    // Reset optimized on users of this store, and reset the uses.
    // A few notes:
    // 1. This is a slightly modified version of RAUW to avoid walking the
    // uses twice here.
    // 2. If we wanted to be complete, we would have to reset the optimized
    // flags on users of phi nodes if doing the below makes a phi node have all
    // the same arguments. Instead, we prefer users to removeMemoryAccess those
    // phi nodes, because doing it here would be N^3.
    if (MA->hasValueHandle())
      ValueHandleBase::ValueIsRAUWd(MA, NewDefTarget);
    // Note: We assume MemorySSA is not used in metadata since it's not really
    // part of the IR.

    while (!MA->use_empty()) {
      Use &U = *MA->use_begin();
      if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser()))
        MUD->resetOptimized();
      if (OptimizePhis)
        if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U.getUser()))
          PhisToCheck.insert(MP);
      U.set(NewDefTarget);
    }
  }

  // The call below to erase will destroy MA, so we can't change the order we
  // are doing things here
  MSSA->removeFromLookups(MA);
  MSSA->removeFromLists(MA);

  // Optionally optimize Phi uses. This will recursively remove trivial phis.
  if (!PhisToCheck.empty()) {
    // Hold the phis via WeakVH: removing one trivial phi can recursively
    // delete others still queued here.
    SmallVector<WeakVH, 16> PhisToOptimize{PhisToCheck.begin(),
                                           PhisToCheck.end()};
    PhisToCheck.clear();

    unsigned PhisSize = PhisToOptimize.size();
    while (PhisSize-- > 0)
      if (MemoryPhi *MP =
              cast_or_null<MemoryPhi>(PhisToOptimize.pop_back_val()))
        tryRemoveTrivialPhi(MP);
  }
}

// Remove all MemorySSA state for a set of blocks about to be deleted.
// Two phases: first unhook every cross-block reference (successor phi
// entries, operand references), then erase the accesses themselves.
void MemorySSAUpdater::removeBlocks(
    const SmallSetVector<BasicBlock *, 8> &DeadBlocks) {
  // First delete all uses of BB in MemoryPhis.
  for (BasicBlock *BB : DeadBlocks) {
    Instruction *TI = BB->getTerminator();
    assert(TI && "Basic block expected to have a terminator instruction");
    for (BasicBlock *Succ : successors(TI))
      // Only phis in surviving successors need updating; dead successors
      // are handled by their own iteration.
      if (!DeadBlocks.count(Succ))
        if (MemoryPhi *MP = MSSA->getMemoryAccess(Succ)) {
          MP->unorderedDeleteIncomingBlock(BB);
          tryRemoveTrivialPhi(MP);
        }
    // Drop all references of all accesses in BB
    if (MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB))
      for (MemoryAccess &MA : *Acc)
        MA.dropAllReferences();
  }

  // Next, delete all memory accesses in each block
  for (BasicBlock *BB : DeadBlocks) {
    MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB);
    if (!Acc)
      continue;
    for (auto AB = Acc->begin(), AE = Acc->end(); AB != AE;) {
      MemoryAccess *MA = &*AB;
      // Advance before erasing: removeFromLists invalidates the iterator.
      ++AB;
      MSSA->removeFromLookups(MA);
      MSSA->removeFromLists(MA);
    }
  }
}

// Attempt trivial-phi removal on each phi in UpdatedPHIs. The WeakVHs may
// have been nulled by earlier removals, hence the cast_or_null filter.
void MemorySSAUpdater::tryRemoveTrivialPhis(ArrayRef<WeakVH> UpdatedPHIs) {
  for (auto &VH : UpdatedPHIs)
    if (auto *MPhi = cast_or_null<MemoryPhi>(VH))
      tryRemoveTrivialPhi(MPhi);
}

// I is becoming unreachable terminator-equivalent: drop MemorySSA state for
// I and everything after it in its block, and detach the block from
// successor phis.
void MemorySSAUpdater::changeToUnreachable(const Instruction *I) {
  const BasicBlock *BB = I->getParent();
  // Remove memory accesses in BB for I and all following instructions.
  auto BBI = I->getIterator(), BBE = BB->end();
  // FIXME: If this becomes too expensive, iterate until the first instruction
  // with a memory access, then iterate over MemoryAccesses.
  while (BBI != BBE)
    removeMemoryAccess(&*(BBI++));
  // Update phis in BB's successors to remove BB.
  SmallVector<WeakVH, 16> UpdatedPHIs;
  for (const BasicBlock *Successor : successors(BB)) {
    removeDuplicatePhiEdgesBetween(BB, Successor);
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Successor)) {
      MPhi->unorderedDeleteIncomingBlock(BB);
      UpdatedPHIs.push_back(MPhi);
    }
  }
  // Optimize trivial phis.
  tryRemoveTrivialPhis(UpdatedPHIs);
}

// The conditional branch BI is being rewritten to branch unconditionally to
// To; remove BB's phi entries in every successor other than To.
void MemorySSAUpdater::changeCondBranchToUnconditionalTo(const BranchInst *BI,
                                                         const BasicBlock *To) {
  const BasicBlock *BB = BI->getParent();
  SmallVector<WeakVH, 16> UpdatedPHIs;
  for (const BasicBlock *Succ : successors(BB)) {
    removeDuplicatePhiEdgesBetween(BB, Succ);
    if (Succ != To)
      if (auto *MPhi = MSSA->getMemoryAccess(Succ)) {
        MPhi->unorderedDeleteIncomingBlock(BB);
        UpdatedPHIs.push_back(MPhi);
      }
  }
  // Optimize trivial phis.
  tryRemoveTrivialPhis(UpdatedPHIs);
}

// Create a new access for I with the given defining access and insert it at
// Point (Beginning/End) of BB's access list. BB need not be I's parent;
// callers are responsible for the placement being coherent.
MemoryAccess *MemorySSAUpdater::createMemoryAccessInBB(
    Instruction *I, MemoryAccess *Definition, const BasicBlock *BB,
    MemorySSA::InsertionPlace Point) {
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsForBlock(NewAccess, BB, Point);
  return NewAccess;
}

// Create a new access for I, defined by Definition, placed immediately
// before InsertPt in the same block's access list.
MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessBefore(
    Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              InsertPt->getIterator());
  return NewAccess;
}

// Create a new access for I, defined by Definition, placed immediately
// after InsertPt in the same block's access list.
MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessAfter(
    Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt) {
  assert(I->getParent() == InsertPt->getBlock() &&
         "New and old access must be in the same block");
  MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition);
  MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(),
                              ++InsertPt->getIterator());
  return NewAccess;
}