//===-- MemorySSAUpdater.cpp - Memory SSA Updater--------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------===//
//
// This file implements the MemorySSAUpdater class.
//
//===----------------------------------------------------------------===//
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Analysis/IteratedDominanceFrontier.h"
#include "llvm/Analysis/LoopIterator.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Dominators.h"
#include "llvm/Support/Debug.h"
#include <algorithm>

#define DEBUG_TYPE "memoryssa"
using namespace llvm;

// This is the marker algorithm from "Simple and Efficient Construction of
// Static Single Assignment Form".
// The simple, non-marker algorithm places phi nodes at any join point.
// Here, we place markers, and only place phi nodes if they end up necessary.
// They are only necessary if they break a cycle (i.e., we recursively visit
// ourselves again), or we discover, while getting the value of the operands,
// that there are two or more definitions needing to be merged.
// This will still leave non-minimal form in the case of irreducible control
// flow, where phi nodes may be in cycles with themselves, but be unnecessary.
MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  // First, do a cache lookup. Without this cache, certain CFG structures
  // (like a series of if statements) take exponential time to visit.
  auto Cached = CachedPreviousDef.find(BB);
  if (Cached != CachedPreviousDef.end())
    return Cached->second;

  // If this method is called from an unreachable block, return LoE.
  if (!MSSA->DT->isReachableFromEntry(BB))
    return MSSA->getLiveOnEntryDef();

  if (BasicBlock *Pred = BB->getUniquePredecessor()) {
    VisitedBlocks.insert(BB);
    // Single predecessor case: just recurse, we can only have one definition.
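    // With a unique predecessor there is no merge point, so no MemoryPhi is
    // needed in BB itself; the previous definition is whatever is live at the
    // end of Pred.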
    MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }

  if (VisitedBlocks.count(BB)) {
    // We hit our node again, meaning we had a cycle; we must insert a phi
    // node to break it so we have an operand. The only case this will
    // insert useless phis is if we have irreducible control flow.
    MemoryAccess *Result = MSSA->createMemoryPhi(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }

  if (VisitedBlocks.insert(BB).second) {
    // Mark us visited so we can detect a cycle.
    SmallVector<TrackingVH<MemoryAccess>, 8> PhiOps;

    // Recurse to get the values in our predecessors for placement of a
    // potential phi node. This will insert phi nodes if we cycle in order to
    // break the cycle and have an operand.
    bool UniqueIncomingAccess = true;
    MemoryAccess *SingleAccess = nullptr;
    for (auto *Pred : predecessors(BB)) {
      if (MSSA->DT->isReachableFromEntry(Pred)) {
        auto *IncomingAccess = getPreviousDefFromEnd(Pred, CachedPreviousDef);
        if (!SingleAccess)
          SingleAccess = IncomingAccess;
        else if (IncomingAccess != SingleAccess)
          UniqueIncomingAccess = false;
        PhiOps.push_back(IncomingAccess);
      } else
        PhiOps.push_back(MSSA->getLiveOnEntryDef());
    }

    // Now try to simplify the ops to avoid placing a phi.
    // This may return null if we never created a phi yet; that's okay.
    MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MSSA->getMemoryAccess(BB));

    // See if we can avoid the phi by simplifying it.
    auto *Result = tryRemoveTrivialPhi(Phi, PhiOps);
    // If we couldn't simplify, we may have to create a phi.
    if (Result == Phi && UniqueIncomingAccess && SingleAccess) {
      // A concrete Phi only exists if we created an empty one to break a cycle.
      if (Phi) {
        assert(Phi->operands().empty() && "Expected empty Phi");
        Phi->replaceAllUsesWith(SingleAccess);
        removeMemoryAccess(Phi);
      }
      Result = SingleAccess;
    } else if (Result == Phi && !(UniqueIncomingAccess && SingleAccess)) {
      if (!Phi)
        Phi = MSSA->createMemoryPhi(BB);

      // See if the existing phi operands match what we need.
      // Unlike normal SSA, we only allow one phi node per block, so we can't
      // just create a new one.
      if (Phi->getNumOperands() != 0) {
        // FIXME: Figure out whether this is dead code and if so remove it.
        if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) {
          // These will have been filled in by the recursive read we did above.
          llvm::copy(PhiOps, Phi->op_begin());
          std::copy(pred_begin(BB), pred_end(BB), Phi->block_begin());
        }
      } else {
        unsigned i = 0;
        for (auto *Pred : predecessors(BB))
          Phi->addIncoming(&*PhiOps[i++], Pred);
        InsertedPHIs.push_back(Phi);
      }
      Result = Phi;
    }

    // Set ourselves up for the next variable by resetting visited state.
    VisitedBlocks.erase(BB);
    CachedPreviousDef.insert({BB, Result});
    return Result;
  }
  llvm_unreachable("Should have hit one of the three cases above");
}

// This starts at the memory access, and goes backwards in the block to find
// the previous definition. If a definition is not found in the block of the
// access, it continues globally, creating phi nodes to ensure we have a single
// definition.
MemoryAccess *MemorySSAUpdater::getPreviousDef(MemoryAccess *MA) {
  if (auto *LocalResult = getPreviousDefInBlock(MA))
    return LocalResult;
  DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
  return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef);
}

// This starts at the memory access, and goes backwards in the block to find
// the previous definition. If the definition is not found in the block of the
// access, it returns nullptr.
MemoryAccess *MemorySSAUpdater::getPreviousDefInBlock(MemoryAccess *MA) {
  auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock());

  // It's possible there are no defs, or we got handed the first def to start.
  if (Defs) {
    // If this is a def, we can just use the def iterators.
    if (!isa<MemoryUse>(MA)) {
      auto Iter = MA->getReverseDefsIterator();
      ++Iter;
      if (Iter != Defs->rend())
        return &*Iter;
    } else {
      // Otherwise, we have to walk the all-access iterator.
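      // MemoryUses are not linked into the block's DefsList, so walk the
      // full access list backwards from MA and return the first def we see.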
      auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend();
      for (auto &U : make_range(++MA->getReverseIterator(), End))
        if (!isa<MemoryUse>(U))
          return cast<MemoryAccess>(&U);
      // Note that if MA comes before Defs->begin(), we won't hit a def.
      return nullptr;
    }
  }
  return nullptr;
}

// This starts at the end of the block.
MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd(
    BasicBlock *BB,
    DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) {
  auto *Defs = MSSA->getWritableBlockDefs(BB);

  if (Defs) {
    CachedPreviousDef.insert({BB, &*Defs->rbegin()});
    return &*Defs->rbegin();
  }

  return getPreviousDefRecursive(BB, CachedPreviousDef);
}
// Recurse over a set of phi uses to eliminate the trivial ones.
MemoryAccess *MemorySSAUpdater::recursePhi(MemoryAccess *Phi) {
  if (!Phi)
    return nullptr;
  TrackingVH<MemoryAccess> Res(Phi);
  SmallVector<TrackingVH<Value>, 8> Uses;
  std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses));
  for (auto &U : Uses)
    if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U))
      tryRemoveTrivialPhi(UsePhi);
  return Res;
}

// Eliminate trivial phis.
// Phis are trivial if they are defined either by themselves, or all the same
// argument.
// I.e., phi(a, a) or b = phi(a, b) or c = phi(a, a, c).
// We recursively try to remove them.
MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi) {
  assert(Phi && "Can only remove concrete Phi.");
  auto OperRange = Phi->operands();
  return tryRemoveTrivialPhi(Phi, OperRange);
}
template <class RangeType>
MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi,
                                                    RangeType &Operands) {
  // Bail out on non-opt Phis.
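  // Phis in NonOptPhis are still being populated (e.g. by insertDef) and must
  // be preserved even if they currently look trivial; they are removed from
  // the set once fixupDefs has wired them up.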
  if (NonOptPhis.count(Phi))
    return Phi;

  // Detect equal or self arguments.
  MemoryAccess *Same = nullptr;
  for (auto &Op : Operands) {
    // If the same or self, good so far.
    if (Op == Phi || Op == Same)
      continue;
    // Not the same; return the phi, since we cannot eliminate it.
    if (Same)
      return Phi;
    Same = cast<MemoryAccess>(&*Op);
  }
  // Never found a non-self reference, so the phi is undef.
  if (Same == nullptr)
    return MSSA->getLiveOnEntryDef();
  if (Phi) {
    Phi->replaceAllUsesWith(Same);
    removeMemoryAccess(Phi);
  }

  // We should only end up recursing in case we replaced something, in which
  // case, we may have made other Phis trivial.
  return recursePhi(Same);
}

void MemorySSAUpdater::insertUse(MemoryUse *MU, bool RenameUses) {
  VisitedBlocks.clear();
  InsertedPHIs.clear();
  MU->setDefiningAccess(getPreviousDef(MU));

  // In cases without unreachable blocks, because uses do not create new
  // may-defs, there are only two cases:
  // 1. There was a def already below us, and therefore, we should not have
  // created a phi node because it was already needed for the def.
  //
  // 2. There is no def below us, and therefore, there is no extra renaming work
  // to do.

  // In cases with unreachable blocks, where the unnecessary Phis were
  // optimized out, adding the Use may re-insert those Phis. Hence, when
  // inserting Uses outside of the MSSA creation process, and new Phis were
  // added, rename all uses if we are asked.

  if (!RenameUses && !InsertedPHIs.empty()) {
    auto *Defs = MSSA->getBlockDefs(MU->getBlock());
    (void)Defs;
    assert((!Defs || (++Defs->begin() == Defs->end())) &&
           "Block may have only a Phi or no defs");
  }

  if (RenameUses && InsertedPHIs.size()) {
    SmallPtrSet<BasicBlock *, 16> Visited;
    BasicBlock *StartBlock = MU->getBlock();

    if (auto *Defs = MSSA->getWritableBlockDefs(StartBlock)) {
      MemoryAccess *FirstDef = &*Defs->begin();
      // Convert to incoming value if it's a memorydef. A phi *is* already an
      // incoming value.
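      // renamePass expects the value live on entry to the block, so for a
      // MemoryDef we pass its defining access rather than the def itself.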
      if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
        FirstDef = MD->getDefiningAccess();

      MSSA->renamePass(MU->getBlock(), FirstDef, Visited);
    }
    // We just inserted a phi into this block, so the incoming value will
    // become the phi anyway, so it does not matter what we pass.
    for (auto &MP : InsertedPHIs)
      if (MemoryPhi *Phi = cast_or_null<MemoryPhi>(MP))
        MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
  }
}

// Set every incoming edge {BB, MP->getBlock()} of MemoryPhi MP to NewDef.
static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB,
                                      MemoryAccess *NewDef) {
  // Replace every operand whose incoming block is BB with the new defining
  // access.
  int i = MP->getBasicBlockIndex(BB);
  assert(i != -1 && "Should have found the basic block in the phi");
  // We can't just compare i against getNumOperands since one is signed and the
  // other is not. So use it to index into the block iterator.
  for (const BasicBlock *BlockBB : llvm::drop_begin(MP->blocks(), i)) {
    if (BlockBB != BB)
      break;
    MP->setIncomingValue(i, NewDef);
    ++i;
  }
}

// A brief description of the algorithm:
// First, we compute what should define the new def, using the SSA
// construction algorithm.
// Then, we update the defs below us (and any new phi nodes) in the graph to
// point to the correct new defs, to ensure we only have one variable, and no
// disconnected stores.
void MemorySSAUpdater::insertDef(MemoryDef *MD, bool RenameUses) {
  // Don't bother updating dead code.
  if (!MSSA->DT->isReachableFromEntry(MD->getBlock())) {
    MD->setDefiningAccess(MSSA->getLiveOnEntryDef());
    return;
  }

  VisitedBlocks.clear();
  InsertedPHIs.clear();

  // See if we had a local def, and if not, go hunting.
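  // getPreviousDef first searches backwards within MD's own block; failing
  // that, it walks the CFG backwards and creates any MemoryPhis needed to
  // merge paths, recording them in InsertedPHIs.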
  MemoryAccess *DefBefore = getPreviousDef(MD);
  bool DefBeforeSameBlock = false;
  if (DefBefore->getBlock() == MD->getBlock() &&
      !(isa<MemoryPhi>(DefBefore) &&
        llvm::is_contained(InsertedPHIs, DefBefore)))
    DefBeforeSameBlock = true;

  // There is a def before us, which means we can replace any store/phi uses
  // of that thing with us, since we are in the way of whatever was there
  // before.
  // We now define that def's memorydefs and memoryphis.
  if (DefBeforeSameBlock) {
    DefBefore->replaceUsesWithIf(MD, [MD](Use &U) {
      // Leave the MemoryUses alone.
      // Also make sure we skip ourselves to avoid self references.
      User *Usr = U.getUser();
      return !isa<MemoryUse>(Usr) && Usr != MD;
      // Defs are automatically unoptimized when the user is set to MD below,
      // because the isOptimized() call will fail to find the same ID.
    });
  }

  // and that def is now our defining access.
  MD->setDefiningAccess(DefBefore);

  SmallVector<WeakVH, 8> FixupList(InsertedPHIs.begin(), InsertedPHIs.end());

  SmallSet<WeakVH, 8> ExistingPhis;

  // Remember the index where we may insert new phis.
  unsigned NewPhiIndex = InsertedPHIs.size();
  if (!DefBeforeSameBlock) {
    // If there was a local def before us, we must have the same effect it
    // did. Because every may-def is the same, any phis/etc we would create, it
    // would also have created. If there was no local def before us, we
    // performed a global update, and have to search all successors and make
    // sure we update the first def in each of them (following all paths until
    // we hit the first def along each path). This may also insert phi nodes.
    // TODO: There are other cases where we can skip this work, such as when we
    // have a single successor, and only used a straight line of single pred
    // blocks backwards to find the def. To make that work, we'd have to track
    // whether getDefRecursive only ever used the single predecessor case.
    // These types of paths also only exist in between CFG simplifications.

    // If this is the first def in the block and this insert is in an arbitrary
    // place, compute IDF and place phis.
    SmallPtrSet<BasicBlock *, 2> DefiningBlocks;

    // If this is the last Def in the block, we may need additional Phis.
    // Compute IDF in all cases, as renaming needs to be done even when MD is
    // not the last access, because it can introduce a new access past which a
    // previous access was optimized; that access needs to be reoptimized.
    DefiningBlocks.insert(MD->getBlock());
    for (const auto &VH : InsertedPHIs)
      if (const auto *RealPHI = cast_or_null<MemoryPhi>(VH))
        DefiningBlocks.insert(RealPHI->getBlock());
    ForwardIDFCalculator IDFs(*MSSA->DT);
    SmallVector<BasicBlock *, 32> IDFBlocks;
    IDFs.setDefiningBlocks(DefiningBlocks);
    IDFs.calculate(IDFBlocks);
    SmallVector<AssertingVH<MemoryPhi>, 4> NewInsertedPHIs;
    for (auto *BBIDF : IDFBlocks) {
      auto *MPhi = MSSA->getMemoryAccess(BBIDF);
      if (!MPhi) {
        MPhi = MSSA->createMemoryPhi(BBIDF);
        NewInsertedPHIs.push_back(MPhi);
      } else {
        ExistingPhis.insert(MPhi);
      }
      // Add the phis created into the IDF blocks to NonOptPhis, so they are not
      // optimized out as trivial by the call to getPreviousDefFromEnd below.
      // Once they are complete, all these Phis are added to the FixupList, and
      // removed from NonOptPhis inside fixupDefs(). Existing Phis in IDF may
      // need fixing as well, and potentially be trivial before this insertion,
      // hence add all IDF Phis. See PR43044.
      NonOptPhis.insert(MPhi);
    }
    for (auto &MPhi : NewInsertedPHIs) {
      auto *BBIDF = MPhi->getBlock();
      for (auto *Pred : predecessors(BBIDF)) {
        DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef;
        MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef), Pred);
      }
    }

    // Re-take the index where we're adding the new phis, because the above
    // call to getPreviousDefFromEnd may have inserted into InsertedPHIs.
    NewPhiIndex = InsertedPHIs.size();
    for (auto &MPhi : NewInsertedPHIs) {
      InsertedPHIs.push_back(&*MPhi);
      FixupList.push_back(&*MPhi);
    }

    FixupList.push_back(MD);
  }

  // Remember the index where we stopped inserting new phis above, since the
  // fixupDefs call in the loop below may insert more that are already minimal.
  unsigned NewPhiIndexEnd = InsertedPHIs.size();

  while (!FixupList.empty()) {
    unsigned StartingPHISize = InsertedPHIs.size();
    fixupDefs(FixupList);
    FixupList.clear();
    // Put any new phis on the fixup list, and process them.
    FixupList.append(InsertedPHIs.begin() + StartingPHISize, InsertedPHIs.end());
  }

  // Optimize potentially non-minimal phis added in this method.
  unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex;
  if (NewPhiSize)
    tryRemoveTrivialPhis(ArrayRef<WeakVH>(&InsertedPHIs[NewPhiIndex], NewPhiSize));

  // Now that all fixups are done, rename all uses if we are asked. The defs are
  // guaranteed to be in reachable code due to the check at the method entry.
  BasicBlock *StartBlock = MD->getBlock();
  if (RenameUses) {
    SmallPtrSet<BasicBlock *, 16> Visited;
    // We are guaranteed there is a def in the block, because we just got it
    // handed to us in this function.
    MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin();
    // Convert to incoming value if it's a memorydef. A phi *is* already an
    // incoming value.
    if (auto *MD = dyn_cast<MemoryDef>(FirstDef))
      FirstDef = MD->getDefiningAccess();

    MSSA->renamePass(MD->getBlock(), FirstDef, Visited);
    // We just inserted a phi into this block, so the incoming value will become
    // the phi anyway, so it does not matter what we pass.
    for (auto &MP : InsertedPHIs) {
      MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP);
      if (Phi)
        MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
    }
    // Existing Phi blocks may need renaming too, if an access was previously
    // optimized and the inserted Defs "cover" the Optimized value.
    for (const auto &MP : ExistingPhis) {
      MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP);
      if (Phi)
        MSSA->renamePass(Phi->getBlock(), nullptr, Visited);
    }
  }
}

void MemorySSAUpdater::fixupDefs(const SmallVectorImpl<WeakVH> &Vars) {
  SmallPtrSet<const BasicBlock *, 8> Seen;
  SmallVector<const BasicBlock *, 16> Worklist;
  for (const auto &Var : Vars) {
    MemoryAccess *NewDef = dyn_cast_or_null<MemoryAccess>(Var);
    if (!NewDef)
      continue;
    // First, see if there is a local def after the operand.
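    // A local def here means an entry later in the same block's DefsList; if
    // one exists, only its defining access needs updating and no CFG walk is
    // required.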
    auto *Defs = MSSA->getWritableBlockDefs(NewDef->getBlock());
    auto DefIter = NewDef->getDefsIterator();

    // The temporary Phi is being fixed; remove it from the set of Phis we do
    // not optimize.
    if (MemoryPhi *Phi = dyn_cast<MemoryPhi>(NewDef))
      NonOptPhis.erase(Phi);

    // If there is a local def after us, we only have to rename that.
    if (++DefIter != Defs->end()) {
      cast<MemoryDef>(DefIter)->setDefiningAccess(NewDef);
      continue;
    }

    // Otherwise, we need to search down through the CFG.
    // For each of our successors, handle it directly if there is a phi, or
    // place it on the fixup worklist.
    for (const auto *S : successors(NewDef->getBlock())) {
      if (auto *MP = MSSA->getMemoryAccess(S))
        setMemoryPhiValueForBlock(MP, NewDef->getBlock(), NewDef);
      else
        Worklist.push_back(S);
    }

    while (!Worklist.empty()) {
      const BasicBlock *FixupBlock = Worklist.pop_back_val();

      // Get the first def in the block that isn't a phi node.
      if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) {
        auto *FirstDef = &*Defs->begin();
        // The loop above and below should have taken care of phi nodes.
        assert(!isa<MemoryPhi>(FirstDef) &&
               "Should have already handled phi nodes!");
        // We are now this def's defining access; make sure we actually
        // dominate it.
        assert(MSSA->dominates(NewDef, FirstDef) &&
               "Should have dominated the new access");

        // This may insert new phi nodes, because we are not guaranteed the
        // block we are processing has a single pred, and depending where the
        // store was inserted, it may require phi nodes below it.
        cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef));
        return;
      }
      // We didn't find a def, so we must continue.
      for (const auto *S : successors(FixupBlock)) {
        // If there is a phi node, handle it.
        // Otherwise, put the block on the worklist.
        if (auto *MP = MSSA->getMemoryAccess(S))
          setMemoryPhiValueForBlock(MP, FixupBlock, NewDef);
        else {
          // If we cycle, we should have ended up at a phi node that we already
          // processed. FIXME: Double check this
          if (!Seen.insert(S).second)
            continue;
          Worklist.push_back(S);
        }
      }
    }
  }
}

void MemorySSAUpdater::removeEdge(BasicBlock *From, BasicBlock *To) {
  if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) {
    MPhi->unorderedDeleteIncomingBlock(From);
    tryRemoveTrivialPhi(MPhi);
  }
}

void MemorySSAUpdater::removeDuplicatePhiEdgesBetween(const BasicBlock *From,
                                                      const BasicBlock *To) {
  if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) {
    bool Found = false;
    MPhi->unorderedDeleteIncomingIf([&](const MemoryAccess *, BasicBlock *B) {
      if (From != B)
        return false;
      if (Found)
        return true;
      Found = true;
      return false;
    });
    tryRemoveTrivialPhi(MPhi);
  }
}

/// If all arguments of a MemoryPHI are defined by the same incoming
/// argument, return that argument.
static MemoryAccess *onlySingleValue(MemoryPhi *MP) {
  MemoryAccess *MA = nullptr;

  for (auto &Arg : MP->operands()) {
    if (!MA)
      MA = cast<MemoryAccess>(Arg);
    else if (MA != Arg)
      return nullptr;
  }
  return MA;
}

static MemoryAccess *getNewDefiningAccessForClone(MemoryAccess *MA,
                                                  const ValueToValueMapTy &VMap,
                                                  PhiToDefMap &MPhiMap,
                                                  MemorySSA *MSSA) {
  MemoryAccess *InsnDefining = MA;
  if (MemoryDef *DefMUD = dyn_cast<MemoryDef>(InsnDefining)) {
    if (!MSSA->isLiveOnEntryDef(DefMUD)) {
      Instruction *DefMUDI = DefMUD->getMemoryInst();
      assert(DefMUDI && "Found MemoryUseOrDef with no Instruction.");
      if (Instruction *NewDefMUDI =
              cast_or_null<Instruction>(VMap.lookup(DefMUDI))) {
        InsnDefining = MSSA->getMemoryAccess(NewDefMUDI);
        if (!InsnDefining || isa<MemoryUse>(InsnDefining)) {
          // The clone was simplified and is no longer a MemoryDef; look
          // further up.
          InsnDefining = getNewDefiningAccessForClone(
              DefMUD->getDefiningAccess(), VMap, MPhiMap, MSSA);
        }
      }
    }
  } else {
    MemoryPhi *DefPhi = cast<MemoryPhi>(InsnDefining);
    if (MemoryAccess *NewDefPhi = MPhiMap.lookup(DefPhi))
      InsnDefining = NewDefPhi;
  }
  assert(InsnDefining && "Defining instruction cannot be nullptr.");
  return InsnDefining;
}

void MemorySSAUpdater::cloneUsesAndDefs(BasicBlock *BB, BasicBlock *NewBB,
                                        const ValueToValueMapTy &VMap,
                                        PhiToDefMap &MPhiMap,
                                        bool CloneWasSimplified) {
  const MemorySSA::AccessList *Acc = MSSA->getBlockAccesses(BB);
  if (!Acc)
    return;
  for (const MemoryAccess &MA : *Acc) {
    if (const MemoryUseOrDef *MUD = dyn_cast<MemoryUseOrDef>(&MA)) {
      Instruction *Insn = MUD->getMemoryInst();
      // Entry does not exist if the clone of the block did not clone all
      // instructions. This occurs in LoopRotate when cloning instructions
      // from the old header to the old preheader. The cloned instruction may
      // also be a simplified Value, not an Instruction (see LoopRotate).
      // Also in LoopRotate, even when it's an instruction, due to it being
      // simplified, it may be a Use rather than a Def, so we cannot use MUD as
      // a template. Calls coming from updateForClonedBlockIntoPred ensure this.
      if (Instruction *NewInsn =
              dyn_cast_or_null<Instruction>(VMap.lookup(Insn))) {
        MemoryAccess *NewUseOrDef = MSSA->createDefinedAccess(
            NewInsn,
            getNewDefiningAccessForClone(MUD->getDefiningAccess(), VMap,
                                         MPhiMap, MSSA),
            /*Template=*/CloneWasSimplified ? nullptr : MUD,
            /*CreationMustSucceed=*/false);
        if (NewUseOrDef)
          MSSA->insertIntoListsForBlock(NewUseOrDef, NewBB, MemorySSA::End);
      }
    }
  }
}

void MemorySSAUpdater::updatePhisWhenInsertingUniqueBackedgeBlock(
    BasicBlock *Header, BasicBlock *Preheader, BasicBlock *BEBlock) {
  auto *MPhi = MSSA->getMemoryAccess(Header);
  if (!MPhi)
    return;

  // Create a phi node in the backedge block and populate it with the same
  // incoming values as MPhi. Skip incoming values coming from Preheader.
  auto *NewMPhi = MSSA->createMemoryPhi(BEBlock);
  bool HasUniqueIncomingValue = true;
  MemoryAccess *UniqueValue = nullptr;
  for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) {
    BasicBlock *IBB = MPhi->getIncomingBlock(I);
    MemoryAccess *IV = MPhi->getIncomingValue(I);
    if (IBB != Preheader) {
      NewMPhi->addIncoming(IV, IBB);
      if (HasUniqueIncomingValue) {
        if (!UniqueValue)
          UniqueValue = IV;
        else if (UniqueValue != IV)
          HasUniqueIncomingValue = false;
      }
    }
  }

  // Update incoming edges into MPhi. Remove all but the incoming edge from
  // Preheader. Add an edge from NewMPhi.
  auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader);
  MPhi->setIncomingValue(0, AccFromPreheader);
  MPhi->setIncomingBlock(0, Preheader);
  for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I)
    MPhi->unorderedDeleteIncoming(I);
  MPhi->addIncoming(NewMPhi, BEBlock);

  // If NewMPhi is a trivial phi, remove it. Its use in the header MPhi will be
  // replaced with the unique value.
  tryRemoveTrivialPhi(NewMPhi);
}

void MemorySSAUpdater::updateForClonedLoop(const LoopBlocksRPO &LoopBlocks,
                                           ArrayRef<BasicBlock *> ExitBlocks,
                                           const ValueToValueMapTy &VMap,
                                           bool IgnoreIncomingWithNoClones) {
  PhiToDefMap MPhiMap;

  auto FixPhiIncomingValues = [&](MemoryPhi *Phi, MemoryPhi *NewPhi) {
    assert(Phi && NewPhi && "Invalid Phi nodes.");
    BasicBlock *NewPhiBB = NewPhi->getBlock();
    SmallPtrSet<BasicBlock *, 4> NewPhiBBPreds(pred_begin(NewPhiBB),
                                               pred_end(NewPhiBB));
    for (unsigned It = 0, E = Phi->getNumIncomingValues(); It < E; ++It) {
      MemoryAccess *IncomingAccess = Phi->getIncomingValue(It);
      BasicBlock *IncBB = Phi->getIncomingBlock(It);

      if (BasicBlock *NewIncBB = cast_or_null<BasicBlock>(VMap.lookup(IncBB)))
        IncBB = NewIncBB;
      else if (IgnoreIncomingWithNoClones)
        continue;

      // Now we have IncBB, and will need to add incoming from it to NewPhi.

      // If IncBB is not a predecessor of NewPhiBB, then do not add it.
      // NewPhiBB was cloned without that edge.
      if (!NewPhiBBPreds.count(IncBB))
        continue;

      // Determine the incoming value and add it as incoming from IncBB.
      NewPhi->addIncoming(
          getNewDefiningAccessForClone(IncomingAccess, VMap, MPhiMap, MSSA),
          IncBB);
    }
    if (auto *SingleAccess = onlySingleValue(NewPhi)) {
      MPhiMap[Phi] = SingleAccess;
      removeMemoryAccess(NewPhi);
    }
  };

  auto ProcessBlock = [&](BasicBlock *BB) {
    BasicBlock *NewBlock = cast_or_null<BasicBlock>(VMap.lookup(BB));
    if (!NewBlock)
      return;

    assert(!MSSA->getWritableBlockAccesses(NewBlock) &&
           "Cloned block should have no accesses");

    // Add MemoryPhi.
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) {
      MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock);
      MPhiMap[MPhi] = NewPhi;
    }
    // Update Uses and Defs.
    cloneUsesAndDefs(BB, NewBlock, VMap, MPhiMap);
  };

  for (auto *BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
    ProcessBlock(BB);

  for (auto *BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks))
    if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB))
      if (MemoryAccess *NewPhi = MPhiMap.lookup(MPhi))
        FixPhiIncomingValues(MPhi, cast<MemoryPhi>(NewPhi));
}

void MemorySSAUpdater::updateForClonedBlockIntoPred(
    BasicBlock *BB, BasicBlock *P1, const ValueToValueMapTy &VM) {
  // All defs/phis from outside BB that are used in BB are valid uses in P1,
  // since those defs/phis must have dominated BB and therefore also dominate
  // P1. Defs from BB being used in BB will be replaced with the cloned defs
  // from VM. The uses of BB's Phi (if it exists) in BB will be replaced by the
  // incoming def into the Phi from P1.
  // Instructions cloned into the predecessor are in practice sometimes
  // simplified, so disable the use of the template, and create an access from
  // scratch.
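  // For example (a sketch): if BB has Phi = MemoryPhi([P1, D1], [P2, D2]),
  // then uses of Phi inside the instructions cloned into P1 should resolve to
  // D1; mapping the Phi to getIncomingValueForBlock(P1) below achieves that.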
  PhiToDefMap MPhiMap;
  if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB))
    MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1);
  cloneUsesAndDefs(BB, P1, VM, MPhiMap, /*CloneWasSimplified=*/true);
}

template <typename Iter>
void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop(
    ArrayRef<BasicBlock *> ExitBlocks, Iter ValuesBegin, Iter ValuesEnd,
    DominatorTree &DT) {
  SmallVector<CFGUpdate, 4> Updates;
  // Update/insert phis in all successors of exit blocks.
  for (auto *Exit : ExitBlocks)
    for (const ValueToValueMapTy *VMap : make_range(ValuesBegin, ValuesEnd))
      if (BasicBlock *NewExit = cast_or_null<BasicBlock>(VMap->lookup(Exit))) {
        BasicBlock *ExitSucc = NewExit->getTerminator()->getSuccessor(0);
        Updates.push_back({DT.Insert, NewExit, ExitSucc});
      }
  applyInsertUpdates(Updates, DT);
}

void MemorySSAUpdater::updateExitBlocksForClonedLoop(
    ArrayRef<BasicBlock *> ExitBlocks, const ValueToValueMapTy &VMap,
    DominatorTree &DT) {
  const ValueToValueMapTy *const Arr[] = {&VMap};
  privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr),
                                       std::end(Arr), DT);
}

void MemorySSAUpdater::updateExitBlocksForClonedLoop(
    ArrayRef<BasicBlock *> ExitBlocks,
    ArrayRef<std::unique_ptr<ValueToValueMapTy>> VMaps, DominatorTree &DT) {
  auto GetPtr = [&](const std::unique_ptr<ValueToValueMapTy> &I) {
    return I.get();
  };
  using MappedIteratorType =
      mapped_iterator<const std::unique_ptr<ValueToValueMapTy> *,
                      decltype(GetPtr)>;
  auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr);
  auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr);
  privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT);
}

void MemorySSAUpdater::applyUpdates(ArrayRef<CFGUpdate> Updates,
                                    DominatorTree &DT, bool UpdateDT) {
  SmallVector<CFGUpdate, 4> DeleteUpdates;
  SmallVector<CFGUpdate, 4> RevDeleteUpdates;
  SmallVector<CFGUpdate, 4> InsertUpdates;
  for (const auto &Update : Updates) {
    if (Update.getKind() == DT.Insert)
      InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
    else {
      DeleteUpdates.push_back({DT.Delete, Update.getFrom(), Update.getTo()});
      RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()});
    }
  }

  if (!DeleteUpdates.empty()) {
    if (!InsertUpdates.empty()) {
      if (!UpdateDT) {
        SmallVector<CFGUpdate, 0> Empty;
        // Deletes are applied in reverse, because this CFGView is pretending
        // the deletes did not happen yet, hence the edges still exist.
        DT.applyUpdates(Empty, RevDeleteUpdates);
      } else {
        // Apply all updates, with the RevDeleteUpdates as PostCFGView.
        DT.applyUpdates(Updates, RevDeleteUpdates);
      }

      // Note: the MSSA update below doesn't distinguish between a GD with
      // (RevDelete,false) and (Delete, true), but this matters for the DT
      // updates above; for "children" purposes they are equivalent; but the
      // updates themselves convey the desired update, used inside DT only.
      GraphDiff<BasicBlock *> GD(RevDeleteUpdates);
      applyInsertUpdates(InsertUpdates, DT, &GD);
      // Update DT to redelete edges; this matches the real CFG so we can
      // perform the standard update without a postview of the CFG.
      DT.applyUpdates(DeleteUpdates);
    } else {
      if (UpdateDT)
        DT.applyUpdates(DeleteUpdates);
    }
  } else {
    if (UpdateDT)
      DT.applyUpdates(Updates);
    GraphDiff<BasicBlock *> GD;
    applyInsertUpdates(InsertUpdates, DT, &GD);
  }

  // Update for deleted edges.
  for (auto &Update : DeleteUpdates)
    removeEdge(Update.getFrom(), Update.getTo());
}

void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates,
                                          DominatorTree &DT) {
  GraphDiff<BasicBlock *> GD;
  applyInsertUpdates(Updates, DT, &GD);
}

void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates,
                                          DominatorTree &DT,
                                          const GraphDiff<BasicBlock *> *GD) {
  // Get the last Def recursively, assuming well-formed MSSA and an updated DT.
  auto GetLastDef = [&](BasicBlock *BB) -> MemoryAccess * {
    while (true) {
      MemorySSA::DefsList *Defs = MSSA->getWritableBlockDefs(BB);
      // Return the last Def or Phi in BB, if it exists.
      if (Defs)
        return &*(--Defs->end());

      // Check the number of predecessors; we only care whether there is more
      // than one.
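      // Predecessors are taken through the GraphDiff view, so edges whose
      // deletion is still pending are treated as if they were still present.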
      unsigned Count = 0;
      BasicBlock *Pred = nullptr;
      for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) {
        Pred = Pi;
        Count++;
        if (Count == 2)
          break;
      }

      // If BB has multiple predecessors, get the last definition from the IDom.
      if (Count != 1) {
        // [SimpleLoopUnswitch] If BB is a dead block, about to be deleted, its
        // DT node is invalidated. Return LoE as its last def. This will be
        // added to the MemoryPhi node, and later deleted when the block is
        // deleted.
        if (!DT.getNode(BB))
          return MSSA->getLiveOnEntryDef();
        if (auto *IDom = DT.getNode(BB)->getIDom())
          if (IDom->getBlock() != BB) {
            BB = IDom->getBlock();
            continue;
          }
        return MSSA->getLiveOnEntryDef();
      } else {
        // Single predecessor, BB cannot be dead. GetLastDef of Pred.
        assert(Count == 1 && Pred && "Single predecessor expected.");
        // BB can be unreachable though; return LoE if that is the case.
        if (!DT.getNode(BB))
          return MSSA->getLiveOnEntryDef();
        BB = Pred;
      }
    };
    llvm_unreachable("Unable to get last definition.");
  };

  // Get the nearest IDom given a set of blocks.
  // TODO: this can be optimized by starting the search at the node with the
  // lowest level (highest in the tree).
  auto FindNearestCommonDominator =
      [&](const SmallSetVector<BasicBlock *, 2> &BBSet) -> BasicBlock * {
    BasicBlock *PrevIDom = *BBSet.begin();
    for (auto *BB : BBSet)
      PrevIDom = DT.findNearestCommonDominator(PrevIDom, BB);
    return PrevIDom;
  };

  // Get all blocks that dominate PrevIDom, stopping when reaching CurrIDom. Do
  // not include CurrIDom.
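  // The lambda below walks up the IDom chain starting at PrevIDom, recording
  // each block until CurrIDom is reached.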
  auto GetNoLongerDomBlocks =
      [&](BasicBlock *PrevIDom, BasicBlock *CurrIDom,
          SmallVectorImpl<BasicBlock *> &BlocksPrevDom) {
        if (PrevIDom == CurrIDom)
          return;
        BlocksPrevDom.push_back(PrevIDom);
        BasicBlock *NextIDom = PrevIDom;
        while (BasicBlock *UpIDom =
                   DT.getNode(NextIDom)->getIDom()->getBlock()) {
          if (UpIDom == CurrIDom)
            break;
          BlocksPrevDom.push_back(UpIDom);
          NextIDom = UpIDom;
        }
      };

  // Map a BB to its predecessors: added + previously existing. To get a
  // deterministic order, store predecessors as SetVectors. The order in each
  // will be defined by the order in Updates (fixed) and the order given by
  // children<> (also fixed). Since we further iterate over these ordered sets,
  // we lose the information of multiple edges possibly existing between two
  // blocks, so we'll keep an EdgeCount map for that.
  // An alternate implementation could keep an unordered set for the
  // predecessors, traverse either Updates or children<> each time to get the
  // deterministic order, and drop the usage of EdgeCount. This alternate
  // approach would still require querying the maps for each predecessor, and
  // the children<> call has additional computation inside for creating the
  // snapshot-graph predecessors. As such, we favor using a little additional
  // storage and less compute time. This decision can be revisited if we find
  // the alternative more favorable.

  struct PredInfo {
    SmallSetVector<BasicBlock *, 2> Added;
    SmallSetVector<BasicBlock *, 2> Prev;
  };
  SmallDenseMap<BasicBlock *, PredInfo> PredMap;

  for (const auto &Edge : Updates) {
    BasicBlock *BB = Edge.getTo();
    auto &AddedBlockSet = PredMap[BB].Added;
    AddedBlockSet.insert(Edge.getFrom());
  }

  // Store all existing predecessors for each BB; at least one must exist.
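  // EdgeCountMap records how many parallel edges exist between a (Pred, BB)
  // pair, since the SetVectors above collapse duplicates (see the comment on
  // PredMap).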
9430b57cec5SDimitry Andric SmallDenseMap<std::pair<BasicBlock *, BasicBlock *>, int> EdgeCountMap; 9440b57cec5SDimitry Andric SmallPtrSet<BasicBlock *, 2> NewBlocks; 9450b57cec5SDimitry Andric for (auto &BBPredPair : PredMap) { 9460b57cec5SDimitry Andric auto *BB = BBPredPair.first; 9470b57cec5SDimitry Andric const auto &AddedBlockSet = BBPredPair.second.Added; 9480b57cec5SDimitry Andric auto &PrevBlockSet = BBPredPair.second.Prev; 949e8d8bef9SDimitry Andric for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BB)) { 9500b57cec5SDimitry Andric if (!AddedBlockSet.count(Pi)) 9510b57cec5SDimitry Andric PrevBlockSet.insert(Pi); 9520b57cec5SDimitry Andric EdgeCountMap[{Pi, BB}]++; 9530b57cec5SDimitry Andric } 9540b57cec5SDimitry Andric 9550b57cec5SDimitry Andric if (PrevBlockSet.empty()) { 9560b57cec5SDimitry Andric assert(pred_size(BB) == AddedBlockSet.size() && "Duplicate edges added."); 9570b57cec5SDimitry Andric LLVM_DEBUG( 9580b57cec5SDimitry Andric dbgs() 9590b57cec5SDimitry Andric << "Adding a predecessor to a block with no predecessors. " 9600b57cec5SDimitry Andric "This must be an edge added to a new, likely cloned, block. " 9610b57cec5SDimitry Andric "Its memory accesses must be already correct, assuming completed " 9620b57cec5SDimitry Andric "via the updateExitBlocksForClonedLoop API. " 9630b57cec5SDimitry Andric "Assert a single such edge is added so no phi addition or " 9640b57cec5SDimitry Andric "additional processing is required.\n"); 9650b57cec5SDimitry Andric assert(AddedBlockSet.size() == 1 && 9660b57cec5SDimitry Andric "Can only handle adding one predecessor to a new block."); 9670b57cec5SDimitry Andric // Need to remove new blocks from PredMap. Remove below to not invalidate 9680b57cec5SDimitry Andric // iterator here. 9690b57cec5SDimitry Andric NewBlocks.insert(BB); 9700b57cec5SDimitry Andric } 9710b57cec5SDimitry Andric } 9720b57cec5SDimitry Andric // Nothing to process for new/cloned blocks. 9730b57cec5SDimitry Andric for (auto *BB : NewBlocks) 9740b57cec5SDimitry Andric PredMap.erase(BB); 9750b57cec5SDimitry Andric 9760b57cec5SDimitry Andric SmallVector<BasicBlock *, 16> BlocksWithDefsToReplace; 9770b57cec5SDimitry Andric SmallVector<WeakVH, 8> InsertedPhis; 9780b57cec5SDimitry Andric 9790b57cec5SDimitry Andric // First create MemoryPhis in all blocks that don't have one. Create in the 9800b57cec5SDimitry Andric // order found in Updates, not in PredMap, to get deterministic numbering. 981fcaf7f86SDimitry Andric for (const auto &Edge : Updates) { 9820b57cec5SDimitry Andric BasicBlock *BB = Edge.getTo(); 9830b57cec5SDimitry Andric if (PredMap.count(BB) && !MSSA->getMemoryAccess(BB)) 9840b57cec5SDimitry Andric InsertedPhis.push_back(MSSA->createMemoryPhi(BB)); 9850b57cec5SDimitry Andric } 9860b57cec5SDimitry Andric 9870b57cec5SDimitry Andric // Now we'll fill in the MemoryPhis with the right incoming values. 9880b57cec5SDimitry Andric for (auto &BBPredPair : PredMap) { 9890b57cec5SDimitry Andric auto *BB = BBPredPair.first; 9900b57cec5SDimitry Andric const auto &PrevBlockSet = BBPredPair.second.Prev; 9910b57cec5SDimitry Andric const auto &AddedBlockSet = BBPredPair.second.Added; 9920b57cec5SDimitry Andric assert(!PrevBlockSet.empty() && 9930b57cec5SDimitry Andric "At least one previous predecessor must exist."); 9940b57cec5SDimitry Andric 9950b57cec5SDimitry Andric // TODO: if this becomes a bottleneck, we can save on GetLastDef calls by 9960b57cec5SDimitry Andric // keeping this map before the loop. 
We can reuse already populated entries
9970b57cec5SDimitry Andric     // if an edge is added from the same predecessor to two different blocks,
9980b57cec5SDimitry Andric     // and this does happen in rotate. Note that the map needs to be updated
9990b57cec5SDimitry Andric     // when deleting unnecessary phis below: if the deleted phi is in the map,
10000b57cec5SDimitry Andric     // replace its value with DefP1.
10010b57cec5SDimitry Andric     SmallDenseMap<BasicBlock *, MemoryAccess *> LastDefAddedPred;
10020b57cec5SDimitry Andric     for (auto *AddedPred : AddedBlockSet) {
10030b57cec5SDimitry Andric       auto *DefPn = GetLastDef(AddedPred);
10040b57cec5SDimitry Andric       assert(DefPn != nullptr && "Unable to find last definition.");
10050b57cec5SDimitry Andric       LastDefAddedPred[AddedPred] = DefPn;
10060b57cec5SDimitry Andric     }
10070b57cec5SDimitry Andric 
10080b57cec5SDimitry Andric     MemoryPhi *NewPhi = MSSA->getMemoryAccess(BB);
10090b57cec5SDimitry Andric     // If Phi is not empty, add an incoming edge from each added pred. Must
10100b57cec5SDimitry Andric     // still compute blocks with defs to replace for this block below.
10110b57cec5SDimitry Andric     if (NewPhi->getNumOperands()) {
10120b57cec5SDimitry Andric       for (auto *Pred : AddedBlockSet) {
10130b57cec5SDimitry Andric         auto *LastDefForPred = LastDefAddedPred[Pred];
10140b57cec5SDimitry Andric         for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
10150b57cec5SDimitry Andric           NewPhi->addIncoming(LastDefForPred, Pred);
10160b57cec5SDimitry Andric       }
10170b57cec5SDimitry Andric     } else {
10180b57cec5SDimitry Andric       // Pick any existing predecessor and get its definition. All other
10190b57cec5SDimitry Andric       // existing predecessors should have the same one, since no phi existed.
10200b57cec5SDimitry Andric       auto *P1 = *PrevBlockSet.begin();
10210b57cec5SDimitry Andric       MemoryAccess *DefP1 = GetLastDef(P1);
10220b57cec5SDimitry Andric 
10230b57cec5SDimitry Andric       // Check DefP1 against all Defs in LastDefAddedPred. If all the same,
10240b57cec5SDimitry Andric       // nothing to add.
10250b57cec5SDimitry Andric       bool InsertPhi = false;
10260b57cec5SDimitry Andric       for (auto LastDefPredPair : LastDefAddedPred)
10270b57cec5SDimitry Andric         if (DefP1 != LastDefPredPair.second) {
10280b57cec5SDimitry Andric           InsertPhi = true;
10290b57cec5SDimitry Andric           break;
10300b57cec5SDimitry Andric         }
10310b57cec5SDimitry Andric       if (!InsertPhi) {
10320b57cec5SDimitry Andric         // Since NewPhi may be used in other newly added Phis, replace all uses
10330b57cec5SDimitry Andric         // of NewPhi with the definition coming from all predecessors (DefP1),
10340b57cec5SDimitry Andric         // before deleting it.
10350b57cec5SDimitry Andric         NewPhi->replaceAllUsesWith(DefP1);
10360b57cec5SDimitry Andric         removeMemoryAccess(NewPhi);
10370b57cec5SDimitry Andric         continue;
10380b57cec5SDimitry Andric       }
10390b57cec5SDimitry Andric 
10400b57cec5SDimitry Andric       // Update Phi with new values for new predecessors and old value for all
10410b57cec5SDimitry Andric       // other predecessors. Since AddedBlockSet and PrevBlockSet are ordered
10420b57cec5SDimitry Andric       // sets, the order of entries in NewPhi is deterministic.
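      // For example, with AddedBlockSet = {A} and PrevBlockSet = {B, C} (A, B,
      // C being illustrative block names), NewPhi receives (GetLastDef(A), A),
      // (DefP1, B) and (DefP1, C), each entry repeated once per parallel edge
      // recorded in EdgeCountMap.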
10430b57cec5SDimitry Andric       for (auto *Pred : AddedBlockSet) {
10440b57cec5SDimitry Andric         auto *LastDefForPred = LastDefAddedPred[Pred];
10450b57cec5SDimitry Andric         for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
10460b57cec5SDimitry Andric           NewPhi->addIncoming(LastDefForPred, Pred);
10470b57cec5SDimitry Andric       }
10480b57cec5SDimitry Andric       for (auto *Pred : PrevBlockSet)
10490b57cec5SDimitry Andric         for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I)
10500b57cec5SDimitry Andric           NewPhi->addIncoming(DefP1, Pred);
10510b57cec5SDimitry Andric     }
10520b57cec5SDimitry Andric 
10530b57cec5SDimitry Andric     // Get all blocks that used to dominate BB and no longer do after adding
10540b57cec5SDimitry Andric     // AddedBlockSet, where PrevBlockSet are the previously known predecessors.
10550b57cec5SDimitry Andric     assert(DT.getNode(BB)->getIDom() && "BB does not have a valid idom");
10560b57cec5SDimitry Andric     BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet);
10570b57cec5SDimitry Andric     assert(PrevIDom && "Previous IDom should exist");
10580b57cec5SDimitry Andric     BasicBlock *NewIDom = DT.getNode(BB)->getIDom()->getBlock();
10590b57cec5SDimitry Andric     assert(NewIDom && "BB should have a new valid idom");
10600b57cec5SDimitry Andric     assert(DT.dominates(NewIDom, PrevIDom) &&
10610b57cec5SDimitry Andric            "New idom should dominate old idom");
10620b57cec5SDimitry Andric     GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace);
10630b57cec5SDimitry Andric   }
10640b57cec5SDimitry Andric 
10650b57cec5SDimitry Andric   tryRemoveTrivialPhis(InsertedPhis);
10660b57cec5SDimitry Andric   // Create the set of blocks that now have a definition. We'll use this to
10670b57cec5SDimitry Andric   // compute IDF and add Phis there next.
10680b57cec5SDimitry Andric   SmallVector<BasicBlock *, 8> BlocksToProcess;
10690b57cec5SDimitry Andric   for (auto &VH : InsertedPhis)
10700b57cec5SDimitry Andric     if (auto *MPhi = cast_or_null<MemoryPhi>(VH))
10710b57cec5SDimitry Andric       BlocksToProcess.push_back(MPhi->getBlock());
10720b57cec5SDimitry Andric 
10730b57cec5SDimitry Andric   // Compute IDF and add Phis in all IDF blocks that do not have one.
10740b57cec5SDimitry Andric   SmallVector<BasicBlock *, 32> IDFBlocks;
10750b57cec5SDimitry Andric   if (!BlocksToProcess.empty()) {
10760b57cec5SDimitry Andric     ForwardIDFCalculator IDFs(DT, GD);
10770b57cec5SDimitry Andric     SmallPtrSet<BasicBlock *, 16> DefiningBlocks(BlocksToProcess.begin(),
10780b57cec5SDimitry Andric                                                  BlocksToProcess.end());
10790b57cec5SDimitry Andric     IDFs.setDefiningBlocks(DefiningBlocks);
10800b57cec5SDimitry Andric     IDFs.calculate(IDFBlocks);
10810b57cec5SDimitry Andric 
10820b57cec5SDimitry Andric     SmallSetVector<MemoryPhi *, 4> PhisToFill;
10830b57cec5SDimitry Andric     // First create all needed Phis.
10840b57cec5SDimitry Andric     for (auto *BBIDF : IDFBlocks)
10850b57cec5SDimitry Andric       if (!MSSA->getMemoryAccess(BBIDF)) {
10860b57cec5SDimitry Andric         auto *IDFPhi = MSSA->createMemoryPhi(BBIDF);
10870b57cec5SDimitry Andric         InsertedPhis.push_back(IDFPhi);
10880b57cec5SDimitry Andric         PhisToFill.insert(IDFPhi);
10890b57cec5SDimitry Andric       }
10900b57cec5SDimitry Andric     // Then update or insert their correct incoming values.
10910b57cec5SDimitry Andric     for (auto *BBIDF : IDFBlocks) {
10920b57cec5SDimitry Andric       auto *IDFPhi = MSSA->getMemoryAccess(BBIDF);
10930b57cec5SDimitry Andric       assert(IDFPhi && "Phi must exist");
10940b57cec5SDimitry Andric       if (!PhisToFill.count(IDFPhi)) {
10950b57cec5SDimitry Andric         // Update existing Phi.
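        // This block already had a MemoryPhi; recompute every incoming value
        // with GetLastDef, since the inserted edges may have changed which
        // definition reaches each existing predecessor.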
10960b57cec5SDimitry Andric // FIXME: some updates may be redundant, try to optimize and skip some. 10970b57cec5SDimitry Andric for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I) 10980b57cec5SDimitry Andric IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I))); 10990b57cec5SDimitry Andric } else { 1100e8d8bef9SDimitry Andric for (auto *Pi : GD->template getChildren</*InverseEdge=*/true>(BBIDF)) 11010b57cec5SDimitry Andric IDFPhi->addIncoming(GetLastDef(Pi), Pi); 11020b57cec5SDimitry Andric } 11030b57cec5SDimitry Andric } 11040b57cec5SDimitry Andric } 11050b57cec5SDimitry Andric 11060b57cec5SDimitry Andric // Now for all defs in BlocksWithDefsToReplace, if there are uses they no 11070b57cec5SDimitry Andric // longer dominate, replace those with the closest dominating def. 11080b57cec5SDimitry Andric // This will also update optimized accesses, as they're also uses. 11090b57cec5SDimitry Andric for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) { 11100b57cec5SDimitry Andric if (auto DefsList = MSSA->getWritableBlockDefs(BlockWithDefsToReplace)) { 11110b57cec5SDimitry Andric for (auto &DefToReplaceUses : *DefsList) { 11120b57cec5SDimitry Andric BasicBlock *DominatingBlock = DefToReplaceUses.getBlock(); 1113349cc55cSDimitry Andric for (Use &U : llvm::make_early_inc_range(DefToReplaceUses.uses())) { 11148bcb0991SDimitry Andric MemoryAccess *Usr = cast<MemoryAccess>(U.getUser()); 11150b57cec5SDimitry Andric if (MemoryPhi *UsrPhi = dyn_cast<MemoryPhi>(Usr)) { 11160b57cec5SDimitry Andric BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U); 11170b57cec5SDimitry Andric if (!DT.dominates(DominatingBlock, DominatedBlock)) 11180b57cec5SDimitry Andric U.set(GetLastDef(DominatedBlock)); 11190b57cec5SDimitry Andric } else { 11200b57cec5SDimitry Andric BasicBlock *DominatedBlock = Usr->getBlock(); 11210b57cec5SDimitry Andric if (!DT.dominates(DominatingBlock, DominatedBlock)) { 11220b57cec5SDimitry Andric if (auto *DomBlPhi = MSSA->getMemoryAccess(DominatedBlock)) 11230b57cec5SDimitry Andric U.set(DomBlPhi); 11240b57cec5SDimitry Andric else { 11250b57cec5SDimitry Andric auto *IDom = DT.getNode(DominatedBlock)->getIDom(); 11260b57cec5SDimitry Andric assert(IDom && "Block must have a valid IDom."); 11270b57cec5SDimitry Andric U.set(GetLastDef(IDom->getBlock())); 11280b57cec5SDimitry Andric } 11290b57cec5SDimitry Andric cast<MemoryUseOrDef>(Usr)->resetOptimized(); 11300b57cec5SDimitry Andric } 11310b57cec5SDimitry Andric } 11320b57cec5SDimitry Andric } 11330b57cec5SDimitry Andric } 11340b57cec5SDimitry Andric } 11350b57cec5SDimitry Andric } 11360b57cec5SDimitry Andric tryRemoveTrivialPhis(InsertedPhis); 11370b57cec5SDimitry Andric } 11380b57cec5SDimitry Andric 11390b57cec5SDimitry Andric // Move What before Where in the MemorySSA IR. 11400b57cec5SDimitry Andric template <class WhereType> 11410b57cec5SDimitry Andric void MemorySSAUpdater::moveTo(MemoryUseOrDef *What, BasicBlock *BB, 11420b57cec5SDimitry Andric WhereType Where) { 11430b57cec5SDimitry Andric // Mark MemoryPhi users of What not to be optimized. 11440b57cec5SDimitry Andric for (auto *U : What->users()) 11450b57cec5SDimitry Andric if (MemoryPhi *PhiUser = dyn_cast<MemoryPhi>(U)) 11460b57cec5SDimitry Andric NonOptPhis.insert(PhiUser); 11470b57cec5SDimitry Andric 11480b57cec5SDimitry Andric // Replace all our users with our defining access. 
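  // Users are temporarily re-pointed at What's defining access; insertDef /
  // insertUse below recompute the correct defining access at the new position
  // and rename uses as needed.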
11490b57cec5SDimitry Andric   What->replaceAllUsesWith(What->getDefiningAccess());
11500b57cec5SDimitry Andric 
11510b57cec5SDimitry Andric   // Let MemorySSA take care of moving it around in the lists.
11520b57cec5SDimitry Andric   MSSA->moveTo(What, BB, Where);
11530b57cec5SDimitry Andric 
11540b57cec5SDimitry Andric   // Now reinsert it into the IR and do whatever fixups are needed.
11550b57cec5SDimitry Andric   if (auto *MD = dyn_cast<MemoryDef>(What))
11568bcb0991SDimitry Andric     insertDef(MD, /*RenameUses=*/true);
11570b57cec5SDimitry Andric   else
11588bcb0991SDimitry Andric     insertUse(cast<MemoryUse>(What), /*RenameUses=*/true);
11590b57cec5SDimitry Andric 
11600b57cec5SDimitry Andric   // Clear dangling pointers. We added all MemoryPhi users, but not all
11610b57cec5SDimitry Andric   // of them are removed by fixupDefs().
11620b57cec5SDimitry Andric   NonOptPhis.clear();
11630b57cec5SDimitry Andric }
11640b57cec5SDimitry Andric 
11650b57cec5SDimitry Andric // Move What before Where in the MemorySSA IR.
11660b57cec5SDimitry Andric void MemorySSAUpdater::moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
11670b57cec5SDimitry Andric   moveTo(What, Where->getBlock(), Where->getIterator());
11680b57cec5SDimitry Andric }
11690b57cec5SDimitry Andric 
11700b57cec5SDimitry Andric // Move What after Where in the MemorySSA IR.
11710b57cec5SDimitry Andric void MemorySSAUpdater::moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where) {
11720b57cec5SDimitry Andric   moveTo(What, Where->getBlock(), ++Where->getIterator());
11730b57cec5SDimitry Andric }
11740b57cec5SDimitry Andric 
11750b57cec5SDimitry Andric void MemorySSAUpdater::moveToPlace(MemoryUseOrDef *What, BasicBlock *BB,
11760b57cec5SDimitry Andric                                    MemorySSA::InsertionPlace Where) {
1177480093f4SDimitry Andric   if (Where != MemorySSA::InsertionPlace::BeforeTerminator)
11780b57cec5SDimitry Andric     return moveTo(What, BB, Where);
1179480093f4SDimitry Andric 
1180480093f4SDimitry Andric   if (auto *Where = MSSA->getMemoryAccess(BB->getTerminator()))
1181480093f4SDimitry Andric     return moveBefore(What, Where);
1182480093f4SDimitry Andric   else
1183480093f4SDimitry Andric     return moveTo(What, BB, MemorySSA::InsertionPlace::End);
11840b57cec5SDimitry Andric }
11850b57cec5SDimitry Andric 
11860b57cec5SDimitry Andric // All accesses in To used to be in From. Move to end and update access lists.
11870b57cec5SDimitry Andric void MemorySSAUpdater::moveAllAccesses(BasicBlock *From, BasicBlock *To,
11880b57cec5SDimitry Andric                                        Instruction *Start) {
11890b57cec5SDimitry Andric 
11900b57cec5SDimitry Andric   MemorySSA::AccessList *Accs = MSSA->getWritableBlockAccesses(From);
11910b57cec5SDimitry Andric   if (!Accs)
11920b57cec5SDimitry Andric     return;
11930b57cec5SDimitry Andric 
11948bcb0991SDimitry Andric   assert(Start->getParent() == To && "Incorrect Start instruction");
11950b57cec5SDimitry Andric   MemoryAccess *FirstInNew = nullptr;
11960b57cec5SDimitry Andric   for (Instruction &I : make_range(Start->getIterator(), To->end()))
11970b57cec5SDimitry Andric     if ((FirstInNew = MSSA->getMemoryAccess(&I)))
11980b57cec5SDimitry Andric       break;
11998bcb0991SDimitry Andric   if (FirstInNew) {
12000b57cec5SDimitry Andric     auto *MUD = cast<MemoryUseOrDef>(FirstInNew);
12010b57cec5SDimitry Andric     do {
12020b57cec5SDimitry Andric       auto NextIt = ++MUD->getIterator();
12030b57cec5SDimitry Andric       MemoryUseOrDef *NextMUD = (!Accs || NextIt == Accs->end())
12040b57cec5SDimitry Andric                                     ? nullptr
12050b57cec5SDimitry Andric                                     : cast<MemoryUseOrDef>(&*NextIt);
12060b57cec5SDimitry Andric       MSSA->moveTo(MUD, To, MemorySSA::End);
12078bcb0991SDimitry Andric       // Moving MUD from Accs in the moveTo above may delete Accs, so we need
12088bcb0991SDimitry Andric       // to retrieve it again.
12090b57cec5SDimitry Andric       Accs = MSSA->getWritableBlockAccesses(From);
12100b57cec5SDimitry Andric       MUD = NextMUD;
12110b57cec5SDimitry Andric     } while (MUD);
12120b57cec5SDimitry Andric   }
12130b57cec5SDimitry Andric 
12148bcb0991SDimitry Andric   // If all accesses were moved and only a trivial Phi remains, we try to remove
12158bcb0991SDimitry Andric   // that Phi. This is needed when From is going to be deleted.
12168bcb0991SDimitry Andric   auto *Defs = MSSA->getWritableBlockDefs(From);
12178bcb0991SDimitry Andric   if (Defs && !Defs->empty())
12188bcb0991SDimitry Andric     if (auto *Phi = dyn_cast<MemoryPhi>(&*Defs->begin()))
12198bcb0991SDimitry Andric       tryRemoveTrivialPhi(Phi);
12208bcb0991SDimitry Andric }
12218bcb0991SDimitry Andric 
12220b57cec5SDimitry Andric void MemorySSAUpdater::moveAllAfterSpliceBlocks(BasicBlock *From,
12230b57cec5SDimitry Andric                                                 BasicBlock *To,
12240b57cec5SDimitry Andric                                                 Instruction *Start) {
12250b57cec5SDimitry Andric   assert(MSSA->getBlockAccesses(To) == nullptr &&
12260b57cec5SDimitry Andric          "To block is expected to be free of MemoryAccesses.");
12270b57cec5SDimitry Andric   moveAllAccesses(From, To, Start);
12280b57cec5SDimitry Andric   for (BasicBlock *Succ : successors(To))
12290b57cec5SDimitry Andric     if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
12300b57cec5SDimitry Andric       MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
12310b57cec5SDimitry Andric }
12320b57cec5SDimitry Andric 
12330b57cec5SDimitry Andric void MemorySSAUpdater::moveAllAfterMergeBlocks(BasicBlock *From, BasicBlock *To,
12340b57cec5SDimitry Andric                                                Instruction *Start) {
12358bcb0991SDimitry Andric   assert(From->getUniquePredecessor() == To &&
12360b57cec5SDimitry Andric          "From block is expected to have a single predecessor (To).");
12370b57cec5SDimitry Andric   moveAllAccesses(From, To, Start);
12380b57cec5SDimitry Andric   for (BasicBlock *Succ : successors(From))
12390b57cec5SDimitry Andric     if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ))
12400b57cec5SDimitry Andric       MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To);
12410b57cec5SDimitry Andric }
12420b57cec5SDimitry Andric 
12430b57cec5SDimitry Andric void MemorySSAUpdater::wireOldPredecessorsToNewImmediatePredecessor(
12440b57cec5SDimitry Andric     BasicBlock *Old, BasicBlock *New, ArrayRef<BasicBlock *> Preds,
12450b57cec5SDimitry Andric     bool IdenticalEdgesWereMerged) {
12460b57cec5SDimitry Andric   assert(!MSSA->getWritableBlockAccesses(New) &&
12470b57cec5SDimitry Andric          "Access list should be null for a new block.");
12480b57cec5SDimitry Andric   MemoryPhi *Phi = MSSA->getMemoryAccess(Old);
12490b57cec5SDimitry Andric   if (!Phi)
12500b57cec5SDimitry Andric     return;
12510b57cec5SDimitry Andric   if (Old->hasNPredecessors(1)) {
12520b57cec5SDimitry Andric     assert(pred_size(New) == Preds.size() &&
12530b57cec5SDimitry Andric            "Should have moved all predecessors.");
12540b57cec5SDimitry Andric     MSSA->moveTo(Phi, New, MemorySSA::Beginning);
12550b57cec5SDimitry Andric   } else {
12560b57cec5SDimitry Andric     assert(!Preds.empty() && "Must be moving at least one predecessor to the "
12570b57cec5SDimitry Andric                              "new immediate predecessor.");
12580b57cec5SDimitry Andric     MemoryPhi *NewPhi = MSSA->createMemoryPhi(New);
12590b57cec5SDimitry Andric     SmallPtrSet<BasicBlock *, 16> PredsSet(Preds.begin(), Preds.end());
12600b57cec5SDimitry Andric     // We currently only support the case of removing a single incoming edge
12610b57cec5SDimitry Andric     // when identical edges were not merged.
12620b57cec5SDimitry Andric     if (!IdenticalEdgesWereMerged)
12630b57cec5SDimitry Andric       assert(PredsSet.size() == Preds.size() &&
12640b57cec5SDimitry Andric              "If identical edges were not merged, we cannot have duplicate "
12650b57cec5SDimitry Andric              "blocks in the predecessors");
12660b57cec5SDimitry Andric     Phi->unorderedDeleteIncomingIf([&](MemoryAccess *MA, BasicBlock *B) {
12670b57cec5SDimitry Andric       if (PredsSet.count(B)) {
12680b57cec5SDimitry Andric         NewPhi->addIncoming(MA, B);
12690b57cec5SDimitry Andric         if (!IdenticalEdgesWereMerged)
12700b57cec5SDimitry Andric           PredsSet.erase(B);
12710b57cec5SDimitry Andric         return true;
12720b57cec5SDimitry Andric       }
12730b57cec5SDimitry Andric       return false;
12740b57cec5SDimitry Andric     });
12750b57cec5SDimitry Andric     Phi->addIncoming(NewPhi, New);
12768bcb0991SDimitry Andric     tryRemoveTrivialPhi(NewPhi);
12770b57cec5SDimitry Andric   }
12780b57cec5SDimitry Andric }
12790b57cec5SDimitry Andric 
12800b57cec5SDimitry Andric void MemorySSAUpdater::removeMemoryAccess(MemoryAccess *MA, bool OptimizePhis) {
12810b57cec5SDimitry Andric   assert(!MSSA->isLiveOnEntryDef(MA) &&
12820b57cec5SDimitry Andric          "Trying to remove the live on entry def");
12830b57cec5SDimitry Andric   // We can only delete phi nodes if they have no uses, or we can replace all
12840b57cec5SDimitry Andric   // uses with a single definition.
12850b57cec5SDimitry Andric   MemoryAccess *NewDefTarget = nullptr;
12860b57cec5SDimitry Andric   if (MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) {
12870b57cec5SDimitry Andric     // Note that it is sufficient to know that all edges of the phi node have
12880b57cec5SDimitry Andric     // the same argument. If they do, by the definition of dominance frontiers
12890b57cec5SDimitry Andric     // (which we used to place this phi), that argument must dominate this phi,
12900b57cec5SDimitry Andric     // and thus, must dominate the phi's uses, and so we will not hit the assert
12910b57cec5SDimitry Andric     // below.
12920b57cec5SDimitry Andric     NewDefTarget = onlySingleValue(MP);
12930b57cec5SDimitry Andric     assert((NewDefTarget || MP->use_empty()) &&
12940b57cec5SDimitry Andric            "We can't delete this memory phi");
12950b57cec5SDimitry Andric   } else {
12960b57cec5SDimitry Andric     NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess();
12970b57cec5SDimitry Andric   }
12980b57cec5SDimitry Andric 
12990b57cec5SDimitry Andric   SmallSetVector<MemoryPhi *, 4> PhisToCheck;
13000b57cec5SDimitry Andric 
13010b57cec5SDimitry Andric   // Re-point the uses at our defining access
13020b57cec5SDimitry Andric   if (!isa<MemoryUse>(MA) && !MA->use_empty()) {
13030b57cec5SDimitry Andric     // Reset optimized on users of this store, and reset the uses.
13040b57cec5SDimitry Andric     // A few notes:
13050b57cec5SDimitry Andric     // 1. This is a slightly modified version of RAUW to avoid walking the
13060b57cec5SDimitry Andric     // uses twice here.
13070b57cec5SDimitry Andric     // 2. If we wanted to be complete, we would have to reset the optimized
13080b57cec5SDimitry Andric     // flags on users of phi nodes if doing the below makes a phi node have all
13090b57cec5SDimitry Andric     // the same arguments. Instead, we prefer that users call removeMemoryAccess
13100b57cec5SDimitry Andric     // on those phi nodes, because doing it here would be N^3.
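    // Notify any value handles tracking MA that it is being replaced by
    // NewDefTarget before its uses are rewritten below.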
13110b57cec5SDimitry Andric if (MA->hasValueHandle()) 13120b57cec5SDimitry Andric ValueHandleBase::ValueIsRAUWd(MA, NewDefTarget); 13130b57cec5SDimitry Andric // Note: We assume MemorySSA is not used in metadata since it's not really 13140b57cec5SDimitry Andric // part of the IR. 13150b57cec5SDimitry Andric 1316e8d8bef9SDimitry Andric assert(NewDefTarget != MA && "Going into an infinite loop"); 13170b57cec5SDimitry Andric while (!MA->use_empty()) { 13180b57cec5SDimitry Andric Use &U = *MA->use_begin(); 13190b57cec5SDimitry Andric if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser())) 13200b57cec5SDimitry Andric MUD->resetOptimized(); 13210b57cec5SDimitry Andric if (OptimizePhis) 13220b57cec5SDimitry Andric if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U.getUser())) 13230b57cec5SDimitry Andric PhisToCheck.insert(MP); 13240b57cec5SDimitry Andric U.set(NewDefTarget); 13250b57cec5SDimitry Andric } 13260b57cec5SDimitry Andric } 13270b57cec5SDimitry Andric 13280b57cec5SDimitry Andric // The call below to erase will destroy MA, so we can't change the order we 13290b57cec5SDimitry Andric // are doing things here 13300b57cec5SDimitry Andric MSSA->removeFromLookups(MA); 13310b57cec5SDimitry Andric MSSA->removeFromLists(MA); 13320b57cec5SDimitry Andric 13330b57cec5SDimitry Andric // Optionally optimize Phi uses. This will recursively remove trivial phis. 13340b57cec5SDimitry Andric if (!PhisToCheck.empty()) { 13350b57cec5SDimitry Andric SmallVector<WeakVH, 16> PhisToOptimize{PhisToCheck.begin(), 13360b57cec5SDimitry Andric PhisToCheck.end()}; 13370b57cec5SDimitry Andric PhisToCheck.clear(); 13380b57cec5SDimitry Andric 13390b57cec5SDimitry Andric unsigned PhisSize = PhisToOptimize.size(); 13400b57cec5SDimitry Andric while (PhisSize-- > 0) 13410b57cec5SDimitry Andric if (MemoryPhi *MP = 13428bcb0991SDimitry Andric cast_or_null<MemoryPhi>(PhisToOptimize.pop_back_val())) 13438bcb0991SDimitry Andric tryRemoveTrivialPhi(MP); 13440b57cec5SDimitry Andric } 13450b57cec5SDimitry Andric } 13460b57cec5SDimitry Andric 13470b57cec5SDimitry Andric void MemorySSAUpdater::removeBlocks( 13480b57cec5SDimitry Andric const SmallSetVector<BasicBlock *, 8> &DeadBlocks) { 13490b57cec5SDimitry Andric // First delete all uses of BB in MemoryPhis. 
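  // References inside the dead region are dropped as well, so that any cycles
  // among the dead accesses are broken before the second loop erases them.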
13500b57cec5SDimitry Andric for (BasicBlock *BB : DeadBlocks) { 13510b57cec5SDimitry Andric Instruction *TI = BB->getTerminator(); 13520b57cec5SDimitry Andric assert(TI && "Basic block expected to have a terminator instruction"); 13530b57cec5SDimitry Andric for (BasicBlock *Succ : successors(TI)) 13540b57cec5SDimitry Andric if (!DeadBlocks.count(Succ)) 13550b57cec5SDimitry Andric if (MemoryPhi *MP = MSSA->getMemoryAccess(Succ)) { 13560b57cec5SDimitry Andric MP->unorderedDeleteIncomingBlock(BB); 13578bcb0991SDimitry Andric tryRemoveTrivialPhi(MP); 13580b57cec5SDimitry Andric } 13590b57cec5SDimitry Andric // Drop all references of all accesses in BB 13600b57cec5SDimitry Andric if (MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB)) 13610b57cec5SDimitry Andric for (MemoryAccess &MA : *Acc) 13620b57cec5SDimitry Andric MA.dropAllReferences(); 13630b57cec5SDimitry Andric } 13640b57cec5SDimitry Andric 13650b57cec5SDimitry Andric // Next, delete all memory accesses in each block 13660b57cec5SDimitry Andric for (BasicBlock *BB : DeadBlocks) { 13670b57cec5SDimitry Andric MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB); 13680b57cec5SDimitry Andric if (!Acc) 13690b57cec5SDimitry Andric continue; 1370fe6060f1SDimitry Andric for (MemoryAccess &MA : llvm::make_early_inc_range(*Acc)) { 1371fe6060f1SDimitry Andric MSSA->removeFromLookups(&MA); 1372fe6060f1SDimitry Andric MSSA->removeFromLists(&MA); 13730b57cec5SDimitry Andric } 13740b57cec5SDimitry Andric } 13750b57cec5SDimitry Andric } 13760b57cec5SDimitry Andric 13770b57cec5SDimitry Andric void MemorySSAUpdater::tryRemoveTrivialPhis(ArrayRef<WeakVH> UpdatedPHIs) { 1378fcaf7f86SDimitry Andric for (const auto &VH : UpdatedPHIs) 13798bcb0991SDimitry Andric if (auto *MPhi = cast_or_null<MemoryPhi>(VH)) 13808bcb0991SDimitry Andric tryRemoveTrivialPhi(MPhi); 13810b57cec5SDimitry Andric } 13820b57cec5SDimitry Andric 13830b57cec5SDimitry Andric void MemorySSAUpdater::changeToUnreachable(const Instruction *I) { 13840b57cec5SDimitry Andric const BasicBlock *BB = I->getParent(); 13850b57cec5SDimitry Andric // Remove memory accesses in BB for I and all following instructions. 13860b57cec5SDimitry Andric auto BBI = I->getIterator(), BBE = BB->end(); 13870b57cec5SDimitry Andric // FIXME: If this becomes too expensive, iterate until the first instruction 13880b57cec5SDimitry Andric // with a memory access, then iterate over MemoryAccesses. 13890b57cec5SDimitry Andric while (BBI != BBE) 13900b57cec5SDimitry Andric removeMemoryAccess(&*(BBI++)); 13910b57cec5SDimitry Andric // Update phis in BB's successors to remove BB. 13920b57cec5SDimitry Andric SmallVector<WeakVH, 16> UpdatedPHIs; 13930b57cec5SDimitry Andric for (const BasicBlock *Successor : successors(BB)) { 13940b57cec5SDimitry Andric removeDuplicatePhiEdgesBetween(BB, Successor); 13950b57cec5SDimitry Andric if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Successor)) { 13960b57cec5SDimitry Andric MPhi->unorderedDeleteIncomingBlock(BB); 13970b57cec5SDimitry Andric UpdatedPHIs.push_back(MPhi); 13980b57cec5SDimitry Andric } 13990b57cec5SDimitry Andric } 14000b57cec5SDimitry Andric // Optimize trivial phis. 
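  // Phis whose incoming values have all become identical are replaced by that
  // value and deleted.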
14010b57cec5SDimitry Andric tryRemoveTrivialPhis(UpdatedPHIs); 14020b57cec5SDimitry Andric } 14030b57cec5SDimitry Andric 14040b57cec5SDimitry Andric MemoryAccess *MemorySSAUpdater::createMemoryAccessInBB( 14050b57cec5SDimitry Andric Instruction *I, MemoryAccess *Definition, const BasicBlock *BB, 14060b57cec5SDimitry Andric MemorySSA::InsertionPlace Point) { 14070b57cec5SDimitry Andric MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition); 14080b57cec5SDimitry Andric MSSA->insertIntoListsForBlock(NewAccess, BB, Point); 14090b57cec5SDimitry Andric return NewAccess; 14100b57cec5SDimitry Andric } 14110b57cec5SDimitry Andric 14120b57cec5SDimitry Andric MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessBefore( 14130b57cec5SDimitry Andric Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt) { 14140b57cec5SDimitry Andric assert(I->getParent() == InsertPt->getBlock() && 14150b57cec5SDimitry Andric "New and old access must be in the same block"); 14160b57cec5SDimitry Andric MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition); 14170b57cec5SDimitry Andric MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(), 14180b57cec5SDimitry Andric InsertPt->getIterator()); 14190b57cec5SDimitry Andric return NewAccess; 14200b57cec5SDimitry Andric } 14210b57cec5SDimitry Andric 14220b57cec5SDimitry Andric MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessAfter( 14230b57cec5SDimitry Andric Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt) { 14240b57cec5SDimitry Andric assert(I->getParent() == InsertPt->getBlock() && 14250b57cec5SDimitry Andric "New and old access must be in the same block"); 14260b57cec5SDimitry Andric MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition); 14270b57cec5SDimitry Andric MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(), 14280b57cec5SDimitry Andric ++InsertPt->getIterator()); 14290b57cec5SDimitry Andric return NewAccess; 14300b57cec5SDimitry Andric } 1431