10b57cec5SDimitry Andric //===- CGSCCPassManager.cpp - Managing & running CGSCC passes -------------===// 20b57cec5SDimitry Andric // 30b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 40b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information. 50b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 60b57cec5SDimitry Andric // 70b57cec5SDimitry Andric //===----------------------------------------------------------------------===// 80b57cec5SDimitry Andric 90b57cec5SDimitry Andric #include "llvm/Analysis/CGSCCPassManager.h" 100b57cec5SDimitry Andric #include "llvm/ADT/ArrayRef.h" 110b57cec5SDimitry Andric #include "llvm/ADT/Optional.h" 120b57cec5SDimitry Andric #include "llvm/ADT/STLExtras.h" 130b57cec5SDimitry Andric #include "llvm/ADT/SetVector.h" 140b57cec5SDimitry Andric #include "llvm/ADT/SmallPtrSet.h" 150b57cec5SDimitry Andric #include "llvm/ADT/SmallVector.h" 160b57cec5SDimitry Andric #include "llvm/ADT/iterator_range.h" 170b57cec5SDimitry Andric #include "llvm/Analysis/LazyCallGraph.h" 180b57cec5SDimitry Andric #include "llvm/IR/Constant.h" 190b57cec5SDimitry Andric #include "llvm/IR/InstIterator.h" 200b57cec5SDimitry Andric #include "llvm/IR/Instruction.h" 210b57cec5SDimitry Andric #include "llvm/IR/PassManager.h" 225ffd83dbSDimitry Andric #include "llvm/IR/PassManagerImpl.h" 23*e8d8bef9SDimitry Andric #include "llvm/IR/ValueHandle.h" 240b57cec5SDimitry Andric #include "llvm/Support/Casting.h" 25*e8d8bef9SDimitry Andric #include "llvm/Support/CommandLine.h" 260b57cec5SDimitry Andric #include "llvm/Support/Debug.h" 27*e8d8bef9SDimitry Andric #include "llvm/Support/ErrorHandling.h" 285ffd83dbSDimitry Andric #include "llvm/Support/TimeProfiler.h" 29*e8d8bef9SDimitry Andric #include "llvm/Support/raw_ostream.h" 300b57cec5SDimitry Andric #include <algorithm> 310b57cec5SDimitry Andric #include <cassert> 320b57cec5SDimitry Andric #include <iterator> 
#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

// Command-line escape hatch: when the devirtualization repeat pass hits its
// iteration cap while still finding devirtualizations, abort with a fatal
// error instead of just emitting a debug message (see
// DevirtSCCRepeatedPass::run below, which reads this flag).
static cl::opt<bool> AbortOnMaxDevirtIterationsReached(
    "abort-on-max-devirt-iterations-reached",
    cl::desc("Abort when the max iterations for devirtualization CGSCC repeat "
             "pass is reached"));

// Explicit instantiations for the core proxy templates. These anchor the
// template definitions in this translation unit so other TUs can use the
// typedefs without re-instantiating them.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;
template class OuterAnalysisManagerProxy<CGSCCAnalysisManager, Function>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  // Get Function analysis manager from its proxy. Use getCachedResult: the
  // proxy must already have been created by whoever scheduled us on this SCC.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*C)->getManager();

  for (auto &Pass : Passes) {
    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass, skip its execution completely if asked to (callback returns false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(*C, AM, G, UR);
    }

    // Report the run to instrumentation. A distinct hook fires when the pass
    // invalidated the SCC it ran on, because then *C must not be dereferenced.
    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;
    if (UR.UpdatedC) {
      // If C is updated, also create a proxy and update FAM inside the result.
      auto *ResultFAMCP =
          &AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
      ResultFAMCP->updateFAM(FAM);
    }

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, we intersect the final preserved analyses to compute the
    // aggregate preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager so it is currently omitted.
    // ...getContext().yield();
  }

  // Before we mark all of *this* SCC's analyses as preserved below, intersect
  // this with the cross-SCC preserved analysis set. This is used to allow
  // CGSCC passes to mutate ancestor SCCs and still trigger proper invalidation
  // for them.
  UR.CrossSCCPA.intersect(PA);

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}
PreservedAnalyses
ModuleToPostOrderCGSCCPassAdaptor::run(Module &M, ModuleAnalysisManager &AM) {
  // Setup the CGSCC analysis manager from its proxy.
  CGSCCAnalysisManager &CGAM =
      AM.getResult<CGSCCAnalysisManagerModuleProxy>(M).getManager();

  // Get the call graph for this module.
  LazyCallGraph &CG = AM.getResult<LazyCallGraphAnalysis>(M);

  // Get Function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M)->getManager();

  // We keep worklists to allow us to push more work onto the pass manager as
  // the passes are run.
  SmallPriorityWorklist<LazyCallGraph::RefSCC *, 1> RCWorklist;
  SmallPriorityWorklist<LazyCallGraph::SCC *, 1> CWorklist;

  // Keep sets for invalidated SCCs and RefSCCs that should be skipped when
  // iterating off the worklists.
  SmallPtrSet<LazyCallGraph::RefSCC *, 4> InvalidRefSCCSet;
  SmallPtrSet<LazyCallGraph::SCC *, 4> InvalidSCCSet;

  SmallDenseSet<std::pair<LazyCallGraph::Node *, LazyCallGraph::SCC *>, 4>
      InlinedInternalEdges;

  // Aggregate update record threaded through every CGSCC pass run below so
  // passes can report graph mutations back to this driver loop.
  CGSCCUpdateResult UR = {
      RCWorklist, CWorklist, InvalidRefSCCSet,         InvalidSCCSet,
      nullptr,    nullptr,   PreservedAnalyses::all(), InlinedInternalEdges,
      {}};

  // Request PassInstrumentation from analysis manager, will use it to run
  // instrumenting callbacks for the passes later.
  PassInstrumentation PI = AM.getResult<PassInstrumentationAnalysis>(M);

  PreservedAnalyses PA = PreservedAnalyses::all();
  CG.buildRefSCCs();
  for (auto RCI = CG.postorder_ref_scc_begin(),
            RCE = CG.postorder_ref_scc_end();
       RCI != RCE;) {
    assert(RCWorklist.empty() &&
           "Should always start with an empty RefSCC worklist");
    // The postorder_ref_sccs range we are walking is lazily constructed, so
    // we only push the first one onto the worklist. The worklist allows us
    // to capture *new* RefSCCs created during transformations.
    //
    // We really want to form RefSCCs lazily because that makes them cheaper
    // to update as the program is simplified and allows us to have greater
    // cache locality as forming a RefSCC touches all the parts of all the
    // functions within that RefSCC.
    //
    // We also eagerly increment the iterator to the next position because
    // the CGSCC passes below may delete the current RefSCC.
    RCWorklist.insert(&*RCI++);

    do {
      LazyCallGraph::RefSCC *RC = RCWorklist.pop_back_val();
      if (InvalidRefSCCSet.count(RC)) {
        LLVM_DEBUG(dbgs() << "Skipping an invalid RefSCC...\n");
        continue;
      }

      assert(CWorklist.empty() &&
             "Should always start with an empty SCC worklist");

      LLVM_DEBUG(dbgs() << "Running an SCC pass across the RefSCC: " << *RC
                        << "\n");

      // The top of the worklist may *also* be the same SCC we just ran over
      // (and invalidated for). Keep track of that last SCC we processed due
      // to SCC update to avoid redundant processing when an SCC is both just
      // updated itself and at the top of the worklist.
      LazyCallGraph::SCC *LastUpdatedC = nullptr;

      // Push the initial SCCs in reverse post-order as we'll pop off the
      // back and so see this in post-order.
      for (LazyCallGraph::SCC &C : llvm::reverse(*RC))
        CWorklist.insert(&C);

      do {
        LazyCallGraph::SCC *C = CWorklist.pop_back_val();
        // Due to call graph mutations, we may have invalid SCCs or SCCs from
        // other RefSCCs in the worklist. The invalid ones are dead and the
        // other RefSCCs should be queued above, so we just need to skip both
        // scenarios here.
        if (InvalidSCCSet.count(C)) {
          LLVM_DEBUG(dbgs() << "Skipping an invalid SCC...\n");
          continue;
        }
        if (LastUpdatedC == C) {
          LLVM_DEBUG(dbgs() << "Skipping redundant run on SCC: " << *C << "\n");
          continue;
        }
        if (&C->getOuterRefSCC() != RC) {
          LLVM_DEBUG(dbgs() << "Skipping an SCC that is now part of some other "
                               "RefSCC...\n");
          continue;
        }

        // Ensure we can proxy analysis updates from the CGSCC analysis manager
        // into the Function analysis manager by getting a proxy here.
        // This also needs to update the FunctionAnalysisManager, as this may be
        // the first time we see this SCC.
        CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
            FAM);

        // Each time we visit a new SCC pulled off the worklist,
        // a transformation of a child SCC may have also modified this parent
        // and invalidated analyses. So we invalidate using the update record's
        // cross-SCC preserved set. This preserved set is intersected by any
        // CGSCC pass that handles invalidation (primarily pass managers) prior
        // to marking its SCC as preserved. That lets us track everything that
        // might need invalidation across SCCs without excessive invalidations
        // on a single SCC.
        //
        // This essentially allows SCC passes to freely invalidate analyses
        // of any ancestor SCC. If this becomes detrimental to successfully
        // caching analyses, we could force each SCC pass to manually
        // invalidate the analyses for any SCCs other than themselves which
        // are mutated. However, that seems to lose the robustness of the
        // pass-manager driven invalidation scheme.
        CGAM.invalidate(*C, UR.CrossSCCPA);

        do {
          // Check that we didn't miss any update scenario.
          assert(!InvalidSCCSet.count(C) && "Processing an invalid SCC!");
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");
          assert(&C->getOuterRefSCC() == RC &&
                 "Processing an SCC in a different RefSCC!");

          LastUpdatedC = UR.UpdatedC;
          UR.UpdatedRC = nullptr;
          UR.UpdatedC = nullptr;

          // Check the PassInstrumentation's BeforePass callbacks before
          // running the pass, skip its execution completely if asked to
          // (callback returns false). Note: UR.UpdatedC was just cleared, so
          // this `continue` falls out of the innermost do-while.
          if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C))
            continue;

          PreservedAnalyses PassPA;
          {
            TimeTraceScope TimeScope(Pass->name());
            PassPA = Pass->run(*C, CGAM, CG, UR);
          }

          // Report to instrumentation; use the invalidated variant when the
          // pass destroyed the SCC it ran on (then *C must not be touched).
          if (UR.InvalidatedSCCs.count(C))
            PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA);
          else
            PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA);

          // Update the SCC and RefSCC if necessary.
          C = UR.UpdatedC ? UR.UpdatedC : C;
          RC = UR.UpdatedRC ? UR.UpdatedRC : RC;

          if (UR.UpdatedC) {
            // If we're updating the SCC, also update the FAM inside the
            // proxy's result.
            CGAM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, CG).updateFAM(
                FAM);
          }

          // If the CGSCC pass wasn't able to provide a valid updated SCC,
          // the current SCC may simply need to be skipped if invalid.
          if (UR.InvalidatedSCCs.count(C)) {
            LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
            break;
          }
          // Check that we didn't miss any update scenario.
          assert(C->begin() != C->end() && "Cannot have an empty SCC!");

          // We handle invalidating the CGSCC analysis manager's information
          // for the (potentially updated) SCC here. Note that any other SCCs
          // whose structure has changed should have been invalidated by
          // whatever was updating the call graph. This SCC gets invalidated
          // late as it contains the nodes that were actively being
          // processed.
          CGAM.invalidate(*C, PassPA);

          // Then intersect the preserved set so that invalidation of module
          // analyses will eventually occur when the module pass completes.
          // Also intersect with the cross-SCC preserved set to capture any
          // cross-SCC invalidation.
          UR.CrossSCCPA.intersect(PassPA);
          PA.intersect(std::move(PassPA));

          // The pass may have restructured the call graph and refined the
          // current SCC and/or RefSCC. We need to update our current SCC and
          // RefSCC pointers to follow these. Also, when the current SCC is
          // refined, re-run the SCC pass over the newly refined SCC in order
          // to observe the most precise SCC model available. This inherently
          // cannot cycle excessively as it only happens when we split SCCs
          // apart, at most converging on a DAG of single nodes.
          // FIXME: If we ever start having RefSCC passes, we'll want to
          // iterate there too.
          if (UR.UpdatedC)
            LLVM_DEBUG(dbgs()
                       << "Re-running SCC passes after a refinement of the "
                          "current SCC: "
                       << *UR.UpdatedC << "\n");

          // Note that both `C` and `RC` may at this point refer to deleted,
          // invalid SCC and RefSCCs respectively. But we will short circuit
          // the processing when we check them in the loop above.
        } while (UR.UpdatedC);
      } while (!CWorklist.empty());

      // We only need to keep internal inlined edge information within
      // a RefSCC, clear it to save on space and let the next time we visit
      // any of these functions have a fresh start.
      InlinedInternalEdges.clear();
    } while (!RCWorklist.empty());
  }

  // By definition we preserve the call graph, all SCC analyses, and the
  // analysis proxies by handling them above and in any nested pass managers.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();
  PA.preserve<LazyCallGraphAnalysis>();
  PA.preserve<CGSCCAnalysisManagerModuleProxy>();
  PA.preserve<FunctionAnalysisManagerModuleProxy>();
  return PA;
}
361*e8d8bef9SDimitry Andric InlinedInternalEdges.clear(); 362*e8d8bef9SDimitry Andric } while (!RCWorklist.empty()); 363*e8d8bef9SDimitry Andric } 364*e8d8bef9SDimitry Andric 365*e8d8bef9SDimitry Andric // By definition we preserve the call garph, all SCC analyses, and the 366*e8d8bef9SDimitry Andric // analysis proxies by handling them above and in any nested pass managers. 367*e8d8bef9SDimitry Andric PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>(); 368*e8d8bef9SDimitry Andric PA.preserve<LazyCallGraphAnalysis>(); 369*e8d8bef9SDimitry Andric PA.preserve<CGSCCAnalysisManagerModuleProxy>(); 370*e8d8bef9SDimitry Andric PA.preserve<FunctionAnalysisManagerModuleProxy>(); 371*e8d8bef9SDimitry Andric return PA; 372*e8d8bef9SDimitry Andric } 373*e8d8bef9SDimitry Andric 374*e8d8bef9SDimitry Andric PreservedAnalyses DevirtSCCRepeatedPass::run(LazyCallGraph::SCC &InitialC, 375*e8d8bef9SDimitry Andric CGSCCAnalysisManager &AM, 376*e8d8bef9SDimitry Andric LazyCallGraph &CG, 377*e8d8bef9SDimitry Andric CGSCCUpdateResult &UR) { 378*e8d8bef9SDimitry Andric PreservedAnalyses PA = PreservedAnalyses::all(); 379*e8d8bef9SDimitry Andric PassInstrumentation PI = 380*e8d8bef9SDimitry Andric AM.getResult<PassInstrumentationAnalysis>(InitialC, CG); 381*e8d8bef9SDimitry Andric 382*e8d8bef9SDimitry Andric // The SCC may be refined while we are running passes over it, so set up 383*e8d8bef9SDimitry Andric // a pointer that we can update. 384*e8d8bef9SDimitry Andric LazyCallGraph::SCC *C = &InitialC; 385*e8d8bef9SDimitry Andric 386*e8d8bef9SDimitry Andric // Struct to track the counts of direct and indirect calls in each function 387*e8d8bef9SDimitry Andric // of the SCC. 
388*e8d8bef9SDimitry Andric struct CallCount { 389*e8d8bef9SDimitry Andric int Direct; 390*e8d8bef9SDimitry Andric int Indirect; 391*e8d8bef9SDimitry Andric }; 392*e8d8bef9SDimitry Andric 393*e8d8bef9SDimitry Andric // Put value handles on all of the indirect calls and return the number of 394*e8d8bef9SDimitry Andric // direct calls for each function in the SCC. 395*e8d8bef9SDimitry Andric auto ScanSCC = [](LazyCallGraph::SCC &C, 396*e8d8bef9SDimitry Andric SmallMapVector<Value *, WeakTrackingVH, 16> &CallHandles) { 397*e8d8bef9SDimitry Andric assert(CallHandles.empty() && "Must start with a clear set of handles."); 398*e8d8bef9SDimitry Andric 399*e8d8bef9SDimitry Andric SmallDenseMap<Function *, CallCount> CallCounts; 400*e8d8bef9SDimitry Andric CallCount CountLocal = {0, 0}; 401*e8d8bef9SDimitry Andric for (LazyCallGraph::Node &N : C) { 402*e8d8bef9SDimitry Andric CallCount &Count = 403*e8d8bef9SDimitry Andric CallCounts.insert(std::make_pair(&N.getFunction(), CountLocal)) 404*e8d8bef9SDimitry Andric .first->second; 405*e8d8bef9SDimitry Andric for (Instruction &I : instructions(N.getFunction())) 406*e8d8bef9SDimitry Andric if (auto *CB = dyn_cast<CallBase>(&I)) { 407*e8d8bef9SDimitry Andric if (CB->getCalledFunction()) { 408*e8d8bef9SDimitry Andric ++Count.Direct; 409*e8d8bef9SDimitry Andric } else { 410*e8d8bef9SDimitry Andric ++Count.Indirect; 411*e8d8bef9SDimitry Andric CallHandles.insert({CB, WeakTrackingVH(CB)}); 412*e8d8bef9SDimitry Andric } 413*e8d8bef9SDimitry Andric } 414*e8d8bef9SDimitry Andric } 415*e8d8bef9SDimitry Andric 416*e8d8bef9SDimitry Andric return CallCounts; 417*e8d8bef9SDimitry Andric }; 418*e8d8bef9SDimitry Andric 419*e8d8bef9SDimitry Andric UR.IndirectVHs.clear(); 420*e8d8bef9SDimitry Andric // Populate the initial call handles and get the initial call counts. 
421*e8d8bef9SDimitry Andric auto CallCounts = ScanSCC(*C, UR.IndirectVHs); 422*e8d8bef9SDimitry Andric 423*e8d8bef9SDimitry Andric for (int Iteration = 0;; ++Iteration) { 424*e8d8bef9SDimitry Andric if (!PI.runBeforePass<LazyCallGraph::SCC>(*Pass, *C)) 425*e8d8bef9SDimitry Andric continue; 426*e8d8bef9SDimitry Andric 427*e8d8bef9SDimitry Andric PreservedAnalyses PassPA = Pass->run(*C, AM, CG, UR); 428*e8d8bef9SDimitry Andric 429*e8d8bef9SDimitry Andric if (UR.InvalidatedSCCs.count(C)) 430*e8d8bef9SDimitry Andric PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass, PassPA); 431*e8d8bef9SDimitry Andric else 432*e8d8bef9SDimitry Andric PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C, PassPA); 433*e8d8bef9SDimitry Andric 434*e8d8bef9SDimitry Andric // If the SCC structure has changed, bail immediately and let the outer 435*e8d8bef9SDimitry Andric // CGSCC layer handle any iteration to reflect the refined structure. 436*e8d8bef9SDimitry Andric if (UR.UpdatedC && UR.UpdatedC != C) { 437*e8d8bef9SDimitry Andric PA.intersect(std::move(PassPA)); 438*e8d8bef9SDimitry Andric break; 439*e8d8bef9SDimitry Andric } 440*e8d8bef9SDimitry Andric 441*e8d8bef9SDimitry Andric // Check that we didn't miss any update scenario. 442*e8d8bef9SDimitry Andric assert(!UR.InvalidatedSCCs.count(C) && "Processing an invalid SCC!"); 443*e8d8bef9SDimitry Andric assert(C->begin() != C->end() && "Cannot have an empty SCC!"); 444*e8d8bef9SDimitry Andric 445*e8d8bef9SDimitry Andric // Check whether any of the handles were devirtualized. 
446*e8d8bef9SDimitry Andric bool Devirt = llvm::any_of(UR.IndirectVHs, [](auto &P) -> bool { 447*e8d8bef9SDimitry Andric if (P.second) { 448*e8d8bef9SDimitry Andric if (CallBase *CB = dyn_cast<CallBase>(P.second)) { 449*e8d8bef9SDimitry Andric if (CB->getCalledFunction()) { 450*e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << "Found devirtualized call: " << *CB << "\n"); 451*e8d8bef9SDimitry Andric return true; 452*e8d8bef9SDimitry Andric } 453*e8d8bef9SDimitry Andric } 454*e8d8bef9SDimitry Andric } 455*e8d8bef9SDimitry Andric return false; 456*e8d8bef9SDimitry Andric }); 457*e8d8bef9SDimitry Andric 458*e8d8bef9SDimitry Andric // Rescan to build up a new set of handles and count how many direct 459*e8d8bef9SDimitry Andric // calls remain. If we decide to iterate, this also sets up the input to 460*e8d8bef9SDimitry Andric // the next iteration. 461*e8d8bef9SDimitry Andric UR.IndirectVHs.clear(); 462*e8d8bef9SDimitry Andric auto NewCallCounts = ScanSCC(*C, UR.IndirectVHs); 463*e8d8bef9SDimitry Andric 464*e8d8bef9SDimitry Andric // If we haven't found an explicit devirtualization already see if we 465*e8d8bef9SDimitry Andric // have decreased the number of indirect calls and increased the number 466*e8d8bef9SDimitry Andric // of direct calls for any function in the SCC. This can be fooled by all 467*e8d8bef9SDimitry Andric // manner of transformations such as DCE and other things, but seems to 468*e8d8bef9SDimitry Andric // work well in practice. 469*e8d8bef9SDimitry Andric if (!Devirt) 470*e8d8bef9SDimitry Andric // Iterate over the keys in NewCallCounts, if Function also exists in 471*e8d8bef9SDimitry Andric // CallCounts, make the check below. 
472*e8d8bef9SDimitry Andric for (auto &Pair : NewCallCounts) { 473*e8d8bef9SDimitry Andric auto &CallCountNew = Pair.second; 474*e8d8bef9SDimitry Andric auto CountIt = CallCounts.find(Pair.first); 475*e8d8bef9SDimitry Andric if (CountIt != CallCounts.end()) { 476*e8d8bef9SDimitry Andric const auto &CallCountOld = CountIt->second; 477*e8d8bef9SDimitry Andric if (CallCountOld.Indirect > CallCountNew.Indirect && 478*e8d8bef9SDimitry Andric CallCountOld.Direct < CallCountNew.Direct) { 479*e8d8bef9SDimitry Andric Devirt = true; 480*e8d8bef9SDimitry Andric break; 481*e8d8bef9SDimitry Andric } 482*e8d8bef9SDimitry Andric } 483*e8d8bef9SDimitry Andric } 484*e8d8bef9SDimitry Andric 485*e8d8bef9SDimitry Andric if (!Devirt) { 486*e8d8bef9SDimitry Andric PA.intersect(std::move(PassPA)); 487*e8d8bef9SDimitry Andric break; 488*e8d8bef9SDimitry Andric } 489*e8d8bef9SDimitry Andric 490*e8d8bef9SDimitry Andric // Otherwise, if we've already hit our max, we're done. 491*e8d8bef9SDimitry Andric if (Iteration >= MaxIterations) { 492*e8d8bef9SDimitry Andric if (AbortOnMaxDevirtIterationsReached) 493*e8d8bef9SDimitry Andric report_fatal_error("Max devirtualization iterations reached"); 494*e8d8bef9SDimitry Andric LLVM_DEBUG( 495*e8d8bef9SDimitry Andric dbgs() << "Found another devirtualization after hitting the max " 496*e8d8bef9SDimitry Andric "number of repetitions (" 497*e8d8bef9SDimitry Andric << MaxIterations << ") on SCC: " << *C << "\n"); 498*e8d8bef9SDimitry Andric PA.intersect(std::move(PassPA)); 499*e8d8bef9SDimitry Andric break; 500*e8d8bef9SDimitry Andric } 501*e8d8bef9SDimitry Andric 502*e8d8bef9SDimitry Andric LLVM_DEBUG( 503*e8d8bef9SDimitry Andric dbgs() << "Repeating an SCC pass after finding a devirtualization in: " 504*e8d8bef9SDimitry Andric << *C << "\n"); 505*e8d8bef9SDimitry Andric 506*e8d8bef9SDimitry Andric // Move over the new call counts in preparation for iterating. 
    // Adopt the freshly computed call counts as the baseline for the next
    // devirtualization-detection iteration.
    CallCounts = std::move(NewCallCounts);

    // Update the analysis manager with each run and intersect the total set
    // of preserved analyses so we're ready to iterate.
    AM.invalidate(*C, PassPA);

    PA.intersect(std::move(PassPA));
  }

  // Note that we don't add any preserved entries here unlike a more normal
  // "pass manager" because we only handle invalidation *between* iterations,
  // not after the last iteration.
  return PA;
}

/// Run the wrapped function pass over each function in the SCC \p C,
/// incrementally invalidating function analyses after each function is
/// transformed and refreshing the call graph (and the current SCC pointer)
/// whenever the pass does not preserve the LazyCallGraphAnalysis.
PreservedAnalyses CGSCCToFunctionPassAdaptor::run(LazyCallGraph::SCC &C,
                                                  CGSCCAnalysisManager &AM,
                                                  LazyCallGraph &CG,
                                                  CGSCCUpdateResult &UR) {
  // Setup the function analysis manager from its proxy.
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();

  // Snapshot the nodes up front: running the pass may split the SCC, and we
  // must not iterate the SCC container while that happens.
  SmallVector<LazyCallGraph::Node *, 4> Nodes;
  for (LazyCallGraph::Node &N : C)
    Nodes.push_back(&N);

  // The SCC may get split while we are optimizing functions due to deleting
  // edges. If this happens, the current SCC can shift, so keep track of
  // a pointer we can overwrite.
  LazyCallGraph::SCC *CurrentC = &C;

  LLVM_DEBUG(dbgs() << "Running function passes across an SCC: " << C << "\n");

  PreservedAnalyses PA = PreservedAnalyses::all();
  for (LazyCallGraph::Node *N : Nodes) {
    // Skip nodes from other SCCs. These may have been split out during
    // processing. We'll eventually visit those SCCs and pick up the nodes
    // there.
    if (CG.lookupSCC(*N) != CurrentC)
      continue;

    Function &F = N->getFunction();

    // Honor pass-instrumentation callbacks (e.g. -print-before, opt-bisect):
    // the instrumentation may veto running the pass on this function.
    PassInstrumentation PI = FAM.getResult<PassInstrumentationAnalysis>(F);
    if (!PI.runBeforePass<Function>(*Pass, F))
      continue;

    PreservedAnalyses PassPA;
    {
      TimeTraceScope TimeScope(Pass->name());
      PassPA = Pass->run(F, FAM);
    }

    PI.runAfterPass<Function>(*Pass, F, PassPA);

    // We know that the function pass couldn't have invalidated any other
    // function's analyses (that's the contract of a function pass), so
    // directly handle the function analysis manager's invalidation here.
    FAM.invalidate(F, PassPA);

    // Then intersect the preserved set so that invalidation of module
    // analyses will eventually occur when the module pass completes.
    PA.intersect(std::move(PassPA));

    // If the call graph hasn't been preserved, update it based on this
    // function pass. This may also update the current SCC to point to
    // a smaller, more refined SCC.
    auto PAC = PA.getChecker<LazyCallGraphAnalysis>();
    if (!PAC.preserved() && !PAC.preservedSet<AllAnalysesOn<Module>>()) {
      CurrentC = &updateCGAndAnalysisManagerForFunctionPass(CG, *CurrentC, *N,
                                                            AM, UR, FAM);
      assert(
          CG.lookupSCC(*N) == CurrentC &&
          "Current SCC not updated to the SCC containing the current node!");
    }
  }

  // By definition we preserve the proxy. And we preserve all analyses on
  // Functions. This precludes *any* invalidation of function analyses by the
  // proxy, but that's OK because we've taken care to invalidate analyses in
  // the function analysis manager incrementally above.
  PA.preserveSet<AllAnalysesOn<Function>>();
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();

  // We've also ensured that we updated the call graph along the way.
  PA.preserve<LazyCallGraphAnalysis>();

  return PA;
}

/// Invalidation entry point for the module -> CGSCC analysis-manager proxy:
/// decides whether the entire SCC analysis layer must be cleared, and
/// otherwise propagates a (possibly pruned) preserved-analyses set down to
/// each SCC's cached analyses.
bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes and so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // And the proxy itself should be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // Ok, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      // Optional custom preserved-set for this SCC; only materialized when a
      // deferred (outer) invalidation forces us to abandon specific analyses.
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so we'll need to run the inner
      // invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set
      // didn't preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

// Static identity for this analysis, used by the analysis-manager caches.
AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Note: unconditionally getting checking that the proxy exists may get it at
  // this point. There are cases when this is being run unnecessarily, but
  // it is cheap and having the assertion in place is more valuable.
  auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
  // An SCC is never empty, so the first node's function gives us the module.
  Module &M = *C.begin()->getFunction().getParent();
  bool ProxyExists =
      MAMProxy.cachedResultExists<FunctionAnalysisManagerModuleProxy>(M);
  assert(ProxyExists &&
         "The CGSCC pass manager requires that the FAM module proxy is run "
         "on the module prior to entering the CGSCC walk");
  (void)ProxyExists; // Only used by the assertion above.

  // We just return an empty result. The caller will use the updateFAM
  // interface to correctly register the relevant FunctionAnalysisManager
  // based on the context in which this proxy is run.
  return Result();
}

/// Invalidation for the CGSCC -> function analysis-manager proxy: either
/// clears all cached function analyses for the SCC's functions (when the
/// proxy itself is not preserved) or forwards invalidation per function.
bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // All updates to preserve valid results are done below, so we don't need to
  // invalidate this proxy.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    // The proxy was not preserved: drop every cached function analysis for
    // this SCC's functions wholesale.
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction(), N.getFunction().getName());

    return false;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    // Optional custom preserved-set for this function; only materialized when
    // a deferred (outer) invalidation forces us to abandon specific analyses.
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}

} // end namespace llvm

/// When a new SCC is created for the graph we first update the
/// FunctionAnalysisManager in the Proxy's result.
/// As there might be function analysis results cached for the functions now in
/// that SCC, two forms of updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM,
                                         FunctionAnalysisManager &FAM) {
  // Get (creating if needed) this SCC's FAM proxy and point it at the FAM.
  AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).updateFAM(FAM);

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  // Nothing was split off: the current SCC is unchanged.
  if (NewSCCRange.empty())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  FunctionAnalysisManager *FAM = nullptr;
  if (auto *FAMProxy =
          AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC))
    FAM = &FAMProxy->getManager();

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalysis from the CG updater so that if
  // there are preserved analysis we can avoid invalidating them here for
  // split-off SCCs.
  // We know however that this will preserve any FAM proxy so go ahead and mark
  // that.
  PreservedAnalyses PA;
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (FAM)
    updateNewSCCFunctionAnalyses(*C, G, AM, *FAM);

  // Enqueue the remaining new SCCs in reverse postorder (skipping the first,
  // which became the current SCC above) and give each the same treatment.
  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (FAM)
      updateNewSCCFunctionAnalyses(NewC, G, AM, *FAM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}

static LazyCallGraph::SCC &updateCGAndAnalysisManagerForPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM, bool FunctionPass) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
9090b57cec5SDimitry Andric SmallVector<Constant *, 16> Worklist; 9100b57cec5SDimitry Andric SmallPtrSet<Constant *, 16> Visited; 9110b57cec5SDimitry Andric SmallPtrSet<Node *, 16> RetainedEdges; 9120b57cec5SDimitry Andric SmallSetVector<Node *, 4> PromotedRefTargets; 9130b57cec5SDimitry Andric SmallSetVector<Node *, 4> DemotedCallTargets; 9145ffd83dbSDimitry Andric SmallSetVector<Node *, 4> NewCallEdges; 9155ffd83dbSDimitry Andric SmallSetVector<Node *, 4> NewRefEdges; 9160b57cec5SDimitry Andric 9170b57cec5SDimitry Andric // First walk the function and handle all called functions. We do this first 9180b57cec5SDimitry Andric // because if there is a single call edge, whether there are ref edges is 9190b57cec5SDimitry Andric // irrelevant. 920*e8d8bef9SDimitry Andric for (Instruction &I : instructions(F)) { 921*e8d8bef9SDimitry Andric if (auto *CB = dyn_cast<CallBase>(&I)) { 922*e8d8bef9SDimitry Andric if (Function *Callee = CB->getCalledFunction()) { 9230b57cec5SDimitry Andric if (Visited.insert(Callee).second && !Callee->isDeclaration()) { 924*e8d8bef9SDimitry Andric Node *CalleeN = G.lookup(*Callee); 925*e8d8bef9SDimitry Andric assert(CalleeN && 926*e8d8bef9SDimitry Andric "Visited function should already have an associated node"); 927*e8d8bef9SDimitry Andric Edge *E = N->lookup(*CalleeN); 9285ffd83dbSDimitry Andric assert((E || !FunctionPass) && 9295ffd83dbSDimitry Andric "No function transformations should introduce *new* " 9300b57cec5SDimitry Andric "call edges! 
Any new calls should be modeled as " 9310b57cec5SDimitry Andric "promoted existing ref edges!"); 932*e8d8bef9SDimitry Andric bool Inserted = RetainedEdges.insert(CalleeN).second; 9330b57cec5SDimitry Andric (void)Inserted; 9340b57cec5SDimitry Andric assert(Inserted && "We should never visit a function twice."); 9355ffd83dbSDimitry Andric if (!E) 936*e8d8bef9SDimitry Andric NewCallEdges.insert(CalleeN); 9375ffd83dbSDimitry Andric else if (!E->isCall()) 938*e8d8bef9SDimitry Andric PromotedRefTargets.insert(CalleeN); 939*e8d8bef9SDimitry Andric } 940*e8d8bef9SDimitry Andric } else { 941*e8d8bef9SDimitry Andric // We can miss devirtualization if an indirect call is created then 942*e8d8bef9SDimitry Andric // promoted before updateCGAndAnalysisManagerForPass runs. 943*e8d8bef9SDimitry Andric auto *Entry = UR.IndirectVHs.find(CB); 944*e8d8bef9SDimitry Andric if (Entry == UR.IndirectVHs.end()) 945*e8d8bef9SDimitry Andric UR.IndirectVHs.insert({CB, WeakTrackingVH(CB)}); 946*e8d8bef9SDimitry Andric else if (!Entry->second) 947*e8d8bef9SDimitry Andric Entry->second = WeakTrackingVH(CB); 948*e8d8bef9SDimitry Andric } 949*e8d8bef9SDimitry Andric } 9500b57cec5SDimitry Andric } 9510b57cec5SDimitry Andric 9520b57cec5SDimitry Andric // Now walk all references. 
9530b57cec5SDimitry Andric for (Instruction &I : instructions(F)) 9540b57cec5SDimitry Andric for (Value *Op : I.operand_values()) 955*e8d8bef9SDimitry Andric if (auto *OpC = dyn_cast<Constant>(Op)) 956*e8d8bef9SDimitry Andric if (Visited.insert(OpC).second) 957*e8d8bef9SDimitry Andric Worklist.push_back(OpC); 9580b57cec5SDimitry Andric 9590b57cec5SDimitry Andric auto VisitRef = [&](Function &Referee) { 960*e8d8bef9SDimitry Andric Node *RefereeN = G.lookup(Referee); 961*e8d8bef9SDimitry Andric assert(RefereeN && 962*e8d8bef9SDimitry Andric "Visited function should already have an associated node"); 963*e8d8bef9SDimitry Andric Edge *E = N->lookup(*RefereeN); 9645ffd83dbSDimitry Andric assert((E || !FunctionPass) && 9655ffd83dbSDimitry Andric "No function transformations should introduce *new* ref " 9660b57cec5SDimitry Andric "edges! Any new ref edges would require IPO which " 9670b57cec5SDimitry Andric "function passes aren't allowed to do!"); 968*e8d8bef9SDimitry Andric bool Inserted = RetainedEdges.insert(RefereeN).second; 9690b57cec5SDimitry Andric (void)Inserted; 9700b57cec5SDimitry Andric assert(Inserted && "We should never visit a function twice."); 9715ffd83dbSDimitry Andric if (!E) 972*e8d8bef9SDimitry Andric NewRefEdges.insert(RefereeN); 9735ffd83dbSDimitry Andric else if (E->isCall()) 974*e8d8bef9SDimitry Andric DemotedCallTargets.insert(RefereeN); 9750b57cec5SDimitry Andric }; 9760b57cec5SDimitry Andric LazyCallGraph::visitReferences(Worklist, Visited, VisitRef); 9770b57cec5SDimitry Andric 9785ffd83dbSDimitry Andric // Handle new ref edges. 9795ffd83dbSDimitry Andric for (Node *RefTarget : NewRefEdges) { 9805ffd83dbSDimitry Andric SCC &TargetC = *G.lookupSCC(*RefTarget); 9815ffd83dbSDimitry Andric RefSCC &TargetRC = TargetC.getOuterRefSCC(); 9825ffd83dbSDimitry Andric (void)TargetRC; 9835ffd83dbSDimitry Andric // TODO: This only allows trivial edges to be added for now. 
9845ffd83dbSDimitry Andric assert((RC == &TargetRC || 9855ffd83dbSDimitry Andric RC->isAncestorOf(TargetRC)) && "New ref edge is not trivial!"); 9865ffd83dbSDimitry Andric RC->insertTrivialRefEdge(N, *RefTarget); 9875ffd83dbSDimitry Andric } 9885ffd83dbSDimitry Andric 9895ffd83dbSDimitry Andric // Handle new call edges. 9905ffd83dbSDimitry Andric for (Node *CallTarget : NewCallEdges) { 9915ffd83dbSDimitry Andric SCC &TargetC = *G.lookupSCC(*CallTarget); 9925ffd83dbSDimitry Andric RefSCC &TargetRC = TargetC.getOuterRefSCC(); 9935ffd83dbSDimitry Andric (void)TargetRC; 9945ffd83dbSDimitry Andric // TODO: This only allows trivial edges to be added for now. 9955ffd83dbSDimitry Andric assert((RC == &TargetRC || 9965ffd83dbSDimitry Andric RC->isAncestorOf(TargetRC)) && "New call edge is not trivial!"); 997*e8d8bef9SDimitry Andric // Add a trivial ref edge to be promoted later on alongside 998*e8d8bef9SDimitry Andric // PromotedRefTargets. 999*e8d8bef9SDimitry Andric RC->insertTrivialRefEdge(N, *CallTarget); 10005ffd83dbSDimitry Andric } 10015ffd83dbSDimitry Andric 10020b57cec5SDimitry Andric // Include synthetic reference edges to known, defined lib functions. 1003*e8d8bef9SDimitry Andric for (auto *LibFn : G.getLibFunctions()) 10040b57cec5SDimitry Andric // While the list of lib functions doesn't have repeats, don't re-visit 10050b57cec5SDimitry Andric // anything handled above. 1006*e8d8bef9SDimitry Andric if (!Visited.count(LibFn)) 1007*e8d8bef9SDimitry Andric VisitRef(*LibFn); 10080b57cec5SDimitry Andric 10090b57cec5SDimitry Andric // First remove all of the edges that are no longer present in this function. 10100b57cec5SDimitry Andric // The first step makes these edges uniformly ref edges and accumulates them 10110b57cec5SDimitry Andric // into a separate data structure so removal doesn't invalidate anything. 
10120b57cec5SDimitry Andric SmallVector<Node *, 4> DeadTargets; 10130b57cec5SDimitry Andric for (Edge &E : *N) { 10140b57cec5SDimitry Andric if (RetainedEdges.count(&E.getNode())) 10150b57cec5SDimitry Andric continue; 10160b57cec5SDimitry Andric 10170b57cec5SDimitry Andric SCC &TargetC = *G.lookupSCC(E.getNode()); 10180b57cec5SDimitry Andric RefSCC &TargetRC = TargetC.getOuterRefSCC(); 10190b57cec5SDimitry Andric if (&TargetRC == RC && E.isCall()) { 10200b57cec5SDimitry Andric if (C != &TargetC) { 10210b57cec5SDimitry Andric // For separate SCCs this is trivial. 10220b57cec5SDimitry Andric RC->switchTrivialInternalEdgeToRef(N, E.getNode()); 10230b57cec5SDimitry Andric } else { 10240b57cec5SDimitry Andric // Now update the call graph. 10250b57cec5SDimitry Andric C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()), 10260b57cec5SDimitry Andric G, N, C, AM, UR); 10270b57cec5SDimitry Andric } 10280b57cec5SDimitry Andric } 10290b57cec5SDimitry Andric 10300b57cec5SDimitry Andric // Now that this is ready for actual removal, put it into our list. 10310b57cec5SDimitry Andric DeadTargets.push_back(&E.getNode()); 10320b57cec5SDimitry Andric } 10330b57cec5SDimitry Andric // Remove the easy cases quickly and actually pull them out of our list. 1034*e8d8bef9SDimitry Andric llvm::erase_if(DeadTargets, [&](Node *TargetN) { 10350b57cec5SDimitry Andric SCC &TargetC = *G.lookupSCC(*TargetN); 10360b57cec5SDimitry Andric RefSCC &TargetRC = TargetC.getOuterRefSCC(); 10370b57cec5SDimitry Andric 10380b57cec5SDimitry Andric // We can't trivially remove internal targets, so skip 10390b57cec5SDimitry Andric // those. 
10400b57cec5SDimitry Andric if (&TargetRC == RC) 10410b57cec5SDimitry Andric return false; 10420b57cec5SDimitry Andric 10430b57cec5SDimitry Andric RC->removeOutgoingEdge(N, *TargetN); 1044*e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '" << N << "' to '" 1045*e8d8bef9SDimitry Andric << TargetN << "'\n"); 10460b57cec5SDimitry Andric return true; 1047*e8d8bef9SDimitry Andric }); 10480b57cec5SDimitry Andric 10490b57cec5SDimitry Andric // Now do a batch removal of the internal ref edges left. 10500b57cec5SDimitry Andric auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets); 10510b57cec5SDimitry Andric if (!NewRefSCCs.empty()) { 10520b57cec5SDimitry Andric // The old RefSCC is dead, mark it as such. 10530b57cec5SDimitry Andric UR.InvalidatedRefSCCs.insert(RC); 10540b57cec5SDimitry Andric 10550b57cec5SDimitry Andric // Note that we don't bother to invalidate analyses as ref-edge 10560b57cec5SDimitry Andric // connectivity is not really observable in any way and is intended 10570b57cec5SDimitry Andric // exclusively to be used for ordering of transforms rather than for 10580b57cec5SDimitry Andric // analysis conclusions. 10590b57cec5SDimitry Andric 10600b57cec5SDimitry Andric // Update RC to the "bottom". 10610b57cec5SDimitry Andric assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!"); 10620b57cec5SDimitry Andric RC = &C->getOuterRefSCC(); 10630b57cec5SDimitry Andric assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!"); 10640b57cec5SDimitry Andric 10650b57cec5SDimitry Andric // The RC worklist is in reverse postorder, so we enqueue the new ones in 10660b57cec5SDimitry Andric // RPO except for the one which contains the source node as that is the 10670b57cec5SDimitry Andric // "bottom" we will continue processing in the bottom-up walk. 
10680b57cec5SDimitry Andric assert(NewRefSCCs.front() == RC && 10690b57cec5SDimitry Andric "New current RefSCC not first in the returned list!"); 10700b57cec5SDimitry Andric for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()), 10710b57cec5SDimitry Andric NewRefSCCs.end()))) { 10720b57cec5SDimitry Andric assert(NewRC != RC && "Should not encounter the current RefSCC further " 10730b57cec5SDimitry Andric "in the postorder list of new RefSCCs."); 10740b57cec5SDimitry Andric UR.RCWorklist.insert(NewRC); 10750b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: " 10760b57cec5SDimitry Andric << *NewRC << "\n"); 10770b57cec5SDimitry Andric } 10780b57cec5SDimitry Andric } 10790b57cec5SDimitry Andric 10800b57cec5SDimitry Andric // Next demote all the call edges that are now ref edges. This helps make 10810b57cec5SDimitry Andric // the SCCs small which should minimize the work below as we don't want to 10820b57cec5SDimitry Andric // form cycles that this would break. 10830b57cec5SDimitry Andric for (Node *RefTarget : DemotedCallTargets) { 10840b57cec5SDimitry Andric SCC &TargetC = *G.lookupSCC(*RefTarget); 10850b57cec5SDimitry Andric RefSCC &TargetRC = TargetC.getOuterRefSCC(); 10860b57cec5SDimitry Andric 10870b57cec5SDimitry Andric // The easy case is when the target RefSCC is not this RefSCC. This is 10880b57cec5SDimitry Andric // only supported when the target RefSCC is a child of this RefSCC. 
10890b57cec5SDimitry Andric if (&TargetRC != RC) { 10900b57cec5SDimitry Andric assert(RC->isAncestorOf(TargetRC) && 10910b57cec5SDimitry Andric "Cannot potentially form RefSCC cycles here!"); 10920b57cec5SDimitry Andric RC->switchOutgoingEdgeToRef(N, *RefTarget); 10930b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N 10940b57cec5SDimitry Andric << "' to '" << *RefTarget << "'\n"); 10950b57cec5SDimitry Andric continue; 10960b57cec5SDimitry Andric } 10970b57cec5SDimitry Andric 10980b57cec5SDimitry Andric // We are switching an internal call edge to a ref edge. This may split up 10990b57cec5SDimitry Andric // some SCCs. 11000b57cec5SDimitry Andric if (C != &TargetC) { 11010b57cec5SDimitry Andric // For separate SCCs this is trivial. 11020b57cec5SDimitry Andric RC->switchTrivialInternalEdgeToRef(N, *RefTarget); 11030b57cec5SDimitry Andric continue; 11040b57cec5SDimitry Andric } 11050b57cec5SDimitry Andric 11060b57cec5SDimitry Andric // Now update the call graph. 11070b57cec5SDimitry Andric C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N, 11080b57cec5SDimitry Andric C, AM, UR); 11090b57cec5SDimitry Andric } 11100b57cec5SDimitry Andric 1111*e8d8bef9SDimitry Andric // We added a ref edge earlier for new call edges, promote those to call edges 1112*e8d8bef9SDimitry Andric // alongside PromotedRefTargets. 1113*e8d8bef9SDimitry Andric for (Node *E : NewCallEdges) 1114*e8d8bef9SDimitry Andric PromotedRefTargets.insert(E); 1115*e8d8bef9SDimitry Andric 11160b57cec5SDimitry Andric // Now promote ref edges into call edges. 11170b57cec5SDimitry Andric for (Node *CallTarget : PromotedRefTargets) { 11180b57cec5SDimitry Andric SCC &TargetC = *G.lookupSCC(*CallTarget); 11190b57cec5SDimitry Andric RefSCC &TargetRC = TargetC.getOuterRefSCC(); 11200b57cec5SDimitry Andric 11210b57cec5SDimitry Andric // The easy case is when the target RefSCC is not this RefSCC. 
This is 11220b57cec5SDimitry Andric // only supported when the target RefSCC is a child of this RefSCC. 11230b57cec5SDimitry Andric if (&TargetRC != RC) { 11240b57cec5SDimitry Andric assert(RC->isAncestorOf(TargetRC) && 11250b57cec5SDimitry Andric "Cannot potentially form RefSCC cycles here!"); 11260b57cec5SDimitry Andric RC->switchOutgoingEdgeToCall(N, *CallTarget); 11270b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N 11280b57cec5SDimitry Andric << "' to '" << *CallTarget << "'\n"); 11290b57cec5SDimitry Andric continue; 11300b57cec5SDimitry Andric } 11310b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '" 11320b57cec5SDimitry Andric << N << "' to '" << *CallTarget << "'\n"); 11330b57cec5SDimitry Andric 11340b57cec5SDimitry Andric // Otherwise we are switching an internal ref edge to a call edge. This 11350b57cec5SDimitry Andric // may merge away some SCCs, and we add those to the UpdateResult. We also 11360b57cec5SDimitry Andric // need to make sure to update the worklist in the event SCCs have moved 11370b57cec5SDimitry Andric // before the current one in the post-order sequence 11380b57cec5SDimitry Andric bool HasFunctionAnalysisProxy = false; 11390b57cec5SDimitry Andric auto InitialSCCIndex = RC->find(*C) - RC->begin(); 11400b57cec5SDimitry Andric bool FormedCycle = RC->switchInternalEdgeToCall( 11410b57cec5SDimitry Andric N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) { 11420b57cec5SDimitry Andric for (SCC *MergedC : MergedSCCs) { 11430b57cec5SDimitry Andric assert(MergedC != &TargetC && "Cannot merge away the target SCC!"); 11440b57cec5SDimitry Andric 11450b57cec5SDimitry Andric HasFunctionAnalysisProxy |= 11460b57cec5SDimitry Andric AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>( 11470b57cec5SDimitry Andric *MergedC) != nullptr; 11480b57cec5SDimitry Andric 11490b57cec5SDimitry Andric // Mark that this SCC will no longer be valid. 
11500b57cec5SDimitry Andric UR.InvalidatedSCCs.insert(MergedC); 11510b57cec5SDimitry Andric 11520b57cec5SDimitry Andric // FIXME: We should really do a 'clear' here to forcibly release 11530b57cec5SDimitry Andric // memory, but we don't have a good way of doing that and 11540b57cec5SDimitry Andric // preserving the function analyses. 11550b57cec5SDimitry Andric auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>(); 11560b57cec5SDimitry Andric PA.preserve<FunctionAnalysisManagerCGSCCProxy>(); 11570b57cec5SDimitry Andric AM.invalidate(*MergedC, PA); 11580b57cec5SDimitry Andric } 11590b57cec5SDimitry Andric }); 11600b57cec5SDimitry Andric 11610b57cec5SDimitry Andric // If we formed a cycle by creating this call, we need to update more data 11620b57cec5SDimitry Andric // structures. 11630b57cec5SDimitry Andric if (FormedCycle) { 11640b57cec5SDimitry Andric C = &TargetC; 11650b57cec5SDimitry Andric assert(G.lookupSCC(N) == C && "Failed to update current SCC!"); 11660b57cec5SDimitry Andric 11670b57cec5SDimitry Andric // If one of the invalidated SCCs had a cached proxy to a function 11680b57cec5SDimitry Andric // analysis manager, we need to create a proxy in the new current SCC as 11690b57cec5SDimitry Andric // the invalidated SCCs had their functions moved. 11700b57cec5SDimitry Andric if (HasFunctionAnalysisProxy) 11715ffd83dbSDimitry Andric AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G).updateFAM(FAM); 11720b57cec5SDimitry Andric 11730b57cec5SDimitry Andric // Any analyses cached for this SCC are no longer precise as the shape 11740b57cec5SDimitry Andric // has changed by introducing this cycle. However, we have taken care to 11750b57cec5SDimitry Andric // update the proxies so it remains valide. 
11760b57cec5SDimitry Andric auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>(); 11770b57cec5SDimitry Andric PA.preserve<FunctionAnalysisManagerCGSCCProxy>(); 11780b57cec5SDimitry Andric AM.invalidate(*C, PA); 11790b57cec5SDimitry Andric } 11800b57cec5SDimitry Andric auto NewSCCIndex = RC->find(*C) - RC->begin(); 11810b57cec5SDimitry Andric // If we have actually moved an SCC to be topologically "below" the current 11820b57cec5SDimitry Andric // one due to merging, we will need to revisit the current SCC after 11830b57cec5SDimitry Andric // visiting those moved SCCs. 11840b57cec5SDimitry Andric // 11850b57cec5SDimitry Andric // It is critical that we *do not* revisit the current SCC unless we 11860b57cec5SDimitry Andric // actually move SCCs in the process of merging because otherwise we may 11870b57cec5SDimitry Andric // form a cycle where an SCC is split apart, merged, split, merged and so 11880b57cec5SDimitry Andric // on infinitely. 11890b57cec5SDimitry Andric if (InitialSCCIndex < NewSCCIndex) { 11900b57cec5SDimitry Andric // Put our current SCC back onto the worklist as we'll visit other SCCs 11910b57cec5SDimitry Andric // that are now definitively ordered prior to the current one in the 11920b57cec5SDimitry Andric // post-order sequence, and may end up observing more precise context to 11930b57cec5SDimitry Andric // optimize the current SCC. 11940b57cec5SDimitry Andric UR.CWorklist.insert(C); 11950b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C 11960b57cec5SDimitry Andric << "\n"); 11970b57cec5SDimitry Andric // Enqueue in reverse order as we pop off the back of the worklist. 
11980b57cec5SDimitry Andric for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex, 11990b57cec5SDimitry Andric RC->begin() + NewSCCIndex))) { 12000b57cec5SDimitry Andric UR.CWorklist.insert(&MovedC); 12010b57cec5SDimitry Andric LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: " 12020b57cec5SDimitry Andric << MovedC << "\n"); 12030b57cec5SDimitry Andric } 12040b57cec5SDimitry Andric } 12050b57cec5SDimitry Andric } 12060b57cec5SDimitry Andric 12070b57cec5SDimitry Andric assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!"); 12080b57cec5SDimitry Andric assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!"); 12090b57cec5SDimitry Andric assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!"); 12100b57cec5SDimitry Andric 12110b57cec5SDimitry Andric // Record the current RefSCC and SCC for higher layers of the CGSCC pass 12120b57cec5SDimitry Andric // manager now that all the updates have been applied. 
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  // Return the (possibly new) current SCC so the caller resumes iteration
  // from the correct position in the post-order walk.
  return *C;
}

/// Update the call graph and analysis manager after a *function* pass has run
/// on the function wrapped by \p N.
///
/// Thin wrapper: delegates to updateCGAndAnalysisManagerForPass with
/// FunctionPass = true. A function pass may have rewritten the function body,
/// so call/ref edges must be re-derived from the IR.
///
/// \returns the SCC containing \p N after all graph updates (which may differ
/// from \p InitialC if the pass split or merged SCCs).
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM) {
  return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
                                           /* FunctionPass */ true);
}

/// Update the call graph and analysis manager after a *CGSCC* pass has run on
/// the SCC containing \p N.
///
/// Thin wrapper: delegates to updateCGAndAnalysisManagerForPass with
/// FunctionPass = false. CGSCC passes have stricter constraints on which edge
/// mutations are allowed than function passes (see the shared implementation
/// for the per-edge-kind handling).
///
/// \returns the SCC containing \p N after all graph updates.
LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForCGSCCPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR,
    FunctionAnalysisManager &FAM) {
  return updateCGAndAnalysisManagerForPass(G, InitialC, N, AM, UR, FAM,
                                           /* FunctionPass */ false);
}