//===-- SCCP.cpp ----------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements Interprocedural Sparse Conditional Constant Propagation.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/SCCP.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueLattice.h"
#include "llvm/Analysis/ValueLatticeUtils.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/AttributeMask.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ModRef.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/IPO/FunctionSpecialization.h"
#include "llvm/Transforms/Scalar/SCCP.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/SCCPSolver.h"

using namespace llvm;

#define DEBUG_TYPE "sccp"

STATISTIC(NumInstRemoved, "Number of instructions removed");
STATISTIC(NumArgsElimed, "Number of arguments constant propagated");
STATISTIC(NumGlobalConst, "Number of globals found to be constant");
STATISTIC(NumDeadBlocks, "Number of basic blocks unreachable");
STATISTIC(NumInstReplaced,
          "Number of instructions replaced with (simpler) instruction");

static cl::opt<unsigned> FuncSpecMaxIters(
    "funcspec-max-iters", cl::init(1), cl::Hidden, cl::desc(
    "The maximum number of iterations function specialization is run"));

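// Collect the return instructions of F whose operand can be replaced with
// undef. This requires that the solver knows every caller of F and that no
// call site needs the original return value (e.g. musttail calls).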
static void findReturnsToZap(Function &F,
                             SmallVector<ReturnInst *, 8> &ReturnsToZap,
                             SCCPSolver &Solver) {
  // We can only do this if we know that nothing else can call the function.
  if (!Solver.isArgumentTrackedFunction(&F))
    return;

  if (Solver.mustPreserveReturn(&F)) {
    LLVM_DEBUG(
        dbgs()
        << "Can't zap returns of the function: " << F.getName()
        << " due to the presence of a musttail or \"clang.arc.attachedcall\" "
           "call of it\n");
    return;
  }

  assert(
      all_of(F.users(),
             [&Solver](User *U) {
               if (isa<Instruction>(U) &&
                   !Solver.isBlockExecutable(cast<Instruction>(U)->getParent()))
                 return true;
               // Non-callsite uses are not impacted by zapping. Also, constant
               // uses (like blockaddresses) can stick around without being
               // used in the underlying IR, meaning we do not have lattice
               // values for them.
               if (!isa<CallBase>(U))
                 return true;
               if (U->getType()->isStructTy()) {
                 return all_of(Solver.getStructLatticeValueFor(U),
                               [](const ValueLatticeElement &LV) {
                                 return !SCCPSolver::isOverdefined(LV);
                               });
               }

               // We don't consider assume-like intrinsics to be actual address
               // captures.
               if (auto *II = dyn_cast<IntrinsicInst>(U)) {
                 if (II->isAssumeLikeIntrinsic())
                   return true;
               }

               return !SCCPSolver::isOverdefined(Solver.getLatticeValueFor(U));
             }) &&
      "We can only zap functions where all live users have a concrete value");

  for (BasicBlock &BB : F) {
    if (CallInst *CI = BB.getTerminatingMustTailCall()) {
      LLVM_DEBUG(dbgs() << "Can't zap return of the block due to present "
                        << "musttail call: " << *CI << "\n");
      (void)CI;
      return;
    }

    if (auto *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
      if (!isa<UndefValue>(RI->getOperand(0)))
        ReturnsToZap.push_back(RI);
  }
}

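// Run interprocedural sparse conditional constant propagation (and, if
// enabled, function specialization) over module M. Returns true if the module
// was changed.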
static bool runIPSCCP(
    Module &M, const DataLayout &DL, FunctionAnalysisManager *FAM,
    std::function<const TargetLibraryInfo &(Function &)> GetTLI,
    std::function<TargetTransformInfo &(Function &)> GetTTI,
    std::function<AssumptionCache &(Function &)> GetAC,
    std::function<DominatorTree &(Function &)> GetDT,
    std::function<BlockFrequencyInfo &(Function &)> GetBFI,
    bool IsFuncSpecEnabled) {
  SCCPSolver Solver(DL, GetTLI, M.getContext());
  FunctionSpecializer Specializer(Solver, M, FAM, GetBFI, GetTLI, GetTTI,
                                  GetAC);

  // Loop over all functions, marking arguments to those with their addresses
  // taken or that are external as overdefined.
  for (Function &F : M) {
    if (F.isDeclaration())
      continue;

    DominatorTree &DT = GetDT(F);
    AssumptionCache &AC = GetAC(F);
    Solver.addPredicateInfo(F, DT, AC);

    // Determine if we can track the function's return values. If so, add the
    // function to the solver's set of return-tracked functions.
    if (canTrackReturnsInterprocedurally(&F))
      Solver.addTrackedFunction(&F);

    // Determine if we can track the function's arguments. If so, add the
    // function to the solver's set of argument-tracked functions.
    if (canTrackArgumentsInterprocedurally(&F)) {
      Solver.addArgumentTrackedFunction(&F);
      continue;
    }

    // Assume the function is called.
    Solver.markBlockExecutable(&F.front());

    // Assume nothing about the incoming arguments.
    for (Argument &AI : F.args())
      Solver.markOverdefined(&AI);
  }

  // Determine if we can track any of the module's global variables. If so, add
  // the global variables we can track to the solver's set of tracked global
  // variables.
  for (GlobalVariable &G : M.globals()) {
    G.removeDeadConstantUsers();
    if (canTrackGlobalVariableInterprocedurally(&G))
      Solver.trackValueOfGlobalVariable(&G);
  }

  // Solve for constants.
  Solver.solveWhileResolvedUndefsIn(M);

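  // If requested, run the function specializer now that the solver has reached
  // a fixpoint, repeating up to FuncSpecMaxIters times while it keeps creating
  // specializations.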
  if (IsFuncSpecEnabled) {
    unsigned Iters = 0;
    while (Iters++ < FuncSpecMaxIters && Specializer.run());
  }

  // Iterate over all of the instructions in the module, replacing them with
  // constants if we have found them to be of constant values.
  bool MadeChanges = false;
  for (Function &F : M) {
    if (F.isDeclaration())
      continue;

    SmallVector<BasicBlock *, 512> BlocksToErase;

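    // If the entry block is executable, replace arguments that the solver
    // proved constant, remembering whether any replaced argument was a
    // pointer.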
    if (Solver.isBlockExecutable(&F.front())) {
      bool ReplacedPointerArg = false;
      for (Argument &Arg : F.args()) {
        if (!Arg.use_empty() && Solver.tryToReplaceWithConstant(&Arg)) {
          ReplacedPointerArg |= Arg.getType()->isPointerTy();
          ++NumArgsElimed;
        }
      }

      // If we replaced an argument, we may now also access a global (currently
      // classified as "other" memory). Update memory attribute to reflect this.
      if (ReplacedPointerArg) {
        auto UpdateAttrs = [&](AttributeList AL) {
          MemoryEffects ME = AL.getMemoryEffects();
          if (ME == MemoryEffects::unknown())
            return AL;

          ME |= MemoryEffects(IRMemLocation::Other,
                              ME.getModRef(IRMemLocation::ArgMem));
          return AL.addFnAttribute(
              F.getContext(),
              Attribute::getWithMemoryEffects(F.getContext(), ME));
        };

        F.setAttributes(UpdateAttrs(F.getAttributes()));
        for (User *U : F.users()) {
          auto *CB = dyn_cast<CallBase>(U);
          if (!CB || CB->getCalledFunction() != &F)
            continue;

          CB->setAttributes(UpdateAttrs(CB->getAttributes()));
        }
      }
      MadeChanges |= ReplacedPointerArg;
    }

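    // Simplify instructions in executable blocks and collect the dead blocks
    // (other than the entry block) so they can be turned into unreachable and
    // erased below.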
    SmallPtrSet<Value *, 32> InsertedValues;
    for (BasicBlock &BB : F) {
      if (!Solver.isBlockExecutable(&BB)) {
        LLVM_DEBUG(dbgs() << "  BasicBlock Dead:" << BB);
        ++NumDeadBlocks;

        MadeChanges = true;

        if (&BB != &F.front())
          BlocksToErase.push_back(&BB);
        continue;
      }

      MadeChanges |= Solver.simplifyInstsInBlock(
          BB, InsertedValues, NumInstRemoved, NumInstReplaced);
    }

    DominatorTree *DT = FAM->getCachedResult<DominatorTreeAnalysis>(F);
    PostDominatorTree *PDT = FAM->getCachedResult<PostDominatorTreeAnalysis>(F);
    DomTreeUpdater DTU(DT, PDT, DomTreeUpdater::UpdateStrategy::Lazy);
    // Change dead blocks to unreachable. We do it after replacing constants
    // in all executable blocks, because changeToUnreachable may remove PHI
    // nodes in executable blocks we found values for. The function's entry
    // block is not part of BlocksToErase, so we have to handle it separately.
    for (BasicBlock *BB : BlocksToErase) {
      NumInstRemoved += changeToUnreachable(BB->getFirstNonPHI(),
                                            /*PreserveLCSSA=*/false, &DTU);
    }
    if (!Solver.isBlockExecutable(&F.front()))
      NumInstRemoved += changeToUnreachable(F.front().getFirstNonPHI(),
                                            /*PreserveLCSSA=*/false, &DTU);

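    // Rewrite terminators to remove CFG edges the solver proved infeasible,
    // redirecting them to a shared unreachable block where necessary.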
    BasicBlock *NewUnreachableBB = nullptr;
    for (BasicBlock &BB : F)
      MadeChanges |= Solver.removeNonFeasibleEdges(&BB, DTU, NewUnreachableBB);

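    // Now that no feasible edges lead into them, erase the dead blocks unless
    // their address is taken.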
    for (BasicBlock *DeadBB : BlocksToErase)
      if (!DeadBB->hasAddressTaken())
        DTU.deleteBB(DeadBB);

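    // PredicateInfo inserted ssa.copy intrinsics so the solver could attach
    // predicate information to values; they are no longer needed, so replace
    // each copy with its original operand and erase it.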
    for (BasicBlock &BB : F) {
      for (Instruction &Inst : llvm::make_early_inc_range(BB)) {
        if (Solver.getPredicateInfoFor(&Inst)) {
          if (auto *II = dyn_cast<IntrinsicInst>(&Inst)) {
            if (II->getIntrinsicID() == Intrinsic::ssa_copy) {
              Value *Op = II->getOperand(0);
              Inst.replaceAllUsesWith(Op);
              Inst.eraseFromParent();
            }
          }
        }
      }
    }
  }

  // If we inferred constant or undef return values for a function, we replaced
  // all call uses with the inferred value.  This means we don't need to bother
  // actually returning anything from the function.  Replace all return
  // instructions with return undef.
  //
  // Do this in two stages: first identify the functions we should process, then
  // actually zap their returns.  This is important because we can only do this
  // if the address of the function isn't taken.  In cases where a return is the
  // last use of a function, the order of processing functions would affect
  // whether other functions are optimizable.
  SmallVector<ReturnInst*, 8> ReturnsToZap;

  for (const auto &I : Solver.getTrackedRetVals()) {
    Function *F = I.first;
    const ValueLatticeElement &ReturnValue = I.second;

    // If there is a known constant range for the return value, add !range
    // metadata to the function's call sites.
    if (ReturnValue.isConstantRange() &&
        !ReturnValue.getConstantRange().isSingleElement()) {
      // Do not add range metadata if the return value may include undef.
      if (ReturnValue.isConstantRangeIncludingUndef())
        continue;

      auto &CR = ReturnValue.getConstantRange();
      for (User *User : F->users()) {
        auto *CB = dyn_cast<CallBase>(User);
        if (!CB || CB->getCalledFunction() != F)
          continue;

        // Do not touch existing metadata for now.
        // TODO: We should be able to take the intersection of the existing
        // metadata and the inferred range.
        if (CB->getMetadata(LLVMContext::MD_range))
          continue;

        LLVMContext &Context = CB->getParent()->getContext();
        Metadata *RangeMD[] = {
            ConstantAsMetadata::get(ConstantInt::get(Context, CR.getLower())),
            ConstantAsMetadata::get(ConstantInt::get(Context, CR.getUpper()))};
        CB->setMetadata(LLVMContext::MD_range, MDNode::get(Context, RangeMD));
      }
      continue;
    }
    if (F->getReturnType()->isVoidTy())
      continue;
    if (SCCPSolver::isConstant(ReturnValue) || ReturnValue.isUnknownOrUndef())
      findReturnsToZap(*F, ReturnsToZap, Solver);
  }

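  // Functions returning a struct are tracked per element; their returns can
  // also be zapped if every element of the struct lattice is constant.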
  for (auto *F : Solver.getMRVFunctionsTracked()) {
    assert(F->getReturnType()->isStructTy() &&
           "The return type should be a struct");
    StructType *STy = cast<StructType>(F->getReturnType());
    if (Solver.isStructLatticeConstant(F, STy))
      findReturnsToZap(*F, ReturnsToZap, Solver);
  }

  // Zap all returns that we've identified as needing to be zapped.
  SmallSetVector<Function *, 8> FuncZappedReturn;
  for (ReturnInst *RI : ReturnsToZap) {
    Function *F = RI->getParent()->getParent();
    RI->setOperand(0, UndefValue::get(F->getReturnType()));
    // Record all functions that are zapped.
    FuncZappedReturn.insert(F);
  }

  // Remove the returned attribute from zapped functions and their
  // corresponding call sites.
  // Also remove any attributes that convert an undef return value into
  // immediate undefined behavior.
  AttributeMask UBImplyingAttributes =
      AttributeFuncs::getUBImplyingAttributes();
  for (Function *F : FuncZappedReturn) {
    for (Argument &A : F->args())
      F->removeParamAttr(A.getArgNo(), Attribute::Returned);
    F->removeRetAttrs(UBImplyingAttributes);
    for (Use &U : F->uses()) {
      CallBase *CB = dyn_cast<CallBase>(U.getUser());
      if (!CB) {
        assert(isa<BlockAddress>(U.getUser()) ||
               (isa<Constant>(U.getUser()) &&
                all_of(U.getUser()->users(), [](const User *UserUser) {
                  return cast<IntrinsicInst>(UserUser)->isAssumeLikeIntrinsic();
                })));
        continue;
      }

      for (Use &Arg : CB->args())
        CB->removeParamAttr(CB->getArgOperandNo(&Arg), Attribute::Returned);
      CB->removeRetAttrs(UBImplyingAttributes);
    }
  }

  // If we inferred constant or undef values for global variables, we can
  // delete the global and any stores that remain to it.
  for (const auto &I : make_early_inc_range(Solver.getTrackedGlobals())) {
    GlobalVariable *GV = I.first;
    if (SCCPSolver::isOverdefined(I.second))
      continue;
    LLVM_DEBUG(dbgs() << "Found that GV '" << GV->getName()
                      << "' is constant!\n");
    while (!GV->use_empty()) {
      StoreInst *SI = cast<StoreInst>(GV->user_back());
      SI->eraseFromParent();
    }
    MadeChanges = true;
    M.eraseGlobalVariable(GV);
    ++NumGlobalConst;
  }

  return MadeChanges;
}

PreservedAnalyses IPSCCPPass::run(Module &M, ModuleAnalysisManager &AM) {
  const DataLayout &DL = M.getDataLayout();
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> const TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  auto GetTTI = [&FAM](Function &F) -> TargetTransformInfo & {
    return FAM.getResult<TargetIRAnalysis>(F);
  };
  auto GetAC = [&FAM](Function &F) -> AssumptionCache & {
    return FAM.getResult<AssumptionAnalysis>(F);
  };
  auto GetDT = [&FAM](Function &F) -> DominatorTree & {
    return FAM.getResult<DominatorTreeAnalysis>(F);
  };
  auto GetBFI = [&FAM](Function &F) -> BlockFrequencyInfo & {
    return FAM.getResult<BlockFrequencyAnalysis>(F);
  };

  if (!runIPSCCP(M, DL, &FAM, GetTLI, GetTTI, GetAC, GetDT, GetBFI,
                 isFuncSpecEnabled()))
    return PreservedAnalyses::all();

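  // The (post)dominator trees were kept up to date through DomTreeUpdater in
  // runIPSCCP, so they can be preserved.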
  PreservedAnalyses PA;
  PA.preserve<DominatorTreeAnalysis>();
  PA.preserve<PostDominatorTreeAnalysis>();
  PA.preserve<FunctionAnalysisManagerModuleProxy>();
  return PA;
}