//===-------- X86PadShortFunction.cpp - pad short functions -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the pass which will pad short functions to prevent
// a stall if a function returns before the return address is ready. This
// is needed for some Intel Atom processors.
//
//===----------------------------------------------------------------------===//


#include "X86.h"
#include "X86InstrInfo.h"
#include "X86Subtarget.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/CodeGen/LazyMachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineSizeOpts.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetSchedule.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "x86-pad-short-functions"

STATISTIC(NumBBsPadded, "Number of basic blocks padded");

namespace {
  struct VisitedBBInfo {
    // HasReturn - Whether the BB contains a return instruction
    bool HasReturn;

    // Cycles - Number of cycles until return if HasReturn is true, otherwise
    // number of cycles until end of the BB
    unsigned int Cycles;

    VisitedBBInfo() : HasReturn(false), Cycles(0) {}
    VisitedBBInfo(bool HasReturn, unsigned int Cycles)
      : HasReturn(HasReturn), Cycles(Cycles) {}
  };

  struct PadShortFunc : public MachineFunctionPass {
    static char ID;
    PadShortFunc() : MachineFunctionPass(ID)
                   , Threshold(4) {}

    bool runOnMachineFunction(MachineFunction &MF) override;

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.addRequired<ProfileSummaryInfoWrapperPass>();
      AU.addRequired<LazyMachineBlockFrequencyInfoPass>();
      AU.addPreserved<LazyMachineBlockFrequencyInfoPass>();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }

    StringRef getPassName() const override {
      return "X86 Atom pad short functions";
    }

  private:
    void findReturns(MachineBasicBlock *MBB,
                     unsigned int Cycles = 0);

    bool cyclesUntilReturn(MachineBasicBlock *MBB,
                           unsigned int &Cycles);

    void addPadding(MachineBasicBlock *MBB,
                    MachineBasicBlock::iterator &MBBI,
                    unsigned int NOOPsToAdd);

    const unsigned int Threshold;

    // ReturnBBs - Maps basic blocks that return to the minimum number of
    // cycles until the return, starting from the entry block.
    DenseMap<MachineBasicBlock*, unsigned int> ReturnBBs;

    // VisitedBBs - Cache of previously visited BBs.
    DenseMap<MachineBasicBlock*, VisitedBBInfo> VisitedBBs;

    TargetSchedModel TSM;
  };

  char PadShortFunc::ID = 0;
}

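/// createX86PadShortFunctions - Return a pass that pads short returning
/// functions with NOOPs, for use on subtargets (such as Intel Atom) that
/// request it.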
FunctionPass *llvm::createX86PadShortFunctions() {
  return new PadShortFunc();
}

/// runOnMachineFunction - Loop over all of the basic blocks, inserting
/// NOOP instructions before early exits.
bool PadShortFunc::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  if (MF.getFunction().hasOptSize())
    return false;

  if (!MF.getSubtarget<X86Subtarget>().padShortFunctions())
    return false;

  TSM.init(&MF.getSubtarget());

  auto *PSI =
      &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  auto *MBFI = (PSI && PSI->hasProfileSummary()) ?
               &getAnalysis<LazyMachineBlockFrequencyInfoPass>().getBFI() :
               nullptr;

  // Search through basic blocks and mark the ones that have early returns
  ReturnBBs.clear();
  VisitedBBs.clear();
  findReturns(&MF.front());

  bool MadeChange = false;

  // Pad the identified basic blocks with NOOPs
  for (const auto &ReturnBB : ReturnBBs) {
    MachineBasicBlock *MBB = ReturnBB.first;
    unsigned Cycles = ReturnBB.second;

    // Function::hasOptSize is already checked above.
    bool OptForSize = llvm::shouldOptimizeForSize(MBB, PSI, MBFI);
    if (OptForSize)
      continue;

    if (Cycles < Threshold) {
      // BB ends in a return. Skip over any DBG_VALUE instructions
      // trailing the terminator.
      assert(MBB->size() > 0 &&
             "Basic block should contain at least a RET but is empty");
      MachineBasicBlock::iterator ReturnLoc = --MBB->end();

      while (ReturnLoc->isDebugInstr())
        --ReturnLoc;
      assert(ReturnLoc->isReturn() && !ReturnLoc->isCall() &&
             "Basic block does not end with RET");

      addPadding(MBB, ReturnLoc, Threshold - Cycles);
      NumBBsPadded++;
      MadeChange = true;
    }
  }

  return MadeChange;
}

/// findReturns - Starting at MBB, follow control flow and add all
/// basic blocks that contain a return to ReturnBBs.
void PadShortFunc::findReturns(MachineBasicBlock *MBB, unsigned int Cycles) {
  // If this BB has a return, note how many cycles it takes to get there.
  bool hasReturn = cyclesUntilReturn(MBB, Cycles);
  if (Cycles >= Threshold)
    return;

  if (hasReturn) {
    ReturnBBs[MBB] = std::max(ReturnBBs[MBB], Cycles);
    return;
  }

  // Follow branches in BB and look for returns
  for (MachineBasicBlock *Succ : MBB->successors()) {
    if (Succ == MBB)
      continue;
    findReturns(Succ, Cycles);
  }
}

/// cyclesUntilReturn - Return true if the MBB has a return instruction, and
/// false otherwise. Cycles will be incremented by the number of cycles taken
/// to reach the return or the end of the BB, whichever occurs first.
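///
/// For example (a hypothetical block, not taken from a test): if a BB consists
/// of two single-cycle instructions followed by a RET, this adds 2 to Cycles,
/// caches {HasReturn = true, Cycles = 2} for the BB, and returns true; the
/// latency of the RET itself is not counted.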
bool PadShortFunc::cyclesUntilReturn(MachineBasicBlock *MBB,
                                     unsigned int &Cycles) {
  // Return cached result if BB was previously visited
  auto It = VisitedBBs.find(MBB);
  if (It != VisitedBBs.end()) {
    VisitedBBInfo BBInfo = It->second;
    Cycles += BBInfo.Cycles;
    return BBInfo.HasReturn;
  }

  unsigned int CyclesToEnd = 0;

  for (MachineInstr &MI : *MBB) {
    // Mark basic blocks with a return instruction. Calls to other
    // functions do not count because the called function will be padded,
    // if necessary.
    if (MI.isReturn() && !MI.isCall()) {
      VisitedBBs[MBB] = VisitedBBInfo(true, CyclesToEnd);
      Cycles += CyclesToEnd;
      return true;
    }

    CyclesToEnd += TSM.computeInstrLatency(&MI);
  }

  VisitedBBs[MBB] = VisitedBBInfo(false, CyclesToEnd);
  Cycles += CyclesToEnd;
  return false;
}

/// addPadding - Add the given number of NOOP instructions to the function
/// just prior to the return at MBBI.
void PadShortFunc::addPadding(MachineBasicBlock *MBB,
                              MachineBasicBlock::iterator &MBBI,
                              unsigned int NOOPsToAdd) {
  DebugLoc DL = MBBI->getDebugLoc();
  unsigned IssueWidth = TSM.getIssueWidth();

  // Emit IssueWidth NOOPs for every cycle of padding so that NOOPsToAdd is
  // measured in cycles rather than in instructions.
  for (unsigned i = 0, e = IssueWidth * NOOPsToAdd; i != e; ++i)
    BuildMI(*MBB, MBBI, DL, TSM.getInstrInfo()->get(X86::NOOP));
}