//===-------- X86PadShortFunction.cpp - pad short functions -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the pass which will pad short functions to prevent
// a stall if a function returns before the return address is ready. This
// is needed for some Intel Atom processors.
//
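// As a rough illustration (with this pass's threshold of four cycles), a
// function that lowers to little more than
//
//   xorl %eax, %eax
//   retq
//
// reaches its RET before the return address pushed by the caller is ready,
// so NOOPs are inserted ahead of the RET to cover the remaining cycles.
//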
//===----------------------------------------------------------------------===//


#include "X86.h"
#include "X86InstrInfo.h"
#include "X86Subtarget.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/CodeGen/LazyMachineBlockFrequencyInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineSizeOpts.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetSchedule.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "x86-pad-short-functions"

STATISTIC(NumBBsPadded, "Number of basic blocks padded");

namespace {
  struct VisitedBBInfo {
    // HasReturn - Whether the BB contains a return instruction
    bool HasReturn;

    // Cycles - Number of cycles until return if HasReturn is true, otherwise
    // number of cycles until end of the BB
    unsigned int Cycles;

    VisitedBBInfo() : HasReturn(false), Cycles(0) {}
    VisitedBBInfo(bool HasReturn, unsigned int Cycles)
      : HasReturn(HasReturn), Cycles(Cycles) {}
  };

  struct PadShortFunc : public MachineFunctionPass {
    static char ID;
    PadShortFunc() : MachineFunctionPass(ID)
                   , Threshold(4) {}

    bool runOnMachineFunction(MachineFunction &MF) override;

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.addRequired<ProfileSummaryInfoWrapperPass>();
      AU.addRequired<LazyMachineBlockFrequencyInfoPass>();
      AU.addPreserved<LazyMachineBlockFrequencyInfoPass>();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

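    // NoVRegs is required: the pass runs after register allocation, on fully
    // lowered machine instructions.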
    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }

    StringRef getPassName() const override {
      return "X86 Atom pad short functions";
    }

  private:
    void findReturns(MachineBasicBlock *MBB,
                     unsigned int Cycles = 0);

    bool cyclesUntilReturn(MachineBasicBlock *MBB,
                           unsigned int &Cycles);

    void addPadding(MachineBasicBlock *MBB,
                    MachineBasicBlock::iterator &MBBI,
                    unsigned int NOOPsToAdd);

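    // Threshold - Minimum number of cycles that should separate the entry
    // point from any return; returns reached in fewer cycles are padded up
    // to this value.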
    const unsigned int Threshold;

    // ReturnBBs - Maps basic blocks that return to the number of cycles taken
    // to reach the return, starting from the entry block (the largest count
    // seen over the explored paths is kept).
    DenseMap<MachineBasicBlock*, unsigned int> ReturnBBs;

    // VisitedBBs - Cache of previously visited BBs.
    DenseMap<MachineBasicBlock*, VisitedBBInfo> VisitedBBs;

    TargetSchedModel TSM;
  };

  char PadShortFunc::ID = 0;
}

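// Factory routine used by the X86 target to create this pass; it is typically
// added late in the codegen pipeline (after register allocation, shortly
// before emission) from X86TargetMachine.cpp.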
FunctionPass *llvm::createX86PadShortFunctions() {
  return new PadShortFunc();
}

/// runOnMachineFunction - Loop over all of the basic blocks, inserting
/// NOOP instructions before early exits.
bool PadShortFunc::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  if (MF.getFunction().hasOptSize())
    return false;

  if (!MF.getSubtarget<X86Subtarget>().padShortFunctions())
    return false;

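  // Instruction latencies and the issue width used below come from the
  // subtarget's scheduling model.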
  TSM.init(&MF.getSubtarget());

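  // Profile information, when available, lets the per-block size heuristic
  // below skip padding in blocks known to be cold; block frequencies are only
  // computed when a profile summary is present.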
  auto *PSI =
      &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  auto *MBFI = (PSI && PSI->hasProfileSummary()) ?
               &getAnalysis<LazyMachineBlockFrequencyInfoPass>().getBFI() :
               nullptr;

  // Search through basic blocks and mark the ones that have early returns
  ReturnBBs.clear();
  VisitedBBs.clear();
  findReturns(&MF.front());

  bool MadeChange = false;

  // Pad the identified basic blocks with NOOPs
  for (const auto &ReturnBB : ReturnBBs) {
    MachineBasicBlock *MBB = ReturnBB.first;
    unsigned Cycles = ReturnBB.second;

    // Function::hasOptSize is already checked above.
    bool OptForSize = llvm::shouldOptimizeForSize(MBB, PSI, MBFI);
    if (OptForSize)
      continue;

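    // Pad any return that is reached in fewer than Threshold cycles. For
    // example, with Threshold == 4 and Cycles == 1, three cycles' worth of
    // NOOPs are inserted in front of the RET.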
    if (Cycles < Threshold) {
      // BB ends in a return. Skip over any DBG_VALUE instructions
      // trailing the terminator.
      assert(MBB->size() > 0 &&
             "Basic block should contain at least a RET but is empty");
      MachineBasicBlock::iterator ReturnLoc = --MBB->end();

      while (ReturnLoc->isDebugInstr())
        --ReturnLoc;
      assert(ReturnLoc->isReturn() && !ReturnLoc->isCall() &&
             "Basic block does not end with RET");

      addPadding(MBB, ReturnLoc, Threshold - Cycles);
      NumBBsPadded++;
      MadeChange = true;
    }
  }

  return MadeChange;
}

/// findReturns - Starting at MBB, follow control flow and add all
/// basic blocks that contain a return to ReturnBBs.
void PadShortFunc::findReturns(MachineBasicBlock *MBB, unsigned int Cycles) {
  // If this BB has a return, note how many cycles it takes to get there.
  bool hasReturn = cyclesUntilReturn(MBB, Cycles);
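  // This path is already at least Threshold cycles long, so no return reached
  // along it needs padding; stop exploring from here.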
  if (Cycles >= Threshold)
    return;

  if (hasReturn) {
    ReturnBBs[MBB] = std::max(ReturnBBs[MBB], Cycles);
    return;
  }

  // Follow branches in BB and look for returns, skipping self-loop edges.
  for (MachineBasicBlock *Succ : MBB->successors())
    if (Succ != MBB)
      findReturns(Succ, Cycles);
}

/// cyclesUntilReturn - Return true if MBB contains a return instruction,
/// false otherwise.
/// Cycles is incremented by the number of cycles taken to reach the
/// return or the end of the BB, whichever occurs first.
bool PadShortFunc::cyclesUntilReturn(MachineBasicBlock *MBB,
                                     unsigned int &Cycles) {
  // Return cached result if BB was previously visited
  DenseMap<MachineBasicBlock*, VisitedBBInfo>::iterator it
    = VisitedBBs.find(MBB);
  if (it != VisitedBBs.end()) {
    VisitedBBInfo BBInfo = it->second;
    Cycles += BBInfo.Cycles;
    return BBInfo.HasReturn;
  }

  unsigned int CyclesToEnd = 0;

  for (MachineInstr &MI : *MBB) {
    // Mark basic blocks with a return instruction. Calls to other
    // functions do not count because the called function will be padded,
    // if necessary.
    if (MI.isReturn() && !MI.isCall()) {
      VisitedBBs[MBB] = VisitedBBInfo(true, CyclesToEnd);
      Cycles += CyclesToEnd;
      return true;
    }

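    // Count the latency of every instruction that precedes the return; the
    // RET itself does not contribute to the cycle count.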
    CyclesToEnd += TSM.computeInstrLatency(&MI);
  }

  VisitedBBs[MBB] = VisitedBBInfo(false, CyclesToEnd);
  Cycles += CyclesToEnd;
  return false;
}

/// addPadding - Pad the function with NOOPsToAdd cycles' worth of NOOP
/// instructions immediately before the return at MBBI.
void PadShortFunc::addPadding(MachineBasicBlock *MBB,
                              MachineBasicBlock::iterator &MBBI,
                              unsigned int NOOPsToAdd) {
  const DebugLoc &DL = MBBI->getDebugLoc();
  unsigned IssueWidth = TSM.getIssueWidth();

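  // One cycle of padding requires IssueWidth NOOPs, since the target can
  // issue that many instructions per cycle.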
  for (unsigned i = 0, e = IssueWidth * NOOPsToAdd; i != e; ++i)
    BuildMI(*MBB, MBBI, DL, TSM.getInstrInfo()->get(X86::NOOP));
}
227