xref: /freebsd/contrib/llvm-project/llvm/lib/CodeGen/LocalStackSlotAllocation.cpp (revision 0b57cec536236d46e3dba9bd041533462f33dbb7)
//===- LocalStackSlotAllocation.cpp - Pre-allocate locals to stack slots --===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass assigns local frame indices to stack slots relative to one another
// and allocates additional base registers to access them when the target
// estimates they are likely to be out of range of stack pointer and frame
// pointer relative addressing.
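//
// For example, a target whose load and store instructions encode only a small
// signed immediate may be unable to reach a local that ends up several
// kilobytes away from the stack or frame pointer; a base register pointing
// into the local block keeps each instruction's offset small enough to encode.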
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "localstackalloc"

STATISTIC(NumAllocations, "Number of frame indices allocated into local block");
STATISTIC(NumBaseRegisters, "Number of virtual frame base registers allocated");
STATISTIC(NumReplacements, "Number of frame index references replaced");

namespace {

  class FrameRef {
    MachineBasicBlock::iterator MI; // Instr referencing the frame
    int64_t LocalOffset;            // Local offset of the frame idx referenced
    int FrameIdx;                   // The frame index

    // Order in which the reference instruction appears in the program. Used to
    // ensure a deterministic order when multiple instructions may reference
    // the same location.
    unsigned Order;

  public:
    FrameRef(MachineInstr *I, int64_t Offset, int Idx, unsigned Ord) :
      MI(I), LocalOffset(Offset), FrameIdx(Idx), Order(Ord) {}

    bool operator<(const FrameRef &RHS) const {
      return std::tie(LocalOffset, FrameIdx, Order) <
             std::tie(RHS.LocalOffset, RHS.FrameIdx, RHS.Order);
    }

    MachineBasicBlock::iterator getMachineInstr() const { return MI; }
    int64_t getLocalOffset() const { return LocalOffset; }
    int getFrameIndex() const { return FrameIdx; }
  };

  class LocalStackSlotPass: public MachineFunctionPass {
    SmallVector<int64_t, 16> LocalOffsets;

    /// StackObjSet - A set of stack object indexes
    using StackObjSet = SmallSetVector<int, 8>;

    void AdjustStackOffset(MachineFrameInfo &MFI, int FrameIdx, int64_t &Offset,
                           bool StackGrowsDown, unsigned &MaxAlign);
    void AssignProtectedObjSet(const StackObjSet &UnassignedObjs,
                               SmallSet<int, 16> &ProtectedObjs,
                               MachineFrameInfo &MFI, bool StackGrowsDown,
                               int64_t &Offset, unsigned &MaxAlign);
    void calculateFrameObjectOffsets(MachineFunction &Fn);
    bool insertFrameReferenceRegisters(MachineFunction &Fn);

  public:
    static char ID; // Pass identification, replacement for typeid

    explicit LocalStackSlotPass() : MachineFunctionPass(ID) {
      initializeLocalStackSlotPassPass(*PassRegistry::getPassRegistry());
    }

    bool runOnMachineFunction(MachineFunction &MF) override;

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }
  };

} // end anonymous namespace

char LocalStackSlotPass::ID = 0;

char &llvm::LocalStackSlotAllocationID = LocalStackSlotPass::ID;
INITIALIZE_PASS(LocalStackSlotPass, DEBUG_TYPE,
                "Local Stack Slot Allocation", false, false)

bool LocalStackSlotPass::runOnMachineFunction(MachineFunction &MF) {
  MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  unsigned LocalObjectCount = MFI.getObjectIndexEnd();

  // If the target doesn't want/need this pass, or if there are no locals
  // to consider, early exit.
  if (!TRI->requiresVirtualBaseRegisters(MF) || LocalObjectCount == 0)
    return true;

  // Make sure we have enough space to store the local offsets.
  LocalOffsets.resize(MFI.getObjectIndexEnd());

  // Lay out the local blob.
  calculateFrameObjectOffsets(MF);

  // Insert virtual base registers to resolve frame index references.
  bool UsedBaseRegs = insertFrameReferenceRegisters(MF);

  // Tell MFI whether any base registers were allocated. PEI will only
  // want to use the local block allocations from this pass if there were any.
  // Otherwise, PEI can do a somewhat better job of getting the alignment right
  // without a hole at the start, since it knows the alignment of the stack
  // at the start of local allocation, and this pass doesn't.
  MFI.setUseLocalStackAllocationBlock(UsedBaseRegs);

  return true;
}

/// AdjustStackOffset - Helper function used to adjust the stack frame offset.
void LocalStackSlotPass::AdjustStackOffset(MachineFrameInfo &MFI,
                                           int FrameIdx, int64_t &Offset,
                                           bool StackGrowsDown,
                                           unsigned &MaxAlign) {
  // If the stack grows down, add the object size to find the lowest address.
  if (StackGrowsDown)
    Offset += MFI.getObjectSize(FrameIdx);

  unsigned Align = MFI.getObjectAlignment(FrameIdx);

  // If the alignment of this object is greater than that of the stack, then
  // increase the stack alignment to match.
  MaxAlign = std::max(MaxAlign, Align);

  // Adjust to alignment boundary.
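  // For example, an Offset of 13 with an Align of 8 rounds up to 16; an
  // Offset that is already a multiple of Align is left unchanged.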
  Offset = (Offset + Align - 1) / Align * Align;

  int64_t LocalOffset = StackGrowsDown ? -Offset : Offset;
  LLVM_DEBUG(dbgs() << "Allocate FI(" << FrameIdx << ") to local offset "
                    << LocalOffset << "\n");
  // Keep the offset available for base register allocation
  LocalOffsets[FrameIdx] = LocalOffset;
  // And tell MFI about it for PEI to use later
  MFI.mapLocalFrameObject(FrameIdx, LocalOffset);

  if (!StackGrowsDown)
    Offset += MFI.getObjectSize(FrameIdx);

  ++NumAllocations;
}

/// AssignProtectedObjSet - Helper function to assign stack offsets to the
/// stack objects that are required to be close to the Stack Protector.
void LocalStackSlotPass::AssignProtectedObjSet(const StackObjSet &UnassignedObjs,
                                           SmallSet<int, 16> &ProtectedObjs,
                                           MachineFrameInfo &MFI,
                                           bool StackGrowsDown, int64_t &Offset,
                                           unsigned &MaxAlign) {
  for (StackObjSet::const_iterator I = UnassignedObjs.begin(),
        E = UnassignedObjs.end(); I != E; ++I) {
    int i = *I;
    AdjustStackOffset(MFI, i, Offset, StackGrowsDown, MaxAlign);
    ProtectedObjs.insert(i);
  }
}

/// calculateFrameObjectOffsets - Calculate actual frame offsets for all of the
/// abstract stack objects.
void LocalStackSlotPass::calculateFrameObjectOffsets(MachineFunction &Fn) {
  // Loop over all of the stack objects, assigning sequential addresses...
  MachineFrameInfo &MFI = Fn.getFrameInfo();
  const TargetFrameLowering &TFI = *Fn.getSubtarget().getFrameLowering();
  bool StackGrowsDown =
    TFI.getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown;
  int64_t Offset = 0;
  unsigned MaxAlign = 0;

  // Make sure that the stack protector comes before the local variables on the
  // stack.
  SmallSet<int, 16> ProtectedObjs;
  if (MFI.hasStackProtectorIndex()) {
    int StackProtectorFI = MFI.getStackProtectorIndex();

    // We need to make sure we didn't pre-allocate the stack protector when
    // doing this.
    // If we already have a stack protector, this will re-assign it to a slot
    // that is **not** covering the protected objects.
    assert(!MFI.isObjectPreAllocated(StackProtectorFI) &&
           "Stack protector pre-allocated in LocalStackSlotAllocation");

    StackObjSet LargeArrayObjs;
    StackObjSet SmallArrayObjs;
    StackObjSet AddrOfObjs;

    AdjustStackOffset(MFI, StackProtectorFI, Offset, StackGrowsDown, MaxAlign);

    // Assign large stack objects first.
    for (unsigned i = 0, e = MFI.getObjectIndexEnd(); i != e; ++i) {
      if (MFI.isDeadObjectIndex(i))
        continue;
      if (StackProtectorFI == (int)i)
        continue;

      switch (MFI.getObjectSSPLayout(i)) {
      case MachineFrameInfo::SSPLK_None:
        continue;
      case MachineFrameInfo::SSPLK_SmallArray:
        SmallArrayObjs.insert(i);
        continue;
      case MachineFrameInfo::SSPLK_AddrOf:
        AddrOfObjs.insert(i);
        continue;
      case MachineFrameInfo::SSPLK_LargeArray:
        LargeArrayObjs.insert(i);
        continue;
      }
      llvm_unreachable("Unexpected SSPLayoutKind.");
    }

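    // The guard slot was assigned first, so on a downward-growing stack it
    // sits at the top of the local block and the protected objects are placed
    // just below it, large arrays closest: a linear overflow out of an array
    // must cross the guard before it can reach anything above the local block.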
    AssignProtectedObjSet(LargeArrayObjs, ProtectedObjs, MFI, StackGrowsDown,
                          Offset, MaxAlign);
    AssignProtectedObjSet(SmallArrayObjs, ProtectedObjs, MFI, StackGrowsDown,
                          Offset, MaxAlign);
    AssignProtectedObjSet(AddrOfObjs, ProtectedObjs, MFI, StackGrowsDown,
                          Offset, MaxAlign);
  }

  // Then assign frame offsets to stack objects that are not used to spill
  // callee saved registers.
  for (unsigned i = 0, e = MFI.getObjectIndexEnd(); i != e; ++i) {
    if (MFI.isDeadObjectIndex(i))
      continue;
    if (MFI.getStackProtectorIndex() == (int)i)
      continue;
    if (ProtectedObjs.count(i))
      continue;

    AdjustStackOffset(MFI, i, Offset, StackGrowsDown, MaxAlign);
  }

  // Remember how big this blob of stack space is
  MFI.setLocalFrameSize(Offset);
  MFI.setLocalFrameMaxAlign(MaxAlign);
}

static inline bool
lookupCandidateBaseReg(unsigned BaseReg,
                       int64_t BaseOffset,
                       int64_t FrameSizeAdjust,
                       int64_t LocalFrameOffset,
                       const MachineInstr &MI,
                       const TargetRegisterInfo *TRI) {
  // Check if the relative offset from where the base register points to the
  // target address is in range for the instruction.
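  // For example, on a stack-grows-down target with a 128-byte local frame
  // (FrameSizeAdjust == 128), an object at local offset -40 reached through a
  // base register established at offset 64 needs a relative offset of
  // 128 + (-40) - 64 == 24, which the target then checks for encodability.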
  int64_t Offset = FrameSizeAdjust + LocalFrameOffset - BaseOffset;
  return TRI->isFrameOffsetLegal(&MI, BaseReg, Offset);
}

bool LocalStackSlotPass::insertFrameReferenceRegisters(MachineFunction &Fn) {
  // Scan the function's instructions looking for frame index references.
  // For each, ask the target if it wants a virtual base register for it
  // based on what we can tell it about where the local will end up in the
  // stack frame. If it wants one, re-use a suitable one we've previously
  // allocated, or if there isn't one that fits the bill, allocate a new one
  // and ask the target to create a defining instruction for it.
  bool UsedBaseReg = false;

  MachineFrameInfo &MFI = Fn.getFrameInfo();
  const TargetRegisterInfo *TRI = Fn.getSubtarget().getRegisterInfo();
  const TargetFrameLowering &TFI = *Fn.getSubtarget().getFrameLowering();
  bool StackGrowsDown =
    TFI.getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown;

  // Collect all of the instructions in the function that reference
  // a frame index. Also store the frame index referenced to ease later
  // lookup. (For any insn that has more than one FI reference, we arbitrarily
  // choose the first one.)
  SmallVector<FrameRef, 64> FrameReferenceInsns;

  unsigned Order = 0;

  for (MachineBasicBlock &BB : Fn) {
    for (MachineInstr &MI : BB) {
      // Debug value, statepoint, stackmap, and patchpoint instructions can't
      // be out of range, so they don't need any updates.
      if (MI.isDebugInstr() || MI.getOpcode() == TargetOpcode::STATEPOINT ||
          MI.getOpcode() == TargetOpcode::STACKMAP ||
          MI.getOpcode() == TargetOpcode::PATCHPOINT)
        continue;

      // For now, allocate the base register(s) within the basic block
      // where they're used, and don't try to keep them around outside
      // of that. It may be beneficial to try sharing them more broadly
      // than that, but the increased register pressure makes that a
      // tricky thing to balance. Investigate if re-materializing these
      // becomes an issue.
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        // Consider replacing all frame index operands that reference
        // an object allocated in the local block.
        if (MI.getOperand(i).isFI()) {
          // Don't try this with values not in the local block.
          if (!MFI.isObjectPreAllocated(MI.getOperand(i).getIndex()))
            break;
          int Idx = MI.getOperand(i).getIndex();
          int64_t LocalOffset = LocalOffsets[Idx];
          if (!TRI->needsFrameBaseReg(&MI, LocalOffset))
            break;
          FrameReferenceInsns.push_back(FrameRef(&MI, LocalOffset, Idx, Order++));
          break;
        }
      }
    }
  }

  // Sort the frame references by local offset.
  // Use frame index as a tie-breaker in case MIs have the same offset.
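  // Because the references are sorted, references to nearby locals end up
  // adjacent, which is what lets the look-ahead check below predict whether
  // the next reference will be able to reuse a newly created base register.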
  llvm::sort(FrameReferenceInsns);

  MachineBasicBlock *Entry = &Fn.front();

  unsigned BaseReg = 0;
  int64_t BaseOffset = 0;

  // Loop through the frame references and allocate for them as necessary.
  for (int ref = 0, e = FrameReferenceInsns.size(); ref < e ; ++ref) {
    FrameRef &FR = FrameReferenceInsns[ref];
    MachineInstr &MI = *FR.getMachineInstr();
    int64_t LocalOffset = FR.getLocalOffset();
    int FrameIdx = FR.getFrameIndex();
    assert(MFI.isObjectPreAllocated(FrameIdx) &&
           "Only pre-allocated locals expected!");

    // We need to keep the references to the stack protector slot through frame
    // index operands so that it gets resolved by PEI rather than this pass.
    // This avoids accesses to the stack protector through virtual base
    // registers, and forces PEI to address it using fp/sp/bp.
    if (MFI.hasStackProtectorIndex() &&
        FrameIdx == MFI.getStackProtectorIndex())
      continue;

    LLVM_DEBUG(dbgs() << "Considering: " << MI);

    unsigned idx = 0;
    for (unsigned f = MI.getNumOperands(); idx != f; ++idx) {
      if (!MI.getOperand(idx).isFI())
        continue;

      if (FrameIdx == MI.getOperand(idx).getIndex())
        break;
    }

    assert(idx < MI.getNumOperands() && "Cannot find FI operand");

    int64_t Offset = 0;
    int64_t FrameSizeAdjust = StackGrowsDown ? MFI.getLocalFrameSize() : 0;

    LLVM_DEBUG(dbgs() << "  Replacing FI in: " << MI);

    // If we have a suitable base register available, use it; otherwise
    // create a new one. Note that any offset encoded in the
    // instruction itself will be taken into account by the target,
    // so we don't have to adjust for it here when reusing a base
    // register.
    if (UsedBaseReg &&
        lookupCandidateBaseReg(BaseReg, BaseOffset, FrameSizeAdjust,
                               LocalOffset, MI, TRI)) {
      LLVM_DEBUG(dbgs() << "  Reusing base register " << BaseReg << "\n");
      // We found a register to reuse.
      Offset = FrameSizeAdjust + LocalOffset - BaseOffset;
    } else {
      // No previously defined register was in range, so create a new one.
      int64_t InstrOffset = TRI->getFrameIndexInstrOffset(&MI, idx);

      int64_t PrevBaseOffset = BaseOffset;
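      // The new base register will point at the address this instruction
      // computes for the object: its local offset adjusted for the stack
      // growth direction, plus whatever offset the instruction itself already
      // encodes.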
      BaseOffset = FrameSizeAdjust + LocalOffset + InstrOffset;

      // We'd like to avoid creating single-use virtual base registers.
      // Because the FrameRefs are in sorted order, and we've already
      // processed all FrameRefs before this one, just check whether or not
      // the next FrameRef will be able to reuse this new register. If not,
      // then don't bother creating it.
      if (ref + 1 >= e ||
          !lookupCandidateBaseReg(
              BaseReg, BaseOffset, FrameSizeAdjust,
              FrameReferenceInsns[ref + 1].getLocalOffset(),
              *FrameReferenceInsns[ref + 1].getMachineInstr(), TRI)) {
        BaseOffset = PrevBaseOffset;
        continue;
      }

      const MachineFunction *MF = MI.getMF();
      const TargetRegisterClass *RC = TRI->getPointerRegClass(*MF);
      BaseReg = Fn.getRegInfo().createVirtualRegister(RC);

      LLVM_DEBUG(dbgs() << "  Materializing base register " << BaseReg
                        << " at frame local offset "
                        << LocalOffset + InstrOffset << "\n");

      // Tell the target to insert the instruction to initialize
      // the base register.
      //            MachineBasicBlock::iterator InsertionPt = Entry->begin();
      TRI->materializeFrameBaseRegister(Entry, BaseReg, FrameIdx,
                                        InstrOffset);

      // The base register already includes any offset specified
      // by the instruction, so account for that so it doesn't get
      // applied twice.
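      // For example, if the instruction already encodes an immediate of 8, the
      // base register was materialized to point 8 bytes past the object, so
      // resolving with an Offset of -8 cancels that out and leaves the
      // effective address unchanged.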
      Offset = -InstrOffset;

      ++NumBaseRegisters;
      UsedBaseReg = true;
    }
    assert(BaseReg != 0 && "Unable to allocate virtual base register!");

    // Modify the instruction to use the new base register rather
    // than the frame index operand.
    TRI->resolveFrameIndex(MI, BaseReg, Offset);
    LLVM_DEBUG(dbgs() << "Resolved: " << MI);

    ++NumReplacements;
  }

  return UsedBaseReg;
}