//===- X86VZeroUpper.cpp - AVX vzeroupper instruction inserter ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the pass which inserts x86 AVX vzeroupper instructions
// before calls to SSE-encoded functions. This avoids the transition latency
// penalty incurred when transferring control between AVX-encoded instructions
// and legacy SSE encoding mode.
//
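// Example (illustrative only, not taken from a real compilation): given
// machine code along these lines,
//
//     vaddps %ymm1, %ymm0, %ymm2    # dirties the upper YMM/ZMM state
//     callq  legacy_sse_function    # may execute legacy-SSE-encoded code
//
// the pass rewrites it to
//
//     vaddps %ymm1, %ymm0, %ymm2
//     vzeroupper                    # make the upper state clean first
//     callq  legacy_sse_function
//
// so the call site no longer pays the AVX-to-SSE transition penalty.
//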
//===----------------------------------------------------------------------===//

#include "X86.h"
#include "X86InstrInfo.h"
#include "X86Subtarget.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>

using namespace llvm;

#define DEBUG_TYPE "x86-vzeroupper"

static cl::opt<bool>
    UseVZeroUpper("x86-use-vzeroupper", cl::Hidden,
                  cl::desc("Minimize AVX to SSE transition penalty"),
                  cl::init(true));
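
// Note: with the standard cl::opt boolean syntax, the pass can be disabled
// from the command line, e.g. "llc -x86-use-vzeroupper=false" (or, through
// clang, "-mllvm -x86-use-vzeroupper=false").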

STATISTIC(NumVZU, "Number of vzeroupper instructions inserted");

namespace {

  class VZeroUpperInserter : public MachineFunctionPass {
  public:
    VZeroUpperInserter() : MachineFunctionPass(ID) {}

    bool runOnMachineFunction(MachineFunction &MF) override;

    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }

    StringRef getPassName() const override { return "X86 vzeroupper inserter"; }

  private:
    void processBasicBlock(MachineBasicBlock &MBB);
    void insertVZeroUpper(MachineBasicBlock::iterator I,
                          MachineBasicBlock &MBB);
    void addDirtySuccessor(MachineBasicBlock &MBB);

    using BlockExitState = enum { PASS_THROUGH, EXITS_CLEAN, EXITS_DIRTY };

    static const char* getBlockExitStateName(BlockExitState ST);
    // Core algorithm state:
    // BlockState - Each block is either:
    //   - PASS_THROUGH: There are neither YMM/ZMM dirtying instructions nor
    //                   vzeroupper instructions in this block.
    //   - EXITS_CLEAN: There is (or will be) a vzeroupper instruction in this
    //                  block that will ensure that YMM/ZMM is clean on exit.
    //   - EXITS_DIRTY: An instruction in the block dirties YMM/ZMM and no
    //                  subsequent vzeroupper in the block clears it.
    //
    // AddedToDirtySuccessors - This flag is raised when a block is added to the
    //                          DirtySuccessors list to ensure that it's not
    //                          added multiple times.
    //
    // FirstUnguardedCall - Records the location of the first unguarded call in
    //                      each basic block that may need to be guarded by a
    //                      vzeroupper. We won't know whether it actually needs
    //                      to be guarded until we discover a predecessor that
    //                      is EXITS_DIRTY.
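    //
    // A sketch of how these pieces interact on an illustrative CFG (not
    // taken from any real function):
    //
    //        BB0: vaddps ... %ymm0 ...   exits EXITS_DIRTY
    //        /  \
    //      BB1   BB2: callq foo          scanned as PASS_THROUGH until the
    //                                    call, which is recorded as BB2's
    //                                    FirstUnguardedCall; BB2 becomes
    //                                    EXITS_CLEAN but nothing is
    //                                    inserted during the first scan
    //
    // Because BB0 exits dirty, BB2 is pushed onto the DirtySuccessors
    // worklist, and the second phase inserts a vzeroupper before the
    // recorded call.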
    struct BlockState {
      BlockExitState ExitState = PASS_THROUGH;
      bool AddedToDirtySuccessors = false;
      MachineBasicBlock::iterator FirstUnguardedCall;

      BlockState() = default;
    };

    using BlockStateMap = SmallVector<BlockState, 8>;
    using DirtySuccessorsWorkList = SmallVector<MachineBasicBlock *, 8>;

    BlockStateMap BlockStates;
    DirtySuccessorsWorkList DirtySuccessors;
    bool EverMadeChange;
    bool IsX86INTR;
    const TargetInstrInfo *TII;

    static char ID;
  };

} // end anonymous namespace

char VZeroUpperInserter::ID = 0;

FunctionPass *llvm::createX86IssueVZeroUpperPass() {
  return new VZeroUpperInserter();
}
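
// In-tree, this factory is invoked from the X86 codegen pipeline late in
// code generation; a minimal wiring sketch (hook name per the in-tree X86
// PassConfig; treat as illustrative):
//
//   void X86PassConfig::addPreEmitPass() {
//     addPass(createX86IssueVZeroUpperPass());
//   }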

#ifndef NDEBUG
const char* VZeroUpperInserter::getBlockExitStateName(BlockExitState ST) {
  switch (ST) {
    case PASS_THROUGH: return "Pass-through";
    case EXITS_DIRTY: return "Exits-dirty";
    case EXITS_CLEAN: return "Exits-clean";
  }
  llvm_unreachable("Invalid block exit state.");
}
#endif

/// VZEROUPPER only cleans state related to YMM/ZMM0-15, so there is no need
/// to check YMM/ZMM16 and above.
static bool isYmmOrZmmReg(unsigned Reg) {
  return (Reg >= X86::YMM0 && Reg <= X86::YMM15) ||
         (Reg >= X86::ZMM0 && Reg <= X86::ZMM15);
}

static bool checkFnHasLiveInYmmOrZmm(MachineRegisterInfo &MRI) {
  for (std::pair<unsigned, unsigned> LI : MRI.liveins())
    if (isYmmOrZmmReg(LI.first))
      return true;

  return false;
}

static bool clobbersAllYmmAndZmmRegs(const MachineOperand &MO) {
  for (unsigned reg = X86::YMM0; reg <= X86::YMM15; ++reg) {
    if (!MO.clobbersPhysReg(reg))
      return false;
  }
  for (unsigned reg = X86::ZMM0; reg <= X86::ZMM15; ++reg) {
    if (!MO.clobbersPhysReg(reg))
      return false;
  }
  return true;
}

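/// Check whether MI references a YMM/ZMM register directly, or is a call
/// whose register mask fails to clobber all of YMM/ZMM0-15 (in which case a
/// YMM/ZMM value may be live across the call).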
static bool hasYmmOrZmmReg(MachineInstr &MI) {
  for (const MachineOperand &MO : MI.operands()) {
    if (MI.isCall() && MO.isRegMask() && !clobbersAllYmmAndZmmRegs(MO))
      return true;
    if (!MO.isReg())
      continue;
    if (MO.isDebug())
      continue;
    if (isYmmOrZmmReg(MO.getReg()))
      return true;
  }
  return false;
}

/// Check if the given call instruction has a RegMask operand.
static bool callHasRegMask(MachineInstr &MI) {
  assert(MI.isCall() && "Can only be called on call instructions.");
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask())
      return true;
  }
  return false;
}

/// Insert a vzeroupper instruction before I.
void VZeroUpperInserter::insertVZeroUpper(MachineBasicBlock::iterator I,
                                          MachineBasicBlock &MBB) {
  DebugLoc dl = I->getDebugLoc();
  BuildMI(MBB, I, dl, TII->get(X86::VZEROUPPER));
  ++NumVZU;
  EverMadeChange = true;
}

/// Add MBB to the DirtySuccessors list if it hasn't already been added.
void VZeroUpperInserter::addDirtySuccessor(MachineBasicBlock &MBB) {
  if (!BlockStates[MBB.getNumber()].AddedToDirtySuccessors) {
    DirtySuccessors.push_back(&MBB);
    BlockStates[MBB.getNumber()].AddedToDirtySuccessors = true;
  }
}

/// Loop over all of the instructions in the basic block, inserting vzeroupper
/// instructions before function calls.
void VZeroUpperInserter::processBasicBlock(MachineBasicBlock &MBB) {
  // Start by assuming that the block is PASS_THROUGH, which implies no
  // unguarded calls.
  BlockExitState CurState = PASS_THROUGH;
  BlockStates[MBB.getNumber()].FirstUnguardedCall = MBB.end();

  for (MachineInstr &MI : MBB) {
    bool IsCall = MI.isCall();
    bool IsReturn = MI.isReturn();
    bool IsControlFlow = IsCall || IsReturn;

    // There is no need for a vzeroupper before iret in an interrupt handler
    // function; the epilogue will restore YMM/ZMM registers if needed.
    if (IsX86INTR && IsReturn)
      continue;

    // An existing VZERO* instruction resets the state.
    if (MI.getOpcode() == X86::VZEROALL || MI.getOpcode() == X86::VZEROUPPER) {
      CurState = EXITS_CLEAN;
      continue;
    }

    // Shortcut: don't need to check regular instructions in dirty state.
    if (!IsControlFlow && CurState == EXITS_DIRTY)
      continue;

    if (hasYmmOrZmmReg(MI)) {
      // We found a ymm/zmm-using instruction; this could be an AVX/AVX512
      // instruction, or it could be control flow.
      CurState = EXITS_DIRTY;
      continue;
    }

    // Check for control-flow out of the current function (which might
    // indirectly execute SSE instructions).
    if (!IsControlFlow)
      continue;

    // If the call has no RegMask, skip it as well. This usually happens on
    // helper function calls (such as '_chkstk', '_ftol2') where the standard
    // calling convention is not used (the RegMask is not used to mark register
    // clobbers, and register usage (def/implicit-def/use) is well-defined and
    // explicitly specified).
    if (IsCall && !callHasRegMask(MI))
      continue;

    // The VZEROUPPER instruction resets the upper 128 bits of YMM0-YMM15
    // registers. In addition, the processor changes back to the clean state,
    // after which execution of SSE instructions or AVX instructions incurs no
    // transition penalty. Add the VZEROUPPER instruction before any function
    // call/return that might execute SSE code.
    // FIXME: In some cases, we may want to move the VZEROUPPER into a
    // predecessor block.
    if (CurState == EXITS_DIRTY) {
      // After the inserted VZEROUPPER the state becomes clean again, but
      // other YMM/ZMM-using instructions may appear before subsequent calls
      // or even before the end of the BB.
      insertVZeroUpper(MI, MBB);
      CurState = EXITS_CLEAN;
    } else if (CurState == PASS_THROUGH) {
      // If this block is currently in pass-through state and we encounter a
      // call, then whether we need a vzeroupper or not depends on whether this
      // block has successors that exit dirty. Record the location of the call,
      // and set the state to EXITS_CLEAN, but do not insert the vzeroupper yet.
      // It will be inserted later if necessary.
      BlockStates[MBB.getNumber()].FirstUnguardedCall = MI;
      CurState = EXITS_CLEAN;
    }
  }

  LLVM_DEBUG(dbgs() << "MBB #" << MBB.getNumber() << " exit state: "
                    << getBlockExitStateName(CurState) << '\n');

  if (CurState == EXITS_DIRTY)
    for (MachineBasicBlock *Succ : MBB.successors())
      addDirtySuccessor(*Succ);

  BlockStates[MBB.getNumber()].ExitState = CurState;
}
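
// A worked trace of the scan above (illustrative; it assumes both calls use
// a standard calling convention whose RegMask clobbers all of YMM/ZMM0-15):
//
//     vaddps %ymm0, %ymm1, %ymm2   -> CurState = EXITS_DIRTY
//     callq  foo                   -> vzeroupper inserted; EXITS_CLEAN
//     callq  bar                   -> state unchanged; still EXITS_CLEAN
//
// The block exits clean, so its successors are not added to the
// DirtySuccessors worklist.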

/// Loop over all of the basic blocks, inserting vzeroupper instructions before
/// function calls.
bool VZeroUpperInserter::runOnMachineFunction(MachineFunction &MF) {
  if (!UseVZeroUpper)
    return false;

  const X86Subtarget &ST = MF.getSubtarget<X86Subtarget>();
  if (!ST.hasAVX() || !ST.insertVZEROUPPER())
    return false;
  TII = ST.getInstrInfo();
  MachineRegisterInfo &MRI = MF.getRegInfo();
  EverMadeChange = false;
  IsX86INTR = MF.getFunction().getCallingConv() == CallingConv::X86_INTR;

  bool FnHasLiveInYmmOrZmm = checkFnHasLiveInYmmOrZmm(MRI);

  // Fast check: if the function doesn't use any ymm/zmm registers, we don't
  // need to insert any VZEROUPPER instructions.  This is constant-time, so it
  // is cheap in the common case of no ymm/zmm use.
  bool YmmOrZmmUsed = FnHasLiveInYmmOrZmm;
  for (auto *RC : {&X86::VR256RegClass, &X86::VR512_0_15RegClass}) {
    if (YmmOrZmmUsed)
      break;
    for (MCPhysReg Reg : *RC) {
      if (!MRI.reg_nodbg_empty(Reg)) {
        YmmOrZmmUsed = true;
        break;
      }
    }
  }
  if (!YmmOrZmmUsed)
    return false;

  assert(BlockStates.empty() && DirtySuccessors.empty() &&
         "X86VZeroUpper state should be clear");
  BlockStates.resize(MF.getNumBlockIDs());

  // Process all blocks. This will compute block exit states, record the first
  // unguarded call in each block, and add successors of dirty blocks to the
  // DirtySuccessors list.
  for (MachineBasicBlock &MBB : MF)
    processBasicBlock(MBB);

  // If any YMM/ZMM regs are live-in to this function, add the entry block to
  // the DirtySuccessors list.
  if (FnHasLiveInYmmOrZmm)
    addDirtySuccessor(MF.front());

  // Re-visit all blocks that are successors of EXITS_DIRTY blocks. Add
  // vzeroupper instructions to unguarded calls, and propagate EXITS_DIRTY
  // through PASS_THROUGH blocks.
  while (!DirtySuccessors.empty()) {
    MachineBasicBlock &MBB = *DirtySuccessors.back();
    DirtySuccessors.pop_back();
    BlockState &BBState = BlockStates[MBB.getNumber()];

    // MBB is a successor of a dirty block, so its first call needs to be
    // guarded.
    if (BBState.FirstUnguardedCall != MBB.end())
      insertVZeroUpper(BBState.FirstUnguardedCall, MBB);

    // If this successor was a pass-through block, then it is now dirty. Its
    // successors need to be added to the worklist (if they haven't been
    // already).
    if (BBState.ExitState == PASS_THROUGH) {
      LLVM_DEBUG(dbgs() << "MBB #" << MBB.getNumber()
                        << " was Pass-through, is now Dirty-out.\n");
      for (MachineBasicBlock *Succ : MBB.successors())
        addDirtySuccessor(*Succ);
    }
  }

  BlockStates.clear();
  return EverMadeChange;
}
359