1 //===- CalcSpillWeights.cpp -----------------------------------------------===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 9 #include "llvm/CodeGen/CalcSpillWeights.h" 10 #include "llvm/ADT/SmallPtrSet.h" 11 #include "llvm/CodeGen/LiveInterval.h" 12 #include "llvm/CodeGen/LiveIntervals.h" 13 #include "llvm/CodeGen/MachineFunction.h" 14 #include "llvm/CodeGen/MachineInstr.h" 15 #include "llvm/CodeGen/MachineLoopInfo.h" 16 #include "llvm/CodeGen/MachineOperand.h" 17 #include "llvm/CodeGen/MachineRegisterInfo.h" 18 #include "llvm/CodeGen/StackMaps.h" 19 #include "llvm/CodeGen/TargetInstrInfo.h" 20 #include "llvm/CodeGen/TargetRegisterInfo.h" 21 #include "llvm/CodeGen/TargetSubtargetInfo.h" 22 #include "llvm/CodeGen/VirtRegMap.h" 23 #include "llvm/Support/Debug.h" 24 #include "llvm/Support/MathExtras.h" 25 #include "llvm/Support/raw_ostream.h" 26 #include <cassert> 27 #include <tuple> 28 29 using namespace llvm; 30 31 #define DEBUG_TYPE "calcspillweights" 32 33 void VirtRegAuxInfo::calculateSpillWeightsAndHints() { 34 LLVM_DEBUG(dbgs() << "********** Compute Spill Weights **********\n" 35 << "********** Function: " << MF.getName() << '\n'); 36 37 MachineRegisterInfo &MRI = MF.getRegInfo(); 38 for (unsigned I = 0, E = MRI.getNumVirtRegs(); I != E; ++I) { 39 Register Reg = Register::index2VirtReg(I); 40 if (MRI.reg_nodbg_empty(Reg)) 41 continue; 42 calculateSpillWeightAndHint(LIS.getInterval(Reg)); 43 } 44 } 45 46 // Return the preferred allocation register for reg, given a COPY instruction. 
// \p MI is expected to be a copy-like instruction with \p Reg on one side;
// the register on the other side (resolved through sub-register indices) is
// the candidate hint.
Register VirtRegAuxInfo::copyHint(const MachineInstr *MI, Register Reg,
                                  const TargetRegisterInfo &TRI,
                                  const MachineRegisterInfo &MRI) {
  // Sub is the sub-register index on Reg's side of the copy; HReg/HSub
  // describe the other side (the hint candidate).
  unsigned Sub, HSub;
  Register HReg;
  if (MI->getOperand(0).getReg() == Reg) {
    // Reg is the copy destination; the hint comes from the source operand.
    Sub = MI->getOperand(0).getSubReg();
    HReg = MI->getOperand(1).getReg();
    HSub = MI->getOperand(1).getSubReg();
  } else {
    // Reg is the copy source; the hint comes from the destination operand.
    Sub = MI->getOperand(1).getSubReg();
    HReg = MI->getOperand(0).getReg();
    HSub = MI->getOperand(0).getSubReg();
  }

  // No register on the other side: no hint.
  if (!HReg)
    return 0;

  // A virtual register is only a useful hint when both sides use the same
  // sub-register index; otherwise the lanes would not line up.
  if (HReg.isVirtual())
    return Sub == HSub ? HReg : Register();

  // Physical register: first resolve the actual physreg (lane) being copied.
  const TargetRegisterClass *RC = MRI.getRegClass(Reg);
  MCRegister CopiedPReg = HSub ? TRI.getSubReg(HReg, HSub) : HReg.asMCReg();
  if (RC->contains(CopiedPReg))
    return CopiedPReg;

  // Check if reg:sub matches so that a super register could be hinted.
  if (Sub)
    return TRI.getMatchingSuperReg(CopiedPReg, Sub, RC);

  return Register();
}

// Check if all values in LI are rematerializable, i.e. whether every value
// number is defined by a trivially rematerializable instruction (possibly
// reached by tracing through full copies introduced by live range splitting).
bool VirtRegAuxInfo::isRematerializable(const LiveInterval &LI,
                                        const LiveIntervals &LIS,
                                        const VirtRegMap &VRM,
                                        const TargetInstrInfo &TII) {
  Register Reg = LI.reg();
  // The register LI was split from, used to recognize split-introduced copies.
  Register Original = VRM.getOriginal(Reg);
  for (LiveInterval::const_vni_iterator I = LI.vni_begin(), E = LI.vni_end();
       I != E; ++I) {
    const VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    // PHI-defined values have no single defining instruction to remat from.
    if (VNI->isPHIDef())
      return false;

    MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
    assert(MI && "Dead valno in interval");

    // Trace copies introduced by live range splitting.  The inline
    // spiller can rematerialize through these copies, so the spill
    // weight must reflect this.
    while (TII.isFullCopyInstr(*MI)) {
      // The copy destination must match the interval register.
      if (MI->getOperand(0).getReg() != Reg)
        return false;

      // Get the source register.
      Reg = MI->getOperand(1).getReg();

      // If the original (pre-splitting) registers match this
      // copy came from a split.
      if (!Reg.isVirtual() || VRM.getOriginal(Reg) != Original)
        return false;

      // Follow the copy live-in value.
      const LiveInterval &SrcLI = LIS.getInterval(Reg);
      LiveQueryResult SrcQ = SrcLI.Query(VNI->def);
      VNI = SrcQ.valueIn();
      assert(VNI && "Copy from non-existing value");
      if (VNI->isPHIDef())
        return false;
      MI = LIS.getInstructionFromIndex(VNI->def);
      assert(MI && "Dead valno in interval");
    }

    // The (possibly copy-traced) defining instruction must itself be
    // trivially rematerializable.
    if (!TII.isTriviallyReMaterializable(*MI))
      return false;
  }
  return true;
}

// Return true if LI.reg() appears in the variable-operand section of any
// STATEPOINT instruction; such operands may legitimately live on the stack.
bool VirtRegAuxInfo::isLiveAtStatepointVarArg(LiveInterval &LI) {
  return any_of(VRM.getRegInfo().reg_operands(LI.reg()),
                [](MachineOperand &MO) {
                  MachineInstr *MI = MO.getParent();
                  if (MI->getOpcode() != TargetOpcode::STATEPOINT)
                    return false;
                  // Operands at or past getVarIdx() are the statepoint's
                  // variable (gc/deopt) arguments.
                  return StatepointOpers(MI).getVarIdx() <= MO.getOperandNo();
                });
}

// Compute and install the spill weight for LI; a negative result from the
// helper marks LI unspillable, in which case the weight is left untouched.
void VirtRegAuxInfo::calculateSpillWeightAndHint(LiveInterval &LI) {
  float Weight = weightCalcHelper(LI);
  // Check if unspillable.
  // weightCalcHelper signals "unspillable" with a negative weight; do not
  // overwrite the interval's weight in that case.
  if (Weight < 0)
    return;
  LI.setWeight(Weight);
}

// Return true if some use of LI.reg() is an inline-asm operand that the asm
// can fold into a memory reference, in which case spilling may be folded
// directly into the instruction.
static bool canMemFoldInlineAsm(LiveInterval &LI,
                                const MachineRegisterInfo &MRI) {
  for (const MachineOperand &MO : MRI.reg_operands(LI.reg())) {
    const MachineInstr *MI = MO.getParent();
    if (MI->isInlineAsm() && MI->mayFoldInlineAsmRegOp(MI->getOperandNo(&MO)))
      return true;
  }

  return false;
}

// Compute the spill weight of LI and collect copy-derived allocation hints.
// Returns a negative value when the interval is (or becomes) unspillable.
// When Start/End are non-null, LI is treated as a hypothetical local split
// artifact confined to [Start, End] within one basic block, and neither the
// interval nor MRI's hints are updated.
float VirtRegAuxInfo::weightCalcHelper(LiveInterval &LI, SlotIndex *Start,
                                       SlotIndex *End) {
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  MachineBasicBlock *MBB = nullptr;
  float TotalWeight = 0;
  unsigned NumInstr = 0; // Number of instructions using LI
  SmallPtrSet<MachineInstr *, 8> Visited;

  // Existing target-provided hint, consulted later so we neither duplicate it
  // nor clobber it.
  std::pair<unsigned, Register> TargetHint = MRI.getRegAllocationHint(LI.reg());

  if (LI.isSpillable()) {
    Register Reg = LI.reg();
    Register Original = VRM.getOriginal(Reg);
    const LiveInterval &OrigInt = LIS.getInterval(Original);
    // li comes from a split of OrigInt. If OrigInt was marked
    // as not spillable, make sure the new interval is marked
    // as not spillable as well.
    if (!OrigInt.isSpillable())
      LI.markNotSpillable();
  }

  // Don't recompute spill weight for an unspillable register.
  bool IsSpillable = LI.isSpillable();

  bool IsLocalSplitArtifact = Start && End;

  // Do not update future local split artifacts.
  bool ShouldUpdateLI = !IsLocalSplitArtifact;

  if (IsLocalSplitArtifact) {
    MachineBasicBlock *LocalMBB = LIS.getMBBFromIndex(*End);
    assert(LocalMBB == LIS.getMBBFromIndex(*Start) &&
           "start and end are expected to be in the same basic block");

    // Local split artifact will have 2 additional copy instructions and they
    // will be in the same BB.
    // localLI = COPY other
    // ...
    // other = COPY localLI
    TotalWeight +=
        LiveIntervals::getSpillWeight(true, false, &MBFI, LocalMBB, PSI);
    TotalWeight +=
        LiveIntervals::getSpillWeight(false, true, &MBFI, LocalMBB, PSI);

    NumInstr += 2;
  }

  // CopyHint is a sortable hint derived from a COPY instruction.
  struct CopyHint {
    Register Reg;
    float Weight;
    bool IsCSR;
    CopyHint(Register R, float W, bool IsCSR)
        : Reg(R), Weight(W), IsCSR(IsCSR) {}
    // Sort order: physregs before virtregs, then by descending weight, then
    // non-callee-saved before callee-saved, with register id as tie-breaker.
    bool operator<(const CopyHint &Rhs) const {
      // Always prefer any physreg hint.
      if (Reg.isPhysical() != Rhs.Reg.isPhysical())
        return Reg.isPhysical();
      if (Weight != Rhs.Weight)
        return (Weight > Rhs.Weight);
      // Prefer non-CSR to CSR.
      if (Reg.isPhysical() && IsCSR != Rhs.IsCSR)
        return !IsCSR;
      return Reg.id() < Rhs.Reg.id(); // Tie-breaker.
    }
  };

  bool IsExiting = false;
  // Accumulated weight per candidate hint register.
  SmallDenseMap<Register, float, 8> Hint;
  for (MachineRegisterInfo::reg_instr_nodbg_iterator
           I = MRI.reg_instr_nodbg_begin(LI.reg()),
           E = MRI.reg_instr_nodbg_end();
       I != E;) {
    // Advance before inspecting MI, in case iteration state is perturbed.
    MachineInstr *MI = &*(I++);

    // For local split artifacts, we are interested only in instructions
    // between the expected start and end of the range.
    SlotIndex SI = LIS.getInstructionIndex(*MI);
    if (IsLocalSplitArtifact && ((SI < *Start) || (SI > *End)))
      continue;

    NumInstr++;
    // Identity copies (same reg and subreg on both sides) carry no cost and
    // no hint information.
    bool identityCopy = false;
    auto DestSrc = TII.isCopyInstr(*MI);
    if (DestSrc) {
      const MachineOperand *DestRegOp = DestSrc->Destination;
      const MachineOperand *SrcRegOp = DestSrc->Source;
      identityCopy = DestRegOp->getReg() == SrcRegOp->getReg() &&
                     DestRegOp->getSubReg() == SrcRegOp->getSubReg();
    }

    if (identityCopy || MI->isImplicitDef())
      continue;
    // An instruction may reference LI.reg() in several operands; count it
    // only once.
    if (!Visited.insert(MI).second)
      continue;

    // For terminators that produce values, ask the backend if the register is
    // not spillable.
    if (TII.isUnspillableTerminator(MI) &&
        MI->definesRegister(LI.reg(), /*TRI=*/nullptr)) {
      LI.markNotSpillable();
      return -1.0f;
    }

    // Force Weight onto the stack so that x86 doesn't add hidden precision.
    // For unspillable intervals Weight stays 1.0 and only feeds the hints.
    stack_float_t Weight = 1.0f;
    if (IsSpillable) {
      // Get loop info for mi.
      if (MI->getParent() != MBB) {
        MBB = MI->getParent();
        const MachineLoop *Loop = Loops.getLoopFor(MBB);
        IsExiting = Loop ? Loop->isLoopExiting(MBB) : false;
      }

      // Calculate instr weight.
      bool Reads, Writes;
      std::tie(Reads, Writes) = MI->readsWritesVirtualRegister(LI.reg());
      Weight = LiveIntervals::getSpillWeight(Writes, Reads, &MBFI, *MI, PSI);

      // Give extra weight to what looks like a loop induction variable update.
      if (Writes && IsExiting && LIS.isLiveOutOfMBB(LI, MBB))
        Weight *= 3;

      TotalWeight += Weight;
    }

    // Get allocation hints from copies.
    if (!TII.isCopyInstr(*MI))
      continue;
    Register HintReg = copyHint(MI, LI.reg(), TRI, MRI);
    if (HintReg && (HintReg.isVirtual() || MRI.isAllocatable(HintReg)))
      Hint[HintReg] += Weight;
  }

  // Pass all the sorted copy hints to mri.
  if (ShouldUpdateLI && Hint.size()) {
    // Remove a generic hint if previously added by target.
    if (TargetHint.first == 0 && TargetHint.second)
      MRI.clearSimpleHint(LI.reg());

    // Don't add the target-type hint again.
    Register SkipReg = TargetHint.first != 0 ? TargetHint.second : Register();
    SmallVector<CopyHint, 8> RegHints;
    for (const auto &[Reg, Weight] : Hint) {
      if (Reg != SkipReg)
        RegHints.emplace_back(
            Reg, Weight,
            Reg.isPhysical() ? TRI.isCalleeSavedPhysReg(Reg, MF) : false);
    }
    // Install hints best-first per CopyHint::operator<.
    sort(RegHints);
    for (const auto &[Reg, _, __] : RegHints)
      MRI.addRegAllocationHint(LI.reg(), Reg);

    // Weakly boost the spill weight of hinted registers.
    TotalWeight *= 1.01F;
  }

  // If the live interval was already unspillable, leave it that way.
  if (!IsSpillable)
    return -1.0;

  // Mark li as unspillable if all live ranges are tiny and the interval
  // is not live at any reg mask. If the interval is live at a reg mask
  // spilling may be required. If li is live as use in statepoint instruction
  // spilling may be required due to if we mark interval with use in statepoint
  // as not spillable we are risky to end up with no register to allocate.
  // At the same time STATEPOINT instruction is perfectly fine to have this
  // operand on stack, so spilling such interval and folding its load from stack
  // into instruction itself makes perfect sense.
  if (ShouldUpdateLI && LI.isZeroLength(LIS.getSlotIndexes()) &&
      !LI.isLiveAtIndexes(LIS.getRegMaskSlots()) &&
      !isLiveAtStatepointVarArg(LI) && !canMemFoldInlineAsm(LI, MRI)) {
    LI.markNotSpillable();
    return -1.0;
  }

  // If all of the definitions of the interval are re-materializable,
  // it is a preferred candidate for spilling.
  // FIXME: this gets much more complicated once we support non-trivial
  // re-materialization.
  if (isRematerializable(LI, LIS, VRM, *MF.getSubtarget().getInstrInfo()))
    TotalWeight *= 0.5F;

  // Finally, we scale the weight by the scale factor of register class.
  const TargetRegisterClass *RC = MRI.getRegClass(LI.reg());
  TotalWeight *= TRI.getSpillWeightScaleFactor(RC);

  // Normalize by range size: slot distance for split artifacts, interval
  // size otherwise.
  if (IsLocalSplitArtifact)
    return normalize(TotalWeight, Start->distance(*End), NumInstr);
  return normalize(TotalWeight, LI.getSize(), NumInstr);
}