//===- CalcSpillWeights.cpp -----------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/StackMaps.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "calcspillweights"

void VirtRegAuxInfo::calculateSpillWeightsAndHints() {
  LLVM_DEBUG(dbgs() << "********** Compute Spill Weights **********\n"
                    << "********** Function: " << MF.getName() << '\n');

  MachineRegisterInfo &MRI = MF.getRegInfo();
  for (unsigned I = 0, E = MRI.getNumVirtRegs(); I != E; ++I) {
    Register Reg = Register::index2VirtReg(I);
    if (MRI.reg_nodbg_empty(Reg))
      continue;
    calculateSpillWeightAndHint(LIS.getInterval(Reg));
  }
}

// Return the preferred allocation register for reg, given a COPY instruction.
Register VirtRegAuxInfo::copyHint(const MachineInstr *MI, unsigned Reg,
                                  const TargetRegisterInfo &TRI,
                                  const MachineRegisterInfo &MRI) {
  unsigned Sub, HSub;
  Register HReg;
  if (MI->getOperand(0).getReg() == Reg) {
    Sub = MI->getOperand(0).getSubReg();
    HReg = MI->getOperand(1).getReg();
    HSub = MI->getOperand(1).getSubReg();
  } else {
    Sub = MI->getOperand(1).getSubReg();
    HReg = MI->getOperand(0).getReg();
    HSub = MI->getOperand(0).getSubReg();
  }

  if (!HReg)
    return 0;

  if (HReg.isVirtual())
    return Sub == HSub ? HReg : Register();

  const TargetRegisterClass *RC = MRI.getRegClass(Reg);
  MCRegister CopiedPReg = HSub ? TRI.getSubReg(HReg, HSub) : HReg.asMCReg();
  if (RC->contains(CopiedPReg))
    return CopiedPReg;

  // Check if reg:sub matches so that a super register could be hinted.
  if (Sub)
    return TRI.getMatchingSuperReg(CopiedPReg, Sub, RC);

  return 0;
}

// Check if all values in LI are rematerializable
bool VirtRegAuxInfo::isRematerializable(const LiveInterval &LI,
                                        const LiveIntervals &LIS,
                                        const VirtRegMap &VRM,
                                        const TargetInstrInfo &TII) {
  Register Reg = LI.reg();
  Register Original = VRM.getOriginal(Reg);
  for (LiveInterval::const_vni_iterator I = LI.vni_begin(), E = LI.vni_end();
       I != E; ++I) {
    const VNInfo *VNI = *I;
    if (VNI->isUnused())
      continue;
    if (VNI->isPHIDef())
      return false;

    MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
    assert(MI && "Dead valno in interval");

    // Trace copies introduced by live range splitting. The inline
    // spiller can rematerialize through these copies, so the spill
    // weight must reflect this.
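    //
    // For example, after splitting, a value may be defined by a chain like
    //   %split = COPY %orig    ; both map back to the same original vreg
    // where only %orig's defining instruction is rematerializable, so the
    // loop below walks back through full copies within the same original
    // register before checking rematerializability.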
    while (TII.isFullCopyInstr(*MI)) {
      // The copy destination must match the interval register.
      if (MI->getOperand(0).getReg() != Reg)
        return false;

      // Get the source register.
      Reg = MI->getOperand(1).getReg();

      // If the original (pre-splitting) registers match, this
      // copy came from a split.
      if (!Reg.isVirtual() || VRM.getOriginal(Reg) != Original)
        return false;

      // Follow the copy live-in value.
      const LiveInterval &SrcLI = LIS.getInterval(Reg);
      LiveQueryResult SrcQ = SrcLI.Query(VNI->def);
      VNI = SrcQ.valueIn();
      assert(VNI && "Copy from non-existing value");
      if (VNI->isPHIDef())
        return false;
      MI = LIS.getInstructionFromIndex(VNI->def);
      assert(MI && "Dead valno in interval");
    }

    if (!TII.isTriviallyReMaterializable(*MI))
      return false;
  }
  return true;
}

bool VirtRegAuxInfo::isLiveAtStatepointVarArg(LiveInterval &LI) {
  return any_of(VRM.getRegInfo().reg_operands(LI.reg()),
                [](MachineOperand &MO) {
                  MachineInstr *MI = MO.getParent();
                  if (MI->getOpcode() != TargetOpcode::STATEPOINT)
                    return false;
                  return StatepointOpers(MI).getVarIdx() <= MO.getOperandNo();
                });
}

void VirtRegAuxInfo::calculateSpillWeightAndHint(LiveInterval &LI) {
  float Weight = weightCalcHelper(LI);
  // Check if unspillable.
  if (Weight < 0)
    return;
  LI.setWeight(Weight);
}

static bool canMemFoldInlineAsm(LiveInterval &LI,
                                const MachineRegisterInfo &MRI) {
  for (const MachineOperand &MO : MRI.reg_operands(LI.reg())) {
    const MachineInstr *MI = MO.getParent();
    if (MI->isInlineAsm() && MI->mayFoldInlineAsmRegOp(MI->getOperandNo(&MO)))
      return true;
  }

  return false;
}

float VirtRegAuxInfo::weightCalcHelper(LiveInterval &LI, SlotIndex *Start,
                                       SlotIndex *End) {
  MachineRegisterInfo &MRI = MF.getRegInfo();
  const TargetRegisterInfo &TRI = *MF.getSubtarget().getRegisterInfo();
  const TargetInstrInfo &TII = *MF.getSubtarget().getInstrInfo();
  MachineBasicBlock *MBB = nullptr;
  float TotalWeight = 0;
  unsigned NumInstr = 0; // Number of instructions using LI
  SmallPtrSet<MachineInstr *, 8> Visited;

  std::pair<unsigned, Register> TargetHint = MRI.getRegAllocationHint(LI.reg());

  if (LI.isSpillable()) {
    Register Reg = LI.reg();
    Register Original = VRM.getOriginal(Reg);
    const LiveInterval &OrigInt = LIS.getInterval(Original);
    // LI comes from a split of OrigInt. If OrigInt was marked
    // as not spillable, make sure the new interval is marked
    // as not spillable as well.
    if (!OrigInt.isSpillable())
      LI.markNotSpillable();
  }

  // Don't recompute spill weight for an unspillable register.
  bool IsSpillable = LI.isSpillable();

  bool IsLocalSplitArtifact = Start && End;

  // Do not update future local split artifacts.
  bool ShouldUpdateLI = !IsLocalSplitArtifact;

  if (IsLocalSplitArtifact) {
    MachineBasicBlock *LocalMBB = LIS.getMBBFromIndex(*End);
    assert(LocalMBB == LIS.getMBBFromIndex(*Start) &&
           "start and end are expected to be in the same basic block");

    // Local split artifact will have 2 additional copy instructions and they
    // will be in the same BB.
    // localLI = COPY other
    // ...
    // other = COPY localLI
    TotalWeight += LiveIntervals::getSpillWeight(true, false, &MBFI, LocalMBB);
    TotalWeight += LiveIntervals::getSpillWeight(false, true, &MBFI, LocalMBB);

    NumInstr += 2;
  }

  // CopyHint is a sortable hint derived from a COPY instruction.
  struct CopyHint {
    const Register Reg;
    const float Weight;
    CopyHint(Register R, float W) : Reg(R), Weight(W) {}
    bool operator<(const CopyHint &Rhs) const {
      // Always prefer any physreg hint.
      if (Reg.isPhysical() != Rhs.Reg.isPhysical())
        return Reg.isPhysical();
      if (Weight != Rhs.Weight)
        return (Weight > Rhs.Weight);
      return Reg.id() < Rhs.Reg.id(); // Tie-breaker.
    }
  };

  bool IsExiting = false;
  std::set<CopyHint> CopyHints;
  DenseMap<unsigned, float> Hint;
  for (MachineRegisterInfo::reg_instr_nodbg_iterator
           I = MRI.reg_instr_nodbg_begin(LI.reg()),
           E = MRI.reg_instr_nodbg_end();
       I != E;) {
    MachineInstr *MI = &*(I++);

    // For local split artifacts, we are interested only in instructions
    // between the expected start and end of the range.
    SlotIndex SI = LIS.getInstructionIndex(*MI);
    if (IsLocalSplitArtifact && ((SI < *Start) || (SI > *End)))
      continue;

    NumInstr++;
    bool identityCopy = false;
    auto DestSrc = TII.isCopyInstr(*MI);
    if (DestSrc) {
      const MachineOperand *DestRegOp = DestSrc->Destination;
      const MachineOperand *SrcRegOp = DestSrc->Source;
      identityCopy = DestRegOp->getReg() == SrcRegOp->getReg() &&
                     DestRegOp->getSubReg() == SrcRegOp->getSubReg();
    }

    if (identityCopy || MI->isImplicitDef())
      continue;
    if (!Visited.insert(MI).second)
      continue;

    // For terminators that produce values, ask the backend if the register is
    // not spillable.
    if (TII.isUnspillableTerminator(MI) && MI->definesRegister(LI.reg())) {
      LI.markNotSpillable();
      return -1.0f;
    }

    // FreeBSD customization: similar to the HWeight declaration below, add a
    // volatile qualifier to avoid slightly different weight results on amd64
    // and i386 hosts, and possibly choosing different registers in the
    // register allocator. See
    // <https://github.com/llvm/llvm-project/issues/99396> for more details.
    volatile float Weight = 1.0f;
    if (IsSpillable) {
      // Get loop info for MI.
      if (MI->getParent() != MBB) {
        MBB = MI->getParent();
        const MachineLoop *Loop = Loops.getLoopFor(MBB);
        IsExiting = Loop ? Loop->isLoopExiting(MBB) : false;
      }

      // Calculate instr weight.
      bool Reads, Writes;
      std::tie(Reads, Writes) = MI->readsWritesVirtualRegister(LI.reg());
      Weight = LiveIntervals::getSpillWeight(Writes, Reads, &MBFI, *MI);

      // Give extra weight to what looks like a loop induction variable update.
      if (Writes && IsExiting && LIS.isLiveOutOfMBB(LI, MBB))
        Weight *= 3;

      TotalWeight += Weight;
    }

    // Get allocation hints from copies.
    if (!TII.isCopyInstr(*MI))
      continue;
    Register HintReg = copyHint(MI, LI.reg(), TRI, MRI);
    if (!HintReg)
      continue;
    // Force HWeight onto the stack so that x86 doesn't add hidden precision,
    // making the comparison incorrectly pass (i.e., 1 > 1 == true??).
    //
    // FIXME: we probably shouldn't use floats at all.
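    //
    // (On i386 the x87 FPU keeps intermediate float results at extended
    // 80-bit precision unless they are stored to memory; the volatile
    // qualifier below forces that store. The FreeBSD note above applies the
    // same idea to Weight.)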
    volatile float HWeight = Hint[HintReg] += Weight;
    if (HintReg.isVirtual() || MRI.isAllocatable(HintReg))
      CopyHints.insert(CopyHint(HintReg, HWeight));
  }

  // Pass all the sorted copy hints to MRI.
  if (ShouldUpdateLI && CopyHints.size()) {
    // Remove a generic hint if previously added by target.
    if (TargetHint.first == 0 && TargetHint.second)
      MRI.clearSimpleHint(LI.reg());

    SmallSet<Register, 4> HintedRegs;
    for (const auto &Hint : CopyHints) {
      if (!HintedRegs.insert(Hint.Reg).second ||
          (TargetHint.first != 0 && Hint.Reg == TargetHint.second))
        // Don't add the same reg twice or the target-type hint again.
        continue;
      MRI.addRegAllocationHint(LI.reg(), Hint.Reg);
    }

    // Weakly boost the spill weight of hinted registers.
    TotalWeight *= 1.01F;
  }

  // If the live interval was already unspillable, leave it that way.
  if (!IsSpillable)
    return -1.0;

  // Mark LI as unspillable if all live ranges are tiny and the interval is
  // not live at any reg mask. If the interval is live at a reg mask, spilling
  // may be required. Likewise, if LI is used as a variadic operand of a
  // STATEPOINT, keep it spillable: marking such an interval as not spillable
  // risks leaving no register to allocate, while STATEPOINT is perfectly fine
  // with having this operand on the stack, so spilling such an interval and
  // folding its load from the stack into the instruction itself makes perfect
  // sense.
  if (ShouldUpdateLI && LI.isZeroLength(LIS.getSlotIndexes()) &&
      !LI.isLiveAtIndexes(LIS.getRegMaskSlots()) &&
      !isLiveAtStatepointVarArg(LI) && !canMemFoldInlineAsm(LI, MRI)) {
    LI.markNotSpillable();
    return -1.0;
  }

  // If all of the definitions of the interval are re-materializable,
  // it is a preferred candidate for spilling.
  // FIXME: this gets much more complicated once we support non-trivial
  // re-materialization.
  if (isRematerializable(LI, LIS, VRM, *MF.getSubtarget().getInstrInfo()))
    TotalWeight *= 0.5F;

  if (IsLocalSplitArtifact)
    return normalize(TotalWeight, Start->distance(*End), NumInstr);
  return normalize(TotalWeight, LI.getSize(), NumInstr);
}