//==- RegAllocGreedy.h ------- greedy register allocator ----------*-C++-*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the RAGreedy function pass for register allocation in
// optimized builds.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_REGALLOCGREEDY_H_
#define LLVM_CODEGEN_REGALLOCGREEDY_H_

#include "InterferenceCache.h"
#include "RegAllocBase.h"
#include "RegAllocEvictionAdvisor.h"
#include "RegAllocPriorityAdvisor.h"
#include "SpillPlacement.h"
#include "SplitKit.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveRangeEdit.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/CodeGen/Spiller.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include <algorithm>
#include <cstdint>
#include <memory>
#include <optional>
#include <queue>
#include <utility>

namespace llvm {
class AllocationOrder;
class AnalysisUsage;
class EdgeBundles;
class LiveDebugVariables;
class LiveIntervals;
class LiveRegMatrix;
class MachineBasicBlock;
class MachineBlockFrequencyInfo;
class MachineDominatorTree;
class MachineLoop;
class MachineLoopInfo;
class MachineOptimizationRemarkEmitter;
class MachineOptimizationRemarkMissed;
class SlotIndexes;
class TargetInstrInfo;
class VirtRegMap;

class LLVM_LIBRARY_VISIBILITY RAGreedy : public MachineFunctionPass,
                                         public RegAllocBase,
                                         private LiveRangeEdit::Delegate {
  // Interface to eviction advisers
public:
  /// Track allocation stage and eviction loop prevention during allocation.
  class ExtraRegInfo final {
    // RegInfo - Keep additional information about each live range.
    struct RegInfo {
      LiveRangeStage Stage = RS_New;

      // Cascade - Eviction loop prevention. See
      // canEvictInterferenceBasedOnCost().
      unsigned Cascade = 0;

      RegInfo() = default;
    };

    IndexedMap<RegInfo, VirtReg2IndexFunctor> Info;
    unsigned NextCascade = 1;

  public:
    ExtraRegInfo() {}
    ExtraRegInfo(const ExtraRegInfo &) = delete;

    LiveRangeStage getStage(Register Reg) const { return Info[Reg].Stage; }

    LiveRangeStage getStage(const LiveInterval &VirtReg) const {
      return getStage(VirtReg.reg());
    }

    void setStage(Register Reg, LiveRangeStage Stage) {
      Info.grow(Reg.id());
      Info[Reg].Stage = Stage;
    }

    void setStage(const LiveInterval &VirtReg, LiveRangeStage Stage) {
      setStage(VirtReg.reg(), Stage);
    }

    /// Return the current stage of the register, if present, otherwise
    /// initialize it and return that.
    LiveRangeStage getOrInitStage(Register Reg) {
      Info.grow(Reg.id());
      return getStage(Reg);
    }

    unsigned getCascade(Register Reg) const { return Info[Reg].Cascade; }

    void setCascade(Register Reg, unsigned Cascade) {
      Info.grow(Reg.id());
      Info[Reg].Cascade = Cascade;
    }

    unsigned getOrAssignNewCascade(Register Reg) {
      unsigned Cascade = getCascade(Reg);
      if (!Cascade) {
        Cascade = NextCascade++;
        setCascade(Reg, Cascade);
      }
      return Cascade;
    }

    unsigned getCascadeOrCurrentNext(Register Reg) const {
      unsigned Cascade = getCascade(Reg);
      if (!Cascade)
        Cascade = NextCascade;
      return Cascade;
    }

    template <typename Iterator>
    void setStage(Iterator Begin, Iterator End, LiveRangeStage NewStage) {
      for (; Begin != End; ++Begin) {
        Register Reg = *Begin;
        Info.grow(Reg.id());
        if (Info[Reg].Stage == RS_New)
          Info[Reg].Stage = NewStage;
      }
    }
    void LRE_DidCloneVirtReg(Register New, Register Old);
  };

  LiveRegMatrix *getInterferenceMatrix() const { return Matrix; }
  LiveIntervals *getLiveIntervals() const { return LIS; }
  VirtRegMap *getVirtRegMap() const { return VRM; }
  const RegisterClassInfo &getRegClassInfo() const { return RegClassInfo; }
  const ExtraRegInfo &getExtraInfo() const { return *ExtraInfo; }
  size_t getQueueSize() const { return Queue.size(); }
  // end (interface to eviction advisers)

  // Interface to priority advisers
  bool getRegClassPriorityTrumpsGlobalness() const {
    return RegClassPriorityTrumpsGlobalness;
  }
  bool getReverseLocalAssignment() const { return ReverseLocalAssignment; }
  // end (interface to priority advisers)

private:
  // Convenient shortcuts.
  using PQueue = std::priority_queue<std::pair<unsigned, unsigned>>;
  using SmallLISet = SmallSetVector<const LiveInterval *, 4>;

  // We need to track all tentative recolorings so we can roll back any
  // successful and unsuccessful recoloring attempts.
  using RecoloringStack =
      SmallVector<std::pair<const LiveInterval *, MCRegister>, 8>;

  // context
  MachineFunction *MF = nullptr;

  // Shortcuts to some useful interface.
  const TargetInstrInfo *TII = nullptr;

  // analyses
  SlotIndexes *Indexes = nullptr;
  MachineBlockFrequencyInfo *MBFI = nullptr;
  MachineDominatorTree *DomTree = nullptr;
  MachineLoopInfo *Loops = nullptr;
  MachineOptimizationRemarkEmitter *ORE = nullptr;
  EdgeBundles *Bundles = nullptr;
  SpillPlacement *SpillPlacer = nullptr;
  LiveDebugVariables *DebugVars = nullptr;

  // state
  std::unique_ptr<Spiller> SpillerInstance;
  PQueue Queue;
  std::unique_ptr<VirtRegAuxInfo> VRAI;
  std::optional<ExtraRegInfo> ExtraInfo;
  std::unique_ptr<RegAllocEvictionAdvisor> EvictAdvisor;

  std::unique_ptr<RegAllocPriorityAdvisor> PriorityAdvisor;

  // CutOffStage keeps track of whether register allocation failed because of
  // the cutoffs encountered in last-chance recoloring.
  // Note: This is used as a bitmask. New values should be the next power of 2.
  enum CutOffStage {
    // No cutoffs encountered
    CO_None = 0,

    // lcr-max-depth cutoff encountered
    CO_Depth = 1,

    // lcr-max-interf cutoff encountered
    CO_Interf = 2
  };

  uint8_t CutOffInfo = CutOffStage::CO_None;

#ifndef NDEBUG
  static const char *const StageName[];
#endif

  // splitting state.
  std::unique_ptr<SplitAnalysis> SA;
  std::unique_ptr<SplitEditor> SE;

  /// Cached per-block interference maps
  InterferenceCache IntfCache;

  /// All basic blocks where the current register has uses.
  SmallVector<SpillPlacement::BlockConstraint, 8> SplitConstraints;

  /// Global live range splitting candidate info.
  struct GlobalSplitCandidate {
    // Register intended for assignment, or 0.
    MCRegister PhysReg;

    // SplitKit interval index for this candidate.
    unsigned IntvIdx;

    // Interference for PhysReg.
    InterferenceCache::Cursor Intf;

    // Bundles where this candidate should be live.
    BitVector LiveBundles;
    SmallVector<unsigned, 8> ActiveBlocks;

    void reset(InterferenceCache &Cache, MCRegister Reg) {
      PhysReg = Reg;
      IntvIdx = 0;
      Intf.setPhysReg(Cache, Reg);
      LiveBundles.clear();
      ActiveBlocks.clear();
    }

    // Set B[I] = C for every live bundle where B[I] was NoCand.
    unsigned getBundles(SmallVectorImpl<unsigned> &B, unsigned C) {
      unsigned Count = 0;
      for (unsigned I : LiveBundles.set_bits())
        if (B[I] == NoCand) {
          B[I] = C;
          Count++;
        }
      return Count;
    }
  };

  /// Candidate info for each PhysReg in AllocationOrder.
  /// This vector never shrinks, but grows to the size of the largest register
  /// class.
  SmallVector<GlobalSplitCandidate, 32> GlobalCand;

  enum : unsigned { NoCand = ~0u };

  /// Candidate map. Each edge bundle is assigned to a GlobalCand entry, or to
  /// NoCand which indicates the stack interval.
  SmallVector<unsigned, 32> BundleCand;

  /// Callee-save register cost, calculated once per machine function.
  BlockFrequency CSRCost;

  /// Set of broken hints that may be reconciled later because of eviction.
  SmallSetVector<const LiveInterval *, 8> SetOfBrokenHints;

  /// The register cost values. This list will be recreated for each machine
  /// function.
  ArrayRef<uint8_t> RegCosts;

  /// Flags for the live range priority calculation, determined once per
  /// machine function.
  bool RegClassPriorityTrumpsGlobalness = false;

  bool ReverseLocalAssignment = false;

public:
  RAGreedy(const RegClassFilterFunc F = allocateAllRegClasses);

  /// Return the pass name.
  StringRef getPassName() const override { return "Greedy Register Allocator"; }

  /// RAGreedy analysis usage.
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  void releaseMemory() override;
  Spiller &spiller() override { return *SpillerInstance; }
  void enqueueImpl(const LiveInterval *LI) override;
  const LiveInterval *dequeue() override;
  MCRegister selectOrSplit(const LiveInterval &,
                           SmallVectorImpl<Register> &) override;
  void aboutToRemoveInterval(const LiveInterval &) override;

  /// Perform register allocation.
  bool runOnMachineFunction(MachineFunction &mf) override;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoPHIs);
  }

  MachineFunctionProperties getClearedProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::IsSSA);
  }

  static char ID;

private:
  MCRegister selectOrSplitImpl(const LiveInterval &,
                               SmallVectorImpl<Register> &, SmallVirtRegSet &,
                               RecoloringStack &, unsigned = 0);

  bool LRE_CanEraseVirtReg(Register) override;
  void LRE_WillShrinkVirtReg(Register) override;
  void LRE_DidCloneVirtReg(Register, Register) override;
  void enqueue(PQueue &CurQueue, const LiveInterval *LI);
  const LiveInterval *dequeue(PQueue &CurQueue);

  bool hasVirtRegAlloc();
  BlockFrequency calcSpillCost();
  bool addSplitConstraints(InterferenceCache::Cursor, BlockFrequency &);
  bool addThroughConstraints(InterferenceCache::Cursor, ArrayRef<unsigned>);
  bool growRegion(GlobalSplitCandidate &Cand);
  BlockFrequency calcGlobalSplitCost(GlobalSplitCandidate &,
                                     const AllocationOrder &Order);
  bool calcCompactRegion(GlobalSplitCandidate &);
  void splitAroundRegion(LiveRangeEdit &, ArrayRef<unsigned>);
  void calcGapWeights(MCRegister, SmallVectorImpl<float> &);
  void evictInterference(const LiveInterval &, MCRegister,
                         SmallVectorImpl<Register> &);
  bool mayRecolorAllInterferences(MCRegister PhysReg,
                                  const LiveInterval &VirtReg,
                                  SmallLISet &RecoloringCandidates,
                                  const SmallVirtRegSet &FixedRegisters);

  MCRegister tryAssign(const LiveInterval &, AllocationOrder &,
                       SmallVectorImpl<Register> &, const SmallVirtRegSet &);
  MCRegister tryEvict(const LiveInterval &, AllocationOrder &,
                      SmallVectorImpl<Register> &, uint8_t,
                      const SmallVirtRegSet &);
  MCRegister tryRegionSplit(const LiveInterval &, AllocationOrder &,
                            SmallVectorImpl<Register> &);
  /// Calculate cost of region splitting around the specified register.
  unsigned calculateRegionSplitCostAroundReg(MCPhysReg PhysReg,
                                             AllocationOrder &Order,
                                             BlockFrequency &BestCost,
                                             unsigned &NumCands,
                                             unsigned &BestCand);
  /// Calculate cost of region splitting.
  unsigned calculateRegionSplitCost(const LiveInterval &VirtReg,
                                    AllocationOrder &Order,
                                    BlockFrequency &BestCost,
                                    unsigned &NumCands, bool IgnoreCSR);
  /// Perform region splitting.
  unsigned doRegionSplit(const LiveInterval &VirtReg, unsigned BestCand,
                         bool HasCompact, SmallVectorImpl<Register> &NewVRegs);
  /// Try to split VirtReg around physical Hint register.
  bool trySplitAroundHintReg(MCPhysReg Hint, const LiveInterval &VirtReg,
                             SmallVectorImpl<Register> &NewVRegs,
                             AllocationOrder &Order);
  /// Check other options before using a callee-saved register for the first
  /// time.
  MCRegister tryAssignCSRFirstTime(const LiveInterval &VirtReg,
                                   AllocationOrder &Order, MCRegister PhysReg,
                                   uint8_t &CostPerUseLimit,
                                   SmallVectorImpl<Register> &NewVRegs);
  void initializeCSRCost();
  unsigned tryBlockSplit(const LiveInterval &, AllocationOrder &,
                         SmallVectorImpl<Register> &);
  unsigned tryInstructionSplit(const LiveInterval &, AllocationOrder &,
                               SmallVectorImpl<Register> &);
  unsigned tryLocalSplit(const LiveInterval &, AllocationOrder &,
                         SmallVectorImpl<Register> &);
  unsigned trySplit(const LiveInterval &, AllocationOrder &,
                    SmallVectorImpl<Register> &, const SmallVirtRegSet &);
  unsigned tryLastChanceRecoloring(const LiveInterval &, AllocationOrder &,
                                   SmallVectorImpl<Register> &,
                                   SmallVirtRegSet &, RecoloringStack &,
                                   unsigned);
  bool tryRecoloringCandidates(PQueue &, SmallVectorImpl<Register> &,
                               SmallVirtRegSet &, RecoloringStack &, unsigned);
  void tryHintRecoloring(const LiveInterval &);
  void tryHintsRecoloring();

  /// Model the information carried by one end of a copy.
  struct HintInfo {
    /// The frequency of the copy.
    BlockFrequency Freq;
    /// The virtual register or physical register.
    Register Reg;
    /// Its currently assigned register.
    /// In case of a physical register Reg == PhysReg.
    MCRegister PhysReg;

    HintInfo(BlockFrequency Freq, Register Reg, MCRegister PhysReg)
        : Freq(Freq), Reg(Reg), PhysReg(PhysReg) {}
  };
  using HintsInfo = SmallVector<HintInfo, 4>;

  BlockFrequency getBrokenHintFreq(const HintsInfo &, MCRegister);
  void collectHintInfo(Register, HintsInfo &);

  /// Greedy RA statistics, reported through optimization remarks.
  struct RAGreedyStats {
    unsigned Reloads = 0;
    unsigned FoldedReloads = 0;
    unsigned ZeroCostFoldedReloads = 0;
    unsigned Spills = 0;
    unsigned FoldedSpills = 0;
    unsigned Copies = 0;
    float ReloadsCost = 0.0f;
    float FoldedReloadsCost = 0.0f;
    float SpillsCost = 0.0f;
    float FoldedSpillsCost = 0.0f;
    float CopiesCost = 0.0f;

    bool isEmpty() {
      return !(Reloads || FoldedReloads || Spills || FoldedSpills ||
               ZeroCostFoldedReloads || Copies);
    }

    void add(RAGreedyStats other) {
      Reloads += other.Reloads;
      FoldedReloads += other.FoldedReloads;
      ZeroCostFoldedReloads += other.ZeroCostFoldedReloads;
      Spills += other.Spills;
      FoldedSpills += other.FoldedSpills;
      Copies += other.Copies;
      ReloadsCost += other.ReloadsCost;
      FoldedReloadsCost += other.FoldedReloadsCost;
      SpillsCost += other.SpillsCost;
      FoldedSpillsCost += other.FoldedSpillsCost;
      CopiesCost += other.CopiesCost;
    }

    void report(MachineOptimizationRemarkMissed &R);
  };

  /// Compute statistics for a basic block.
  RAGreedyStats computeStats(MachineBasicBlock &MBB);

  /// Compute and report statistics through a remark.
  RAGreedyStats reportStats(MachineLoop *L);

  /// Report the statistics for each loop.
  void reportStats();
};
} // namespace llvm
#endif // #ifndef LLVM_CODEGEN_REGALLOCGREEDY_H_