//===- InterferenceCache.cpp - Caching per-block interference -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// InterferenceCache remembers per-block interference in LiveIntervalUnions.
//
//===----------------------------------------------------------------------===//

#include "InterferenceCache.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervalUnion.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

// Static member used for null interference cursors.
const InterferenceCache::BlockInterference
    InterferenceCache::Cursor::NoInterference;

// Initializes PhysRegEntries (instead of a SmallVector, PhysRegEntries is a
// buffer of size NumPhysRegs to speed up alloc/clear for targets with large
// reg files). Calloced memory is used for good form, and quiets tools like
// Valgrind too, but zero-initialized memory is not required by the algorithm:
// this is because PhysRegEntries works like a SparseSet and its entries are
// only valid when there is a corresponding CacheEntries assignment. There is
// also support for when pass managers are reused for targets with different
// numbers of PhysRegs: in this case PhysRegEntries is freed and reinitialized.
void InterferenceCache::reinitPhysRegEntries() {
  if (PhysRegEntriesCount == TRI->getNumRegs()) return;
  free(PhysRegEntries);
  PhysRegEntriesCount = TRI->getNumRegs();
  PhysRegEntries = static_cast<unsigned char*>(
      safe_calloc(PhysRegEntriesCount, sizeof(unsigned char)));
}

void InterferenceCache::init(MachineFunction *mf,
                             LiveIntervalUnion *liuarray,
                             SlotIndexes *indexes,
                             LiveIntervals *lis,
                             const TargetRegisterInfo *tri) {
  MF = mf;
  LIUArray = liuarray;
  TRI = tri;
  reinitPhysRegEntries();
  for (unsigned i = 0; i != CacheEntries; ++i)
    Entries[i].clear(mf, indexes, lis);
}

InterferenceCache::Entry *InterferenceCache::get(unsigned PhysReg) {
  unsigned E = PhysRegEntries[PhysReg];
  if (E < CacheEntries && Entries[E].getPhysReg() == PhysReg) {
    if (!Entries[E].valid(LIUArray, TRI))
      Entries[E].revalidate(LIUArray, TRI);
    return &Entries[E];
  }
  // No valid entry exists, pick the next round-robin entry.
  E = RoundRobin;
  if (++RoundRobin == CacheEntries)
    RoundRobin = 0;
  for (unsigned i = 0; i != CacheEntries; ++i) {
    // Skip entries that are in use.
    if (Entries[E].hasRefs()) {
      if (++E == CacheEntries)
        E = 0;
      continue;
    }
    Entries[E].reset(PhysReg, LIUArray, TRI, MF);
    PhysRegEntries[PhysReg] = E;
    return &Entries[E];
  }
  llvm_unreachable("Ran out of interference cache entries.");
}

/// revalidate - LIU contents have changed, update tags.
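/// The entry still describes the same PhysReg, but the underlying
/// LiveIntervalUnions were modified: bump the tag to invalidate cached block
/// info, reset the cached iterators, and re-read the per-unit union tags.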
void InterferenceCache::Entry::revalidate(LiveIntervalUnion *LIUArray,
                                          const TargetRegisterInfo *TRI) {
  // Invalidate all block entries.
  ++Tag;
  // Invalidate all iterators.
  PrevPos = SlotIndex();
  unsigned i = 0;
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i)
    RegUnits[i].VirtTag = LIUArray[*Units].getTag();
}

void InterferenceCache::Entry::reset(unsigned physReg,
                                     LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI,
                                     const MachineFunction *MF) {
  assert(!hasRefs() && "Cannot reset cache entry with references");
  // LIUs changed, invalidate cache.
  ++Tag;
  PhysReg = physReg;
  Blocks.resize(MF->getNumBlockIDs());

  // Reset iterators.
  PrevPos = SlotIndex();
  RegUnits.clear();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
    RegUnits.push_back(LIUArray[*Units]);
    RegUnits.back().Fixed = &LIS->getRegUnit(*Units);
  }
}

bool InterferenceCache::Entry::valid(LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI) {
  unsigned i = 0, e = RegUnits.size();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i) {
    if (i == e)
      return false;
    if (LIUArray[*Units].changedSince(RegUnits[i].VirtTag))
      return false;
  }
  return i == e;
}

void InterferenceCache::Entry::update(unsigned MBBNum) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);

  // Use advanceTo only when possible.
  if (PrevPos != Start) {
    if (!PrevPos.isValid() || Start < PrevPos) {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.find(Start);
        RUI.FixedI = RUI.Fixed->find(Start);
      }
    } else {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.advanceTo(Start);
        if (RUI.FixedI != RUI.Fixed->end())
          RUI.FixedI = RUI.Fixed->advanceTo(RUI.FixedI, Start);
      }
    }
    PrevPos = Start;
  }

  MachineFunction::const_iterator MFI =
      MF->getBlockNumbered(MBBNum)->getIterator();
  BlockInterference *BI = &Blocks[MBBNum];
  ArrayRef<SlotIndex> RegMaskSlots;
  ArrayRef<const uint32_t*> RegMaskBits;
  while (true) {
    BI->Tag = Tag;
    BI->First = BI->Last = SlotIndex();

    // Check for first interference from virtregs.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
      if (!I.valid())
        continue;
      SlotIndex StartI = I.start();
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Same thing for fixed interference.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveInterval::const_iterator I = RegUnits[i].FixedI;
      LiveInterval::const_iterator E = RegUnits[i].Fixed->end();
      if (I == E)
        continue;
      SlotIndex StartI = I->start;
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Also check for register mask interference.
    RegMaskSlots = LIS->getRegMaskSlotsInBlock(MBBNum);
    RegMaskBits = LIS->getRegMaskBitsInBlock(MBBNum);
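    // Regmask slots can only move BI->First earlier: scan the slots that come
    // before the first virtreg/fixed interference (or before the block end if
    // none was found).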
    SlotIndex Limit = BI->First.isValid() ? BI->First : Stop;
    for (unsigned i = 0, e = RegMaskSlots.size();
         i != e && RegMaskSlots[i] < Limit; ++i)
      if (MachineOperand::clobbersPhysReg(RegMaskBits[i], PhysReg)) {
        // Register mask i clobbers PhysReg before the LIU interference.
        BI->First = RegMaskSlots[i];
        break;
      }

    PrevPos = Stop;
    if (BI->First.isValid())
      break;

    // No interference in this block? Go ahead and precompute the next block.
    if (++MFI == MF->end())
      return;
    MBBNum = MFI->getNumber();
    BI = &Blocks[MBBNum];
    if (BI->Tag == Tag)
      return;
    std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);
  }

  // Check for last interference in block.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
    if (!I.valid() || I.start() >= Stop)
      continue;
    I.advanceTo(Stop);
    bool Backup = !I.valid() || I.start() >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I.stop();
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Fixed interference.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveInterval::iterator &I = RegUnits[i].FixedI;
    LiveRange *LR = RegUnits[i].Fixed;
    if (I == LR->end() || I->start >= Stop)
      continue;
    I = LR->advanceTo(I, Stop);
    bool Backup = I == LR->end() || I->start >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I->end;
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Also check for register mask interference.
  SlotIndex Limit = BI->Last.isValid() ? BI->Last : Start;
  for (unsigned i = RegMaskSlots.size();
       i && RegMaskSlots[i-1].getDeadSlot() > Limit; --i)
    if (MachineOperand::clobbersPhysReg(RegMaskBits[i-1], PhysReg)) {
      // Register mask i-1 clobbers PhysReg after the LIU interference.
      // Model the regmask clobber as a dead def.
      BI->Last = RegMaskSlots[i-1].getDeadSlot();
      break;
    }
}