//===- InterferenceCache.cpp - Caching per-block interference -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// InterferenceCache remembers per-block interference in LiveIntervalUnions.
//
//===----------------------------------------------------------------------===//

#include "InterferenceCache.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

// Static member used for null interference cursors.
const InterferenceCache::BlockInterference
    InterferenceCache::Cursor::NoInterference;

// Initializes PhysRegEntries (instead of a SmallVector, PhysRegEntries is a
// buffer of size NumPhysRegs to speed up alloc/clear for targets with large
// reg files). Calloced memory is used for good form, and quiets tools like
// Valgrind too, but zero-initialized memory is not required by the algorithm:
// this is because PhysRegEntries works like a SparseSet and its entries are
// only valid when there is a corresponding CacheEntries assignment. There is
// also support for when pass managers are reused for targets with different
// numbers of PhysRegs: in this case PhysRegEntries is freed and reinitialized.
void InterferenceCache::reinitPhysRegEntries() {
  if (PhysRegEntriesCount == TRI->getNumRegs()) return;
  free(PhysRegEntries);
  PhysRegEntriesCount = TRI->getNumRegs();
  PhysRegEntries = static_cast<unsigned char*>(
      safe_calloc(PhysRegEntriesCount, sizeof(unsigned char)));
}

void InterferenceCache::init(MachineFunction *mf,
                             LiveIntervalUnion *liuarray,
                             SlotIndexes *indexes,
                             LiveIntervals *lis,
                             const TargetRegisterInfo *tri) {
  MF = mf;
  LIUArray = liuarray;
  TRI = tri;
  reinitPhysRegEntries();
  for (Entry &E : Entries)
    E.clear(mf, indexes, lis);
}

InterferenceCache::Entry *InterferenceCache::get(MCRegister PhysReg) {
  unsigned char E = PhysRegEntries[PhysReg.id()];
  if (E < CacheEntries && Entries[E].getPhysReg() == PhysReg) {
    if (!Entries[E].valid(LIUArray, TRI))
      Entries[E].revalidate(LIUArray, TRI);
    return &Entries[E];
  }
  // No valid entry exists, pick the next round-robin entry.
  E = RoundRobin;
  if (++RoundRobin == CacheEntries)
    RoundRobin = 0;
  for (unsigned i = 0; i != CacheEntries; ++i) {
    // Skip entries that are in use.
    if (Entries[E].hasRefs()) {
      if (++E == CacheEntries)
        E = 0;
      continue;
    }
    Entries[E].reset(PhysReg, LIUArray, TRI, MF);
    PhysRegEntries[PhysReg.id()] = E;
    return &Entries[E];
  }
  llvm_unreachable("Ran out of interference cache entries.");
}

/// revalidate - LIU contents have changed, update tags.
void InterferenceCache::Entry::revalidate(LiveIntervalUnion *LIUArray,
                                          const TargetRegisterInfo *TRI) {
  // Invalidate all block entries.
  ++Tag;
  // Invalidate all iterators.
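  // Clearing PrevPos forces the next update() to reposition the segment
  // iterators with find() rather than advanceTo(), since their cached
  // position is no longer meaningful after the unions changed.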
  PrevPos = SlotIndex();
  unsigned i = 0;
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i)
    RegUnits[i].VirtTag = LIUArray[*Units].getTag();
}

void InterferenceCache::Entry::reset(MCRegister physReg,
                                     LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI,
                                     const MachineFunction *MF) {
  assert(!hasRefs() && "Cannot reset cache entry with references");
  // LIU's changed, invalidate cache.
  ++Tag;
  PhysReg = physReg;
  Blocks.resize(MF->getNumBlockIDs());

  // Reset iterators.
  PrevPos = SlotIndex();
  RegUnits.clear();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
    RegUnits.push_back(LIUArray[*Units]);
    RegUnits.back().Fixed = &LIS->getRegUnit(*Units);
  }
}

bool InterferenceCache::Entry::valid(LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI) {
  unsigned i = 0, e = RegUnits.size();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i) {
    if (i == e)
      return false;
    if (LIUArray[*Units].changedSince(RegUnits[i].VirtTag))
      return false;
  }
  return i == e;
}

void InterferenceCache::Entry::update(unsigned MBBNum) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);

  // Use advanceTo only when possible.
  if (PrevPos != Start) {
    if (!PrevPos.isValid() || Start < PrevPos) {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.find(Start);
        RUI.FixedI = RUI.Fixed->find(Start);
      }
    } else {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.advanceTo(Start);
        if (RUI.FixedI != RUI.Fixed->end())
          RUI.FixedI = RUI.Fixed->advanceTo(RUI.FixedI, Start);
      }
    }
    PrevPos = Start;
  }

  MachineFunction::const_iterator MFI =
      MF->getBlockNumbered(MBBNum)->getIterator();
  BlockInterference *BI = &Blocks[MBBNum];
  ArrayRef<SlotIndex> RegMaskSlots;
  ArrayRef<const uint32_t*> RegMaskBits;
  while (true) {
    BI->Tag = Tag;
    BI->First = BI->Last = SlotIndex();

    // Check for first interference from virtregs.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
      if (!I.valid())
        continue;
      SlotIndex StartI = I.start();
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Same thing for fixed interference.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveInterval::const_iterator I = RegUnits[i].FixedI;
      LiveInterval::const_iterator E = RegUnits[i].Fixed->end();
      if (I == E)
        continue;
      SlotIndex StartI = I->start;
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Also check for register mask interference.
    RegMaskSlots = LIS->getRegMaskSlotsInBlock(MBBNum);
    RegMaskBits = LIS->getRegMaskBitsInBlock(MBBNum);
    SlotIndex Limit = BI->First.isValid() ? BI->First : Stop;
    for (unsigned i = 0, e = RegMaskSlots.size();
         i != e && RegMaskSlots[i] < Limit; ++i)
      if (MachineOperand::clobbersPhysReg(RegMaskBits[i], PhysReg)) {
        // Register mask i clobbers PhysReg before the LIU interference.
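        // The slots are scanned in ascending order, so this is the earliest
        // clobbering mask in the block; record it and stop.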
        BI->First = RegMaskSlots[i];
        break;
      }

    PrevPos = Stop;
    if (BI->First.isValid())
      break;

    // No interference in this block? Go ahead and precompute the next block.
    if (++MFI == MF->end())
      return;
    MBBNum = MFI->getNumber();
    BI = &Blocks[MBBNum];
    if (BI->Tag == Tag)
      return;
    std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);
  }

  // Check for last interference in block.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
    if (!I.valid() || I.start() >= Stop)
      continue;
    I.advanceTo(Stop);
    bool Backup = !I.valid() || I.start() >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I.stop();
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Fixed interference.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveInterval::iterator &I = RegUnits[i].FixedI;
    LiveRange *LR = RegUnits[i].Fixed;
    if (I == LR->end() || I->start >= Stop)
      continue;
    I = LR->advanceTo(I, Stop);
    bool Backup = I == LR->end() || I->start >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I->end;
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Also check for register mask interference.
  SlotIndex Limit = BI->Last.isValid() ? BI->Last : Start;
  for (unsigned i = RegMaskSlots.size();
       i && RegMaskSlots[i-1].getDeadSlot() > Limit; --i)
    if (MachineOperand::clobbersPhysReg(RegMaskBits[i-1], PhysReg)) {
      // Register mask i-1 clobbers PhysReg after the LIU interference.
      // Model the regmask clobber as a dead def.
      BI->Last = RegMaskSlots[i-1].getDeadSlot();
      break;
    }
}
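
// Illustrative usage sketch (not part of this file's implementation). It
// assumes the Cursor interface declared in InterferenceCache.h (setPhysReg,
// moveToBlock, hasInterference, first, last); a register-allocator client
// typically queries cached per-block interference along these lines:
//
//   InterferenceCache Cache;
//   Cache.init(MF, LIUArray, Indexes, LIS, TRI);
//   InterferenceCache::Cursor Cur;
//   Cur.setPhysReg(Cache, PhysReg);    // bind the cursor to a cache entry
//   Cur.moveToBlock(MBBNum);           // fetch/compute BlockInterference
//   if (Cur.hasInterference()) {
//     SlotIndex First = Cur.first();   // first interfering point in block
//     SlotIndex Last = Cur.last();     // last interfering point in block
//   }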