//===- CodeGenRegisters.cpp - Register and RegisterClass Info -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines structures to encapsulate information gleaned from the
// target register and register class definitions.
//
//===----------------------------------------------------------------------===//

#include "CodeGenRegisters.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IntEqClasses.h"
#include "llvm/ADT/PointerUnion.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSet.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/TableGen/Error.h"
#include "llvm/TableGen/Record.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <map>
#include <queue>
#include <string>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "regalloc-emitter"

//===----------------------------------------------------------------------===//
// CodeGenSubRegIndex
//===----------------------------------------------------------------------===//

// Construct a sub-register index from a TableGen SubRegIndex record.
// AllSuperRegsCovered and Artificial start out true; they are cleared later
// while the register graph is being built (see CodeGenRegister::computeSubRegs)
// once a counterexample is seen.
CodeGenSubRegIndex::CodeGenSubRegIndex(const Record *R, unsigned Enum,
                                       const CodeGenHwModes &CGH)
    : TheDef(R), Name(R->getName().str()), EnumValue(Enum),
      AllSuperRegsCovered(true), Artificial(true) {
  if (R->getValue("Namespace"))
    Namespace = R->getValueAsString("Namespace").str();

  // "SubRegRanges" may name a per-HwMode range def. If it does not provide a
  // default mode, fall back to the range encoded directly on the record.
  if (const RecordVal *RV = R->getValue("SubRegRanges"))
    if (auto *DI = dyn_cast_or_null<DefInit>(RV->getValue()))
      Range = SubRegRangeByHwMode(DI->getDef(), CGH);
  if (!Range.hasDefault())
    Range.insertSubRegRangeForMode(DefaultMode, SubRegRange(R));
}

// Construct a synthesized sub-register index that has no backing TableGen
// record (TheDef is null); used for indices invented by the register bank.
CodeGenSubRegIndex::CodeGenSubRegIndex(StringRef N, StringRef Nspace,
                                       unsigned Enum)
    : TheDef(nullptr), Name(N.str()), Namespace(Nspace.str()),
      Range(SubRegRange(-1, -1)), EnumValue(Enum), AllSuperRegsCovered(true),
      Artificial(true) {}

// Return the index name qualified by its namespace, e.g. "NS::Name".
std::string CodeGenSubRegIndex::getQualifiedName() const {
  std::string N = getNamespace();
  if (!N.empty())
    N += "::";
  N += getName();
  return N;
}

// Process the ComposedOf and CoveringSubRegIndices fields of this index's
// record, registering the composition A(B) = this and the concatenation
// spelling with the register bank. No-op for synthesized indices.
void CodeGenSubRegIndex::updateComponents(CodeGenRegBank &RegBank) {
  if (!TheDef)
    return;

  std::vector<const Record *> Comps =
      TheDef->getValueAsListOfDefs("ComposedOf");
  if (!Comps.empty()) {
    if (Comps.size() != 2)
      PrintFatalError(TheDef->getLoc(),
                      "ComposedOf must have exactly two entries");
    CodeGenSubRegIndex *A = RegBank.getSubRegIdx(Comps[0]);
    CodeGenSubRegIndex *B = RegBank.getSubRegIdx(Comps[1]);
    // addComposite returns a non-null conflicting index if A(B) was already
    // mapped to something other than `this`.
    CodeGenSubRegIndex *X = A->addComposite(B, this, RegBank.getHwModes());
    if (X)
      PrintFatalError(TheDef->getLoc(), "Ambiguous ComposedOf entries");
  }

  std::vector<const Record *> Parts =
      TheDef->getValueAsListOfDefs("CoveringSubRegIndices");
  if (!Parts.empty()) {
    if (Parts.size() < 2)
      PrintFatalError(TheDef->getLoc(),
                      "CoveringSubRegIndices must have two or more entries");
    SmallVector<CodeGenSubRegIndex *, 8> IdxParts;
    for (const Record *Part : Parts)
      IdxParts.push_back(RegBank.getSubRegIdx(Part));
    setConcatenationOf(IdxParts);
  }
}

// Recursively compute this index's lane mask as the union of the lane masks
// of all indices it composes with. Memoized in the mutable LaneMask member.
LaneBitmask CodeGenSubRegIndex::computeLaneMask() const {
  // Already computed?
  if (LaneMask.any())
    return LaneMask;

  // Recursion guard, shouldn't be required.
  LaneMask = LaneBitmask::getAll();

  // The lane mask is simply the union of all sub-indices.
  LaneBitmask M;
  for (const auto &C : Composed)
    M |= C.second->computeLaneMask();
  assert(M.any() && "Missing lane mask, sub-register cycle?");
  LaneMask = M;
  return LaneMask;
}

// Record that this index is the concatenation of Parts. If a concatenation
// was already recorded, it must agree with Parts (asserted).
void CodeGenSubRegIndex::setConcatenationOf(
    ArrayRef<CodeGenSubRegIndex *> Parts) {
  if (ConcatenationOf.empty()) {
    ConcatenationOf.assign(Parts.begin(), Parts.end());
    return;
  }
  assert(llvm::equal(Parts, ConcatenationOf) && "parts consistent");
}

// Flatten ConcatenationOf so that no member is itself a concatenation:
// each part that has its own ConcatenationOf list is replaced in place by
// that list (recursively flattened first).
void CodeGenSubRegIndex::computeConcatTransitiveClosure() {
  for (SmallVectorImpl<CodeGenSubRegIndex *>::iterator I =
           ConcatenationOf.begin();
       I != ConcatenationOf.end();
       /*empty*/) {
    CodeGenSubRegIndex *SubIdx = *I;
    SubIdx->computeConcatTransitiveClosure();
#ifndef NDEBUG
    for (CodeGenSubRegIndex *SRI : SubIdx->ConcatenationOf)
      assert(SRI->ConcatenationOf.empty() && "No transitive closure?");
#endif

    if (SubIdx->ConcatenationOf.empty()) {
      ++I;
    } else {
      // Splice SubIdx's flattened parts in place of SubIdx, then continue
      // scanning after the inserted run (the run is already flattened).
      I = ConcatenationOf.erase(I);
      I = ConcatenationOf.insert(I, SubIdx->ConcatenationOf.begin(),
                                 SubIdx->ConcatenationOf.end());
      I += SubIdx->ConcatenationOf.size();
    }
  }
}

//===----------------------------------------------------------------------===//
// CodeGenRegister
//===----------------------------------------------------------------------===//

// Construct a register from its TableGen Register record. Sub- and
// super-register relationships are filled in later by buildObjectGraph() /
// computeSubRegs() / computeSuperRegs().
CodeGenRegister::CodeGenRegister(const Record *R, unsigned Enum)
    : TheDef(R), EnumValue(Enum),
      CostPerUse(R->getValueAsListOfInts("CostPerUse")),
      CoveredBySubRegs(R->getValueAsBit("CoveredBySubRegs")),
      Constant(R->getValueAsBit("isConstant")), SubRegsComplete(false),
      SuperRegsComplete(false), TopoSig(~0u) {
  Artificial = R->getValueAsBit("isArtificial");
}
// Read the explicit sub-register lists and ad hoc aliases from this
// register's record, resolving them to CodeGenRegister/CodeGenSubRegIndex
// objects via RegBank. Must run for every register before computeSubRegs().
void CodeGenRegister::buildObjectGraph(CodeGenRegBank &RegBank) {
  std::vector<const Record *> SRIs =
      TheDef->getValueAsListOfDefs("SubRegIndices");
  std::vector<const Record *> SRs = TheDef->getValueAsListOfDefs("SubRegs");

  if (SRIs.size() != SRs.size())
    PrintFatalError(TheDef->getLoc(),
                    "SubRegs and SubRegIndices must have the same size");

  for (const auto &[SRI, SR] : zip_equal(SRIs, SRs)) {
    ExplicitSubRegIndices.push_back(RegBank.getSubRegIdx(SRI));
    ExplicitSubRegs.push_back(RegBank.getReg(SR));
  }

  // Also compute leading super-registers. Each register has a list of
  // covered-by-subregs super-registers where it appears as the first explicit
  // sub-register.
  //
  // This is used by computeSecondarySubRegs() to find candidates.
  if (CoveredBySubRegs && !ExplicitSubRegs.empty())
    ExplicitSubRegs.front()->LeadingSuperRegs.push_back(this);

  // Add ad hoc alias links. This is a symmetric relationship between two
  // registers, so build a symmetric graph by adding links in both ends.
  for (const Record *Alias : TheDef->getValueAsListOfDefs("Aliases")) {
    CodeGenRegister *Reg = RegBank.getReg(Alias);
    ExplicitAliases.push_back(Reg);
    Reg->ExplicitAliases.push_back(this);
  }
}

// Inherit register units from subregisters.
// Return true if the RegUnits changed.
bool CodeGenRegister::inheritRegUnits(CodeGenRegBank &RegBank) {
  bool changed = false;
  for (const auto &[_, SR] : SubRegs) {
    // Merge the subregister's units into this register's RegUnits.
    changed |= (RegUnits |= SR->RegUnits);
  }

  return changed;
}

// Compute the full (index -> sub-register) map for this register, including
// sub-registers inherited through explicit sub-registers and names obtained
// by composing sub-register indices. Also assigns the register's native
// register units. Recursive over the sub-register graph and memoized via
// SubRegsComplete.
const CodeGenRegister::SubRegMap &
CodeGenRegister::computeSubRegs(CodeGenRegBank &RegBank) {
  // Only compute this map once.
  if (SubRegsComplete)
    return SubRegs;
  SubRegsComplete = true;

  HasDisjunctSubRegs = ExplicitSubRegs.size() > 1;

  // First insert the explicit subregs and make sure they are fully indexed.
  for (auto [SR, Idx] : zip_equal(ExplicitSubRegs, ExplicitSubRegIndices)) {
    // An index used by a non-artificial register is itself non-artificial.
    if (!SR->Artificial)
      Idx->Artificial = false;
    if (!SubRegs.try_emplace(Idx, SR).second)
      PrintFatalError(TheDef->getLoc(), "SubRegIndex " + Idx->getName() +
                                            " appears twice in Register " +
                                            getName());
    // Map explicit sub-registers first, so the names take precedence.
    // The inherited sub-registers are mapped below.
    SubReg2Idx.try_emplace(SR, Idx);
  }

  // Keep track of inherited subregs and how they can be reached.
  SmallPtrSet<CodeGenRegister *, 8> Orphans;

  // Clone inherited subregs and place duplicate entries in Orphans.
  // Here the order is important - earlier subregs take precedence.
  for (CodeGenRegister *ESR : ExplicitSubRegs) {
    const SubRegMap &Map = ESR->computeSubRegs(RegBank);
    HasDisjunctSubRegs |= ESR->HasDisjunctSubRegs;

    for (const auto &SR : Map) {
      // insert() fails if the index is already taken; the register reached
      // through this duplicate index still needs a name, so it is an orphan.
      if (!SubRegs.insert(SR).second)
        Orphans.insert(SR.second);
    }
  }

  // Expand any composed subreg indices.
  // If dsub_2 has ComposedOf = [qsub_1, dsub_0], and this register has a
  // qsub_1 subreg, add a dsub_2 subreg. Keep growing Indices and process
  // expanded subreg indices recursively.
  SmallVector<CodeGenSubRegIndex *, 8> Indices = ExplicitSubRegIndices;
  for (unsigned i = 0; i != Indices.size(); ++i) {
    CodeGenSubRegIndex *Idx = Indices[i];
    const CodeGenSubRegIndex::CompMap &Comps = Idx->getComposites();
    CodeGenRegister *SR = SubRegs[Idx];
    const SubRegMap &Map = SR->computeSubRegs(RegBank);

    // Look at the possible compositions of Idx.
    // They may not all be supported by SR.
    for (auto [Key, Val] : Comps) {
      SubRegMap::const_iterator SRI = Map.find(Key);
      if (SRI == Map.end())
        continue; // Idx + I->first doesn't exist in SR.
      // Add `Val` as a name for the subreg SRI->second, assuming it is
      // orphaned, and the name isn't already used for something else.
      if (SubRegs.count(Val) || !Orphans.erase(SRI->second))
        continue;
      // We found a new name for the orphaned sub-register.
      SubRegs.try_emplace(Val, SRI->second);
      Indices.push_back(Val);
    }
  }

  // Now Orphans contains the inherited subregisters without a direct index.
  // Create inferred indexes for all missing entries.
  // Work backwards in the Indices vector in order to compose subregs bottom-up.
  // Consider this subreg sequence:
  //
  //   qsub_1 -> dsub_0 -> ssub_0
  //
  // The qsub_1 -> dsub_0 composition becomes dsub_2, so the ssub_0 register
  // can be reached in two different ways:
  //
  //   qsub_1 -> ssub_0
  //   dsub_2 -> ssub_0
  //
  // We pick the latter composition because another register may have [dsub_0,
  // dsub_1, dsub_2] subregs without necessarily having a qsub_1 subreg. The
  // dsub_2 -> ssub_0 composition can be shared.
  while (!Indices.empty() && !Orphans.empty()) {
    CodeGenSubRegIndex *Idx = Indices.pop_back_val();
    CodeGenRegister *SR = SubRegs[Idx];
    const SubRegMap &Map = SR->computeSubRegs(RegBank);
    for (const auto &[SRI, SubReg] : Map)
      if (Orphans.erase(SubReg))
        SubRegs[RegBank.getCompositeSubRegIndex(Idx, SRI)] = SubReg;
  }

  // Compute the inverse SubReg -> Idx map.
  for (auto &[SRI, SubReg] : SubRegs) {
    if (SubReg == this) {
      ArrayRef<SMLoc> Loc;
      if (TheDef)
        Loc = TheDef->getLoc();
      PrintFatalError(Loc, "Register " + getName() +
                               " has itself as a sub-register");
    }

    // Compute AllSuperRegsCovered.
    if (!CoveredBySubRegs)
      SRI->AllSuperRegsCovered = false;

    // Ensure that every sub-register has a unique name.
    DenseMap<const CodeGenRegister *, CodeGenSubRegIndex *>::iterator Ins =
        SubReg2Idx.try_emplace(SubReg, SRI).first;
    if (Ins->second == SRI)
      continue;
    // Trouble: Two different names for SubReg.second.
    ArrayRef<SMLoc> Loc;
    if (TheDef)
      Loc = TheDef->getLoc();
    PrintFatalError(Loc, "Sub-register can't have two names: " +
                             SubReg->getName() + " available as " +
                             SRI->getName() + " and " + Ins->second->getName());
  }

  // Derive possible names for sub-register concatenations from any explicit
  // sub-registers. By doing this before computeSecondarySubRegs(), we ensure
  // that getConcatSubRegIndex() won't invent any concatenated indices that the
  // user already specified.
  for (auto [Idx, SR] : enumerate(ExplicitSubRegs)) {
    if (!SR->CoveredBySubRegs || SR->Artificial)
      continue;

    // SR is composed of multiple sub-regs. Find their names in this register.
    bool AnyArtificial = false;
    SmallVector<CodeGenSubRegIndex *, 8> Parts;
    for (unsigned j = 0, e = SR->ExplicitSubRegs.size(); j != e; ++j) {
      CodeGenSubRegIndex &I = *SR->ExplicitSubRegIndices[j];
      if (I.Artificial) {
        AnyArtificial = true;
        break;
      }
      Parts.push_back(getSubRegIndex(SR->ExplicitSubRegs[j]));
    }

    if (AnyArtificial)
      continue;

    // Offer this as an existing spelling for the concatenation of Parts.
    ExplicitSubRegIndices[Idx]->setConcatenationOf(Parts);
  }

  // Initialize RegUnitList. Because getSubRegs is called recursively, this
  // processes the register hierarchy in postorder.
  if (ExplicitSubRegs.empty()) {
    // Create one register unit per leaf register. These units correspond to
    // the maximal cliques in the register overlap graph which is optimal.
    RegUnits.set(RegBank.newRegUnit(this));
  } else {
    // Inherit all sub-register units. It is good enough to look at the
    // explicit sub-registers, the other registers won't contribute any more
    // units.
    for (const CodeGenRegister *SR : ExplicitSubRegs)
      RegUnits |= SR->RegUnits;
  }

  // When there is ad hoc aliasing, we simply create one unit per edge in the
  // undirected ad hoc aliasing graph. Technically, we could do better by
  // identifying maximal cliques in the ad hoc graph, but cliques larger than 2
  // are extremely rare anyway (I've never seen one), so we don't bother with
  // the added complexity.
  for (CodeGenRegister *AR : ExplicitAliases) {
    // Only visit each edge once.
    if (AR->SubRegsComplete)
      continue;
    // Create a RegUnit representing this alias edge, and add it to both
    // registers.
    unsigned Unit = RegBank.newRegUnit(this, AR);
    RegUnits.set(Unit);
    AR->RegUnits.set(Unit);
  }

  // We have now computed the native register units. More may be adopted later
  // for balancing purposes.
  NativeRegUnits = RegUnits;

  return SubRegs;
}

// In a register that is covered by its sub-registers, try to find redundant
// sub-registers. For example:
//
//   QQ0 = {Q0, Q1}
//   Q0 = {D0, D1}
//   Q1 = {D2, D3}
//
// We can infer that D1_D2 is also a sub-register, even if it wasn't named in
// the register definition.
//
// The explicitly specified registers form a tree. This function discovers
// sub-register relationships that would force a DAG.
//
void CodeGenRegister::computeSecondarySubRegs(CodeGenRegBank &RegBank) {
  SmallVector<SubRegMap::value_type, 8> NewSubRegs;

  // Worklist of (index, sub-register) pairs still to be examined; newly
  // inferred sub-registers are pushed as they are discovered.
  std::queue<std::pair<CodeGenSubRegIndex *, CodeGenRegister *>> SubRegQueue;
  for (auto [SRI, SubReg] : SubRegs)
    SubRegQueue.emplace(SRI, SubReg);

  // Look at the leading super-registers of each sub-register. Those are the
  // candidates for new sub-registers, assuming they are fully contained in
  // this register.
  while (!SubRegQueue.empty()) {
    auto [SubRegIdx, SubReg] = SubRegQueue.front();
    SubRegQueue.pop();

    const CodeGenRegister::SuperRegList &Leads = SubReg->LeadingSuperRegs;
    for (const CodeGenRegister *Cand : Leads) {
      // Already got this sub-register?
      if (Cand == this || getSubRegIndex(Cand))
        continue;
      // Check if each component of Cand is already a sub-register.
      assert(!Cand->ExplicitSubRegs.empty() &&
             "Super-register has no sub-registers");
      if (Cand->ExplicitSubRegs.size() == 1)
        continue;
      SmallVector<CodeGenSubRegIndex *, 8> Parts;
      // We know that the first component is (SubRegIdx,SubReg). However we
      // may still need to split it into smaller subregister parts.
      assert(Cand->ExplicitSubRegs[0] == SubReg && "LeadingSuperRegs correct");
      assert(getSubRegIndex(SubReg) == SubRegIdx && "LeadingSuperRegs correct");
      for (CodeGenRegister *SubReg : Cand->ExplicitSubRegs) {
        if (CodeGenSubRegIndex *SubRegIdx = getSubRegIndex(SubReg)) {
          if (SubRegIdx->ConcatenationOf.empty())
            Parts.push_back(SubRegIdx);
          else
            append_range(Parts, SubRegIdx->ConcatenationOf);
        } else {
          // Sub-register doesn't exist.
          Parts.clear();
          break;
        }
      }
      // There is nothing to do if some Cand sub-register is not part of this
      // register.
      if (Parts.empty())
        continue;

      // Each part of Cand is a sub-register of this. Make the full Cand also
      // a sub-register with a concatenated sub-register index.
      CodeGenSubRegIndex *Concat =
          RegBank.getConcatSubRegIndex(Parts, RegBank.getHwModes());
      std::pair<CodeGenSubRegIndex *, CodeGenRegister *> NewSubReg = {
          Concat, const_cast<CodeGenRegister *>(Cand)};

      if (!SubRegs.insert(NewSubReg).second)
        continue;

      // We inserted a new subregister.
      NewSubRegs.push_back(NewSubReg);
      SubRegQueue.push(NewSubReg);
      SubReg2Idx.try_emplace(Cand, Concat);
    }
  }

  // Create sub-register index composition maps for the synthesized indices.
  for (auto [NewIdx, NewSubReg] : NewSubRegs) {
    for (auto [SRI, SubReg] : NewSubReg->SubRegs) {
      CodeGenSubRegIndex *SubIdx = getSubRegIndex(SubReg);
      if (!SubIdx)
        PrintFatalError(TheDef->getLoc(), "No SubRegIndex for " +
                                              SubReg->getName() + " in " +
                                              getName());
      NewIdx->addComposite(SRI, SubIdx, RegBank.getHwModes());
    }
  }
}

// Populate the SuperRegs lists (in topological order) and compute this
// register's topological signature from its sub-register structure.
void CodeGenRegister::computeSuperRegs(CodeGenRegBank &RegBank) {
  // Only visit each register once.
  if (SuperRegsComplete)
    return;
  SuperRegsComplete = true;

  // Make sure all sub-registers have been visited first, so the super-reg
  // lists will be topologically ordered.
  for (auto SubReg : SubRegs)
    SubReg.second->computeSuperRegs(RegBank);

  // Now add this as a super-register on all sub-registers.
  // Also compute the TopoSigId in post-order.
  TopoSigId Id;
  for (auto SubReg : SubRegs) {
    // Topological signature computed from SubIdx, TopoId(SubReg).
    // Loops and idempotent indices have TopoSig = ~0u.
    Id.push_back(SubReg.first->EnumValue);
    Id.push_back(SubReg.second->TopoSig);

    // Don't add duplicate entries.
    if (!SubReg.second->SuperRegs.empty() &&
        SubReg.second->SuperRegs.back() == this)
      continue;
    SubReg.second->SuperRegs.push_back(this);
  }
  TopoSig = RegBank.getTopoSig(Id);
}

// Insert all sub-registers into OSet in pre-order over the explicit
// sub-register tree; requires computeSubRegs() to have run.
void CodeGenRegister::addSubRegsPreOrder(
    SetVector<const CodeGenRegister *> &OSet, CodeGenRegBank &RegBank) const {
  assert(SubRegsComplete && "Must precompute sub-registers");
  for (CodeGenRegister *SR : ExplicitSubRegs) {
    if (OSet.insert(SR))
      SR->addSubRegsPreOrder(OSet, RegBank);
  }
  // Add any secondary sub-registers that weren't part of the explicit tree.
  OSet.insert_range(llvm::make_second_range(SubRegs));
}

// Get the sum of this register's unit weights.
unsigned CodeGenRegister::getWeight(const CodeGenRegBank &RegBank) const {
  unsigned Weight = 0;
  for (unsigned RegUnit : RegUnits)
    Weight += RegBank.getRegUnit(RegUnit).Weight;
  return Weight;
}

//===----------------------------------------------------------------------===//
// RegisterTuples
//===----------------------------------------------------------------------===//

// A RegisterTuples def is used to generate pseudo-registers from lists of
// sub-registers. We provide a SetTheory expander class that returns the new
// registers.
namespace {

struct TupleExpander : SetTheory::Expander {
  // Reference to SynthDefs in the containing CodeGenRegBank, to keep track of
  // the synthesized definitions for their lifetime.
  std::vector<std::unique_ptr<Record>> &SynthDefs;

  // Track all synthesized tuple names in order to detect duplicate definitions.
  llvm::StringSet<> TupleNames;

  TupleExpander(std::vector<std::unique_ptr<Record>> &SynthDefs)
      : SynthDefs(SynthDefs) {}

  // Expand one RegisterTuples def into its synthesized Register records,
  // appending them to Elts. Each tuple element n combines the n-th register
  // from each of the Dim zipped sub-register lists.
  void expand(SetTheory &ST, const Record *Def,
              SetTheory::RecSet &Elts) override {
    std::vector<const Record *> Indices =
        Def->getValueAsListOfDefs("SubRegIndices");
    unsigned Dim = Indices.size();
    const ListInit *SubRegs = Def->getValueAsListInit("SubRegs");
    if (Dim != SubRegs->size())
      PrintFatalError(Def->getLoc(), "SubRegIndices and SubRegs size mismatch");
    if (Dim < 2)
      PrintFatalError(Def->getLoc(),
                      "Tuples must have at least 2 sub-registers");

    // Evaluate the sub-register lists to be zipped. The number of tuples is
    // bounded by the shortest list.
    unsigned Length = ~0u;
    SmallVector<SetTheory::RecSet, 4> Lists(Dim);
    for (unsigned i = 0; i != Dim; ++i) {
      ST.evaluate(SubRegs->getElement(i), Lists[i], Def->getLoc());
      Length = std::min(Length, unsigned(Lists[i].size()));
    }

    if (Length == 0)
      return;

    // Precompute some types.
    const Record *RegisterCl = Def->getRecords().getClass("Register");
    const RecTy *RegisterRecTy = RecordRecTy::get(RegisterCl);
    std::vector<StringRef> RegNames =
        Def->getValueAsListOfStrings("RegAsmNames");

    // Zip them up.
    RecordKeeper &RK = Def->getRecords();
    for (unsigned n = 0; n != Length; ++n) {
      // The synthesized name is the '_'-joined member names; the first list's
      // member serves as the prototype for all other fields.
      std::string Name;
      const Record *Proto = Lists[0][n];
      std::vector<Init *> Tuple;
      for (unsigned i = 0; i != Dim; ++i) {
        const Record *Reg = Lists[i][n];
        if (i)
          Name += '_';
        Name += Reg->getName();
        Tuple.push_back(Reg->getDefInit());
      }

      // Take the cost list of the first register in the tuple.
      const ListInit *CostList = Proto->getValueAsListInit("CostPerUse");
      SmallVector<const Init *, 2> CostPerUse(CostList->getElements());

      const StringInit *AsmName = StringInit::get(RK, "");
      if (!RegNames.empty()) {
        if (RegNames.size() <= n)
          PrintFatalError(Def->getLoc(),
                          "Register tuple definition missing name for '" +
                              Name + "'.");
        AsmName = StringInit::get(RK, RegNames[n]);
      }

      // Create a new Record representing the synthesized register. This record
      // is only for consumption by CodeGenRegister, it is not added to the
      // RecordKeeper.
      SynthDefs.emplace_back(
          std::make_unique<Record>(Name, Def->getLoc(), Def->getRecords()));
      Record *NewReg = SynthDefs.back().get();
      Elts.insert(NewReg);

      // Detect duplicates among synthesized registers.
      const auto Res = TupleNames.insert(NewReg->getName());
      if (!Res.second)
        PrintFatalError(Def->getLoc(),
                        "Register tuple redefines register '" + Name + "'.");

      // Copy Proto super-classes.
      for (const auto &[Super, Loc] : Proto->getDirectSuperClasses())
        NewReg->addDirectSuperClass(Super, Loc);

      // Copy Proto fields. RV is deliberately taken by value so the
      // per-tuple overrides below don't modify Proto.
      for (RecordVal RV : Proto->getValues()) {
        // Skip existing fields, like NAME.
        if (NewReg->getValue(RV.getNameInit()))
          continue;

        StringRef Field = RV.getName();

        // Replace the sub-register list with Tuple.
        if (Field == "SubRegs")
          RV.setValue(ListInit::get(Tuple, RegisterRecTy));

        if (Field == "AsmName")
          RV.setValue(AsmName);

        // CostPerUse is aggregated from all Tuple members.
        if (Field == "CostPerUse")
          RV.setValue(ListInit::get(CostPerUse, CostList->getElementType()));

        // Composite registers are always covered by sub-registers.
        if (Field == "CoveredBySubRegs")
          RV.setValue(BitInit::get(RK, true));

        // Copy fields from the RegisterTuples def.
        if (Field == "SubRegIndices") {
          NewReg->addValue(*Def->getValue(Field));
          continue;
        }

        // Some fields get their default uninitialized value.
        if (Field == "DwarfNumbers" || Field == "DwarfAlias" ||
            Field == "Aliases") {
          if (const RecordVal *DefRV = RegisterCl->getValue(Field))
            NewReg->addValue(*DefRV);
          continue;
        }

        // Everything else is copied from Proto.
        NewReg->addValue(RV);
      }
    }
  }
};

} // end anonymous namespace

//===----------------------------------------------------------------------===//
// CodeGenRegisterClass
//===----------------------------------------------------------------------===//

// Sort a register vector by pointer-to-register ordering and drop duplicates.
static void sortAndUniqueRegisters(CodeGenRegister::Vec &M) {
  llvm::sort(M, deref<std::less<>>());
  M.erase(llvm::unique(M, deref<std::equal_to<>>()), M.end());
}

// Construct a register class from its TableGen RegisterClass record:
// value types, members, allocation orders, size/alignment info and the
// various allocation-related attributes.
CodeGenRegisterClass::CodeGenRegisterClass(CodeGenRegBank &RegBank,
                                           const Record *R)
    : TheDef(R), Name(R->getName().str()),
      RegsWithSuperRegsTopoSigs(RegBank.getNumTopoSigs()), EnumValue(-1),
      TSFlags(0) {
  GeneratePressureSet = R->getValueAsBit("GeneratePressureSet");
  std::vector<const Record *> TypeList = R->getValueAsListOfDefs("RegTypes");
  if (TypeList.empty())
    PrintFatalError(R->getLoc(), "RegTypes list must not be empty!");
  for (const Record *Type : TypeList) {
    if (!Type->isSubClassOf("ValueType"))
      PrintFatalError(R->getLoc(),
                      "RegTypes list member '" + Type->getName() +
                          "' does not derive from the ValueType class!");
    VTs.push_back(getValueTypeByHwMode(Type, RegBank.getHwModes()));
  }

  // Allocation order 0 is the full set. AltOrders provides others.
  const SetTheory::RecVec *Elements = RegBank.getSets().expand(R);
  const ListInit *AltOrders = R->getValueAsListInit("AltOrders");
  Orders.resize(1 + AltOrders->size());

  // Default allocation order always contains all registers.
  // A class is artificial only if every member is artificial.
  Artificial = true;
  for (const Record *Element : *Elements) {
    Orders[0].push_back(Element);
    const CodeGenRegister *Reg = RegBank.getReg(Element);
    Members.push_back(Reg);
    Artificial &= Reg->Artificial;
    if (!Reg->getSuperRegs().empty())
      RegsWithSuperRegsTopoSigs.set(Reg->getTopoSig());
  }
  sortAndUniqueRegisters(Members);

  // Alternative allocation orders may be subsets.
  SetTheory::RecSet Order;
  for (auto [Idx, AltOrderElem] : enumerate(AltOrders->getElements())) {
    RegBank.getSets().evaluate(AltOrderElem, Order, R->getLoc());
    Orders[1 + Idx].append(Order.begin(), Order.end());
    // Verify that all altorder members are regclass members.
    // Draining Order here also resets it for the next alternative order.
    while (!Order.empty()) {
      CodeGenRegister *Reg = RegBank.getReg(Order.back());
      Order.pop_back();
      if (!contains(Reg))
        PrintFatalError(R->getLoc(), " AltOrder register " + Reg->getName() +
                                         " is not a class member");
    }
  }

  Namespace = R->getValueAsString("Namespace");

  // Prefer per-HwMode size info from "RegInfos"; otherwise derive a default
  // mode entry from the "Size" field or the first value type.
  if (const RecordVal *RV = R->getValue("RegInfos"))
    if (const DefInit *DI = dyn_cast_or_null<DefInit>(RV->getValue()))
      RSI = RegSizeInfoByHwMode(DI->getDef(), RegBank.getHwModes());
  unsigned Size = R->getValueAsInt("Size");
  assert((RSI.hasDefault() || Size != 0 || VTs[0].isSimple()) &&
         "Impossible to determine register size");
  if (!RSI.hasDefault()) {
    RegSizeInfo RI;
    RI.RegSize = RI.SpillSize =
        Size ? Size : VTs[0].getSimple().getSizeInBits();
    RI.SpillAlignment = R->getValueAsInt("Alignment");
    RSI.insertRegSizeForMode(DefaultMode, RI);
  }

  CopyCost = R->getValueAsInt("CopyCost");
  Allocatable = R->getValueAsBit("isAllocatable");
  AltOrderSelect = R->getValueAsString("AltOrderSelect");
  int AllocationPriority = R->getValueAsInt("AllocationPriority");
  if (!isUInt<5>(AllocationPriority))
    PrintFatalError(R->getLoc(), "AllocationPriority out of range [0,31]");
  this->AllocationPriority = AllocationPriority;

  GlobalPriority = R->getValueAsBit("GlobalPriority");

  // Pack the TSFlags bits field into an integer, LSB first.
  const BitsInit *TSF = R->getValueAsBitsInit("TSFlags");
  for (auto [Idx, Bit] : enumerate(TSF->getBits()))
    TSFlags |= uint8_t(cast<BitInit>(Bit)->getValue()) << Idx;
}

// Create an inferred register class that was missing from the .td files.
// Most properties will be inherited from the closest super-class after the
// class structure has been computed.
CodeGenRegisterClass::CodeGenRegisterClass(CodeGenRegBank &RegBank,
                                           StringRef Name, Key Props)
    : Members(*Props.Members), TheDef(nullptr), Name(Name.str()),
      RegsWithSuperRegsTopoSigs(RegBank.getNumTopoSigs()), EnumValue(-1),
      RSI(Props.RSI), CopyCost(0), Allocatable(true), AllocationPriority(0),
      GlobalPriority(false), TSFlags(0) {
  Artificial = true;
  GeneratePressureSet = false;
  for (const auto R : Members) {
    if (!R->getSuperRegs().empty())
      RegsWithSuperRegsTopoSigs.set(R->getTopoSig());
    Artificial &= R->Artificial;
  }
}

// Compute inherited properties for a synthesized register class.
void CodeGenRegisterClass::inheritProperties(CodeGenRegBank &RegBank) {
  assert(!getDef() && "Only synthesized classes can inherit properties");
  assert(!SuperClasses.empty() && "Synthesized class without super class");

  // The last super-class is the smallest one in topological order. Check for
  // allocatable super-classes and inherit from the nearest allocatable one if
  // any.
  auto NearestAllocSCRIt =
      find_if(reverse(SuperClasses),
              [&](const CodeGenRegisterClass *S) { return S->Allocatable; });
  CodeGenRegisterClass &Super = NearestAllocSCRIt == SuperClasses.rend()
                                    ? *SuperClasses.back()
                                    : **NearestAllocSCRIt;

  // Most properties are copied directly.
  // Exceptions are members, size, and alignment.
  Namespace = Super.Namespace;
  VTs = Super.VTs;
  CopyCost = Super.CopyCost;
  Allocatable = Super.Allocatable;
  AltOrderSelect = Super.AltOrderSelect;
  AllocationPriority = Super.AllocationPriority;
  GlobalPriority = Super.GlobalPriority;
  TSFlags = Super.TSFlags;
  GeneratePressureSet |= Super.GeneratePressureSet;

  // Copy all allocation orders, filter out foreign registers from the larger
  // super-class.
  Orders.resize(Super.Orders.size());
  for (auto [Idx, Outer] : enumerate(Super.Orders))
    for (const Record *Reg : Outer)
      if (contains(RegBank.getReg(Reg)))
        Orders[Idx].push_back(Reg);
}

// Return true if this class can hold values of type VT, either exactly or
// (for a simple VT) under some HwMode of one of the class's types.
bool CodeGenRegisterClass::hasType(const ValueTypeByHwMode &VT) const {
  if (llvm::is_contained(VTs, VT))
    return true;

  // If VT is not identical to any of this class's types, but is a simple
  // type, check if any of the types for this class contain it under some
  // mode.
  // The motivating example came from RISC-V, where (likely because of being
  // guarded by "64-bit" predicate), the type of X5 was {*:[i64]}, but the
  // type in GRC was {*:[i32], m1:[i64]}.
  if (VT.isSimple()) {
    MVT T = VT.getSimple();
    for (const ValueTypeByHwMode &OurVT : VTs) {
      if (llvm::is_contained(llvm::make_second_range(OurVT), T))
        return true;
    }
  }
  return false;
}

// Membership test; Members is kept sorted (see sortAndUniqueRegisters), so a
// binary search suffices.
bool CodeGenRegisterClass::contains(const CodeGenRegister *Reg) const {
  return llvm::binary_search(Members, Reg, deref<std::less<>>());
}

// Return this class's register weight: an explicit "Weight" field wins;
// empty or artificial classes weigh 0; otherwise use the first member's
// unit-weight sum.
unsigned CodeGenRegisterClass::getWeight(const CodeGenRegBank &RegBank) const {
  if (TheDef && !TheDef->isValueUnset("Weight"))
    return TheDef->getValueAsInt("Weight");

  if (Members.empty() || Artificial)
    return 0;

  return (*Members.begin())->getWeight(RegBank);
}

namespace llvm {

// Debug printing for a register class key: size info plus member names.
raw_ostream &operator<<(raw_ostream &OS, const CodeGenRegisterClass::Key &K) {
  OS << "{ " << K.RSI;
  for (const auto R : *K.Members)
    OS << ", " << R->getName();
  return OS << " }";
}

} // end namespace llvm

// This is a simple lexicographical order that can be used to search for sets.
// It is not the same as the topological order provided by TopoOrderRC.
bool CodeGenRegisterClass::Key::operator<(
    const CodeGenRegisterClass::Key &B) const {
  assert(Members && B.Members);
  return std::tie(*Members, RSI) < std::tie(*B.Members, B.RSI);
}

// Returns true if RC is a strict subclass.
// RC is a sub-class of this class if it is a valid replacement for any
// instruction operand where a register of this class is required. It must
// satisfy these conditions:
//
// 1. All RC registers are also in this.
// 2. The RC spill size must not be smaller than our spill size.
// 3. RC spill alignment must be compatible with ours.
//
// In terms of the parameters below, "this class" is A and RC is B.
static bool testSubClass(const CodeGenRegisterClass *A,
                         const CodeGenRegisterClass *B) {
  return A->RSI.isSubClassOf(B->RSI) &&
         llvm::includes(A->getMembers(), B->getMembers(), deref<std::less<>>());
}

/// Sorting predicate for register classes. This provides a topological
/// ordering that arranges all register classes before their sub-classes.
///
/// Register classes with the same registers, spill size, and alignment form a
/// clique. They will be ordered alphabetically.
///
static bool TopoOrderRC(const CodeGenRegisterClass &A,
                        const CodeGenRegisterClass &B) {
  if (&A == &B)
    return false;

  constexpr size_t SIZET_MAX = std::numeric_limits<size_t>::max();

  // Sort in the following order:
  // (a) first by register size in ascending order.
  // (b) then by set size in descending order.
  // (c) finally, by name as a tie breaker.
  //
  // For set size, note that the classes' allocation order may not have been
  // computed yet, but the members set is always valid. Also, since we use
  // std::tie() < operator for ordering, we can achieve the descending set size
  // ordering by using (SIZET_MAX - set_size) in the std::tie.
  return std::tuple(A.RSI, SIZET_MAX - A.getMembers().size(),
                    StringRef(A.getName())) <
         std::tuple(B.RSI, SIZET_MAX - B.getMembers().size(),
                    StringRef(B.getName()));
}

// Return "Namespace::" (or "" when there is no namespace).
std::string CodeGenRegisterClass::getNamespaceQualification() const {
  return Namespace.empty() ? "" : (Namespace + "::").str();
}

std::string CodeGenRegisterClass::getQualifiedName() const {
  return getNamespaceQualification() + getName();
}

std::string CodeGenRegisterClass::getIdName() const {
  return getName() + "RegClassID";
}

std::string CodeGenRegisterClass::getQualifiedIdName() const {
  return getNamespaceQualification() + getIdName();
}

// Compute sub-classes of all register classes.
// Assume the classes are ordered topologically.
void CodeGenRegisterClass::computeSubClasses(CodeGenRegBank &RegBank) {
  std::list<CodeGenRegisterClass> &RegClasses = RegBank.getRegClasses();

  const size_t NumRegClasses = RegClasses.size();
  // Visit backwards so sub-classes are seen first.
  for (auto I = RegClasses.rbegin(), E = RegClasses.rend(); I != E; ++I) {
    CodeGenRegisterClass &RC = *I;
    RC.SubClasses.resize(NumRegClasses);
    // Every class is a (non-strict) sub-class of itself.
    RC.SubClasses.set(RC.EnumValue);
    if (RC.Artificial)
      continue;

    // Normally, all subclasses have IDs >= rci, unless RC is part of a clique.
    for (auto I2 = I.base(), E2 = RegClasses.end(); I2 != E2; ++I2) {
      CodeGenRegisterClass &SubRC = *I2;
      if (RC.SubClasses.test(SubRC.EnumValue))
        continue;
      if (!testSubClass(&RC, &SubRC))
        continue;
      // SubRC is a sub-class. Grab all its sub-classes so we won't have to
      // check them again.
      RC.SubClasses |= SubRC.SubClasses;
    }

    // Sweep up missed clique members. They will be immediately preceding RC.
    for (auto I2 = std::next(I); I2 != E && testSubClass(&RC, &*I2); ++I2)
      RC.SubClasses.set(I2->EnumValue);
  }

  // Compute the SuperClasses lists from the SubClasses vectors.
  for (auto &RC : RegClasses) {
    const BitVector &SC = RC.getSubClasses();
    auto I = RegClasses.begin();
    // Walk the set bits; advance the list iterator by the bit-index delta so
    // each class is visited at most once per pass.
    for (int s = 0, next_s = SC.find_first(); next_s != -1;
         next_s = SC.find_next(s)) {
      std::advance(I, next_s - s);
      s = next_s;
      if (&*I == &RC)
        continue;
      I->SuperClasses.push_back(&RC);
    }
  }

  // With the class hierarchy in place, let synthesized register classes inherit
  // properties from their closest super-class. The iteration order here can
  // propagate properties down multiple levels.
  for (CodeGenRegisterClass &RC : RegClasses)
    if (!RC.getDef())
      RC.inheritProperties(RegBank);
}

std::optional<std::pair<CodeGenRegisterClass *, CodeGenRegisterClass *>>
CodeGenRegisterClass::getMatchingSubClassWithSubRegs(
    CodeGenRegBank &RegBank, const CodeGenSubRegIndex *SubIdx) const {
  // Order classes by descending member-set size; ties prefer this class.
  auto WeakSizeOrder = [this](const CodeGenRegisterClass *A,
                              const CodeGenRegisterClass *B) {
    // If there are multiple, identical register classes, prefer the original
    // register class.
    if (A == B)
      return false;
    if (A->getMembers().size() == B->getMembers().size())
      return A == this;
    return A->getMembers().size() > B->getMembers().size();
  };

  std::list<CodeGenRegisterClass> &RegClasses = RegBank.getRegClasses();

  // Find all the subclasses of this one that fully support the sub-register
  // index and order them by size. BiggestSuperRC should always be first.
  CodeGenRegisterClass *BiggestSuperRegRC = getSubClassWithSubReg(SubIdx);
  if (!BiggestSuperRegRC)
    return std::nullopt;
  BitVector SuperRegRCsBV = BiggestSuperRegRC->getSubClasses();
  std::vector<CodeGenRegisterClass *> SuperRegRCs;
  for (auto &RC : RegClasses)
    if (SuperRegRCsBV[RC.EnumValue])
      SuperRegRCs.emplace_back(&RC);
  llvm::stable_sort(SuperRegRCs, WeakSizeOrder);

  assert(SuperRegRCs.front() == BiggestSuperRegRC &&
         "Biggest class wasn't first");

  // Find all the subreg classes and order them by size too.
  std::vector<std::pair<CodeGenRegisterClass *, BitVector>> SuperRegClasses;
  for (auto &RC : RegClasses) {
    BitVector SuperRegClassesBV(RegClasses.size());
    RC.getSuperRegClasses(SubIdx, SuperRegClassesBV);
    if (SuperRegClassesBV.any())
      SuperRegClasses.emplace_back(&RC, SuperRegClassesBV);
  }
  llvm::stable_sort(SuperRegClasses,
                    [&](const std::pair<CodeGenRegisterClass *, BitVector> &A,
                        const std::pair<CodeGenRegisterClass *, BitVector> &B) {
                      return WeakSizeOrder(A.first, B.first);
                    });

  // Find the biggest subclass and subreg class such that R:subidx is in the
  // subreg class for all R in subclass.
  //
  // For example:
  // All registers in X86's GR64 have a sub_32bit subregister but no class
  // exists that contains all the 32-bit subregisters because GR64 contains RIP
  // but GR32 does not contain EIP. Instead, we constrain SuperRegRC to
  // GR32_with_sub_8bit (which is identical to GR32_with_sub_32bit) and then,
  // having excluded RIP, we are able to find a SubRegRC (GR32).
  CodeGenRegisterClass *ChosenSuperRegClass = nullptr;
  CodeGenRegisterClass *SubRegRC = nullptr;
  for (CodeGenRegisterClass *SuperRegRC : SuperRegRCs) {
    for (const auto &[SuperRegClass, SuperRegClassBV] : SuperRegClasses) {
      if (SuperRegClassBV[SuperRegRC->EnumValue]) {
        SubRegRC = SuperRegClass;
        ChosenSuperRegClass = SuperRegRC;

        // If SubRegRC is bigger than SuperRegRC then there are members of
        // SubRegRC that don't have super registers via SubIdx. Keep looking to
        // find a better fit and fall back on this one if there isn't one.
        //
        // This is intended to prevent X86 from making odd choices such as
        // picking LOW32_ADDR_ACCESS_RBP instead of GR32 in the example above.
        // LOW32_ADDR_ACCESS_RBP is a valid choice but contains registers that
        // aren't subregisters of SuperRegRC whereas GR32 has a direct 1:1
        // mapping.
        if (SuperRegRC->getMembers().size() >= SubRegRC->getMembers().size())
          return std::pair(ChosenSuperRegClass, SubRegRC);
      }
    }

    // If we found a fit but it wasn't quite ideal because SubRegRC had excess
    // registers, then we're done.
    if (ChosenSuperRegClass)
      return std::pair(ChosenSuperRegClass, SubRegRC);
  }

  return std::nullopt;
}

// Set bits in Out for every register class recorded as a super-register class
// of this class via SubIdx.
void CodeGenRegisterClass::getSuperRegClasses(const CodeGenSubRegIndex *SubIdx,
                                              BitVector &Out) const {
  auto FindI = SuperRegClasses.find(SubIdx);
  if (FindI == SuperRegClasses.end())
    return;
  for (CodeGenRegisterClass *RC : FindI->second)
    Out.set(RC->EnumValue);
}

// Populate a unique sorted list of units from a register set.
void CodeGenRegisterClass::buildRegUnitSet(
    const CodeGenRegBank &RegBank, std::vector<unsigned> &RegUnits) const {
  // Collect the units of all members, dropping artificial units, then sort
  // and deduplicate into RegUnits.
  std::vector<unsigned> TmpUnits;
  for (const CodeGenRegister *Reg : Members) {
    for (unsigned UnitI : Reg->getRegUnits()) {
      const RegUnit &RU = RegBank.getRegUnit(UnitI);
      if (!RU.Artificial)
        TmpUnits.push_back(UnitI);
    }
  }
  llvm::sort(TmpUnits);
  std::unique_copy(TmpUnits.begin(), TmpUnits.end(),
                   std::back_inserter(RegUnits));
}

// Combine our super classes of the given sub-register index with all of their
// super classes in turn.
void CodeGenRegisterClass::extendSuperRegClasses(CodeGenSubRegIndex *SubIdx) {
  auto It = SuperRegClasses.find(SubIdx);
  if (It == SuperRegClasses.end())
    return;

  // Work on a copy: addSuperRegClass below inserts into SuperRegClasses,
  // which could invalidate It->second while we iterate.
  SmallVector<CodeGenRegisterClass *> MidRCs;
  llvm::append_range(MidRCs, It->second);

  for (CodeGenRegisterClass *MidRC : MidRCs) {
    for (auto &Pair : MidRC->SuperRegClasses) {
      // Compose SubIdx with the index leading from MidRC to its super class;
      // a null result means the indices do not compose.
      CodeGenSubRegIndex *ComposedSubIdx = Pair.first->compose(SubIdx);
      if (!ComposedSubIdx)
        continue;

      for (CodeGenRegisterClass *SuperRC : Pair.second)
        addSuperRegClass(ComposedSubIdx, SuperRC);
    }
  }
}

//===----------------------------------------------------------------------===//
//                            CodeGenRegisterCategory
//===----------------------------------------------------------------------===//

CodeGenRegisterCategory::CodeGenRegisterCategory(CodeGenRegBank &RegBank,
                                                 const Record *R)
    : TheDef(R), Name(R->getName().str()) {
  for (const Record *RegClass : R->getValueAsListOfDefs("Classes"))
    Classes.push_back(RegBank.getRegClass(RegClass));
}

//===----------------------------------------------------------------------===//
//                               CodeGenRegBank
//===----------------------------------------------------------------------===//

CodeGenRegBank::CodeGenRegBank(const RecordKeeper &Records,
                               const CodeGenHwModes &Modes)
    : CGH(Modes) {
  // Configure register Sets to understand register classes and tuples.
  Sets.addFieldExpander("RegisterClass", "MemberList");
  Sets.addFieldExpander("CalleeSavedRegs", "SaveList");
  Sets.addExpander("RegisterTuples",
                   std::make_unique<TupleExpander>(SynthDefs));

  // Read in the user-defined (named) sub-register indices.
  // More indices will be synthesized later.
  for (const Record *SRI : Records.getAllDerivedDefinitions("SubRegIndex"))
    getSubRegIdx(SRI);
  // Build composite maps from ComposedOf fields.
  for (auto &Idx : SubRegIndices)
    Idx.updateComponents(*this);

  // Read in the register and register tuple definitions.
  const RecordKeeper &RC = Records;
  std::vector<const Record *> Regs = RC.getAllDerivedDefinitions("Register");
  if (!Regs.empty() && Regs[0]->isSubClassOf("X86Reg")) {
    // For X86, we need to sort Registers and RegisterTuples together to list
    // new registers and register tuples at a later position. So that we can
    // reduce unnecessary iterations on unsupported registers in LiveVariables.
    // TODO: Remove this logic when migrate from LiveVariables to LiveIntervals
    // completely.
    for (const Record *R : Records.getAllDerivedDefinitions("RegisterTuples")) {
      // Expand tuples and merge the vectors
      std::vector<const Record *> TupRegs = *Sets.expand(R);
      llvm::append_range(Regs, TupRegs);
    }

    llvm::sort(Regs, LessRecordRegister());
    // Assign the enumeration values.
    for (const Record *Reg : Regs)
      getReg(Reg);
  } else {
    llvm::sort(Regs, LessRecordRegister());
    // Assign the enumeration values.
    for (const Record *Reg : Regs)
      getReg(Reg);

    // Expand tuples and number the new registers.
    for (const Record *R : Records.getAllDerivedDefinitions("RegisterTuples")) {
      std::vector<const Record *> TupRegs = *Sets.expand(R);
      llvm::sort(TupRegs, LessRecordRegister());
      for (const Record *RC : TupRegs)
        getReg(RC);
    }
  }

  // Now all the registers are known. Build the object graph of explicit
  // register-register references.
  for (CodeGenRegister &Reg : Registers)
    Reg.buildObjectGraph(*this);

  // Compute register name map.
  for (CodeGenRegister &Reg : Registers)
    // FIXME: This could just be RegistersByName[name] = register, except that
    // causes some failures in MIPS - perhaps they have duplicate register name
    // entries? (or maybe there's a reason for it - I don't know much about this
    // code, just drive-by refactoring)
    RegistersByName.try_emplace(Reg.TheDef->getValueAsString("AsmName"), &Reg);

  // Precompute all sub-register maps.
  // This will create Composite entries for all inferred sub-register indices.
  for (CodeGenRegister &Reg : Registers)
    Reg.computeSubRegs(*this);

  // Compute transitive closure of subregister index ConcatenationOf vectors
  // and initialize ConcatIdx map.
  for (CodeGenSubRegIndex &SRI : SubRegIndices) {
    SRI.computeConcatTransitiveClosure();
    if (!SRI.ConcatenationOf.empty())
      ConcatIdx.try_emplace(
          SmallVector<CodeGenSubRegIndex *, 8>(SRI.ConcatenationOf.begin(),
                                               SRI.ConcatenationOf.end()),
          &SRI);
  }

  // Infer even more sub-registers by combining leading super-registers.
  for (CodeGenRegister &Reg : Registers)
    if (Reg.CoveredBySubRegs)
      Reg.computeSecondarySubRegs(*this);

  // After the sub-register graph is complete, compute the topologically
  // ordered SuperRegs list.
  for (CodeGenRegister &Reg : Registers)
    Reg.computeSuperRegs(*this);

  // For each pair of Reg:SR, if both are non-artificial, mark the
  // corresponding sub-register index as non-artificial.
  for (CodeGenRegister &Reg : Registers) {
    if (Reg.Artificial)
      continue;
    for (auto [SRI, SR] : Reg.getSubRegs()) {
      if (!SR->Artificial)
        SRI->Artificial = false;
    }
  }

  computeSubRegIndicesRPOT();

  // Native register units are associated with a leaf register. They've all been
  // discovered now.
  NumNativeRegUnits = RegUnits.size();

  // Read in register class definitions.
  ArrayRef<const Record *> RCs =
      Records.getAllDerivedDefinitions("RegisterClass");
  if (RCs.empty())
    PrintFatalError("No 'RegisterClass' subclasses defined!");

  // Allocate user-defined register classes.
  for (const Record *R : RCs) {
    RegClasses.emplace_back(*this, R);
    CodeGenRegisterClass &RC = RegClasses.back();
    if (!RC.Artificial)
      addToMaps(&RC);
  }

  // Infer missing classes to create a full algebra.
  computeInferredRegisterClasses();

  // Order register classes topologically and assign enum values.
  RegClasses.sort(TopoOrderRC);
  for (auto [Idx, RC] : enumerate(RegClasses))
    RC.EnumValue = Idx;
  CodeGenRegisterClass::computeSubClasses(*this);

  // Read in the register category definitions.
  for (const Record *R : Records.getAllDerivedDefinitions("RegisterCategory"))
    RegCategories.emplace_back(*this, R);
}

// Create a synthetic CodeGenSubRegIndex without a corresponding Record.
1263 CodeGenSubRegIndex *CodeGenRegBank::createSubRegIndex(StringRef Name, 1264 StringRef Namespace) { 1265 SubRegIndices.emplace_back(Name, Namespace, SubRegIndices.size() + 1); 1266 return &SubRegIndices.back(); 1267 } 1268 1269 CodeGenSubRegIndex *CodeGenRegBank::getSubRegIdx(const Record *Def) { 1270 CodeGenSubRegIndex *&Idx = Def2SubRegIdx[Def]; 1271 if (Idx) 1272 return Idx; 1273 SubRegIndices.emplace_back(Def, SubRegIndices.size() + 1, getHwModes()); 1274 Idx = &SubRegIndices.back(); 1275 return Idx; 1276 } 1277 1278 const CodeGenSubRegIndex * 1279 CodeGenRegBank::findSubRegIdx(const Record *Def) const { 1280 return Def2SubRegIdx.at(Def); 1281 } 1282 1283 CodeGenRegister *CodeGenRegBank::getReg(const Record *Def) { 1284 CodeGenRegister *&Reg = Def2Reg[Def]; 1285 if (Reg) 1286 return Reg; 1287 Registers.emplace_back(Def, Registers.size() + 1); 1288 Reg = &Registers.back(); 1289 return Reg; 1290 } 1291 1292 void CodeGenRegBank::addToMaps(CodeGenRegisterClass *RC) { 1293 if (const Record *Def = RC->getDef()) 1294 Def2RC.try_emplace(Def, RC); 1295 1296 // Duplicate classes are rejected by insert(). 1297 // That's OK, we only care about the properties handled by CGRC::Key. 1298 CodeGenRegisterClass::Key K(*RC); 1299 Key2RC.try_emplace(K, RC); 1300 } 1301 1302 // Create a synthetic sub-class if it is missing. 1303 std::pair<CodeGenRegisterClass *, bool> 1304 CodeGenRegBank::getOrCreateSubClass(const CodeGenRegisterClass *RC, 1305 const CodeGenRegister::Vec *Members, 1306 StringRef Name) { 1307 // Synthetic sub-class has the same size and alignment as RC. 1308 CodeGenRegisterClass::Key K(Members, RC->RSI); 1309 RCKeyMap::const_iterator FoundI = Key2RC.find(K); 1310 if (FoundI != Key2RC.end()) 1311 return {FoundI->second, false}; 1312 1313 // Sub-class doesn't exist, create a new one. 
1314 RegClasses.emplace_back(*this, Name, K); 1315 addToMaps(&RegClasses.back()); 1316 return {&RegClasses.back(), true}; 1317 } 1318 1319 CodeGenRegisterClass *CodeGenRegBank::getRegClass(const Record *Def) const { 1320 if (CodeGenRegisterClass *RC = Def2RC.lookup(Def)) 1321 return RC; 1322 1323 PrintFatalError(Def->getLoc(), "Not a known RegisterClass!"); 1324 } 1325 1326 CodeGenSubRegIndex * 1327 CodeGenRegBank::getCompositeSubRegIndex(CodeGenSubRegIndex *A, 1328 CodeGenSubRegIndex *B) { 1329 // Look for an existing entry. 1330 CodeGenSubRegIndex *Comp = A->compose(B); 1331 if (Comp) 1332 return Comp; 1333 1334 // None exists, synthesize one. 1335 std::string Name = A->getName() + "_then_" + B->getName(); 1336 Comp = createSubRegIndex(Name, A->getNamespace()); 1337 A->addComposite(B, Comp, getHwModes()); 1338 return Comp; 1339 } 1340 1341 CodeGenSubRegIndex *CodeGenRegBank::getConcatSubRegIndex( 1342 const SmallVector<CodeGenSubRegIndex *, 8> &Parts, 1343 const CodeGenHwModes &CGH) { 1344 assert(Parts.size() > 1 && "Need two parts to concatenate"); 1345 #ifndef NDEBUG 1346 for (CodeGenSubRegIndex *Idx : Parts) { 1347 assert(Idx->ConcatenationOf.empty() && "No transitive closure?"); 1348 } 1349 #endif 1350 1351 // Look for an existing entry. 1352 CodeGenSubRegIndex *&Idx = ConcatIdx[Parts]; 1353 if (Idx) 1354 return Idx; 1355 1356 // None exists, synthesize one. 1357 std::string Name = Parts.front()->getName(); 1358 const unsigned UnknownSize = (uint16_t)-1; 1359 1360 for (const CodeGenSubRegIndex *Part : ArrayRef(Parts).drop_front()) { 1361 Name += '_'; 1362 Name += Part->getName(); 1363 } 1364 1365 Idx = createSubRegIndex(Name, Parts.front()->getNamespace()); 1366 Idx->ConcatenationOf.assign(Parts.begin(), Parts.end()); 1367 1368 unsigned NumModes = CGH.getNumModeIds(); 1369 for (unsigned M = 0; M < NumModes; ++M) { 1370 const CodeGenSubRegIndex *FirstPart = Parts.front(); 1371 1372 // Determine whether all parts are contiguous. 
1373 bool IsContinuous = true; 1374 const SubRegRange &FirstPartRange = FirstPart->Range.get(M); 1375 unsigned Size = FirstPartRange.Size; 1376 unsigned LastOffset = FirstPartRange.Offset; 1377 unsigned LastSize = FirstPartRange.Size; 1378 1379 for (const CodeGenSubRegIndex *Part : ArrayRef(Parts).drop_front()) { 1380 const SubRegRange &PartRange = Part->Range.get(M); 1381 if (Size == UnknownSize || PartRange.Size == UnknownSize) 1382 Size = UnknownSize; 1383 else 1384 Size += PartRange.Size; 1385 if (LastSize == UnknownSize || 1386 PartRange.Offset != (LastOffset + LastSize)) 1387 IsContinuous = false; 1388 LastOffset = PartRange.Offset; 1389 LastSize = PartRange.Size; 1390 } 1391 unsigned Offset = IsContinuous ? FirstPartRange.Offset : -1; 1392 Idx->Range.get(M) = SubRegRange(Size, Offset); 1393 } 1394 1395 return Idx; 1396 } 1397 1398 void CodeGenRegBank::computeComposites() { 1399 using RegMap = std::map<const CodeGenRegister *, const CodeGenRegister *>; 1400 1401 // Subreg -> { Reg->Reg }, where the right-hand side is the mapping from 1402 // register to (sub)register associated with the action of the left-hand 1403 // side subregister. 1404 std::map<const CodeGenSubRegIndex *, RegMap> SubRegAction; 1405 for (const CodeGenRegister &R : Registers) { 1406 const CodeGenRegister::SubRegMap &SM = R.getSubRegs(); 1407 for (auto [SRI, SubReg] : SM) 1408 SubRegAction[SRI].try_emplace(&R, SubReg); 1409 } 1410 1411 // Calculate the composition of two subregisters as compositions of their 1412 // associated actions. 1413 auto compose = [&SubRegAction](const CodeGenSubRegIndex *Sub1, 1414 const CodeGenSubRegIndex *Sub2) { 1415 RegMap C; 1416 const RegMap &Img1 = SubRegAction.at(Sub1); 1417 const RegMap &Img2 = SubRegAction.at(Sub2); 1418 for (auto [R, SubReg] : Img1) { 1419 auto F = Img2.find(SubReg); 1420 if (F != Img2.end()) 1421 C.try_emplace(R, F->second); 1422 } 1423 return C; 1424 }; 1425 1426 // Check if the two maps agree on the intersection of their domains. 
1427 auto agree = [](const RegMap &Map1, const RegMap &Map2) { 1428 // Technically speaking, an empty map agrees with any other map, but 1429 // this could flag false positives. We're interested in non-vacuous 1430 // agreements. 1431 if (Map1.empty() || Map2.empty()) 1432 return false; 1433 for (auto [K, V] : Map1) { 1434 auto F = Map2.find(K); 1435 if (F == Map2.end() || V != F->second) 1436 return false; 1437 } 1438 return true; 1439 }; 1440 1441 using CompositePair = 1442 std::pair<const CodeGenSubRegIndex *, const CodeGenSubRegIndex *>; 1443 SmallSet<CompositePair, 4> UserDefined; 1444 for (const CodeGenSubRegIndex &Idx : SubRegIndices) 1445 for (auto P : Idx.getComposites()) 1446 UserDefined.insert({&Idx, P.first}); 1447 1448 // Keep track of TopoSigs visited. We only need to visit each TopoSig once, 1449 // and many registers will share TopoSigs on regular architectures. 1450 BitVector TopoSigs(getNumTopoSigs()); 1451 1452 for (const CodeGenRegister &Reg1 : Registers) { 1453 // Skip identical subreg structures already processed. 1454 if (TopoSigs.test(Reg1.getTopoSig())) 1455 continue; 1456 TopoSigs.set(Reg1.getTopoSig()); 1457 1458 const CodeGenRegister::SubRegMap &SRM1 = Reg1.getSubRegs(); 1459 for (auto [Idx1, Reg2] : SRM1) { 1460 // Ignore identity compositions. 1461 if (&Reg1 == Reg2) 1462 continue; 1463 const CodeGenRegister::SubRegMap &SRM2 = Reg2->getSubRegs(); 1464 // Try composing Idx1 with another SubRegIndex. 1465 for (auto I2 : SRM2) { 1466 CodeGenSubRegIndex *Idx2 = I2.first; 1467 CodeGenRegister *Reg3 = I2.second; 1468 // Ignore identity compositions. 1469 if (Reg2 == Reg3) 1470 continue; 1471 // OK Reg1:IdxPair == Reg3. Find the index with Reg:Idx == Reg3. 1472 CodeGenSubRegIndex *Idx3 = Reg1.getSubRegIndex(Reg3); 1473 assert(Idx3 && "Sub-register doesn't have an index"); 1474 1475 // Conflicting composition? Emit a warning but allow it. 
1476 if (CodeGenSubRegIndex *Prev = 1477 Idx1->addComposite(Idx2, Idx3, getHwModes())) { 1478 // If the composition was not user-defined, always emit a warning. 1479 if (!UserDefined.contains({Idx1, Idx2}) || 1480 agree(compose(Idx1, Idx2), SubRegAction.at(Idx3))) 1481 PrintWarning(Twine("SubRegIndex ") + Idx1->getQualifiedName() + 1482 " and " + Idx2->getQualifiedName() + 1483 " compose ambiguously as " + Prev->getQualifiedName() + 1484 " or " + Idx3->getQualifiedName()); 1485 } 1486 } 1487 } 1488 } 1489 } 1490 1491 // Compute lane masks. This is similar to register units, but at the 1492 // sub-register index level. Each bit in the lane mask is like a register unit 1493 // class, and two lane masks will have a bit in common if two sub-register 1494 // indices overlap in some register. 1495 // 1496 // Conservatively share a lane mask bit if two sub-register indices overlap in 1497 // some registers, but not in others. That shouldn't happen a lot. 1498 void CodeGenRegBank::computeSubRegLaneMasks() { 1499 // First assign individual bits to all the leaf indices. 1500 unsigned Bit = 0; 1501 // Determine mask of lanes that cover their registers. 1502 CoveringLanes = LaneBitmask::getAll(); 1503 for (CodeGenSubRegIndex &Idx : SubRegIndices) { 1504 if (Idx.getComposites().empty()) { 1505 if (Bit > LaneBitmask::BitWidth) { 1506 PrintFatalError( 1507 Twine("Ran out of lanemask bits to represent subregister ") + 1508 Idx.getName()); 1509 } 1510 Idx.LaneMask = LaneBitmask::getLane(Bit); 1511 ++Bit; 1512 } else { 1513 Idx.LaneMask = LaneBitmask::getNone(); 1514 } 1515 } 1516 1517 // Compute transformation sequences for composeSubRegIndexLaneMask. The idea 1518 // here is that for each possible target subregister we look at the leafs 1519 // in the subregister graph that compose for this target and create 1520 // transformation sequences for the lanemasks. Each step in the sequence 1521 // consists of a bitmask and a bitrotate operation. 
As the rotation amounts 1522 // are usually the same for many subregisters we can easily combine the steps 1523 // by combining the masks. 1524 for (const CodeGenSubRegIndex &Idx : SubRegIndices) { 1525 const CodeGenSubRegIndex::CompMap &Composites = Idx.getComposites(); 1526 auto &LaneTransforms = Idx.CompositionLaneMaskTransform; 1527 1528 if (Composites.empty()) { 1529 // Moving from a class with no subregisters we just had a single lane: 1530 // The subregister must be a leaf subregister and only occupies 1 bit. 1531 // Move the bit from the class without subregisters into that position. 1532 unsigned DstBit = Idx.LaneMask.getHighestLane(); 1533 assert(Idx.LaneMask == LaneBitmask::getLane(DstBit) && 1534 "Must be a leaf subregister"); 1535 MaskRolPair MaskRol = {LaneBitmask::getLane(0), (uint8_t)DstBit}; 1536 LaneTransforms.push_back(MaskRol); 1537 } else { 1538 // Go through all leaf subregisters and find the ones that compose with 1539 // Idx. These make out all possible valid bits in the lane mask we want to 1540 // transform. Looking only at the leafs ensure that only a single bit in 1541 // the mask is set. 1542 unsigned NextBit = 0; 1543 for (CodeGenSubRegIndex &Idx2 : SubRegIndices) { 1544 // Skip non-leaf subregisters. 1545 if (!Idx2.getComposites().empty()) 1546 continue; 1547 // Replicate the behaviour from the lane mask generation loop above. 1548 unsigned SrcBit = NextBit; 1549 LaneBitmask SrcMask = LaneBitmask::getLane(SrcBit); 1550 if (NextBit < LaneBitmask::BitWidth - 1) 1551 ++NextBit; 1552 assert(Idx2.LaneMask == SrcMask); 1553 1554 // Get the composed subregister if there is any. 1555 auto C = Composites.find(&Idx2); 1556 if (C == Composites.end()) 1557 continue; 1558 const CodeGenSubRegIndex *Composite = C->second; 1559 // The Composed subreg should be a leaf subreg too 1560 assert(Composite->getComposites().empty()); 1561 1562 // Create Mask+Rotate operation and merge with existing ops if possible. 
1563 unsigned DstBit = Composite->LaneMask.getHighestLane(); 1564 int Shift = DstBit - SrcBit; 1565 uint8_t RotateLeft = 1566 Shift >= 0 ? (uint8_t)Shift : LaneBitmask::BitWidth + Shift; 1567 for (MaskRolPair &I : LaneTransforms) { 1568 if (I.RotateLeft == RotateLeft) { 1569 I.Mask |= SrcMask; 1570 SrcMask = LaneBitmask::getNone(); 1571 } 1572 } 1573 if (SrcMask.any()) { 1574 MaskRolPair MaskRol = {SrcMask, RotateLeft}; 1575 LaneTransforms.push_back(MaskRol); 1576 } 1577 } 1578 } 1579 1580 // Optimize if the transformation consists of one step only: Set mask to 1581 // 0xffffffff (including some irrelevant invalid bits) so that it should 1582 // merge with more entries later while compressing the table. 1583 if (LaneTransforms.size() == 1) 1584 LaneTransforms[0].Mask = LaneBitmask::getAll(); 1585 1586 // Further compression optimization: For invalid compositions resulting 1587 // in a sequence with 0 entries we can just pick any other. Choose 1588 // Mask 0xffffffff with Rotation 0. 1589 if (LaneTransforms.size() == 0) { 1590 MaskRolPair P = {LaneBitmask::getAll(), 0}; 1591 LaneTransforms.push_back(P); 1592 } 1593 } 1594 1595 // FIXME: What if ad-hoc aliasing introduces overlaps that aren't represented 1596 // by the sub-register graph? This doesn't occur in any known targets. 1597 1598 // Inherit lanes from composites. 1599 for (const CodeGenSubRegIndex &Idx : SubRegIndices) { 1600 LaneBitmask Mask = Idx.computeLaneMask(); 1601 // If some super-registers without CoveredBySubRegs use this index, we can 1602 // no longer assume that the lanes are covering their registers. 1603 if (!Idx.AllSuperRegsCovered) 1604 CoveringLanes &= ~Mask; 1605 } 1606 1607 // Compute lane mask combinations for register classes. 
  for (auto &RegClass : RegClasses) {
    LaneBitmask LaneMask;
    for (const CodeGenSubRegIndex &SubRegIndex : SubRegIndices) {
      // Only sub-register indices supported by some sub-class of RegClass
      // contribute lanes to this class.
      if (RegClass.getSubClassWithSubReg(&SubRegIndex) == nullptr)
        continue;
      LaneMask |= SubRegIndex.LaneMask;
    }

    // For classes without any subregisters set LaneMask to 1 instead of 0.
    // This makes it easier for client code to handle classes uniformly.
    if (LaneMask.none())
      LaneMask = LaneBitmask::getLane(0);

    RegClass.LaneMask = LaneMask;
  }
}

namespace {

// A directed graph on sub-register indices with a virtual source node that
// has an arc to all other nodes, and an arc from A to B if sub-register index
// B can be obtained by composing A with some other sub-register index.
struct SubRegIndexCompositionGraph {
  std::deque<CodeGenSubRegIndex> &SubRegIndices;
  // The virtual source node, represented as a composition map that maps each
  // sub-register index to itself (so the entry node has an arc to every
  // index).
  CodeGenSubRegIndex::CompMap EntryNode;

  SubRegIndexCompositionGraph(std::deque<CodeGenSubRegIndex> &SubRegIndices)
      : SubRegIndices(SubRegIndices) {
    for (CodeGenSubRegIndex &Idx : SubRegIndices) {
      EntryNode.try_emplace(&Idx, &Idx);
    }
  }
};

} // namespace

// GraphTraits specialization so generic graph algorithms (here, reverse
// post-order traversal) can walk the composition graph.
template <> struct llvm::GraphTraits<SubRegIndexCompositionGraph> {
  // A node is either a real sub-register index or the virtual entry node,
  // which is represented by a pointer to its composition map.
  using NodeRef =
      PointerUnion<CodeGenSubRegIndex *, const CodeGenSubRegIndex::CompMap *>;

  // Using a reverse iterator causes sub-register indices to appear in their
  // more natural order in RPOT.
  using CompMapIt = CodeGenSubRegIndex::CompMap::const_reverse_iterator;
  struct ChildIteratorType
      : public iterator_adaptor_base<
            ChildIteratorType, CompMapIt,
            typename std::iterator_traits<CompMapIt>::iterator_category,
            NodeRef> {
    ChildIteratorType(CompMapIt I)
        : ChildIteratorType::iterator_adaptor_base(I) {}

    // Children are the composition results, i.e. the mapped-to indices.
    NodeRef operator*() const { return wrapped()->second; }
  };

  static NodeRef getEntryNode(const SubRegIndexCompositionGraph &G) {
    return &G.EntryNode;
  }

  // Return the composition map that holds N's successors.
  static const CodeGenSubRegIndex::CompMap *children(NodeRef N) {
    if (auto *Idx = dyn_cast<CodeGenSubRegIndex *>(N))
      return &Idx->getComposites();
    return cast<const CodeGenSubRegIndex::CompMap *>(N);
  }

  static ChildIteratorType child_begin(NodeRef N) {
    return ChildIteratorType(children(N)->rbegin());
  }
  static ChildIteratorType child_end(NodeRef N) {
    return ChildIteratorType(children(N)->rend());
  }

  static auto nodes_begin(SubRegIndexCompositionGraph *G) {
    return G->SubRegIndices.begin();
  }
  static auto nodes_end(SubRegIndexCompositionGraph *G) {
    return G->SubRegIndices.end();
  }

  static unsigned size(SubRegIndexCompositionGraph *G) {
    return G->SubRegIndices.size();
  }
};

// Populate SubRegIndicesRPOT with all sub-register indices in reverse
// post-order of the composition graph, so an index is visited before any
// index that can be composed from it.
void CodeGenRegBank::computeSubRegIndicesRPOT() {
  SubRegIndexCompositionGraph G(SubRegIndices);
  ReversePostOrderTraversal<SubRegIndexCompositionGraph> RPOT(G);
  for (const auto N : RPOT) {
    // Skip the virtual entry node; only record real indices.
    if (auto *Idx = dyn_cast<CodeGenSubRegIndex *>(N))
      SubRegIndicesRPOT.push_back(Idx);
  }
}

namespace {

// UberRegSet is a helper class for computeRegUnitWeights. Each UberRegSet is
// the transitive closure of the union of overlapping register
// classes. Together, the UberRegSets form a partition of the registers. If we
// consider overlapping register classes to be connected, then each UberRegSet
// is a set of connected components.
//
// An UberRegSet will likely be a horizontal slice of register names of
// the same width. Nontrivial subregisters should then be in a separate
// UberRegSet. But this property isn't required for valid computation of
// register unit weights.
//
// A Weight field caches the max per-register unit weight in each UberRegSet.
//
// A set of SingularDeterminants flags single units of some register in this set
// for which the unit weight equals the set weight. These units should not have
// their weight increased.
struct UberRegSet {
  CodeGenRegister::Vec Regs;
  unsigned Weight = 0;
  CodeGenRegister::RegUnitList SingularDeterminants;

  UberRegSet() = default;
};

} // end anonymous namespace

// Partition registers into UberRegSets, where each set is the transitive
// closure of the union of overlapping register classes.
//
// UberRegSets[0] is a special non-allocatable set.
static void computeUberSets(std::vector<UberRegSet> &UberSets,
                            std::vector<UberRegSet *> &RegSets,
                            CodeGenRegBank &RegBank) {
  const auto &Registers = RegBank.getRegisters();

  // The Register EnumValue is one greater than its index into Registers.
  assert(Registers.size() == Registers.back().EnumValue &&
         "register enum value mismatch");

  // For simplicity make the SetID the same as EnumValue.
1742 IntEqClasses UberSetIDs(Registers.size() + 1); 1743 BitVector AllocatableRegs(Registers.size() + 1); 1744 for (CodeGenRegisterClass &RegClass : RegBank.getRegClasses()) { 1745 if (!RegClass.Allocatable) 1746 continue; 1747 1748 const CodeGenRegister::Vec &Regs = RegClass.getMembers(); 1749 if (Regs.empty()) 1750 continue; 1751 1752 unsigned USetID = UberSetIDs.findLeader((*Regs.begin())->EnumValue); 1753 assert(USetID && "register number 0 is invalid"); 1754 1755 AllocatableRegs.set((*Regs.begin())->EnumValue); 1756 for (const CodeGenRegister *CGR : llvm::drop_begin(Regs)) { 1757 AllocatableRegs.set(CGR->EnumValue); 1758 UberSetIDs.join(USetID, CGR->EnumValue); 1759 } 1760 } 1761 // Combine non-allocatable regs. 1762 for (const CodeGenRegister &Reg : Registers) { 1763 unsigned RegNum = Reg.EnumValue; 1764 if (AllocatableRegs.test(RegNum)) 1765 continue; 1766 1767 UberSetIDs.join(0, RegNum); 1768 } 1769 UberSetIDs.compress(); 1770 1771 // Make the first UberSet a special unallocatable set. 1772 unsigned ZeroID = UberSetIDs[0]; 1773 1774 // Insert Registers into the UberSets formed by union-find. 1775 // Do not resize after this. 1776 UberSets.resize(UberSetIDs.getNumClasses()); 1777 for (auto [Idx, Reg] : enumerate(Registers)) { 1778 unsigned USetID = UberSetIDs[Reg.EnumValue]; 1779 if (!USetID) 1780 USetID = ZeroID; 1781 else if (USetID == ZeroID) 1782 USetID = 0; 1783 1784 UberRegSet *USet = &UberSets[USetID]; 1785 USet->Regs.push_back(&Reg); 1786 RegSets[Idx] = USet; 1787 } 1788 } 1789 1790 // Recompute each UberSet weight after changing unit weights. 1791 static void computeUberWeights(MutableArrayRef<UberRegSet> UberSets, 1792 CodeGenRegBank &RegBank) { 1793 // Skip the first unallocatable set. 1794 for (UberRegSet &S : UberSets.drop_front()) { 1795 // Initialize all unit weights in this set, and remember the max units/reg. 
1796 unsigned MaxWeight = 0; 1797 for (const CodeGenRegister *R : S.Regs) { 1798 unsigned Weight = 0; 1799 for (unsigned U : R->getRegUnits()) { 1800 if (!RegBank.getRegUnit(U).Artificial) { 1801 unsigned UWeight = RegBank.getRegUnit(U).Weight; 1802 if (!UWeight) { 1803 UWeight = 1; 1804 RegBank.increaseRegUnitWeight(U, UWeight); 1805 } 1806 Weight += UWeight; 1807 } 1808 } 1809 MaxWeight = std::max(MaxWeight, Weight); 1810 } 1811 if (S.Weight != MaxWeight) { 1812 LLVM_DEBUG({ 1813 dbgs() << "UberSet " << &S - UberSets.begin() << " Weight " 1814 << MaxWeight; 1815 for (const CodeGenRegister *R : S.Regs) 1816 dbgs() << " " << R->getName(); 1817 dbgs() << '\n'; 1818 }); 1819 // Update the set weight. 1820 S.Weight = MaxWeight; 1821 } 1822 1823 // Find singular determinants. 1824 for (const CodeGenRegister *R : S.Regs) 1825 if (R->getRegUnits().count() == 1 && R->getWeight(RegBank) == S.Weight) 1826 S.SingularDeterminants |= R->getRegUnits(); 1827 } 1828 } 1829 1830 // normalizeWeight is a computeRegUnitWeights helper that adjusts the weight of 1831 // a register and its subregisters so that they have the same weight as their 1832 // UberSet. Self-recursion processes the subregister tree in postorder so 1833 // subregisters are normalized first. 1834 // 1835 // Side effects: 1836 // - creates new adopted register units 1837 // - causes superregisters to inherit adopted units 1838 // - increases the weight of "singular" units 1839 // - induces recomputation of UberWeights. 
1840 static bool normalizeWeight(CodeGenRegister *Reg, 1841 std::vector<UberRegSet> &UberSets, 1842 std::vector<UberRegSet *> &RegSets, 1843 BitVector &NormalRegs, 1844 CodeGenRegister::RegUnitList &NormalUnits, 1845 CodeGenRegBank &RegBank) { 1846 NormalRegs.resize(std::max(Reg->EnumValue + 1, NormalRegs.size())); 1847 if (NormalRegs.test(Reg->EnumValue)) 1848 return false; 1849 NormalRegs.set(Reg->EnumValue); 1850 1851 bool Changed = false; 1852 const CodeGenRegister::SubRegMap &SRM = Reg->getSubRegs(); 1853 for (auto SRI : SRM) { 1854 if (SRI.second == Reg) 1855 continue; // self-cycles happen 1856 1857 Changed |= normalizeWeight(SRI.second, UberSets, RegSets, NormalRegs, 1858 NormalUnits, RegBank); 1859 } 1860 // Postorder register normalization. 1861 1862 // Inherit register units newly adopted by subregisters. 1863 if (Reg->inheritRegUnits(RegBank)) 1864 computeUberWeights(UberSets, RegBank); 1865 1866 // Check if this register is too skinny for its UberRegSet. 1867 UberRegSet *UberSet = RegSets[RegBank.getRegIndex(Reg)]; 1868 1869 unsigned RegWeight = Reg->getWeight(RegBank); 1870 if (UberSet->Weight > RegWeight) { 1871 // A register unit's weight can be adjusted only if it is the singular unit 1872 // for this register, has not been used to normalize a subregister's set, 1873 // and has not already been used to singularly determine this UberRegSet. 1874 unsigned AdjustUnit = *Reg->getRegUnits().begin(); 1875 if (Reg->getRegUnits().count() != 1 || NormalUnits.test(AdjustUnit) || 1876 UberSet->SingularDeterminants.test(AdjustUnit)) { 1877 // We don't have an adjustable unit, so adopt a new one. 1878 AdjustUnit = RegBank.newRegUnit(UberSet->Weight - RegWeight); 1879 Reg->adoptRegUnit(AdjustUnit); 1880 // Adopting a unit does not immediately require recomputing set weights. 1881 } else { 1882 // Adjust the existing single unit. 
1883 if (!RegBank.getRegUnit(AdjustUnit).Artificial) 1884 RegBank.increaseRegUnitWeight(AdjustUnit, UberSet->Weight - RegWeight); 1885 // The unit may be shared among sets and registers within this set. 1886 computeUberWeights(UberSets, RegBank); 1887 } 1888 Changed = true; 1889 } 1890 1891 // Mark these units normalized so superregisters can't change their weights. 1892 NormalUnits |= Reg->getRegUnits(); 1893 1894 return Changed; 1895 } 1896 1897 // Compute a weight for each register unit created during getSubRegs. 1898 // 1899 // The goal is that two registers in the same class will have the same weight, 1900 // where each register's weight is defined as sum of its units' weights. 1901 void CodeGenRegBank::computeRegUnitWeights() { 1902 std::vector<UberRegSet> UberSets; 1903 std::vector<UberRegSet *> RegSets(Registers.size()); 1904 computeUberSets(UberSets, RegSets, *this); 1905 // UberSets and RegSets are now immutable. 1906 1907 computeUberWeights(UberSets, *this); 1908 1909 // Iterate over each Register, normalizing the unit weights until reaching 1910 // a fix point. 1911 unsigned NumIters = 0; 1912 for (bool Changed = true; Changed; ++NumIters) { 1913 assert(NumIters <= NumNativeRegUnits && "Runaway register unit weights"); 1914 (void)NumIters; 1915 Changed = false; 1916 for (CodeGenRegister &Reg : Registers) { 1917 CodeGenRegister::RegUnitList NormalUnits; 1918 BitVector NormalRegs; 1919 Changed |= normalizeWeight(&Reg, UberSets, RegSets, NormalRegs, 1920 NormalUnits, *this); 1921 } 1922 } 1923 } 1924 1925 // Find a set in UniqueSets with the same elements as Set. 1926 // Return an iterator into UniqueSets. 1927 static std::vector<RegUnitSet>::const_iterator 1928 findRegUnitSet(const std::vector<RegUnitSet> &UniqueSets, 1929 const RegUnitSet &Set) { 1930 return llvm::find_if( 1931 UniqueSets, [&Set](const RegUnitSet &I) { return I.Units == Set.Units; }); 1932 } 1933 1934 // Return true if the RUSubSet is a subset of RUSuperSet. 
1935 static bool isRegUnitSubSet(const std::vector<unsigned> &RUSubSet, 1936 const std::vector<unsigned> &RUSuperSet) { 1937 return llvm::includes(RUSuperSet, RUSubSet); 1938 } 1939 1940 /// Iteratively prune unit sets. Prune subsets that are close to the superset, 1941 /// but with one or two registers removed. We occasionally have registers like 1942 /// APSR and PC thrown in with the general registers. We also see many 1943 /// special-purpose register subsets, such as tail-call and Thumb 1944 /// encodings. Generating all possible overlapping sets is combinatorial and 1945 /// overkill for modeling pressure. Ideally we could fix this statically in 1946 /// tablegen by (1) having the target define register classes that only include 1947 /// the allocatable registers and marking other classes as non-allocatable and 1948 /// (2) having a way to mark special purpose classes as "don't-care" classes for 1949 /// the purpose of pressure. However, we make an attempt to handle targets that 1950 /// are not nicely defined by merging nearly identical register unit sets 1951 /// statically. This generates smaller tables. Then, dynamically, we adjust the 1952 /// set limit by filtering the reserved registers. 1953 /// 1954 /// Merge sets only if the units have the same weight. For example, on ARM, 1955 /// Q-tuples with ssub index 0 include all S regs but also include D16+. We 1956 /// should not expand the S set to include D regs. 1957 void CodeGenRegBank::pruneUnitSets() { 1958 assert(RegClassUnitSets.empty() && "this invalidates RegClassUnitSets"); 1959 1960 // Form an equivalence class of UnitSets with no significant difference. 
1961 std::vector<unsigned> SuperSetIDs; 1962 unsigned EndIdx = RegUnitSets.size(); 1963 for (auto [SubIdx, SubSet] : enumerate(RegUnitSets)) { 1964 unsigned SuperIdx = 0; 1965 for (; SuperIdx != EndIdx; ++SuperIdx) { 1966 if (SuperIdx == SubIdx) 1967 continue; 1968 1969 unsigned UnitWeight = RegUnits[SubSet.Units[0]].Weight; 1970 const RegUnitSet &SuperSet = RegUnitSets[SuperIdx]; 1971 if (isRegUnitSubSet(SubSet.Units, SuperSet.Units) && 1972 (SubSet.Units.size() + 3 > SuperSet.Units.size()) && 1973 UnitWeight == RegUnits[SuperSet.Units[0]].Weight && 1974 UnitWeight == RegUnits[SuperSet.Units.back()].Weight) { 1975 LLVM_DEBUG({ 1976 dbgs() << "UnitSet " << SubIdx << " subsumed by " << SuperIdx << '\n'; 1977 }); 1978 // We can pick any of the set names for the merged set. Go for the 1979 // shortest one to avoid picking the name of one of the classes that are 1980 // artificially created by tablegen. So "FPR128_lo" instead of 1981 // "QQQQ_with_qsub3_in_FPR128_lo". 1982 if (RegUnitSets[SubIdx].Name.size() < RegUnitSets[SuperIdx].Name.size()) 1983 RegUnitSets[SuperIdx].Name = RegUnitSets[SubIdx].Name; 1984 break; 1985 } 1986 } 1987 if (SuperIdx == EndIdx) 1988 SuperSetIDs.push_back(SubIdx); 1989 } 1990 // Populate PrunedUnitSets with each equivalence class's superset. 1991 std::vector<RegUnitSet> PrunedUnitSets; 1992 PrunedUnitSets.reserve(SuperSetIDs.size()); 1993 for (unsigned SuperIdx : SuperSetIDs) { 1994 PrunedUnitSets.emplace_back(RegUnitSets[SuperIdx].Name); 1995 PrunedUnitSets.back().Units = std::move(RegUnitSets[SuperIdx].Units); 1996 } 1997 RegUnitSets = std::move(PrunedUnitSets); 1998 } 1999 2000 // Create a RegUnitSet for each RegClass that contains all units in the class 2001 // including adopted units that are necessary to model register pressure. Then 2002 // iteratively compute RegUnitSets such that the union of any two overlapping 2003 // RegUnitSets is represented. 
2004 // 2005 // RegisterInfoEmitter will map each RegClass to its RegUnitClass and any 2006 // RegUnitSet that is a superset of that RegUnitClass. 2007 void CodeGenRegBank::computeRegUnitSets() { 2008 assert(RegUnitSets.empty() && "dirty RegUnitSets"); 2009 2010 #ifndef NDEBUG 2011 // Helper to print register unit sets. 2012 auto PrintRegUnitSets = [this]() { 2013 for (auto [USIdx, US] : enumerate(RegUnitSets)) { 2014 dbgs() << "UnitSet " << USIdx << " " << US.Name << ":"; 2015 printRegUnitNames(US.Units); 2016 } 2017 }; 2018 #endif // NDEBUG 2019 2020 // Compute a unique RegUnitSet for each RegClass. 2021 auto &RegClasses = getRegClasses(); 2022 for (CodeGenRegisterClass &RC : RegClasses) { 2023 if (!RC.Allocatable || RC.Artificial || !RC.GeneratePressureSet) 2024 continue; 2025 2026 // Compute a sorted list of units in this class. 2027 RegUnitSet RUSet(RC.getName()); 2028 RC.buildRegUnitSet(*this, RUSet.Units); 2029 2030 // Find an existing RegUnitSet. 2031 if (findRegUnitSet(RegUnitSets, RUSet) == RegUnitSets.end()) 2032 RegUnitSets.push_back(std::move(RUSet)); 2033 } 2034 2035 if (RegUnitSets.empty()) 2036 PrintFatalError("RegUnitSets cannot be empty!"); 2037 2038 LLVM_DEBUG({ 2039 dbgs() << "\nBefore pruning:\n"; 2040 PrintRegUnitSets(); 2041 }); 2042 2043 // Iteratively prune unit sets. 2044 pruneUnitSets(); 2045 2046 LLVM_DEBUG({ 2047 dbgs() << "\nBefore union:\n"; 2048 PrintRegUnitSets(); 2049 dbgs() << "\nUnion sets:\n"; 2050 }); 2051 2052 // Iterate over all unit sets, including new ones added by this loop. 2053 // FIXME: Since `EndIdx` is computed just once during loop initialization, 2054 // does this really iterate over new unit sets added by this loop? 2055 unsigned NumRegUnitSubSets = RegUnitSets.size(); 2056 for (unsigned Idx = 0, EndIdx = RegUnitSets.size(); Idx != EndIdx; ++Idx) { 2057 // In theory, this is combinatorial. In practice, it needs to be bounded 2058 // by a small number of sets for regpressure to be efficient. 
2059 // If the assert is hit, we need to implement pruning. 2060 assert(Idx < (2 * NumRegUnitSubSets) && "runaway unit set inference"); 2061 2062 // Compare new sets with all original classes. 2063 for (unsigned SearchIdx = (Idx >= NumRegUnitSubSets) ? 0 : Idx + 1; 2064 SearchIdx != EndIdx; ++SearchIdx) { 2065 std::vector<unsigned> Intersection; 2066 std::set_intersection( 2067 RegUnitSets[Idx].Units.begin(), RegUnitSets[Idx].Units.end(), 2068 RegUnitSets[SearchIdx].Units.begin(), 2069 RegUnitSets[SearchIdx].Units.end(), std::back_inserter(Intersection)); 2070 if (Intersection.empty()) 2071 continue; 2072 2073 RegUnitSet RUSet(RegUnitSets[Idx].Name + "_with_" + 2074 RegUnitSets[SearchIdx].Name); 2075 std::set_union(RegUnitSets[Idx].Units.begin(), 2076 RegUnitSets[Idx].Units.end(), 2077 RegUnitSets[SearchIdx].Units.begin(), 2078 RegUnitSets[SearchIdx].Units.end(), 2079 std::inserter(RUSet.Units, RUSet.Units.begin())); 2080 2081 // Find an existing RegUnitSet, or add the union to the unique sets. 2082 if (findRegUnitSet(RegUnitSets, RUSet) == RegUnitSets.end()) { 2083 LLVM_DEBUG({ 2084 dbgs() << "UnitSet " << RegUnitSets.size() << " " << RUSet.Name 2085 << ":"; 2086 printRegUnitNames(RUSet.Units); 2087 }); 2088 RegUnitSets.push_back(std::move(RUSet)); 2089 } 2090 } 2091 } 2092 2093 // Iteratively prune unit sets after inferring supersets. 2094 pruneUnitSets(); 2095 2096 LLVM_DEBUG({ 2097 dbgs() << '\n'; 2098 PrintRegUnitSets(); 2099 }); 2100 2101 // For each register class, list the UnitSets that are supersets. 2102 RegClassUnitSets.resize(RegClasses.size()); 2103 for (CodeGenRegisterClass &RC : RegClasses) { 2104 if (!RC.Allocatable) 2105 continue; 2106 2107 // Recompute the sorted list of units in this class. 2108 std::vector<unsigned> RCRegUnits; 2109 RC.buildRegUnitSet(*this, RCRegUnits); 2110 2111 // Don't increase pressure for unallocatable regclasses. 
2112 if (RCRegUnits.empty()) 2113 continue; 2114 2115 LLVM_DEBUG({ 2116 dbgs() << "RC " << RC.getName() << " Units:\n"; 2117 printRegUnitNames(RCRegUnits); 2118 dbgs() << "UnitSetIDs:"; 2119 }); 2120 2121 // Find all supersets. 2122 for (const auto &[USIdx, Set] : enumerate(RegUnitSets)) { 2123 if (isRegUnitSubSet(RCRegUnits, Set.Units)) { 2124 LLVM_DEBUG(dbgs() << " " << USIdx); 2125 RegClassUnitSets[RC.EnumValue].push_back(USIdx); 2126 } 2127 } 2128 LLVM_DEBUG(dbgs() << '\n'); 2129 assert( 2130 (!RegClassUnitSets[RC.EnumValue].empty() || !RC.GeneratePressureSet) && 2131 "missing unit set for regclass"); 2132 } 2133 2134 // For each register unit, ensure that we have the list of UnitSets that 2135 // contain the unit. Normally, this matches an existing list of UnitSets for a 2136 // register class. If not, we create a new entry in RegClassUnitSets as a 2137 // "fake" register class. 2138 for (unsigned UnitIdx = 0, UnitEnd = NumNativeRegUnits; UnitIdx < UnitEnd; 2139 ++UnitIdx) { 2140 std::vector<unsigned> RUSets; 2141 for (auto [Idx, S] : enumerate(RegUnitSets)) 2142 if (is_contained(S.Units, UnitIdx)) 2143 RUSets.push_back(Idx); 2144 2145 unsigned RCUnitSetsIdx = 0; 2146 for (unsigned e = RegClassUnitSets.size(); RCUnitSetsIdx != e; 2147 ++RCUnitSetsIdx) { 2148 if (RegClassUnitSets[RCUnitSetsIdx] == RUSets) { 2149 break; 2150 } 2151 } 2152 RegUnits[UnitIdx].RegClassUnitSetsIdx = RCUnitSetsIdx; 2153 if (RCUnitSetsIdx == RegClassUnitSets.size()) { 2154 // Create a new list of UnitSets as a "fake" register class. 2155 RegClassUnitSets.push_back(std::move(RUSets)); 2156 } 2157 } 2158 } 2159 2160 void CodeGenRegBank::computeRegUnitLaneMasks() { 2161 for (CodeGenRegister &Register : Registers) { 2162 // Create an initial lane mask for all register units. 2163 const auto &RegUnits = Register.getRegUnits(); 2164 CodeGenRegister::RegUnitLaneMaskList RegUnitLaneMasks( 2165 RegUnits.count(), LaneBitmask::getAll()); 2166 // Iterate through SubRegisters. 
2167 typedef CodeGenRegister::SubRegMap SubRegMap; 2168 const SubRegMap &SubRegs = Register.getSubRegs(); 2169 for (auto [SubRegIndex, SubReg] : SubRegs) { 2170 // Ignore non-leaf subregisters, their lane masks are fully covered by 2171 // the leaf subregisters anyway. 2172 if (!SubReg->getSubRegs().empty()) 2173 continue; 2174 LaneBitmask LaneMask = SubRegIndex->LaneMask; 2175 // Distribute LaneMask to Register Units touched. 2176 for (unsigned SUI : SubReg->getRegUnits()) { 2177 bool Found = false; 2178 unsigned u = 0; 2179 for (unsigned RU : RegUnits) { 2180 if (SUI == RU) { 2181 RegUnitLaneMasks[u] &= LaneMask; 2182 assert(!Found); 2183 Found = true; 2184 } 2185 ++u; 2186 } 2187 (void)Found; 2188 assert(Found); 2189 } 2190 } 2191 Register.setRegUnitLaneMasks(RegUnitLaneMasks); 2192 } 2193 } 2194 2195 void CodeGenRegBank::computeDerivedInfo() { 2196 computeComposites(); 2197 computeSubRegLaneMasks(); 2198 2199 // Compute a weight for each register unit created during getSubRegs. 2200 // This may create adopted register units (with unit # >= NumNativeRegUnits). 2201 computeRegUnitWeights(); 2202 2203 // Compute a unique set of RegUnitSets. One for each RegClass and inferred 2204 // supersets for the union of overlapping sets. 2205 computeRegUnitSets(); 2206 2207 computeRegUnitLaneMasks(); 2208 2209 // Compute register class HasDisjunctSubRegs/CoveredBySubRegs flag. 2210 for (CodeGenRegisterClass &RC : RegClasses) { 2211 RC.HasDisjunctSubRegs = false; 2212 RC.CoveredBySubRegs = true; 2213 for (const CodeGenRegister *Reg : RC.getMembers()) { 2214 RC.HasDisjunctSubRegs |= Reg->HasDisjunctSubRegs; 2215 RC.CoveredBySubRegs &= Reg->CoveredBySubRegs; 2216 } 2217 } 2218 2219 // Get the weight of each set. 2220 for (auto [Idx, US] : enumerate(RegUnitSets)) 2221 RegUnitSets[Idx].Weight = getRegUnitSetWeight(US.Units); 2222 2223 // Find the order of each set. 
2224 RegUnitSetOrder.reserve(RegUnitSets.size()); 2225 for (unsigned Idx : seq<unsigned>(RegUnitSets.size())) 2226 RegUnitSetOrder.push_back(Idx); 2227 2228 llvm::stable_sort(RegUnitSetOrder, [this](unsigned ID1, unsigned ID2) { 2229 return getRegPressureSet(ID1).Units.size() < 2230 getRegPressureSet(ID2).Units.size(); 2231 }); 2232 for (unsigned Idx = 0, EndIdx = RegUnitSets.size(); Idx != EndIdx; ++Idx) 2233 RegUnitSets[RegUnitSetOrder[Idx]].Order = Idx; 2234 } 2235 2236 // 2237 // Synthesize missing register class intersections. 2238 // 2239 // Make sure that sub-classes of RC exists such that getCommonSubClass(RC, X) 2240 // returns a maximal register class for all X. 2241 // 2242 void CodeGenRegBank::inferCommonSubClass(CodeGenRegisterClass *RC) { 2243 assert(!RegClasses.empty()); 2244 // Stash the iterator to the last element so that this loop doesn't visit 2245 // elements added by the getOrCreateSubClass call within it. 2246 for (auto I = RegClasses.begin(), E = std::prev(RegClasses.end()); 2247 I != std::next(E); ++I) { 2248 CodeGenRegisterClass *RC1 = RC; 2249 CodeGenRegisterClass *RC2 = &*I; 2250 if (RC1 == RC2) 2251 continue; 2252 2253 // Compute the set intersection of RC1 and RC2. 2254 const CodeGenRegister::Vec &Memb1 = RC1->getMembers(); 2255 const CodeGenRegister::Vec &Memb2 = RC2->getMembers(); 2256 CodeGenRegister::Vec Intersection; 2257 std::set_intersection(Memb1.begin(), Memb1.end(), Memb2.begin(), 2258 Memb2.end(), 2259 std::inserter(Intersection, Intersection.begin()), 2260 deref<std::less<>>()); 2261 2262 // Skip disjoint class pairs. 2263 if (Intersection.empty()) 2264 continue; 2265 2266 // If RC1 and RC2 have different spill sizes or alignments, use the 2267 // stricter one for sub-classing. If they are equal, prefer RC1. 
2268 if (RC2->RSI.hasStricterSpillThan(RC1->RSI)) 2269 std::swap(RC1, RC2); 2270 2271 getOrCreateSubClass(RC1, &Intersection, 2272 RC1->getName() + "_and_" + RC2->getName()); 2273 } 2274 } 2275 2276 // 2277 // Synthesize missing sub-classes for getSubClassWithSubReg(). 2278 // 2279 // Make sure that the set of registers in RC with a given SubIdx sub-register 2280 // form a register class. Update RC->SubClassWithSubReg. 2281 // 2282 void CodeGenRegBank::inferSubClassWithSubReg(CodeGenRegisterClass *RC) { 2283 // Map SubRegIndex to set of registers in RC supporting that SubRegIndex. 2284 typedef std::map<const CodeGenSubRegIndex *, CodeGenRegister::Vec, 2285 deref<std::less<>>> 2286 SubReg2SetMap; 2287 2288 // Compute the set of registers supporting each SubRegIndex. 2289 SubReg2SetMap SRSets; 2290 for (const CodeGenRegister *R : RC->getMembers()) { 2291 if (R->Artificial) 2292 continue; 2293 const CodeGenRegister::SubRegMap &SRM = R->getSubRegs(); 2294 for (auto [I, _] : SRM) 2295 SRSets[I].push_back(R); 2296 } 2297 2298 for (auto I : SRSets) 2299 sortAndUniqueRegisters(I.second); 2300 2301 // Find matching classes for all SRSets entries. Iterate in SubRegIndex 2302 // numerical order to visit synthetic indices last. 2303 for (const CodeGenSubRegIndex &SubIdx : SubRegIndices) { 2304 SubReg2SetMap::const_iterator I = SRSets.find(&SubIdx); 2305 // Unsupported SubRegIndex. Skip it. 2306 if (I == SRSets.end()) 2307 continue; 2308 // In most cases, all RC registers support the SubRegIndex. 2309 if (I->second.size() == RC->getMembers().size()) { 2310 RC->setSubClassWithSubReg(&SubIdx, RC); 2311 continue; 2312 } 2313 if (SubIdx.Artificial) 2314 continue; 2315 // This is a real subset. See if we have a matching class. 
2316 CodeGenRegisterClass *SubRC = 2317 getOrCreateSubClass(RC, &I->second, 2318 RC->getName() + "_with_" + I->first->getName()) 2319 .first; 2320 RC->setSubClassWithSubReg(&SubIdx, SubRC); 2321 } 2322 } 2323 2324 // 2325 // Synthesize missing sub-classes of RC for getMatchingSuperRegClass(). 2326 // 2327 // Create sub-classes of RC such that getMatchingSuperRegClass(RC, SubIdx, X) 2328 // has a maximal result for any SubIdx and any X >= FirstSubRegRC. 2329 // 2330 2331 void CodeGenRegBank::inferMatchingSuperRegClass( 2332 CodeGenRegisterClass *RC, 2333 std::list<CodeGenRegisterClass>::iterator FirstSubRegRC) { 2334 DenseSet<const CodeGenSubRegIndex *> ImpliedSubRegIndices; 2335 std::vector<std::pair<const CodeGenRegister *, const CodeGenRegister *>> 2336 SubToSuperRegs; 2337 BitVector TopoSigs(getNumTopoSigs()); 2338 2339 // Iterate subregister indices in topological order to visit larger indices 2340 // first. This allows us to skip the smaller indices in many cases because 2341 // their inferred super-register classes are implied. 2342 for (CodeGenSubRegIndex *SubIdx : SubRegIndicesRPOT) { 2343 // Skip indexes that aren't fully supported by RC's registers. This was 2344 // computed by inferSubClassWithSubReg() above which should have been 2345 // called first. 2346 if (RC->getSubClassWithSubReg(SubIdx) != RC) 2347 continue; 2348 2349 if (ImpliedSubRegIndices.contains(SubIdx)) 2350 continue; 2351 2352 // Build list of (Sub, Super) pairs for this SubIdx, sorted by Sub. Note 2353 // that the list may contain entries with the same Sub but different Supers. 
2354 SubToSuperRegs.clear(); 2355 TopoSigs.reset(); 2356 for (const CodeGenRegister *Super : RC->getMembers()) { 2357 const CodeGenRegister *Sub = Super->getSubRegs().find(SubIdx)->second; 2358 assert(Sub && "Missing sub-register"); 2359 SubToSuperRegs.emplace_back(Sub, Super); 2360 TopoSigs.set(Sub->getTopoSig()); 2361 } 2362 sort(SubToSuperRegs, on_first<deref<std::less<>>>()); 2363 2364 // Iterate over sub-register class candidates. Ignore classes created by 2365 // this loop. They will never be useful. 2366 // Store an iterator to the last element (not end) so that this loop doesn't 2367 // visit newly inserted elements. 2368 assert(!RegClasses.empty()); 2369 for (auto I = FirstSubRegRC, E = std::prev(RegClasses.end()); 2370 I != std::next(E); ++I) { 2371 CodeGenRegisterClass &SubRC = *I; 2372 if (SubRC.Artificial) 2373 continue; 2374 // Topological shortcut: SubRC members have the wrong shape. 2375 if (!TopoSigs.anyCommon(SubRC.getRegsWithSuperRegsTopoSigs())) 2376 continue; 2377 // Compute the subset of RC that maps into SubRC with a single linear scan 2378 // through SubToSuperRegs and the members of SubRC. 2379 CodeGenRegister::Vec SubSetVec; 2380 auto SubI = SubRC.getMembers().begin(), SubE = SubRC.getMembers().end(); 2381 for (auto &[Sub, Super] : SubToSuperRegs) { 2382 while (SubI != SubE && **SubI < *Sub) 2383 ++SubI; 2384 if (SubI == SubE) 2385 break; 2386 if (**SubI == *Sub) 2387 SubSetVec.push_back(Super); 2388 } 2389 2390 if (SubSetVec.empty()) 2391 continue; 2392 2393 // RC injects completely into SubRC. 2394 sortAndUniqueRegisters(SubSetVec); 2395 if (SubSetVec.size() == RC->getMembers().size()) { 2396 SubRC.addSuperRegClass(SubIdx, RC); 2397 2398 // We can skip checking subregister indices that can be composed from 2399 // the current SubIdx. 2400 // 2401 // Proof sketch: Let SubRC' be another register class and SubSubIdx 2402 // a subregister index that can be composed from SubIdx. 
2403 // 2404 // Calling this function with SubRC in place of RC ensures the existence 2405 // of a subclass X of SubRC with the registers that have subregisters in 2406 // SubRC'. 2407 // 2408 // The set of registers in RC with SubSubIdx in SubRC' is equal to the 2409 // set of registers in RC with SubIdx in X (because every register in 2410 // RC has a corresponding subregister in SubRC), and so checking the 2411 // pair (SubSubIdx, SubRC') is redundant with checking (SubIdx, X). 2412 for (const auto &SubSubIdx : SubIdx->getComposites()) 2413 ImpliedSubRegIndices.insert(SubSubIdx.second); 2414 2415 continue; 2416 } 2417 2418 // Only a subset of RC maps into SubRC. Make sure it is represented by a 2419 // class. 2420 // 2421 // The name of the inferred register class follows the template 2422 // "<RC>_with_<SubIdx>_in_<SubRC>". 2423 // 2424 // When SubRC is already an inferred class, prefer a name of the form 2425 // "<RC>_with_<CompositeSubIdx>_in_<SubSubRC>" over a chain of the form 2426 // "<RC>_with_<SubIdx>_in_<OtherRc>_with_<SubSubIdx>_in_<SubSubRC>". 2427 CodeGenSubRegIndex *CompositeSubIdx = SubIdx; 2428 CodeGenRegisterClass *CompositeSubRC = &SubRC; 2429 if (CodeGenSubRegIndex *SubSubIdx = SubRC.getInferredFromSubRegIdx()) { 2430 auto It = SubIdx->getComposites().find(SubSubIdx); 2431 if (It != SubIdx->getComposites().end()) { 2432 CompositeSubIdx = It->second; 2433 CompositeSubRC = SubRC.getInferredFromRC(); 2434 } 2435 } 2436 2437 auto [SubSetRC, Inserted] = getOrCreateSubClass( 2438 RC, &SubSetVec, 2439 RC->getName() + "_with_" + CompositeSubIdx->getName() + "_in_" + 2440 CompositeSubRC->getName()); 2441 2442 if (Inserted) 2443 SubSetRC->setInferredFrom(CompositeSubIdx, CompositeSubRC); 2444 } 2445 } 2446 } 2447 2448 // 2449 // Infer missing register classes. 
2450 // 2451 void CodeGenRegBank::computeInferredRegisterClasses() { 2452 assert(!RegClasses.empty()); 2453 // When this function is called, the register classes have not been sorted 2454 // and assigned EnumValues yet. That means getSubClasses(), 2455 // getSuperClasses(), and hasSubClass() functions are defunct. 2456 2457 // Use one-before-the-end so it doesn't move forward when new elements are 2458 // added. 2459 auto FirstNewRC = std::prev(RegClasses.end()); 2460 2461 // Visit all register classes, including the ones being added by the loop. 2462 // Watch out for iterator invalidation here. 2463 for (auto I = RegClasses.begin(), E = RegClasses.end(); I != E; ++I) { 2464 CodeGenRegisterClass *RC = &*I; 2465 if (RC->Artificial) 2466 continue; 2467 2468 // Synthesize answers for getSubClassWithSubReg(). 2469 inferSubClassWithSubReg(RC); 2470 2471 // Synthesize answers for getCommonSubClass(). 2472 inferCommonSubClass(RC); 2473 2474 // Synthesize answers for getMatchingSuperRegClass(). 2475 inferMatchingSuperRegClass(RC); 2476 2477 // New register classes are created while this loop is running, and we need 2478 // to visit all of them. In particular, inferMatchingSuperRegClass needs 2479 // to match old super-register classes with sub-register classes created 2480 // after inferMatchingSuperRegClass was called. At this point, 2481 // inferMatchingSuperRegClass has checked SuperRC = [0..rci] with SubRC = 2482 // [0..FirstNewRC). We need to cover SubRC = [FirstNewRC..rci]. 2483 if (I == FirstNewRC) { 2484 auto NextNewRC = std::prev(RegClasses.end()); 2485 for (auto I2 = RegClasses.begin(), E2 = std::next(FirstNewRC); I2 != E2; 2486 ++I2) 2487 inferMatchingSuperRegClass(&*I2, E2); 2488 FirstNewRC = NextNewRC; 2489 } 2490 } 2491 2492 // Compute the transitive closure for super-register classes. 
2493 // 2494 // By iterating over sub-register indices in topological order, we only ever 2495 // add super-register classes for sub-register indices that have not already 2496 // been visited. That allows computing the transitive closure in a single 2497 // pass. 2498 for (CodeGenSubRegIndex *SubIdx : SubRegIndicesRPOT) { 2499 for (CodeGenRegisterClass &SubRC : RegClasses) 2500 SubRC.extendSuperRegClasses(SubIdx); 2501 } 2502 } 2503 2504 /// getRegisterClassForRegister - Find the register class that contains the 2505 /// specified physical register. If the register is not in a register class, 2506 /// return null. If the register is in multiple classes, and the classes have a 2507 /// superset-subset relationship and the same set of types, return the 2508 /// superclass. Otherwise return null. 2509 const CodeGenRegisterClass * 2510 CodeGenRegBank::getRegClassForRegister(const Record *R) { 2511 const CodeGenRegister *Reg = getReg(R); 2512 const CodeGenRegisterClass *FoundRC = nullptr; 2513 for (const CodeGenRegisterClass &RC : getRegClasses()) { 2514 if (!RC.contains(Reg)) 2515 continue; 2516 2517 // If this is the first class that contains the register, 2518 // make a note of it and go on to the next class. 2519 if (!FoundRC) { 2520 FoundRC = &RC; 2521 continue; 2522 } 2523 2524 // If a register's classes have different types, return null. 2525 if (RC.getValueTypes() != FoundRC->getValueTypes()) 2526 return nullptr; 2527 2528 // Check to see if the previously found class that contains 2529 // the register is a subclass of the current class. If so, 2530 // prefer the superclass. 2531 if (RC.hasSubClass(FoundRC)) { 2532 FoundRC = &RC; 2533 continue; 2534 } 2535 2536 // Check to see if the previously found class that contains 2537 // the register is a superclass of the current class. If so, 2538 // prefer the superclass. 2539 if (FoundRC->hasSubClass(&RC)) 2540 continue; 2541 2542 // Multiple classes, and neither is a superclass of the other. 2543 // Return null. 
2544 return nullptr; 2545 } 2546 return FoundRC; 2547 } 2548 2549 const CodeGenRegisterClass * 2550 CodeGenRegBank::getMinimalPhysRegClass(const Record *RegRecord, 2551 ValueTypeByHwMode *VT) { 2552 const CodeGenRegister *Reg = getReg(RegRecord); 2553 const CodeGenRegisterClass *BestRC = nullptr; 2554 for (const CodeGenRegisterClass &RC : getRegClasses()) { 2555 if ((!VT || RC.hasType(*VT)) && RC.contains(Reg) && 2556 (!BestRC || BestRC->hasSubClass(&RC))) 2557 BestRC = &RC; 2558 } 2559 2560 assert(BestRC && "Couldn't find the register class"); 2561 return BestRC; 2562 } 2563 2564 const CodeGenRegisterClass * 2565 CodeGenRegBank::getSuperRegForSubReg(const ValueTypeByHwMode &ValueTy, 2566 const CodeGenSubRegIndex *SubIdx, 2567 bool MustBeAllocatable) const { 2568 std::vector<const CodeGenRegisterClass *> Candidates; 2569 auto &RegClasses = getRegClasses(); 2570 2571 // Try to find a register class which supports ValueTy, and also contains 2572 // SubIdx. 2573 for (const CodeGenRegisterClass &RC : RegClasses) { 2574 // Is there a subclass of this class which contains this subregister index? 2575 const CodeGenRegisterClass *SubClassWithSubReg = 2576 RC.getSubClassWithSubReg(SubIdx); 2577 if (!SubClassWithSubReg) 2578 continue; 2579 2580 // We have a class. Check if it supports this value type. 2581 if (!llvm::is_contained(SubClassWithSubReg->VTs, ValueTy)) 2582 continue; 2583 2584 // If necessary, check that it is allocatable. 2585 if (MustBeAllocatable && !SubClassWithSubReg->Allocatable) 2586 continue; 2587 2588 // We have a register class which supports both the value type and 2589 // subregister index. Remember it. 2590 Candidates.push_back(SubClassWithSubReg); 2591 } 2592 2593 // If we didn't find anything, we're done. 2594 if (Candidates.empty()) 2595 return nullptr; 2596 2597 // Find and return the largest of our candidate classes. 
2598 llvm::stable_sort(Candidates, [&](const CodeGenRegisterClass *A, 2599 const CodeGenRegisterClass *B) { 2600 if (A->getMembers().size() > B->getMembers().size()) 2601 return true; 2602 2603 if (A->getMembers().size() < B->getMembers().size()) 2604 return false; 2605 2606 // Order by name as a tie-breaker. 2607 return StringRef(A->getName()) < B->getName(); 2608 }); 2609 2610 return Candidates[0]; 2611 } 2612 2613 BitVector 2614 CodeGenRegBank::computeCoveredRegisters(ArrayRef<const Record *> Regs) { 2615 SetVector<const CodeGenRegister *> Set; 2616 2617 // First add Regs with all sub-registers. 2618 for (const Record *RegDef : Regs) { 2619 CodeGenRegister *Reg = getReg(RegDef); 2620 if (Set.insert(Reg)) 2621 // Reg is new, add all sub-registers. 2622 // The pre-ordering is not important here. 2623 Reg->addSubRegsPreOrder(Set, *this); 2624 } 2625 2626 // Second, find all super-registers that are completely covered by the set. 2627 for (unsigned i = 0; i != Set.size(); ++i) { 2628 for (const CodeGenRegister *Super : Set[i]->getSuperRegs()) { 2629 if (!Super->CoveredBySubRegs || Set.contains(Super)) 2630 continue; 2631 // This new super-register is covered by its sub-registers. 2632 bool AllSubsInSet = true; 2633 const CodeGenRegister::SubRegMap &SRM = Super->getSubRegs(); 2634 for (auto [_, SR] : SRM) 2635 if (!Set.contains(SR)) { 2636 AllSubsInSet = false; 2637 break; 2638 } 2639 // All sub-registers in Set, add Super as well. 2640 // We will visit Super later to recheck its super-registers. 2641 if (AllSubsInSet) 2642 Set.insert(Super); 2643 } 2644 } 2645 2646 // Convert to BitVector. 
2647 BitVector BV(Registers.size() + 1); 2648 for (const CodeGenRegister *Reg : Set) 2649 BV.set(Reg->EnumValue); 2650 return BV; 2651 } 2652 2653 void CodeGenRegBank::printRegUnitNames(ArrayRef<unsigned> Units) const { 2654 for (unsigned Unit : Units) { 2655 if (Unit < NumNativeRegUnits) 2656 dbgs() << ' ' << RegUnits[Unit].Roots[0]->getName(); 2657 else 2658 dbgs() << " #" << Unit; 2659 } 2660 dbgs() << '\n'; 2661 } 2662