1 //===- llvm/CodeGen/LiveInterval.h - Interval representation ----*- C++ -*-===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file implements the LiveRange and LiveInterval classes. Given some 10 // numbering of each the machine instructions an interval [i, j) is said to be a 11 // live range for register v if there is no instruction with number j' >= j 12 // such that v is live at j' and there is no instruction with number i' < i such 13 // that v is live at i'. In this implementation ranges can have holes, 14 // i.e. a range might look like [1,20), [50,65), [1000,1001). Each 15 // individual segment is represented as an instance of LiveRange::Segment, 16 // and the whole range is represented as an instance of LiveRange. 17 // 18 //===----------------------------------------------------------------------===// 19 20 #ifndef LLVM_CODEGEN_LIVEINTERVAL_H 21 #define LLVM_CODEGEN_LIVEINTERVAL_H 22 23 #include "llvm/ADT/ArrayRef.h" 24 #include "llvm/ADT/IntEqClasses.h" 25 #include "llvm/ADT/STLExtras.h" 26 #include "llvm/ADT/SmallVector.h" 27 #include "llvm/ADT/iterator_range.h" 28 #include "llvm/CodeGen/Register.h" 29 #include "llvm/CodeGen/SlotIndexes.h" 30 #include "llvm/MC/LaneBitmask.h" 31 #include "llvm/Support/Allocator.h" 32 #include "llvm/Support/Compiler.h" 33 #include "llvm/Support/MathExtras.h" 34 #include <algorithm> 35 #include <cassert> 36 #include <cstddef> 37 #include <functional> 38 #include <memory> 39 #include <set> 40 #include <tuple> 41 #include <utility> 42 43 namespace llvm { 44 45 class CoalescerPair; 46 class LiveIntervals; 47 class MachineRegisterInfo; 48 class raw_ostream; 49 50 /// VNInfo - Value Number Information. 
/// This class holds information about a machine-level value, including its
/// definition and use points.
///
class VNInfo {
public:
  using Allocator = BumpPtrAllocator;

  /// The ID number of this value.
  unsigned id;

  /// The index of the defining instruction.
  SlotIndex def;

  /// VNInfo constructor.
  VNInfo(unsigned i, SlotIndex d) : id(i), def(d) {}

  /// VNInfo constructor, copies values from orig, except for the value number.
  VNInfo(unsigned i, const VNInfo &orig) : id(i), def(orig.def) {}

  /// Copy from the parameter into this VNInfo.
  void copyFrom(VNInfo &src) {
    def = src.def;
  }

  /// Returns true if this value is defined by a PHI instruction (or was,
  /// PHI instructions may have been eliminated).
  /// PHI-defs begin at a block boundary, all other defs begin at register or
  /// EC slots.
  bool isPHIDef() const { return def.isBlock(); }

  /// Returns true if this value is unused, i.e. it has no defining index.
  bool isUnused() const { return !def.isValid(); }

  /// Mark this value as unused by invalidating its defining index.
  void markUnused() { def = SlotIndex(); }
};

/// Result of a LiveRange query. This class hides the implementation details
/// of live ranges, and it should be used as the primary interface for
/// examining live ranges around instructions.
class LiveQueryResult {
  VNInfo *const EarlyVal;
  VNInfo *const LateVal;
  const SlotIndex EndPoint;
  const bool Kill;

public:
  LiveQueryResult(VNInfo *EarlyVal, VNInfo *LateVal, SlotIndex EndPoint,
                  bool Kill)
      : EarlyVal(EarlyVal), LateVal(LateVal), EndPoint(EndPoint), Kill(Kill)
  {}

  /// Return the value that is live-in to the instruction. This is the value
  /// that will be read by the instruction's use operands. Return NULL if no
  /// value is live-in.
  VNInfo *valueIn() const {
    return EarlyVal;
  }

  /// Return true if the live-in value is killed by this instruction. This
  /// means that either the live range ends at the instruction, or it changes
  /// value.
  bool isKill() const {
    return Kill;
  }

  /// Return true if this instruction has a dead def.
  bool isDeadDef() const {
    return EndPoint.isDead();
  }

  /// Return the value leaving the instruction, if any. This can be a
  /// live-through value, or a live def. A dead def returns NULL.
  VNInfo *valueOut() const {
    return isDeadDef() ? nullptr : LateVal;
  }

  /// Returns the value alive at the end of the instruction, if any. This can
  /// be a live-through value, a live def or a dead def.
  VNInfo *valueOutOrDead() const {
    return LateVal;
  }

  /// Return the value defined by this instruction, if any. This includes
  /// dead defs, it is the value created by the instruction's def operands.
  /// A NULL result means the instruction defines no new value (EarlyVal and
  /// LateVal are the same live-through value).
  VNInfo *valueDefined() const {
    return EarlyVal == LateVal ? nullptr : LateVal;
  }

  /// Return the end point of the last live range segment to interact with
  /// the instruction, if any.
  ///
  /// The end point is an invalid SlotIndex only if the live range doesn't
  /// intersect the instruction at all.
  ///
  /// The end point may be at or past the end of the instruction's basic
  /// block. That means the value was live out of the block.
  SlotIndex endPoint() const {
    return EndPoint;
  }
};

/// This class represents the liveness of a register, stack slot, etc.
/// It manages an ordered list of Segment objects.
/// The Segments are organized in a static single assignment form: At places
/// where a new value is defined or different values reach a CFG join a new
/// segment with a new value number is used.
class LiveRange {
public:
  /// This represents a simple continuous liveness interval for a value.
  /// The start point is inclusive, the end point exclusive. These intervals
  /// are rendered as [start,end).
  struct Segment {
    SlotIndex start;         // Start point of the interval (inclusive)
    SlotIndex end;           // End point of the interval (exclusive)
    VNInfo *valno = nullptr; // identifier for the value contained in this
                             // segment.

    Segment() = default;

    Segment(SlotIndex S, SlotIndex E, VNInfo *V)
        : start(S), end(E), valno(V) {
      assert(S < E && "Cannot create empty or backwards segment");
    }

    /// Return true if the index is covered by this segment.
    bool contains(SlotIndex I) const {
      return start <= I && I < end;
    }

    /// Return true if the given interval, [S, E), is covered by this segment.
    bool containsInterval(SlotIndex S, SlotIndex E) const {
      assert((S < E) && "Backwards interval?");
      return (start <= S && S < end) && (start < E && E <= end);
    }

    // Segments are ordered by (start, end); valno does not participate.
    bool operator<(const Segment &Other) const {
      return std::tie(start, end) < std::tie(Other.start, Other.end);
    }
    bool operator==(const Segment &Other) const {
      return start == Other.start && end == Other.end;
    }

    bool operator!=(const Segment &Other) const {
      return !(*this == Other);
    }

    LLVM_ABI void dump() const;
  };

  using Segments = SmallVector<Segment, 2>;
  using VNInfoList = SmallVector<VNInfo *, 2>;

  Segments segments;  // the liveness segments
  VNInfoList valnos;  // value#'s

  // The segment set is used temporarily to accelerate initial computation
  // of live ranges of physical registers in computeRegUnitRange.
  // After that the set is flushed to the segment vector and deleted.
  using SegmentSet = std::set<Segment>;
  std::unique_ptr<SegmentSet> segmentSet;

  using iterator = Segments::iterator;
  using const_iterator = Segments::const_iterator;

  iterator begin() { return segments.begin(); }
  iterator end() { return segments.end(); }

  const_iterator begin() const { return segments.begin(); }
  const_iterator end() const { return segments.end(); }

  using vni_iterator = VNInfoList::iterator;
  using const_vni_iterator = VNInfoList::const_iterator;

  vni_iterator vni_begin() { return valnos.begin(); }
  vni_iterator vni_end() { return valnos.end(); }

  const_vni_iterator vni_begin() const { return valnos.begin(); }
  const_vni_iterator vni_end() const { return valnos.end(); }

  iterator_range<vni_iterator> vnis() {
    return make_range(vni_begin(), vni_end());
  }

  iterator_range<const_vni_iterator> vnis() const {
    return make_range(vni_begin(), vni_end());
  }

  /// Constructs a new LiveRange object.
  LiveRange(bool UseSegmentSet = false)
      : segmentSet(UseSegmentSet ? std::make_unique<SegmentSet>()
                                 : nullptr) {}

  /// Constructs a new LiveRange object by copying segments and valnos from
  /// another LiveRange.
  LiveRange(const LiveRange &Other, BumpPtrAllocator &Allocator) {
    assert(Other.segmentSet == nullptr &&
           "Copying of LiveRanges with active SegmentSets is not supported");
    assign(Other, Allocator);
  }

  /// Copies values numbers and live segments from \p Other into this range.
  void assign(const LiveRange &Other, BumpPtrAllocator &Allocator) {
    if (this == &Other)
      return;

    assert(Other.segmentSet == nullptr &&
           "Copying of LiveRanges with active SegmentSets is not supported");
    // Duplicate valnos.
    for (const VNInfo *VNI : Other.valnos)
      createValueCopy(VNI, Allocator);
    // Now we can copy segments and remap their valnos.
    // createValueCopy assigns ids sequentially, so valnos[id] is the copy of
    // Other's value with the same id.
    for (const Segment &S : Other.segments)
      segments.push_back(Segment(S.start, S.end, valnos[S.valno->id]));
  }

  /// advanceTo - Advance the specified iterator to point to the Segment
  /// containing the specified position, or end() if the position is past the
  /// end of the range.  If no Segment contains this position, but the
  /// position is in a hole, this method returns an iterator pointing to the
  /// Segment immediately after the hole.
  iterator advanceTo(iterator I, SlotIndex Pos) {
    assert(I != end());
    if (Pos >= endIndex())
      return end();
    while (I->end <= Pos) ++I;
    return I;
  }

  const_iterator advanceTo(const_iterator I, SlotIndex Pos) const {
    assert(I != end());
    if (Pos >= endIndex())
      return end();
    while (I->end <= Pos) ++I;
    return I;
  }

  /// find - Return an iterator pointing to the first segment that ends after
  /// Pos, or end(). This is the same as advanceTo(begin(), Pos), but faster
  /// when searching large ranges.
  ///
  /// If Pos is contained in a Segment, that segment is returned.
  /// If Pos is in a hole, the following Segment is returned.
  /// If Pos is beyond endIndex, end() is returned.
  LLVM_ABI iterator find(SlotIndex Pos);

  const_iterator find(SlotIndex Pos) const {
    return const_cast<LiveRange*>(this)->find(Pos);
  }

  void clear() {
    valnos.clear();
    segments.clear();
  }

  size_t size() const {
    return segments.size();
  }

  bool hasAtLeastOneValue() const { return !valnos.empty(); }

  bool containsOneValue() const { return valnos.size() == 1; }

  unsigned getNumValNums() const { return (unsigned)valnos.size(); }

  /// getValNumInfo - Returns pointer to the specified val#.
  ///
  inline VNInfo *getValNumInfo(unsigned ValNo) {
    return valnos[ValNo];
  }
  inline const VNInfo *getValNumInfo(unsigned ValNo) const {
    return valnos[ValNo];
  }

  /// containsValue - Returns true if VNI belongs to this range.
  bool containsValue(const VNInfo *VNI) const {
    return VNI && VNI->id < getNumValNums() && VNI == getValNumInfo(VNI->id);
  }

  /// getNextValue - Create a new value number and return it.
  /// @p Def is the index of instruction that defines the value number.
  VNInfo *getNextValue(SlotIndex Def, VNInfo::Allocator &VNInfoAllocator) {
    VNInfo *VNI =
      new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), Def);
    valnos.push_back(VNI);
    return VNI;
  }

  /// createDeadDef - Make sure the range has a value defined at Def.
  /// If one already exists, return it. Otherwise allocate a new value and
  /// add liveness for a dead def.
  LLVM_ABI VNInfo *createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc);

  /// Create a def of value @p VNI. Return @p VNI. If there already exists
  /// a definition at VNI->def, the value defined there must be @p VNI.
  LLVM_ABI VNInfo *createDeadDef(VNInfo *VNI);

  /// Create a copy of the given value. The new value will be identical except
  /// for the Value number.
  VNInfo *createValueCopy(const VNInfo *orig,
                          VNInfo::Allocator &VNInfoAllocator) {
    VNInfo *VNI =
      new (VNInfoAllocator) VNInfo((unsigned)valnos.size(), *orig);
    valnos.push_back(VNI);
    return VNI;
  }

  /// RenumberValues - Renumber all values in order of appearance and remove
  /// unused values.
  LLVM_ABI void RenumberValues();

  /// MergeValueNumberInto - This method is called when two value numbers
  /// are found to be equivalent.  This eliminates V1, replacing all
  /// segments with the V1 value number with the V2 value number.  This can
  /// cause merging of V1/V2 values numbers and compaction of the value space.
  LLVM_ABI VNInfo *MergeValueNumberInto(VNInfo *V1, VNInfo *V2);

  /// Merge all of the live segments of a specific val# in RHS into this live
  /// range as the specified value number. The segments in RHS are allowed
  /// to overlap with segments in the current range, it will replace the
  /// value numbers of the overlaped live segments with the specified value
  /// number.
  LLVM_ABI void MergeSegmentsInAsValue(const LiveRange &RHS,
                                       VNInfo *LHSValNo);

  /// MergeValueInAsValue - Merge all of the segments of a specific val#
  /// in RHS into this live range as the specified value number.
  /// The segments in RHS are allowed to overlap with segments in the
  /// current range, but only if the overlapping segments have the
  /// specified value number.
  LLVM_ABI void MergeValueInAsValue(const LiveRange &RHS,
                                    const VNInfo *RHSValNo, VNInfo *LHSValNo);

  bool empty() const { return segments.empty(); }

  /// beginIndex - Return the lowest numbered slot covered.
  SlotIndex beginIndex() const {
    assert(!empty() && "Call to beginIndex() on empty range.");
    return segments.front().start;
  }

  /// endNumber - return the maximum point of the range of the whole,
  /// exclusive.
  SlotIndex endIndex() const {
    assert(!empty() && "Call to endIndex() on empty range.");
    return segments.back().end;
  }

  bool expiredAt(SlotIndex index) const {
    return index >= endIndex();
  }

  bool liveAt(SlotIndex index) const {
    // find() returns the first segment ending after index; it contains index
    // only if it also starts at or before index.
    const_iterator r = find(index);
    return r != end() && r->start <= index;
  }

  /// Return the segment that contains the specified index, or null if there
  /// is none.
  const Segment *getSegmentContaining(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : &*I;
  }

  /// Return the live segment that contains the specified index, or null if
  /// there is none.
  Segment *getSegmentContaining(SlotIndex Idx) {
    iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : &*I;
  }

  /// getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
  VNInfo *getVNInfoAt(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx);
    return I == end() ? nullptr : I->valno;
  }

  /// getVNInfoBefore - Return the VNInfo that is live up to but not
  /// necessarily including Idx, or NULL. Use this to find the reaching def
  /// used by an instruction at this SlotIndex position.
  VNInfo *getVNInfoBefore(SlotIndex Idx) const {
    const_iterator I = FindSegmentContaining(Idx.getPrevSlot());
    return I == end() ? nullptr : I->valno;
  }

  /// Return an iterator to the segment that contains the specified index, or
  /// end() if there is none.
  iterator FindSegmentContaining(SlotIndex Idx) {
    iterator I = find(Idx);
    return I != end() && I->start <= Idx ? I : end();
  }

  const_iterator FindSegmentContaining(SlotIndex Idx) const {
    const_iterator I = find(Idx);
    return I != end() && I->start <= Idx ? I : end();
  }

  /// overlaps - Return true if the intersection of the two live ranges is
  /// not empty.
  bool overlaps(const LiveRange &other) const {
    if (other.empty())
      return false;
    return overlapsFrom(other, other.begin());
  }

  /// overlaps - Return true if the two ranges have overlapping segments
  /// that are not coalescable according to CP.
  ///
  /// Overlapping segments where one range is defined by a coalescable
  /// copy are allowed.
  LLVM_ABI bool overlaps(const LiveRange &Other, const CoalescerPair &CP,
                         const SlotIndexes &) const;

  /// overlaps - Return true if the live range overlaps an interval specified
  /// by [Start, End).
  LLVM_ABI bool overlaps(SlotIndex Start, SlotIndex End) const;

  /// overlapsFrom - Return true if the intersection of the two live ranges
  /// is not empty.  The specified iterator is a hint that we can begin
  /// scanning the Other range starting at I.
  LLVM_ABI bool overlapsFrom(const LiveRange &Other,
                             const_iterator StartPos) const;

  /// Returns true if all segments of the @p Other live range are completely
  /// covered by this live range.
  /// Adjacent live ranges do not affect the covering: the live range
  /// [1,5](5,10] covers (3,7].
  LLVM_ABI bool covers(const LiveRange &Other) const;

  /// Add the specified Segment to this range, merging segments as
  /// appropriate.  This returns an iterator to the inserted segment (which
  /// may have grown since it was inserted).
  LLVM_ABI iterator addSegment(Segment S);

  /// Attempt to extend a value defined after @p StartIdx to include @p Use.
  /// Both @p StartIdx and @p Use should be in the same basic block. In case
  /// of subranges, an extension could be prevented by an explicit "undef"
  /// caused by a <def,read-undef> on a non-overlapping lane. The list of
  /// location of such "undefs" should be provided in @p Undefs.
  /// The return value is a pair: the first element is VNInfo of the value
  /// that was extended (possibly nullptr), the second is a boolean value
  /// indicating whether an "undef" was encountered.
  /// If this range is live before @p Use in the basic block that starts at
  /// @p StartIdx, and there is no intervening "undef", extend it to be live
  /// up to @p Use, and return the pair {value, false}. If there is no
  /// segment before @p Use and there is no "undef" between @p StartIdx and
  /// @p Use, return {nullptr, false}. If there is an "undef" before @p Use,
  /// return {nullptr, true}.
  LLVM_ABI std::pair<VNInfo *, bool> extendInBlock(ArrayRef<SlotIndex> Undefs,
                                                   SlotIndex StartIdx,
                                                   SlotIndex Kill);

  /// Simplified version of the above "extendInBlock", which assumes that
  /// no register lanes are undefined by <def,read-undef> operands.
  /// If this range is live before @p Use in the basic block that starts
  /// at @p StartIdx, extend it to be live up to @p Use, and return the
  /// value. If there is no segment before @p Use, return nullptr.
  LLVM_ABI VNInfo *extendInBlock(SlotIndex StartIdx, SlotIndex Kill);

  /// join - Join two live ranges (this, and other) together.  This applies
  /// mappings to the value numbers in the LHS/RHS ranges as specified.  If
  /// the ranges are not joinable, this aborts.
  LLVM_ABI void join(LiveRange &Other, const int *ValNoAssignments,
                     const int *RHSValNoAssignments,
                     SmallVectorImpl<VNInfo *> &NewVNInfo);

  /// True iff this segment is a single segment that lies between the
  /// specified boundaries, exclusively. Vregs live across a backedge are not
  /// considered local. The boundaries are expected to lie within an extended
  /// basic block, so vregs that are not live out should contain no holes.
  bool isLocal(SlotIndex Start, SlotIndex End) const {
    return beginIndex() > Start.getBaseIndex() &&
           endIndex() < End.getBoundaryIndex();
  }

  /// Remove the specified interval from this live range.
  /// Does nothing if interval is not part of this live range.
  /// Note that the interval must be within a single Segment in its entirety.
  LLVM_ABI void removeSegment(SlotIndex Start, SlotIndex End,
                              bool RemoveDeadValNo = false);

  void removeSegment(Segment S, bool RemoveDeadValNo = false) {
    removeSegment(S.start, S.end, RemoveDeadValNo);
  }

  /// Remove segment pointed to by iterator @p I from this range.
  LLVM_ABI iterator removeSegment(iterator I, bool RemoveDeadValNo = false);

  /// Mark \p ValNo for deletion if no segments in this range use it.
  LLVM_ABI void removeValNoIfDead(VNInfo *ValNo);

  /// Query Liveness at Idx.
  /// The sub-instruction slot of Idx doesn't matter, only the instruction
  /// it refers to is considered.
  LiveQueryResult Query(SlotIndex Idx) const {
    // Find the segment that enters the instruction.
    const_iterator I = find(Idx.getBaseIndex());
    const_iterator E = end();
    if (I == E)
      return LiveQueryResult(nullptr, nullptr, SlotIndex(), false);

    // Is this an instruction live-in segment?
    // If Idx is the start index of a basic block, include live-in segments
    // that start at Idx.getBaseIndex().
    VNInfo *EarlyVal = nullptr;
    VNInfo *LateVal  = nullptr;
    SlotIndex EndPoint;
    bool Kill = false;
    if (I->start <= Idx.getBaseIndex()) {
      EarlyVal = I->valno;
      EndPoint = I->end;
      // Move to the potentially live-out segment.
      if (SlotIndex::isSameInstr(Idx, I->end)) {
        Kill = true;
        if (++I == E)
          return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
      }
      // Special case: A PHIDef value can have its def in the middle of a
      // segment if the value happens to be live out of the layout
      // predecessor.
      // Such a value is not live-in.
      if (EarlyVal->def == Idx.getBaseIndex())
        EarlyVal = nullptr;
    }
    // I now points to the segment that may be live-through, or defined by
    // this instr. Ignore segments starting after the current instr.
    if (!SlotIndex::isEarlierInstr(Idx, I->start)) {
      LateVal = I->valno;
      EndPoint = I->end;
    }
    return LiveQueryResult(EarlyVal, LateVal, EndPoint, Kill);
  }

  /// removeValNo - Remove all the segments defined by the specified value#.
  /// Also remove the value# from value# list.
  LLVM_ABI void removeValNo(VNInfo *ValNo);

  /// Returns true if the live range is zero length, i.e. no live segments
  /// span instructions. It doesn't pay to spill such a range.
  bool isZeroLength(SlotIndexes *Indexes) const {
    for (const Segment &S : segments)
      if (Indexes->getNextNonNullIndex(S.start).getBaseIndex() <
          S.end.getBaseIndex())
        return false;
    return true;
  }

  // Returns true if any segment in the live range contains any of the
  // provided slot indexes.  Slots which occur in holes between
  // segments will not cause the function to return true.
  LLVM_ABI bool isLiveAtIndexes(ArrayRef<SlotIndex> Slots) const;

  // Live ranges are ordered by their first live slot.
  bool operator<(const LiveRange& other) const {
    const SlotIndex &thisIndex = beginIndex();
    const SlotIndex &otherIndex = other.beginIndex();
    return thisIndex < otherIndex;
  }

  /// Returns true if there is an explicit "undef" between @p Begin
  /// @p End.
  bool isUndefIn(ArrayRef<SlotIndex> Undefs, SlotIndex Begin,
                 SlotIndex End) const {
    return llvm::any_of(Undefs, [Begin, End](SlotIndex Idx) -> bool {
      return Begin <= Idx && Idx < End;
    });
  }

  /// Flush segment set into the regular segment vector.
  /// The method is to be called after the live range
  /// has been created, if use of the segment set was
  /// activated in the constructor of the live range.
  LLVM_ABI void flushSegmentSet();

  /// Stores indexes from the input index sequence R at which this LiveRange
  /// is live to the output O iterator.
  /// R is a range of _ascending sorted_ _random_ access iterators
  /// to the input indexes. Indexes stored at O are ascending sorted so it
  /// can be used directly in the subsequent search (for example for
  /// subranges). Returns true if found at least one index.
  template <typename Range, typename OutputIt>
  bool findIndexesLiveAt(Range &&R, OutputIt O) const {
    assert(llvm::is_sorted(R));
    auto Idx = R.begin(), EndIdx = R.end();
    auto Seg = segments.begin(), EndSeg = segments.end();
    bool Found = false;
    while (Idx != EndIdx && Seg != EndSeg) {
      // if the Seg is lower find first segment that is above Idx using binary
      // search
      if (Seg->end <= *Idx) {
        Seg =
            std::upper_bound(++Seg, EndSeg, *Idx, [=](auto V, const auto &S) {
              return V < S.end;
            });
        if (Seg == EndSeg)
          break;
      }
      auto NotLessStart = std::lower_bound(Idx, EndIdx, Seg->start);
      if (NotLessStart == EndIdx)
        break;
      auto NotLessEnd = std::lower_bound(NotLessStart, EndIdx, Seg->end);
      if (NotLessEnd != NotLessStart) {
        // [NotLessStart, NotLessEnd) are the indexes covered by this segment.
        Found = true;
        O = std::copy(NotLessStart, NotLessEnd, O);
      }
      Idx = NotLessEnd;
      ++Seg;
    }
    return Found;
  }

  LLVM_ABI void print(raw_ostream &OS) const;
  LLVM_ABI void dump() const;

  /// Walk the range and assert if any invariants fail to hold.
  ///
  /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
  [[nodiscard]] bool verify() const { return true; }
#else
  [[nodiscard]] bool verify() const;
#endif

protected:
  /// Append a segment to the list of segments.
  LLVM_ABI void append(const LiveRange::Segment S);

private:
  friend class LiveRangeUpdater;
  void addSegmentToSet(Segment S);
  void markValNoForDeletion(VNInfo *V);
};

inline raw_ostream &operator<<(raw_ostream &OS, const LiveRange &LR) {
  LR.print(OS);
  return OS;
}

/// LiveInterval - This class represents the liveness of a register,
/// or stack slot.
class LiveInterval : public LiveRange {
public:
  using super = LiveRange;

  /// A live range for subregisters. The LaneMask specifies which parts of the
  /// super register are covered by the interval.
  /// (@sa TargetRegisterInfo::getSubRegIndexLaneMask()).
  class SubRange : public LiveRange {
  public:
    SubRange *Next = nullptr;
    LaneBitmask LaneMask;

    /// Constructs a new SubRange object.
    SubRange(LaneBitmask LaneMask) : LaneMask(LaneMask) {}

    /// Constructs a new SubRange object by copying liveness from @p Other.
    SubRange(LaneBitmask LaneMask, const LiveRange &Other,
             BumpPtrAllocator &Allocator)
      : LiveRange(Other, Allocator), LaneMask(LaneMask) {}

    LLVM_ABI void print(raw_ostream &OS) const;
    LLVM_ABI void dump() const;
  };

private:
  SubRange *SubRanges = nullptr; ///< Single linked list of subregister live
                                 /// ranges.
  const Register Reg; // the register or stack slot of this interval.
718 float Weight = 0.0; // weight of this interval 719 720 public: reg()721 Register reg() const { return Reg; } weight()722 float weight() const { return Weight; } incrementWeight(float Inc)723 void incrementWeight(float Inc) { Weight += Inc; } setWeight(float Value)724 void setWeight(float Value) { Weight = Value; } 725 LiveInterval(Register Reg,float Weight)726 LiveInterval(Register Reg, float Weight) : Reg(Reg), Weight(Weight) {} 727 ~LiveInterval()728 ~LiveInterval() { 729 clearSubRanges(); 730 } 731 732 template<typename T> 733 class SingleLinkedListIterator { 734 T *P; 735 736 public: 737 using difference_type = ptrdiff_t; 738 using value_type = T; 739 using pointer = T *; 740 using reference = T &; 741 using iterator_category = std::forward_iterator_tag; 742 SingleLinkedListIterator(T * P)743 SingleLinkedListIterator(T *P) : P(P) {} 744 745 SingleLinkedListIterator<T> &operator++() { 746 P = P->Next; 747 return *this; 748 } 749 SingleLinkedListIterator<T> operator++(int) { 750 SingleLinkedListIterator res = *this; 751 ++*this; 752 return res; 753 } 754 bool operator!=(const SingleLinkedListIterator<T> &Other) const { 755 return P != Other.operator->(); 756 } 757 bool operator==(const SingleLinkedListIterator<T> &Other) const { 758 return P == Other.operator->(); 759 } 760 T &operator*() const { 761 return *P; 762 } 763 T *operator->() const { 764 return P; 765 } 766 }; 767 768 using subrange_iterator = SingleLinkedListIterator<SubRange>; 769 using const_subrange_iterator = SingleLinkedListIterator<const SubRange>; 770 subrange_begin()771 subrange_iterator subrange_begin() { 772 return subrange_iterator(SubRanges); 773 } subrange_end()774 subrange_iterator subrange_end() { 775 return subrange_iterator(nullptr); 776 } 777 subrange_begin()778 const_subrange_iterator subrange_begin() const { 779 return const_subrange_iterator(SubRanges); 780 } subrange_end()781 const_subrange_iterator subrange_end() const { 782 return const_subrange_iterator(nullptr); 783 } 784 
subranges()785 iterator_range<subrange_iterator> subranges() { 786 return make_range(subrange_begin(), subrange_end()); 787 } 788 subranges()789 iterator_range<const_subrange_iterator> subranges() const { 790 return make_range(subrange_begin(), subrange_end()); 791 } 792 793 /// Creates a new empty subregister live range. The range is added at the 794 /// beginning of the subrange list; subrange iterators stay valid. createSubRange(BumpPtrAllocator & Allocator,LaneBitmask LaneMask)795 SubRange *createSubRange(BumpPtrAllocator &Allocator, 796 LaneBitmask LaneMask) { 797 SubRange *Range = new (Allocator) SubRange(LaneMask); 798 appendSubRange(Range); 799 return Range; 800 } 801 802 /// Like createSubRange() but the new range is filled with a copy of the 803 /// liveness information in @p CopyFrom. createSubRangeFrom(BumpPtrAllocator & Allocator,LaneBitmask LaneMask,const LiveRange & CopyFrom)804 SubRange *createSubRangeFrom(BumpPtrAllocator &Allocator, 805 LaneBitmask LaneMask, 806 const LiveRange &CopyFrom) { 807 SubRange *Range = new (Allocator) SubRange(LaneMask, CopyFrom, Allocator); 808 appendSubRange(Range); 809 return Range; 810 } 811 812 /// Returns true if subregister liveness information is available. hasSubRanges()813 bool hasSubRanges() const { 814 return SubRanges != nullptr; 815 } 816 817 /// Removes all subregister liveness information. 818 LLVM_ABI void clearSubRanges(); 819 820 /// Removes all subranges without any segments (subranges without segments 821 /// are not considered valid and should only exist temporarily). 822 LLVM_ABI void removeEmptySubRanges(); 823 824 /// getSize - Returns the sum of sizes of all the LiveRange's. 825 /// 826 LLVM_ABI unsigned getSize() const; 827 828 /// isSpillable - Can this interval be spilled? 
  bool isSpillable() const { return Weight != huge_valf; }

  /// markNotSpillable - Mark interval as not spillable.
  /// huge_valf is the sentinel weight checked by isSpillable().
  void markNotSpillable() { Weight = huge_valf; }

  /// For a given lane mask @p LaneMask, compute indexes at which the
  /// lane is marked undefined by subregister <def,read-undef> definitions.
  LLVM_ABI void computeSubRangeUndefs(SmallVectorImpl<SlotIndex> &Undefs,
                                      LaneBitmask LaneMask,
                                      const MachineRegisterInfo &MRI,
                                      const SlotIndexes &Indexes) const;

  /// Refines the subranges to support \p LaneMask. This may only be called
  /// for LI.hasSubRanges()==true. Subregister ranges are split or created
  /// until \p LaneMask can be matched exactly. \p Apply is executed on the
  /// matching subranges.
  ///
  /// Example:
  ///    Given an interval with subranges with lanemasks L0F00, L00F0 and
  ///    L000F, refining for mask L0018. Will split the L00F0 lane into
  ///    L00E0 and L0010 and the L000F lane into L0007 and L0008. The Apply
  ///    function will be applied to the L0010 and L0008 subranges.
  ///
  /// \p Indexes and \p TRI are required to clean up the VNIs that
  /// don't define the related lane masks after they get shrunk. E.g.,
  /// when L000F gets split into L0007 and L0008 maybe only a subset
  /// of the VNIs that defined L000F defines L0007.
  ///
  /// The clean up of the VNIs needs to look at the actual instructions
  /// to decide what is or is not live at a definition point. If the
  /// update of the subranges occurs while the IR does not reflect these
  /// changes, \p ComposeSubRegIdx can be used to specify how the
  /// definitions are going to be rewritten.
  /// E.g., let's say we want to merge:
  ///   V1.sub1:<2 x s32> = COPY V2.sub3:<4 x s32>
  /// We do that by choosing a class where sub1:<2 x s32> and sub3:<4 x s32>
  /// overlap, i.e., by choosing a class where we can find "offset + 1 == 3".
  /// Put differently we align V2's sub3 with V1's sub1:
  /// V2: sub0 sub1 sub2 sub3
  /// V1: <offset>      sub0 sub1
  ///
  /// This offset will look like a composed subregidx in the class:
  ///     V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
  /// =>  V1.(composed sub2 with sub1):<4 x s32> = COPY V2.sub3:<4 x s32>
  ///
  /// Now if we didn't rewrite the uses and def of V1, all the checks for V1
  /// need to account for this offset.
  /// This happens during coalescing where we update the live-ranges while
  /// still having the old IR around because updating the IR on-the-fly
  /// would actually clobber some information on how the live-ranges that
  /// are being updated look like.
  LLVM_ABI void
  refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask,
                  std::function<void(LiveInterval::SubRange &)> Apply,
                  const SlotIndexes &Indexes, const TargetRegisterInfo &TRI,
                  unsigned ComposeSubRegIdx = 0);

  /// Order intervals by begin index first, breaking ties by register
  /// number, so the ordering is total and deterministic.
  bool operator<(const LiveInterval& other) const {
    const SlotIndex &thisIndex = beginIndex();
    const SlotIndex &otherIndex = other.beginIndex();
    return std::tie(thisIndex, Reg) < std::tie(otherIndex, other.Reg);
  }

  LLVM_ABI void print(raw_ostream &OS) const;
  LLVM_ABI void dump() const;

  /// Walks the interval and asserts if any invariants fail to hold.
  ///
  /// Note that this is a no-op when asserts are disabled.
#ifdef NDEBUG
  [[nodiscard]] bool verify(const MachineRegisterInfo *MRI = nullptr) const {
    return true;
  }
#else
  [[nodiscard]] bool verify(const MachineRegisterInfo *MRI = nullptr) const;
#endif

private:
  /// Appends @p Range to SubRanges list by splicing it in at the head of
  /// the intrusive singly linked list.
  void appendSubRange(SubRange *Range) {
    Range->Next = SubRanges;
    SubRanges = Range;
  }

  /// Free memory held by SubRange.
  void freeSubRange(SubRange *S);
};

inline raw_ostream &operator<<(raw_ostream &OS,
                               const LiveInterval::SubRange &SR) {
  SR.print(OS);
  return OS;
}

inline raw_ostream &operator<<(raw_ostream &OS, const LiveInterval &LI) {
  LI.print(OS);
  return OS;
}

LLVM_ABI raw_ostream &operator<<(raw_ostream &OS,
                                 const LiveRange::Segment &S);

/// Compare a SlotIndex against a segment by the segment's start point.
/// Together with the mirrored overload below this gives a heterogeneous
/// ordering between indexes and segments.
inline bool operator<(SlotIndex V, const LiveRange::Segment &S) {
  return V < S.start;
}

inline bool operator<(const LiveRange::Segment &S, SlotIndex V) {
  return S.start < V;
}

/// Helper class for performant LiveRange bulk updates.
///
/// Calling LiveRange::addSegment() repeatedly can be expensive on large
/// live ranges because segments after the insertion point may need to be
/// shifted. The LiveRangeUpdater class can defer the shifting when adding
/// many segments in order.
///
/// The LiveRange will be in an invalid state until flush() is called.
class LiveRangeUpdater {
  LiveRange *LR;       // Destination live range being updated.
  SlotIndex LastStart; // Doubles as the dirty flag: isDirty() checks
                       // LastStart.isValid().
  LiveRange::iterator WriteI;
  LiveRange::iterator ReadI;
  // Segments held aside during an update; merged back by mergeSpills().
  SmallVector<LiveRange::Segment, 16> Spills;
  void mergeSpills();

public:
  /// Create a LiveRangeUpdater for adding segments to LR.
  /// LR will temporarily be in an invalid state until flush() is called.
  LiveRangeUpdater(LiveRange *lr = nullptr) : LR(lr) {}

  // Destructor flushes any pending segments so LR is left valid.
  ~LiveRangeUpdater() { flush(); }

  /// Add a segment to LR and coalesce when possible, just like
  /// LR.addSegment(). Segments should be added in increasing start order for
  /// best performance.
  LLVM_ABI void add(LiveRange::Segment);

  /// Convenience overload: builds a Segment and forwards to add(Segment).
  void add(SlotIndex Start, SlotIndex End, VNInfo *VNI) {
    add(LiveRange::Segment(Start, End, VNI));
  }

  /// Return true if the LR is currently in an invalid state, and flush()
  /// needs to be called.
isDirty()973 bool isDirty() const { return LastStart.isValid(); } 974 975 /// Flush the updater state to LR so it is valid and contains all added 976 /// segments. 977 LLVM_ABI void flush(); 978 979 /// Select a different destination live range. setDest(LiveRange * lr)980 void setDest(LiveRange *lr) { 981 if (LR != lr && isDirty()) 982 flush(); 983 LR = lr; 984 } 985 986 /// Get the current destination live range. getDest()987 LiveRange *getDest() const { return LR; } 988 989 LLVM_ABI void dump() const; 990 LLVM_ABI void print(raw_ostream &) const; 991 }; 992 993 inline raw_ostream &operator<<(raw_ostream &OS, const LiveRangeUpdater &X) { 994 X.print(OS); 995 return OS; 996 } 997 998 /// ConnectedVNInfoEqClasses - Helper class that can divide VNInfos in a 999 /// LiveInterval into equivalence clases of connected components. A 1000 /// LiveInterval that has multiple connected components can be broken into 1001 /// multiple LiveIntervals. 1002 /// 1003 /// Given a LiveInterval that may have multiple connected components, run: 1004 /// 1005 /// unsigned numComps = ConEQ.Classify(LI); 1006 /// if (numComps > 1) { 1007 /// // allocate numComps-1 new LiveIntervals into LIS[1..] 1008 /// ConEQ.Distribute(LIS); 1009 /// } 1010 1011 class ConnectedVNInfoEqClasses { 1012 LiveIntervals &LIS; 1013 IntEqClasses EqClass; 1014 1015 public: ConnectedVNInfoEqClasses(LiveIntervals & lis)1016 explicit ConnectedVNInfoEqClasses(LiveIntervals &lis) : LIS(lis) {} 1017 1018 /// Classify the values in \p LR into connected components. 1019 /// Returns the number of connected components. 1020 LLVM_ABI unsigned Classify(const LiveRange &LR); 1021 1022 /// getEqClass - Classify creates equivalence classes numbered 0..N. Return 1023 /// the equivalence class assigned the VNI. getEqClass(const VNInfo * VNI)1024 unsigned getEqClass(const VNInfo *VNI) const { return EqClass[VNI->id]; } 1025 1026 /// Distribute values in \p LI into a separate LiveIntervals 1027 /// for each connected component. 
LIV must have an empty LiveInterval for 1028 /// each additional connected component. The first connected component is 1029 /// left in \p LI. 1030 LLVM_ABI void Distribute(LiveInterval &LI, LiveInterval *LIV[], 1031 MachineRegisterInfo &MRI); 1032 }; 1033 1034 } // end namespace llvm 1035 1036 #endif // LLVM_CODEGEN_LIVEINTERVAL_H 1037