//===- InstrRefBasedImpl.cpp - Tracking Debug Value MIs -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file InstrRefBasedImpl.cpp
///
/// This is a separate implementation of LiveDebugValues, see
/// LiveDebugValues.cpp and VarLocBasedImpl.cpp for more information.
///
/// This pass propagates variable locations between basic blocks, resolving
/// control flow conflicts between them. The problem is essentially SSA
/// construction, where each debug instruction assigns the *value* that a
/// variable has, and every instruction where the variable is in scope uses
/// that variable. The resulting map of instruction-to-value is then translated
/// into a register (or spill) location for each variable over each instruction.
///
/// The primary difference from normal SSA construction is that we cannot
/// _create_ PHI values that contain variable values. CodeGen has already
/// completed, and we can't alter it just to make debug-info complete. Thus:
/// we can identify function positions where we would like a PHI value for a
/// variable, but must search the MachineFunction to see whether such a PHI is
/// available. If no such PHI exists, the variable location must be dropped.
///
/// To achieve this, we perform two kinds of analysis. First, we identify
/// every value defined by every instruction (ignoring those that only move
/// another value), then re-compute an SSA-form representation of the
/// MachineFunction, using value propagation to eliminate any unnecessary
/// PHI values. This gives us a map of every value computed in the function,
/// and its location within the register file / stack.
///
/// Secondly, for each variable we perform the same analysis, where each debug
/// instruction is considered a def, and every instruction where the variable
/// is in lexical scope is a use. Value propagation is used again to eliminate
/// any unnecessary PHIs. This gives us a map of each variable to the value
/// it should have in a block.
///
/// Once both are complete, we have two maps for each block:
///  * Variables to the values they should have,
///  * Values to the register / spill slot they are located in.
/// After which we can marry up variable values with a location, and emit
/// DBG_VALUE instructions specifying those locations. Variable locations may
/// be dropped in this process due to the desired variable value not being
/// resident in any machine location, or because there is no PHI value in any
/// location that accurately represents the desired value. The building of
/// location lists for each block is left to DbgEntityHistoryCalculator.
///
/// This pass is kept efficient because the size of the first SSA problem
/// is proportional to the working-set size of the function, which the compiler
/// tries to keep small. (It's also proportional to the number of blocks).
/// Additionally, we repeatedly perform the second SSA problem analysis with
/// only the variables and blocks in a single lexical scope, exploiting their
/// locality.
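///
/// For illustration only (a hypothetical, simplified x86 fragment, not part
/// of the original commentary):
///
///   $rax = MOV64ri 0                  ; defines a value, "V"
///   DBG_VALUE $rax, $noreg, !"x", ... ; variable x is assigned value V
///   $rbx = COPY $rax                  ; V now lives in both $rax and $rbx
///   $rax = MOV64ri 1                  ; $rax is clobbered; V survives in $rbx
///
/// The first (machine value location) problem records that V is still
/// resident in $rbx at the end of the block; the second (variable value)
/// problem records that x should still contain V. Combining the two lets the
/// pass emit a DBG_VALUE of $rbx for x after $rax is clobbered.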
///
/// ### Terminology
///
/// A machine location is a register or spill slot, a value is something that's
/// defined by an instruction or PHI node, while a variable value is the value
/// assigned to a variable. A variable location is a machine location that must
/// contain the appropriate variable value. A value that is a PHI node is
/// occasionally called an mphi.
///
/// The first SSA problem is the "machine value location" problem,
/// because we're determining which machine locations contain which values.
/// The "locations" are constant: what's unknown is what value they contain.
///
/// The second SSA problem (the one for variables) is the "variable value
/// problem", because it's determining what values a variable has, rather than
/// what location those values are placed in.
///
/// TODO:
///   Overlapping fragments
///   Entry values
///   Add back DEBUG statements for debugging this
///   Collect statistics
///
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/LexicalScopes.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineInstrBundle.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/GenericIteratedDominanceFrontier.h"
#include "llvm/Support/TypeSize.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/SSAUpdaterImpl.h"
#include <algorithm>
#include <cassert>
#include <climits>
#include <cstdint>
#include <functional>
#include <queue>
#include <tuple>
#include <utility>
#include <vector>

#include "InstrRefBasedImpl.h"
#include "LiveDebugValues.h"
#include <optional>

using namespace llvm;
using namespace LiveDebugValues;

// SSAUpdaterImpl sets DEBUG_TYPE, change it.
#undef DEBUG_TYPE
#define DEBUG_TYPE "livedebugvalues"

// Act more like the VarLoc implementation, by propagating some locations too
// far and ignoring some transfers.
static cl::opt<bool> EmulateOldLDV("emulate-old-livedebugvalues", cl::Hidden,
                                   cl::desc("Act like old LiveDebugValues did"),
                                   cl::init(false));

// Limit for the maximum number of stack slots we should track, past which we
// will ignore any spills. InstrRefBasedLDV gathers detailed information on all
// stack slots which leads to high memory consumption, and in some scenarios
// (such as asan with very many locals) the working set of the function can be
// very large, causing many spills. In these scenarios, it is very unlikely that
// the developer has hundreds of variables live at the same time that they're
// carefully thinking about -- instead, they probably autogenerated the code.
// When this happens, gracefully stop tracking excess spill slots, rather than
// consuming all the developer's memory.
static cl::opt<unsigned>
    StackWorkingSetLimit("livedebugvalues-max-stack-slots", cl::Hidden,
                         cl::desc("livedebugvalues-stack-ws-limit"),
                         cl::init(250));

DbgOpID DbgOpID::UndefID = DbgOpID(0xffffffff);

/// Tracker for converting machine value locations and variable values into
/// variable locations (the output of LiveDebugValues), recorded as DBG_VALUEs
/// specifying block live-in locations and transfers within blocks.
///
/// Operating on a per-block basis, this class takes a (pre-loaded) MLocTracker
/// and must be initialized with the set of variable values that are live-in to
/// the block. The caller then repeatedly calls process(). TransferTracker picks
/// out variable locations for the live-in variable values (if there _is_ a
/// location) and creates the corresponding DBG_VALUEs. Then, as the block is
/// stepped through, transfers of values between machine locations are
/// identified and if profitable, a DBG_VALUE created.
///
/// This is where debug use-before-defs would be resolved: a variable with an
/// unavailable value could materialize in the middle of a block, when the
/// value becomes available. Or, we could detect clobbers and re-specify the
/// variable in a backup location. (XXX these are unimplemented).
class TransferTracker {
public:
  const TargetInstrInfo *TII;
  const TargetLowering *TLI;
  /// This machine location tracker is assumed to always contain the up-to-date
  /// value mapping for all machine locations. TransferTracker only reads
  /// information from it. (XXX make it const?)
  MLocTracker *MTracker;
  MachineFunction &MF;
  bool ShouldEmitDebugEntryValues;

  /// Record of all changes in variable locations at a block position. Awkwardly
  /// we allow inserting either before or after the point: MBB != nullptr
  /// indicates it's before, otherwise after.
  struct Transfer {
    MachineBasicBlock::instr_iterator Pos; /// Position to insert DBG_VALUEs
    MachineBasicBlock *MBB;                /// non-null if we should insert after.
    SmallVector<MachineInstr *, 4> Insts;  /// Vector of DBG_VALUEs to insert.
  };

  /// Stores the resolved operands (machine locations and constants) and
  /// qualifying meta-information needed to construct a concrete DBG_VALUE-like
  /// instruction.
  struct ResolvedDbgValue {
    SmallVector<ResolvedDbgOp> Ops;
    DbgValueProperties Properties;

    ResolvedDbgValue(SmallVectorImpl<ResolvedDbgOp> &Ops,
                     DbgValueProperties Properties)
        : Ops(Ops.begin(), Ops.end()), Properties(Properties) {}

    /// Returns all the LocIdx values used in this struct, in the order in which
    /// they appear as operands in the debug value; may contain duplicates.
    auto loc_indices() const {
      return map_range(
          make_filter_range(
              Ops, [](const ResolvedDbgOp &Op) { return !Op.IsConst; }),
          [](const ResolvedDbgOp &Op) { return Op.Loc; });
    }
  };

  /// Collection of transfers (DBG_VALUEs) to be inserted.
  SmallVector<Transfer, 32> Transfers;

  /// Local cache of what-value-is-in-what-LocIdx. Used to identify differences
  /// between TransferTracker's view of variable locations and MLocTracker's.
  /// For example, MLocTracker observes all clobbers, but TransferTracker
  /// lazily does not.
  SmallVector<ValueIDNum, 32> VarLocs;

  /// Map from LocIdxes to which DebugVariables are based on that location.
  /// Maintained while stepping through the block. Not accurate if
  /// VarLocs[Idx] != MTracker->LocIdxToIDNum[Idx].
  DenseMap<LocIdx, SmallSet<DebugVariable, 4>> ActiveMLocs;

  /// Map from DebugVariable to its current location and qualifying meta
  /// information. To be used in conjunction with ActiveMLocs to construct
  /// enough information for the DBG_VALUEs for a particular LocIdx.
  DenseMap<DebugVariable, ResolvedDbgValue> ActiveVLocs;

  /// Temporary cache of DBG_VALUEs to be entered into the Transfers collection.
  SmallVector<MachineInstr *, 4> PendingDbgValues;

  /// Record of a use-before-def: created when a value that's live-in to the
  /// current block isn't available in any machine location, but it will be
  /// defined in this block.
  struct UseBeforeDef {
    /// Value of this variable, def'd in block.
    SmallVector<DbgOp> Values;
    /// Identity of this variable.
    DebugVariable Var;
    /// Additional variable properties.
    DbgValueProperties Properties;
    UseBeforeDef(ArrayRef<DbgOp> Values, const DebugVariable &Var,
                 const DbgValueProperties &Properties)
        : Values(Values.begin(), Values.end()), Var(Var),
          Properties(Properties) {}
  };

  /// Map from instruction index (within the block) to the set of UseBeforeDefs
  /// that become defined at that instruction.
  DenseMap<unsigned, SmallVector<UseBeforeDef, 1>> UseBeforeDefs;

  /// The set of variables that are in UseBeforeDefs and can become a location
  /// once the relevant value is defined. An element being erased from this
  /// collection prevents the use-before-def materializing.
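  /// For illustration only (a hypothetical scenario, not from the original
  /// commentary): a DBG_INSTR_REF at the start of a block may name a value
  /// that is only defined part-way through that block. The variable is
  /// recorded as a use-before-def, and if nothing removes it from this set in
  /// the meantime, a DBG_VALUE is emitted immediately after the defining
  /// instruction.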
  DenseSet<DebugVariable> UseBeforeDefVariables;

  const TargetRegisterInfo &TRI;
  const BitVector &CalleeSavedRegs;

  TransferTracker(const TargetInstrInfo *TII, MLocTracker *MTracker,
                  MachineFunction &MF, const TargetRegisterInfo &TRI,
                  const BitVector &CalleeSavedRegs, const TargetPassConfig &TPC)
      : TII(TII), MTracker(MTracker), MF(MF), TRI(TRI),
        CalleeSavedRegs(CalleeSavedRegs) {
    TLI = MF.getSubtarget().getTargetLowering();
    auto &TM = TPC.getTM<TargetMachine>();
    ShouldEmitDebugEntryValues = TM.Options.ShouldEmitDebugEntryValues();
  }

  bool isCalleeSaved(LocIdx L) const {
    unsigned Reg = MTracker->LocIdxToLocID[L];
    if (Reg >= MTracker->NumRegs)
      return false;
    for (MCRegAliasIterator RAI(Reg, &TRI, true); RAI.isValid(); ++RAI)
      if (CalleeSavedRegs.test(*RAI))
        return true;
    return false;
  };

  // An estimate of the expected lifespan of values at a machine location, with
  // a greater value corresponding to a longer expected lifespan, i.e. spill
  // slots generally live longer than callee-saved registers which generally
  // live longer than non-callee-saved registers. The minimum value of 0
  // corresponds to an illegal location that cannot have a "lifespan" at all.
  enum class LocationQuality : unsigned char {
    Illegal = 0,
    Register,
    CalleeSavedRegister,
    SpillSlot,
    Best = SpillSlot
  };

  class LocationAndQuality {
    unsigned Location : 24;
    unsigned Quality : 8;

  public:
    LocationAndQuality() : Location(0), Quality(0) {}
    LocationAndQuality(LocIdx L, LocationQuality Q)
        : Location(L.asU64()), Quality(static_cast<unsigned>(Q)) {}
    LocIdx getLoc() const {
      if (!Quality)
        return LocIdx::MakeIllegalLoc();
      return LocIdx(Location);
    }
    LocationQuality getQuality() const { return LocationQuality(Quality); }
    bool isIllegal() const { return !Quality; }
    bool isBest() const { return getQuality() == LocationQuality::Best; }
  };

  // Returns the LocationQuality for the location L iff the quality of L is
  // strictly greater than the provided minimum quality.
  std::optional<LocationQuality>
  getLocQualityIfBetter(LocIdx L, LocationQuality Min) const {
    if (L.isIllegal())
      return std::nullopt;
    if (Min >= LocationQuality::SpillSlot)
      return std::nullopt;
    if (MTracker->isSpill(L))
      return LocationQuality::SpillSlot;
    if (Min >= LocationQuality::CalleeSavedRegister)
      return std::nullopt;
    if (isCalleeSaved(L))
      return LocationQuality::CalleeSavedRegister;
    if (Min >= LocationQuality::Register)
      return std::nullopt;
    return LocationQuality::Register;
  }

  /// For a variable \p Var with the live-in value \p Value, attempts to resolve
  /// the DbgValue to a concrete DBG_VALUE, emitting that value and loading the
  /// tracking information to track Var throughout the block.
  /// \p ValueToLoc is a map containing the best known location for every
  /// ValueIDNum that Value may use.
  /// \p MBB is the basic block that we are loading the live-in value for.
  /// \p DbgOpStore is the map containing the DbgOpID->DbgOp mapping needed to
  /// determine the values used by Value.
  void loadVarInloc(MachineBasicBlock &MBB, DbgOpIDMap &DbgOpStore,
                    const DenseMap<ValueIDNum, LocationAndQuality> &ValueToLoc,
                    DebugVariable Var, DbgValue Value) {
    SmallVector<DbgOp> DbgOps;
    SmallVector<ResolvedDbgOp> ResolvedDbgOps;
    bool IsValueValid = true;
    unsigned LastUseBeforeDef = 0;

    // If every value used by the incoming DbgValue is available at block
    // entry, ResolvedDbgOps will contain the machine locations/constants for
    // those values and will be used to emit a debug location.
    // If one or more values are not yet available, but will all be defined in
    // this block, then LastUseBeforeDef will track the instruction index in
    // this BB at which the last of those values is defined, and DbgOps will
    // contain the values that we will emit when we reach that instruction.
    // If one or more values are undef or not available throughout this block,
    // and we can't recover as an entry value, we set IsValueValid=false and
    // skip this variable.
    for (DbgOpID ID : Value.getDbgOpIDs()) {
      DbgOp Op = DbgOpStore.find(ID);
      DbgOps.push_back(Op);
      if (ID.isUndef()) {
        IsValueValid = false;
        break;
      }
      if (ID.isConst()) {
        ResolvedDbgOps.push_back(Op.MO);
        continue;
      }

      // If the value has no location, we can't make a variable location.
      const ValueIDNum &Num = Op.ID;
      auto ValuesPreferredLoc = ValueToLoc.find(Num);
      if (ValuesPreferredLoc->second.isIllegal()) {
        // If it's a def that occurs in this block, register it as a
        // use-before-def to be resolved as we step through the block.
        // Continue processing values so that we add any other UseBeforeDef
        // entries needed for later.
        if (Num.getBlock() == (unsigned)MBB.getNumber() && !Num.isPHI()) {
          LastUseBeforeDef = std::max(LastUseBeforeDef,
                                      static_cast<unsigned>(Num.getInst()));
          continue;
        }
        recoverAsEntryValue(Var, Value.Properties, Num);
        IsValueValid = false;
        break;
      }

      // Defer modifying ActiveVLocs until after we've confirmed we have a
      // live range.
      LocIdx M = ValuesPreferredLoc->second.getLoc();
      ResolvedDbgOps.push_back(M);
    }

    // If we cannot produce a valid value for the LiveIn value within this
    // block, skip this variable.
    if (!IsValueValid)
      return;

    // Add UseBeforeDef entry for the last value to be defined in this block.
    if (LastUseBeforeDef) {
      addUseBeforeDef(Var, Value.Properties, DbgOps,
                      LastUseBeforeDef);
      return;
    }

    // The LiveIn value is available at block entry, begin tracking and record
    // the transfer.
    for (const ResolvedDbgOp &Op : ResolvedDbgOps)
      if (!Op.IsConst)
        ActiveMLocs[Op.Loc].insert(Var);
    auto NewValue = ResolvedDbgValue{ResolvedDbgOps, Value.Properties};
    auto Result = ActiveVLocs.insert(std::make_pair(Var, NewValue));
    if (!Result.second)
      Result.first->second = NewValue;
    PendingDbgValues.push_back(
        MTracker->emitLoc(ResolvedDbgOps, Var, Value.Properties));
  }

  /// Load object with live-in variable values. \p MLocs contains the live-in
  /// values in each machine location, while \p VLocs contains the live-in
  /// variable values. This method picks variable locations for the live-in
  /// variables, creates DBG_VALUEs and puts them in #Transfers, then prepares
  /// the other object fields to track variable locations as we step through
  /// the block.
  /// FIXME: could just examine mloctracker instead of passing in \p MLocs?
  void
  loadInlocs(MachineBasicBlock &MBB, ValueTable &MLocs, DbgOpIDMap &DbgOpStore,
             const SmallVectorImpl<std::pair<DebugVariable, DbgValue>> &VLocs,
             unsigned NumLocs) {
    ActiveMLocs.clear();
    ActiveVLocs.clear();
    VarLocs.clear();
    VarLocs.reserve(NumLocs);
    UseBeforeDefs.clear();
    UseBeforeDefVariables.clear();

    // Map of the preferred location for each value.
    DenseMap<ValueIDNum, LocationAndQuality> ValueToLoc;

    // Initialize the preferred-location map with illegal locations, to be
    // filled in later.
    for (const auto &VLoc : VLocs)
      if (VLoc.second.Kind == DbgValue::Def)
        for (DbgOpID OpID : VLoc.second.getDbgOpIDs())
          if (!OpID.ID.IsConst)
            ValueToLoc.insert({DbgOpStore.find(OpID).ID, LocationAndQuality()});

    ActiveMLocs.reserve(VLocs.size());
    ActiveVLocs.reserve(VLocs.size());

    // Produce a map of value numbers to the current machine locs they live
    // in. When emulating VarLocBasedImpl, there should only be one
    // location; when not, we get to pick.
    for (auto Location : MTracker->locations()) {
      LocIdx Idx = Location.Idx;
      ValueIDNum &VNum = MLocs[Idx.asU64()];
      if (VNum == ValueIDNum::EmptyValue)
        continue;
      VarLocs.push_back(VNum);

      // Is there a variable that wants a location for this value? If not, skip.
      auto VIt = ValueToLoc.find(VNum);
      if (VIt == ValueToLoc.end())
        continue;

      auto &Previous = VIt->second;
      // If this is the first location with that value, pick it. Otherwise,
      // consider whether it's a "longer term" location.
      std::optional<LocationQuality> ReplacementQuality =
          getLocQualityIfBetter(Idx, Previous.getQuality());
      if (ReplacementQuality)
        Previous = LocationAndQuality(Idx, *ReplacementQuality);
    }

    // Now map variables to their picked LocIdxes.
    for (const auto &Var : VLocs) {
      loadVarInloc(MBB, DbgOpStore, ValueToLoc, Var.first, Var.second);
    }
    flushDbgValues(MBB.begin(), &MBB);
  }

  /// Record that \p Var is given the values \p DbgOps, values that become
  /// available later in the function.
  void addUseBeforeDef(const DebugVariable &Var,
                       const DbgValueProperties &Properties,
                       const SmallVectorImpl<DbgOp> &DbgOps, unsigned Inst) {
    UseBeforeDefs[Inst].emplace_back(DbgOps, Var, Properties);
    UseBeforeDefVariables.insert(Var);
  }

  /// After the instruction at index \p Inst and position \p pos has been
  /// processed, check whether it defines a variable value in a use-before-def.
  /// If so, and the variable value hasn't changed since the start of the
  /// block, create a DBG_VALUE.
  void checkInstForNewValues(unsigned Inst, MachineBasicBlock::iterator pos) {
    auto MIt = UseBeforeDefs.find(Inst);
    if (MIt == UseBeforeDefs.end())
      return;

    // Map of values to the locations that store them for every value used by
    // the variables that may have become available.
    SmallDenseMap<ValueIDNum, LocationAndQuality> ValueToLoc;

    // Populate ValueToLoc with illegal default mappings for every value used by
    // any UseBeforeDef variables for this instruction.
    for (auto &Use : MIt->second) {
      if (!UseBeforeDefVariables.count(Use.Var))
        continue;

      for (DbgOp &Op : Use.Values) {
        assert(!Op.isUndef() && "UseBeforeDef erroneously created for a "
                                "DbgValue with undef values.");
        if (Op.IsConst)
          continue;

        ValueToLoc.insert({Op.ID, LocationAndQuality()});
      }
    }

    // Exit early if we have no DbgValues to produce.
    if (ValueToLoc.empty())
      return;

    // Determine the best location for each desired value.
    for (auto Location : MTracker->locations()) {
      LocIdx Idx = Location.Idx;
      ValueIDNum &LocValueID = Location.Value;

      // Is there a variable that wants a location for this value? If not, skip.
      auto VIt = ValueToLoc.find(LocValueID);
      if (VIt == ValueToLoc.end())
        continue;

      auto &Previous = VIt->second;
      // If this is the first location with that value, pick it. Otherwise,
      // consider whether it's a "longer term" location.
      std::optional<LocationQuality> ReplacementQuality =
          getLocQualityIfBetter(Idx, Previous.getQuality());
      if (ReplacementQuality)
        Previous = LocationAndQuality(Idx, *ReplacementQuality);
    }

    // Using the map of values to locations, produce a final set of values for
    // this variable.
    for (auto &Use : MIt->second) {
      if (!UseBeforeDefVariables.count(Use.Var))
        continue;

      SmallVector<ResolvedDbgOp> DbgOps;

      for (DbgOp &Op : Use.Values) {
        if (Op.IsConst) {
          DbgOps.push_back(Op.MO);
          continue;
        }
        LocIdx NewLoc = ValueToLoc.find(Op.ID)->second.getLoc();
        if (NewLoc.isIllegal())
          break;
        DbgOps.push_back(NewLoc);
      }

      // If at least one value used by this debug value is no longer available,
      // i.e. one of the values was killed before we finished defining all of
      // the values used by this variable, discard.
      if (DbgOps.size() != Use.Values.size())
        continue;

      // Otherwise, we're good to go.
      PendingDbgValues.push_back(
          MTracker->emitLoc(DbgOps, Use.Var, Use.Properties));
    }
    flushDbgValues(pos, nullptr);
  }

  /// Helper to move created DBG_VALUEs into Transfers collection.
  void flushDbgValues(MachineBasicBlock::iterator Pos, MachineBasicBlock *MBB) {
    if (PendingDbgValues.size() == 0)
      return;

    // Pick out the instruction start position.
    MachineBasicBlock::instr_iterator BundleStart;
    if (MBB && Pos == MBB->begin())
      BundleStart = MBB->instr_begin();
    else
      BundleStart = getBundleStart(Pos->getIterator());

    Transfers.push_back({BundleStart, MBB, PendingDbgValues});
    PendingDbgValues.clear();
  }

  bool isEntryValueVariable(const DebugVariable &Var,
                            const DIExpression *Expr) const {
    if (!Var.getVariable()->isParameter())
      return false;

    if (Var.getInlinedAt())
      return false;

    if (Expr->getNumElements() > 0 && !Expr->isDeref())
      return false;

    return true;
  }

  bool isEntryValueValue(const ValueIDNum &Val) const {
    // Must be in entry block (block number zero), and be a PHI / live-in value.
    if (Val.getBlock() || !Val.isPHI())
      return false;

    // Entry values must enter in a register.
    if (MTracker->isSpill(Val.getLoc()))
      return false;

    Register SP = TLI->getStackPointerRegisterToSaveRestore();
    Register FP = TRI.getFrameRegister(MF);
    Register Reg = MTracker->LocIdxToLocID[Val.getLoc()];
    return Reg != SP && Reg != FP;
  }

  bool recoverAsEntryValue(const DebugVariable &Var,
                           const DbgValueProperties &Prop,
                           const ValueIDNum &Num) {
    // Is this variable location a candidate to be an entry value? First,
    // should we be trying this at all?
    if (!ShouldEmitDebugEntryValues)
      return false;

    const DIExpression *DIExpr = Prop.DIExpr;

    // We don't currently emit entry values for DBG_VALUE_LISTs.
    if (Prop.IsVariadic) {
      // If this debug value can be converted to be non-variadic, then do so;
      // otherwise give up.
      auto NonVariadicExpression =
          DIExpression::convertToNonVariadicExpression(DIExpr);
      if (!NonVariadicExpression)
        return false;
      DIExpr = *NonVariadicExpression;
    }

    // Is the variable appropriate for entry values (i.e., is it a parameter)?
    if (!isEntryValueVariable(Var, DIExpr))
      return false;

    // Is the value assigned to this variable still the entry value?
    if (!isEntryValueValue(Num))
      return false;

    // Emit a variable location using an entry value expression.
    DIExpression *NewExpr =
        DIExpression::prepend(DIExpr, DIExpression::EntryValue);
    Register Reg = MTracker->LocIdxToLocID[Num.getLoc()];
    MachineOperand MO = MachineOperand::CreateReg(Reg, false);

    PendingDbgValues.push_back(
        emitMOLoc(MO, Var, {NewExpr, Prop.Indirect, false}));
    return true;
  }

  /// Change a variable value after encountering a DBG_VALUE inside a block.
  void redefVar(const MachineInstr &MI) {
    DebugVariable Var(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
    DbgValueProperties Properties(MI);

    // Ignore non-register locations; we don't transfer those.
    if (MI.isUndefDebugValue() ||
        all_of(MI.debug_operands(),
               [](const MachineOperand &MO) { return !MO.isReg(); })) {
      auto It = ActiveVLocs.find(Var);
      if (It != ActiveVLocs.end()) {
        for (LocIdx Loc : It->second.loc_indices())
          ActiveMLocs[Loc].erase(Var);
        ActiveVLocs.erase(It);
      }
      // Any use-before-defs no longer apply.
      UseBeforeDefVariables.erase(Var);
      return;
    }

    SmallVector<ResolvedDbgOp> NewLocs;
    for (const MachineOperand &MO : MI.debug_operands()) {
      if (MO.isReg()) {
        // Any undef regs have already been filtered out above.
        Register Reg = MO.getReg();
        LocIdx NewLoc = MTracker->getRegMLoc(Reg);
        NewLocs.push_back(NewLoc);
      } else {
        NewLocs.push_back(MO);
      }
    }

    redefVar(MI, Properties, NewLocs);
  }

  /// Handle a change in variable location within a block. Terminate the
  /// variable's current location, and record the value it now refers to, so
  /// that we can detect location transfers later on.
  void redefVar(const MachineInstr &MI, const DbgValueProperties &Properties,
                SmallVectorImpl<ResolvedDbgOp> &NewLocs) {
    DebugVariable Var(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
    // Any use-before-defs no longer apply.
    UseBeforeDefVariables.erase(Var);

    // Erase any previous location.
    auto It = ActiveVLocs.find(Var);
    if (It != ActiveVLocs.end()) {
      for (LocIdx Loc : It->second.loc_indices())
        ActiveMLocs[Loc].erase(Var);
    }

    // If there _is_ no new location, all we had to do was erase.
    if (NewLocs.empty()) {
      if (It != ActiveVLocs.end())
        ActiveVLocs.erase(It);
      return;
    }

    SmallVector<std::pair<LocIdx, DebugVariable>> LostMLocs;
    for (ResolvedDbgOp &Op : NewLocs) {
      if (Op.IsConst)
        continue;

      LocIdx NewLoc = Op.Loc;

      // Check whether our local copy of values-by-location in #VarLocs is out
      // of date. Wipe old tracking data for the location if it's been clobbered
      // in the meantime.
      if (MTracker->readMLoc(NewLoc) != VarLocs[NewLoc.asU64()]) {
        for (const auto &P : ActiveMLocs[NewLoc]) {
          auto LostVLocIt = ActiveVLocs.find(P);
          if (LostVLocIt != ActiveVLocs.end()) {
            for (LocIdx Loc : LostVLocIt->second.loc_indices()) {
              // Every active variable mapping for NewLoc will be cleared, no
              // need to track individual variables.
              if (Loc == NewLoc)
                continue;
              LostMLocs.emplace_back(Loc, P);
            }
          }
          ActiveVLocs.erase(P);
        }
        for (const auto &LostMLoc : LostMLocs)
          ActiveMLocs[LostMLoc.first].erase(LostMLoc.second);
        LostMLocs.clear();
        It = ActiveVLocs.find(Var);
        ActiveMLocs[NewLoc.asU64()].clear();
        VarLocs[NewLoc.asU64()] = MTracker->readMLoc(NewLoc);
      }

      ActiveMLocs[NewLoc].insert(Var);
    }

    if (It == ActiveVLocs.end()) {
      ActiveVLocs.insert(
          std::make_pair(Var, ResolvedDbgValue(NewLocs, Properties)));
    } else {
      It->second.Ops.assign(NewLocs);
      It->second.Properties = Properties;
    }
  }

  /// Account for a location \p MLoc being clobbered. Examine the variable
  /// locations that will be terminated, and try to recover them by using
  /// another location. Optionally, given \p MakeUndef, emit a DBG_VALUE to
  /// explicitly terminate a location if it can't be recovered.
  void clobberMloc(LocIdx MLoc, MachineBasicBlock::iterator Pos,
                   bool MakeUndef = true) {
    auto ActiveMLocIt = ActiveMLocs.find(MLoc);
    if (ActiveMLocIt == ActiveMLocs.end())
      return;

    // What was the old variable value?
    ValueIDNum OldValue = VarLocs[MLoc.asU64()];
    clobberMloc(MLoc, OldValue, Pos, MakeUndef);
  }
  /// Overload that takes an explicit value \p OldValue for when the value in
  /// \p MLoc has changed and the TransferTracker's locations have not been
  /// updated yet.
  void clobberMloc(LocIdx MLoc, ValueIDNum OldValue,
                   MachineBasicBlock::iterator Pos, bool MakeUndef = true) {
    auto ActiveMLocIt = ActiveMLocs.find(MLoc);
    if (ActiveMLocIt == ActiveMLocs.end())
      return;

    VarLocs[MLoc.asU64()] = ValueIDNum::EmptyValue;

    // Examine the remaining variable locations: if we can find the same value
    // again, we can recover the location.
    std::optional<LocIdx> NewLoc;
    for (auto Loc : MTracker->locations())
      if (Loc.Value == OldValue)
        NewLoc = Loc.Idx;

    // If there is no location, and we weren't asked to make the variable
    // explicitly undef, then stop here.
    if (!NewLoc && !MakeUndef) {
      // Try and recover a few more locations with entry values.
      for (const auto &Var : ActiveMLocIt->second) {
        auto &Prop = ActiveVLocs.find(Var)->second.Properties;
        recoverAsEntryValue(Var, Prop, OldValue);
      }
      flushDbgValues(Pos, nullptr);
      return;
    }

    // Examine all the variables based on this location.
    DenseSet<DebugVariable> NewMLocs;
    // If no new location has been found, every variable that depends on this
    // MLoc is dead, so end their existing MLoc->Var mappings as well.
    SmallVector<std::pair<LocIdx, DebugVariable>> LostMLocs;
    for (const auto &Var : ActiveMLocIt->second) {
      auto ActiveVLocIt = ActiveVLocs.find(Var);
      // Re-state the variable location: if there's no replacement then NewLoc
      // is std::nullopt and a $noreg DBG_VALUE will be created. Otherwise, a
      // DBG_VALUE identifying the alternative location will be emitted.
      const DbgValueProperties &Properties = ActiveVLocIt->second.Properties;

      // Produce the new list of debug ops - an empty list if no new location
      // was found, or the existing list with the substitution MLoc -> NewLoc
      // otherwise.
      SmallVector<ResolvedDbgOp> DbgOps;
      if (NewLoc) {
        ResolvedDbgOp OldOp(MLoc);
        ResolvedDbgOp NewOp(*NewLoc);
        // Insert illegal ops to overwrite afterwards.
        DbgOps.insert(DbgOps.begin(), ActiveVLocIt->second.Ops.size(),
                      ResolvedDbgOp(LocIdx::MakeIllegalLoc()));
        replace_copy(ActiveVLocIt->second.Ops, DbgOps.begin(), OldOp, NewOp);
      }

      PendingDbgValues.push_back(MTracker->emitLoc(DbgOps, Var, Properties));

      // Update machine locations <=> variable locations maps. Defer updating
      // ActiveMLocs to avoid invalidating the ActiveMLocIt iterator.
      if (!NewLoc) {
        for (LocIdx Loc : ActiveVLocIt->second.loc_indices()) {
          if (Loc != MLoc)
            LostMLocs.emplace_back(Loc, Var);
        }
        ActiveVLocs.erase(ActiveVLocIt);
      } else {
        ActiveVLocIt->second.Ops = DbgOps;
        NewMLocs.insert(Var);
      }
    }

    // Remove variables from ActiveMLocs if they no longer use any other MLocs
    // due to being killed by this clobber.
    for (auto &LocVarIt : LostMLocs) {
      auto LostMLocIt = ActiveMLocs.find(LocVarIt.first);
      assert(LostMLocIt != ActiveMLocs.end() &&
             "Variable was using this MLoc, but ActiveMLocs[MLoc] has no "
             "entries?");
      LostMLocIt->second.erase(LocVarIt.second);
    }

    // We lazily track what locations have which values; if we've found a new
    // location for the clobbered value, remember it.
    if (NewLoc)
      VarLocs[NewLoc->asU64()] = OldValue;

    flushDbgValues(Pos, nullptr);

    // Commit ActiveMLoc changes.
    ActiveMLocIt->second.clear();
    if (!NewMLocs.empty())
      for (auto &Var : NewMLocs)
        ActiveMLocs[*NewLoc].insert(Var);
  }

  /// Transfer variables based on \p Src to be based on \p Dst. This handles
  /// both register copies as well as spills and restores. Creates DBG_VALUEs
  /// describing the movement.
  void transferMlocs(LocIdx Src, LocIdx Dst, MachineBasicBlock::iterator Pos) {
    // Does Src still contain the value num we expect? If not, it's been
    // clobbered in the meantime, and our variable locations are stale.
    if (VarLocs[Src.asU64()] != MTracker->readMLoc(Src))
      return;

    // assert(ActiveMLocs[Dst].size() == 0);
    //^^^ Legitimate scenario on account of un-clobbered slot being assigned to?

    // Move set of active variables from one location to another.
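    // Note: taking a copy of the set of variables based on Src matters here;
    // the DenseMap insertion below may grow the map and invalidate references
    // into it, and ActiveMLocs[Src] is cleared once the variables have moved.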
    auto MovingVars = ActiveMLocs[Src];
    ActiveMLocs[Dst].insert(MovingVars.begin(), MovingVars.end());
    VarLocs[Dst.asU64()] = VarLocs[Src.asU64()];

    // For each variable based on Src, create a location at Dst.
    ResolvedDbgOp SrcOp(Src);
    ResolvedDbgOp DstOp(Dst);
    for (const auto &Var : MovingVars) {
      auto ActiveVLocIt = ActiveVLocs.find(Var);
      assert(ActiveVLocIt != ActiveVLocs.end());

      // Update all instances of Src in the variable's tracked values to Dst.
      std::replace(ActiveVLocIt->second.Ops.begin(),
                   ActiveVLocIt->second.Ops.end(), SrcOp, DstOp);

      MachineInstr *MI = MTracker->emitLoc(ActiveVLocIt->second.Ops, Var,
                                           ActiveVLocIt->second.Properties);
      PendingDbgValues.push_back(MI);
    }
    ActiveMLocs[Src].clear();
    flushDbgValues(Pos, nullptr);

    // XXX XXX XXX "pretend to be old LDV" means dropping all tracking data
    // about the old location.
    if (EmulateOldLDV)
      VarLocs[Src.asU64()] = ValueIDNum::EmptyValue;
  }

  MachineInstrBuilder emitMOLoc(const MachineOperand &MO,
                                const DebugVariable &Var,
                                const DbgValueProperties &Properties) {
    DebugLoc DL = DILocation::get(Var.getVariable()->getContext(), 0, 0,
                                  Var.getVariable()->getScope(),
                                  const_cast<DILocation *>(Var.getInlinedAt()));
    auto MIB = BuildMI(MF, DL, TII->get(TargetOpcode::DBG_VALUE));
    MIB.add(MO);
    if (Properties.Indirect)
      MIB.addImm(0);
    else
      MIB.addReg(0);
    MIB.addMetadata(Var.getVariable());
    MIB.addMetadata(Properties.DIExpr);
    return MIB;
  }
};

//===----------------------------------------------------------------------===//
//            Implementation
//===----------------------------------------------------------------------===//

ValueIDNum ValueIDNum::EmptyValue = {UINT_MAX, UINT_MAX, UINT_MAX};
ValueIDNum ValueIDNum::TombstoneValue = {UINT_MAX, UINT_MAX, UINT_MAX - 1};

#ifndef NDEBUG
void ResolvedDbgOp::dump(const MLocTracker *MTrack) const {
  if (IsConst) {
    dbgs() << MO;
  } else {
    dbgs() << MTrack->LocIdxToName(Loc);
  }
}
void DbgOp::dump(const MLocTracker *MTrack) const {
  if (IsConst) {
    dbgs() << MO;
  } else if (!isUndef()) {
    dbgs() << MTrack->IDAsString(ID);
  }
}
void DbgOpID::dump(const MLocTracker *MTrack, const DbgOpIDMap *OpStore) const {
  if (!OpStore) {
    dbgs() << "ID(" << asU32() << ")";
  } else {
    OpStore->find(*this).dump(MTrack);
  }
}
void DbgValue::dump(const MLocTracker *MTrack,
                    const DbgOpIDMap *OpStore) const {
  if (Kind == NoVal) {
    dbgs() << "NoVal(" << BlockNo << ")";
  } else if (Kind == VPHI || Kind == Def) {
    if (Kind == VPHI)
      dbgs() << "VPHI(" << BlockNo << ",";
    else
      dbgs() << "Def(";
    for (unsigned Idx = 0; Idx < getDbgOpIDs().size(); ++Idx) {
      getDbgOpID(Idx).dump(MTrack, OpStore);
      if (Idx != 0)
        dbgs() << ",";
    }
    dbgs() << ")";
  }
  if (Properties.Indirect)
    dbgs() << " indir";
  if (Properties.DIExpr)
    dbgs() << " " << *Properties.DIExpr;
}
#endif

MLocTracker::MLocTracker(MachineFunction &MF, const TargetInstrInfo &TII,
                         const TargetRegisterInfo &TRI,
                         const TargetLowering &TLI)
    : MF(MF), TII(TII), TRI(TRI), TLI(TLI),
      LocIdxToIDNum(ValueIDNum::EmptyValue), LocIdxToLocID(0) {
  NumRegs = TRI.getNumRegs();
  reset();
  LocIDToLocIdx.resize(NumRegs, LocIdx::MakeIllegalLoc());
  assert(NumRegs < (1u << NUM_LOC_BITS)); // Detect bit packing failure
  // Always track SP. This avoids the implicit clobbering caused by regmasks
  // from affecting its values. (LiveDebugValues disbelieves calls and
  // regmasks that claim to clobber SP).
  Register SP = TLI.getStackPointerRegisterToSaveRestore();
  if (SP) {
    unsigned ID = getLocID(SP);
    (void)lookupOrTrackRegister(ID);

    for (MCRegAliasIterator RAI(SP, &TRI, true); RAI.isValid(); ++RAI)
      SPAliases.insert(*RAI);
  }

  // Build some common stack positions -- full registers being spilt to the
  // stack.
  StackSlotIdxes.insert({{8, 0}, 0});
  StackSlotIdxes.insert({{16, 0}, 1});
  StackSlotIdxes.insert({{32, 0}, 2});
  StackSlotIdxes.insert({{64, 0}, 3});
  StackSlotIdxes.insert({{128, 0}, 4});
  StackSlotIdxes.insert({{256, 0}, 5});
  StackSlotIdxes.insert({{512, 0}, 6});

  // Traverse all the subregister idxes, and ensure there's an index for them.
  // Duplicates are no problem: we're interested in their position in the
  // stack slot; we don't want to type the slot.
  for (unsigned int I = 1; I < TRI.getNumSubRegIndices(); ++I) {
    unsigned Size = TRI.getSubRegIdxSize(I);
    unsigned Offs = TRI.getSubRegIdxOffset(I);
    unsigned Idx = StackSlotIdxes.size();

    // Some subregs have -1, -2 and so forth fed into their fields, to mean
    // special backend things. Ignore those.
    if (Size > 60000 || Offs > 60000)
      continue;

    StackSlotIdxes.insert({{Size, Offs}, Idx});
  }

  // There may also be strange register class sizes (think x86 fp80s).
  for (const TargetRegisterClass *RC : TRI.regclasses()) {
    unsigned Size = TRI.getRegSizeInBits(*RC);

    // We might see special reserved values as sizes, and classes for other
    // stuff the machine tries to model. If it's more than 512 bits, then it
    // is very unlikely to be a register that can be spilt.
    if (Size > 512)
      continue;

    unsigned Idx = StackSlotIdxes.size();
    StackSlotIdxes.insert({{Size, 0}, Idx});
  }

  for (auto &Idx : StackSlotIdxes)
    StackIdxesToPos[Idx.second] = Idx.first;

  NumSlotIdxes = StackSlotIdxes.size();
}

LocIdx MLocTracker::trackRegister(unsigned ID) {
  assert(ID != 0);
  LocIdx NewIdx = LocIdx(LocIdxToIDNum.size());
  LocIdxToIDNum.grow(NewIdx);
  LocIdxToLocID.grow(NewIdx);

  // Default: it's an mphi.
  ValueIDNum ValNum = {CurBB, 0, NewIdx};
  // Was this reg ever touched by a regmask?
  for (const auto &MaskPair : reverse(Masks)) {
    if (MaskPair.first->clobbersPhysReg(ID)) {
      // There was an earlier def we skipped.
      ValNum = {CurBB, MaskPair.second, NewIdx};
      break;
    }
  }

  LocIdxToIDNum[NewIdx] = ValNum;
  LocIdxToLocID[NewIdx] = ID;
  return NewIdx;
}

void MLocTracker::writeRegMask(const MachineOperand *MO, unsigned CurBB,
                               unsigned InstID) {
  // Def any register we track that isn't preserved. The regmask
  // terminates the liveness of a register, meaning its value can't be
  // relied upon -- we represent this by giving it a new value.
  for (auto Location : locations()) {
    unsigned ID = LocIdxToLocID[Location.Idx];
    // Don't clobber SP, even if the mask says it's clobbered.
    if (ID < NumRegs && !SPAliases.count(ID) && MO->clobbersPhysReg(ID))
      defReg(ID, CurBB, InstID);
  }
  Masks.push_back(std::make_pair(MO, InstID));
}

std::optional<SpillLocationNo> MLocTracker::getOrTrackSpillLoc(SpillLoc L) {
  SpillLocationNo SpillID(SpillLocs.idFor(L));

  if (SpillID.id() == 0) {
    // If there is no location, and we have reached the limit of how many stack
    // slots to track, then don't track this one.
    if (SpillLocs.size() >= StackWorkingSetLimit)
      return std::nullopt;

    // Spill location is untracked: create record for this one, and all
    // subregister slots too.
    SpillID = SpillLocationNo(SpillLocs.insert(L));
    for (unsigned StackIdx = 0; StackIdx < NumSlotIdxes; ++StackIdx) {
      unsigned L = getSpillIDWithIdx(SpillID, StackIdx);
      LocIdx Idx = LocIdx(LocIdxToIDNum.size()); // New idx
      LocIdxToIDNum.grow(Idx);
      LocIdxToLocID.grow(Idx);
      LocIDToLocIdx.push_back(Idx);
      LocIdxToLocID[Idx] = L;
      // Initialize to PHI value; corresponds to the location's live-in value
      // during transfer function construction.
      LocIdxToIDNum[Idx] = ValueIDNum(CurBB, 0, Idx);
    }
  }
  return SpillID;
}

std::string MLocTracker::LocIdxToName(LocIdx Idx) const {
  unsigned ID = LocIdxToLocID[Idx];
  if (ID >= NumRegs) {
    StackSlotPos Pos = locIDToSpillIdx(ID);
    ID -= NumRegs;
    unsigned Slot = ID / NumSlotIdxes;
    return Twine("slot ")
        .concat(Twine(Slot).concat(Twine(" sz ").concat(
            Twine(Pos.first).concat(
                Twine(" offs ").concat(Twine(Pos.second))))))
        .str();
  } else {
    return TRI.getRegAsmName(ID).str();
  }
}

std::string MLocTracker::IDAsString(const ValueIDNum &Num) const {
  std::string DefName = LocIdxToName(Num.getLoc());
  return Num.asString(DefName);
}

#ifndef NDEBUG
LLVM_DUMP_METHOD void MLocTracker::dump() {
  for (auto Location : locations()) {
    std::string MLocName = LocIdxToName(Location.Value.getLoc());
    std::string DefName = Location.Value.asString(MLocName);
    dbgs() << LocIdxToName(Location.Idx) << " --> " << DefName << "\n";
  }
}

LLVM_DUMP_METHOD void MLocTracker::dump_mloc_map() {
  for (auto Location : locations()) {
    std::string foo = LocIdxToName(Location.Idx);
    dbgs() << "Idx " << Location.Idx.asU64() << " " << foo << "\n";
  }
}
#endif

MachineInstrBuilder
MLocTracker::emitLoc(const SmallVectorImpl<ResolvedDbgOp> &DbgOps,
                     const DebugVariable &Var,
                     const DbgValueProperties &Properties) {
  DebugLoc DL = DILocation::get(Var.getVariable()->getContext(), 0, 0,
                                Var.getVariable()->getScope(),
                                const_cast<DILocation *>(Var.getInlinedAt()));

  const MCInstrDesc &Desc = Properties.IsVariadic
                                ? TII.get(TargetOpcode::DBG_VALUE_LIST)
                                : TII.get(TargetOpcode::DBG_VALUE);

#ifdef EXPENSIVE_CHECKS
  assert(all_of(DbgOps,
                [](const ResolvedDbgOp &Op) {
                  return Op.IsConst || !Op.Loc.isIllegal();
                }) &&
         "Did not expect illegal ops in DbgOps.");
  assert((DbgOps.size() == 0 ||
          DbgOps.size() == Properties.getLocationOpCount()) &&
         "Expected to have either one DbgOp per MI LocationOp, or none.");
#endif

  auto GetRegOp = [](unsigned Reg) -> MachineOperand {
    return MachineOperand::CreateReg(
        /* Reg */ Reg, /* isDef */ false, /* isImp */ false,
        /* isKill */ false, /* isDead */ false,
        /* isUndef */ false, /* isEarlyClobber */ false,
        /* SubReg */ 0, /* isDebug */ true);
  };

  SmallVector<MachineOperand> MOs;

  auto EmitUndef = [&]() {
    MOs.clear();
    MOs.assign(Properties.getLocationOpCount(), GetRegOp(0));
    return BuildMI(MF, DL, Desc, false, MOs, Var.getVariable(),
                   Properties.DIExpr);
  };

  // Don't bother passing any real operands to BuildMI if any of them would be
  // $noreg.
  if (DbgOps.empty())
    return EmitUndef();

  bool Indirect = Properties.Indirect;

  const DIExpression *Expr = Properties.DIExpr;

  assert(DbgOps.size() == Properties.getLocationOpCount());

  // If all locations are valid, accumulate them into our list of
  // MachineOperands. For any spilled locations, either update the indirectness
  // register or apply the appropriate transformations in the DIExpression.
  for (size_t Idx = 0; Idx < Properties.getLocationOpCount(); ++Idx) {
    const ResolvedDbgOp &Op = DbgOps[Idx];

    if (Op.IsConst) {
      MOs.push_back(Op.MO);
      continue;
    }

    LocIdx MLoc = Op.Loc;
    unsigned LocID = LocIdxToLocID[MLoc];
    if (LocID >= NumRegs) {
      SpillLocationNo SpillID = locIDToSpill(LocID);
      StackSlotPos StackIdx = locIDToSpillIdx(LocID);
      unsigned short Offset = StackIdx.second;

      // TODO: support variables that are located in spill slots, with non-zero
      // offsets from the start of the spill slot. It would require some more
      // complex DIExpression calculations. This doesn't seem to be produced by
      // LLVM right now, so don't try and support it.
      // Accept no-subregister slots and subregisters where the offset is zero.
      // The consumer should already have type information to work out how large
      // the variable is.
      if (Offset == 0) {
        const SpillLoc &Spill = SpillLocs[SpillID.id()];
        unsigned Base = Spill.SpillBase;

        // There are several ways we can dereference things, and several inputs
        // to consider:
        // * NRVO variables will appear with IsIndirect set, but should have
        //   nothing else in their DIExpressions,
        // * Variables with DW_OP_stack_value in their expr already need an
        //   explicit dereference of the stack location,
        // * Values that don't match the variable size need DW_OP_deref_size,
        // * Everything else can just become a simple location expression.

        // We need to use deref_size whenever there's a mismatch between the
        // size of the value and the size of the variable portion being read.
        // Additionally, we should use it whenever dealing with stack_value
        // fragments, to avoid the consumer having to determine the deref size
        // from DW_OP_piece.
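        // (Illustrative example, not from the original comment: if a 32-bit
        // value has been spilt but the variable is 64 bits wide, the slot is
        // read back with "DW_OP_deref_size 4" rather than a plain DW_OP_deref,
        // so the consumer knows how many bytes to load.)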
        bool UseDerefSize = false;
        unsigned ValueSizeInBits = getLocSizeInBits(MLoc);
        unsigned DerefSizeInBytes = ValueSizeInBits / 8;
        if (auto Fragment = Var.getFragment()) {
          unsigned VariableSizeInBits = Fragment->SizeInBits;
          if (VariableSizeInBits != ValueSizeInBits || Expr->isComplex())
            UseDerefSize = true;
        } else if (auto Size = Var.getVariable()->getSizeInBits()) {
          if (*Size != ValueSizeInBits) {
            UseDerefSize = true;
          }
        }

        SmallVector<uint64_t, 5> OffsetOps;
        TRI.getOffsetOpcodes(Spill.SpillOffset, OffsetOps);
        bool StackValue = false;

        if (Properties.Indirect) {
          // This is something like an NRVO variable, where the pointer has been
          // spilt to the stack. It should end up being a memory location, with
          // the pointer to the variable loaded off the stack with a deref:
          assert(!Expr->isImplicit());
          OffsetOps.push_back(dwarf::DW_OP_deref);
        } else if (UseDerefSize && Expr->isSingleLocationExpression()) {
          // TODO: Figure out how to handle deref size issues for variadic
          // values.
          // We're loading a value off the stack that's not the same size as the
          // variable. Add / subtract stack offset, explicitly deref with a
          // size, and add DW_OP_stack_value if not already present.
          OffsetOps.push_back(dwarf::DW_OP_deref_size);
          OffsetOps.push_back(DerefSizeInBytes);
          StackValue = true;
        } else if (Expr->isComplex() || Properties.IsVariadic) {
          // A variable with no size ambiguity, but with extra elements in its
          // expression. Manually dereference the stack location.
          OffsetOps.push_back(dwarf::DW_OP_deref);
        } else {
          // A plain value that has been spilt to the stack, with no further
          // context. Request a location expression, marking the DBG_VALUE as
          // IsIndirect.
          Indirect = true;
        }

        Expr = DIExpression::appendOpsToArg(Expr, OffsetOps, Idx, StackValue);
        MOs.push_back(GetRegOp(Base));
      } else {
        // This is a stack location with a weird subregister offset: emit an
        // undef DBG_VALUE instead.
        return EmitUndef();
      }
    } else {
      // Non-empty, non-stack slot, must be a plain register.
      MOs.push_back(GetRegOp(LocID));
    }
  }

  return BuildMI(MF, DL, Desc, Indirect, MOs, Var.getVariable(), Expr);
}

/// Default construct and initialize the pass.
InstrRefBasedLDV::InstrRefBasedLDV() = default;

bool InstrRefBasedLDV::isCalleeSaved(LocIdx L) const {
  unsigned Reg = MTracker->LocIdxToLocID[L];
  return isCalleeSavedReg(Reg);
}
bool InstrRefBasedLDV::isCalleeSavedReg(Register R) const {
  for (MCRegAliasIterator RAI(R, TRI, true); RAI.isValid(); ++RAI)
    if (CalleeSavedRegs.test(*RAI))
      return true;
  return false;
}

//===----------------------------------------------------------------------===//
//            Debug Range Extension Implementation
//===----------------------------------------------------------------------===//

#ifndef NDEBUG
// Something to restore in the future.
// void InstrRefBasedLDV::printVarLocInMBB(..)
#endif

std::optional<SpillLocationNo>
InstrRefBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) {
  assert(MI.hasOneMemOperand() &&
         "Spill instruction does not have exactly one memory operand?");
  auto MMOI = MI.memoperands_begin();
  const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue();
  assert(PVal->kind() == PseudoSourceValue::FixedStack &&
         "Inconsistent memory operand in spill instruction");
  int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex();
  const MachineBasicBlock *MBB = MI.getParent();
  Register Reg;
  StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg);
  return MTracker->getOrTrackSpillLoc({Reg, Offset});
}

std::optional<LocIdx>
InstrRefBasedLDV::findLocationForMemOperand(const MachineInstr &MI) {
  std::optional<SpillLocationNo> SpillLoc = extractSpillBaseRegAndOffset(MI);
  if (!SpillLoc)
    return std::nullopt;

  // Where in the stack slot is this value defined -- i.e., what size of value
  // is this? An important question, because it could be loaded into a register
  // from the stack at some point. Happily the memory operand will tell us
  // the size written to the stack.
  auto *MemOperand = *MI.memoperands_begin();
  unsigned SizeInBits = MemOperand->getSizeInBits();

  // Find that position in the stack indexes we're tracking.
  auto IdxIt = MTracker->StackSlotIdxes.find({SizeInBits, 0});
  if (IdxIt == MTracker->StackSlotIdxes.end())
    // That index is not tracked. This is surprising, and unlikely to ever
    // occur, but the safe action is to indicate the variable is optimised out.
    return std::nullopt;

  unsigned SpillID = MTracker->getSpillIDWithIdx(*SpillLoc, IdxIt->second);
  return MTracker->getSpillMLoc(SpillID);
}

/// End all previous ranges related to @MI and start a new range from @MI
/// if it is a DBG_VALUE instr.
bool InstrRefBasedLDV::transferDebugValue(const MachineInstr &MI) {
  if (!MI.isDebugValue())
    return false;

  assert(MI.getDebugVariable()->isValidLocationForIntrinsic(MI.getDebugLoc()) &&
         "Expected inlined-at fields to agree");

  // If there are no instructions in this lexical scope, do no location tracking
  // at all; this variable shouldn't get a legitimate location range.
  auto *Scope = LS.findLexicalScope(MI.getDebugLoc().get());
  if (Scope == nullptr)
    return true; // handled it, by doing nothing

  // MLocTracker needs to know that this register is read, even if it's only
  // read by a debug inst.
  for (const MachineOperand &MO : MI.debug_operands())
    if (MO.isReg() && MO.getReg() != 0)
      (void)MTracker->readReg(MO.getReg());

  // If we're preparing for the second analysis (variables), the machine value
  // locations are already solved, and we report this DBG_VALUE and the value
  // it refers to, to VLocTracker.
  if (VTracker) {
    SmallVector<DbgOpID> DebugOps;
    // Feed defVar the new variable location, or if this is a DBG_VALUE $noreg,
    // feed defVar None.
    if (!MI.isUndefDebugValue()) {
      for (const MachineOperand &MO : MI.debug_operands()) {
        // There should be no undef registers here, as we've screened for undef
        // debug values.
        if (MO.isReg()) {
          DebugOps.push_back(DbgOpStore.insert(MTracker->readReg(MO.getReg())));
        } else if (MO.isImm() || MO.isFPImm() || MO.isCImm()) {
          DebugOps.push_back(DbgOpStore.insert(MO));
        } else {
          llvm_unreachable("Unexpected debug operand type.");
        }
      }
    }
    VTracker->defVar(MI, DbgValueProperties(MI), DebugOps);
  }

  // If performing final tracking of transfers, report this variable definition
  // to the TransferTracker too.
  if (TTracker)
    TTracker->redefVar(MI);
  return true;
}

std::optional<ValueIDNum> InstrRefBasedLDV::getValueForInstrRef(
    unsigned InstNo, unsigned OpNo, MachineInstr &MI,
    const FuncValueTable *MLiveOuts, const FuncValueTable *MLiveIns) {
  // Various optimizations may have happened to the value during codegen,
  // recorded in the value substitution table. Apply any substitutions to
  // the instruction / operand number in this DBG_INSTR_REF, and collect
  // any subregister extractions performed during optimization.
  const MachineFunction &MF = *MI.getParent()->getParent();

  // Create dummy substitution with Src set, for lookup.
  auto SoughtSub =
      MachineFunction::DebugSubstitution({InstNo, OpNo}, {0, 0}, 0);

  SmallVector<unsigned, 4> SeenSubregs;
  auto LowerBoundIt = llvm::lower_bound(MF.DebugValueSubstitutions, SoughtSub);
  while (LowerBoundIt != MF.DebugValueSubstitutions.end() &&
         LowerBoundIt->Src == SoughtSub.Src) {
    std::tie(InstNo, OpNo) = LowerBoundIt->Dest;
    SoughtSub.Src = LowerBoundIt->Dest;
    if (unsigned Subreg = LowerBoundIt->Subreg)
      SeenSubregs.push_back(Subreg);
    LowerBoundIt = llvm::lower_bound(MF.DebugValueSubstitutions, SoughtSub);
  }

  // Default machine value number is <None> -- if no instruction defines
  // the corresponding value, it must have been optimized out.
  std::optional<ValueIDNum> NewID;

  // Try to look up the instruction number, and find the machine value number
  // that it defines. It could be an instruction, or a PHI.
  auto InstrIt = DebugInstrNumToInstr.find(InstNo);
  auto PHIIt = llvm::lower_bound(DebugPHINumToValue, InstNo);
  if (InstrIt != DebugInstrNumToInstr.end()) {
    const MachineInstr &TargetInstr = *InstrIt->second.first;
    uint64_t BlockNo = TargetInstr.getParent()->getNumber();

    // Pick out the designated operand. It might be a memory reference, if
    // a register def was folded into a stack store.
    if (OpNo == MachineFunction::DebugOperandMemNumber &&
        TargetInstr.hasOneMemOperand()) {
      std::optional<LocIdx> L = findLocationForMemOperand(TargetInstr);
      if (L)
        NewID = ValueIDNum(BlockNo, InstrIt->second.second, *L);
    } else if (OpNo != MachineFunction::DebugOperandMemNumber) {
      // Permit the debug-info to be completely wrong: identifying a nonexistent
      // operand, or one that is not a register definition, means something
      // unexpected happened during optimisation. Broken debug-info, however,
      // shouldn't crash the compiler -- instead leave the variable value as
      // None, which will make it appear "optimised out".
1472 if (OpNo < TargetInstr.getNumOperands()) { 1473 const MachineOperand &MO = TargetInstr.getOperand(OpNo); 1474 1475 if (MO.isReg() && MO.isDef() && MO.getReg()) { 1476 unsigned LocID = MTracker->getLocID(MO.getReg()); 1477 LocIdx L = MTracker->LocIDToLocIdx[LocID]; 1478 NewID = ValueIDNum(BlockNo, InstrIt->second.second, L); 1479 } 1480 } 1481 1482 if (!NewID) { 1483 LLVM_DEBUG( 1484 { dbgs() << "Seen instruction reference to illegal operand\n"; }); 1485 } 1486 } 1487 // else: NewID is left as None. 1488 } else if (PHIIt != DebugPHINumToValue.end() && PHIIt->InstrNum == InstNo) { 1489 // It's actually a PHI value. Which value it is might not be obvious, use 1490 // the resolver helper to find out. 1491 assert(MLiveOuts && MLiveIns); 1492 NewID = resolveDbgPHIs(*MI.getParent()->getParent(), *MLiveOuts, *MLiveIns, 1493 MI, InstNo); 1494 } 1495 1496 // Apply any subregister extractions, in reverse. We might have seen code 1497 // like this: 1498 // CALL64 @foo, implicit-def $rax 1499 // %0:gr64 = COPY $rax 1500 // %1:gr32 = COPY %0.sub_32bit 1501 // %2:gr16 = COPY %1.sub_16bit 1502 // %3:gr8 = COPY %2.sub_8bit 1503 // In which case each copy would have been recorded as a substitution with 1504 // a subregister qualifier. Apply those qualifiers now. 1505 if (NewID && !SeenSubregs.empty()) { 1506 unsigned Offset = 0; 1507 unsigned Size = 0; 1508 1509 // Look at each subregister that we passed through, and progressively 1510 // narrow in, accumulating any offsets that occur. Substitutions should 1511 // only ever be the same or narrower width than what they read from; 1512 // iterate in reverse order so that we go from wide to small. 1513 for (unsigned Subreg : reverse(SeenSubregs)) { 1514 unsigned ThisSize = TRI->getSubRegIdxSize(Subreg); 1515 unsigned ThisOffset = TRI->getSubRegIdxOffset(Subreg); 1516 Offset += ThisOffset; 1517 Size = (Size == 0) ? ThisSize : std::min(Size, ThisSize); 1518 } 1519 1520 // If that worked, look for an appropriate subregister with the register 1521 // where the define happens. Don't look at values that were defined during 1522 // a stack write: we can't currently express register locations within 1523 // spills. 1524 LocIdx L = NewID->getLoc(); 1525 if (NewID && !MTracker->isSpill(L)) { 1526 // Find the register class for the register where this def happened. 1527 // FIXME: no index for this? 1528 Register Reg = MTracker->LocIdxToLocID[L]; 1529 const TargetRegisterClass *TRC = nullptr; 1530 for (const auto *TRCI : TRI->regclasses()) 1531 if (TRCI->contains(Reg)) 1532 TRC = TRCI; 1533 assert(TRC && "Couldn't find target register class?"); 1534 1535 // If the register we have isn't the right size or in the right place, 1536 // Try to find a subregister inside it. 1537 unsigned MainRegSize = TRI->getRegSizeInBits(*TRC); 1538 if (Size != MainRegSize || Offset) { 1539 // Enumerate all subregisters, searching. 1540 Register NewReg = 0; 1541 for (MCPhysReg SR : TRI->subregs(Reg)) { 1542 unsigned Subreg = TRI->getSubRegIndex(Reg, SR); 1543 unsigned SubregSize = TRI->getSubRegIdxSize(Subreg); 1544 unsigned SubregOffset = TRI->getSubRegIdxOffset(Subreg); 1545 if (SubregSize == Size && SubregOffset == Offset) { 1546 NewReg = SR; 1547 break; 1548 } 1549 } 1550 1551 // If we didn't find anything: there's no way to express our value. 1552 if (!NewReg) { 1553 NewID = std::nullopt; 1554 } else { 1555 // Re-state the value as being defined within the subregister 1556 // that we found. 
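// (Continuing the CALL64 example above, purely for illustration: a reference
// narrowed through %2.sub_8bit accumulates {size 8, offset 0}, so the value
// originally defined in $rax is re-stated as a def of its 8-bit subregister
// $al, assuming a subregister with that exact size and offset exists.)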
1557 LocIdx NewLoc = MTracker->lookupOrTrackRegister(NewReg); 1558 NewID = ValueIDNum(NewID->getBlock(), NewID->getInst(), NewLoc); 1559 } 1560 } 1561 } else { 1562 // If we can't handle subregisters, unset the new value. 1563 NewID = std::nullopt; 1564 } 1565 } 1566 1567 return NewID; 1568 } 1569 1570 bool InstrRefBasedLDV::transferDebugInstrRef(MachineInstr &MI, 1571 const FuncValueTable *MLiveOuts, 1572 const FuncValueTable *MLiveIns) { 1573 if (!MI.isDebugRef()) 1574 return false; 1575 1576 // Only handle this instruction when we are building the variable value 1577 // transfer function. 1578 if (!VTracker && !TTracker) 1579 return false; 1580 1581 const DILocalVariable *Var = MI.getDebugVariable(); 1582 const DIExpression *Expr = MI.getDebugExpression(); 1583 const DILocation *DebugLoc = MI.getDebugLoc(); 1584 const DILocation *InlinedAt = DebugLoc->getInlinedAt(); 1585 assert(Var->isValidLocationForIntrinsic(DebugLoc) && 1586 "Expected inlined-at fields to agree"); 1587 1588 DebugVariable V(Var, Expr, InlinedAt); 1589 1590 auto *Scope = LS.findLexicalScope(MI.getDebugLoc().get()); 1591 if (Scope == nullptr) 1592 return true; // Handled by doing nothing. This variable is never in scope. 1593 1594 SmallVector<DbgOpID> DbgOpIDs; 1595 for (const MachineOperand &MO : MI.debug_operands()) { 1596 if (!MO.isDbgInstrRef()) { 1597 assert(!MO.isReg() && "DBG_INSTR_REF should not contain registers"); 1598 DbgOpID ConstOpID = DbgOpStore.insert(DbgOp(MO)); 1599 DbgOpIDs.push_back(ConstOpID); 1600 continue; 1601 } 1602 1603 unsigned InstNo = MO.getInstrRefInstrIndex(); 1604 unsigned OpNo = MO.getInstrRefOpIndex(); 1605 1606 // Default machine value number is <None> -- if no instruction defines 1607 // the corresponding value, it must have been optimized out. 1608 std::optional<ValueIDNum> NewID = 1609 getValueForInstrRef(InstNo, OpNo, MI, MLiveOuts, MLiveIns); 1610 // We have a value number or std::nullopt. If the latter, then kill the 1611 // entire debug value. 1612 if (NewID) { 1613 DbgOpIDs.push_back(DbgOpStore.insert(*NewID)); 1614 } else { 1615 DbgOpIDs.clear(); 1616 break; 1617 } 1618 } 1619 1620 // We have a DbgOpID for every value or for none. Tell the variable value 1621 // tracker about it. The rest of this LiveDebugValues implementation acts 1622 // exactly the same for DBG_INSTR_REFs as DBG_VALUEs (just, the former can 1623 // refer to values that aren't immediately available). 1624 DbgValueProperties Properties(Expr, false, true); 1625 if (VTracker) 1626 VTracker->defVar(MI, Properties, DbgOpIDs); 1627 1628 // If we're on the final pass through the function, decompose this INSTR_REF 1629 // into a plain DBG_VALUE. 1630 if (!TTracker) 1631 return true; 1632 1633 // Fetch the concrete DbgOps now, as we will need them later. 1634 SmallVector<DbgOp> DbgOps; 1635 for (DbgOpID OpID : DbgOpIDs) { 1636 DbgOps.push_back(DbgOpStore.find(OpID)); 1637 } 1638 1639 // Pick a location for the machine value number, if such a location exists. 1640 // (This information could be stored in TransferTracker to make it faster). 1641 SmallDenseMap<ValueIDNum, TransferTracker::LocationAndQuality> FoundLocs; 1642 SmallVector<ValueIDNum> ValuesToFind; 1643 // Initialized the preferred-location map with illegal locations, to be 1644 // filled in later. 
1645 for (const DbgOp &Op : DbgOps) { 1646 if (!Op.IsConst) 1647 if (FoundLocs.insert({Op.ID, TransferTracker::LocationAndQuality()}) 1648 .second) 1649 ValuesToFind.push_back(Op.ID); 1650 } 1651 1652 for (auto Location : MTracker->locations()) { 1653 LocIdx CurL = Location.Idx; 1654 ValueIDNum ID = MTracker->readMLoc(CurL); 1655 auto ValueToFindIt = find(ValuesToFind, ID); 1656 if (ValueToFindIt == ValuesToFind.end()) 1657 continue; 1658 auto &Previous = FoundLocs.find(ID)->second; 1659 // If this is the first location with that value, pick it. Otherwise, 1660 // consider whether it's a "longer term" location. 1661 std::optional<TransferTracker::LocationQuality> ReplacementQuality = 1662 TTracker->getLocQualityIfBetter(CurL, Previous.getQuality()); 1663 if (ReplacementQuality) { 1664 Previous = TransferTracker::LocationAndQuality(CurL, *ReplacementQuality); 1665 if (Previous.isBest()) { 1666 ValuesToFind.erase(ValueToFindIt); 1667 if (ValuesToFind.empty()) 1668 break; 1669 } 1670 } 1671 } 1672 1673 SmallVector<ResolvedDbgOp> NewLocs; 1674 for (const DbgOp &DbgOp : DbgOps) { 1675 if (DbgOp.IsConst) { 1676 NewLocs.push_back(DbgOp.MO); 1677 continue; 1678 } 1679 LocIdx FoundLoc = FoundLocs.find(DbgOp.ID)->second.getLoc(); 1680 if (FoundLoc.isIllegal()) { 1681 NewLocs.clear(); 1682 break; 1683 } 1684 NewLocs.push_back(FoundLoc); 1685 } 1686 // Tell transfer tracker that the variable value has changed. 1687 TTracker->redefVar(MI, Properties, NewLocs); 1688 1689 // If there were values with no location, but all such values are defined in 1690 // later instructions in this block, this is a block-local use-before-def. 1691 if (!DbgOps.empty() && NewLocs.empty()) { 1692 bool IsValidUseBeforeDef = true; 1693 uint64_t LastUseBeforeDef = 0; 1694 for (auto ValueLoc : FoundLocs) { 1695 ValueIDNum NewID = ValueLoc.first; 1696 LocIdx FoundLoc = ValueLoc.second.getLoc(); 1697 if (!FoundLoc.isIllegal()) 1698 continue; 1699 // If we have an value with no location that is not defined in this block, 1700 // then it has no location in this block, leaving this value undefined. 1701 if (NewID.getBlock() != CurBB || NewID.getInst() <= CurInst) { 1702 IsValidUseBeforeDef = false; 1703 break; 1704 } 1705 LastUseBeforeDef = std::max(LastUseBeforeDef, NewID.getInst()); 1706 } 1707 if (IsValidUseBeforeDef) { 1708 TTracker->addUseBeforeDef(V, {MI.getDebugExpression(), false, true}, 1709 DbgOps, LastUseBeforeDef); 1710 } 1711 } 1712 1713 // Produce a DBG_VALUE representing what this DBG_INSTR_REF meant. 1714 // This DBG_VALUE is potentially a $noreg / undefined location, if 1715 // FoundLoc is illegal. 1716 // (XXX -- could morph the DBG_INSTR_REF in the future). 1717 MachineInstr *DbgMI = MTracker->emitLoc(NewLocs, V, Properties); 1718 1719 TTracker->PendingDbgValues.push_back(DbgMI); 1720 TTracker->flushDbgValues(MI.getIterator(), nullptr); 1721 return true; 1722 } 1723 1724 bool InstrRefBasedLDV::transferDebugPHI(MachineInstr &MI) { 1725 if (!MI.isDebugPHI()) 1726 return false; 1727 1728 // Analyse these only when solving the machine value location problem. 1729 if (VTracker || TTracker) 1730 return true; 1731 1732 // First operand is the value location, either a stack slot or register. 1733 // Second is the debug instruction number of the original PHI. 1734 const MachineOperand &MO = MI.getOperand(0); 1735 unsigned InstrNum = MI.getOperand(1).getImm(); 1736 1737 auto EmitBadPHI = [this, &MI, InstrNum]() -> bool { 1738 // Helper lambda to do any accounting when we fail to find a location for 1739 // a DBG_PHI. 
// This can happen if DBG_PHIs are malformed, or refer to a
1740 // dead stack slot, for example.
1741 // Record a DebugPHIRecord with an empty value + location.
1742 DebugPHINumToValue.push_back(
1743 {InstrNum, MI.getParent(), std::nullopt, std::nullopt});
1744 return true;
1745 };
1746
1747 if (MO.isReg() && MO.getReg()) {
1748 // The value is whatever's currently in the register. Read and record it,
1749 // to be analysed later.
1750 Register Reg = MO.getReg();
1751 ValueIDNum Num = MTracker->readReg(Reg);
1752 auto PHIRec = DebugPHIRecord(
1753 {InstrNum, MI.getParent(), Num, MTracker->lookupOrTrackRegister(Reg)});
1754 DebugPHINumToValue.push_back(PHIRec);
1755
1756 // Ensure this register is tracked.
1757 for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
1758 MTracker->lookupOrTrackRegister(*RAI);
1759 } else if (MO.isFI()) {
1760 // The value is whatever's in this stack slot.
1761 unsigned FI = MO.getIndex();
1762
1763 // If the stack slot is dead, then this was optimized away.
1764 // FIXME: stack slot colouring should account for slots that get merged.
1765 if (MFI->isDeadObjectIndex(FI))
1766 return EmitBadPHI();
1767
1768 // Identify this spill slot, ensure it's tracked.
1769 Register Base;
1770 StackOffset Offs = TFI->getFrameIndexReference(*MI.getMF(), FI, Base);
1771 SpillLoc SL = {Base, Offs};
1772 std::optional<SpillLocationNo> SpillNo = MTracker->getOrTrackSpillLoc(SL);
1773
1774 // We might be able to find a value, but have chosen not to, to avoid
1775 // tracking too much stack information.
1776 if (!SpillNo)
1777 return EmitBadPHI();
1778
1779 // Any stack location DBG_PHI should have an associated bit-size.
1780 assert(MI.getNumOperands() == 3 && "Stack DBG_PHI with no size?");
1781 unsigned slotBitSize = MI.getOperand(2).getImm();
1782
1783 unsigned SpillID = MTracker->getLocID(*SpillNo, {slotBitSize, 0});
1784 LocIdx SpillLoc = MTracker->getSpillMLoc(SpillID);
1785 ValueIDNum Result = MTracker->readMLoc(SpillLoc);
1786
1787 // Record this DBG_PHI for later analysis.
1788 auto DbgPHI = DebugPHIRecord({InstrNum, MI.getParent(), Result, SpillLoc});
1789 DebugPHINumToValue.push_back(DbgPHI);
1790 } else {
1791 // Else: if the operand is neither a legal register nor a stack slot, then
1792 // we're being fed illegal debug-info. Record an empty PHI, so that any
1793 // debug users trying to read this number will be put off trying to
1794 // interpret the value.
1795 LLVM_DEBUG(
1796 { dbgs() << "Seen DBG_PHI with unrecognised operand format\n"; });
1797 return EmitBadPHI();
1798 }
1799
1800 return true;
1801 }
1802
1803 void InstrRefBasedLDV::transferRegisterDef(MachineInstr &MI) {
1804 // Meta instructions do not affect the debug liveness of any register they
1805 // define.
1806 if (MI.isImplicitDef()) {
1807 // Except when there's an implicit def, and the location it's defining has
1808 // no value number. The whole point of an implicit def is to announce that
1809 // the register is live, without being specific about its value. So define
1810 // a value if there isn't one already.
1811 ValueIDNum Num = MTracker->readReg(MI.getOperand(0).getReg());
1812 // Has a legitimate value -> ignore the implicit def.
1813 if (Num.getLoc() != 0)
1814 return;
1815 // Otherwise, def it here.
1816 } else if (MI.isMetaInstruction())
1817 return;
1818
1819 // We always ignore SP defines on call instructions; they don't actually
1820 // change the value of the stack pointer... except for win32's _chkstk.
This 1821 // is rare: filter quickly for the common case (no stack adjustments, not a 1822 // call, etc). If it is a call that modifies SP, recognise the SP register 1823 // defs. 1824 bool CallChangesSP = false; 1825 if (AdjustsStackInCalls && MI.isCall() && MI.getOperand(0).isSymbol() && 1826 !strcmp(MI.getOperand(0).getSymbolName(), StackProbeSymbolName.data())) 1827 CallChangesSP = true; 1828 1829 // Test whether we should ignore a def of this register due to it being part 1830 // of the stack pointer. 1831 auto IgnoreSPAlias = [this, &MI, CallChangesSP](Register R) -> bool { 1832 if (CallChangesSP) 1833 return false; 1834 return MI.isCall() && MTracker->SPAliases.count(R); 1835 }; 1836 1837 // Find the regs killed by MI, and find regmasks of preserved regs. 1838 // Max out the number of statically allocated elements in `DeadRegs`, as this 1839 // prevents fallback to std::set::count() operations. 1840 SmallSet<uint32_t, 32> DeadRegs; 1841 SmallVector<const uint32_t *, 4> RegMasks; 1842 SmallVector<const MachineOperand *, 4> RegMaskPtrs; 1843 for (const MachineOperand &MO : MI.operands()) { 1844 // Determine whether the operand is a register def. 1845 if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical() && 1846 !IgnoreSPAlias(MO.getReg())) { 1847 // Remove ranges of all aliased registers. 1848 for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI) 1849 // FIXME: Can we break out of this loop early if no insertion occurs? 1850 DeadRegs.insert(*RAI); 1851 } else if (MO.isRegMask()) { 1852 RegMasks.push_back(MO.getRegMask()); 1853 RegMaskPtrs.push_back(&MO); 1854 } 1855 } 1856 1857 // Tell MLocTracker about all definitions, of regmasks and otherwise. 1858 for (uint32_t DeadReg : DeadRegs) 1859 MTracker->defReg(DeadReg, CurBB, CurInst); 1860 1861 for (const auto *MO : RegMaskPtrs) 1862 MTracker->writeRegMask(MO, CurBB, CurInst); 1863 1864 // If this instruction writes to a spill slot, def that slot. 1865 if (hasFoldedStackStore(MI)) { 1866 if (std::optional<SpillLocationNo> SpillNo = 1867 extractSpillBaseRegAndOffset(MI)) { 1868 for (unsigned int I = 0; I < MTracker->NumSlotIdxes; ++I) { 1869 unsigned SpillID = MTracker->getSpillIDWithIdx(*SpillNo, I); 1870 LocIdx L = MTracker->getSpillMLoc(SpillID); 1871 MTracker->setMLoc(L, ValueIDNum(CurBB, CurInst, L)); 1872 } 1873 } 1874 } 1875 1876 if (!TTracker) 1877 return; 1878 1879 // When committing variable values to locations: tell transfer tracker that 1880 // we've clobbered things. It may be able to recover the variable from a 1881 // different location. 1882 1883 // Inform TTracker about any direct clobbers. 1884 for (uint32_t DeadReg : DeadRegs) { 1885 LocIdx Loc = MTracker->lookupOrTrackRegister(DeadReg); 1886 TTracker->clobberMloc(Loc, MI.getIterator(), false); 1887 } 1888 1889 // Look for any clobbers performed by a register mask. Only test locations 1890 // that are actually being tracked. 1891 if (!RegMaskPtrs.empty()) { 1892 for (auto L : MTracker->locations()) { 1893 // Stack locations can't be clobbered by regmasks. 1894 if (MTracker->isSpill(L.Idx)) 1895 continue; 1896 1897 Register Reg = MTracker->LocIdxToLocID[L.Idx]; 1898 if (IgnoreSPAlias(Reg)) 1899 continue; 1900 1901 for (const auto *MO : RegMaskPtrs) 1902 if (MO->clobbersPhysReg(Reg)) 1903 TTracker->clobberMloc(L.Idx, MI.getIterator(), false); 1904 } 1905 } 1906 1907 // Tell TTracker about any folded stack store. 
1908 if (hasFoldedStackStore(MI)) { 1909 if (std::optional<SpillLocationNo> SpillNo = 1910 extractSpillBaseRegAndOffset(MI)) { 1911 for (unsigned int I = 0; I < MTracker->NumSlotIdxes; ++I) { 1912 unsigned SpillID = MTracker->getSpillIDWithIdx(*SpillNo, I); 1913 LocIdx L = MTracker->getSpillMLoc(SpillID); 1914 TTracker->clobberMloc(L, MI.getIterator(), true); 1915 } 1916 } 1917 } 1918 } 1919 1920 void InstrRefBasedLDV::performCopy(Register SrcRegNum, Register DstRegNum) { 1921 // In all circumstances, re-def all aliases. It's definitely a new value now. 1922 for (MCRegAliasIterator RAI(DstRegNum, TRI, true); RAI.isValid(); ++RAI) 1923 MTracker->defReg(*RAI, CurBB, CurInst); 1924 1925 ValueIDNum SrcValue = MTracker->readReg(SrcRegNum); 1926 MTracker->setReg(DstRegNum, SrcValue); 1927 1928 // Copy subregisters from one location to another. 1929 for (MCSubRegIndexIterator SRI(SrcRegNum, TRI); SRI.isValid(); ++SRI) { 1930 unsigned SrcSubReg = SRI.getSubReg(); 1931 unsigned SubRegIdx = SRI.getSubRegIndex(); 1932 unsigned DstSubReg = TRI->getSubReg(DstRegNum, SubRegIdx); 1933 if (!DstSubReg) 1934 continue; 1935 1936 // Do copy. There are two matching subregisters, the source value should 1937 // have been def'd when the super-reg was, the latter might not be tracked 1938 // yet. 1939 // This will force SrcSubReg to be tracked, if it isn't yet. Will read 1940 // mphi values if it wasn't tracked. 1941 LocIdx SrcL = MTracker->lookupOrTrackRegister(SrcSubReg); 1942 LocIdx DstL = MTracker->lookupOrTrackRegister(DstSubReg); 1943 (void)SrcL; 1944 (void)DstL; 1945 ValueIDNum CpyValue = MTracker->readReg(SrcSubReg); 1946 1947 MTracker->setReg(DstSubReg, CpyValue); 1948 } 1949 } 1950 1951 std::optional<SpillLocationNo> 1952 InstrRefBasedLDV::isSpillInstruction(const MachineInstr &MI, 1953 MachineFunction *MF) { 1954 // TODO: Handle multiple stores folded into one. 1955 if (!MI.hasOneMemOperand()) 1956 return std::nullopt; 1957 1958 // Reject any memory operand that's aliased -- we can't guarantee its value. 1959 auto MMOI = MI.memoperands_begin(); 1960 const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue(); 1961 if (PVal->isAliased(MFI)) 1962 return std::nullopt; 1963 1964 if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII)) 1965 return std::nullopt; // This is not a spill instruction, since no valid size 1966 // was returned from either function. 1967 1968 return extractSpillBaseRegAndOffset(MI); 1969 } 1970 1971 bool InstrRefBasedLDV::isLocationSpill(const MachineInstr &MI, 1972 MachineFunction *MF, unsigned &Reg) { 1973 if (!isSpillInstruction(MI, MF)) 1974 return false; 1975 1976 int FI; 1977 Reg = TII->isStoreToStackSlotPostFE(MI, FI); 1978 return Reg != 0; 1979 } 1980 1981 std::optional<SpillLocationNo> 1982 InstrRefBasedLDV::isRestoreInstruction(const MachineInstr &MI, 1983 MachineFunction *MF, unsigned &Reg) { 1984 if (!MI.hasOneMemOperand()) 1985 return std::nullopt; 1986 1987 // FIXME: Handle folded restore instructions with more than one memory 1988 // operand. 1989 if (MI.getRestoreSize(TII)) { 1990 Reg = MI.getOperand(0).getReg(); 1991 return extractSpillBaseRegAndOffset(MI); 1992 } 1993 return std::nullopt; 1994 } 1995 1996 bool InstrRefBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI) { 1997 // XXX -- it's too difficult to implement VarLocBasedImpl's stack location 1998 // limitations under the new model. Therefore, when comparing them, compare 1999 // versions that don't attempt spills or restores at all. 
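// For illustration only (not taken from a real function), the spill and
// restore instructions this routine recognises look roughly like the
// following x86 MIR, classified below via their memory operands and the
// TargetInstrInfo store/load-to-stack-slot hooks:
//   MOV64mr $rsp, 1, $noreg, 16, $noreg, killed $rbx :: (store 8 into %stack.3)
//   ...
//   $rbx = MOV64rm $rsp, 1, $noreg, 16, $noreg :: (load 8 from %stack.3)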
2000 if (EmulateOldLDV)
2001 return false;
2002
2003 // Strictly limit ourselves to plain loads and stores, not all instructions
2004 // that can access the stack.
2005 int DummyFI = -1;
2006 if (!TII->isStoreToStackSlotPostFE(MI, DummyFI) &&
2007 !TII->isLoadFromStackSlotPostFE(MI, DummyFI))
2008 return false;
2009
2010 MachineFunction *MF = MI.getMF();
2011 unsigned Reg;
2012
2013 LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););
2014
2022 // First, if there are any DBG_VALUEs pointing at a spill slot that is
2023 // written to, terminate that variable location. The value in memory
2024 // will have changed. DbgEntityHistoryCalculator doesn't try to detect this.
2025 if (std::optional<SpillLocationNo> Loc = isSpillInstruction(MI, MF)) {
2026 // Un-set this location and clobber, so that earlier locations don't
2027 // continue past this store.
2028 for (unsigned SlotIdx = 0; SlotIdx < MTracker->NumSlotIdxes; ++SlotIdx) {
2029 unsigned SpillID = MTracker->getSpillIDWithIdx(*Loc, SlotIdx);
2030 std::optional<LocIdx> MLoc = MTracker->getSpillMLoc(SpillID);
2031 if (!MLoc)
2032 continue;
2033
2034 // We need to over-write the stack slot with something (here, a def at
2035 // this instruction) to ensure no values are preserved in this stack slot
2036 // after the spill. It also prevents TTracker from trying to recover the
2037 // location and re-installing it in the same place.
2038 ValueIDNum Def(CurBB, CurInst, *MLoc);
2039 MTracker->setMLoc(*MLoc, Def);
2040 if (TTracker)
2041 TTracker->clobberMloc(*MLoc, MI.getIterator());
2042 }
2043 }
2044
2045 // Try to recognise spill and restore instructions that may transfer a value.
2046 if (isLocationSpill(MI, MF, Reg)) {
2047 // isLocationSpill returning true should guarantee we can extract a
2048 // location.
2049 SpillLocationNo Loc = *extractSpillBaseRegAndOffset(MI);
2050
2051 auto DoTransfer = [&](Register SrcReg, unsigned SpillID) {
2052 auto ReadValue = MTracker->readReg(SrcReg);
2053 LocIdx DstLoc = MTracker->getSpillMLoc(SpillID);
2054 MTracker->setMLoc(DstLoc, ReadValue);
2055
2056 if (TTracker) {
2057 LocIdx SrcLoc = MTracker->getRegMLoc(SrcReg);
2058 TTracker->transferMlocs(SrcLoc, DstLoc, MI.getIterator());
2059 }
2060 };
2061
2062 // Then, transfer subreg bits.
2063 for (MCPhysReg SR : TRI->subregs(Reg)) {
2064 // Ensure this reg is tracked.
2065 (void)MTracker->lookupOrTrackRegister(SR);
2066 unsigned SubregIdx = TRI->getSubRegIndex(Reg, SR);
2067 unsigned SpillID = MTracker->getLocID(Loc, SubregIdx);
2068 DoTransfer(SR, SpillID);
2069 }
2070
2071 // Directly look up the size of the main source reg, and transfer.
2072 unsigned Size = TRI->getRegSizeInBits(Reg, *MRI);
2073 unsigned SpillID = MTracker->getLocID(Loc, {Size, 0});
2074 DoTransfer(Reg, SpillID);
2075 } else {
2076 std::optional<SpillLocationNo> Loc = isRestoreInstruction(MI, MF, Reg);
2077 if (!Loc)
2078 return false;
2079
2080 // Assumption: we're reading from the base of the stack slot, not some
2081 // offset into it. It seems very unlikely LLVM would ever generate
2082 // restores where this wasn't true. This then becomes a question of what
2083 // subregisters in the destination register line up with positions in the
2084 // stack slot.
2085
2086 // Def all registers that alias the destination.
2087 for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
2088 MTracker->defReg(*RAI, CurBB, CurInst);
2089
2090 // Now find subregisters within the destination register, and load values
2091 // from stack slot positions.
2092 auto DoTransfer = [&](Register DestReg, unsigned SpillID) {
2093 LocIdx SrcIdx = MTracker->getSpillMLoc(SpillID);
2094 auto ReadValue = MTracker->readMLoc(SrcIdx);
2095 MTracker->setReg(DestReg, ReadValue);
2096 };
2097
2098 for (MCPhysReg SR : TRI->subregs(Reg)) {
2099 unsigned Subreg = TRI->getSubRegIndex(Reg, SR);
2100 unsigned SpillID = MTracker->getLocID(*Loc, Subreg);
2101 DoTransfer(SR, SpillID);
2102 }
2103
2104 // Directly look up this register's slot idx by size, and transfer.
2105 unsigned Size = TRI->getRegSizeInBits(Reg, *MRI);
2106 unsigned SpillID = MTracker->getLocID(*Loc, {Size, 0});
2107 DoTransfer(Reg, SpillID);
2108 }
2109 return true;
2110 }
2111
2112 bool InstrRefBasedLDV::transferRegisterCopy(MachineInstr &MI) {
2113 auto DestSrc = TII->isCopyLikeInstr(MI);
2114 if (!DestSrc)
2115 return false;
2116
2117 const MachineOperand *DestRegOp = DestSrc->Destination;
2118 const MachineOperand *SrcRegOp = DestSrc->Source;
2119
2120 Register SrcReg = SrcRegOp->getReg();
2121 Register DestReg = DestRegOp->getReg();
2122
2123 // Ignore identity copies. Yep, these make it as far as LiveDebugValues.
2124 if (SrcReg == DestReg)
2125 return true;
2126
2127 // For emulating VarLocBasedImpl:
2128 // We want to recognize instructions where the destination register is a
2129 // callee-saved register. If a register that could be clobbered by the call
2130 // were included, there would be a good chance that it is going to be
2131 // clobbered soon. It is more likely that the previous register, which is
2132 // callee-saved, is going to stay unclobbered for longer, even if it is killed.
2133 //
2134 // For InstrRefBasedImpl, we can track multiple locations per value, so
2135 // ignore this condition.
2136 if (EmulateOldLDV && !isCalleeSavedReg(DestReg))
2137 return false;
2138
2139 // VarLocBasedImpl only followed killing copies.
2140 if (EmulateOldLDV && !SrcRegOp->isKill())
2141 return false;
2142
2143 // Before we update MTracker, remember which values were present in each of
2144 // the locations about to be overwritten, so that we can recover any
2145 // potentially clobbered variables.
2146 DenseMap<LocIdx, ValueIDNum> ClobberedLocs;
2147 if (TTracker) {
2148 for (MCRegAliasIterator RAI(DestReg, TRI, true); RAI.isValid(); ++RAI) {
2149 LocIdx ClobberedLoc = MTracker->getRegMLoc(*RAI);
2150 auto MLocIt = TTracker->ActiveMLocs.find(ClobberedLoc);
2151 // If ActiveMLocs isn't tracking this location or there are no variables
2152 // using it, don't bother remembering.
2153 if (MLocIt == TTracker->ActiveMLocs.end() || MLocIt->second.empty())
2154 continue;
2155 ValueIDNum Value = MTracker->readReg(*RAI);
2156 ClobberedLocs[ClobberedLoc] = Value;
2157 }
2158 }
2159
2160 // Copy MTracker info, including subregs if available.
2161 InstrRefBasedLDV::performCopy(SrcReg, DestReg);
2162
2163 // The copy might have clobbered variables based on the destination register.
2164 // Tell TTracker about it, passing the old ValueIDNum to search for
2165 // alternative locations (or else terminating those variables).
2166 if (TTracker) {
2167 for (auto LocVal : ClobberedLocs) {
2168 TTracker->clobberMloc(LocVal.first, LocVal.second, MI.getIterator(), false);
2169 }
2170 }
2171
2172 // Only produce a transfer of DBG_VALUE within a block where old LDV
2173 // would have.
We might make use of the additional value tracking in some 2174 // other way, later. 2175 if (TTracker && isCalleeSavedReg(DestReg) && SrcRegOp->isKill()) 2176 TTracker->transferMlocs(MTracker->getRegMLoc(SrcReg), 2177 MTracker->getRegMLoc(DestReg), MI.getIterator()); 2178 2179 // VarLocBasedImpl would quit tracking the old location after copying. 2180 if (EmulateOldLDV && SrcReg != DestReg) 2181 MTracker->defReg(SrcReg, CurBB, CurInst); 2182 2183 return true; 2184 } 2185 2186 /// Accumulate a mapping between each DILocalVariable fragment and other 2187 /// fragments of that DILocalVariable which overlap. This reduces work during 2188 /// the data-flow stage from "Find any overlapping fragments" to "Check if the 2189 /// known-to-overlap fragments are present". 2190 /// \param MI A previously unprocessed debug instruction to analyze for 2191 /// fragment usage. 2192 void InstrRefBasedLDV::accumulateFragmentMap(MachineInstr &MI) { 2193 assert(MI.isDebugValueLike()); 2194 DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(), 2195 MI.getDebugLoc()->getInlinedAt()); 2196 FragmentInfo ThisFragment = MIVar.getFragmentOrDefault(); 2197 2198 // If this is the first sighting of this variable, then we are guaranteed 2199 // there are currently no overlapping fragments either. Initialize the set 2200 // of seen fragments, record no overlaps for the current one, and return. 2201 auto SeenIt = SeenFragments.find(MIVar.getVariable()); 2202 if (SeenIt == SeenFragments.end()) { 2203 SmallSet<FragmentInfo, 4> OneFragment; 2204 OneFragment.insert(ThisFragment); 2205 SeenFragments.insert({MIVar.getVariable(), OneFragment}); 2206 2207 OverlapFragments.insert({{MIVar.getVariable(), ThisFragment}, {}}); 2208 return; 2209 } 2210 2211 // If this particular Variable/Fragment pair already exists in the overlap 2212 // map, it has already been accounted for. 2213 auto IsInOLapMap = 2214 OverlapFragments.insert({{MIVar.getVariable(), ThisFragment}, {}}); 2215 if (!IsInOLapMap.second) 2216 return; 2217 2218 auto &ThisFragmentsOverlaps = IsInOLapMap.first->second; 2219 auto &AllSeenFragments = SeenIt->second; 2220 2221 // Otherwise, examine all other seen fragments for this variable, with "this" 2222 // fragment being a previously unseen fragment. Record any pair of 2223 // overlapping fragments. 2224 for (const auto &ASeenFragment : AllSeenFragments) { 2225 // Does this previously seen fragment overlap? 2226 if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) { 2227 // Yes: Mark the current fragment as being overlapped. 2228 ThisFragmentsOverlaps.push_back(ASeenFragment); 2229 // Mark the previously seen fragment as being overlapped by the current 2230 // one. 2231 auto ASeenFragmentsOverlaps = 2232 OverlapFragments.find({MIVar.getVariable(), ASeenFragment}); 2233 assert(ASeenFragmentsOverlaps != OverlapFragments.end() && 2234 "Previously seen var fragment has no vector of overlaps"); 2235 ASeenFragmentsOverlaps->second.push_back(ThisFragment); 2236 } 2237 } 2238 2239 AllSeenFragments.insert(ThisFragment); 2240 } 2241 2242 void InstrRefBasedLDV::process(MachineInstr &MI, 2243 const FuncValueTable *MLiveOuts, 2244 const FuncValueTable *MLiveIns) { 2245 // Try to interpret an MI as a debug or transfer instruction. Only if it's 2246 // none of these should we interpret it's register defs as new value 2247 // definitions. 
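// (Each transfer* helper below returns true if it recognised and fully
// handled MI; only when none of them claims the instruction do we fall
// through to transferRegisterDef and treat its register defs as new values.)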
2248 if (transferDebugValue(MI)) 2249 return; 2250 if (transferDebugInstrRef(MI, MLiveOuts, MLiveIns)) 2251 return; 2252 if (transferDebugPHI(MI)) 2253 return; 2254 if (transferRegisterCopy(MI)) 2255 return; 2256 if (transferSpillOrRestoreInst(MI)) 2257 return; 2258 transferRegisterDef(MI); 2259 } 2260 2261 void InstrRefBasedLDV::produceMLocTransferFunction( 2262 MachineFunction &MF, SmallVectorImpl<MLocTransferMap> &MLocTransfer, 2263 unsigned MaxNumBlocks) { 2264 // Because we try to optimize around register mask operands by ignoring regs 2265 // that aren't currently tracked, we set up something ugly for later: RegMask 2266 // operands that are seen earlier than the first use of a register, still need 2267 // to clobber that register in the transfer function. But this information 2268 // isn't actively recorded. Instead, we track each RegMask used in each block, 2269 // and accumulated the clobbered but untracked registers in each block into 2270 // the following bitvector. Later, if new values are tracked, we can add 2271 // appropriate clobbers. 2272 SmallVector<BitVector, 32> BlockMasks; 2273 BlockMasks.resize(MaxNumBlocks); 2274 2275 // Reserve one bit per register for the masks described above. 2276 unsigned BVWords = MachineOperand::getRegMaskSize(TRI->getNumRegs()); 2277 for (auto &BV : BlockMasks) 2278 BV.resize(TRI->getNumRegs(), true); 2279 2280 // Step through all instructions and inhale the transfer function. 2281 for (auto &MBB : MF) { 2282 // Object fields that are read by trackers to know where we are in the 2283 // function. 2284 CurBB = MBB.getNumber(); 2285 CurInst = 1; 2286 2287 // Set all machine locations to a PHI value. For transfer function 2288 // production only, this signifies the live-in value to the block. 2289 MTracker->reset(); 2290 MTracker->setMPhis(CurBB); 2291 2292 // Step through each instruction in this block. 2293 for (auto &MI : MBB) { 2294 // Pass in an empty unique_ptr for the value tables when accumulating the 2295 // machine transfer function. 2296 process(MI, nullptr, nullptr); 2297 2298 // Also accumulate fragment map. 2299 if (MI.isDebugValueLike()) 2300 accumulateFragmentMap(MI); 2301 2302 // Create a map from the instruction number (if present) to the 2303 // MachineInstr and its position. 2304 if (uint64_t InstrNo = MI.peekDebugInstrNum()) { 2305 auto InstrAndPos = std::make_pair(&MI, CurInst); 2306 auto InsertResult = 2307 DebugInstrNumToInstr.insert(std::make_pair(InstrNo, InstrAndPos)); 2308 2309 // There should never be duplicate instruction numbers. 2310 assert(InsertResult.second); 2311 (void)InsertResult; 2312 } 2313 2314 ++CurInst; 2315 } 2316 2317 // Produce the transfer function, a map of machine location to new value. If 2318 // any machine location has the live-in phi value from the start of the 2319 // block, it's live-through and doesn't need recording in the transfer 2320 // function. 2321 for (auto Location : MTracker->locations()) { 2322 LocIdx Idx = Location.Idx; 2323 ValueIDNum &P = Location.Value; 2324 if (P.isPHI() && P.getLoc() == Idx.asU64()) 2325 continue; 2326 2327 // Insert-or-update. 2328 auto &TransferMap = MLocTransfer[CurBB]; 2329 auto Result = TransferMap.insert(std::make_pair(Idx.asU64(), P)); 2330 if (!Result.second) 2331 Result.first->second = P; 2332 } 2333 2334 // Accumulate any bitmask operands into the clobbered reg mask for this 2335 // block. 
2336 for (auto &P : MTracker->Masks) { 2337 BlockMasks[CurBB].clearBitsNotInMask(P.first->getRegMask(), BVWords); 2338 } 2339 } 2340 2341 // Compute a bitvector of all the registers that are tracked in this block. 2342 BitVector UsedRegs(TRI->getNumRegs()); 2343 for (auto Location : MTracker->locations()) { 2344 unsigned ID = MTracker->LocIdxToLocID[Location.Idx]; 2345 // Ignore stack slots, and aliases of the stack pointer. 2346 if (ID >= TRI->getNumRegs() || MTracker->SPAliases.count(ID)) 2347 continue; 2348 UsedRegs.set(ID); 2349 } 2350 2351 // Check that any regmask-clobber of a register that gets tracked, is not 2352 // live-through in the transfer function. It needs to be clobbered at the 2353 // very least. 2354 for (unsigned int I = 0; I < MaxNumBlocks; ++I) { 2355 BitVector &BV = BlockMasks[I]; 2356 BV.flip(); 2357 BV &= UsedRegs; 2358 // This produces all the bits that we clobber, but also use. Check that 2359 // they're all clobbered or at least set in the designated transfer 2360 // elem. 2361 for (unsigned Bit : BV.set_bits()) { 2362 unsigned ID = MTracker->getLocID(Bit); 2363 LocIdx Idx = MTracker->LocIDToLocIdx[ID]; 2364 auto &TransferMap = MLocTransfer[I]; 2365 2366 // Install a value representing the fact that this location is effectively 2367 // written to in this block. As there's no reserved value, instead use 2368 // a value number that is never generated. Pick the value number for the 2369 // first instruction in the block, def'ing this location, which we know 2370 // this block never used anyway. 2371 ValueIDNum NotGeneratedNum = ValueIDNum(I, 1, Idx); 2372 auto Result = 2373 TransferMap.insert(std::make_pair(Idx.asU64(), NotGeneratedNum)); 2374 if (!Result.second) { 2375 ValueIDNum &ValueID = Result.first->second; 2376 if (ValueID.getBlock() == I && ValueID.isPHI()) 2377 // It was left as live-through. Set it to clobbered. 2378 ValueID = NotGeneratedNum; 2379 } 2380 } 2381 } 2382 } 2383 2384 bool InstrRefBasedLDV::mlocJoin( 2385 MachineBasicBlock &MBB, SmallPtrSet<const MachineBasicBlock *, 16> &Visited, 2386 FuncValueTable &OutLocs, ValueTable &InLocs) { 2387 LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n"); 2388 bool Changed = false; 2389 2390 // Handle value-propagation when control flow merges on entry to a block. For 2391 // any location without a PHI already placed, the location has the same value 2392 // as its predecessors. If a PHI is placed, test to see whether it's now a 2393 // redundant PHI that we can eliminate. 2394 2395 SmallVector<const MachineBasicBlock *, 8> BlockOrders; 2396 for (auto *Pred : MBB.predecessors()) 2397 BlockOrders.push_back(Pred); 2398 2399 // Visit predecessors in RPOT order. 2400 auto Cmp = [&](const MachineBasicBlock *A, const MachineBasicBlock *B) { 2401 return BBToOrder.find(A)->second < BBToOrder.find(B)->second; 2402 }; 2403 llvm::sort(BlockOrders, Cmp); 2404 2405 // Skip entry block. 2406 if (BlockOrders.size() == 0) 2407 return false; 2408 2409 // Step through all machine locations, look at each predecessor and test 2410 // whether we can eliminate redundant PHIs. 2411 for (auto Location : MTracker->locations()) { 2412 LocIdx Idx = Location.Idx; 2413 2414 // Pick out the first predecessors live-out value for this location. It's 2415 // guaranteed to not be a backedge, as we order by RPO. 2416 ValueIDNum FirstVal = OutLocs[*BlockOrders[0]][Idx.asU64()]; 2417 2418 // If we've already eliminated a PHI here, do no further checking, just 2419 // propagate the first live-in value into this block. 
2420 if (InLocs[Idx.asU64()] != ValueIDNum(MBB.getNumber(), 0, Idx)) { 2421 if (InLocs[Idx.asU64()] != FirstVal) { 2422 InLocs[Idx.asU64()] = FirstVal; 2423 Changed |= true; 2424 } 2425 continue; 2426 } 2427 2428 // We're now examining a PHI to see whether it's un-necessary. Loop around 2429 // the other live-in values and test whether they're all the same. 2430 bool Disagree = false; 2431 for (unsigned int I = 1; I < BlockOrders.size(); ++I) { 2432 const MachineBasicBlock *PredMBB = BlockOrders[I]; 2433 const ValueIDNum &PredLiveOut = OutLocs[*PredMBB][Idx.asU64()]; 2434 2435 // Incoming values agree, continue trying to eliminate this PHI. 2436 if (FirstVal == PredLiveOut) 2437 continue; 2438 2439 // We can also accept a PHI value that feeds back into itself. 2440 if (PredLiveOut == ValueIDNum(MBB.getNumber(), 0, Idx)) 2441 continue; 2442 2443 // Live-out of a predecessor disagrees with the first predecessor. 2444 Disagree = true; 2445 } 2446 2447 // No disagreement? No PHI. Otherwise, leave the PHI in live-ins. 2448 if (!Disagree) { 2449 InLocs[Idx.asU64()] = FirstVal; 2450 Changed |= true; 2451 } 2452 } 2453 2454 // TODO: Reimplement NumInserted and NumRemoved. 2455 return Changed; 2456 } 2457 2458 void InstrRefBasedLDV::findStackIndexInterference( 2459 SmallVectorImpl<unsigned> &Slots) { 2460 // We could spend a bit of time finding the exact, minimal, set of stack 2461 // indexes that interfere with each other, much like reg units. Or, we can 2462 // rely on the fact that: 2463 // * The smallest / lowest index will interfere with everything at zero 2464 // offset, which will be the largest set of registers, 2465 // * Most indexes with non-zero offset will end up being interference units 2466 // anyway. 2467 // So just pick those out and return them. 2468 2469 // We can rely on a single-byte stack index existing already, because we 2470 // initialize them in MLocTracker. 2471 auto It = MTracker->StackSlotIdxes.find({8, 0}); 2472 assert(It != MTracker->StackSlotIdxes.end()); 2473 Slots.push_back(It->second); 2474 2475 // Find anything that has a non-zero offset and add that too. 2476 for (auto &Pair : MTracker->StackSlotIdxes) { 2477 // Is offset zero? If so, ignore. 2478 if (!Pair.first.second) 2479 continue; 2480 Slots.push_back(Pair.second); 2481 } 2482 } 2483 2484 void InstrRefBasedLDV::placeMLocPHIs( 2485 MachineFunction &MF, SmallPtrSetImpl<MachineBasicBlock *> &AllBlocks, 2486 FuncValueTable &MInLocs, SmallVectorImpl<MLocTransferMap> &MLocTransfer) { 2487 SmallVector<unsigned, 4> StackUnits; 2488 findStackIndexInterference(StackUnits); 2489 2490 // To avoid repeatedly running the PHI placement algorithm, leverage the 2491 // fact that a def of register MUST also def its register units. Find the 2492 // units for registers, place PHIs for them, and then replicate them for 2493 // aliasing registers. Some inputs that are never def'd (DBG_PHIs of 2494 // arguments) don't lead to register units being tracked, just place PHIs for 2495 // those registers directly. Stack slots have their own form of "unit", 2496 // store them to one side. 
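// (For illustration on x86: a def of $rax also defs the register units that
// $eax, $ax and $al are built from, so PHI placement is run once for those
// unit roots and the resulting PHIs are replicated to every tracked aliasing
// register. Any PHIs this over-places are pruned by later value propagation.)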
2497 SmallSet<Register, 32> RegUnitsToPHIUp; 2498 SmallSet<LocIdx, 32> NormalLocsToPHI; 2499 SmallSet<SpillLocationNo, 32> StackSlots; 2500 for (auto Location : MTracker->locations()) { 2501 LocIdx L = Location.Idx; 2502 if (MTracker->isSpill(L)) { 2503 StackSlots.insert(MTracker->locIDToSpill(MTracker->LocIdxToLocID[L])); 2504 continue; 2505 } 2506 2507 Register R = MTracker->LocIdxToLocID[L]; 2508 SmallSet<Register, 8> FoundRegUnits; 2509 bool AnyIllegal = false; 2510 for (MCRegUnit Unit : TRI->regunits(R.asMCReg())) { 2511 for (MCRegUnitRootIterator URoot(Unit, TRI); URoot.isValid(); ++URoot) { 2512 if (!MTracker->isRegisterTracked(*URoot)) { 2513 // Not all roots were loaded into the tracking map: this register 2514 // isn't actually def'd anywhere, we only read from it. Generate PHIs 2515 // for this reg, but don't iterate units. 2516 AnyIllegal = true; 2517 } else { 2518 FoundRegUnits.insert(*URoot); 2519 } 2520 } 2521 } 2522 2523 if (AnyIllegal) { 2524 NormalLocsToPHI.insert(L); 2525 continue; 2526 } 2527 2528 RegUnitsToPHIUp.insert(FoundRegUnits.begin(), FoundRegUnits.end()); 2529 } 2530 2531 // Lambda to fetch PHIs for a given location, and write into the PHIBlocks 2532 // collection. 2533 SmallVector<MachineBasicBlock *, 32> PHIBlocks; 2534 auto CollectPHIsForLoc = [&](LocIdx L) { 2535 // Collect the set of defs. 2536 SmallPtrSet<MachineBasicBlock *, 32> DefBlocks; 2537 for (unsigned int I = 0; I < OrderToBB.size(); ++I) { 2538 MachineBasicBlock *MBB = OrderToBB[I]; 2539 const auto &TransferFunc = MLocTransfer[MBB->getNumber()]; 2540 if (TransferFunc.contains(L)) 2541 DefBlocks.insert(MBB); 2542 } 2543 2544 // The entry block defs the location too: it's the live-in / argument value. 2545 // Only insert if there are other defs though; everything is trivially live 2546 // through otherwise. 2547 if (!DefBlocks.empty()) 2548 DefBlocks.insert(&*MF.begin()); 2549 2550 // Ask the SSA construction algorithm where we should put PHIs. Clear 2551 // anything that might have been hanging around from earlier. 2552 PHIBlocks.clear(); 2553 BlockPHIPlacement(AllBlocks, DefBlocks, PHIBlocks); 2554 }; 2555 2556 auto InstallPHIsAtLoc = [&PHIBlocks, &MInLocs](LocIdx L) { 2557 for (const MachineBasicBlock *MBB : PHIBlocks) 2558 MInLocs[*MBB][L.asU64()] = ValueIDNum(MBB->getNumber(), 0, L); 2559 }; 2560 2561 // For locations with no reg units, just place PHIs. 2562 for (LocIdx L : NormalLocsToPHI) { 2563 CollectPHIsForLoc(L); 2564 // Install those PHI values into the live-in value array. 2565 InstallPHIsAtLoc(L); 2566 } 2567 2568 // For stack slots, calculate PHIs for the equivalent of the units, then 2569 // install for each index. 2570 for (SpillLocationNo Slot : StackSlots) { 2571 for (unsigned Idx : StackUnits) { 2572 unsigned SpillID = MTracker->getSpillIDWithIdx(Slot, Idx); 2573 LocIdx L = MTracker->getSpillMLoc(SpillID); 2574 CollectPHIsForLoc(L); 2575 InstallPHIsAtLoc(L); 2576 2577 // Find anything that aliases this stack index, install PHIs for it too. 
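// (Sizes and offsets in these indexes are in bits: for example, an {8, 0}
// index overlaps a {64, 0} index of the same slot, but not a {32, 32} index,
// which the disjointness test below skips.)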
2578 unsigned Size, Offset; 2579 std::tie(Size, Offset) = MTracker->StackIdxesToPos[Idx]; 2580 for (auto &Pair : MTracker->StackSlotIdxes) { 2581 unsigned ThisSize, ThisOffset; 2582 std::tie(ThisSize, ThisOffset) = Pair.first; 2583 if (ThisSize + ThisOffset <= Offset || Size + Offset <= ThisOffset) 2584 continue; 2585 2586 unsigned ThisID = MTracker->getSpillIDWithIdx(Slot, Pair.second); 2587 LocIdx ThisL = MTracker->getSpillMLoc(ThisID); 2588 InstallPHIsAtLoc(ThisL); 2589 } 2590 } 2591 } 2592 2593 // For reg units, place PHIs, and then place them for any aliasing registers. 2594 for (Register R : RegUnitsToPHIUp) { 2595 LocIdx L = MTracker->lookupOrTrackRegister(R); 2596 CollectPHIsForLoc(L); 2597 2598 // Install those PHI values into the live-in value array. 2599 InstallPHIsAtLoc(L); 2600 2601 // Now find aliases and install PHIs for those. 2602 for (MCRegAliasIterator RAI(R, TRI, true); RAI.isValid(); ++RAI) { 2603 // Super-registers that are "above" the largest register read/written by 2604 // the function will alias, but will not be tracked. 2605 if (!MTracker->isRegisterTracked(*RAI)) 2606 continue; 2607 2608 LocIdx AliasLoc = MTracker->lookupOrTrackRegister(*RAI); 2609 InstallPHIsAtLoc(AliasLoc); 2610 } 2611 } 2612 } 2613 2614 void InstrRefBasedLDV::buildMLocValueMap( 2615 MachineFunction &MF, FuncValueTable &MInLocs, FuncValueTable &MOutLocs, 2616 SmallVectorImpl<MLocTransferMap> &MLocTransfer) { 2617 std::priority_queue<unsigned int, std::vector<unsigned int>, 2618 std::greater<unsigned int>> 2619 Worklist, Pending; 2620 2621 // We track what is on the current and pending worklist to avoid inserting 2622 // the same thing twice. We could avoid this with a custom priority queue, 2623 // but this is probably not worth it. 2624 SmallPtrSet<MachineBasicBlock *, 16> OnPending, OnWorklist; 2625 2626 // Initialize worklist with every block to be visited. Also produce list of 2627 // all blocks. 2628 SmallPtrSet<MachineBasicBlock *, 32> AllBlocks; 2629 for (unsigned int I = 0; I < BBToOrder.size(); ++I) { 2630 Worklist.push(I); 2631 OnWorklist.insert(OrderToBB[I]); 2632 AllBlocks.insert(OrderToBB[I]); 2633 } 2634 2635 // Initialize entry block to PHIs. These represent arguments. 2636 for (auto Location : MTracker->locations()) 2637 MInLocs.tableForEntryMBB()[Location.Idx.asU64()] = 2638 ValueIDNum(0, 0, Location.Idx); 2639 2640 MTracker->reset(); 2641 2642 // Start by placing PHIs, using the usual SSA constructor algorithm. Consider 2643 // any machine-location that isn't live-through a block to be def'd in that 2644 // block. 2645 placeMLocPHIs(MF, AllBlocks, MInLocs, MLocTransfer); 2646 2647 // Propagate values to eliminate redundant PHIs. At the same time, this 2648 // produces the table of Block x Location => Value for the entry to each 2649 // block. 2650 // The kind of PHIs we can eliminate are, for example, where one path in a 2651 // conditional spills and restores a register, and the register still has 2652 // the same value once control flow joins, unbeknowns to the PHI placement 2653 // code. Propagating values allows us to identify such un-necessary PHIs and 2654 // remove them. 2655 SmallPtrSet<const MachineBasicBlock *, 16> Visited; 2656 while (!Worklist.empty() || !Pending.empty()) { 2657 // Vector for storing the evaluated block transfer function. 
2658 SmallVector<std::pair<LocIdx, ValueIDNum>, 32> ToRemap; 2659 2660 while (!Worklist.empty()) { 2661 MachineBasicBlock *MBB = OrderToBB[Worklist.top()]; 2662 CurBB = MBB->getNumber(); 2663 Worklist.pop(); 2664 2665 // Join the values in all predecessor blocks. 2666 bool InLocsChanged; 2667 InLocsChanged = mlocJoin(*MBB, Visited, MOutLocs, MInLocs[*MBB]); 2668 InLocsChanged |= Visited.insert(MBB).second; 2669 2670 // Don't examine transfer function if we've visited this loc at least 2671 // once, and inlocs haven't changed. 2672 if (!InLocsChanged) 2673 continue; 2674 2675 // Load the current set of live-ins into MLocTracker. 2676 MTracker->loadFromArray(MInLocs[*MBB], CurBB); 2677 2678 // Each element of the transfer function can be a new def, or a read of 2679 // a live-in value. Evaluate each element, and store to "ToRemap". 2680 ToRemap.clear(); 2681 for (auto &P : MLocTransfer[CurBB]) { 2682 if (P.second.getBlock() == CurBB && P.second.isPHI()) { 2683 // This is a movement of whatever was live in. Read it. 2684 ValueIDNum NewID = MTracker->readMLoc(P.second.getLoc()); 2685 ToRemap.push_back(std::make_pair(P.first, NewID)); 2686 } else { 2687 // It's a def. Just set it. 2688 assert(P.second.getBlock() == CurBB); 2689 ToRemap.push_back(std::make_pair(P.first, P.second)); 2690 } 2691 } 2692 2693 // Commit the transfer function changes into mloc tracker, which 2694 // transforms the contents of the MLocTracker into the live-outs. 2695 for (auto &P : ToRemap) 2696 MTracker->setMLoc(P.first, P.second); 2697 2698 // Now copy out-locs from mloc tracker into out-loc vector, checking 2699 // whether changes have occurred. These changes can have come from both 2700 // the transfer function, and mlocJoin. 2701 bool OLChanged = false; 2702 for (auto Location : MTracker->locations()) { 2703 OLChanged |= MOutLocs[*MBB][Location.Idx.asU64()] != Location.Value; 2704 MOutLocs[*MBB][Location.Idx.asU64()] = Location.Value; 2705 } 2706 2707 MTracker->reset(); 2708 2709 // No need to examine successors again if out-locs didn't change. 2710 if (!OLChanged) 2711 continue; 2712 2713 // All successors should be visited: put any back-edges on the pending 2714 // list for the next pass-through, and any other successors to be 2715 // visited this pass, if they're not going to be already. 2716 for (auto *s : MBB->successors()) { 2717 // Does branching to this successor represent a back-edge? 2718 if (BBToOrder[s] > BBToOrder[MBB]) { 2719 // No: visit it during this dataflow iteration. 2720 if (OnWorklist.insert(s).second) 2721 Worklist.push(BBToOrder[s]); 2722 } else { 2723 // Yes: visit it on the next iteration. 2724 if (OnPending.insert(s).second) 2725 Pending.push(BBToOrder[s]); 2726 } 2727 } 2728 } 2729 2730 Worklist.swap(Pending); 2731 std::swap(OnPending, OnWorklist); 2732 OnPending.clear(); 2733 // At this point, pending must be empty, since it was just the empty 2734 // worklist 2735 assert(Pending.empty() && "Pending should be empty"); 2736 } 2737 2738 // Once all the live-ins don't change on mlocJoin(), we've eliminated all 2739 // redundant PHIs. 2740 } 2741 2742 void InstrRefBasedLDV::BlockPHIPlacement( 2743 const SmallPtrSetImpl<MachineBasicBlock *> &AllBlocks, 2744 const SmallPtrSetImpl<MachineBasicBlock *> &DefBlocks, 2745 SmallVectorImpl<MachineBasicBlock *> &PHIBlocks) { 2746 // Apply IDF calculator to the designated set of location defs, storing 2747 // required PHIs into PHIBlocks. Uses the dominator tree stored in the 2748 // InstrRefBasedLDV object. 
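// (This is the standard iterated-dominance-frontier computation used for SSA
// construction, but only PHI *positions* are produced here; the join
// functions later test each placed PHI and eliminate the ones that turn out
// to be redundant.)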
2749 IDFCalculatorBase<MachineBasicBlock, false> IDF(DomTree->getBase()); 2750 2751 IDF.setLiveInBlocks(AllBlocks); 2752 IDF.setDefiningBlocks(DefBlocks); 2753 IDF.calculate(PHIBlocks); 2754 } 2755 2756 bool InstrRefBasedLDV::pickVPHILoc( 2757 SmallVectorImpl<DbgOpID> &OutValues, const MachineBasicBlock &MBB, 2758 const LiveIdxT &LiveOuts, FuncValueTable &MOutLocs, 2759 const SmallVectorImpl<const MachineBasicBlock *> &BlockOrders) { 2760 2761 // No predecessors means no PHIs. 2762 if (BlockOrders.empty()) 2763 return false; 2764 2765 // All the location operands that do not already agree need to be joined, 2766 // track the indices of each such location operand here. 2767 SmallDenseSet<unsigned> LocOpsToJoin; 2768 2769 auto FirstValueIt = LiveOuts.find(BlockOrders[0]); 2770 if (FirstValueIt == LiveOuts.end()) 2771 return false; 2772 const DbgValue &FirstValue = *FirstValueIt->second; 2773 2774 for (const auto p : BlockOrders) { 2775 auto OutValIt = LiveOuts.find(p); 2776 if (OutValIt == LiveOuts.end()) 2777 // If we have a predecessor not in scope, we'll never find a PHI position. 2778 return false; 2779 const DbgValue &OutVal = *OutValIt->second; 2780 2781 // No-values cannot have locations we can join on. 2782 if (OutVal.Kind == DbgValue::NoVal) 2783 return false; 2784 2785 // For unjoined VPHIs where we don't know the location, we definitely 2786 // can't find a join loc unless the VPHI is a backedge. 2787 if (OutVal.isUnjoinedPHI() && OutVal.BlockNo != MBB.getNumber()) 2788 return false; 2789 2790 if (!FirstValue.Properties.isJoinable(OutVal.Properties)) 2791 return false; 2792 2793 for (unsigned Idx = 0; Idx < FirstValue.getLocationOpCount(); ++Idx) { 2794 // An unjoined PHI has no defined locations, and so a shared location must 2795 // be found for every operand. 2796 if (OutVal.isUnjoinedPHI()) { 2797 LocOpsToJoin.insert(Idx); 2798 continue; 2799 } 2800 DbgOpID FirstValOp = FirstValue.getDbgOpID(Idx); 2801 DbgOpID OutValOp = OutVal.getDbgOpID(Idx); 2802 if (FirstValOp != OutValOp) { 2803 // We can never join constant ops - the ops must either both be equal 2804 // constant ops or non-const ops. 2805 if (FirstValOp.isConst() || OutValOp.isConst()) 2806 return false; 2807 else 2808 LocOpsToJoin.insert(Idx); 2809 } 2810 } 2811 } 2812 2813 SmallVector<DbgOpID> NewDbgOps; 2814 2815 for (unsigned Idx = 0; Idx < FirstValue.getLocationOpCount(); ++Idx) { 2816 // If this op doesn't need to be joined because the values agree, use that 2817 // already-agreed value. 2818 if (!LocOpsToJoin.contains(Idx)) { 2819 NewDbgOps.push_back(FirstValue.getDbgOpID(Idx)); 2820 continue; 2821 } 2822 2823 std::optional<ValueIDNum> JoinedOpLoc = 2824 pickOperandPHILoc(Idx, MBB, LiveOuts, MOutLocs, BlockOrders); 2825 2826 if (!JoinedOpLoc) 2827 return false; 2828 2829 NewDbgOps.push_back(DbgOpStore.insert(*JoinedOpLoc)); 2830 } 2831 2832 OutValues.append(NewDbgOps); 2833 return true; 2834 } 2835 2836 std::optional<ValueIDNum> InstrRefBasedLDV::pickOperandPHILoc( 2837 unsigned DbgOpIdx, const MachineBasicBlock &MBB, const LiveIdxT &LiveOuts, 2838 FuncValueTable &MOutLocs, 2839 const SmallVectorImpl<const MachineBasicBlock *> &BlockOrders) { 2840 2841 // Collect a set of locations from predecessor where its live-out value can 2842 // be found. 
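// (Hypothetical example: if the wanted value is live out of predecessor A in
// both $rbx and a spill slot, but live out of predecessor B only in $rbx,
// then the per-predecessor location sets built below intersect to {$rbx},
// and the lowest surviving LocIdx -- the register -- becomes the PHI
// location.)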
2843 SmallVector<SmallVector<LocIdx, 4>, 8> Locs; 2844 unsigned NumLocs = MTracker->getNumLocs(); 2845 2846 for (const auto p : BlockOrders) { 2847 auto OutValIt = LiveOuts.find(p); 2848 assert(OutValIt != LiveOuts.end()); 2849 const DbgValue &OutVal = *OutValIt->second; 2850 DbgOpID OutValOpID = OutVal.getDbgOpID(DbgOpIdx); 2851 DbgOp OutValOp = DbgOpStore.find(OutValOpID); 2852 assert(!OutValOp.IsConst); 2853 2854 // Create new empty vector of locations. 2855 Locs.resize(Locs.size() + 1); 2856 2857 // If the live-in value is a def, find the locations where that value is 2858 // present. Do the same for VPHIs where we know the VPHI value. 2859 if (OutVal.Kind == DbgValue::Def || 2860 (OutVal.Kind == DbgValue::VPHI && OutVal.BlockNo != MBB.getNumber() && 2861 !OutValOp.isUndef())) { 2862 ValueIDNum ValToLookFor = OutValOp.ID; 2863 // Search the live-outs of the predecessor for the specified value. 2864 for (unsigned int I = 0; I < NumLocs; ++I) { 2865 if (MOutLocs[*p][I] == ValToLookFor) 2866 Locs.back().push_back(LocIdx(I)); 2867 } 2868 } else { 2869 assert(OutVal.Kind == DbgValue::VPHI); 2870 // Otherwise: this is a VPHI on a backedge feeding back into itself, i.e. 2871 // a value that's live-through the whole loop. (It has to be a backedge, 2872 // because a block can't dominate itself). We can accept as a PHI location 2873 // any location where the other predecessors agree, _and_ the machine 2874 // locations feed back into themselves. Therefore, add all self-looping 2875 // machine-value PHI locations. 2876 for (unsigned int I = 0; I < NumLocs; ++I) { 2877 ValueIDNum MPHI(MBB.getNumber(), 0, LocIdx(I)); 2878 if (MOutLocs[*p][I] == MPHI) 2879 Locs.back().push_back(LocIdx(I)); 2880 } 2881 } 2882 } 2883 // We should have found locations for all predecessors, or returned. 2884 assert(Locs.size() == BlockOrders.size()); 2885 2886 // Starting with the first set of locations, take the intersection with 2887 // subsequent sets. 2888 SmallVector<LocIdx, 4> CandidateLocs = Locs[0]; 2889 for (unsigned int I = 1; I < Locs.size(); ++I) { 2890 auto &LocVec = Locs[I]; 2891 SmallVector<LocIdx, 4> NewCandidates; 2892 std::set_intersection(CandidateLocs.begin(), CandidateLocs.end(), 2893 LocVec.begin(), LocVec.end(), std::inserter(NewCandidates, NewCandidates.begin())); 2894 CandidateLocs = NewCandidates; 2895 } 2896 if (CandidateLocs.empty()) 2897 return std::nullopt; 2898 2899 // We now have a set of LocIdxes that contain the right output value in 2900 // each of the predecessors. Pick the lowest; if there's a register loc, 2901 // that'll be it. 2902 LocIdx L = *CandidateLocs.begin(); 2903 2904 // Return a PHI-value-number for the found location. 2905 ValueIDNum PHIVal = {(unsigned)MBB.getNumber(), 0, L}; 2906 return PHIVal; 2907 } 2908 2909 bool InstrRefBasedLDV::vlocJoin( 2910 MachineBasicBlock &MBB, LiveIdxT &VLOCOutLocs, 2911 SmallPtrSet<const MachineBasicBlock *, 8> &BlocksToExplore, 2912 DbgValue &LiveIn) { 2913 LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n"); 2914 bool Changed = false; 2915 2916 // Order predecessors by RPOT order, for exploring them in that order. 2917 SmallVector<MachineBasicBlock *, 8> BlockOrders(MBB.predecessors()); 2918 2919 auto Cmp = [&](MachineBasicBlock *A, MachineBasicBlock *B) { 2920 return BBToOrder[A] < BBToOrder[B]; 2921 }; 2922 2923 llvm::sort(BlockOrders, Cmp); 2924 2925 unsigned CurBlockRPONum = BBToOrder[&MBB]; 2926 2927 // Collect all the incoming DbgValues for this variable, from predecessor 2928 // live-out values. 
2929 SmallVector<InValueT, 8> Values; 2930 bool Bail = false; 2931 int BackEdgesStart = 0; 2932 for (auto *p : BlockOrders) { 2933 // If the predecessor isn't in scope / to be explored, we'll never be 2934 // able to join any locations. 2935 if (!BlocksToExplore.contains(p)) { 2936 Bail = true; 2937 break; 2938 } 2939 2940 // All Live-outs will have been initialized. 2941 DbgValue &OutLoc = *VLOCOutLocs.find(p)->second; 2942 2943 // Keep track of where back-edges begin in the Values vector. Relies on 2944 // BlockOrders being sorted by RPO. 2945 unsigned ThisBBRPONum = BBToOrder[p]; 2946 if (ThisBBRPONum < CurBlockRPONum) 2947 ++BackEdgesStart; 2948 2949 Values.push_back(std::make_pair(p, &OutLoc)); 2950 } 2951 2952 // If there were no values, or one of the predecessors couldn't have a 2953 // value, then give up immediately. It's not safe to produce a live-in 2954 // value. Leave as whatever it was before. 2955 if (Bail || Values.size() == 0) 2956 return false; 2957 2958 // All (non-entry) blocks have at least one non-backedge predecessor. 2959 // Pick the variable value from the first of these, to compare against 2960 // all others. 2961 const DbgValue &FirstVal = *Values[0].second; 2962 2963 // If the old live-in value is not a PHI then either a) no PHI is needed 2964 // here, or b) we eliminated the PHI that was here. If so, we can just 2965 // propagate in the first parent's incoming value. 2966 if (LiveIn.Kind != DbgValue::VPHI || LiveIn.BlockNo != MBB.getNumber()) { 2967 Changed = LiveIn != FirstVal; 2968 if (Changed) 2969 LiveIn = FirstVal; 2970 return Changed; 2971 } 2972 2973 // Scan for variable values that can never be resolved: if they have 2974 // different DIExpressions, different indirectness, or are mixed constants / 2975 // non-constants. 2976 for (const auto &V : Values) { 2977 if (!V.second->Properties.isJoinable(FirstVal.Properties)) 2978 return false; 2979 if (V.second->Kind == DbgValue::NoVal) 2980 return false; 2981 if (!V.second->hasJoinableLocOps(FirstVal)) 2982 return false; 2983 } 2984 2985 // Try to eliminate this PHI. Do the incoming values all agree? 2986 bool Disagree = false; 2987 for (auto &V : Values) { 2988 if (*V.second == FirstVal) 2989 continue; // No disagreement. 2990 2991 // If both values are not equal but have equal non-empty IDs then they refer 2992 // to the same value from different sources (e.g. one is VPHI and the other 2993 // is Def), which does not cause disagreement. 2994 if (V.second->hasIdenticalValidLocOps(FirstVal)) 2995 continue; 2996 2997 // Eliminate if a backedge feeds a VPHI back into itself. 2998 if (V.second->Kind == DbgValue::VPHI && 2999 V.second->BlockNo == MBB.getNumber() && 3000 // Is this a backedge? 3001 std::distance(Values.begin(), &V) >= BackEdgesStart) 3002 continue; 3003 3004 Disagree = true; 3005 } 3006 3007 // No disagreement -> live-through value. 3008 if (!Disagree) { 3009 Changed = LiveIn != FirstVal; 3010 if (Changed) 3011 LiveIn = FirstVal; 3012 return Changed; 3013 } else { 3014 // Otherwise use a VPHI. 3015 DbgValue VPHI(MBB.getNumber(), FirstVal.Properties, DbgValue::VPHI); 3016 Changed = LiveIn != VPHI; 3017 if (Changed) 3018 LiveIn = VPHI; 3019 return Changed; 3020 } 3021 } 3022 3023 void InstrRefBasedLDV::getBlocksForScope( 3024 const DILocation *DILoc, 3025 SmallPtrSetImpl<const MachineBasicBlock *> &BlocksToExplore, 3026 const SmallPtrSetImpl<MachineBasicBlock *> &AssignBlocks) { 3027 // Get the set of "normal" in-lexical-scope blocks. 
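  // (Roughly: the blocks containing at least one instruction whose location
  // lies within DILoc's lexical scope, as computed by LexicalScopes.)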
  LS.getMachineBasicBlocks(DILoc, BlocksToExplore);

  // VarLoc LiveDebugValues tracks variable locations that are defined in
  // blocks not in scope. This is something we could legitimately ignore, but
  // let's allow it for now for the sake of coverage.
  BlocksToExplore.insert(AssignBlocks.begin(), AssignBlocks.end());

  // Storage for artificial blocks we intend to add to BlocksToExplore.
  DenseSet<const MachineBasicBlock *> ToAdd;

  // To avoid needlessly dropping large volumes of variable locations,
  // propagate variables through artificial blocks, i.e. those that don't have
  // any instructions in scope at all. To accurately replicate VarLoc
  // LiveDebugValues, this means exploring all artificial successors too.
  // Perform a depth-first-search to enumerate those blocks.
  for (const auto *MBB : BlocksToExplore) {
    // Depth-first-search state: each node is a block and which successor
    // we're currently exploring.
    SmallVector<std::pair<const MachineBasicBlock *,
                          MachineBasicBlock::const_succ_iterator>,
                8>
        DFS;

    // Find any artificial successors not already tracked.
    for (auto *succ : MBB->successors()) {
      if (BlocksToExplore.count(succ))
        continue;
      if (!ArtificialBlocks.count(succ))
        continue;
      ToAdd.insert(succ);
      DFS.push_back({succ, succ->succ_begin()});
    }

    // Search all those blocks, depth first.
    while (!DFS.empty()) {
      const MachineBasicBlock *CurBB = DFS.back().first;
      MachineBasicBlock::const_succ_iterator &CurSucc = DFS.back().second;
      // Walk back if we've explored this block's successors to the end.
      if (CurSucc == CurBB->succ_end()) {
        DFS.pop_back();
        continue;
      }

      // If the current successor is artificial and unexplored, descend into
      // it.
      if (!ToAdd.count(*CurSucc) && ArtificialBlocks.count(*CurSucc)) {
        ToAdd.insert(*CurSucc);
        DFS.push_back({*CurSucc, (*CurSucc)->succ_begin()});
        continue;
      }

      ++CurSucc;
    }
  }

  BlocksToExplore.insert(ToAdd.begin(), ToAdd.end());
}

void InstrRefBasedLDV::buildVLocValueMap(
    const DILocation *DILoc, const SmallSet<DebugVariable, 4> &VarsWeCareAbout,
    SmallPtrSetImpl<MachineBasicBlock *> &AssignBlocks, LiveInsT &Output,
    FuncValueTable &MOutLocs, FuncValueTable &MInLocs,
    SmallVectorImpl<VLocTracker> &AllTheVLocs) {
  // This method is much like buildMLocValueMap, but focuses on a single
  // LexicalScope at a time. Pick out a set of blocks and variables that are
  // to have their value assignments solved, then run our dataflow algorithm
  // until a fixed point is reached.
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist, Pending;
  SmallPtrSet<MachineBasicBlock *, 16> OnWorklist, OnPending;

  // The set of blocks we'll be examining.
  SmallPtrSet<const MachineBasicBlock *, 8> BlocksToExplore;

  // The order in which to examine them (RPO).
  SmallVector<MachineBasicBlock *, 8> BlockOrders;

  // RPO ordering function.
  auto Cmp = [&](MachineBasicBlock *A, MachineBasicBlock *B) {
    return BBToOrder[A] < BBToOrder[B];
  };

  getBlocksForScope(DILoc, BlocksToExplore, AssignBlocks);

  // Single block scope: not interesting! No propagation at all. Note that
  // this could probably go above ArtificialBlocks without damage, but
  // that then produces output differences from original-live-debug-values,
  // which propagates from a single block into many artificial ones.
  if (BlocksToExplore.size() == 1)
    return;

  // Convert a const set to a non-const set. LexicalScopes
  // getMachineBasicBlocks returns const MBB pointers, IDF wants mutable ones.
  // (Neither of them mutates anything).
  SmallPtrSet<MachineBasicBlock *, 8> MutBlocksToExplore;
  for (const auto *MBB : BlocksToExplore)
    MutBlocksToExplore.insert(const_cast<MachineBasicBlock *>(MBB));

  // Pick out the relevant blocks, in RPO order, and sort them.
  for (const auto *MBB : BlocksToExplore)
    BlockOrders.push_back(const_cast<MachineBasicBlock *>(MBB));

  llvm::sort(BlockOrders, Cmp);
  unsigned NumBlocks = BlockOrders.size();

  // Allocate some vectors for storing the live-ins and live-outs. Large.
  SmallVector<DbgValue, 32> LiveIns, LiveOuts;
  LiveIns.reserve(NumBlocks);
  LiveOuts.reserve(NumBlocks);

  // Initialize all values to start as NoVals. This signifies "it's live
  // through, but we don't know what it is".
  DbgValueProperties EmptyProperties(EmptyExpr, false, false);
  for (unsigned int I = 0; I < NumBlocks; ++I) {
    DbgValue EmptyDbgValue(I, EmptyProperties, DbgValue::NoVal);
    LiveIns.push_back(EmptyDbgValue);
    LiveOuts.push_back(EmptyDbgValue);
  }

  // Produce by-MBB indexes of live-ins/live-outs, to ease lookup within
  // vlocJoin.
  LiveIdxT LiveOutIdx, LiveInIdx;
  LiveOutIdx.reserve(NumBlocks);
  LiveInIdx.reserve(NumBlocks);
  for (unsigned I = 0; I < NumBlocks; ++I) {
    LiveOutIdx[BlockOrders[I]] = &LiveOuts[I];
    LiveInIdx[BlockOrders[I]] = &LiveIns[I];
  }

  // Loop over each variable and place PHIs for it, then propagate values
  // between blocks. This keeps the locality of working on one lexical scope
  // at a time, but avoids re-processing variable values because some other
  // variable has been assigned.
  for (const auto &Var : VarsWeCareAbout) {
    // Re-initialize live-ins and live-outs, to clear the remains of previous
    // variables' live-ins / live-outs.
    for (unsigned int I = 0; I < NumBlocks; ++I) {
      DbgValue EmptyDbgValue(I, EmptyProperties, DbgValue::NoVal);
      LiveIns[I] = EmptyDbgValue;
      LiveOuts[I] = EmptyDbgValue;
    }

    // Place PHIs for variable values, using the LLVM IDF calculator.
    // Collect the set of blocks where variables are def'd.
    SmallPtrSet<MachineBasicBlock *, 32> DefBlocks;
    for (const MachineBasicBlock *ExpMBB : BlocksToExplore) {
      auto &TransferFunc = AllTheVLocs[ExpMBB->getNumber()].Vars;
      if (TransferFunc.contains(Var))
        DefBlocks.insert(const_cast<MachineBasicBlock *>(ExpMBB));
    }

    SmallVector<MachineBasicBlock *, 32> PHIBlocks;

    // Request the set of PHIs we should insert for this variable. If there's
    // only one value definition, things are very simple.
    if (DefBlocks.size() == 1) {
      placePHIsForSingleVarDefinition(MutBlocksToExplore, *DefBlocks.begin(),
                                      AllTheVLocs, Var, Output);
      continue;
    }

    // Otherwise: we need to place PHIs through SSA and propagate values.
    BlockPHIPlacement(MutBlocksToExplore, DefBlocks, PHIBlocks);

    // Insert PHIs into the per-block live-in tables for this variable.
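    // For instance (hypothetical CFG): with assignments to the variable in
    // bb.1 and bb.2 that both flow into bb.3, the IDF calculation nominates
    // bb.3, so its live-in is seeded as a VPHI here. vlocJoin may later
    // eliminate that VPHI if the incoming values agree, and pickVPHILoc will
    // try to find a machine location for it otherwise.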
    for (MachineBasicBlock *PHIMBB : PHIBlocks) {
      unsigned BlockNo = PHIMBB->getNumber();
      DbgValue *LiveIn = LiveInIdx[PHIMBB];
      *LiveIn = DbgValue(BlockNo, EmptyProperties, DbgValue::VPHI);
    }

    for (auto *MBB : BlockOrders) {
      Worklist.push(BBToOrder[MBB]);
      OnWorklist.insert(MBB);
    }

    // Iterate over all the blocks we selected, propagating the variable's
    // value. This loop does two things:
    // * Eliminates un-necessary VPHIs in vlocJoin,
    // * Evaluates the block's transfer function (i.e. variable assignments)
    //   and stores the result in the block's live-outs.
    // Always evaluate the transfer function on the first iteration, and when
    // the live-ins change thereafter.
    bool FirstTrip = true;
    while (!Worklist.empty() || !Pending.empty()) {
      while (!Worklist.empty()) {
        auto *MBB = OrderToBB[Worklist.top()];
        CurBB = MBB->getNumber();
        Worklist.pop();

        auto LiveInsIt = LiveInIdx.find(MBB);
        assert(LiveInsIt != LiveInIdx.end());
        DbgValue *LiveIn = LiveInsIt->second;

        // Join values from predecessors: the joined value is written into
        // *LiveIn, and the return value signals whether it changed.
        bool InLocsChanged =
            vlocJoin(*MBB, LiveOutIdx, BlocksToExplore, *LiveIn);

        SmallVector<const MachineBasicBlock *, 8> Preds;
        for (const auto *Pred : MBB->predecessors())
          Preds.push_back(Pred);

        // If this block's live-in value is a VPHI, try to pick a machine-value
        // for it. This makes the machine-value available and propagated
        // through all blocks by the time value propagation finishes. We can't
        // do this any earlier as it needs to read the block live-outs.
        if (LiveIn->Kind == DbgValue::VPHI && LiveIn->BlockNo == (int)CurBB) {
          // There's a small possibility that on a preceding path, a VPHI is
          // eliminated and transitions from VPHI-with-location to
          // live-through-value. As a result, the selected location of any VPHI
          // might change, so we need to re-compute it on each iteration.
          SmallVector<DbgOpID> JoinedOps;

          if (pickVPHILoc(JoinedOps, *MBB, LiveOutIdx, MOutLocs, Preds)) {
            bool NewLocPicked = !equal(LiveIn->getDbgOpIDs(), JoinedOps);
            InLocsChanged |= NewLocPicked;
            if (NewLocPicked)
              LiveIn->setDbgOpIDs(JoinedOps);
          }
        }

        if (!InLocsChanged && !FirstTrip)
          continue;

        DbgValue *LiveOut = LiveOutIdx[MBB];
        bool OLChanged = false;

        // Do transfer function.
        auto &VTracker = AllTheVLocs[MBB->getNumber()];
        auto TransferIt = VTracker.Vars.find(Var);
        if (TransferIt != VTracker.Vars.end()) {
          // Erase on empty transfer (DBG_VALUE $noreg).
          if (TransferIt->second.Kind == DbgValue::Undef) {
            DbgValue NewVal(MBB->getNumber(), EmptyProperties, DbgValue::NoVal);
            if (*LiveOut != NewVal) {
              *LiveOut = NewVal;
              OLChanged = true;
            }
          } else {
            // Insert new variable value; or overwrite.
            if (*LiveOut != TransferIt->second) {
              *LiveOut = TransferIt->second;
              OLChanged = true;
            }
          }
        } else {
          // Just copy live-ins to live-outs, for anything not transferred.
          if (*LiveOut != *LiveIn) {
            *LiveOut = *LiveIn;
            OLChanged = true;
          }
        }

        // If no live-out value changed, there's no need to explore further.
        if (!OLChanged)
          continue;

        // We should visit all successors. Ensure we'll visit any non-backedge
        // successors during this dataflow iteration; book backedge successors
        // to be visited next time around.
        for (auto *s : MBB->successors()) {
          // Ignore out of scope / not-to-be-explored successors.
          if (!LiveInIdx.contains(s))
            continue;

          if (BBToOrder[s] > BBToOrder[MBB]) {
            if (OnWorklist.insert(s).second)
              Worklist.push(BBToOrder[s]);
          } else if (OnPending.insert(s).second && (FirstTrip || OLChanged)) {
            Pending.push(BBToOrder[s]);
          }
        }
      }
      Worklist.swap(Pending);
      std::swap(OnWorklist, OnPending);
      OnPending.clear();
      assert(Pending.empty());
      FirstTrip = false;
    }

    // Save live-ins to output vector. Ignore any that are still marked as
    // being VPHIs with no location -- those are variables that we know the
    // value of, but are not actually available in the register file.
    for (auto *MBB : BlockOrders) {
      DbgValue *BlockLiveIn = LiveInIdx[MBB];
      if (BlockLiveIn->Kind == DbgValue::NoVal)
        continue;
      if (BlockLiveIn->isUnjoinedPHI())
        continue;
      if (BlockLiveIn->Kind == DbgValue::VPHI)
        BlockLiveIn->Kind = DbgValue::Def;
      assert(BlockLiveIn->Properties.DIExpr->getFragmentInfo() ==
                 Var.getFragment() &&
             "Fragment info missing during value prop");
      Output[MBB->getNumber()].push_back(std::make_pair(Var, *BlockLiveIn));
    }
  } // Per-variable loop.

  BlockOrders.clear();
  BlocksToExplore.clear();
}

void InstrRefBasedLDV::placePHIsForSingleVarDefinition(
    const SmallPtrSetImpl<MachineBasicBlock *> &InScopeBlocks,
    MachineBasicBlock *AssignMBB, SmallVectorImpl<VLocTracker> &AllTheVLocs,
    const DebugVariable &Var, LiveInsT &Output) {
  // If there is a single definition of the variable, then working out its
  // value everywhere is very simple: it's every block dominated by the
  // definition. At the dominance frontier, the usual algorithm would:
  // * Place PHIs,
  // * Propagate values into them,
  // * Find there's no incoming variable value from the other incoming
  //   branches of the dominance frontier,
  // * Specify there's no variable value in blocks past the frontier.
  // This is a common case, hence it's worth special-casing it.

  // Pick out the variable's value from the block transfer function.
  VLocTracker &VLocs = AllTheVLocs[AssignMBB->getNumber()];
  auto ValueIt = VLocs.Vars.find(Var);
  const DbgValue &Value = ValueIt->second;

  // If it's an explicit assignment of "undef", that means there is no location
  // anyway, anywhere.
  if (Value.Kind == DbgValue::Undef)
    return;

  // Assign the variable value on entry to each dominated block that's in
  // scope. Skip the definition block -- it's assigned the variable value in
  // the middle of the block somewhere.
  for (auto *ScopeBlock : InScopeBlocks) {
    if (!DomTree->properlyDominates(AssignMBB, ScopeBlock))
      continue;

    Output[ScopeBlock->getNumber()].push_back({Var, Value});
  }

  // All blocks that aren't dominated have no live-in value, thus no variable
  // value will be given to them.
3367 } 3368 3369 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) 3370 void InstrRefBasedLDV::dump_mloc_transfer( 3371 const MLocTransferMap &mloc_transfer) const { 3372 for (const auto &P : mloc_transfer) { 3373 std::string foo = MTracker->LocIdxToName(P.first); 3374 std::string bar = MTracker->IDAsString(P.second); 3375 dbgs() << "Loc " << foo << " --> " << bar << "\n"; 3376 } 3377 } 3378 #endif 3379 3380 void InstrRefBasedLDV::initialSetup(MachineFunction &MF) { 3381 // Build some useful data structures. 3382 3383 LLVMContext &Context = MF.getFunction().getContext(); 3384 EmptyExpr = DIExpression::get(Context, {}); 3385 3386 auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool { 3387 if (const DebugLoc &DL = MI.getDebugLoc()) 3388 return DL.getLine() != 0; 3389 return false; 3390 }; 3391 // Collect a set of all the artificial blocks. 3392 for (auto &MBB : MF) 3393 if (none_of(MBB.instrs(), hasNonArtificialLocation)) 3394 ArtificialBlocks.insert(&MBB); 3395 3396 // Compute mappings of block <=> RPO order. 3397 ReversePostOrderTraversal<MachineFunction *> RPOT(&MF); 3398 unsigned int RPONumber = 0; 3399 auto processMBB = [&](MachineBasicBlock *MBB) { 3400 OrderToBB[RPONumber] = MBB; 3401 BBToOrder[MBB] = RPONumber; 3402 BBNumToRPO[MBB->getNumber()] = RPONumber; 3403 ++RPONumber; 3404 }; 3405 for (MachineBasicBlock *MBB : RPOT) 3406 processMBB(MBB); 3407 for (MachineBasicBlock &MBB : MF) 3408 if (!BBToOrder.contains(&MBB)) 3409 processMBB(&MBB); 3410 3411 // Order value substitutions by their "source" operand pair, for quick lookup. 3412 llvm::sort(MF.DebugValueSubstitutions); 3413 3414 #ifdef EXPENSIVE_CHECKS 3415 // As an expensive check, test whether there are any duplicate substitution 3416 // sources in the collection. 3417 if (MF.DebugValueSubstitutions.size() > 2) { 3418 for (auto It = MF.DebugValueSubstitutions.begin(); 3419 It != std::prev(MF.DebugValueSubstitutions.end()); ++It) { 3420 assert(It->Src != std::next(It)->Src && "Duplicate variable location " 3421 "substitution seen"); 3422 } 3423 } 3424 #endif 3425 } 3426 3427 // Produce an "ejection map" for blocks, i.e., what's the highest-numbered 3428 // lexical scope it's used in. When exploring in DFS order and we pass that 3429 // scope, the block can be processed and any tracking information freed. 3430 void InstrRefBasedLDV::makeDepthFirstEjectionMap( 3431 SmallVectorImpl<unsigned> &EjectionMap, 3432 const ScopeToDILocT &ScopeToDILocation, 3433 ScopeToAssignBlocksT &ScopeToAssignBlocks) { 3434 SmallPtrSet<const MachineBasicBlock *, 8> BlocksToExplore; 3435 SmallVector<std::pair<LexicalScope *, ssize_t>, 4> WorkStack; 3436 auto *TopScope = LS.getCurrentFunctionScope(); 3437 3438 // Unlike lexical scope explorers, we explore in reverse order, to find the 3439 // "last" lexical scope used for each block early. 3440 WorkStack.push_back({TopScope, TopScope->getChildren().size() - 1}); 3441 3442 while (!WorkStack.empty()) { 3443 auto &ScopePosition = WorkStack.back(); 3444 LexicalScope *WS = ScopePosition.first; 3445 ssize_t ChildNum = ScopePosition.second--; 3446 3447 const SmallVectorImpl<LexicalScope *> &Children = WS->getChildren(); 3448 if (ChildNum >= 0) { 3449 // If ChildNum is positive, there are remaining children to explore. 3450 // Push the child and its children-count onto the stack. 
      auto &ChildScope = Children[ChildNum];
      WorkStack.push_back(
          std::make_pair(ChildScope, ChildScope->getChildren().size() - 1));
    } else {
      WorkStack.pop_back();

      // We've explored all children and any later blocks: examine all blocks
      // in our scope. If they haven't yet had an ejection number set, then
      // this scope will be the last to use that block.
      auto DILocationIt = ScopeToDILocation.find(WS);
      if (DILocationIt != ScopeToDILocation.end()) {
        getBlocksForScope(DILocationIt->second, BlocksToExplore,
                          ScopeToAssignBlocks.find(WS)->second);
        for (const auto *MBB : BlocksToExplore) {
          unsigned BBNum = MBB->getNumber();
          if (EjectionMap[BBNum] == 0)
            EjectionMap[BBNum] = WS->getDFSOut();
        }

        BlocksToExplore.clear();
      }
    }
  }
}

bool InstrRefBasedLDV::depthFirstVLocAndEmit(
    unsigned MaxNumBlocks, const ScopeToDILocT &ScopeToDILocation,
    const ScopeToVarsT &ScopeToVars, ScopeToAssignBlocksT &ScopeToAssignBlocks,
    LiveInsT &Output, FuncValueTable &MOutLocs, FuncValueTable &MInLocs,
    SmallVectorImpl<VLocTracker> &AllTheVLocs, MachineFunction &MF,
    DenseMap<DebugVariable, unsigned> &AllVarsNumbering,
    const TargetPassConfig &TPC) {
  TTracker = new TransferTracker(TII, MTracker, MF, *TRI, CalleeSavedRegs, TPC);
  unsigned NumLocs = MTracker->getNumLocs();
  VTracker = nullptr;

  // No scopes? No variable locations.
  if (!LS.getCurrentFunctionScope())
    return false;

  // Build map from block number to the last scope that uses the block.
  SmallVector<unsigned, 16> EjectionMap;
  EjectionMap.resize(MaxNumBlocks, 0);
  makeDepthFirstEjectionMap(EjectionMap, ScopeToDILocation,
                            ScopeToAssignBlocks);

  // Helper lambda for ejecting a block -- if nothing is going to use the
  // block, we can translate the variable location information into DBG_VALUEs
  // and then free all of InstrRefBasedLDV's data structures.
  auto EjectBlock = [&](MachineBasicBlock &MBB) -> void {
    unsigned BBNum = MBB.getNumber();
    AllTheVLocs[BBNum].clear();

    // Prime the transfer-tracker, and then step through all the block
    // instructions, installing transfers.
    MTracker->reset();
    MTracker->loadFromArray(MInLocs[MBB], BBNum);
    TTracker->loadInlocs(MBB, MInLocs[MBB], DbgOpStore, Output[BBNum], NumLocs);

    CurBB = BBNum;
    CurInst = 1;
    for (auto &MI : MBB) {
      process(MI, &MOutLocs, &MInLocs);
      TTracker->checkInstForNewValues(CurInst, MI.getIterator());
      ++CurInst;
    }

    // Free machine-location tables for this block.
    MInLocs.ejectTableForBlock(MBB);
    MOutLocs.ejectTableForBlock(MBB);
    // We don't need live-in variable values for this block either.
    Output[BBNum].clear();
    AllTheVLocs[BBNum].clear();
  };

  SmallPtrSet<const MachineBasicBlock *, 8> BlocksToExplore;
  SmallVector<std::pair<LexicalScope *, ssize_t>, 4> WorkStack;
  WorkStack.push_back({LS.getCurrentFunctionScope(), 0});
  unsigned HighestDFSIn = 0;

  // Proceed to explore in depth first order.
  while (!WorkStack.empty()) {
    auto &ScopePosition = WorkStack.back();
    LexicalScope *WS = ScopePosition.first;
    ssize_t ChildNum = ScopePosition.second++;

    // We observe scopes with children twice here, once descending in, once
    // ascending out of the scope nest.
Use HighestDFSIn as a ratchet to ensure 3539 // we don't process a scope twice. Additionally, ignore scopes that don't 3540 // have a DILocation -- by proxy, this means we never tracked any variable 3541 // assignments in that scope. 3542 auto DILocIt = ScopeToDILocation.find(WS); 3543 if (HighestDFSIn <= WS->getDFSIn() && DILocIt != ScopeToDILocation.end()) { 3544 const DILocation *DILoc = DILocIt->second; 3545 auto &VarsWeCareAbout = ScopeToVars.find(WS)->second; 3546 auto &BlocksInScope = ScopeToAssignBlocks.find(WS)->second; 3547 3548 buildVLocValueMap(DILoc, VarsWeCareAbout, BlocksInScope, Output, MOutLocs, 3549 MInLocs, AllTheVLocs); 3550 } 3551 3552 HighestDFSIn = std::max(HighestDFSIn, WS->getDFSIn()); 3553 3554 // Descend into any scope nests. 3555 const SmallVectorImpl<LexicalScope *> &Children = WS->getChildren(); 3556 if (ChildNum < (ssize_t)Children.size()) { 3557 // There are children to explore -- push onto stack and continue. 3558 auto &ChildScope = Children[ChildNum]; 3559 WorkStack.push_back(std::make_pair(ChildScope, 0)); 3560 } else { 3561 WorkStack.pop_back(); 3562 3563 // We've explored a leaf, or have explored all the children of a scope. 3564 // Try to eject any blocks where this is the last scope it's relevant to. 3565 auto DILocationIt = ScopeToDILocation.find(WS); 3566 if (DILocationIt == ScopeToDILocation.end()) 3567 continue; 3568 3569 getBlocksForScope(DILocationIt->second, BlocksToExplore, 3570 ScopeToAssignBlocks.find(WS)->second); 3571 for (const auto *MBB : BlocksToExplore) 3572 if (WS->getDFSOut() == EjectionMap[MBB->getNumber()]) 3573 EjectBlock(const_cast<MachineBasicBlock &>(*MBB)); 3574 3575 BlocksToExplore.clear(); 3576 } 3577 } 3578 3579 // Some artificial blocks may not have been ejected, meaning they're not 3580 // connected to an actual legitimate scope. This can technically happen 3581 // with things like the entry block. In theory, we shouldn't need to do 3582 // anything for such out-of-scope blocks, but for the sake of being similar 3583 // to VarLocBasedLDV, eject these too. 3584 for (auto *MBB : ArtificialBlocks) 3585 if (MInLocs.hasTableFor(*MBB)) 3586 EjectBlock(*MBB); 3587 3588 return emitTransfers(AllVarsNumbering); 3589 } 3590 3591 bool InstrRefBasedLDV::emitTransfers( 3592 DenseMap<DebugVariable, unsigned> &AllVarsNumbering) { 3593 // Go through all the transfers recorded in the TransferTracker -- this is 3594 // both the live-ins to a block, and any movements of values that happen 3595 // in the middle. 3596 for (const auto &P : TTracker->Transfers) { 3597 // We have to insert DBG_VALUEs in a consistent order, otherwise they 3598 // appear in DWARF in different orders. Use the order that they appear 3599 // when walking through each block / each instruction, stored in 3600 // AllVarsNumbering. 3601 SmallVector<std::pair<unsigned, MachineInstr *>> Insts; 3602 for (MachineInstr *MI : P.Insts) { 3603 DebugVariable Var(MI->getDebugVariable(), MI->getDebugExpression(), 3604 MI->getDebugLoc()->getInlinedAt()); 3605 Insts.emplace_back(AllVarsNumbering.find(Var)->second, MI); 3606 } 3607 llvm::sort(Insts, llvm::less_first()); 3608 3609 // Insert either before or after the designated point... 3610 if (P.MBB) { 3611 MachineBasicBlock &MBB = *P.MBB; 3612 for (const auto &Pair : Insts) 3613 MBB.insert(P.Pos, Pair.second); 3614 } else { 3615 // Terminators, like tail calls, can clobber things. Don't try and place 3616 // transfers after them. 
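      // (For example, a DBG_VALUE that would re-state a location after a
      // tail-call terminator could never take effect in this function, so
      // it is simply dropped.)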
3617 if (P.Pos->isTerminator()) 3618 continue; 3619 3620 MachineBasicBlock &MBB = *P.Pos->getParent(); 3621 for (const auto &Pair : Insts) 3622 MBB.insertAfterBundle(P.Pos, Pair.second); 3623 } 3624 } 3625 3626 return TTracker->Transfers.size() != 0; 3627 } 3628 3629 /// Calculate the liveness information for the given machine function and 3630 /// extend ranges across basic blocks. 3631 bool InstrRefBasedLDV::ExtendRanges(MachineFunction &MF, 3632 MachineDominatorTree *DomTree, 3633 TargetPassConfig *TPC, 3634 unsigned InputBBLimit, 3635 unsigned InputDbgValLimit) { 3636 // No subprogram means this function contains no debuginfo. 3637 if (!MF.getFunction().getSubprogram()) 3638 return false; 3639 3640 LLVM_DEBUG(dbgs() << "\nDebug Range Extension\n"); 3641 this->TPC = TPC; 3642 3643 this->DomTree = DomTree; 3644 TRI = MF.getSubtarget().getRegisterInfo(); 3645 MRI = &MF.getRegInfo(); 3646 TII = MF.getSubtarget().getInstrInfo(); 3647 TFI = MF.getSubtarget().getFrameLowering(); 3648 TFI->getCalleeSaves(MF, CalleeSavedRegs); 3649 MFI = &MF.getFrameInfo(); 3650 LS.initialize(MF); 3651 3652 const auto &STI = MF.getSubtarget(); 3653 AdjustsStackInCalls = MFI->adjustsStack() && 3654 STI.getFrameLowering()->stackProbeFunctionModifiesSP(); 3655 if (AdjustsStackInCalls) 3656 StackProbeSymbolName = STI.getTargetLowering()->getStackProbeSymbolName(MF); 3657 3658 MTracker = 3659 new MLocTracker(MF, *TII, *TRI, *MF.getSubtarget().getTargetLowering()); 3660 VTracker = nullptr; 3661 TTracker = nullptr; 3662 3663 SmallVector<MLocTransferMap, 32> MLocTransfer; 3664 SmallVector<VLocTracker, 8> vlocs; 3665 LiveInsT SavedLiveIns; 3666 3667 int MaxNumBlocks = -1; 3668 for (auto &MBB : MF) 3669 MaxNumBlocks = std::max(MBB.getNumber(), MaxNumBlocks); 3670 assert(MaxNumBlocks >= 0); 3671 ++MaxNumBlocks; 3672 3673 initialSetup(MF); 3674 3675 MLocTransfer.resize(MaxNumBlocks); 3676 vlocs.resize(MaxNumBlocks, VLocTracker(OverlapFragments, EmptyExpr)); 3677 SavedLiveIns.resize(MaxNumBlocks); 3678 3679 produceMLocTransferFunction(MF, MLocTransfer, MaxNumBlocks); 3680 3681 // Allocate and initialize two array-of-arrays for the live-in and live-out 3682 // machine values. The outer dimension is the block number; while the inner 3683 // dimension is a LocIdx from MLocTracker. 3684 unsigned NumLocs = MTracker->getNumLocs(); 3685 FuncValueTable MOutLocs(MaxNumBlocks, NumLocs); 3686 FuncValueTable MInLocs(MaxNumBlocks, NumLocs); 3687 3688 // Solve the machine value dataflow problem using the MLocTransfer function, 3689 // storing the computed live-ins / live-outs into the array-of-arrays. We use 3690 // both live-ins and live-outs for decision making in the variable value 3691 // dataflow problem. 3692 buildMLocValueMap(MF, MInLocs, MOutLocs, MLocTransfer); 3693 3694 // Patch up debug phi numbers, turning unknown block-live-in values into 3695 // either live-through machine values, or PHIs. 3696 for (auto &DBG_PHI : DebugPHINumToValue) { 3697 // Identify unresolved block-live-ins. 3698 if (!DBG_PHI.ValueRead) 3699 continue; 3700 3701 ValueIDNum &Num = *DBG_PHI.ValueRead; 3702 if (!Num.isPHI()) 3703 continue; 3704 3705 unsigned BlockNo = Num.getBlock(); 3706 LocIdx LocNo = Num.getLoc(); 3707 ValueIDNum ResolvedValue = MInLocs[BlockNo][LocNo.asU64()]; 3708 // If there is no resolved value for this live-in then it is not directly 3709 // reachable from the entry block -- model it as a PHI on entry to this 3710 // block, which means we leave the ValueIDNum unchanged. 
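    // Sketch with hypothetical numbers: a DBG_PHI that read the block
    // live-in {bb 4, inst 0, loc L} is rewritten to whatever value the
    // machine dataflow solution says is live-in at that slot, e.g. a def
    // {bb 0, inst 12, loc L}; if the table still holds EmptyValue, the PHI
    // is kept as-is.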
3711 if (ResolvedValue != ValueIDNum::EmptyValue) 3712 Num = ResolvedValue; 3713 } 3714 // Later, we'll be looking up ranges of instruction numbers. 3715 llvm::sort(DebugPHINumToValue); 3716 3717 // Walk back through each block / instruction, collecting DBG_VALUE 3718 // instructions and recording what machine value their operands refer to. 3719 for (auto &OrderPair : OrderToBB) { 3720 MachineBasicBlock &MBB = *OrderPair.second; 3721 CurBB = MBB.getNumber(); 3722 VTracker = &vlocs[CurBB]; 3723 VTracker->MBB = &MBB; 3724 MTracker->loadFromArray(MInLocs[MBB], CurBB); 3725 CurInst = 1; 3726 for (auto &MI : MBB) { 3727 process(MI, &MOutLocs, &MInLocs); 3728 ++CurInst; 3729 } 3730 MTracker->reset(); 3731 } 3732 3733 // Number all variables in the order that they appear, to be used as a stable 3734 // insertion order later. 3735 DenseMap<DebugVariable, unsigned> AllVarsNumbering; 3736 3737 // Map from one LexicalScope to all the variables in that scope. 3738 ScopeToVarsT ScopeToVars; 3739 3740 // Map from One lexical scope to all blocks where assignments happen for 3741 // that scope. 3742 ScopeToAssignBlocksT ScopeToAssignBlocks; 3743 3744 // Store map of DILocations that describes scopes. 3745 ScopeToDILocT ScopeToDILocation; 3746 3747 // To mirror old LiveDebugValues, enumerate variables in RPOT order. Otherwise 3748 // the order is unimportant, it just has to be stable. 3749 unsigned VarAssignCount = 0; 3750 for (unsigned int I = 0; I < OrderToBB.size(); ++I) { 3751 auto *MBB = OrderToBB[I]; 3752 auto *VTracker = &vlocs[MBB->getNumber()]; 3753 // Collect each variable with a DBG_VALUE in this block. 3754 for (auto &idx : VTracker->Vars) { 3755 const auto &Var = idx.first; 3756 const DILocation *ScopeLoc = VTracker->Scopes[Var]; 3757 assert(ScopeLoc != nullptr); 3758 auto *Scope = LS.findLexicalScope(ScopeLoc); 3759 3760 // No insts in scope -> shouldn't have been recorded. 3761 assert(Scope != nullptr); 3762 3763 AllVarsNumbering.insert(std::make_pair(Var, AllVarsNumbering.size())); 3764 ScopeToVars[Scope].insert(Var); 3765 ScopeToAssignBlocks[Scope].insert(VTracker->MBB); 3766 ScopeToDILocation[Scope] = ScopeLoc; 3767 ++VarAssignCount; 3768 } 3769 } 3770 3771 bool Changed = false; 3772 3773 // If we have an extremely large number of variable assignments and blocks, 3774 // bail out at this point. We've burnt some time doing analysis already, 3775 // however we should cut our losses. 3776 if ((unsigned)MaxNumBlocks > InputBBLimit && 3777 VarAssignCount > InputDbgValLimit) { 3778 LLVM_DEBUG(dbgs() << "Disabling InstrRefBasedLDV: " << MF.getName() 3779 << " has " << MaxNumBlocks << " basic blocks and " 3780 << VarAssignCount 3781 << " variable assignments, exceeding limits.\n"); 3782 } else { 3783 // Optionally, solve the variable value problem and emit to blocks by using 3784 // a lexical-scope-depth search. It should be functionally identical to 3785 // the "else" block of this condition. 
3786 Changed = depthFirstVLocAndEmit( 3787 MaxNumBlocks, ScopeToDILocation, ScopeToVars, ScopeToAssignBlocks, 3788 SavedLiveIns, MOutLocs, MInLocs, vlocs, MF, AllVarsNumbering, *TPC); 3789 } 3790 3791 delete MTracker; 3792 delete TTracker; 3793 MTracker = nullptr; 3794 VTracker = nullptr; 3795 TTracker = nullptr; 3796 3797 ArtificialBlocks.clear(); 3798 OrderToBB.clear(); 3799 BBToOrder.clear(); 3800 BBNumToRPO.clear(); 3801 DebugInstrNumToInstr.clear(); 3802 DebugPHINumToValue.clear(); 3803 OverlapFragments.clear(); 3804 SeenFragments.clear(); 3805 SeenDbgPHIs.clear(); 3806 DbgOpStore.clear(); 3807 3808 return Changed; 3809 } 3810 3811 LDVImpl *llvm::makeInstrRefBasedLiveDebugValues() { 3812 return new InstrRefBasedLDV(); 3813 } 3814 3815 namespace { 3816 class LDVSSABlock; 3817 class LDVSSAUpdater; 3818 3819 // Pick a type to identify incoming block values as we construct SSA. We 3820 // can't use anything more robust than an integer unfortunately, as SSAUpdater 3821 // expects to zero-initialize the type. 3822 typedef uint64_t BlockValueNum; 3823 3824 /// Represents an SSA PHI node for the SSA updater class. Contains the block 3825 /// this PHI is in, the value number it would have, and the expected incoming 3826 /// values from parent blocks. 3827 class LDVSSAPhi { 3828 public: 3829 SmallVector<std::pair<LDVSSABlock *, BlockValueNum>, 4> IncomingValues; 3830 LDVSSABlock *ParentBlock; 3831 BlockValueNum PHIValNum; 3832 LDVSSAPhi(BlockValueNum PHIValNum, LDVSSABlock *ParentBlock) 3833 : ParentBlock(ParentBlock), PHIValNum(PHIValNum) {} 3834 3835 LDVSSABlock *getParent() { return ParentBlock; } 3836 }; 3837 3838 /// Thin wrapper around a block predecessor iterator. Only difference from a 3839 /// normal block iterator is that it dereferences to an LDVSSABlock. 3840 class LDVSSABlockIterator { 3841 public: 3842 MachineBasicBlock::pred_iterator PredIt; 3843 LDVSSAUpdater &Updater; 3844 3845 LDVSSABlockIterator(MachineBasicBlock::pred_iterator PredIt, 3846 LDVSSAUpdater &Updater) 3847 : PredIt(PredIt), Updater(Updater) {} 3848 3849 bool operator!=(const LDVSSABlockIterator &OtherIt) const { 3850 return OtherIt.PredIt != PredIt; 3851 } 3852 3853 LDVSSABlockIterator &operator++() { 3854 ++PredIt; 3855 return *this; 3856 } 3857 3858 LDVSSABlock *operator*(); 3859 }; 3860 3861 /// Thin wrapper around a block for SSA Updater interface. Necessary because 3862 /// we need to track the PHI value(s) that we may have observed as necessary 3863 /// in this block. 3864 class LDVSSABlock { 3865 public: 3866 MachineBasicBlock &BB; 3867 LDVSSAUpdater &Updater; 3868 using PHIListT = SmallVector<LDVSSAPhi, 1>; 3869 /// List of PHIs in this block. There should only ever be one. 3870 PHIListT PHIList; 3871 3872 LDVSSABlock(MachineBasicBlock &BB, LDVSSAUpdater &Updater) 3873 : BB(BB), Updater(Updater) {} 3874 3875 LDVSSABlockIterator succ_begin() { 3876 return LDVSSABlockIterator(BB.succ_begin(), Updater); 3877 } 3878 3879 LDVSSABlockIterator succ_end() { 3880 return LDVSSABlockIterator(BB.succ_end(), Updater); 3881 } 3882 3883 /// SSAUpdater has requested a PHI: create that within this block record. 3884 LDVSSAPhi *newPHI(BlockValueNum Value) { 3885 PHIList.emplace_back(Value, this); 3886 return &PHIList.back(); 3887 } 3888 3889 /// SSAUpdater wishes to know what PHIs already exist in this block. 3890 PHIListT &phis() { return PHIList; } 3891 }; 3892 3893 /// Utility class for the SSAUpdater interface: tracks blocks, PHIs and values 3894 /// while SSAUpdater is exploring the CFG. 
It's passed as a handle / baton to 3895 // SSAUpdaterTraits<LDVSSAUpdater>. 3896 class LDVSSAUpdater { 3897 public: 3898 /// Map of value numbers to PHI records. 3899 DenseMap<BlockValueNum, LDVSSAPhi *> PHIs; 3900 /// Map of which blocks generate Undef values -- blocks that are not 3901 /// dominated by any Def. 3902 DenseMap<MachineBasicBlock *, BlockValueNum> UndefMap; 3903 /// Map of machine blocks to our own records of them. 3904 DenseMap<MachineBasicBlock *, LDVSSABlock *> BlockMap; 3905 /// Machine location where any PHI must occur. 3906 LocIdx Loc; 3907 /// Table of live-in machine value numbers for blocks / locations. 3908 const FuncValueTable &MLiveIns; 3909 3910 LDVSSAUpdater(LocIdx L, const FuncValueTable &MLiveIns) 3911 : Loc(L), MLiveIns(MLiveIns) {} 3912 3913 void reset() { 3914 for (auto &Block : BlockMap) 3915 delete Block.second; 3916 3917 PHIs.clear(); 3918 UndefMap.clear(); 3919 BlockMap.clear(); 3920 } 3921 3922 ~LDVSSAUpdater() { reset(); } 3923 3924 /// For a given MBB, create a wrapper block for it. Stores it in the 3925 /// LDVSSAUpdater block map. 3926 LDVSSABlock *getSSALDVBlock(MachineBasicBlock *BB) { 3927 auto it = BlockMap.find(BB); 3928 if (it == BlockMap.end()) { 3929 BlockMap[BB] = new LDVSSABlock(*BB, *this); 3930 it = BlockMap.find(BB); 3931 } 3932 return it->second; 3933 } 3934 3935 /// Find the live-in value number for the given block. Looks up the value at 3936 /// the PHI location on entry. 3937 BlockValueNum getValue(LDVSSABlock *LDVBB) { 3938 return MLiveIns[LDVBB->BB][Loc.asU64()].asU64(); 3939 } 3940 }; 3941 3942 LDVSSABlock *LDVSSABlockIterator::operator*() { 3943 return Updater.getSSALDVBlock(*PredIt); 3944 } 3945 3946 #ifndef NDEBUG 3947 3948 raw_ostream &operator<<(raw_ostream &out, const LDVSSAPhi &PHI) { 3949 out << "SSALDVPHI " << PHI.PHIValNum; 3950 return out; 3951 } 3952 3953 #endif 3954 3955 } // namespace 3956 3957 namespace llvm { 3958 3959 /// Template specialization to give SSAUpdater access to CFG and value 3960 /// information. SSAUpdater calls methods in these traits, passing in the 3961 /// LDVSSAUpdater object, to learn about blocks and the values they define. 3962 /// It also provides methods to create PHI nodes and track them. 3963 template <> class SSAUpdaterTraits<LDVSSAUpdater> { 3964 public: 3965 using BlkT = LDVSSABlock; 3966 using ValT = BlockValueNum; 3967 using PhiT = LDVSSAPhi; 3968 using BlkSucc_iterator = LDVSSABlockIterator; 3969 3970 // Methods to access block successors -- dereferencing to our wrapper class. 3971 static BlkSucc_iterator BlkSucc_begin(BlkT *BB) { return BB->succ_begin(); } 3972 static BlkSucc_iterator BlkSucc_end(BlkT *BB) { return BB->succ_end(); } 3973 3974 /// Iterator for PHI operands. 
3975 class PHI_iterator { 3976 private: 3977 LDVSSAPhi *PHI; 3978 unsigned Idx; 3979 3980 public: 3981 explicit PHI_iterator(LDVSSAPhi *P) // begin iterator 3982 : PHI(P), Idx(0) {} 3983 PHI_iterator(LDVSSAPhi *P, bool) // end iterator 3984 : PHI(P), Idx(PHI->IncomingValues.size()) {} 3985 3986 PHI_iterator &operator++() { 3987 Idx++; 3988 return *this; 3989 } 3990 bool operator==(const PHI_iterator &X) const { return Idx == X.Idx; } 3991 bool operator!=(const PHI_iterator &X) const { return !operator==(X); } 3992 3993 BlockValueNum getIncomingValue() { return PHI->IncomingValues[Idx].second; } 3994 3995 LDVSSABlock *getIncomingBlock() { return PHI->IncomingValues[Idx].first; } 3996 }; 3997 3998 static inline PHI_iterator PHI_begin(PhiT *PHI) { return PHI_iterator(PHI); } 3999 4000 static inline PHI_iterator PHI_end(PhiT *PHI) { 4001 return PHI_iterator(PHI, true); 4002 } 4003 4004 /// FindPredecessorBlocks - Put the predecessors of BB into the Preds 4005 /// vector. 4006 static void FindPredecessorBlocks(LDVSSABlock *BB, 4007 SmallVectorImpl<LDVSSABlock *> *Preds) { 4008 for (MachineBasicBlock *Pred : BB->BB.predecessors()) 4009 Preds->push_back(BB->Updater.getSSALDVBlock(Pred)); 4010 } 4011 4012 /// GetUndefVal - Normally creates an IMPLICIT_DEF instruction with a new 4013 /// register. For LiveDebugValues, represents a block identified as not having 4014 /// any DBG_PHI predecessors. 4015 static BlockValueNum GetUndefVal(LDVSSABlock *BB, LDVSSAUpdater *Updater) { 4016 // Create a value number for this block -- it needs to be unique and in the 4017 // "undef" collection, so that we know it's not real. Use a number 4018 // representing a PHI into this block. 4019 BlockValueNum Num = ValueIDNum(BB->BB.getNumber(), 0, Updater->Loc).asU64(); 4020 Updater->UndefMap[&BB->BB] = Num; 4021 return Num; 4022 } 4023 4024 /// CreateEmptyPHI - Create a (representation of a) PHI in the given block. 4025 /// SSAUpdater will populate it with information about incoming values. The 4026 /// value number of this PHI is whatever the machine value number problem 4027 /// solution determined it to be. This includes non-phi values if SSAUpdater 4028 /// tries to create a PHI where the incoming values are identical. 4029 static BlockValueNum CreateEmptyPHI(LDVSSABlock *BB, unsigned NumPreds, 4030 LDVSSAUpdater *Updater) { 4031 BlockValueNum PHIValNum = Updater->getValue(BB); 4032 LDVSSAPhi *PHI = BB->newPHI(PHIValNum); 4033 Updater->PHIs[PHIValNum] = PHI; 4034 return PHIValNum; 4035 } 4036 4037 /// AddPHIOperand - Add the specified value as an operand of the PHI for 4038 /// the specified predecessor block. 4039 static void AddPHIOperand(LDVSSAPhi *PHI, BlockValueNum Val, LDVSSABlock *Pred) { 4040 PHI->IncomingValues.push_back(std::make_pair(Pred, Val)); 4041 } 4042 4043 /// ValueIsPHI - Check if the instruction that defines the specified value 4044 /// is a PHI instruction. 4045 static LDVSSAPhi *ValueIsPHI(BlockValueNum Val, LDVSSAUpdater *Updater) { 4046 return Updater->PHIs.lookup(Val); 4047 } 4048 4049 /// ValueIsNewPHI - Like ValueIsPHI but also check if the PHI has no source 4050 /// operands, i.e., it was just added. 4051 static LDVSSAPhi *ValueIsNewPHI(BlockValueNum Val, LDVSSAUpdater *Updater) { 4052 LDVSSAPhi *PHI = ValueIsPHI(Val, Updater); 4053 if (PHI && PHI->IncomingValues.size() == 0) 4054 return PHI; 4055 return nullptr; 4056 } 4057 4058 /// GetPHIValue - For the specified PHI instruction, return the value 4059 /// that it defines. 
4060 static BlockValueNum GetPHIValue(LDVSSAPhi *PHI) { return PHI->PHIValNum; } 4061 }; 4062 4063 } // end namespace llvm 4064 4065 std::optional<ValueIDNum> InstrRefBasedLDV::resolveDbgPHIs( 4066 MachineFunction &MF, const FuncValueTable &MLiveOuts, 4067 const FuncValueTable &MLiveIns, MachineInstr &Here, uint64_t InstrNum) { 4068 // This function will be called twice per DBG_INSTR_REF, and might end up 4069 // computing lots of SSA information: memoize it. 4070 auto SeenDbgPHIIt = SeenDbgPHIs.find(std::make_pair(&Here, InstrNum)); 4071 if (SeenDbgPHIIt != SeenDbgPHIs.end()) 4072 return SeenDbgPHIIt->second; 4073 4074 std::optional<ValueIDNum> Result = 4075 resolveDbgPHIsImpl(MF, MLiveOuts, MLiveIns, Here, InstrNum); 4076 SeenDbgPHIs.insert({std::make_pair(&Here, InstrNum), Result}); 4077 return Result; 4078 } 4079 4080 std::optional<ValueIDNum> InstrRefBasedLDV::resolveDbgPHIsImpl( 4081 MachineFunction &MF, const FuncValueTable &MLiveOuts, 4082 const FuncValueTable &MLiveIns, MachineInstr &Here, uint64_t InstrNum) { 4083 // Pick out records of DBG_PHI instructions that have been observed. If there 4084 // are none, then we cannot compute a value number. 4085 auto RangePair = std::equal_range(DebugPHINumToValue.begin(), 4086 DebugPHINumToValue.end(), InstrNum); 4087 auto LowerIt = RangePair.first; 4088 auto UpperIt = RangePair.second; 4089 4090 // No DBG_PHI means there can be no location. 4091 if (LowerIt == UpperIt) 4092 return std::nullopt; 4093 4094 // If any DBG_PHIs referred to a location we didn't understand, don't try to 4095 // compute a value. There might be scenarios where we could recover a value 4096 // for some range of DBG_INSTR_REFs, but at this point we can have high 4097 // confidence that we've seen a bug. 4098 auto DBGPHIRange = make_range(LowerIt, UpperIt); 4099 for (const DebugPHIRecord &DBG_PHI : DBGPHIRange) 4100 if (!DBG_PHI.ValueRead) 4101 return std::nullopt; 4102 4103 // If there's only one DBG_PHI, then that is our value number. 4104 if (std::distance(LowerIt, UpperIt) == 1) 4105 return *LowerIt->ValueRead; 4106 4107 // Pick out the location (physreg, slot) where any PHIs must occur. It's 4108 // technically possible for us to merge values in different registers in each 4109 // block, but highly unlikely that LLVM will generate such code after register 4110 // allocation. 4111 LocIdx Loc = *LowerIt->ReadLoc; 4112 4113 // We have several DBG_PHIs, and a use position (the Here inst). All each 4114 // DBG_PHI does is identify a value at a program position. We can treat each 4115 // DBG_PHI like it's a Def of a value, and the use position is a Use of a 4116 // value, just like SSA. We use the bulk-standard LLVM SSA updater class to 4117 // determine which Def is used at the Use, and any PHIs that happen along 4118 // the way. 4119 // Adapted LLVM SSA Updater: 4120 LDVSSAUpdater Updater(Loc, MLiveIns); 4121 // Map of which Def or PHI is the current value in each block. 4122 DenseMap<LDVSSABlock *, BlockValueNum> AvailableValues; 4123 // Set of PHIs that we have created along the way. 4124 SmallVector<LDVSSAPhi *, 8> CreatedPHIs; 4125 4126 // Each existing DBG_PHI is a Def'd value under this model. Record these Defs 4127 // for the SSAUpdater. 
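  // Illustrative sketch (hypothetical blocks): DBG_PHIs in bb.1 and bb.2 act
  // as two Defs of the value, and the DBG_INSTR_REF's position acts as the
  // Use; SSAUpdater then reports either one of those Defs or a PHI value
  // number at an intermediate join block, which is validated further below.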
4128 for (const auto &DBG_PHI : DBGPHIRange) { 4129 LDVSSABlock *Block = Updater.getSSALDVBlock(DBG_PHI.MBB); 4130 const ValueIDNum &Num = *DBG_PHI.ValueRead; 4131 AvailableValues.insert(std::make_pair(Block, Num.asU64())); 4132 } 4133 4134 LDVSSABlock *HereBlock = Updater.getSSALDVBlock(Here.getParent()); 4135 const auto &AvailIt = AvailableValues.find(HereBlock); 4136 if (AvailIt != AvailableValues.end()) { 4137 // Actually, we already know what the value is -- the Use is in the same 4138 // block as the Def. 4139 return ValueIDNum::fromU64(AvailIt->second); 4140 } 4141 4142 // Otherwise, we must use the SSA Updater. It will identify the value number 4143 // that we are to use, and the PHIs that must happen along the way. 4144 SSAUpdaterImpl<LDVSSAUpdater> Impl(&Updater, &AvailableValues, &CreatedPHIs); 4145 BlockValueNum ResultInt = Impl.GetValue(Updater.getSSALDVBlock(Here.getParent())); 4146 ValueIDNum Result = ValueIDNum::fromU64(ResultInt); 4147 4148 // We have the number for a PHI, or possibly live-through value, to be used 4149 // at this Use. There are a number of things we have to check about it though: 4150 // * Does any PHI use an 'Undef' (like an IMPLICIT_DEF) value? If so, this 4151 // Use was not completely dominated by DBG_PHIs and we should abort. 4152 // * Are the Defs or PHIs clobbered in a block? SSAUpdater isn't aware that 4153 // we've left SSA form. Validate that the inputs to each PHI are the 4154 // expected values. 4155 // * Is a PHI we've created actually a merging of values, or are all the 4156 // predecessor values the same, leading to a non-PHI machine value number? 4157 // (SSAUpdater doesn't know that either). Remap validated PHIs into the 4158 // the ValidatedValues collection below to sort this out. 4159 DenseMap<LDVSSABlock *, ValueIDNum> ValidatedValues; 4160 4161 // Define all the input DBG_PHI values in ValidatedValues. 4162 for (const auto &DBG_PHI : DBGPHIRange) { 4163 LDVSSABlock *Block = Updater.getSSALDVBlock(DBG_PHI.MBB); 4164 const ValueIDNum &Num = *DBG_PHI.ValueRead; 4165 ValidatedValues.insert(std::make_pair(Block, Num)); 4166 } 4167 4168 // Sort PHIs to validate into RPO-order. 4169 SmallVector<LDVSSAPhi *, 8> SortedPHIs; 4170 for (auto &PHI : CreatedPHIs) 4171 SortedPHIs.push_back(PHI); 4172 4173 llvm::sort(SortedPHIs, [&](LDVSSAPhi *A, LDVSSAPhi *B) { 4174 return BBToOrder[&A->getParent()->BB] < BBToOrder[&B->getParent()->BB]; 4175 }); 4176 4177 for (auto &PHI : SortedPHIs) { 4178 ValueIDNum ThisBlockValueNum = MLiveIns[PHI->ParentBlock->BB][Loc.asU64()]; 4179 4180 // Are all these things actually defined? 4181 for (auto &PHIIt : PHI->IncomingValues) { 4182 // Any undef input means DBG_PHIs didn't dominate the use point. 4183 if (Updater.UndefMap.contains(&PHIIt.first->BB)) 4184 return std::nullopt; 4185 4186 ValueIDNum ValueToCheck; 4187 const ValueTable &BlockLiveOuts = MLiveOuts[PHIIt.first->BB]; 4188 4189 auto VVal = ValidatedValues.find(PHIIt.first); 4190 if (VVal == ValidatedValues.end()) { 4191 // We cross a loop, and this is a backedge. LLVMs tail duplication 4192 // happens so late that DBG_PHI instructions should not be able to 4193 // migrate into loops -- meaning we can only be live-through this 4194 // loop. 4195 ValueToCheck = ThisBlockValueNum; 4196 } else { 4197 // Does the block have as a live-out, in the location we're examining, 4198 // the value that we expect? If not, it's been moved or clobbered. 
4199 ValueToCheck = VVal->second; 4200 } 4201 4202 if (BlockLiveOuts[Loc.asU64()] != ValueToCheck) 4203 return std::nullopt; 4204 } 4205 4206 // Record this value as validated. 4207 ValidatedValues.insert({PHI->ParentBlock, ThisBlockValueNum}); 4208 } 4209 4210 // All the PHIs are valid: we can return what the SSAUpdater said our value 4211 // number was. 4212 return Result; 4213 } 4214