//===- InstrRefBasedImpl.cpp - Tracking Debug Value MIs -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file InstrRefBasedImpl.cpp
///
/// This is a separate implementation of LiveDebugValues, see
/// LiveDebugValues.cpp and VarLocBasedImpl.cpp for more information.
///
/// This pass propagates variable locations between basic blocks, resolving
/// control flow conflicts between them. The problem is SSA construction, where
/// each debug instruction assigns the *value* that a variable has, and every
/// instruction where the variable is in scope uses that variable. The resulting
/// map of instruction-to-value is then translated into a register (or spill)
/// location for each variable over each instruction.
///
/// The primary difference from normal SSA construction is that we cannot
/// _create_ PHI values that contain variable values. CodeGen has already
/// completed, and we can't alter it just to make debug-info complete. Thus:
/// we can identify function positions where we would like a PHI value for a
/// variable, but must search the MachineFunction to see whether such a PHI is
/// available. If no such PHI exists, the variable location must be dropped.
///
/// To achieve this, we perform two kinds of analysis. First, we identify
/// every value defined by every instruction (ignoring those that only move
/// another value), then re-compute an SSA-form representation of the
/// MachineFunction, using value propagation to eliminate any unnecessary
/// PHI values. This gives us a map of every value computed in the function,
/// and its location within the register file / stack.
///
/// Secondly, for each variable we perform the same analysis, where each debug
/// instruction is considered a def, and every instruction where the variable
/// is in lexical scope is a use. Value propagation is used again to eliminate
/// any unnecessary PHIs. This gives us a map of each variable to the value
/// it should have in a block.
///
/// Once both are complete, we have two maps for each block:
///  * Variables to the values they should have,
///  * Values to the register / spill slot they are located in.
/// After which we can marry-up variable values with a location, and emit
/// DBG_VALUE instructions specifying those locations. Variable locations may
/// be dropped in this process due to the desired variable value not being
/// resident in any machine location, or because there is no PHI value in any
/// location that accurately represents the desired value. The building of
/// location lists for each block is left to DbgEntityHistoryCalculator.
///
/// This pass is kept efficient because the size of the first SSA problem
/// is proportional to the working-set size of the function, which the compiler
/// tries to keep small. (It's also proportional to the number of blocks).
/// Additionally, we repeatedly perform the second SSA problem analysis with
/// only the variables and blocks in a single lexical scope, exploiting their
/// locality.
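///
/// ### Worked example (illustrative)
///
/// As a rough sketch only -- the MIR below is hypothetical and heavily
/// simplified -- consider a diamond in which both branches assign a variable:
///
///     bb.1:                       bb.2:
///       $rax = MOV32ri 1            $rax = MOV32ri 2
///       DBG_VALUE $rax, "x"         DBG_VALUE $rax, "x"
///     bb.3:
///       ; "x" remains in scope here
///
/// The machine value location problem concludes that the live-in value of
/// $rax in bb.3 is a PHI of the two defs (an "mphi", see Terminology below).
/// The variable value problem concludes that "x" should, on entry to bb.3,
/// have the value of that same PHI. Because a machine PHI of the value really
/// is resident (in $rax), the two maps can be married up and a DBG_VALUE of
/// $rax for "x" is emitted at the start of bb.3. Had the branches left the
/// value in two different registers with no common join, there would be no
/// machine PHI to use, and the location would be dropped instead.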
///
/// ### Terminology
///
/// A machine location is a register or spill slot, a value is something that's
/// defined by an instruction or PHI node, while a variable value is the value
/// assigned to a variable. A variable location is a machine location that must
/// contain the appropriate variable value. A value that is a PHI node is
/// occasionally called an mphi.
///
/// The first SSA problem is the "machine value location" problem,
/// because we're determining which machine locations contain which values.
/// The "locations" are constant: what's unknown is what value they contain.
///
/// The second SSA problem (the one for variables) is the "variable value
/// problem", because it's determining what values a variable has, rather than
/// what location those values are placed in.
///
/// TODO:
///   Overlapping fragments
///   Entry values
///   Add back DEBUG statements for debugging this
///   Collect statistics
///
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/LexicalScopes.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineInstrBundle.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/GenericIteratedDominanceFrontier.h"
#include "llvm/Support/TypeSize.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/SSAUpdaterImpl.h"
#include <algorithm>
#include <cassert>
#include <climits>
#include <cstdint>
#include <functional>
#include <queue>
#include <tuple>
#include <utility>
#include <vector>

#include "InstrRefBasedImpl.h"
#include "LiveDebugValues.h"
#include <optional>

using namespace llvm;
using namespace LiveDebugValues;

// SSAUpdaterImpl sets DEBUG_TYPE, change it.
#undef DEBUG_TYPE
#define DEBUG_TYPE "livedebugvalues"

// Act more like the VarLoc implementation, by propagating some locations too
// far and ignoring some transfers.
static cl::opt<bool> EmulateOldLDV("emulate-old-livedebugvalues", cl::Hidden,
                                   cl::desc("Act like old LiveDebugValues did"),
                                   cl::init(false));

// Limit for the maximum number of stack slots we should track, past which we
// will ignore any spills. InstrRefBasedLDV gathers detailed information on all
// stack slots which leads to high memory consumption, and in some scenarios
// (such as asan with very many locals) the working set of the function can be
// very large, causing many spills. In these scenarios, it is very unlikely that
// the developer has hundreds of variables live at the same time that they're
// carefully thinking about -- instead, they probably autogenerated the code.
// When this happens, gracefully stop tracking excess spill slots, rather than
// consuming all the developer's memory.
static cl::opt<unsigned>
    StackWorkingSetLimit("livedebugvalues-max-stack-slots", cl::Hidden,
                         cl::desc("livedebugvalues-stack-ws-limit"),
                         cl::init(250));

DbgOpID DbgOpID::UndefID = DbgOpID(0xffffffff);

/// Tracker for converting machine value locations and variable values into
/// variable locations (the output of LiveDebugValues), recorded as DBG_VALUEs
/// specifying block live-in locations and transfers within blocks.
///
/// Operating on a per-block basis, this class takes a (pre-loaded) MLocTracker
/// and must be initialized with the set of variable values that are live-in to
/// the block. The caller then repeatedly calls process(). TransferTracker picks
/// out variable locations for the live-in variable values (if there _is_ a
/// location) and creates the corresponding DBG_VALUEs. Then, as the block is
/// stepped through, transfers of values between machine locations are
/// identified and if profitable, a DBG_VALUE created.
///
/// This is where debug use-before-defs would be resolved: a variable with an
/// unavailable value could materialize in the middle of a block, when the
/// value becomes available. Or, we could detect clobbers and re-specify the
/// variable in a backup location. (XXX these are unimplemented).
class TransferTracker {
public:
  const TargetInstrInfo *TII;
  const TargetLowering *TLI;
  /// This machine location tracker is assumed to always contain the up-to-date
  /// value mapping for all machine locations. TransferTracker only reads
  /// information from it. (XXX make it const?)
  MLocTracker *MTracker;
  MachineFunction &MF;
  bool ShouldEmitDebugEntryValues;

  /// Record of all changes in variable locations at a block position. Awkwardly
  /// we allow inserting either before or after the point: MBB != nullptr
  /// indicates it's before, otherwise after.
  struct Transfer {
    MachineBasicBlock::instr_iterator Pos; /// Position to insert DBG_VALUEs
    MachineBasicBlock *MBB;                /// non-null if we should insert before Pos.
    SmallVector<MachineInstr *, 4> Insts;  /// Vector of DBG_VALUEs to insert.
  };

  /// Stores the resolved operands (machine locations and constants) and
  /// qualifying meta-information needed to construct a concrete DBG_VALUE-like
  /// instruction.
  struct ResolvedDbgValue {
    SmallVector<ResolvedDbgOp> Ops;
    DbgValueProperties Properties;

    ResolvedDbgValue(SmallVectorImpl<ResolvedDbgOp> &Ops,
                     DbgValueProperties Properties)
        : Ops(Ops.begin(), Ops.end()), Properties(Properties) {}

    /// Returns all the LocIdx values used in this struct, in the order in which
    /// they appear as operands in the debug value; may contain duplicates.
    auto loc_indices() const {
      return map_range(
          make_filter_range(
              Ops, [](const ResolvedDbgOp &Op) { return !Op.IsConst; }),
          [](const ResolvedDbgOp &Op) { return Op.Loc; });
    }
  };

  /// Collection of transfers (DBG_VALUEs) to be inserted.
  SmallVector<Transfer, 32> Transfers;

  /// Local cache of what-value-is-in-what-LocIdx. Used to identify differences
  /// between TransferTracker's view of variable locations and MLocTracker's.
  /// For example, MLocTracker observes all clobbers, but TransferTracker lazily
  /// does not.
  SmallVector<ValueIDNum, 32> VarLocs;

  /// Map from LocIdxes to which DebugVariables are based in that location.
  /// Maintained while stepping through the block. Not accurate if
  /// VarLocs[Idx] != MTracker->LocIdxToIDNum[Idx].
  DenseMap<LocIdx, SmallSet<DebugVariable, 4>> ActiveMLocs;

  /// Map from DebugVariable to its current location and qualifying meta
  /// information. To be used in conjunction with ActiveMLocs to construct
  /// enough information for the DBG_VALUEs for a particular LocIdx.
  DenseMap<DebugVariable, ResolvedDbgValue> ActiveVLocs;

  /// Temporary cache of DBG_VALUEs to be entered into the Transfers collection.
  SmallVector<MachineInstr *, 4> PendingDbgValues;

  /// Record of a use-before-def: created when a value that's live-in to the
  /// current block isn't available in any machine location, but it will be
  /// defined in this block.
  struct UseBeforeDef {
    /// Value of this variable, def'd in block.
    SmallVector<DbgOp> Values;
    /// Identity of this variable.
    DebugVariable Var;
    /// Additional variable properties.
    DbgValueProperties Properties;
    UseBeforeDef(ArrayRef<DbgOp> Values, const DebugVariable &Var,
                 const DbgValueProperties &Properties)
        : Values(Values.begin(), Values.end()), Var(Var),
          Properties(Properties) {}
  };

  /// Map from instruction index (within the block) to the set of UseBeforeDefs
  /// that become defined at that instruction.
  DenseMap<unsigned, SmallVector<UseBeforeDef, 1>> UseBeforeDefs;

  /// The set of variables that are in UseBeforeDefs and can become a location
  /// once the relevant value is defined. An element being erased from this
  /// collection prevents the use-before-def materializing.
  DenseSet<DebugVariable> UseBeforeDefVariables;

  const TargetRegisterInfo &TRI;
  const BitVector &CalleeSavedRegs;

  TransferTracker(const TargetInstrInfo *TII, MLocTracker *MTracker,
                  MachineFunction &MF, const TargetRegisterInfo &TRI,
                  const BitVector &CalleeSavedRegs, const TargetPassConfig &TPC)
      : TII(TII), MTracker(MTracker), MF(MF), TRI(TRI),
        CalleeSavedRegs(CalleeSavedRegs) {
    TLI = MF.getSubtarget().getTargetLowering();
    auto &TM = TPC.getTM<TargetMachine>();
    ShouldEmitDebugEntryValues = TM.Options.ShouldEmitDebugEntryValues();
  }

  bool isCalleeSaved(LocIdx L) const {
    unsigned Reg = MTracker->LocIdxToLocID[L];
    if (Reg >= MTracker->NumRegs)
      return false;
    for (MCRegAliasIterator RAI(Reg, &TRI, true); RAI.isValid(); ++RAI)
      if (CalleeSavedRegs.test(*RAI))
        return true;
    return false;
  };

  // An estimate of the expected lifespan of values at a machine location, with
  // a greater value corresponding to a longer expected lifespan, i.e. spill
  // slots generally live longer than callee-saved registers which generally
  // live longer than non-callee-saved registers. The minimum value of 0
  // corresponds to an illegal location that cannot have a "lifespan" at all.
  enum class LocationQuality : unsigned char {
    Illegal = 0,
    Register,
    CalleeSavedRegister,
    SpillSlot,
    Best = SpillSlot
  };

  class LocationAndQuality {
    unsigned Location : 24;
    unsigned Quality : 8;

  public:
    LocationAndQuality() : Location(0), Quality(0) {}
    LocationAndQuality(LocIdx L, LocationQuality Q)
        : Location(L.asU64()), Quality(static_cast<unsigned>(Q)) {}
    LocIdx getLoc() const {
      if (!Quality)
        return LocIdx::MakeIllegalLoc();
      return LocIdx(Location);
    }
    LocationQuality getQuality() const { return LocationQuality(Quality); }
    bool isIllegal() const { return !Quality; }
    bool isBest() const { return getQuality() == LocationQuality::Best; }
  };

  // Returns the LocationQuality for the location L iff the quality of L is
  // strictly greater than the provided minimum quality.
  std::optional<LocationQuality>
  getLocQualityIfBetter(LocIdx L, LocationQuality Min) const {
    if (L.isIllegal())
      return std::nullopt;
    if (Min >= LocationQuality::SpillSlot)
      return std::nullopt;
    if (MTracker->isSpill(L))
      return LocationQuality::SpillSlot;
    if (Min >= LocationQuality::CalleeSavedRegister)
      return std::nullopt;
    if (isCalleeSaved(L))
      return LocationQuality::CalleeSavedRegister;
    if (Min >= LocationQuality::Register)
      return std::nullopt;
    return LocationQuality::Register;
  }

  /// For a variable \p Var with the live-in value \p Value, attempts to resolve
  /// the DbgValue to a concrete DBG_VALUE, emitting that value and loading the
  /// tracking information to track Var throughout the block.
  /// \p ValueToLoc is a map containing the best known location for every
  /// ValueIDNum that Value may use.
  /// \p MBB is the basic block that we are loading the live-in value for.
  /// \p DbgOpStore is the map containing the DbgOpID->DbgOp mapping needed to
  /// determine the values used by Value.
  void loadVarInloc(MachineBasicBlock &MBB, DbgOpIDMap &DbgOpStore,
                    const DenseMap<ValueIDNum, LocationAndQuality> &ValueToLoc,
                    DebugVariable Var, DbgValue Value) {
    SmallVector<DbgOp> DbgOps;
    SmallVector<ResolvedDbgOp> ResolvedDbgOps;
    bool IsValueValid = true;
    unsigned LastUseBeforeDef = 0;

    // If every value used by the incoming DbgValue is available at block
    // entry, ResolvedDbgOps will contain the machine locations/constants for
    // those values and will be used to emit a debug location.
    // If one or more values are not yet available, but will all be defined in
    // this block, then LastUseBeforeDef will track the instruction index in
    // this BB at which the last of those values is defined, and DbgOps will
    // contain the values that we will emit when we reach that instruction.
    // If one or more values are undef or not available throughout this block,
    // and we can't recover as an entry value, we set IsValueValid=false and
    // skip this variable.
    for (DbgOpID ID : Value.getDbgOpIDs()) {
      DbgOp Op = DbgOpStore.find(ID);
      DbgOps.push_back(Op);
      if (ID.isUndef()) {
        IsValueValid = false;
        break;
      }
      if (ID.isConst()) {
        ResolvedDbgOps.push_back(Op.MO);
        continue;
      }

      // If the value has no location, we can't make a variable location.
      const ValueIDNum &Num = Op.ID;
      auto ValuesPreferredLoc = ValueToLoc.find(Num);
      if (ValuesPreferredLoc->second.isIllegal()) {
        // If it's a def that occurs in this block, register it as a
        // use-before-def to be resolved as we step through the block.
        // Continue processing values so that we add any other UseBeforeDef
        // entries needed for later.
        if (Num.getBlock() == (unsigned)MBB.getNumber() && !Num.isPHI()) {
          LastUseBeforeDef = std::max(LastUseBeforeDef,
                                      static_cast<unsigned>(Num.getInst()));
          continue;
        }
        recoverAsEntryValue(Var, Value.Properties, Num);
        IsValueValid = false;
        break;
      }

      // Defer modifying ActiveVLocs until after we've confirmed we have a
      // live range.
      LocIdx M = ValuesPreferredLoc->second.getLoc();
      ResolvedDbgOps.push_back(M);
    }

    // If we cannot produce a valid value for the LiveIn value within this
    // block, skip this variable.
    if (!IsValueValid)
      return;

    // Add UseBeforeDef entry for the last value to be defined in this block.
    if (LastUseBeforeDef) {
      addUseBeforeDef(Var, Value.Properties, DbgOps, LastUseBeforeDef);
      return;
    }

    // The LiveIn value is available at block entry, begin tracking and record
    // the transfer.
    for (const ResolvedDbgOp &Op : ResolvedDbgOps)
      if (!Op.IsConst)
        ActiveMLocs[Op.Loc].insert(Var);
    auto NewValue = ResolvedDbgValue{ResolvedDbgOps, Value.Properties};
    auto Result = ActiveVLocs.insert(std::make_pair(Var, NewValue));
    if (!Result.second)
      Result.first->second = NewValue;
    PendingDbgValues.push_back(
        MTracker->emitLoc(ResolvedDbgOps, Var, Value.Properties));
  }

  /// Load object with live-in variable values. \p mlocs contains the live-in
  /// values in each machine location, while \p vlocs the live-in variable
  /// values. This method picks variable locations for the live-in variables,
  /// creates DBG_VALUEs and puts them in #Transfers, then prepares the other
  /// object fields to track variable locations as we step through the block.
  /// FIXME: could just examine mloctracker instead of passing in \p mlocs?
  void
  loadInlocs(MachineBasicBlock &MBB, ValueTable &MLocs, DbgOpIDMap &DbgOpStore,
             const SmallVectorImpl<std::pair<DebugVariable, DbgValue>> &VLocs,
             unsigned NumLocs) {
    ActiveMLocs.clear();
    ActiveVLocs.clear();
    VarLocs.clear();
    VarLocs.reserve(NumLocs);
    UseBeforeDefs.clear();
    UseBeforeDefVariables.clear();

    // Map of the preferred location for each value.
    DenseMap<ValueIDNum, LocationAndQuality> ValueToLoc;

    // Initialize the preferred-location map with illegal locations, to be
    // filled in later.
    for (const auto &VLoc : VLocs)
      if (VLoc.second.Kind == DbgValue::Def)
        for (DbgOpID OpID : VLoc.second.getDbgOpIDs())
          if (!OpID.ID.IsConst)
            ValueToLoc.insert({DbgOpStore.find(OpID).ID, LocationAndQuality()});

    ActiveMLocs.reserve(VLocs.size());
    ActiveVLocs.reserve(VLocs.size());

    // Produce a map of value numbers to the current machine locs they live
    // in. When emulating VarLocBasedImpl, there should only be one
    // location; when not, we get to pick.
    for (auto Location : MTracker->locations()) {
      LocIdx Idx = Location.Idx;
      ValueIDNum &VNum = MLocs[Idx.asU64()];
      if (VNum == ValueIDNum::EmptyValue)
        continue;
      VarLocs.push_back(VNum);

      // Is there a variable that wants a location for this value? If not, skip.
      auto VIt = ValueToLoc.find(VNum);
      if (VIt == ValueToLoc.end())
        continue;

      auto &Previous = VIt->second;
      // If this is the first location with that value, pick it. Otherwise,
      // consider whether it's a "longer term" location.
      std::optional<LocationQuality> ReplacementQuality =
          getLocQualityIfBetter(Idx, Previous.getQuality());
      if (ReplacementQuality)
        Previous = LocationAndQuality(Idx, *ReplacementQuality);
    }

    // Now map variables to their picked LocIdxes.
    for (const auto &Var : VLocs) {
      loadVarInloc(MBB, DbgOpStore, ValueToLoc, Var.first, Var.second);
    }
    flushDbgValues(MBB.begin(), &MBB);
  }

  /// Record that \p Var is given by the values in \p DbgOps, which become
  /// available later in the function.
  void addUseBeforeDef(const DebugVariable &Var,
                       const DbgValueProperties &Properties,
                       const SmallVectorImpl<DbgOp> &DbgOps, unsigned Inst) {
    UseBeforeDefs[Inst].emplace_back(DbgOps, Var, Properties);
    UseBeforeDefVariables.insert(Var);
  }

  /// After the instruction at index \p Inst and position \p pos has been
  /// processed, check whether it defines a variable value in a use-before-def.
  /// If so, and the variable value hasn't changed since the start of the
  /// block, create a DBG_VALUE.
  void checkInstForNewValues(unsigned Inst, MachineBasicBlock::iterator pos) {
    auto MIt = UseBeforeDefs.find(Inst);
    if (MIt == UseBeforeDefs.end())
      return;

    // Map of values to the locations that store them for every value used by
    // the variables that may have become available.
    SmallDenseMap<ValueIDNum, LocationAndQuality> ValueToLoc;

    // Populate ValueToLoc with illegal default mappings for every value used by
    // any UseBeforeDef variables for this instruction.
    for (auto &Use : MIt->second) {
      if (!UseBeforeDefVariables.count(Use.Var))
        continue;

      for (DbgOp &Op : Use.Values) {
        assert(!Op.isUndef() && "UseBeforeDef erroneously created for a "
                                "DbgValue with undef values.");
        if (Op.IsConst)
          continue;

        ValueToLoc.insert({Op.ID, LocationAndQuality()});
      }
    }

    // Exit early if we have no DbgValues to produce.
    if (ValueToLoc.empty())
      return;

    // Determine the best location for each desired value.
    for (auto Location : MTracker->locations()) {
      LocIdx Idx = Location.Idx;
      ValueIDNum &LocValueID = Location.Value;

      // Is there a variable that wants a location for this value? If not, skip.
      auto VIt = ValueToLoc.find(LocValueID);
      if (VIt == ValueToLoc.end())
        continue;

      auto &Previous = VIt->second;
      // If this is the first location with that value, pick it. Otherwise,
      // consider whether it's a "longer term" location.
      std::optional<LocationQuality> ReplacementQuality =
          getLocQualityIfBetter(Idx, Previous.getQuality());
      if (ReplacementQuality)
        Previous = LocationAndQuality(Idx, *ReplacementQuality);
    }

    // Using the map of values to locations, produce a final set of values for
    // this variable.
    for (auto &Use : MIt->second) {
      if (!UseBeforeDefVariables.count(Use.Var))
        continue;

      SmallVector<ResolvedDbgOp> DbgOps;

      for (DbgOp &Op : Use.Values) {
        if (Op.IsConst) {
          DbgOps.push_back(Op.MO);
          continue;
        }
        LocIdx NewLoc = ValueToLoc.find(Op.ID)->second.getLoc();
        if (NewLoc.isIllegal())
          break;
        DbgOps.push_back(NewLoc);
      }

      // If at least one value used by this debug value is no longer available,
      // i.e. one of the values was killed before we finished defining all of
      // the values used by this variable, discard.
      if (DbgOps.size() != Use.Values.size())
        continue;

      // Otherwise, we're good to go.
      PendingDbgValues.push_back(
          MTracker->emitLoc(DbgOps, Use.Var, Use.Properties));
    }
    flushDbgValues(pos, nullptr);
  }

  /// Helper to move created DBG_VALUEs into Transfers collection.
  void flushDbgValues(MachineBasicBlock::iterator Pos, MachineBasicBlock *MBB) {
    if (PendingDbgValues.size() == 0)
      return;

    // Pick out the instruction start position.
    MachineBasicBlock::instr_iterator BundleStart;
    if (MBB && Pos == MBB->begin())
      BundleStart = MBB->instr_begin();
    else
      BundleStart = getBundleStart(Pos->getIterator());

    Transfers.push_back({BundleStart, MBB, PendingDbgValues});
    PendingDbgValues.clear();
  }

  bool isEntryValueVariable(const DebugVariable &Var,
                            const DIExpression *Expr) const {
    if (!Var.getVariable()->isParameter())
      return false;

    if (Var.getInlinedAt())
      return false;

    if (Expr->getNumElements() > 0 && !Expr->isDeref())
      return false;

    return true;
  }

  bool isEntryValueValue(const ValueIDNum &Val) const {
    // Must be in entry block (block number zero), and be a PHI / live-in value.
    if (Val.getBlock() || !Val.isPHI())
      return false;

    // Entry values must enter in a register.
    if (MTracker->isSpill(Val.getLoc()))
      return false;

    Register SP = TLI->getStackPointerRegisterToSaveRestore();
    Register FP = TRI.getFrameRegister(MF);
    Register Reg = MTracker->LocIdxToLocID[Val.getLoc()];
    return Reg != SP && Reg != FP;
  }

  bool recoverAsEntryValue(const DebugVariable &Var,
                           const DbgValueProperties &Prop,
                           const ValueIDNum &Num) {
    // Is this variable location a candidate to be an entry value? First,
    // should we be trying this at all?
    if (!ShouldEmitDebugEntryValues)
      return false;

    const DIExpression *DIExpr = Prop.DIExpr;

    // We don't currently emit entry values for DBG_VALUE_LISTs.
    if (Prop.IsVariadic) {
      // If this debug value can be converted to be non-variadic, then do so;
      // otherwise give up.
      auto NonVariadicExpression =
          DIExpression::convertToNonVariadicExpression(DIExpr);
      if (!NonVariadicExpression)
        return false;
      DIExpr = *NonVariadicExpression;
    }

    // Is the variable appropriate for entry values (i.e., is a parameter).
    if (!isEntryValueVariable(Var, DIExpr))
      return false;

    // Is the value assigned to this variable still the entry value?
    if (!isEntryValueValue(Num))
      return false;

    // Emit a variable location using an entry value expression.
    DIExpression *NewExpr =
        DIExpression::prepend(DIExpr, DIExpression::EntryValue);
    Register Reg = MTracker->LocIdxToLocID[Num.getLoc()];
    MachineOperand MO = MachineOperand::CreateReg(Reg, false);

    PendingDbgValues.push_back(
        emitMOLoc(MO, Var, {NewExpr, Prop.Indirect, false}));
    return true;
  }

  /// Change a variable value after encountering a DBG_VALUE inside a block.
  void redefVar(const MachineInstr &MI) {
    DebugVariable Var(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
    DbgValueProperties Properties(MI);

    // Ignore non-register locations, we don't transfer those.
    if (MI.isUndefDebugValue() ||
        all_of(MI.debug_operands(),
               [](const MachineOperand &MO) { return !MO.isReg(); })) {
      auto It = ActiveVLocs.find(Var);
      if (It != ActiveVLocs.end()) {
        for (LocIdx Loc : It->second.loc_indices())
          ActiveMLocs[Loc].erase(Var);
        ActiveVLocs.erase(It);
      }
      // Any use-before-defs no longer apply.
      UseBeforeDefVariables.erase(Var);
      return;
    }

    SmallVector<ResolvedDbgOp> NewLocs;
    for (const MachineOperand &MO : MI.debug_operands()) {
      if (MO.isReg()) {
        // Any undef regs have already been filtered out above.
        Register Reg = MO.getReg();
        LocIdx NewLoc = MTracker->getRegMLoc(Reg);
        NewLocs.push_back(NewLoc);
      } else {
        NewLocs.push_back(MO);
      }
    }

    redefVar(MI, Properties, NewLocs);
  }

  /// Handle a change in variable location within a block. Terminate the
  /// variable's current location, and record the value it now refers to, so
  /// that we can detect location transfers later on.
  void redefVar(const MachineInstr &MI, const DbgValueProperties &Properties,
                SmallVectorImpl<ResolvedDbgOp> &NewLocs) {
    DebugVariable Var(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
    // Any use-before-defs no longer apply.
    UseBeforeDefVariables.erase(Var);

    // Erase any previous location.
    auto It = ActiveVLocs.find(Var);
    if (It != ActiveVLocs.end()) {
      for (LocIdx Loc : It->second.loc_indices())
        ActiveMLocs[Loc].erase(Var);
    }

    // If there _is_ no new location, all we had to do was erase.
    if (NewLocs.empty()) {
      if (It != ActiveVLocs.end())
        ActiveVLocs.erase(It);
      return;
    }

    SmallVector<std::pair<LocIdx, DebugVariable>> LostMLocs;
    for (ResolvedDbgOp &Op : NewLocs) {
      if (Op.IsConst)
        continue;

      LocIdx NewLoc = Op.Loc;

      // Check whether our local copy of values-by-location in #VarLocs is out
      // of date. Wipe old tracking data for the location if it's been clobbered
      // in the meantime.
      if (MTracker->readMLoc(NewLoc) != VarLocs[NewLoc.asU64()]) {
        for (const auto &P : ActiveMLocs[NewLoc]) {
          auto LostVLocIt = ActiveVLocs.find(P);
          if (LostVLocIt != ActiveVLocs.end()) {
            for (LocIdx Loc : LostVLocIt->second.loc_indices()) {
              // Every active variable mapping for NewLoc will be cleared, no
              // need to track individual variables.
              if (Loc == NewLoc)
                continue;
              LostMLocs.emplace_back(Loc, P);
            }
          }
          ActiveVLocs.erase(P);
        }
        for (const auto &LostMLoc : LostMLocs)
          ActiveMLocs[LostMLoc.first].erase(LostMLoc.second);
        LostMLocs.clear();
        It = ActiveVLocs.find(Var);
        ActiveMLocs[NewLoc.asU64()].clear();
        VarLocs[NewLoc.asU64()] = MTracker->readMLoc(NewLoc);
      }

      ActiveMLocs[NewLoc].insert(Var);
    }

    if (It == ActiveVLocs.end()) {
      ActiveVLocs.insert(
          std::make_pair(Var, ResolvedDbgValue(NewLocs, Properties)));
    } else {
      It->second.Ops.assign(NewLocs);
      It->second.Properties = Properties;
    }
  }

  /// Account for a location \p mloc being clobbered. Examine the variable
  /// locations that will be terminated, and try to recover them by using
  /// another location. Optionally, given \p MakeUndef, emit a DBG_VALUE to
  /// explicitly terminate a location if it can't be recovered.
  void clobberMloc(LocIdx MLoc, MachineBasicBlock::iterator Pos,
                   bool MakeUndef = true) {
    auto ActiveMLocIt = ActiveMLocs.find(MLoc);
    if (ActiveMLocIt == ActiveMLocs.end())
      return;

    // What was the old variable value?
    ValueIDNum OldValue = VarLocs[MLoc.asU64()];
    clobberMloc(MLoc, OldValue, Pos, MakeUndef);
  }
  /// Overload that takes an explicit value \p OldValue for when the value in
  /// \p MLoc has changed and the TransferTracker's locations have not been
  /// updated yet.
  void clobberMloc(LocIdx MLoc, ValueIDNum OldValue,
                   MachineBasicBlock::iterator Pos, bool MakeUndef = true) {
    auto ActiveMLocIt = ActiveMLocs.find(MLoc);
    if (ActiveMLocIt == ActiveMLocs.end())
      return;

    VarLocs[MLoc.asU64()] = ValueIDNum::EmptyValue;

    // Examine the remaining variable locations: if we can find the same value
    // again, we can recover the location.
    std::optional<LocIdx> NewLoc;
    for (auto Loc : MTracker->locations())
      if (Loc.Value == OldValue)
        NewLoc = Loc.Idx;

    // If there is no location, and we weren't asked to make the variable
    // explicitly undef, then stop here.
    if (!NewLoc && !MakeUndef) {
      // Try and recover a few more locations with entry values.
      for (const auto &Var : ActiveMLocIt->second) {
        auto &Prop = ActiveVLocs.find(Var)->second.Properties;
        recoverAsEntryValue(Var, Prop, OldValue);
      }
      flushDbgValues(Pos, nullptr);
      return;
    }

    // Examine all the variables based on this location.
    DenseSet<DebugVariable> NewMLocs;
    // If no new location has been found, every variable that depends on this
    // MLoc is dead, so end their existing MLoc->Var mappings as well.
    SmallVector<std::pair<LocIdx, DebugVariable>> LostMLocs;
    for (const auto &Var : ActiveMLocIt->second) {
      auto ActiveVLocIt = ActiveVLocs.find(Var);
      // Re-state the variable location: if there's no replacement then NewLoc
      // is std::nullopt and a $noreg DBG_VALUE will be created. Otherwise, a
      // DBG_VALUE identifying the alternative location will be emitted.
      const DbgValueProperties &Properties = ActiveVLocIt->second.Properties;

      // Produce the new list of debug ops - an empty list if no new location
      // was found, or the existing list with the substitution MLoc -> NewLoc
      // otherwise.
      SmallVector<ResolvedDbgOp> DbgOps;
      if (NewLoc) {
        ResolvedDbgOp OldOp(MLoc);
        ResolvedDbgOp NewOp(*NewLoc);
        // Insert illegal ops to overwrite afterwards.
        DbgOps.insert(DbgOps.begin(), ActiveVLocIt->second.Ops.size(),
                      ResolvedDbgOp(LocIdx::MakeIllegalLoc()));
        replace_copy(ActiveVLocIt->second.Ops, DbgOps.begin(), OldOp, NewOp);
      }

      PendingDbgValues.push_back(MTracker->emitLoc(DbgOps, Var, Properties));

      // Update machine locations <=> variable locations maps. Defer updating
      // ActiveMLocs to avoid invalidating the ActiveMLocIt iterator.
      if (!NewLoc) {
        for (LocIdx Loc : ActiveVLocIt->second.loc_indices()) {
          if (Loc != MLoc)
            LostMLocs.emplace_back(Loc, Var);
        }
        ActiveVLocs.erase(ActiveVLocIt);
      } else {
        ActiveVLocIt->second.Ops = DbgOps;
        NewMLocs.insert(Var);
      }
    }

    // Remove variables from ActiveMLocs if they no longer use any other MLocs
    // due to being killed by this clobber.
    for (auto &LocVarIt : LostMLocs) {
      auto LostMLocIt = ActiveMLocs.find(LocVarIt.first);
      assert(LostMLocIt != ActiveMLocs.end() &&
             "Variable was using this MLoc, but ActiveMLocs[MLoc] has no "
             "entries?");
      LostMLocIt->second.erase(LocVarIt.second);
    }

    // We lazily track what locations have which values; if we've found a new
    // location for the clobbered value, remember it.
    if (NewLoc)
      VarLocs[NewLoc->asU64()] = OldValue;

    flushDbgValues(Pos, nullptr);

    // Commit ActiveMLoc changes.
    ActiveMLocIt->second.clear();
    if (!NewMLocs.empty())
      for (auto &Var : NewMLocs)
        ActiveMLocs[*NewLoc].insert(Var);
  }

  /// Transfer variables based on \p Src to be based on \p Dst. This handles
  /// both register copies as well as spills and restores. Creates DBG_VALUEs
  /// describing the movement.
  void transferMlocs(LocIdx Src, LocIdx Dst, MachineBasicBlock::iterator Pos) {
    // Does Src still contain the value num we expect? If not, it's been
    // clobbered in the meantime, and our variable locations are stale.
    if (VarLocs[Src.asU64()] != MTracker->readMLoc(Src))
      return;

    // assert(ActiveMLocs[Dst].size() == 0);
    //^^^ Legitimate scenario on account of un-clobbered slot being assigned to?

    // Move set of active variables from one location to another.
    auto MovingVars = ActiveMLocs[Src];
    ActiveMLocs[Dst].insert(MovingVars.begin(), MovingVars.end());
    VarLocs[Dst.asU64()] = VarLocs[Src.asU64()];

    // For each variable based on Src, create a location at Dst.
    ResolvedDbgOp SrcOp(Src);
    ResolvedDbgOp DstOp(Dst);
    for (const auto &Var : MovingVars) {
      auto ActiveVLocIt = ActiveVLocs.find(Var);
      assert(ActiveVLocIt != ActiveVLocs.end());

      // Update all instances of Src in the variable's tracked values to Dst.
      std::replace(ActiveVLocIt->second.Ops.begin(),
                   ActiveVLocIt->second.Ops.end(), SrcOp, DstOp);

      MachineInstr *MI = MTracker->emitLoc(ActiveVLocIt->second.Ops, Var,
                                           ActiveVLocIt->second.Properties);
      PendingDbgValues.push_back(MI);
    }
    ActiveMLocs[Src].clear();
    flushDbgValues(Pos, nullptr);

    // XXX XXX XXX "pretend to be old LDV" means dropping all tracking data
    // about the old location.
    if (EmulateOldLDV)
      VarLocs[Src.asU64()] = ValueIDNum::EmptyValue;
  }

  MachineInstrBuilder emitMOLoc(const MachineOperand &MO,
                                const DebugVariable &Var,
                                const DbgValueProperties &Properties) {
    DebugLoc DL = DILocation::get(Var.getVariable()->getContext(), 0, 0,
                                  Var.getVariable()->getScope(),
                                  const_cast<DILocation *>(Var.getInlinedAt()));
    auto MIB = BuildMI(MF, DL, TII->get(TargetOpcode::DBG_VALUE));
    MIB.add(MO);
    if (Properties.Indirect)
      MIB.addImm(0);
    else
      MIB.addReg(0);
    MIB.addMetadata(Var.getVariable());
    MIB.addMetadata(Properties.DIExpr);
    return MIB;
  }
};

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//

ValueIDNum ValueIDNum::EmptyValue = {UINT_MAX, UINT_MAX, UINT_MAX};
ValueIDNum ValueIDNum::TombstoneValue = {UINT_MAX, UINT_MAX, UINT_MAX - 1};

#ifndef NDEBUG
void ResolvedDbgOp::dump(const MLocTracker *MTrack) const {
  if (IsConst) {
    dbgs() << MO;
  } else {
    dbgs() << MTrack->LocIdxToName(Loc);
  }
}
void DbgOp::dump(const MLocTracker *MTrack) const {
  if (IsConst) {
    dbgs() << MO;
  } else if (!isUndef()) {
    dbgs() << MTrack->IDAsString(ID);
  }
}
void DbgOpID::dump(const MLocTracker *MTrack, const DbgOpIDMap *OpStore) const {
  if (!OpStore) {
    dbgs() << "ID(" << asU32() << ")";
  } else {
    OpStore->find(*this).dump(MTrack);
  }
}
void DbgValue::dump(const MLocTracker *MTrack,
                    const DbgOpIDMap *OpStore) const {
  if (Kind == NoVal) {
    dbgs() << "NoVal(" << BlockNo << ")";
  } else if (Kind == VPHI || Kind == Def) {
    if (Kind == VPHI)
      dbgs() << "VPHI(" << BlockNo << ",";
    else
      dbgs() << "Def(";
    for (unsigned Idx = 0; Idx < getDbgOpIDs().size(); ++Idx) {
      // Print a separator before every operand after the first.
      if (Idx != 0)
        dbgs() << ",";
      getDbgOpID(Idx).dump(MTrack, OpStore);
    }
    dbgs() << ")";
  }
  if (Properties.Indirect)
    dbgs() << " indir";
  if (Properties.DIExpr)
    dbgs() << " " << *Properties.DIExpr;
}
#endif

MLocTracker::MLocTracker(MachineFunction &MF, const TargetInstrInfo &TII,
                         const TargetRegisterInfo &TRI,
                         const TargetLowering &TLI)
    : MF(MF), TII(TII), TRI(TRI), TLI(TLI),
      LocIdxToIDNum(ValueIDNum::EmptyValue), LocIdxToLocID(0) {
  NumRegs = TRI.getNumRegs();
  reset();
  LocIDToLocIdx.resize(NumRegs, LocIdx::MakeIllegalLoc());
  assert(NumRegs < (1u << NUM_LOC_BITS)); // Detect bit packing failure

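  // A brief sketch of the location ID space set up below (the concrete
  // numbers are illustrative only): IDs 0 .. NumRegs-1 name physical
  // registers, and each tracked spill slot is later expanded into
  // NumSlotIdxes consecutive IDs past NumRegs, one per (size, offset)
  // position within the slot (see getOrTrackSpillLoc and getSpillIDWithIdx).
  // For example, if NumRegs were 200 and NumSlotIdxes were 20, LocIdxToName
  // would decode ID 247 as spill slot (247 - 200) / 20 == 2, with the
  // remainder selecting the in-slot position.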
  // Always track SP. This avoids the implicit clobbering caused by regmasks
  // from affecting its values. (LiveDebugValues disbelieves calls and
  // regmasks that claim to clobber SP).
  Register SP = TLI.getStackPointerRegisterToSaveRestore();
  if (SP) {
    unsigned ID = getLocID(SP);
    (void)lookupOrTrackRegister(ID);

    for (MCRegAliasIterator RAI(SP, &TRI, true); RAI.isValid(); ++RAI)
      SPAliases.insert(*RAI);
  }

  // Build some common stack positions -- full registers being spilt to the
  // stack.
  StackSlotIdxes.insert({{8, 0}, 0});
  StackSlotIdxes.insert({{16, 0}, 1});
  StackSlotIdxes.insert({{32, 0}, 2});
  StackSlotIdxes.insert({{64, 0}, 3});
  StackSlotIdxes.insert({{128, 0}, 4});
  StackSlotIdxes.insert({{256, 0}, 5});
  StackSlotIdxes.insert({{512, 0}, 6});

  // Traverse all the subregister idxes, and ensure there's an index for them.
  // Duplicates are no problem: we're interested in their position in the
  // stack slot, we don't want to type the slot.
  for (unsigned int I = 1; I < TRI.getNumSubRegIndices(); ++I) {
    unsigned Size = TRI.getSubRegIdxSize(I);
    unsigned Offs = TRI.getSubRegIdxOffset(I);
    unsigned Idx = StackSlotIdxes.size();

    // Some subregs have -1, -2 and so forth fed into their fields, to mean
    // special backend things. Ignore those.
    if (Size > 60000 || Offs > 60000)
      continue;

    StackSlotIdxes.insert({{Size, Offs}, Idx});
  }

  // There may also be strange register class sizes (think x86 fp80s).
  for (const TargetRegisterClass *RC : TRI.regclasses()) {
    unsigned Size = TRI.getRegSizeInBits(*RC);

    // We might see special reserved values as sizes, and classes for other
    // stuff the machine tries to model. If it's more than 512 bits, then it
    // is very unlikely to be a register that can be spilt.
    if (Size > 512)
      continue;

    unsigned Idx = StackSlotIdxes.size();
    StackSlotIdxes.insert({{Size, 0}, Idx});
  }

  for (auto &Idx : StackSlotIdxes)
    StackIdxesToPos[Idx.second] = Idx.first;

  NumSlotIdxes = StackSlotIdxes.size();
}

LocIdx MLocTracker::trackRegister(unsigned ID) {
  assert(ID != 0);
  LocIdx NewIdx = LocIdx(LocIdxToIDNum.size());
  LocIdxToIDNum.grow(NewIdx);
  LocIdxToLocID.grow(NewIdx);

  // Default: it's an mphi.
  ValueIDNum ValNum = {CurBB, 0, NewIdx};
  // Was this reg ever touched by a regmask?
  for (const auto &MaskPair : reverse(Masks)) {
    if (MaskPair.first->clobbersPhysReg(ID)) {
      // There was an earlier def we skipped.
      ValNum = {CurBB, MaskPair.second, NewIdx};
      break;
    }
  }

  LocIdxToIDNum[NewIdx] = ValNum;
  LocIdxToLocID[NewIdx] = ID;
  return NewIdx;
}

void MLocTracker::writeRegMask(const MachineOperand *MO, unsigned CurBB,
                               unsigned InstID) {
  // Def any register we track that isn't preserved. The regmask
  // terminates the liveness of a register, meaning its value can't be
  // relied upon -- we represent this by giving it a new value.
  for (auto Location : locations()) {
    unsigned ID = LocIdxToLocID[Location.Idx];
    // Don't clobber SP, even if the mask says it's clobbered.
    if (ID < NumRegs && !SPAliases.count(ID) && MO->clobbersPhysReg(ID))
      defReg(ID, CurBB, InstID);
  }
  Masks.push_back(std::make_pair(MO, InstID));
}

std::optional<SpillLocationNo> MLocTracker::getOrTrackSpillLoc(SpillLoc L) {
  SpillLocationNo SpillID(SpillLocs.idFor(L));

  if (SpillID.id() == 0) {
    // If there is no location, and we have reached the limit of how many stack
    // slots to track, then don't track this one.
    if (SpillLocs.size() >= StackWorkingSetLimit)
      return std::nullopt;

    // Spill location is untracked: create record for this one, and all
    // subregister slots too.
    SpillID = SpillLocationNo(SpillLocs.insert(L));
    for (unsigned StackIdx = 0; StackIdx < NumSlotIdxes; ++StackIdx) {
      unsigned L = getSpillIDWithIdx(SpillID, StackIdx);
      LocIdx Idx = LocIdx(LocIdxToIDNum.size()); // New idx
      LocIdxToIDNum.grow(Idx);
      LocIdxToLocID.grow(Idx);
      LocIDToLocIdx.push_back(Idx);
      LocIdxToLocID[Idx] = L;
      // Initialize to PHI value; corresponds to the location's live-in value
      // during transfer function construction.
      LocIdxToIDNum[Idx] = ValueIDNum(CurBB, 0, Idx);
    }
  }
  return SpillID;
}

std::string MLocTracker::LocIdxToName(LocIdx Idx) const {
  unsigned ID = LocIdxToLocID[Idx];
  if (ID >= NumRegs) {
    StackSlotPos Pos = locIDToSpillIdx(ID);
    ID -= NumRegs;
    unsigned Slot = ID / NumSlotIdxes;
    return Twine("slot ")
        .concat(Twine(Slot).concat(
            Twine(" sz ").concat(Twine(Pos.first).concat(
                Twine(" offs ").concat(Twine(Pos.second))))))
        .str();
  } else {
    return TRI.getRegAsmName(ID).str();
  }
}

std::string MLocTracker::IDAsString(const ValueIDNum &Num) const {
  std::string DefName = LocIdxToName(Num.getLoc());
  return Num.asString(DefName);
}

#ifndef NDEBUG
LLVM_DUMP_METHOD void MLocTracker::dump() {
  for (auto Location : locations()) {
    std::string MLocName = LocIdxToName(Location.Value.getLoc());
    std::string DefName = Location.Value.asString(MLocName);
    dbgs() << LocIdxToName(Location.Idx) << " --> " << DefName << "\n";
  }
}

LLVM_DUMP_METHOD void MLocTracker::dump_mloc_map() {
  for (auto Location : locations()) {
    std::string MLocName = LocIdxToName(Location.Idx);
    dbgs() << "Idx " << Location.Idx.asU64() << " " << MLocName << "\n";
  }
}
#endif

MachineInstrBuilder
MLocTracker::emitLoc(const SmallVectorImpl<ResolvedDbgOp> &DbgOps,
                     const DebugVariable &Var,
                     const DbgValueProperties &Properties) {
  DebugLoc DL = DILocation::get(Var.getVariable()->getContext(), 0, 0,
                                Var.getVariable()->getScope(),
                                const_cast<DILocation *>(Var.getInlinedAt()));

  const MCInstrDesc &Desc = Properties.IsVariadic
                                ?
                                TII.get(TargetOpcode::DBG_VALUE_LIST)
                                : TII.get(TargetOpcode::DBG_VALUE);

#ifdef EXPENSIVE_CHECKS
  assert(all_of(DbgOps,
                [](const ResolvedDbgOp &Op) {
                  return Op.IsConst || !Op.Loc.isIllegal();
                }) &&
         "Did not expect illegal ops in DbgOps.");
  assert((DbgOps.size() == 0 ||
          DbgOps.size() == Properties.getLocationOpCount()) &&
         "Expected to have either one DbgOp per MI LocationOp, or none.");
#endif

  auto GetRegOp = [](unsigned Reg) -> MachineOperand {
    return MachineOperand::CreateReg(
        /* Reg */ Reg, /* isDef */ false, /* isImp */ false,
        /* isKill */ false, /* isDead */ false,
        /* isUndef */ false, /* isEarlyClobber */ false,
        /* SubReg */ 0, /* isDebug */ true);
  };

  SmallVector<MachineOperand> MOs;

  auto EmitUndef = [&]() {
    MOs.clear();
    MOs.assign(Properties.getLocationOpCount(), GetRegOp(0));
    return BuildMI(MF, DL, Desc, false, MOs, Var.getVariable(),
                   Properties.DIExpr);
  };

  // Don't bother passing any real operands to BuildMI if any of them would be
  // $noreg.
  if (DbgOps.empty())
    return EmitUndef();

  bool Indirect = Properties.Indirect;

  const DIExpression *Expr = Properties.DIExpr;

  assert(DbgOps.size() == Properties.getLocationOpCount());

  // If all locations are valid, accumulate them into our list of
  // MachineOperands. For any spilled locations, either update the indirectness
  // register or apply the appropriate transformations in the DIExpression.
  for (size_t Idx = 0; Idx < Properties.getLocationOpCount(); ++Idx) {
    const ResolvedDbgOp &Op = DbgOps[Idx];

    if (Op.IsConst) {
      MOs.push_back(Op.MO);
      continue;
    }

    LocIdx MLoc = Op.Loc;
    unsigned LocID = LocIdxToLocID[MLoc];
    if (LocID >= NumRegs) {
      SpillLocationNo SpillID = locIDToSpill(LocID);
      StackSlotPos StackIdx = locIDToSpillIdx(LocID);
      unsigned short Offset = StackIdx.second;

      // TODO: support variables that are located in spill slots, with non-zero
      // offsets from the start of the spill slot. It would require some more
      // complex DIExpression calculations. This doesn't seem to be produced by
      // LLVM right now, so don't try and support it.
      // Accept no-subregister slots and subregisters where the offset is zero.
      // The consumer should already have type information to work out how large
      // the variable is.
      if (Offset == 0) {
        const SpillLoc &Spill = SpillLocs[SpillID.id()];
        unsigned Base = Spill.SpillBase;

        // There are several ways we can dereference things, and several inputs
        // to consider:
        // * NRVO variables will appear with IsIndirect set, but should have
        //   nothing else in their DIExpressions,
        // * Variables with DW_OP_stack_value in their expr already need an
        //   explicit dereference of the stack location,
        // * Values that don't match the variable size need DW_OP_deref_size,
        // * Everything else can just become a simple location expression.

        // We need to use deref_size whenever there's a mismatch between the
        // size of value and the size of variable portion being read.
        // Additionally, we should use it whenever dealing with stack_value
        // fragments, to avoid the consumer having to determine the deref size
        // from DW_OP_piece.
        bool UseDerefSize = false;
        unsigned ValueSizeInBits = getLocSizeInBits(MLoc);
        unsigned DerefSizeInBytes = ValueSizeInBits / 8;
        if (auto Fragment = Var.getFragment()) {
          unsigned VariableSizeInBits = Fragment->SizeInBits;
          if (VariableSizeInBits != ValueSizeInBits || Expr->isComplex())
            UseDerefSize = true;
        } else if (auto Size = Var.getVariable()->getSizeInBits()) {
          if (*Size != ValueSizeInBits) {
            UseDerefSize = true;
          }
        }

        SmallVector<uint64_t, 5> OffsetOps;
        TRI.getOffsetOpcodes(Spill.SpillOffset, OffsetOps);
        bool StackValue = false;

        if (Properties.Indirect) {
          // This is something like an NRVO variable, where the pointer has been
          // spilt to the stack. It should end up being a memory location, with
          // the pointer to the variable loaded off the stack with a deref:
          assert(!Expr->isImplicit());
          OffsetOps.push_back(dwarf::DW_OP_deref);
        } else if (UseDerefSize && Expr->isSingleLocationExpression()) {
          // TODO: Figure out how to handle deref size issues for variadic
          // values.
          // We're loading a value off the stack that's not the same size as the
          // variable. Add / subtract stack offset, explicitly deref with a
          // size, and add DW_OP_stack_value if not already present.
          OffsetOps.push_back(dwarf::DW_OP_deref_size);
          OffsetOps.push_back(DerefSizeInBytes);
          StackValue = true;
        } else if (Expr->isComplex() || Properties.IsVariadic) {
          // A variable with no size ambiguity, but with extra elements in its
          // expression. Manually dereference the stack location.
          OffsetOps.push_back(dwarf::DW_OP_deref);
        } else {
          // A plain value that has been spilt to the stack, with no further
          // context. Request a location expression, marking the DBG_VALUE as
          // IsIndirect.
          Indirect = true;
        }

        Expr = DIExpression::appendOpsToArg(Expr, OffsetOps, Idx, StackValue);
        MOs.push_back(GetRegOp(Base));
      } else {
        // This is a stack location with a weird subregister offset: emit an
        // undef DBG_VALUE instead.
        return EmitUndef();
      }
    } else {
      // Non-empty, non-stack slot, must be a plain register.
      MOs.push_back(GetRegOp(LocID));
    }
  }

  return BuildMI(MF, DL, Desc, Indirect, MOs, Var.getVariable(), Expr);
}

/// Default construct and initialize the pass.
InstrRefBasedLDV::InstrRefBasedLDV() = default;

bool InstrRefBasedLDV::isCalleeSaved(LocIdx L) const {
  unsigned Reg = MTracker->LocIdxToLocID[L];
  return isCalleeSavedReg(Reg);
}
bool InstrRefBasedLDV::isCalleeSavedReg(Register R) const {
  for (MCRegAliasIterator RAI(R, TRI, true); RAI.isValid(); ++RAI)
    if (CalleeSavedRegs.test(*RAI))
      return true;
  return false;
}

//===----------------------------------------------------------------------===//
// Debug Range Extension Implementation
//===----------------------------------------------------------------------===//

#ifndef NDEBUG
// Something to restore in the future.
// void InstrRefBasedLDV::printVarLocInMBB(..)
#endif

std::optional<SpillLocationNo>
InstrRefBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) {
  assert(MI.hasOneMemOperand() &&
         "Spill instruction does not have exactly one memory operand?");
  auto MMOI = MI.memoperands_begin();
  const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue();
  assert(PVal->kind() == PseudoSourceValue::FixedStack &&
         "Inconsistent memory operand in spill instruction");
  int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex();
  const MachineBasicBlock *MBB = MI.getParent();
  Register Reg;
  StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg);
  return MTracker->getOrTrackSpillLoc({Reg, Offset});
}

std::optional<LocIdx>
InstrRefBasedLDV::findLocationForMemOperand(const MachineInstr &MI) {
  std::optional<SpillLocationNo> SpillLoc = extractSpillBaseRegAndOffset(MI);
  if (!SpillLoc)
    return std::nullopt;

  // Where in the stack slot is this value defined -- i.e., what size of value
  // is this? An important question, because it could be loaded into a register
  // from the stack at some point. Happily the memory operand will tell us
  // the size written to the stack.
  auto *MemOperand = *MI.memoperands_begin();
  unsigned SizeInBits = MemOperand->getSizeInBits();

  // Find that position in the stack indexes we're tracking.
  auto IdxIt = MTracker->StackSlotIdxes.find({SizeInBits, 0});
  if (IdxIt == MTracker->StackSlotIdxes.end())
    // That index is not tracked. This is surprising, and unlikely to ever
    // occur, but the safe action is to indicate the variable is optimised out.
    return std::nullopt;

  unsigned SpillID = MTracker->getSpillIDWithIdx(*SpillLoc, IdxIt->second);
  return MTracker->getSpillMLoc(SpillID);
}

/// End all previous ranges related to @MI and start a new range from @MI
/// if it is a DBG_VALUE instr.
bool InstrRefBasedLDV::transferDebugValue(const MachineInstr &MI) {
  if (!MI.isDebugValue())
    return false;

  assert(MI.getDebugVariable()->isValidLocationForIntrinsic(MI.getDebugLoc()) &&
         "Expected inlined-at fields to agree");

  // If there are no instructions in this lexical scope, do no location tracking
  // at all; this variable shouldn't get a legitimate location range.
  auto *Scope = LS.findLexicalScope(MI.getDebugLoc().get());
  if (Scope == nullptr)
    return true; // Handled it, by doing nothing.

  // MLocTracker needs to know that this register is read, even if it's only
  // read by a debug inst.
  for (const MachineOperand &MO : MI.debug_operands())
    if (MO.isReg() && MO.getReg() != 0)
      (void)MTracker->readReg(MO.getReg());

  // If we're preparing for the second analysis (variables), the machine value
  // locations are already solved, and we report this DBG_VALUE and the value
  // it refers to, to VLocTracker.
  if (VTracker) {
    SmallVector<DbgOpID> DebugOps;
    // Feed defVar the new variable location, or if this is a DBG_VALUE $noreg,
    // feed defVar None.
    if (!MI.isUndefDebugValue()) {
      for (const MachineOperand &MO : MI.debug_operands()) {
        // There should be no undef registers here, as we've screened for undef
        // debug values.
        if (MO.isReg()) {
          DebugOps.push_back(DbgOpStore.insert(MTracker->readReg(MO.getReg())));
        } else if (MO.isImm() || MO.isFPImm() || MO.isCImm()) {
          DebugOps.push_back(DbgOpStore.insert(MO));
        } else {
          llvm_unreachable("Unexpected debug operand type.");
        }
      }
    }
    VTracker->defVar(MI, DbgValueProperties(MI), DebugOps);
  }

  // If performing final tracking of transfers, report this variable definition
  // to the TransferTracker too.
  if (TTracker)
    TTracker->redefVar(MI);
  return true;
}

std::optional<ValueIDNum> InstrRefBasedLDV::getValueForInstrRef(
    unsigned InstNo, unsigned OpNo, MachineInstr &MI,
    const FuncValueTable *MLiveOuts, const FuncValueTable *MLiveIns) {
  // Various optimizations may have happened to the value during codegen,
  // recorded in the value substitution table. Apply any substitutions to
  // the instruction / operand number in this DBG_INSTR_REF, and collect
  // any subregister extractions performed during optimization.
  const MachineFunction &MF = *MI.getParent()->getParent();

  // Create dummy substitution with Src set, for lookup.
  auto SoughtSub =
      MachineFunction::DebugSubstitution({InstNo, OpNo}, {0, 0}, 0);

  SmallVector<unsigned, 4> SeenSubregs;
  auto LowerBoundIt = llvm::lower_bound(MF.DebugValueSubstitutions, SoughtSub);
  while (LowerBoundIt != MF.DebugValueSubstitutions.end() &&
         LowerBoundIt->Src == SoughtSub.Src) {
    std::tie(InstNo, OpNo) = LowerBoundIt->Dest;
    SoughtSub.Src = LowerBoundIt->Dest;
    if (unsigned Subreg = LowerBoundIt->Subreg)
      SeenSubregs.push_back(Subreg);
    LowerBoundIt = llvm::lower_bound(MF.DebugValueSubstitutions, SoughtSub);
  }

  // Default machine value number is <None> -- if no instruction defines
  // the corresponding value, it must have been optimized out.
  std::optional<ValueIDNum> NewID;

  // Try to look up the instruction number, and find the machine value number
  // that it defines. It could be an instruction, or a PHI.
  auto InstrIt = DebugInstrNumToInstr.find(InstNo);
  auto PHIIt = llvm::lower_bound(DebugPHINumToValue, InstNo);
  if (InstrIt != DebugInstrNumToInstr.end()) {
    const MachineInstr &TargetInstr = *InstrIt->second.first;
    uint64_t BlockNo = TargetInstr.getParent()->getNumber();

    // Pick out the designated operand. It might be a memory reference, if
    // a register def was folded into a stack store.
    if (OpNo == MachineFunction::DebugOperandMemNumber &&
        TargetInstr.hasOneMemOperand()) {
      std::optional<LocIdx> L = findLocationForMemOperand(TargetInstr);
      if (L)
        NewID = ValueIDNum(BlockNo, InstrIt->second.second, *L);
    } else if (OpNo != MachineFunction::DebugOperandMemNumber) {
      // Permit the debug-info to be completely wrong: identifying a nonexistent
      // operand, or one that is not a register definition, means something
      // unexpected happened during optimisation. Broken debug-info, however,
      // shouldn't crash the compiler -- instead leave the variable value as
      // None, which will make it appear "optimised out".
1472 if (OpNo < TargetInstr.getNumOperands()) { 1473 const MachineOperand &MO = TargetInstr.getOperand(OpNo); 1474 1475 if (MO.isReg() && MO.isDef() && MO.getReg()) { 1476 unsigned LocID = MTracker->getLocID(MO.getReg()); 1477 LocIdx L = MTracker->LocIDToLocIdx[LocID]; 1478 NewID = ValueIDNum(BlockNo, InstrIt->second.second, L); 1479 } 1480 } 1481 1482 if (!NewID) { 1483 LLVM_DEBUG( 1484 { dbgs() << "Seen instruction reference to illegal operand\n"; }); 1485 } 1486 } 1487 // else: NewID is left as None. 1488 } else if (PHIIt != DebugPHINumToValue.end() && PHIIt->InstrNum == InstNo) { 1489 // It's actually a PHI value. Which value it is might not be obvious, use 1490 // the resolver helper to find out. 1491 assert(MLiveOuts && MLiveIns); 1492 NewID = resolveDbgPHIs(*MI.getParent()->getParent(), *MLiveOuts, *MLiveIns, 1493 MI, InstNo); 1494 } 1495 1496 // Apply any subregister extractions, in reverse. We might have seen code 1497 // like this: 1498 // CALL64 @foo, implicit-def $rax 1499 // %0:gr64 = COPY $rax 1500 // %1:gr32 = COPY %0.sub_32bit 1501 // %2:gr16 = COPY %1.sub_16bit 1502 // %3:gr8 = COPY %2.sub_8bit 1503 // In which case each copy would have been recorded as a substitution with 1504 // a subregister qualifier. Apply those qualifiers now. 1505 if (NewID && !SeenSubregs.empty()) { 1506 unsigned Offset = 0; 1507 unsigned Size = 0; 1508 1509 // Look at each subregister that we passed through, and progressively 1510 // narrow in, accumulating any offsets that occur. Substitutions should 1511 // only ever be the same or narrower width than what they read from; 1512 // iterate in reverse order so that we go from wide to small. 1513 for (unsigned Subreg : reverse(SeenSubregs)) { 1514 unsigned ThisSize = TRI->getSubRegIdxSize(Subreg); 1515 unsigned ThisOffset = TRI->getSubRegIdxOffset(Subreg); 1516 Offset += ThisOffset; 1517 Size = (Size == 0) ? ThisSize : std::min(Size, ThisSize); 1518 } 1519 1520 // If that worked, look for an appropriate subregister with the register 1521 // where the define happens. Don't look at values that were defined during 1522 // a stack write: we can't currently express register locations within 1523 // spills. 1524 LocIdx L = NewID->getLoc(); 1525 if (NewID && !MTracker->isSpill(L)) { 1526 // Find the register class for the register where this def happened. 1527 // FIXME: no index for this? 1528 Register Reg = MTracker->LocIdxToLocID[L]; 1529 const TargetRegisterClass *TRC = nullptr; 1530 for (const auto *TRCI : TRI->regclasses()) 1531 if (TRCI->contains(Reg)) 1532 TRC = TRCI; 1533 assert(TRC && "Couldn't find target register class?"); 1534 1535 // If the register we have isn't the right size or in the right place, 1536 // Try to find a subregister inside it. 1537 unsigned MainRegSize = TRI->getRegSizeInBits(*TRC); 1538 if (Size != MainRegSize || Offset) { 1539 // Enumerate all subregisters, searching. 1540 Register NewReg = 0; 1541 for (MCPhysReg SR : TRI->subregs(Reg)) { 1542 unsigned Subreg = TRI->getSubRegIndex(Reg, SR); 1543 unsigned SubregSize = TRI->getSubRegIdxSize(Subreg); 1544 unsigned SubregOffset = TRI->getSubRegIdxOffset(Subreg); 1545 if (SubregSize == Size && SubregOffset == Offset) { 1546 NewReg = SR; 1547 break; 1548 } 1549 } 1550 1551 // If we didn't find anything: there's no way to express our value. 1552 if (!NewReg) { 1553 NewID = std::nullopt; 1554 } else { 1555 // Re-state the value as being defined within the subregister 1556 // that we found. 
1557 LocIdx NewLoc = MTracker->lookupOrTrackRegister(NewReg);
1558 NewID = ValueIDNum(NewID->getBlock(), NewID->getInst(), NewLoc);
1559 }
1560 }
1561 } else {
1562 // If we can't handle subregisters, unset the new value.
1563 NewID = std::nullopt;
1564 }
1565 }
1566
1567 return NewID;
1568 }
1569
1570 bool InstrRefBasedLDV::transferDebugInstrRef(MachineInstr &MI,
1571 const FuncValueTable *MLiveOuts,
1572 const FuncValueTable *MLiveIns) {
1573 if (!MI.isDebugRef())
1574 return false;
1575
1576 // Only handle this instruction when we are building the variable value
1577 // transfer function.
1578 if (!VTracker && !TTracker)
1579 return false;
1580
1581 const DILocalVariable *Var = MI.getDebugVariable();
1582 const DIExpression *Expr = MI.getDebugExpression();
1583 const DILocation *DebugLoc = MI.getDebugLoc();
1584 const DILocation *InlinedAt = DebugLoc->getInlinedAt();
1585 assert(Var->isValidLocationForIntrinsic(DebugLoc) &&
1586 "Expected inlined-at fields to agree");
1587
1588 DebugVariable V(Var, Expr, InlinedAt);
1589
1590 auto *Scope = LS.findLexicalScope(MI.getDebugLoc().get());
1591 if (Scope == nullptr)
1592 return true; // Handled by doing nothing. This variable is never in scope.
1593
1594 SmallVector<DbgOpID> DbgOpIDs;
1595 for (const MachineOperand &MO : MI.debug_operands()) {
1596 if (!MO.isDbgInstrRef()) {
1597 assert(!MO.isReg() && "DBG_INSTR_REF should not contain registers");
1598 DbgOpID ConstOpID = DbgOpStore.insert(DbgOp(MO));
1599 DbgOpIDs.push_back(ConstOpID);
1600 continue;
1601 }
1602
1603 unsigned InstNo = MO.getInstrRefInstrIndex();
1604 unsigned OpNo = MO.getInstrRefOpIndex();
1605
1606 // Default machine value number is <None> -- if no instruction defines
1607 // the corresponding value, it must have been optimized out.
1608 std::optional<ValueIDNum> NewID =
1609 getValueForInstrRef(InstNo, OpNo, MI, MLiveOuts, MLiveIns);
1610 // We have a value number or std::nullopt. If the latter, then kill the
1611 // entire debug value.
1612 if (NewID) {
1613 DbgOpIDs.push_back(DbgOpStore.insert(*NewID));
1614 } else {
1615 DbgOpIDs.clear();
1616 break;
1617 }
1618 }
1619
1620 // We have a DbgOpID for every value or for none. Tell the variable value
1621 // tracker about it. The rest of this LiveDebugValues implementation acts
1622 // exactly the same for DBG_INSTR_REFs as DBG_VALUEs (just, the former can
1623 // refer to values that aren't immediately available).
1624 DbgValueProperties Properties(Expr, false, true);
1625 if (VTracker)
1626 VTracker->defVar(MI, Properties, DbgOpIDs);
1627
1628 // If we're on the final pass through the function, decompose this INSTR_REF
1629 // into a plain DBG_VALUE.
1630 if (!TTracker)
1631 return true;
1632
1633 // Fetch the concrete DbgOps now, as we will need them later.
1634 SmallVector<DbgOp> DbgOps;
1635 for (DbgOpID OpID : DbgOpIDs) {
1636 DbgOps.push_back(DbgOpStore.find(OpID));
1637 }
1638
1639 // Pick a location for the machine value number, if such a location exists.
1640 // (This information could be stored in TransferTracker to make it faster).
1641 SmallDenseMap<ValueIDNum, TransferTracker::LocationAndQuality> FoundLocs;
1642 SmallVector<ValueIDNum> ValuesToFind;
1643 // Initialize the preferred-location map with illegal locations, to be
1644 // filled in later.
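// (Once initialized, the scan over MTracker's locations further below upgrades
// each value's remembered location whenever a longer-term one is found, and
// stops early when every sought value has a best-quality location.)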
1645 for (const DbgOp &Op : DbgOps) {
1646 if (!Op.IsConst)
1647 if (FoundLocs.insert({Op.ID, TransferTracker::LocationAndQuality()})
1648 .second)
1649 ValuesToFind.push_back(Op.ID);
1650 }
1651
1652 for (auto Location : MTracker->locations()) {
1653 LocIdx CurL = Location.Idx;
1654 ValueIDNum ID = MTracker->readMLoc(CurL);
1655 auto ValueToFindIt = find(ValuesToFind, ID);
1656 if (ValueToFindIt == ValuesToFind.end())
1657 continue;
1658 auto &Previous = FoundLocs.find(ID)->second;
1659 // If this is the first location with that value, pick it. Otherwise,
1660 // consider whether it's a "longer term" location.
1661 std::optional<TransferTracker::LocationQuality> ReplacementQuality =
1662 TTracker->getLocQualityIfBetter(CurL, Previous.getQuality());
1663 if (ReplacementQuality) {
1664 Previous = TransferTracker::LocationAndQuality(CurL, *ReplacementQuality);
1665 if (Previous.isBest()) {
1666 ValuesToFind.erase(ValueToFindIt);
1667 if (ValuesToFind.empty())
1668 break;
1669 }
1670 }
1671 }
1672
1673 SmallVector<ResolvedDbgOp> NewLocs;
1674 for (const DbgOp &DbgOp : DbgOps) {
1675 if (DbgOp.IsConst) {
1676 NewLocs.push_back(DbgOp.MO);
1677 continue;
1678 }
1679 LocIdx FoundLoc = FoundLocs.find(DbgOp.ID)->second.getLoc();
1680 if (FoundLoc.isIllegal()) {
1681 NewLocs.clear();
1682 break;
1683 }
1684 NewLocs.push_back(FoundLoc);
1685 }
1686 // Tell transfer tracker that the variable value has changed.
1687 TTracker->redefVar(MI, Properties, NewLocs);
1688
1689 // If there were values with no location, but all such values are defined in
1690 // later instructions in this block, this is a block-local use-before-def.
1691 if (!DbgOps.empty() && NewLocs.empty()) {
1692 bool IsValidUseBeforeDef = true;
1693 uint64_t LastUseBeforeDef = 0;
1694 for (auto ValueLoc : FoundLocs) {
1695 ValueIDNum NewID = ValueLoc.first;
1696 LocIdx FoundLoc = ValueLoc.second.getLoc();
1697 if (!FoundLoc.isIllegal())
1698 continue;
1699 // If we have a value with no location that is not defined in this block,
1700 // then it has no location in this block, leaving this value undefined.
1701 if (NewID.getBlock() != CurBB || NewID.getInst() <= CurInst) {
1702 IsValidUseBeforeDef = false;
1703 break;
1704 }
1705 LastUseBeforeDef = std::max(LastUseBeforeDef, NewID.getInst());
1706 }
1707 if (IsValidUseBeforeDef) {
1708 TTracker->addUseBeforeDef(V, {MI.getDebugExpression(), false, true},
1709 DbgOps, LastUseBeforeDef);
1710 }
1711 }
1712
1713 // Produce a DBG_VALUE representing what this DBG_INSTR_REF meant.
1714 // This DBG_VALUE is potentially a $noreg / undefined location, if
1715 // FoundLoc is illegal.
1716 // (XXX -- could morph the DBG_INSTR_REF in the future).
1717 MachineInstr *DbgMI = MTracker->emitLoc(NewLocs, V, Properties);
1718
1719 TTracker->PendingDbgValues.push_back(DbgMI);
1720 TTracker->flushDbgValues(MI.getIterator(), nullptr);
1721 return true;
1722 }
1723
1724 bool InstrRefBasedLDV::transferDebugPHI(MachineInstr &MI) {
1725 if (!MI.isDebugPHI())
1726 return false;
1727
1728 // Analyse these only when solving the machine value location problem.
1729 if (VTracker || TTracker)
1730 return true;
1731
1732 // First operand is the value location, either a stack slot or register.
1733 // Second is the debug instruction number of the original PHI.
1734 const MachineOperand &MO = MI.getOperand(0);
1735 unsigned InstrNum = MI.getOperand(1).getImm();
1736
1737 auto EmitBadPHI = [this, &MI, InstrNum]() -> bool {
1738 // Helper lambda to do any accounting when we fail to find a location for
1739 // a DBG_PHI. This can happen if DBG_PHIs are malformed, or refer to a
1740 // dead stack slot, for example.
1741 // Record a DebugPHIRecord with an empty value + location.
1742 DebugPHINumToValue.push_back(
1743 {InstrNum, MI.getParent(), std::nullopt, std::nullopt});
1744 return true;
1745 };
1746
1747 if (MO.isReg() && MO.getReg()) {
1748 // The value is whatever's currently in the register. Read and record it,
1749 // to be analysed later.
1750 Register Reg = MO.getReg();
1751 ValueIDNum Num = MTracker->readReg(Reg);
1752 auto PHIRec = DebugPHIRecord(
1753 {InstrNum, MI.getParent(), Num, MTracker->lookupOrTrackRegister(Reg)});
1754 DebugPHINumToValue.push_back(PHIRec);
1755
1756 // Ensure this register is tracked.
1757 for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
1758 MTracker->lookupOrTrackRegister(*RAI);
1759 } else if (MO.isFI()) {
1760 // The value is whatever's in this stack slot.
1761 unsigned FI = MO.getIndex();
1762
1763 // If the stack slot is dead, then this was optimized away.
1764 // FIXME: stack slot colouring should account for slots that get merged.
1765 if (MFI->isDeadObjectIndex(FI))
1766 return EmitBadPHI();
1767
1768 // Identify this spill slot, ensure it's tracked.
1769 Register Base;
1770 StackOffset Offs = TFI->getFrameIndexReference(*MI.getMF(), FI, Base);
1771 SpillLoc SL = {Base, Offs};
1772 std::optional<SpillLocationNo> SpillNo = MTracker->getOrTrackSpillLoc(SL);
1773
1774 // We might be able to find a value, but have chosen not to, to avoid
1775 // tracking too much stack information.
1776 if (!SpillNo)
1777 return EmitBadPHI();
1778
1779 // Any stack location DBG_PHI should have an associated bit-size.
1780 assert(MI.getNumOperands() == 3 && "Stack DBG_PHI with no size?");
1781 unsigned slotBitSize = MI.getOperand(2).getImm();
1782
1783 unsigned SpillID = MTracker->getLocID(*SpillNo, {slotBitSize, 0});
1784 LocIdx SpillLoc = MTracker->getSpillMLoc(SpillID);
1785 ValueIDNum Result = MTracker->readMLoc(SpillLoc);
1786
1787 // Record this DBG_PHI for later analysis.
1788 auto DbgPHI = DebugPHIRecord({InstrNum, MI.getParent(), Result, SpillLoc});
1789 DebugPHINumToValue.push_back(DbgPHI);
1790 } else {
1791 // Else: if the operand is neither a legal register nor a stack slot, then
1792 // we're being fed illegal debug-info. Record an empty PHI, so that any
1793 // debug users trying to read this number will be put off trying to
1794 // interpret the value.
1795 LLVM_DEBUG(
1796 { dbgs() << "Seen DBG_PHI with unrecognised operand format\n"; });
1797 return EmitBadPHI();
1798 }
1799
1800 return true;
1801 }
1802
1803 void InstrRefBasedLDV::transferRegisterDef(MachineInstr &MI) {
1804 // Meta Instructions do not affect the debug liveness of any register they
1805 // define.
1806 if (MI.isImplicitDef()) {
1807 // Except when there's an implicit def, and the location it's defining has
1808 // no value number. The whole point of an implicit def is to announce that
1809 // the register is live, without being specific about its value. So define
1810 // a value if there isn't one already.
1811 ValueIDNum Num = MTracker->readReg(MI.getOperand(0).getReg());
1812 // Has a legitimate value -> ignore the implicit def.
1813 if (Num.getLoc() != 0)
1814 return;
1815 // Otherwise, def it here.
1816 } else if (MI.isMetaInstruction())
1817 return;
1818
1819 // We always ignore SP defines on call instructions; they don't actually
1820 // change the value of the stack pointer... except for win32's _chkstk.
This 1821 // is rare: filter quickly for the common case (no stack adjustments, not a 1822 // call, etc). If it is a call that modifies SP, recognise the SP register 1823 // defs. 1824 bool CallChangesSP = false; 1825 if (AdjustsStackInCalls && MI.isCall() && MI.getOperand(0).isSymbol() && 1826 !strcmp(MI.getOperand(0).getSymbolName(), StackProbeSymbolName.data())) 1827 CallChangesSP = true; 1828 1829 // Test whether we should ignore a def of this register due to it being part 1830 // of the stack pointer. 1831 auto IgnoreSPAlias = [this, &MI, CallChangesSP](Register R) -> bool { 1832 if (CallChangesSP) 1833 return false; 1834 return MI.isCall() && MTracker->SPAliases.count(R); 1835 }; 1836 1837 // Find the regs killed by MI, and find regmasks of preserved regs. 1838 // Max out the number of statically allocated elements in `DeadRegs`, as this 1839 // prevents fallback to std::set::count() operations. 1840 SmallSet<uint32_t, 32> DeadRegs; 1841 SmallVector<const uint32_t *, 4> RegMasks; 1842 SmallVector<const MachineOperand *, 4> RegMaskPtrs; 1843 for (const MachineOperand &MO : MI.operands()) { 1844 // Determine whether the operand is a register def. 1845 if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical() && 1846 !IgnoreSPAlias(MO.getReg())) { 1847 // Remove ranges of all aliased registers. 1848 for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI) 1849 // FIXME: Can we break out of this loop early if no insertion occurs? 1850 DeadRegs.insert(*RAI); 1851 } else if (MO.isRegMask()) { 1852 RegMasks.push_back(MO.getRegMask()); 1853 RegMaskPtrs.push_back(&MO); 1854 } 1855 } 1856 1857 // Tell MLocTracker about all definitions, of regmasks and otherwise. 1858 for (uint32_t DeadReg : DeadRegs) 1859 MTracker->defReg(DeadReg, CurBB, CurInst); 1860 1861 for (const auto *MO : RegMaskPtrs) 1862 MTracker->writeRegMask(MO, CurBB, CurInst); 1863 1864 // If this instruction writes to a spill slot, def that slot. 1865 if (hasFoldedStackStore(MI)) { 1866 if (std::optional<SpillLocationNo> SpillNo = 1867 extractSpillBaseRegAndOffset(MI)) { 1868 for (unsigned int I = 0; I < MTracker->NumSlotIdxes; ++I) { 1869 unsigned SpillID = MTracker->getSpillIDWithIdx(*SpillNo, I); 1870 LocIdx L = MTracker->getSpillMLoc(SpillID); 1871 MTracker->setMLoc(L, ValueIDNum(CurBB, CurInst, L)); 1872 } 1873 } 1874 } 1875 1876 if (!TTracker) 1877 return; 1878 1879 // When committing variable values to locations: tell transfer tracker that 1880 // we've clobbered things. It may be able to recover the variable from a 1881 // different location. 1882 1883 // Inform TTracker about any direct clobbers. 1884 for (uint32_t DeadReg : DeadRegs) { 1885 LocIdx Loc = MTracker->lookupOrTrackRegister(DeadReg); 1886 TTracker->clobberMloc(Loc, MI.getIterator(), false); 1887 } 1888 1889 // Look for any clobbers performed by a register mask. Only test locations 1890 // that are actually being tracked. 1891 if (!RegMaskPtrs.empty()) { 1892 for (auto L : MTracker->locations()) { 1893 // Stack locations can't be clobbered by regmasks. 1894 if (MTracker->isSpill(L.Idx)) 1895 continue; 1896 1897 Register Reg = MTracker->LocIdxToLocID[L.Idx]; 1898 if (IgnoreSPAlias(Reg)) 1899 continue; 1900 1901 for (const auto *MO : RegMaskPtrs) 1902 if (MO->clobbersPhysReg(Reg)) 1903 TTracker->clobberMloc(L.Idx, MI.getIterator(), false); 1904 } 1905 } 1906 1907 // Tell TTracker about any folded stack store. 
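// (Folded stack stores leave the slot's previous contents untrustworthy, so
// every tracked index within the slot is reported to TTracker as clobbered
// at this instruction.)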
1908 if (hasFoldedStackStore(MI)) { 1909 if (std::optional<SpillLocationNo> SpillNo = 1910 extractSpillBaseRegAndOffset(MI)) { 1911 for (unsigned int I = 0; I < MTracker->NumSlotIdxes; ++I) { 1912 unsigned SpillID = MTracker->getSpillIDWithIdx(*SpillNo, I); 1913 LocIdx L = MTracker->getSpillMLoc(SpillID); 1914 TTracker->clobberMloc(L, MI.getIterator(), true); 1915 } 1916 } 1917 } 1918 } 1919 1920 void InstrRefBasedLDV::performCopy(Register SrcRegNum, Register DstRegNum) { 1921 // In all circumstances, re-def all aliases. It's definitely a new value now. 1922 for (MCRegAliasIterator RAI(DstRegNum, TRI, true); RAI.isValid(); ++RAI) 1923 MTracker->defReg(*RAI, CurBB, CurInst); 1924 1925 ValueIDNum SrcValue = MTracker->readReg(SrcRegNum); 1926 MTracker->setReg(DstRegNum, SrcValue); 1927 1928 // Copy subregisters from one location to another. 1929 for (MCSubRegIndexIterator SRI(SrcRegNum, TRI); SRI.isValid(); ++SRI) { 1930 unsigned SrcSubReg = SRI.getSubReg(); 1931 unsigned SubRegIdx = SRI.getSubRegIndex(); 1932 unsigned DstSubReg = TRI->getSubReg(DstRegNum, SubRegIdx); 1933 if (!DstSubReg) 1934 continue; 1935 1936 // Do copy. There are two matching subregisters, the source value should 1937 // have been def'd when the super-reg was, the latter might not be tracked 1938 // yet. 1939 // This will force SrcSubReg to be tracked, if it isn't yet. Will read 1940 // mphi values if it wasn't tracked. 1941 LocIdx SrcL = MTracker->lookupOrTrackRegister(SrcSubReg); 1942 LocIdx DstL = MTracker->lookupOrTrackRegister(DstSubReg); 1943 (void)SrcL; 1944 (void)DstL; 1945 ValueIDNum CpyValue = MTracker->readReg(SrcSubReg); 1946 1947 MTracker->setReg(DstSubReg, CpyValue); 1948 } 1949 } 1950 1951 std::optional<SpillLocationNo> 1952 InstrRefBasedLDV::isSpillInstruction(const MachineInstr &MI, 1953 MachineFunction *MF) { 1954 // TODO: Handle multiple stores folded into one. 1955 if (!MI.hasOneMemOperand()) 1956 return std::nullopt; 1957 1958 // Reject any memory operand that's aliased -- we can't guarantee its value. 1959 auto MMOI = MI.memoperands_begin(); 1960 const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue(); 1961 if (PVal->isAliased(MFI)) 1962 return std::nullopt; 1963 1964 if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII)) 1965 return std::nullopt; // This is not a spill instruction, since no valid size 1966 // was returned from either function. 1967 1968 return extractSpillBaseRegAndOffset(MI); 1969 } 1970 1971 bool InstrRefBasedLDV::isLocationSpill(const MachineInstr &MI, 1972 MachineFunction *MF, unsigned &Reg) { 1973 if (!isSpillInstruction(MI, MF)) 1974 return false; 1975 1976 int FI; 1977 Reg = TII->isStoreToStackSlotPostFE(MI, FI); 1978 return Reg != 0; 1979 } 1980 1981 std::optional<SpillLocationNo> 1982 InstrRefBasedLDV::isRestoreInstruction(const MachineInstr &MI, 1983 MachineFunction *MF, unsigned &Reg) { 1984 if (!MI.hasOneMemOperand()) 1985 return std::nullopt; 1986 1987 // FIXME: Handle folded restore instructions with more than one memory 1988 // operand. 1989 if (MI.getRestoreSize(TII)) { 1990 Reg = MI.getOperand(0).getReg(); 1991 return extractSpillBaseRegAndOffset(MI); 1992 } 1993 return std::nullopt; 1994 } 1995 1996 bool InstrRefBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI) { 1997 // XXX -- it's too difficult to implement VarLocBasedImpl's stack location 1998 // limitations under the new model. Therefore, when comparing them, compare 1999 // versions that don't attempt spills or restores at all. 
2000 if (EmulateOldLDV)
2001 return false;
2002
2003 // Strictly limit ourselves to plain loads and stores, not all instructions
2004 // that can access the stack.
2005 int DummyFI = -1;
2006 if (!TII->isStoreToStackSlotPostFE(MI, DummyFI) &&
2007 !TII->isLoadFromStackSlotPostFE(MI, DummyFI))
2008 return false;
2009
2010 MachineFunction *MF = MI.getMF();
2011 unsigned Reg;
2012
2013 LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););
2014
2022 // First, if there are any DBG_VALUEs pointing at a spill slot that is
2023 // written to, terminate that variable location. The value in memory
2024 // will have changed. DbgEntityHistoryCalculator doesn't try to detect this.
2025 if (std::optional<SpillLocationNo> Loc = isSpillInstruction(MI, MF)) {
2026 // Un-set this location and clobber, so that earlier locations don't
2027 // continue past this store.
2028 for (unsigned SlotIdx = 0; SlotIdx < MTracker->NumSlotIdxes; ++SlotIdx) {
2029 unsigned SpillID = MTracker->getSpillIDWithIdx(*Loc, SlotIdx);
2030 std::optional<LocIdx> MLoc = MTracker->getSpillMLoc(SpillID);
2031 if (!MLoc)
2032 continue;
2033
2034 // We need to over-write the stack slot with something (here, a def at
2035 // this instruction) to ensure no values are preserved in this stack slot
2036 // after the spill. It also prevents TTracker from trying to recover the
2037 // location and re-installing it in the same place.
2038 ValueIDNum Def(CurBB, CurInst, *MLoc);
2039 MTracker->setMLoc(*MLoc, Def);
2040 if (TTracker)
2041 TTracker->clobberMloc(*MLoc, MI.getIterator());
2042 }
2043 }
2044
2045 // Try to recognise spill and restore instructions that may transfer a value.
2046 if (isLocationSpill(MI, MF, Reg)) {
2047 // isLocationSpill returning true should guarantee we can extract a
2048 // location.
2049 SpillLocationNo Loc = *extractSpillBaseRegAndOffset(MI);
2050
2051 auto DoTransfer = [&](Register SrcReg, unsigned SpillID) {
2052 auto ReadValue = MTracker->readReg(SrcReg);
2053 LocIdx DstLoc = MTracker->getSpillMLoc(SpillID);
2054 MTracker->setMLoc(DstLoc, ReadValue);
2055
2056 if (TTracker) {
2057 LocIdx SrcLoc = MTracker->getRegMLoc(SrcReg);
2058 TTracker->transferMlocs(SrcLoc, DstLoc, MI.getIterator());
2059 }
2060 };
2061
2062 // Then, transfer subreg bits.
2063 for (MCPhysReg SR : TRI->subregs(Reg)) {
2064 // Ensure this reg is tracked.
2065 (void)MTracker->lookupOrTrackRegister(SR);
2066 unsigned SubregIdx = TRI->getSubRegIndex(Reg, SR);
2067 unsigned SpillID = MTracker->getLocID(Loc, SubregIdx);
2068 DoTransfer(SR, SpillID);
2069 }
2070
2071 // Directly look up the size of the main source reg, and transfer.
2072 unsigned Size = TRI->getRegSizeInBits(Reg, *MRI);
2073 unsigned SpillID = MTracker->getLocID(Loc, {Size, 0});
2074 DoTransfer(Reg, SpillID);
2075 } else {
2076 std::optional<SpillLocationNo> Loc = isRestoreInstruction(MI, MF, Reg);
2077 if (!Loc)
2078 return false;
2079
2080 // Assumption: we're reading from the base of the stack slot, not some
2081 // offset into it. It seems very unlikely LLVM would ever generate
2082 // restores where this wasn't true. This then becomes a question of what
2083 // subregisters in the destination register line up with positions in the
2084 // stack slot.
2085
2086 // Def all registers that alias the destination.
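// (A restore rewrites the whole destination register, so values previously
// held in any aliasing register cannot be assumed to survive; each alias is
// therefore def'd before the new values are read out of the slot below.)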
2087 for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
2088 MTracker->defReg(*RAI, CurBB, CurInst);
2089
2090 // Now find subregisters within the destination register, and load values
2091 // from stack slot positions.
2092 auto DoTransfer = [&](Register DestReg, unsigned SpillID) {
2093 LocIdx SrcIdx = MTracker->getSpillMLoc(SpillID);
2094 auto ReadValue = MTracker->readMLoc(SrcIdx);
2095 MTracker->setReg(DestReg, ReadValue);
2096 };
2097
2098 for (MCPhysReg SR : TRI->subregs(Reg)) {
2099 unsigned Subreg = TRI->getSubRegIndex(Reg, SR);
2100 unsigned SpillID = MTracker->getLocID(*Loc, Subreg);
2101 DoTransfer(SR, SpillID);
2102 }
2103
2104 // Directly look up this register's slot idx by size, and transfer.
2105 unsigned Size = TRI->getRegSizeInBits(Reg, *MRI);
2106 unsigned SpillID = MTracker->getLocID(*Loc, {Size, 0});
2107 DoTransfer(Reg, SpillID);
2108 }
2109 return true;
2110 }
2111
2112 bool InstrRefBasedLDV::transferRegisterCopy(MachineInstr &MI) {
2113 auto DestSrc = TII->isCopyLikeInstr(MI);
2114 if (!DestSrc)
2115 return false;
2116
2117 const MachineOperand *DestRegOp = DestSrc->Destination;
2118 const MachineOperand *SrcRegOp = DestSrc->Source;
2119
2120 Register SrcReg = SrcRegOp->getReg();
2121 Register DestReg = DestRegOp->getReg();
2122
2123 // Ignore identity copies. Yep, these make it as far as LiveDebugValues.
2124 if (SrcReg == DestReg)
2125 return true;
2126
2127 // For emulating VarLocBasedImpl:
2128 // We want to recognize instructions where the destination register is a
2129 // callee-saved register. If a register that could be clobbered by the call
2130 // were included, there would be a great chance that it is going to be
2131 // clobbered soon. It is more likely that the previous register, which is
2132 // callee-saved, is going to stay unclobbered longer, even if it is killed.
2133 //
2134 // For InstrRefBasedImpl, we can track multiple locations per value, so
2135 // ignore this condition.
2136 if (EmulateOldLDV && !isCalleeSavedReg(DestReg))
2137 return false;
2138
2139 // VarLocBasedImpl only followed killing copies.
2140 if (EmulateOldLDV && !SrcRegOp->isKill())
2141 return false;
2142
2143 // Before we update MTracker, remember which values were present in each of
2144 // the locations about to be overwritten, so that we can recover any
2145 // potentially clobbered variables.
2146 DenseMap<LocIdx, ValueIDNum> ClobberedLocs;
2147 if (TTracker) {
2148 for (MCRegAliasIterator RAI(DestReg, TRI, true); RAI.isValid(); ++RAI) {
2149 LocIdx ClobberedLoc = MTracker->getRegMLoc(*RAI);
2150 auto MLocIt = TTracker->ActiveMLocs.find(ClobberedLoc);
2151 // If ActiveMLocs isn't tracking this location or there are no variables
2152 // using it, don't bother remembering.
2153 if (MLocIt == TTracker->ActiveMLocs.end() || MLocIt->second.empty())
2154 continue;
2155 ValueIDNum Value = MTracker->readReg(*RAI);
2156 ClobberedLocs[ClobberedLoc] = Value;
2157 }
2158 }
2159
2160 // Copy MTracker info, including subregs if available.
2161 InstrRefBasedLDV::performCopy(SrcReg, DestReg);
2162
2163 // The copy might have clobbered variables based on the destination register.
2164 // Tell TTracker about it, passing the old ValueIDNum to search for
2165 // alternative locations (or else terminating those variables).
2166 if (TTracker) {
2167 for (auto LocVal : ClobberedLocs) {
2168 TTracker->clobberMloc(LocVal.first, LocVal.second, MI.getIterator(), false);
2169 }
2170 }
2171
2172 // Only produce a transfer of DBG_VALUE within a block where old LDV
2173 // would have. We might make use of the additional value tracking in some
2174 // other way, later.
2175 if (TTracker && isCalleeSavedReg(DestReg) && SrcRegOp->isKill())
2176 TTracker->transferMlocs(MTracker->getRegMLoc(SrcReg),
2177 MTracker->getRegMLoc(DestReg), MI.getIterator());
2178
2179 // VarLocBasedImpl would quit tracking the old location after copying.
2180 if (EmulateOldLDV && SrcReg != DestReg)
2181 MTracker->defReg(SrcReg, CurBB, CurInst);
2182
2183 return true;
2184 }
2185
2186 /// Accumulate a mapping between each DILocalVariable fragment and other
2187 /// fragments of that DILocalVariable which overlap. This reduces work during
2188 /// the data-flow stage from "Find any overlapping fragments" to "Check if the
2189 /// known-to-overlap fragments are present".
2190 /// \param MI A previously unprocessed debug instruction to analyze for
2191 /// fragment usage.
2192 void InstrRefBasedLDV::accumulateFragmentMap(MachineInstr &MI) {
2193 assert(MI.isDebugValueLike());
2194 DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
2195 MI.getDebugLoc()->getInlinedAt());
2196 FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();
2197
2198 // If this is the first sighting of this variable, then we are guaranteed
2199 // there are currently no overlapping fragments either. Initialize the set
2200 // of seen fragments, record no overlaps for the current one, and return.
2201 auto SeenIt = SeenFragments.find(MIVar.getVariable());
2202 if (SeenIt == SeenFragments.end()) {
2203 SmallSet<FragmentInfo, 4> OneFragment;
2204 OneFragment.insert(ThisFragment);
2205 SeenFragments.insert({MIVar.getVariable(), OneFragment});
2206
2207 OverlapFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
2208 return;
2209 }
2210
2211 // If this particular Variable/Fragment pair already exists in the overlap
2212 // map, it has already been accounted for.
2213 auto IsInOLapMap =
2214 OverlapFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
2215 if (!IsInOLapMap.second)
2216 return;
2217
2218 auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
2219 auto &AllSeenFragments = SeenIt->second;
2220
2221 // Otherwise, examine all other seen fragments for this variable, with "this"
2222 // fragment being a previously unseen fragment. Record any pair of
2223 // overlapping fragments.
2224 for (const auto &ASeenFragment : AllSeenFragments) {
2225 // Does this previously seen fragment overlap?
2226 if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) {
2227 // Yes: Mark the current fragment as being overlapped.
2228 ThisFragmentsOverlaps.push_back(ASeenFragment);
2229 // Mark the previously seen fragment as being overlapped by the current
2230 // one.
2231 auto ASeenFragmentsOverlaps =
2232 OverlapFragments.find({MIVar.getVariable(), ASeenFragment});
2233 assert(ASeenFragmentsOverlaps != OverlapFragments.end() &&
2234 "Previously seen var fragment has no vector of overlaps");
2235 ASeenFragmentsOverlaps->second.push_back(ThisFragment);
2236 }
2237 }
2238
2239 AllSeenFragments.insert(ThisFragment);
2240 }
2241
2242 void InstrRefBasedLDV::process(MachineInstr &MI,
2243 const FuncValueTable *MLiveOuts,
2244 const FuncValueTable *MLiveIns) {
2245 // Try to interpret an MI as a debug or transfer instruction. Only if it's
2246 // none of these should we interpret its register defs as new value
2247 // definitions.
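// The first transfer* routine to recognise MI consumes it; only instructions
// that none of them claim fall through to transferRegisterDef, which records
// their register defs as new value definitions.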
2248 if (transferDebugValue(MI))
2249 return;
2250 if (transferDebugInstrRef(MI, MLiveOuts, MLiveIns))
2251 return;
2252 if (transferDebugPHI(MI))
2253 return;
2254 if (transferRegisterCopy(MI))
2255 return;
2256 if (transferSpillOrRestoreInst(MI))
2257 return;
2258 transferRegisterDef(MI);
2259 }
2260
2261 void InstrRefBasedLDV::produceMLocTransferFunction(
2262 MachineFunction &MF, SmallVectorImpl<MLocTransferMap> &MLocTransfer,
2263 unsigned MaxNumBlocks) {
2264 // Because we try to optimize around register mask operands by ignoring regs
2265 // that aren't currently tracked, we set up something ugly for later: RegMask
2266 // operands that are seen earlier than the first use of a register still need
2267 // to clobber that register in the transfer function. But this information
2268 // isn't actively recorded. Instead, we track each RegMask used in each block,
2269 // and accumulate the clobbered but untracked registers in each block into
2270 // the following bitvector. Later, if new values are tracked, we can add
2271 // appropriate clobbers.
2272 SmallVector<BitVector, 32> BlockMasks;
2273 BlockMasks.resize(MaxNumBlocks);
2274
2275 // Reserve one bit per register for the masks described above.
2276 unsigned BVWords = MachineOperand::getRegMaskSize(TRI->getNumRegs());
2277 for (auto &BV : BlockMasks)
2278 BV.resize(TRI->getNumRegs(), true);
2279
2280 // Step through all instructions and inhale the transfer function.
2281 for (auto &MBB : MF) {
2282 // Object fields that are read by trackers to know where we are in the
2283 // function.
2284 CurBB = MBB.getNumber();
2285 CurInst = 1;
2286
2287 // Set all machine locations to a PHI value. For transfer function
2288 // production only, this signifies the live-in value to the block.
2289 MTracker->reset();
2290 MTracker->setMPhis(CurBB);
2291
2292 // Step through each instruction in this block.
2293 for (auto &MI : MBB) {
2294 // Pass nullptr for the value tables when accumulating the machine
2295 // transfer function.
2296 process(MI, nullptr, nullptr);
2297
2298 // Also accumulate fragment map.
2299 if (MI.isDebugValueLike())
2300 accumulateFragmentMap(MI);
2301
2302 // Create a map from the instruction number (if present) to the
2303 // MachineInstr and its position.
2304 if (uint64_t InstrNo = MI.peekDebugInstrNum()) {
2305 auto InstrAndPos = std::make_pair(&MI, CurInst);
2306 auto InsertResult =
2307 DebugInstrNumToInstr.insert(std::make_pair(InstrNo, InstrAndPos));
2308
2309 // There should never be duplicate instruction numbers.
2310 assert(InsertResult.second);
2311 (void)InsertResult;
2312 }
2313
2314 ++CurInst;
2315 }
2316
2317 // Produce the transfer function, a map of machine location to new value. If
2318 // any machine location has the live-in phi value from the start of the
2319 // block, it's live-through and doesn't need recording in the transfer
2320 // function.
2321 for (auto Location : MTracker->locations()) {
2322 LocIdx Idx = Location.Idx;
2323 ValueIDNum &P = Location.Value;
2324 if (P.isPHI() && P.getLoc() == Idx.asU64())
2325 continue;
2326
2327 // Insert-or-update.
2328 auto &TransferMap = MLocTransfer[CurBB];
2329 auto Result = TransferMap.insert(std::make_pair(Idx.asU64(), P));
2330 if (!Result.second)
2331 Result.first->second = P;
2332 }
2333
2334 // Accumulate any bitmask operands into the clobbered reg mask for this
2335 // block.
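// (A regmask operand has a '0' bit for every register the call clobbers, so
// clearBitsNotInMask leaves BlockMasks[CurBB] with bits set only for
// registers that every mask seen in this block preserves.)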
2336 for (auto &P : MTracker->Masks) { 2337 BlockMasks[CurBB].clearBitsNotInMask(P.first->getRegMask(), BVWords); 2338 } 2339 } 2340 2341 // Compute a bitvector of all the registers that are tracked in this block. 2342 BitVector UsedRegs(TRI->getNumRegs()); 2343 for (auto Location : MTracker->locations()) { 2344 unsigned ID = MTracker->LocIdxToLocID[Location.Idx]; 2345 // Ignore stack slots, and aliases of the stack pointer. 2346 if (ID >= TRI->getNumRegs() || MTracker->SPAliases.count(ID)) 2347 continue; 2348 UsedRegs.set(ID); 2349 } 2350 2351 // Check that any regmask-clobber of a register that gets tracked, is not 2352 // live-through in the transfer function. It needs to be clobbered at the 2353 // very least. 2354 for (unsigned int I = 0; I < MaxNumBlocks; ++I) { 2355 BitVector &BV = BlockMasks[I]; 2356 BV.flip(); 2357 BV &= UsedRegs; 2358 // This produces all the bits that we clobber, but also use. Check that 2359 // they're all clobbered or at least set in the designated transfer 2360 // elem. 2361 for (unsigned Bit : BV.set_bits()) { 2362 unsigned ID = MTracker->getLocID(Bit); 2363 LocIdx Idx = MTracker->LocIDToLocIdx[ID]; 2364 auto &TransferMap = MLocTransfer[I]; 2365 2366 // Install a value representing the fact that this location is effectively 2367 // written to in this block. As there's no reserved value, instead use 2368 // a value number that is never generated. Pick the value number for the 2369 // first instruction in the block, def'ing this location, which we know 2370 // this block never used anyway. 2371 ValueIDNum NotGeneratedNum = ValueIDNum(I, 1, Idx); 2372 auto Result = 2373 TransferMap.insert(std::make_pair(Idx.asU64(), NotGeneratedNum)); 2374 if (!Result.second) { 2375 ValueIDNum &ValueID = Result.first->second; 2376 if (ValueID.getBlock() == I && ValueID.isPHI()) 2377 // It was left as live-through. Set it to clobbered. 2378 ValueID = NotGeneratedNum; 2379 } 2380 } 2381 } 2382 } 2383 2384 bool InstrRefBasedLDV::mlocJoin( 2385 MachineBasicBlock &MBB, SmallPtrSet<const MachineBasicBlock *, 16> &Visited, 2386 FuncValueTable &OutLocs, ValueTable &InLocs) { 2387 LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n"); 2388 bool Changed = false; 2389 2390 // Handle value-propagation when control flow merges on entry to a block. For 2391 // any location without a PHI already placed, the location has the same value 2392 // as its predecessors. If a PHI is placed, test to see whether it's now a 2393 // redundant PHI that we can eliminate. 2394 2395 SmallVector<const MachineBasicBlock *, 8> BlockOrders; 2396 for (auto *Pred : MBB.predecessors()) 2397 BlockOrders.push_back(Pred); 2398 2399 // Visit predecessors in RPOT order. 2400 auto Cmp = [&](const MachineBasicBlock *A, const MachineBasicBlock *B) { 2401 return BBToOrder.find(A)->second < BBToOrder.find(B)->second; 2402 }; 2403 llvm::sort(BlockOrders, Cmp); 2404 2405 // Skip entry block. 2406 if (BlockOrders.size() == 0) { 2407 // FIXME: We don't use assert here to prevent instr-ref-unreachable.mir 2408 // failing. 2409 LLVM_DEBUG(if (!MBB.isEntryBlock()) dbgs() 2410 << "Found not reachable block " << MBB.getFullName() 2411 << " from entry which may lead out of " 2412 "bound access to VarLocs\n"); 2413 return false; 2414 } 2415 2416 // Step through all machine locations, look at each predecessor and test 2417 // whether we can eliminate redundant PHIs. 2418 for (auto Location : MTracker->locations()) { 2419 LocIdx Idx = Location.Idx; 2420 2421 // Pick out the first predecessors live-out value for this location. 
It's 2422 // guaranteed to not be a backedge, as we order by RPO. 2423 ValueIDNum FirstVal = OutLocs[*BlockOrders[0]][Idx.asU64()]; 2424 2425 // If we've already eliminated a PHI here, do no further checking, just 2426 // propagate the first live-in value into this block. 2427 if (InLocs[Idx.asU64()] != ValueIDNum(MBB.getNumber(), 0, Idx)) { 2428 if (InLocs[Idx.asU64()] != FirstVal) { 2429 InLocs[Idx.asU64()] = FirstVal; 2430 Changed |= true; 2431 } 2432 continue; 2433 } 2434 2435 // We're now examining a PHI to see whether it's un-necessary. Loop around 2436 // the other live-in values and test whether they're all the same. 2437 bool Disagree = false; 2438 for (unsigned int I = 1; I < BlockOrders.size(); ++I) { 2439 const MachineBasicBlock *PredMBB = BlockOrders[I]; 2440 const ValueIDNum &PredLiveOut = OutLocs[*PredMBB][Idx.asU64()]; 2441 2442 // Incoming values agree, continue trying to eliminate this PHI. 2443 if (FirstVal == PredLiveOut) 2444 continue; 2445 2446 // We can also accept a PHI value that feeds back into itself. 2447 if (PredLiveOut == ValueIDNum(MBB.getNumber(), 0, Idx)) 2448 continue; 2449 2450 // Live-out of a predecessor disagrees with the first predecessor. 2451 Disagree = true; 2452 } 2453 2454 // No disagreement? No PHI. Otherwise, leave the PHI in live-ins. 2455 if (!Disagree) { 2456 InLocs[Idx.asU64()] = FirstVal; 2457 Changed |= true; 2458 } 2459 } 2460 2461 // TODO: Reimplement NumInserted and NumRemoved. 2462 return Changed; 2463 } 2464 2465 void InstrRefBasedLDV::findStackIndexInterference( 2466 SmallVectorImpl<unsigned> &Slots) { 2467 // We could spend a bit of time finding the exact, minimal, set of stack 2468 // indexes that interfere with each other, much like reg units. Or, we can 2469 // rely on the fact that: 2470 // * The smallest / lowest index will interfere with everything at zero 2471 // offset, which will be the largest set of registers, 2472 // * Most indexes with non-zero offset will end up being interference units 2473 // anyway. 2474 // So just pick those out and return them. 2475 2476 // We can rely on a single-byte stack index existing already, because we 2477 // initialize them in MLocTracker. 2478 auto It = MTracker->StackSlotIdxes.find({8, 0}); 2479 assert(It != MTracker->StackSlotIdxes.end()); 2480 Slots.push_back(It->second); 2481 2482 // Find anything that has a non-zero offset and add that too. 2483 for (auto &Pair : MTracker->StackSlotIdxes) { 2484 // Is offset zero? If so, ignore. 2485 if (!Pair.first.second) 2486 continue; 2487 Slots.push_back(Pair.second); 2488 } 2489 } 2490 2491 void InstrRefBasedLDV::placeMLocPHIs( 2492 MachineFunction &MF, SmallPtrSetImpl<MachineBasicBlock *> &AllBlocks, 2493 FuncValueTable &MInLocs, SmallVectorImpl<MLocTransferMap> &MLocTransfer) { 2494 SmallVector<unsigned, 4> StackUnits; 2495 findStackIndexInterference(StackUnits); 2496 2497 // To avoid repeatedly running the PHI placement algorithm, leverage the 2498 // fact that a def of register MUST also def its register units. Find the 2499 // units for registers, place PHIs for them, and then replicate them for 2500 // aliasing registers. Some inputs that are never def'd (DBG_PHIs of 2501 // arguments) don't lead to register units being tracked, just place PHIs for 2502 // those registers directly. Stack slots have their own form of "unit", 2503 // store them to one side. 
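// (The three collections below correspond to those cases: register units to
// PHI and then replicate to aliasing registers, plain locations to PHI
// directly, and stack slots handled through their slot indexes.)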
2504 SmallSet<Register, 32> RegUnitsToPHIUp; 2505 SmallSet<LocIdx, 32> NormalLocsToPHI; 2506 SmallSet<SpillLocationNo, 32> StackSlots; 2507 for (auto Location : MTracker->locations()) { 2508 LocIdx L = Location.Idx; 2509 if (MTracker->isSpill(L)) { 2510 StackSlots.insert(MTracker->locIDToSpill(MTracker->LocIdxToLocID[L])); 2511 continue; 2512 } 2513 2514 Register R = MTracker->LocIdxToLocID[L]; 2515 SmallSet<Register, 8> FoundRegUnits; 2516 bool AnyIllegal = false; 2517 for (MCRegUnit Unit : TRI->regunits(R.asMCReg())) { 2518 for (MCRegUnitRootIterator URoot(Unit, TRI); URoot.isValid(); ++URoot) { 2519 if (!MTracker->isRegisterTracked(*URoot)) { 2520 // Not all roots were loaded into the tracking map: this register 2521 // isn't actually def'd anywhere, we only read from it. Generate PHIs 2522 // for this reg, but don't iterate units. 2523 AnyIllegal = true; 2524 } else { 2525 FoundRegUnits.insert(*URoot); 2526 } 2527 } 2528 } 2529 2530 if (AnyIllegal) { 2531 NormalLocsToPHI.insert(L); 2532 continue; 2533 } 2534 2535 RegUnitsToPHIUp.insert(FoundRegUnits.begin(), FoundRegUnits.end()); 2536 } 2537 2538 // Lambda to fetch PHIs for a given location, and write into the PHIBlocks 2539 // collection. 2540 SmallVector<MachineBasicBlock *, 32> PHIBlocks; 2541 auto CollectPHIsForLoc = [&](LocIdx L) { 2542 // Collect the set of defs. 2543 SmallPtrSet<MachineBasicBlock *, 32> DefBlocks; 2544 for (unsigned int I = 0; I < OrderToBB.size(); ++I) { 2545 MachineBasicBlock *MBB = OrderToBB[I]; 2546 const auto &TransferFunc = MLocTransfer[MBB->getNumber()]; 2547 if (TransferFunc.contains(L)) 2548 DefBlocks.insert(MBB); 2549 } 2550 2551 // The entry block defs the location too: it's the live-in / argument value. 2552 // Only insert if there are other defs though; everything is trivially live 2553 // through otherwise. 2554 if (!DefBlocks.empty()) 2555 DefBlocks.insert(&*MF.begin()); 2556 2557 // Ask the SSA construction algorithm where we should put PHIs. Clear 2558 // anything that might have been hanging around from earlier. 2559 PHIBlocks.clear(); 2560 BlockPHIPlacement(AllBlocks, DefBlocks, PHIBlocks); 2561 }; 2562 2563 auto InstallPHIsAtLoc = [&PHIBlocks, &MInLocs](LocIdx L) { 2564 for (const MachineBasicBlock *MBB : PHIBlocks) 2565 MInLocs[*MBB][L.asU64()] = ValueIDNum(MBB->getNumber(), 0, L); 2566 }; 2567 2568 // For locations with no reg units, just place PHIs. 2569 for (LocIdx L : NormalLocsToPHI) { 2570 CollectPHIsForLoc(L); 2571 // Install those PHI values into the live-in value array. 2572 InstallPHIsAtLoc(L); 2573 } 2574 2575 // For stack slots, calculate PHIs for the equivalent of the units, then 2576 // install for each index. 2577 for (SpillLocationNo Slot : StackSlots) { 2578 for (unsigned Idx : StackUnits) { 2579 unsigned SpillID = MTracker->getSpillIDWithIdx(Slot, Idx); 2580 LocIdx L = MTracker->getSpillMLoc(SpillID); 2581 CollectPHIsForLoc(L); 2582 InstallPHIsAtLoc(L); 2583 2584 // Find anything that aliases this stack index, install PHIs for it too. 
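// (Two slot indexes alias when their position ranges within the slot overlap;
// for illustration, an index covering the whole slot overlaps every narrower
// index at any offset inside it.)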
2585 unsigned Size, Offset;
2586 std::tie(Size, Offset) = MTracker->StackIdxesToPos[Idx];
2587 for (auto &Pair : MTracker->StackSlotIdxes) {
2588 unsigned ThisSize, ThisOffset;
2589 std::tie(ThisSize, ThisOffset) = Pair.first;
2590 if (ThisSize + ThisOffset <= Offset || Size + Offset <= ThisOffset)
2591 continue;
2592
2593 unsigned ThisID = MTracker->getSpillIDWithIdx(Slot, Pair.second);
2594 LocIdx ThisL = MTracker->getSpillMLoc(ThisID);
2595 InstallPHIsAtLoc(ThisL);
2596 }
2597 }
2598 }
2599
2600 // For reg units, place PHIs, and then place them for any aliasing registers.
2601 for (Register R : RegUnitsToPHIUp) {
2602 LocIdx L = MTracker->lookupOrTrackRegister(R);
2603 CollectPHIsForLoc(L);
2604
2605 // Install those PHI values into the live-in value array.
2606 InstallPHIsAtLoc(L);
2607
2608 // Now find aliases and install PHIs for those.
2609 for (MCRegAliasIterator RAI(R, TRI, true); RAI.isValid(); ++RAI) {
2610 // Super-registers that are "above" the largest register read/written by
2611 // the function will alias, but will not be tracked.
2612 if (!MTracker->isRegisterTracked(*RAI))
2613 continue;
2614
2615 LocIdx AliasLoc = MTracker->lookupOrTrackRegister(*RAI);
2616 InstallPHIsAtLoc(AliasLoc);
2617 }
2618 }
2619 }
2620
2621 void InstrRefBasedLDV::buildMLocValueMap(
2622 MachineFunction &MF, FuncValueTable &MInLocs, FuncValueTable &MOutLocs,
2623 SmallVectorImpl<MLocTransferMap> &MLocTransfer) {
2624 std::priority_queue<unsigned int, std::vector<unsigned int>,
2625 std::greater<unsigned int>>
2626 Worklist, Pending;
2627
2628 // We track what is on the current and pending worklist to avoid inserting
2629 // the same thing twice. We could avoid this with a custom priority queue,
2630 // but this is probably not worth it.
2631 SmallPtrSet<MachineBasicBlock *, 16> OnPending, OnWorklist;
2632
2633 // Initialize worklist with every block to be visited. Also produce list of
2634 // all blocks.
2635 SmallPtrSet<MachineBasicBlock *, 32> AllBlocks;
2636 for (unsigned int I = 0; I < BBToOrder.size(); ++I) {
2637 Worklist.push(I);
2638 OnWorklist.insert(OrderToBB[I]);
2639 AllBlocks.insert(OrderToBB[I]);
2640 }
2641
2642 // Initialize entry block to PHIs. These represent arguments.
2643 for (auto Location : MTracker->locations())
2644 MInLocs.tableForEntryMBB()[Location.Idx.asU64()] =
2645 ValueIDNum(0, 0, Location.Idx);
2646
2647 MTracker->reset();
2648
2649 // Start by placing PHIs, using the usual SSA constructor algorithm. Consider
2650 // any machine-location that isn't live-through a block to be def'd in that
2651 // block.
2652 placeMLocPHIs(MF, AllBlocks, MInLocs, MLocTransfer);
2653
2654 // Propagate values to eliminate redundant PHIs. At the same time, this
2655 // produces the table of Block x Location => Value for the entry to each
2656 // block.
2657 // The kind of PHIs we can eliminate are, for example, where one path in a
2658 // conditional spills and restores a register, and the register still has
2659 // the same value once control flow joins, unbeknownst to the PHI placement
2660 // code. Propagating values allows us to identify such un-necessary PHIs and
2661 // remove them.
2662 SmallPtrSet<const MachineBasicBlock *, 16> Visited;
2663 while (!Worklist.empty() || !Pending.empty()) {
2664 // Vector for storing the evaluated block transfer function.
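// Each entry pairs a machine location with the value it will hold at the end
// of the block being evaluated; committing these entries turns the tracker's
// contents into the block's live-outs.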
2665 SmallVector<std::pair<LocIdx, ValueIDNum>, 32> ToRemap; 2666 2667 while (!Worklist.empty()) { 2668 MachineBasicBlock *MBB = OrderToBB[Worklist.top()]; 2669 CurBB = MBB->getNumber(); 2670 Worklist.pop(); 2671 2672 // Join the values in all predecessor blocks. 2673 bool InLocsChanged; 2674 InLocsChanged = mlocJoin(*MBB, Visited, MOutLocs, MInLocs[*MBB]); 2675 InLocsChanged |= Visited.insert(MBB).second; 2676 2677 // Don't examine transfer function if we've visited this loc at least 2678 // once, and inlocs haven't changed. 2679 if (!InLocsChanged) 2680 continue; 2681 2682 // Load the current set of live-ins into MLocTracker. 2683 MTracker->loadFromArray(MInLocs[*MBB], CurBB); 2684 2685 // Each element of the transfer function can be a new def, or a read of 2686 // a live-in value. Evaluate each element, and store to "ToRemap". 2687 ToRemap.clear(); 2688 for (auto &P : MLocTransfer[CurBB]) { 2689 if (P.second.getBlock() == CurBB && P.second.isPHI()) { 2690 // This is a movement of whatever was live in. Read it. 2691 ValueIDNum NewID = MTracker->readMLoc(P.second.getLoc()); 2692 ToRemap.push_back(std::make_pair(P.first, NewID)); 2693 } else { 2694 // It's a def. Just set it. 2695 assert(P.second.getBlock() == CurBB); 2696 ToRemap.push_back(std::make_pair(P.first, P.second)); 2697 } 2698 } 2699 2700 // Commit the transfer function changes into mloc tracker, which 2701 // transforms the contents of the MLocTracker into the live-outs. 2702 for (auto &P : ToRemap) 2703 MTracker->setMLoc(P.first, P.second); 2704 2705 // Now copy out-locs from mloc tracker into out-loc vector, checking 2706 // whether changes have occurred. These changes can have come from both 2707 // the transfer function, and mlocJoin. 2708 bool OLChanged = false; 2709 for (auto Location : MTracker->locations()) { 2710 OLChanged |= MOutLocs[*MBB][Location.Idx.asU64()] != Location.Value; 2711 MOutLocs[*MBB][Location.Idx.asU64()] = Location.Value; 2712 } 2713 2714 MTracker->reset(); 2715 2716 // No need to examine successors again if out-locs didn't change. 2717 if (!OLChanged) 2718 continue; 2719 2720 // All successors should be visited: put any back-edges on the pending 2721 // list for the next pass-through, and any other successors to be 2722 // visited this pass, if they're not going to be already. 2723 for (auto *s : MBB->successors()) { 2724 // Does branching to this successor represent a back-edge? 2725 if (BBToOrder[s] > BBToOrder[MBB]) { 2726 // No: visit it during this dataflow iteration. 2727 if (OnWorklist.insert(s).second) 2728 Worklist.push(BBToOrder[s]); 2729 } else { 2730 // Yes: visit it on the next iteration. 2731 if (OnPending.insert(s).second) 2732 Pending.push(BBToOrder[s]); 2733 } 2734 } 2735 } 2736 2737 Worklist.swap(Pending); 2738 std::swap(OnPending, OnWorklist); 2739 OnPending.clear(); 2740 // At this point, pending must be empty, since it was just the empty 2741 // worklist 2742 assert(Pending.empty() && "Pending should be empty"); 2743 } 2744 2745 // Once all the live-ins don't change on mlocJoin(), we've eliminated all 2746 // redundant PHIs. 2747 } 2748 2749 void InstrRefBasedLDV::BlockPHIPlacement( 2750 const SmallPtrSetImpl<MachineBasicBlock *> &AllBlocks, 2751 const SmallPtrSetImpl<MachineBasicBlock *> &DefBlocks, 2752 SmallVectorImpl<MachineBasicBlock *> &PHIBlocks) { 2753 // Apply IDF calculator to the designated set of location defs, storing 2754 // required PHIs into PHIBlocks. Uses the dominator tree stored in the 2755 // InstrRefBasedLDV object. 
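// (This is the textbook SSA-construction step: a PHI is requested at the
// iterated dominance frontier of the blocks that define the location. For
// example, if two blocks on different paths both def a location, the first
// block where those paths join receives a PHI.)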
2756 IDFCalculatorBase<MachineBasicBlock, false> IDF(DomTree->getBase()); 2757 2758 IDF.setLiveInBlocks(AllBlocks); 2759 IDF.setDefiningBlocks(DefBlocks); 2760 IDF.calculate(PHIBlocks); 2761 } 2762 2763 bool InstrRefBasedLDV::pickVPHILoc( 2764 SmallVectorImpl<DbgOpID> &OutValues, const MachineBasicBlock &MBB, 2765 const LiveIdxT &LiveOuts, FuncValueTable &MOutLocs, 2766 const SmallVectorImpl<const MachineBasicBlock *> &BlockOrders) { 2767 2768 // No predecessors means no PHIs. 2769 if (BlockOrders.empty()) 2770 return false; 2771 2772 // All the location operands that do not already agree need to be joined, 2773 // track the indices of each such location operand here. 2774 SmallDenseSet<unsigned> LocOpsToJoin; 2775 2776 auto FirstValueIt = LiveOuts.find(BlockOrders[0]); 2777 if (FirstValueIt == LiveOuts.end()) 2778 return false; 2779 const DbgValue &FirstValue = *FirstValueIt->second; 2780 2781 for (const auto p : BlockOrders) { 2782 auto OutValIt = LiveOuts.find(p); 2783 if (OutValIt == LiveOuts.end()) 2784 // If we have a predecessor not in scope, we'll never find a PHI position. 2785 return false; 2786 const DbgValue &OutVal = *OutValIt->second; 2787 2788 // No-values cannot have locations we can join on. 2789 if (OutVal.Kind == DbgValue::NoVal) 2790 return false; 2791 2792 // For unjoined VPHIs where we don't know the location, we definitely 2793 // can't find a join loc unless the VPHI is a backedge. 2794 if (OutVal.isUnjoinedPHI() && OutVal.BlockNo != MBB.getNumber()) 2795 return false; 2796 2797 if (!FirstValue.Properties.isJoinable(OutVal.Properties)) 2798 return false; 2799 2800 for (unsigned Idx = 0; Idx < FirstValue.getLocationOpCount(); ++Idx) { 2801 // An unjoined PHI has no defined locations, and so a shared location must 2802 // be found for every operand. 2803 if (OutVal.isUnjoinedPHI()) { 2804 LocOpsToJoin.insert(Idx); 2805 continue; 2806 } 2807 DbgOpID FirstValOp = FirstValue.getDbgOpID(Idx); 2808 DbgOpID OutValOp = OutVal.getDbgOpID(Idx); 2809 if (FirstValOp != OutValOp) { 2810 // We can never join constant ops - the ops must either both be equal 2811 // constant ops or non-const ops. 2812 if (FirstValOp.isConst() || OutValOp.isConst()) 2813 return false; 2814 else 2815 LocOpsToJoin.insert(Idx); 2816 } 2817 } 2818 } 2819 2820 SmallVector<DbgOpID> NewDbgOps; 2821 2822 for (unsigned Idx = 0; Idx < FirstValue.getLocationOpCount(); ++Idx) { 2823 // If this op doesn't need to be joined because the values agree, use that 2824 // already-agreed value. 2825 if (!LocOpsToJoin.contains(Idx)) { 2826 NewDbgOps.push_back(FirstValue.getDbgOpID(Idx)); 2827 continue; 2828 } 2829 2830 std::optional<ValueIDNum> JoinedOpLoc = 2831 pickOperandPHILoc(Idx, MBB, LiveOuts, MOutLocs, BlockOrders); 2832 2833 if (!JoinedOpLoc) 2834 return false; 2835 2836 NewDbgOps.push_back(DbgOpStore.insert(*JoinedOpLoc)); 2837 } 2838 2839 OutValues.append(NewDbgOps); 2840 return true; 2841 } 2842 2843 std::optional<ValueIDNum> InstrRefBasedLDV::pickOperandPHILoc( 2844 unsigned DbgOpIdx, const MachineBasicBlock &MBB, const LiveIdxT &LiveOuts, 2845 FuncValueTable &MOutLocs, 2846 const SmallVectorImpl<const MachineBasicBlock *> &BlockOrders) { 2847 2848 // Collect a set of locations from predecessor where its live-out value can 2849 // be found. 
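// (One vector of candidate locations is built per predecessor; the sets are
// intersected afterwards, so a location survives only if every predecessor's
// live-out value can be found in it.)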
2850 SmallVector<SmallVector<LocIdx, 4>, 8> Locs; 2851 unsigned NumLocs = MTracker->getNumLocs(); 2852 2853 for (const auto p : BlockOrders) { 2854 auto OutValIt = LiveOuts.find(p); 2855 assert(OutValIt != LiveOuts.end()); 2856 const DbgValue &OutVal = *OutValIt->second; 2857 DbgOpID OutValOpID = OutVal.getDbgOpID(DbgOpIdx); 2858 DbgOp OutValOp = DbgOpStore.find(OutValOpID); 2859 assert(!OutValOp.IsConst); 2860 2861 // Create new empty vector of locations. 2862 Locs.resize(Locs.size() + 1); 2863 2864 // If the live-in value is a def, find the locations where that value is 2865 // present. Do the same for VPHIs where we know the VPHI value. 2866 if (OutVal.Kind == DbgValue::Def || 2867 (OutVal.Kind == DbgValue::VPHI && OutVal.BlockNo != MBB.getNumber() && 2868 !OutValOp.isUndef())) { 2869 ValueIDNum ValToLookFor = OutValOp.ID; 2870 // Search the live-outs of the predecessor for the specified value. 2871 for (unsigned int I = 0; I < NumLocs; ++I) { 2872 if (MOutLocs[*p][I] == ValToLookFor) 2873 Locs.back().push_back(LocIdx(I)); 2874 } 2875 } else { 2876 assert(OutVal.Kind == DbgValue::VPHI); 2877 // Otherwise: this is a VPHI on a backedge feeding back into itself, i.e. 2878 // a value that's live-through the whole loop. (It has to be a backedge, 2879 // because a block can't dominate itself). We can accept as a PHI location 2880 // any location where the other predecessors agree, _and_ the machine 2881 // locations feed back into themselves. Therefore, add all self-looping 2882 // machine-value PHI locations. 2883 for (unsigned int I = 0; I < NumLocs; ++I) { 2884 ValueIDNum MPHI(MBB.getNumber(), 0, LocIdx(I)); 2885 if (MOutLocs[*p][I] == MPHI) 2886 Locs.back().push_back(LocIdx(I)); 2887 } 2888 } 2889 } 2890 // We should have found locations for all predecessors, or returned. 2891 assert(Locs.size() == BlockOrders.size()); 2892 2893 // Starting with the first set of locations, take the intersection with 2894 // subsequent sets. 2895 SmallVector<LocIdx, 4> CandidateLocs = Locs[0]; 2896 for (unsigned int I = 1; I < Locs.size(); ++I) { 2897 auto &LocVec = Locs[I]; 2898 SmallVector<LocIdx, 4> NewCandidates; 2899 std::set_intersection(CandidateLocs.begin(), CandidateLocs.end(), 2900 LocVec.begin(), LocVec.end(), std::inserter(NewCandidates, NewCandidates.begin())); 2901 CandidateLocs = NewCandidates; 2902 } 2903 if (CandidateLocs.empty()) 2904 return std::nullopt; 2905 2906 // We now have a set of LocIdxes that contain the right output value in 2907 // each of the predecessors. Pick the lowest; if there's a register loc, 2908 // that'll be it. 2909 LocIdx L = *CandidateLocs.begin(); 2910 2911 // Return a PHI-value-number for the found location. 2912 ValueIDNum PHIVal = {(unsigned)MBB.getNumber(), 0, L}; 2913 return PHIVal; 2914 } 2915 2916 bool InstrRefBasedLDV::vlocJoin( 2917 MachineBasicBlock &MBB, LiveIdxT &VLOCOutLocs, 2918 SmallPtrSet<const MachineBasicBlock *, 8> &BlocksToExplore, 2919 DbgValue &LiveIn) { 2920 LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n"); 2921 bool Changed = false; 2922 2923 // Order predecessors by RPOT order, for exploring them in that order. 2924 SmallVector<MachineBasicBlock *, 8> BlockOrders(MBB.predecessors()); 2925 2926 auto Cmp = [&](MachineBasicBlock *A, MachineBasicBlock *B) { 2927 return BBToOrder[A] < BBToOrder[B]; 2928 }; 2929 2930 llvm::sort(BlockOrders, Cmp); 2931 2932 unsigned CurBlockRPONum = BBToOrder[&MBB]; 2933 2934 // Collect all the incoming DbgValues for this variable, from predecessor 2935 // live-out values. 
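// (Values is kept in RPO order, so entries at index BackEdgesStart and beyond
// come from back-edge predecessors; the backedge test during PHI elimination
// below relies on that ordering.)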
2936 SmallVector<InValueT, 8> Values; 2937 bool Bail = false; 2938 int BackEdgesStart = 0; 2939 for (auto *p : BlockOrders) { 2940 // If the predecessor isn't in scope / to be explored, we'll never be 2941 // able to join any locations. 2942 if (!BlocksToExplore.contains(p)) { 2943 Bail = true; 2944 break; 2945 } 2946 2947 // All Live-outs will have been initialized. 2948 DbgValue &OutLoc = *VLOCOutLocs.find(p)->second; 2949 2950 // Keep track of where back-edges begin in the Values vector. Relies on 2951 // BlockOrders being sorted by RPO. 2952 unsigned ThisBBRPONum = BBToOrder[p]; 2953 if (ThisBBRPONum < CurBlockRPONum) 2954 ++BackEdgesStart; 2955 2956 Values.push_back(std::make_pair(p, &OutLoc)); 2957 } 2958 2959 // If there were no values, or one of the predecessors couldn't have a 2960 // value, then give up immediately. It's not safe to produce a live-in 2961 // value. Leave as whatever it was before. 2962 if (Bail || Values.size() == 0) 2963 return false; 2964 2965 // All (non-entry) blocks have at least one non-backedge predecessor. 2966 // Pick the variable value from the first of these, to compare against 2967 // all others. 2968 const DbgValue &FirstVal = *Values[0].second; 2969 2970 // If the old live-in value is not a PHI then either a) no PHI is needed 2971 // here, or b) we eliminated the PHI that was here. If so, we can just 2972 // propagate in the first parent's incoming value. 2973 if (LiveIn.Kind != DbgValue::VPHI || LiveIn.BlockNo != MBB.getNumber()) { 2974 Changed = LiveIn != FirstVal; 2975 if (Changed) 2976 LiveIn = FirstVal; 2977 return Changed; 2978 } 2979 2980 // Scan for variable values that can never be resolved: if they have 2981 // different DIExpressions, different indirectness, or are mixed constants / 2982 // non-constants. 2983 for (const auto &V : Values) { 2984 if (!V.second->Properties.isJoinable(FirstVal.Properties)) 2985 return false; 2986 if (V.second->Kind == DbgValue::NoVal) 2987 return false; 2988 if (!V.second->hasJoinableLocOps(FirstVal)) 2989 return false; 2990 } 2991 2992 // Try to eliminate this PHI. Do the incoming values all agree? 2993 bool Disagree = false; 2994 for (auto &V : Values) { 2995 if (*V.second == FirstVal) 2996 continue; // No disagreement. 2997 2998 // If both values are not equal but have equal non-empty IDs then they refer 2999 // to the same value from different sources (e.g. one is VPHI and the other 3000 // is Def), which does not cause disagreement. 3001 if (V.second->hasIdenticalValidLocOps(FirstVal)) 3002 continue; 3003 3004 // Eliminate if a backedge feeds a VPHI back into itself. 3005 if (V.second->Kind == DbgValue::VPHI && 3006 V.second->BlockNo == MBB.getNumber() && 3007 // Is this a backedge? 3008 std::distance(Values.begin(), &V) >= BackEdgesStart) 3009 continue; 3010 3011 Disagree = true; 3012 } 3013 3014 // No disagreement -> live-through value. 3015 if (!Disagree) { 3016 Changed = LiveIn != FirstVal; 3017 if (Changed) 3018 LiveIn = FirstVal; 3019 return Changed; 3020 } else { 3021 // Otherwise use a VPHI. 3022 DbgValue VPHI(MBB.getNumber(), FirstVal.Properties, DbgValue::VPHI); 3023 Changed = LiveIn != VPHI; 3024 if (Changed) 3025 LiveIn = VPHI; 3026 return Changed; 3027 } 3028 } 3029 3030 void InstrRefBasedLDV::getBlocksForScope( 3031 const DILocation *DILoc, 3032 SmallPtrSetImpl<const MachineBasicBlock *> &BlocksToExplore, 3033 const SmallPtrSetImpl<MachineBasicBlock *> &AssignBlocks) { 3034 // Get the set of "normal" in-lexical-scope blocks. 
3035 LS.getMachineBasicBlocks(DILoc, BlocksToExplore);
3036
3037 // VarLoc LiveDebugValues tracks variable locations that are defined in
3038 // blocks not in scope. This is something we could legitimately ignore, but
3039 // let's allow it for now for the sake of coverage.
3040 BlocksToExplore.insert(AssignBlocks.begin(), AssignBlocks.end());
3041
3042 // Storage for artificial blocks we intend to add to BlocksToExplore.
3043 DenseSet<const MachineBasicBlock *> ToAdd;
3044
3045 // To avoid needlessly dropping large volumes of variable locations, propagate
3046 // variables through artificial blocks, i.e. those that don't have any
3047 // instructions in scope at all. To accurately replicate VarLoc
3048 // LiveDebugValues, this means exploring all artificial successors too.
3049 // Perform a depth-first-search to enumerate those blocks.
3050 for (const auto *MBB : BlocksToExplore) {
3051 // Depth-first-search state: each node is a block and which successor
3052 // we're currently exploring.
3053 SmallVector<std::pair<const MachineBasicBlock *,
3054 MachineBasicBlock::const_succ_iterator>,
3055 8>
3056 DFS;
3057
3058 // Find any artificial successors not already tracked.
3059 for (auto *succ : MBB->successors()) {
3060 if (BlocksToExplore.count(succ))
3061 continue;
3062 if (!ArtificialBlocks.count(succ))
3063 continue;
3064 ToAdd.insert(succ);
3065 DFS.push_back({succ, succ->succ_begin()});
3066 }
3067
3068 // Search all those blocks, depth first.
3069 while (!DFS.empty()) {
3070 const MachineBasicBlock *CurBB = DFS.back().first;
3071 MachineBasicBlock::const_succ_iterator &CurSucc = DFS.back().second;
3072 // Walk back if we've explored this block's successors to the end.
3073 if (CurSucc == CurBB->succ_end()) {
3074 DFS.pop_back();
3075 continue;
3076 }
3077
3078 // If the current successor is artificial and unexplored, descend into
3079 // it.
3080 if (!ToAdd.count(*CurSucc) && ArtificialBlocks.count(*CurSucc)) {
3081 ToAdd.insert(*CurSucc);
3082 DFS.push_back({*CurSucc, (*CurSucc)->succ_begin()});
3083 continue;
3084 }
3085
3086 ++CurSucc;
3087 }
3088 }
3089
3090 BlocksToExplore.insert(ToAdd.begin(), ToAdd.end());
3091 }
3092
3093 void InstrRefBasedLDV::buildVLocValueMap(
3094 const DILocation *DILoc, const SmallSet<DebugVariable, 4> &VarsWeCareAbout,
3095 SmallPtrSetImpl<MachineBasicBlock *> &AssignBlocks, LiveInsT &Output,
3096 FuncValueTable &MOutLocs, FuncValueTable &MInLocs,
3097 SmallVectorImpl<VLocTracker> &AllTheVLocs) {
3098 // This method is much like buildMLocValueMap, but focuses on a single
3099 // LexicalScope at a time. Pick out a set of blocks and variables that are
3100 // to have their value assignments solved, then run our dataflow algorithm
3101 // until a fixed point is reached.
3102 std::priority_queue<unsigned int, std::vector<unsigned int>,
3103 std::greater<unsigned int>>
3104 Worklist, Pending;
3105 SmallPtrSet<MachineBasicBlock *, 16> OnWorklist, OnPending;
3106
3107 // The set of blocks we'll be examining.
3108 SmallPtrSet<const MachineBasicBlock *, 8> BlocksToExplore;
3109
3110 // The order in which to examine them (RPO).
3111 SmallVector<MachineBasicBlock *, 8> BlockOrders;
3112
3113 // RPO ordering function.
3114 auto Cmp = [&](MachineBasicBlock *A, MachineBasicBlock *B) {
3115 return BBToOrder[A] < BBToOrder[B];
3116 };
3117
3118 getBlocksForScope(DILoc, BlocksToExplore, AssignBlocks);
3119
3120 // Single block scope: not interesting! No propagation at all. Note that
3121 // this could probably go above ArtificialBlocks without damage, but
3122 // that then produces output differences from original-live-debug-values,
3123 // which propagates from a single block into many artificial ones.
3124 if (BlocksToExplore.size() == 1)
3125 return;
3126
3127 // Convert a const set to a non-const set. LexicalScopes
3128 // getMachineBasicBlocks returns const MBB pointers, IDF wants mutable ones.
3129 // (Neither of them mutates anything.)
3130 SmallPtrSet<MachineBasicBlock *, 8> MutBlocksToExplore;
3131 for (const auto *MBB : BlocksToExplore)
3132 MutBlocksToExplore.insert(const_cast<MachineBasicBlock *>(MBB));
3133
3134 // Pick out the relevant blocks in RPO order and sort them.
3135 for (const auto *MBB : BlocksToExplore)
3136 BlockOrders.push_back(const_cast<MachineBasicBlock *>(MBB));
3137
3138 llvm::sort(BlockOrders, Cmp);
3139 unsigned NumBlocks = BlockOrders.size();
3140
3141 // Allocate some vectors for storing the live ins and live outs. Large.
3142 SmallVector<DbgValue, 32> LiveIns, LiveOuts;
3143 LiveIns.reserve(NumBlocks);
3144 LiveOuts.reserve(NumBlocks);
3145
3146 // Initialize all values to start as NoVals. This signifies "it's live
3147 // through, but we don't know what it is".
3148 DbgValueProperties EmptyProperties(EmptyExpr, false, false);
3149 for (unsigned int I = 0; I < NumBlocks; ++I) {
3150 DbgValue EmptyDbgValue(I, EmptyProperties, DbgValue::NoVal);
3151 LiveIns.push_back(EmptyDbgValue);
3152 LiveOuts.push_back(EmptyDbgValue);
3153 }
3154
3155 // Produce by-MBB indexes of live-in/live-outs, to ease lookup within
3156 // vlocJoin.
3157 LiveIdxT LiveOutIdx, LiveInIdx;
3158 LiveOutIdx.reserve(NumBlocks);
3159 LiveInIdx.reserve(NumBlocks);
3160 for (unsigned I = 0; I < NumBlocks; ++I) {
3161 LiveOutIdx[BlockOrders[I]] = &LiveOuts[I];
3162 LiveInIdx[BlockOrders[I]] = &LiveIns[I];
3163 }
3164
3165 // Loop over each variable and place PHIs for it, then propagate values
3166 // between blocks. This keeps the locality of working on one lexical scope at
3167 // a time, but avoids re-processing variable values because some other
3168 // variable has been assigned.
3169 for (const auto &Var : VarsWeCareAbout) {
3170 // Re-initialize live-ins and live-outs, to clear the remains of previous
3171 // variables' live-ins / live-outs.
3172 for (unsigned int I = 0; I < NumBlocks; ++I) {
3173 DbgValue EmptyDbgValue(I, EmptyProperties, DbgValue::NoVal);
3174 LiveIns[I] = EmptyDbgValue;
3175 LiveOuts[I] = EmptyDbgValue;
3176 }
3177
3178 // Place PHIs for variable values, using the LLVM IDF calculator.
3179 // Collect the set of blocks where variables are def'd.
3180 SmallPtrSet<MachineBasicBlock *, 32> DefBlocks;
3181 for (const MachineBasicBlock *ExpMBB : BlocksToExplore) {
3182 auto &TransferFunc = AllTheVLocs[ExpMBB->getNumber()].Vars;
3183 if (TransferFunc.contains(Var))
3184 DefBlocks.insert(const_cast<MachineBasicBlock *>(ExpMBB));
3185 }
3186
3187 SmallVector<MachineBasicBlock *, 32> PHIBlocks;
3188
3189 // Request the set of PHIs we should insert for this variable. If there's
3190 // only one value definition, things are very simple.
3191 if (DefBlocks.size() == 1) {
3192 placePHIsForSingleVarDefinition(MutBlocksToExplore, *DefBlocks.begin(),
3193 AllTheVLocs, Var, Output);
3194 continue;
3195 }
3196
3197 // Otherwise: we need to place PHIs through SSA and propagate values.
3198 BlockPHIPlacement(MutBlocksToExplore, DefBlocks, PHIBlocks);
3199
3200 // Insert PHIs into the per-block live-in tables for this variable.
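// (These placed VPHIs start out "unjoined" -- they have no machine location
// yet. The propagation loop below either finds locations for them via
// pickVPHILoc, or eliminates them in vlocJoin.)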
3201 for (MachineBasicBlock *PHIMBB : PHIBlocks) {
3202 unsigned BlockNo = PHIMBB->getNumber();
3203 DbgValue *LiveIn = LiveInIdx[PHIMBB];
3204 *LiveIn = DbgValue(BlockNo, EmptyProperties, DbgValue::VPHI);
3205 }
3206
3207 for (auto *MBB : BlockOrders) {
3208 Worklist.push(BBToOrder[MBB]);
3209 OnWorklist.insert(MBB);
3210 }
3211
3212 // Iterate over all the blocks we selected, propagating the variable's value.
3213 // This loop does two things:
3214 // * Eliminates un-necessary VPHIs in vlocJoin,
3215 // * Evaluates the block's transfer function (i.e. variable assignments) and
3216 // stores the result to the block's live-outs.
3217 // Always evaluate the transfer function on the first iteration, and when
3218 // the live-ins change thereafter.
3219 bool FirstTrip = true;
3220 while (!Worklist.empty() || !Pending.empty()) {
3221 while (!Worklist.empty()) {
3222 auto *MBB = OrderToBB[Worklist.top()];
3223 CurBB = MBB->getNumber();
3224 Worklist.pop();
3225
3226 auto LiveInsIt = LiveInIdx.find(MBB);
3227 assert(LiveInsIt != LiveInIdx.end());
3228 DbgValue *LiveIn = LiveInsIt->second;
3229
3230 // Join values from predecessor live-outs; the joined value is written
3231 // back through LiveIn.
3232 bool InLocsChanged =
3233 vlocJoin(*MBB, LiveOutIdx, BlocksToExplore, *LiveIn);
3234
3235 SmallVector<const MachineBasicBlock *, 8> Preds;
3236 for (const auto *Pred : MBB->predecessors())
3237 Preds.push_back(Pred);
3238
3239 // If this block's live-in value is a VPHI, try to pick a machine-value
3240 // for it. This makes the machine-value available and propagated
3241 // through all blocks by the time value propagation finishes. We can't
3242 // do this any earlier as it needs to read the block live-outs.
3243 if (LiveIn->Kind == DbgValue::VPHI && LiveIn->BlockNo == (int)CurBB) {
3244 // There's a small possibility that on a preceding path, a VPHI is
3245 // eliminated and transitions from VPHI-with-location to
3246 // live-through-value. As a result, the selected location of any VPHI
3247 // might change, so we need to re-compute it on each iteration.
3248 SmallVector<DbgOpID> JoinedOps;
3249
3250 if (pickVPHILoc(JoinedOps, *MBB, LiveOutIdx, MOutLocs, Preds)) {
3251 bool NewLocPicked = !equal(LiveIn->getDbgOpIDs(), JoinedOps);
3252 InLocsChanged |= NewLocPicked;
3253 if (NewLocPicked)
3254 LiveIn->setDbgOpIDs(JoinedOps);
3255 }
3256 }
3257
3258 if (!InLocsChanged && !FirstTrip)
3259 continue;
3260
3261 DbgValue *LiveOut = LiveOutIdx[MBB];
3262 bool OLChanged = false;
3263
3264 // Do transfer function.
3265 auto &VTracker = AllTheVLocs[MBB->getNumber()];
3266 auto TransferIt = VTracker.Vars.find(Var);
3267 if (TransferIt != VTracker.Vars.end()) {
3268 // Erase on empty transfer (DBG_VALUE $noreg).
3269 if (TransferIt->second.Kind == DbgValue::Undef) {
3270 DbgValue NewVal(MBB->getNumber(), EmptyProperties, DbgValue::NoVal);
3271 if (*LiveOut != NewVal) {
3272 *LiveOut = NewVal;
3273 OLChanged = true;
3274 }
3275 } else {
3276 // Insert new variable value; or overwrite.
3277 if (*LiveOut != TransferIt->second) {
3278 *LiveOut = TransferIt->second;
3279 OLChanged = true;
3280 }
3281 }
3282 } else {
3283 // Just copy live-ins to live-outs, for anything not transferred.
3284 if (*LiveOut != *LiveIn) {
3285 *LiveOut = *LiveIn;
3286 OLChanged = true;
3287 }
3288 }
3289
3290 // If no live-out value changed, there's no need to explore further.
3291 if (!OLChanged)
3292 continue;
3293
3294 // We should visit all successors. Ensure we'll visit any non-backedge
3295 // successors during this dataflow iteration; book backedge successors
3296 // to be visited next time around.
3297 for (auto *s : MBB->successors()) {
3298 // Ignore out of scope / not-to-be-explored successors.
3299 if (!LiveInIdx.contains(s))
3300 continue;
3301
3302 if (BBToOrder[s] > BBToOrder[MBB]) {
3303 if (OnWorklist.insert(s).second)
3304 Worklist.push(BBToOrder[s]);
3305 } else if (OnPending.insert(s).second && (FirstTrip || OLChanged)) {
3306 Pending.push(BBToOrder[s]);
3307 }
3308 }
3309 }
3310 Worklist.swap(Pending);
3311 std::swap(OnWorklist, OnPending);
3312 OnPending.clear();
3313 assert(Pending.empty());
3314 FirstTrip = false;
3315 }
3316
3317 // Save live-ins to output vector. Ignore any that are still marked as being
3318 // VPHIs with no location -- those are variables that we know the value of,
3319 // but are not actually available in the register file.
3320 for (auto *MBB : BlockOrders) {
3321 DbgValue *BlockLiveIn = LiveInIdx[MBB];
3322 if (BlockLiveIn->Kind == DbgValue::NoVal)
3323 continue;
3324 if (BlockLiveIn->isUnjoinedPHI())
3325 continue;
3326 if (BlockLiveIn->Kind == DbgValue::VPHI)
3327 BlockLiveIn->Kind = DbgValue::Def;
3328 assert(BlockLiveIn->Properties.DIExpr->getFragmentInfo() ==
3329 Var.getFragment() && "Fragment info missing during value prop");
3330 Output[MBB->getNumber()].push_back(std::make_pair(Var, *BlockLiveIn));
3331 }
3332 } // Per-variable loop.
3333
3334 BlockOrders.clear();
3335 BlocksToExplore.clear();
3336 }
3337
3338 void InstrRefBasedLDV::placePHIsForSingleVarDefinition(
3339 const SmallPtrSetImpl<MachineBasicBlock *> &InScopeBlocks,
3340 MachineBasicBlock *AssignMBB, SmallVectorImpl<VLocTracker> &AllTheVLocs,
3341 const DebugVariable &Var, LiveInsT &Output) {
3342 // If there is a single definition of the variable, then working out its
3343 // value everywhere is very simple: it holds in every block dominated by the
3344 // definition. At the dominance frontier, the usual algorithm would:
3345 // * Place PHIs,
3346 // * Propagate values into them,
3347 // * Find there's no incoming variable value from the other incoming branches
3348 // of the dominance frontier,
3349 // * Specify there's no variable value in blocks past the frontier.
3350 // This is a common case, hence it's worth special-casing it.
3351
3352 // Pick out the variable's value from the block transfer function.
3353 VLocTracker &VLocs = AllTheVLocs[AssignMBB->getNumber()];
3354 auto ValueIt = VLocs.Vars.find(Var);
3355 const DbgValue &Value = ValueIt->second;
3356
3357 // If it's an explicit assignment of "undef", that means there is no location
3358 // anyway, anywhere.
3359 if (Value.Kind == DbgValue::Undef)
3360 return;
3361
3362 // Assign the variable value on entry to each dominated block that's in scope.
3363 // Skip the definition block -- it's assigned the variable value in the middle
3364 // of the block somewhere.
3365 for (auto *ScopeBlock : InScopeBlocks) {
3366 if (!DomTree->properlyDominates(AssignMBB, ScopeBlock))
3367 continue;
3368
3369 Output[ScopeBlock->getNumber()].push_back({Var, Value});
3370 }
3371
3372 // All blocks that aren't dominated have no live-in value, thus no variable
3373 // value will be given to them.
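// For example (block numbers hypothetical): if the only DBG_VALUE for the
// variable is in bb.1, and bb.1 dominates bb.2 and bb.3 but not bb.4, the
// value becomes the live-in for bb.2 and bb.3 only; bb.4 would need a PHI
// that we have not proven exists, so it receives nothing.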
3374 } 3375 3376 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) 3377 void InstrRefBasedLDV::dump_mloc_transfer( 3378 const MLocTransferMap &mloc_transfer) const { 3379 for (const auto &P : mloc_transfer) { 3380 std::string foo = MTracker->LocIdxToName(P.first); 3381 std::string bar = MTracker->IDAsString(P.second); 3382 dbgs() << "Loc " << foo << " --> " << bar << "\n"; 3383 } 3384 } 3385 #endif 3386 3387 void InstrRefBasedLDV::initialSetup(MachineFunction &MF) { 3388 // Build some useful data structures. 3389 3390 LLVMContext &Context = MF.getFunction().getContext(); 3391 EmptyExpr = DIExpression::get(Context, {}); 3392 3393 auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool { 3394 if (const DebugLoc &DL = MI.getDebugLoc()) 3395 return DL.getLine() != 0; 3396 return false; 3397 }; 3398 // Collect a set of all the artificial blocks. 3399 for (auto &MBB : MF) 3400 if (none_of(MBB.instrs(), hasNonArtificialLocation)) 3401 ArtificialBlocks.insert(&MBB); 3402 3403 // Compute mappings of block <=> RPO order. 3404 ReversePostOrderTraversal<MachineFunction *> RPOT(&MF); 3405 unsigned int RPONumber = 0; 3406 auto processMBB = [&](MachineBasicBlock *MBB) { 3407 OrderToBB[RPONumber] = MBB; 3408 BBToOrder[MBB] = RPONumber; 3409 BBNumToRPO[MBB->getNumber()] = RPONumber; 3410 ++RPONumber; 3411 }; 3412 for (MachineBasicBlock *MBB : RPOT) 3413 processMBB(MBB); 3414 for (MachineBasicBlock &MBB : MF) 3415 if (!BBToOrder.contains(&MBB)) 3416 processMBB(&MBB); 3417 3418 // Order value substitutions by their "source" operand pair, for quick lookup. 3419 llvm::sort(MF.DebugValueSubstitutions); 3420 3421 #ifdef EXPENSIVE_CHECKS 3422 // As an expensive check, test whether there are any duplicate substitution 3423 // sources in the collection. 3424 if (MF.DebugValueSubstitutions.size() > 2) { 3425 for (auto It = MF.DebugValueSubstitutions.begin(); 3426 It != std::prev(MF.DebugValueSubstitutions.end()); ++It) { 3427 assert(It->Src != std::next(It)->Src && "Duplicate variable location " 3428 "substitution seen"); 3429 } 3430 } 3431 #endif 3432 } 3433 3434 // Produce an "ejection map" for blocks, i.e., what's the highest-numbered 3435 // lexical scope it's used in. When exploring in DFS order and we pass that 3436 // scope, the block can be processed and any tracking information freed. 3437 void InstrRefBasedLDV::makeDepthFirstEjectionMap( 3438 SmallVectorImpl<unsigned> &EjectionMap, 3439 const ScopeToDILocT &ScopeToDILocation, 3440 ScopeToAssignBlocksT &ScopeToAssignBlocks) { 3441 SmallPtrSet<const MachineBasicBlock *, 8> BlocksToExplore; 3442 SmallVector<std::pair<LexicalScope *, ssize_t>, 4> WorkStack; 3443 auto *TopScope = LS.getCurrentFunctionScope(); 3444 3445 // Unlike lexical scope explorers, we explore in reverse order, to find the 3446 // "last" lexical scope used for each block early. 3447 WorkStack.push_back({TopScope, TopScope->getChildren().size() - 1}); 3448 3449 while (!WorkStack.empty()) { 3450 auto &ScopePosition = WorkStack.back(); 3451 LexicalScope *WS = ScopePosition.first; 3452 ssize_t ChildNum = ScopePosition.second--; 3453 3454 const SmallVectorImpl<LexicalScope *> &Children = WS->getChildren(); 3455 if (ChildNum >= 0) { 3456 // If ChildNum is positive, there are remaining children to explore. 3457 // Push the child and its children-count onto the stack. 
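// (ChildNum counts down from getChildren().size() - 1; an index of zero is
// still a valid child, and -1 -- handled in the else branch below -- means
// every child has been visited and the scope itself can be examined.)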
3458 auto &ChildScope = Children[ChildNum];
3459 WorkStack.push_back(
3460 std::make_pair(ChildScope, ChildScope->getChildren().size() - 1));
3461 } else {
3462 WorkStack.pop_back();
3463
3464 // We've explored all children and any later blocks: examine all blocks
3465 // in our scope. If they haven't yet had an ejection number set, then
3466 // this scope will be the last to use that block.
3467 auto DILocationIt = ScopeToDILocation.find(WS);
3468 if (DILocationIt != ScopeToDILocation.end()) {
3469 getBlocksForScope(DILocationIt->second, BlocksToExplore,
3470 ScopeToAssignBlocks.find(WS)->second);
3471 for (const auto *MBB : BlocksToExplore) {
3472 unsigned BBNum = MBB->getNumber();
3473 if (EjectionMap[BBNum] == 0)
3474 EjectionMap[BBNum] = WS->getDFSOut();
3475 }
3476
3477 BlocksToExplore.clear();
3478 }
3479 }
3480 }
3481 }
3482
3483 bool InstrRefBasedLDV::depthFirstVLocAndEmit(
3484 unsigned MaxNumBlocks, const ScopeToDILocT &ScopeToDILocation,
3485 const ScopeToVarsT &ScopeToVars, ScopeToAssignBlocksT &ScopeToAssignBlocks,
3486 LiveInsT &Output, FuncValueTable &MOutLocs, FuncValueTable &MInLocs,
3487 SmallVectorImpl<VLocTracker> &AllTheVLocs, MachineFunction &MF,
3488 DenseMap<DebugVariable, unsigned> &AllVarsNumbering,
3489 const TargetPassConfig &TPC) {
3490 TTracker = new TransferTracker(TII, MTracker, MF, *TRI, CalleeSavedRegs, TPC);
3491 unsigned NumLocs = MTracker->getNumLocs();
3492 VTracker = nullptr;
3493
3494 // No scopes? No variable locations.
3495 if (!LS.getCurrentFunctionScope())
3496 return false;
3497
3498 // Build map from block number to the last scope that uses the block.
3499 SmallVector<unsigned, 16> EjectionMap;
3500 EjectionMap.resize(MaxNumBlocks, 0);
3501 makeDepthFirstEjectionMap(EjectionMap, ScopeToDILocation,
3502 ScopeToAssignBlocks);
3503
3504 // Helper lambda for ejecting a block -- if nothing is going to use the block,
3505 // we can translate the variable location information into DBG_VALUEs and then
3506 // free all of InstrRefBasedLDV's data structures.
3507 auto EjectBlock = [&](MachineBasicBlock &MBB) -> void {
3508 unsigned BBNum = MBB.getNumber();
3509 AllTheVLocs[BBNum].clear();
3510
3511 // Prime the transfer-tracker, and then step through all the block
3512 // instructions, installing transfers.
3513 MTracker->reset();
3514 MTracker->loadFromArray(MInLocs[MBB], BBNum);
3515 TTracker->loadInlocs(MBB, MInLocs[MBB], DbgOpStore, Output[BBNum], NumLocs);
3516
3517 CurBB = BBNum;
3518 CurInst = 1;
3519 for (auto &MI : MBB) {
3520 process(MI, &MOutLocs, &MInLocs);
3521 TTracker->checkInstForNewValues(CurInst, MI.getIterator());
3522 ++CurInst;
3523 }
3524
3525 // Free machine-location tables for this block.
3526 MInLocs.ejectTableForBlock(MBB);
3527 MOutLocs.ejectTableForBlock(MBB);
3528 // We don't need live-in variable values for this block either.
3529 Output[BBNum].clear();
3530 AllTheVLocs[BBNum].clear();
3531 };
3532
3533 SmallPtrSet<const MachineBasicBlock *, 8> BlocksToExplore;
3534 SmallVector<std::pair<LexicalScope *, ssize_t>, 4> WorkStack;
3535 WorkStack.push_back({LS.getCurrentFunctionScope(), 0});
3536 unsigned HighestDFSIn = 0;
3537
3538 // Proceed to explore in depth first order.
3539 while (!WorkStack.empty()) {
3540 auto &ScopePosition = WorkStack.back();
3541 LexicalScope *WS = ScopePosition.first;
3542 ssize_t ChildNum = ScopePosition.second++;
3543
3544 // We observe scopes with children twice here, once descending in, once
3545 // ascending out of the scope nest.
Use HighestDFSIn as a ratchet to ensure 3546 // we don't process a scope twice. Additionally, ignore scopes that don't 3547 // have a DILocation -- by proxy, this means we never tracked any variable 3548 // assignments in that scope. 3549 auto DILocIt = ScopeToDILocation.find(WS); 3550 if (HighestDFSIn <= WS->getDFSIn() && DILocIt != ScopeToDILocation.end()) { 3551 const DILocation *DILoc = DILocIt->second; 3552 auto &VarsWeCareAbout = ScopeToVars.find(WS)->second; 3553 auto &BlocksInScope = ScopeToAssignBlocks.find(WS)->second; 3554 3555 buildVLocValueMap(DILoc, VarsWeCareAbout, BlocksInScope, Output, MOutLocs, 3556 MInLocs, AllTheVLocs); 3557 } 3558 3559 HighestDFSIn = std::max(HighestDFSIn, WS->getDFSIn()); 3560 3561 // Descend into any scope nests. 3562 const SmallVectorImpl<LexicalScope *> &Children = WS->getChildren(); 3563 if (ChildNum < (ssize_t)Children.size()) { 3564 // There are children to explore -- push onto stack and continue. 3565 auto &ChildScope = Children[ChildNum]; 3566 WorkStack.push_back(std::make_pair(ChildScope, 0)); 3567 } else { 3568 WorkStack.pop_back(); 3569 3570 // We've explored a leaf, or have explored all the children of a scope. 3571 // Try to eject any blocks where this is the last scope it's relevant to. 3572 auto DILocationIt = ScopeToDILocation.find(WS); 3573 if (DILocationIt == ScopeToDILocation.end()) 3574 continue; 3575 3576 getBlocksForScope(DILocationIt->second, BlocksToExplore, 3577 ScopeToAssignBlocks.find(WS)->second); 3578 for (const auto *MBB : BlocksToExplore) 3579 if (WS->getDFSOut() == EjectionMap[MBB->getNumber()]) 3580 EjectBlock(const_cast<MachineBasicBlock &>(*MBB)); 3581 3582 BlocksToExplore.clear(); 3583 } 3584 } 3585 3586 // Some artificial blocks may not have been ejected, meaning they're not 3587 // connected to an actual legitimate scope. This can technically happen 3588 // with things like the entry block. In theory, we shouldn't need to do 3589 // anything for such out-of-scope blocks, but for the sake of being similar 3590 // to VarLocBasedLDV, eject these too. 3591 for (auto *MBB : ArtificialBlocks) 3592 if (MInLocs.hasTableFor(*MBB)) 3593 EjectBlock(*MBB); 3594 3595 return emitTransfers(AllVarsNumbering); 3596 } 3597 3598 bool InstrRefBasedLDV::emitTransfers( 3599 DenseMap<DebugVariable, unsigned> &AllVarsNumbering) { 3600 // Go through all the transfers recorded in the TransferTracker -- this is 3601 // both the live-ins to a block, and any movements of values that happen 3602 // in the middle. 3603 for (const auto &P : TTracker->Transfers) { 3604 // We have to insert DBG_VALUEs in a consistent order, otherwise they 3605 // appear in DWARF in different orders. Use the order that they appear 3606 // when walking through each block / each instruction, stored in 3607 // AllVarsNumbering. 3608 SmallVector<std::pair<unsigned, MachineInstr *>> Insts; 3609 for (MachineInstr *MI : P.Insts) { 3610 DebugVariable Var(MI->getDebugVariable(), MI->getDebugExpression(), 3611 MI->getDebugLoc()->getInlinedAt()); 3612 Insts.emplace_back(AllVarsNumbering.find(Var)->second, MI); 3613 } 3614 llvm::sort(Insts, llvm::less_first()); 3615 3616 // Insert either before or after the designated point... 3617 if (P.MBB) { 3618 MachineBasicBlock &MBB = *P.MBB; 3619 for (const auto &Pair : Insts) 3620 MBB.insert(P.Pos, Pair.second); 3621 } else { 3622 // Terminators, like tail calls, can clobber things. Don't try and place 3623 // transfers after them. 
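// (In this branch the transfer wants to go *after* P.Pos; when P.Pos is the
// block's terminator there is no good spot for it, so it is dropped.)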
3624 if (P.Pos->isTerminator()) 3625 continue; 3626 3627 MachineBasicBlock &MBB = *P.Pos->getParent(); 3628 for (const auto &Pair : Insts) 3629 MBB.insertAfterBundle(P.Pos, Pair.second); 3630 } 3631 } 3632 3633 return TTracker->Transfers.size() != 0; 3634 } 3635 3636 /// Calculate the liveness information for the given machine function and 3637 /// extend ranges across basic blocks. 3638 bool InstrRefBasedLDV::ExtendRanges(MachineFunction &MF, 3639 MachineDominatorTree *DomTree, 3640 TargetPassConfig *TPC, 3641 unsigned InputBBLimit, 3642 unsigned InputDbgValLimit) { 3643 // No subprogram means this function contains no debuginfo. 3644 if (!MF.getFunction().getSubprogram()) 3645 return false; 3646 3647 LLVM_DEBUG(dbgs() << "\nDebug Range Extension\n"); 3648 this->TPC = TPC; 3649 3650 this->DomTree = DomTree; 3651 TRI = MF.getSubtarget().getRegisterInfo(); 3652 MRI = &MF.getRegInfo(); 3653 TII = MF.getSubtarget().getInstrInfo(); 3654 TFI = MF.getSubtarget().getFrameLowering(); 3655 TFI->getCalleeSaves(MF, CalleeSavedRegs); 3656 MFI = &MF.getFrameInfo(); 3657 LS.initialize(MF); 3658 3659 const auto &STI = MF.getSubtarget(); 3660 AdjustsStackInCalls = MFI->adjustsStack() && 3661 STI.getFrameLowering()->stackProbeFunctionModifiesSP(); 3662 if (AdjustsStackInCalls) 3663 StackProbeSymbolName = STI.getTargetLowering()->getStackProbeSymbolName(MF); 3664 3665 MTracker = 3666 new MLocTracker(MF, *TII, *TRI, *MF.getSubtarget().getTargetLowering()); 3667 VTracker = nullptr; 3668 TTracker = nullptr; 3669 3670 SmallVector<MLocTransferMap, 32> MLocTransfer; 3671 SmallVector<VLocTracker, 8> vlocs; 3672 LiveInsT SavedLiveIns; 3673 3674 int MaxNumBlocks = -1; 3675 for (auto &MBB : MF) 3676 MaxNumBlocks = std::max(MBB.getNumber(), MaxNumBlocks); 3677 assert(MaxNumBlocks >= 0); 3678 ++MaxNumBlocks; 3679 3680 initialSetup(MF); 3681 3682 MLocTransfer.resize(MaxNumBlocks); 3683 vlocs.resize(MaxNumBlocks, VLocTracker(OverlapFragments, EmptyExpr)); 3684 SavedLiveIns.resize(MaxNumBlocks); 3685 3686 produceMLocTransferFunction(MF, MLocTransfer, MaxNumBlocks); 3687 3688 // Allocate and initialize two array-of-arrays for the live-in and live-out 3689 // machine values. The outer dimension is the block number; while the inner 3690 // dimension is a LocIdx from MLocTracker. 3691 unsigned NumLocs = MTracker->getNumLocs(); 3692 FuncValueTable MOutLocs(MaxNumBlocks, NumLocs); 3693 FuncValueTable MInLocs(MaxNumBlocks, NumLocs); 3694 3695 // Solve the machine value dataflow problem using the MLocTransfer function, 3696 // storing the computed live-ins / live-outs into the array-of-arrays. We use 3697 // both live-ins and live-outs for decision making in the variable value 3698 // dataflow problem. 3699 buildMLocValueMap(MF, MInLocs, MOutLocs, MLocTransfer); 3700 3701 // Patch up debug phi numbers, turning unknown block-live-in values into 3702 // either live-through machine values, or PHIs. 3703 for (auto &DBG_PHI : DebugPHINumToValue) { 3704 // Identify unresolved block-live-ins. 3705 if (!DBG_PHI.ValueRead) 3706 continue; 3707 3708 ValueIDNum &Num = *DBG_PHI.ValueRead; 3709 if (!Num.isPHI()) 3710 continue; 3711 3712 unsigned BlockNo = Num.getBlock(); 3713 LocIdx LocNo = Num.getLoc(); 3714 ValueIDNum ResolvedValue = MInLocs[BlockNo][LocNo.asU64()]; 3715 // If there is no resolved value for this live-in then it is not directly 3716 // reachable from the entry block -- model it as a PHI on entry to this 3717 // block, which means we leave the ValueIDNum unchanged. 
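// For illustration (block and location numbers hypothetical): a DBG_PHI that
// read the live-in {bb.3, 0, SP} is looked up in MInLocs[3][SP]; if the
// dataflow solution says that slot actually holds the def {bb.0, 4, SP}, the
// record is rewritten to that def, otherwise it stays a block PHI.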
3718 if (ResolvedValue != ValueIDNum::EmptyValue)
3719 Num = ResolvedValue;
3720 }
3721 // Later, we'll be looking up ranges of instruction numbers.
3722 llvm::sort(DebugPHINumToValue);
3723
3724 // Walk back through each block / instruction, collecting DBG_VALUE
3725 // instructions and recording what machine value their operands refer to.
3726 for (auto &OrderPair : OrderToBB) {
3727 MachineBasicBlock &MBB = *OrderPair.second;
3728 CurBB = MBB.getNumber();
3729 VTracker = &vlocs[CurBB];
3730 VTracker->MBB = &MBB;
3731 MTracker->loadFromArray(MInLocs[MBB], CurBB);
3732 CurInst = 1;
3733 for (auto &MI : MBB) {
3734 process(MI, &MOutLocs, &MInLocs);
3735 ++CurInst;
3736 }
3737 MTracker->reset();
3738 }
3739
3740 // Number all variables in the order that they appear, to be used as a stable
3741 // insertion order later.
3742 DenseMap<DebugVariable, unsigned> AllVarsNumbering;
3743
3744 // Map from one LexicalScope to all the variables in that scope.
3745 ScopeToVarsT ScopeToVars;
3746
3747 // Map from one lexical scope to all blocks where assignments happen for
3748 // that scope.
3749 ScopeToAssignBlocksT ScopeToAssignBlocks;
3750
3751 // Map from each lexical scope to the DILocation that describes it.
3752 ScopeToDILocT ScopeToDILocation;
3753
3754 // To mirror old LiveDebugValues, enumerate variables in RPOT order. Otherwise
3755 // the order is unimportant, it just has to be stable.
3756 unsigned VarAssignCount = 0;
3757 for (unsigned int I = 0; I < OrderToBB.size(); ++I) {
3758 auto *MBB = OrderToBB[I];
3759 auto *VTracker = &vlocs[MBB->getNumber()];
3760 // Collect each variable with a DBG_VALUE in this block.
3761 for (auto &idx : VTracker->Vars) {
3762 const auto &Var = idx.first;
3763 const DILocation *ScopeLoc = VTracker->Scopes[Var];
3764 assert(ScopeLoc != nullptr);
3765 auto *Scope = LS.findLexicalScope(ScopeLoc);
3766
3767 // No insts in scope -> shouldn't have been recorded.
3768 assert(Scope != nullptr);
3769
3770 AllVarsNumbering.insert(std::make_pair(Var, AllVarsNumbering.size()));
3771 ScopeToVars[Scope].insert(Var);
3772 ScopeToAssignBlocks[Scope].insert(VTracker->MBB);
3773 ScopeToDILocation[Scope] = ScopeLoc;
3774 ++VarAssignCount;
3775 }
3776 }
3777
3778 bool Changed = false;
3779
3780 // If we have an extremely large number of variable assignments and blocks,
3781 // bail out at this point. We've burnt some time doing analysis already, but
3782 // we should cut our losses.
3783 if ((unsigned)MaxNumBlocks > InputBBLimit &&
3784 VarAssignCount > InputDbgValLimit) {
3785 LLVM_DEBUG(dbgs() << "Disabling InstrRefBasedLDV: " << MF.getName()
3786 << " has " << MaxNumBlocks << " basic blocks and "
3787 << VarAssignCount
3788 << " variable assignments, exceeding limits.\n");
3789 } else {
3790 // Solve the variable value problem and emit locations to blocks using a
3791 // lexical-scope depth-first search, ejecting each block's data as soon
3792 // as no deeper scope still needs it.
3793 Changed = depthFirstVLocAndEmit( 3794 MaxNumBlocks, ScopeToDILocation, ScopeToVars, ScopeToAssignBlocks, 3795 SavedLiveIns, MOutLocs, MInLocs, vlocs, MF, AllVarsNumbering, *TPC); 3796 } 3797 3798 delete MTracker; 3799 delete TTracker; 3800 MTracker = nullptr; 3801 VTracker = nullptr; 3802 TTracker = nullptr; 3803 3804 ArtificialBlocks.clear(); 3805 OrderToBB.clear(); 3806 BBToOrder.clear(); 3807 BBNumToRPO.clear(); 3808 DebugInstrNumToInstr.clear(); 3809 DebugPHINumToValue.clear(); 3810 OverlapFragments.clear(); 3811 SeenFragments.clear(); 3812 SeenDbgPHIs.clear(); 3813 DbgOpStore.clear(); 3814 3815 return Changed; 3816 } 3817 3818 LDVImpl *llvm::makeInstrRefBasedLiveDebugValues() { 3819 return new InstrRefBasedLDV(); 3820 } 3821 3822 namespace { 3823 class LDVSSABlock; 3824 class LDVSSAUpdater; 3825 3826 // Pick a type to identify incoming block values as we construct SSA. We 3827 // can't use anything more robust than an integer unfortunately, as SSAUpdater 3828 // expects to zero-initialize the type. 3829 typedef uint64_t BlockValueNum; 3830 3831 /// Represents an SSA PHI node for the SSA updater class. Contains the block 3832 /// this PHI is in, the value number it would have, and the expected incoming 3833 /// values from parent blocks. 3834 class LDVSSAPhi { 3835 public: 3836 SmallVector<std::pair<LDVSSABlock *, BlockValueNum>, 4> IncomingValues; 3837 LDVSSABlock *ParentBlock; 3838 BlockValueNum PHIValNum; 3839 LDVSSAPhi(BlockValueNum PHIValNum, LDVSSABlock *ParentBlock) 3840 : ParentBlock(ParentBlock), PHIValNum(PHIValNum) {} 3841 3842 LDVSSABlock *getParent() { return ParentBlock; } 3843 }; 3844 3845 /// Thin wrapper around a block predecessor iterator. Only difference from a 3846 /// normal block iterator is that it dereferences to an LDVSSABlock. 3847 class LDVSSABlockIterator { 3848 public: 3849 MachineBasicBlock::pred_iterator PredIt; 3850 LDVSSAUpdater &Updater; 3851 3852 LDVSSABlockIterator(MachineBasicBlock::pred_iterator PredIt, 3853 LDVSSAUpdater &Updater) 3854 : PredIt(PredIt), Updater(Updater) {} 3855 3856 bool operator!=(const LDVSSABlockIterator &OtherIt) const { 3857 return OtherIt.PredIt != PredIt; 3858 } 3859 3860 LDVSSABlockIterator &operator++() { 3861 ++PredIt; 3862 return *this; 3863 } 3864 3865 LDVSSABlock *operator*(); 3866 }; 3867 3868 /// Thin wrapper around a block for SSA Updater interface. Necessary because 3869 /// we need to track the PHI value(s) that we may have observed as necessary 3870 /// in this block. 3871 class LDVSSABlock { 3872 public: 3873 MachineBasicBlock &BB; 3874 LDVSSAUpdater &Updater; 3875 using PHIListT = SmallVector<LDVSSAPhi, 1>; 3876 /// List of PHIs in this block. There should only ever be one. 3877 PHIListT PHIList; 3878 3879 LDVSSABlock(MachineBasicBlock &BB, LDVSSAUpdater &Updater) 3880 : BB(BB), Updater(Updater) {} 3881 3882 LDVSSABlockIterator succ_begin() { 3883 return LDVSSABlockIterator(BB.succ_begin(), Updater); 3884 } 3885 3886 LDVSSABlockIterator succ_end() { 3887 return LDVSSABlockIterator(BB.succ_end(), Updater); 3888 } 3889 3890 /// SSAUpdater has requested a PHI: create that within this block record. 3891 LDVSSAPhi *newPHI(BlockValueNum Value) { 3892 PHIList.emplace_back(Value, this); 3893 return &PHIList.back(); 3894 } 3895 3896 /// SSAUpdater wishes to know what PHIs already exist in this block. 3897 PHIListT &phis() { return PHIList; } 3898 }; 3899 3900 /// Utility class for the SSAUpdater interface: tracks blocks, PHIs and values 3901 /// while SSAUpdater is exploring the CFG. 
It's passed as a handle / baton to 3902 // SSAUpdaterTraits<LDVSSAUpdater>. 3903 class LDVSSAUpdater { 3904 public: 3905 /// Map of value numbers to PHI records. 3906 DenseMap<BlockValueNum, LDVSSAPhi *> PHIs; 3907 /// Map of which blocks generate Undef values -- blocks that are not 3908 /// dominated by any Def. 3909 DenseMap<MachineBasicBlock *, BlockValueNum> UndefMap; 3910 /// Map of machine blocks to our own records of them. 3911 DenseMap<MachineBasicBlock *, LDVSSABlock *> BlockMap; 3912 /// Machine location where any PHI must occur. 3913 LocIdx Loc; 3914 /// Table of live-in machine value numbers for blocks / locations. 3915 const FuncValueTable &MLiveIns; 3916 3917 LDVSSAUpdater(LocIdx L, const FuncValueTable &MLiveIns) 3918 : Loc(L), MLiveIns(MLiveIns) {} 3919 3920 void reset() { 3921 for (auto &Block : BlockMap) 3922 delete Block.second; 3923 3924 PHIs.clear(); 3925 UndefMap.clear(); 3926 BlockMap.clear(); 3927 } 3928 3929 ~LDVSSAUpdater() { reset(); } 3930 3931 /// For a given MBB, create a wrapper block for it. Stores it in the 3932 /// LDVSSAUpdater block map. 3933 LDVSSABlock *getSSALDVBlock(MachineBasicBlock *BB) { 3934 auto it = BlockMap.find(BB); 3935 if (it == BlockMap.end()) { 3936 BlockMap[BB] = new LDVSSABlock(*BB, *this); 3937 it = BlockMap.find(BB); 3938 } 3939 return it->second; 3940 } 3941 3942 /// Find the live-in value number for the given block. Looks up the value at 3943 /// the PHI location on entry. 3944 BlockValueNum getValue(LDVSSABlock *LDVBB) { 3945 return MLiveIns[LDVBB->BB][Loc.asU64()].asU64(); 3946 } 3947 }; 3948 3949 LDVSSABlock *LDVSSABlockIterator::operator*() { 3950 return Updater.getSSALDVBlock(*PredIt); 3951 } 3952 3953 #ifndef NDEBUG 3954 3955 raw_ostream &operator<<(raw_ostream &out, const LDVSSAPhi &PHI) { 3956 out << "SSALDVPHI " << PHI.PHIValNum; 3957 return out; 3958 } 3959 3960 #endif 3961 3962 } // namespace 3963 3964 namespace llvm { 3965 3966 /// Template specialization to give SSAUpdater access to CFG and value 3967 /// information. SSAUpdater calls methods in these traits, passing in the 3968 /// LDVSSAUpdater object, to learn about blocks and the values they define. 3969 /// It also provides methods to create PHI nodes and track them. 3970 template <> class SSAUpdaterTraits<LDVSSAUpdater> { 3971 public: 3972 using BlkT = LDVSSABlock; 3973 using ValT = BlockValueNum; 3974 using PhiT = LDVSSAPhi; 3975 using BlkSucc_iterator = LDVSSABlockIterator; 3976 3977 // Methods to access block successors -- dereferencing to our wrapper class. 3978 static BlkSucc_iterator BlkSucc_begin(BlkT *BB) { return BB->succ_begin(); } 3979 static BlkSucc_iterator BlkSucc_end(BlkT *BB) { return BB->succ_end(); } 3980 3981 /// Iterator for PHI operands. 
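/// (SSAUpdaterImpl uses these to inspect the incoming values of a PHI; the
/// operands themselves are filled in via AddPHIOperand further down.)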
3982 class PHI_iterator { 3983 private: 3984 LDVSSAPhi *PHI; 3985 unsigned Idx; 3986 3987 public: 3988 explicit PHI_iterator(LDVSSAPhi *P) // begin iterator 3989 : PHI(P), Idx(0) {} 3990 PHI_iterator(LDVSSAPhi *P, bool) // end iterator 3991 : PHI(P), Idx(PHI->IncomingValues.size()) {} 3992 3993 PHI_iterator &operator++() { 3994 Idx++; 3995 return *this; 3996 } 3997 bool operator==(const PHI_iterator &X) const { return Idx == X.Idx; } 3998 bool operator!=(const PHI_iterator &X) const { return !operator==(X); } 3999 4000 BlockValueNum getIncomingValue() { return PHI->IncomingValues[Idx].second; } 4001 4002 LDVSSABlock *getIncomingBlock() { return PHI->IncomingValues[Idx].first; } 4003 }; 4004 4005 static inline PHI_iterator PHI_begin(PhiT *PHI) { return PHI_iterator(PHI); } 4006 4007 static inline PHI_iterator PHI_end(PhiT *PHI) { 4008 return PHI_iterator(PHI, true); 4009 } 4010 4011 /// FindPredecessorBlocks - Put the predecessors of BB into the Preds 4012 /// vector. 4013 static void FindPredecessorBlocks(LDVSSABlock *BB, 4014 SmallVectorImpl<LDVSSABlock *> *Preds) { 4015 for (MachineBasicBlock *Pred : BB->BB.predecessors()) 4016 Preds->push_back(BB->Updater.getSSALDVBlock(Pred)); 4017 } 4018 4019 /// GetUndefVal - Normally creates an IMPLICIT_DEF instruction with a new 4020 /// register. For LiveDebugValues, represents a block identified as not having 4021 /// any DBG_PHI predecessors. 4022 static BlockValueNum GetUndefVal(LDVSSABlock *BB, LDVSSAUpdater *Updater) { 4023 // Create a value number for this block -- it needs to be unique and in the 4024 // "undef" collection, so that we know it's not real. Use a number 4025 // representing a PHI into this block. 4026 BlockValueNum Num = ValueIDNum(BB->BB.getNumber(), 0, Updater->Loc).asU64(); 4027 Updater->UndefMap[&BB->BB] = Num; 4028 return Num; 4029 } 4030 4031 /// CreateEmptyPHI - Create a (representation of a) PHI in the given block. 4032 /// SSAUpdater will populate it with information about incoming values. The 4033 /// value number of this PHI is whatever the machine value number problem 4034 /// solution determined it to be. This includes non-phi values if SSAUpdater 4035 /// tries to create a PHI where the incoming values are identical. 4036 static BlockValueNum CreateEmptyPHI(LDVSSABlock *BB, unsigned NumPreds, 4037 LDVSSAUpdater *Updater) { 4038 BlockValueNum PHIValNum = Updater->getValue(BB); 4039 LDVSSAPhi *PHI = BB->newPHI(PHIValNum); 4040 Updater->PHIs[PHIValNum] = PHI; 4041 return PHIValNum; 4042 } 4043 4044 /// AddPHIOperand - Add the specified value as an operand of the PHI for 4045 /// the specified predecessor block. 4046 static void AddPHIOperand(LDVSSAPhi *PHI, BlockValueNum Val, LDVSSABlock *Pred) { 4047 PHI->IncomingValues.push_back(std::make_pair(Pred, Val)); 4048 } 4049 4050 /// ValueIsPHI - Check if the instruction that defines the specified value 4051 /// is a PHI instruction. 4052 static LDVSSAPhi *ValueIsPHI(BlockValueNum Val, LDVSSAUpdater *Updater) { 4053 return Updater->PHIs.lookup(Val); 4054 } 4055 4056 /// ValueIsNewPHI - Like ValueIsPHI but also check if the PHI has no source 4057 /// operands, i.e., it was just added. 4058 static LDVSSAPhi *ValueIsNewPHI(BlockValueNum Val, LDVSSAUpdater *Updater) { 4059 LDVSSAPhi *PHI = ValueIsPHI(Val, Updater); 4060 if (PHI && PHI->IncomingValues.size() == 0) 4061 return PHI; 4062 return nullptr; 4063 } 4064 4065 /// GetPHIValue - For the specified PHI instruction, return the value 4066 /// that it defines. 
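/// (In this adapter the value was computed up front from the machine
/// live-ins when the PHI was created -- see CreateEmptyPHI above -- so it is
/// simply returned here.)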
4067 static BlockValueNum GetPHIValue(LDVSSAPhi *PHI) { return PHI->PHIValNum; } 4068 }; 4069 4070 } // end namespace llvm 4071 4072 std::optional<ValueIDNum> InstrRefBasedLDV::resolveDbgPHIs( 4073 MachineFunction &MF, const FuncValueTable &MLiveOuts, 4074 const FuncValueTable &MLiveIns, MachineInstr &Here, uint64_t InstrNum) { 4075 // This function will be called twice per DBG_INSTR_REF, and might end up 4076 // computing lots of SSA information: memoize it. 4077 auto SeenDbgPHIIt = SeenDbgPHIs.find(std::make_pair(&Here, InstrNum)); 4078 if (SeenDbgPHIIt != SeenDbgPHIs.end()) 4079 return SeenDbgPHIIt->second; 4080 4081 std::optional<ValueIDNum> Result = 4082 resolveDbgPHIsImpl(MF, MLiveOuts, MLiveIns, Here, InstrNum); 4083 SeenDbgPHIs.insert({std::make_pair(&Here, InstrNum), Result}); 4084 return Result; 4085 } 4086 4087 std::optional<ValueIDNum> InstrRefBasedLDV::resolveDbgPHIsImpl( 4088 MachineFunction &MF, const FuncValueTable &MLiveOuts, 4089 const FuncValueTable &MLiveIns, MachineInstr &Here, uint64_t InstrNum) { 4090 // Pick out records of DBG_PHI instructions that have been observed. If there 4091 // are none, then we cannot compute a value number. 4092 auto RangePair = std::equal_range(DebugPHINumToValue.begin(), 4093 DebugPHINumToValue.end(), InstrNum); 4094 auto LowerIt = RangePair.first; 4095 auto UpperIt = RangePair.second; 4096 4097 // No DBG_PHI means there can be no location. 4098 if (LowerIt == UpperIt) 4099 return std::nullopt; 4100 4101 // If any DBG_PHIs referred to a location we didn't understand, don't try to 4102 // compute a value. There might be scenarios where we could recover a value 4103 // for some range of DBG_INSTR_REFs, but at this point we can have high 4104 // confidence that we've seen a bug. 4105 auto DBGPHIRange = make_range(LowerIt, UpperIt); 4106 for (const DebugPHIRecord &DBG_PHI : DBGPHIRange) 4107 if (!DBG_PHI.ValueRead) 4108 return std::nullopt; 4109 4110 // If there's only one DBG_PHI, then that is our value number. 4111 if (std::distance(LowerIt, UpperIt) == 1) 4112 return *LowerIt->ValueRead; 4113 4114 // Pick out the location (physreg, slot) where any PHIs must occur. It's 4115 // technically possible for us to merge values in different registers in each 4116 // block, but highly unlikely that LLVM will generate such code after register 4117 // allocation. 4118 LocIdx Loc = *LowerIt->ReadLoc; 4119 4120 // We have several DBG_PHIs, and a use position (the Here inst). All each 4121 // DBG_PHI does is identify a value at a program position. We can treat each 4122 // DBG_PHI like it's a Def of a value, and the use position is a Use of a 4123 // value, just like SSA. We use the bulk-standard LLVM SSA updater class to 4124 // determine which Def is used at the Use, and any PHIs that happen along 4125 // the way. 4126 // Adapted LLVM SSA Updater: 4127 LDVSSAUpdater Updater(Loc, MLiveIns); 4128 // Map of which Def or PHI is the current value in each block. 4129 DenseMap<LDVSSABlock *, BlockValueNum> AvailableValues; 4130 // Set of PHIs that we have created along the way. 4131 SmallVector<LDVSSAPhi *, 8> CreatedPHIs; 4132 4133 // Each existing DBG_PHI is a Def'd value under this model. Record these Defs 4134 // for the SSAUpdater. 
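// For illustration (block numbers hypothetical): DBG_PHIs in bb.1 and bb.2
// that read the same location become AvailableValues[bb.1] and
// AvailableValues[bb.2]; the updater then works out which of those defs, or
// which PHI merging them, reaches the DBG_INSTR_REF's block.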
4135 for (const auto &DBG_PHI : DBGPHIRange) {
4136 LDVSSABlock *Block = Updater.getSSALDVBlock(DBG_PHI.MBB);
4137 const ValueIDNum &Num = *DBG_PHI.ValueRead;
4138 AvailableValues.insert(std::make_pair(Block, Num.asU64()));
4139 }
4140
4141 LDVSSABlock *HereBlock = Updater.getSSALDVBlock(Here.getParent());
4142 const auto &AvailIt = AvailableValues.find(HereBlock);
4143 if (AvailIt != AvailableValues.end()) {
4144 // Actually, we already know what the value is -- the Use is in the same
4145 // block as the Def.
4146 return ValueIDNum::fromU64(AvailIt->second);
4147 }
4148
4149 // Otherwise, we must use the SSA Updater. It will identify the value number
4150 // that we are to use, and the PHIs that must happen along the way.
4151 SSAUpdaterImpl<LDVSSAUpdater> Impl(&Updater, &AvailableValues, &CreatedPHIs);
4152 BlockValueNum ResultInt = Impl.GetValue(Updater.getSSALDVBlock(Here.getParent()));
4153 ValueIDNum Result = ValueIDNum::fromU64(ResultInt);
4154
4155 // We have the number for a PHI, or possibly live-through value, to be used
4156 // at this Use. There are a number of things we have to check about it though:
4157 // * Does any PHI use an 'Undef' (like an IMPLICIT_DEF) value? If so, this
4158 // Use was not completely dominated by DBG_PHIs and we should abort.
4159 // * Are the Defs or PHIs clobbered in a block? SSAUpdater isn't aware that
4160 // we've left SSA form. Validate that the inputs to each PHI are the
4161 // expected values.
4162 // * Is a PHI we've created actually a merging of values, or are all the
4163 // predecessor values the same, leading to a non-PHI machine value number?
4164 // (SSAUpdater doesn't know that either). Remap validated PHIs into the
4165 // ValidatedValues collection below to sort this out.
4166 DenseMap<LDVSSABlock *, ValueIDNum> ValidatedValues;
4167
4168 // Define all the input DBG_PHI values in ValidatedValues.
4169 for (const auto &DBG_PHI : DBGPHIRange) {
4170 LDVSSABlock *Block = Updater.getSSALDVBlock(DBG_PHI.MBB);
4171 const ValueIDNum &Num = *DBG_PHI.ValueRead;
4172 ValidatedValues.insert(std::make_pair(Block, Num));
4173 }
4174
4175 // Sort PHIs to validate into RPO order.
4176 SmallVector<LDVSSAPhi *, 8> SortedPHIs;
4177 for (auto &PHI : CreatedPHIs)
4178 SortedPHIs.push_back(PHI);
4179
4180 llvm::sort(SortedPHIs, [&](LDVSSAPhi *A, LDVSSAPhi *B) {
4181 return BBToOrder[&A->getParent()->BB] < BBToOrder[&B->getParent()->BB];
4182 });
4183
4184 for (auto &PHI : SortedPHIs) {
4185 ValueIDNum ThisBlockValueNum = MLiveIns[PHI->ParentBlock->BB][Loc.asU64()];
4186
4187 // Are all these things actually defined?
4188 for (auto &PHIIt : PHI->IncomingValues) {
4189 // Any undef input means DBG_PHIs didn't dominate the use point.
4190 if (Updater.UndefMap.contains(&PHIIt.first->BB))
4191 return std::nullopt;
4192
4193 ValueIDNum ValueToCheck;
4194 const ValueTable &BlockLiveOuts = MLiveOuts[PHIIt.first->BB];
4195
4196 auto VVal = ValidatedValues.find(PHIIt.first);
4197 if (VVal == ValidatedValues.end()) {
4198 // We cross a loop, and this is a backedge. LLVM's tail duplication
4199 // happens so late that DBG_PHI instructions should not be able to
4200 // migrate into loops -- meaning we can only be live-through this
4201 // loop.
4202 ValueToCheck = ThisBlockValueNum;
4203 } else {
4204 // Does the block have as a live-out, in the location we're examining,
4205 // the value that we expect? If not, it's been moved or clobbered.
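// (If the check below fails, the value was moved or clobbered between the
// DBG_PHI and this PHI, so the whole resolution is abandoned rather than
// risking a wrong location.)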
4206 ValueToCheck = VVal->second; 4207 } 4208 4209 if (BlockLiveOuts[Loc.asU64()] != ValueToCheck) 4210 return std::nullopt; 4211 } 4212 4213 // Record this value as validated. 4214 ValidatedValues.insert({PHI->ParentBlock, ThisBlockValueNum}); 4215 } 4216 4217 // All the PHIs are valid: we can return what the SSAUpdater said our value 4218 // number was. 4219 return Result; 4220 } 4221