//===- VarLocBasedImpl.cpp - Tracking Debug Value MIs with VarLoc class----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file VarLocBasedImpl.cpp
///
/// LiveDebugValues is an optimistic "available expressions" dataflow
/// algorithm. The set of expressions is the set of machine locations
/// (registers, spill slots, constants) that a variable fragment might be
/// located in, qualified by a DIExpression and indirect-ness flag, while each
/// variable is identified by a DebugVariable object. The availability of an
/// expression begins when a DBG_VALUE instruction specifies the location of a
/// DebugVariable, and continues until that location is clobbered or
/// re-specified by a different DBG_VALUE for the same DebugVariable.
///
/// The output of LiveDebugValues is additional DBG_VALUE instructions,
/// placed to extend variable locations as far as they're available. This file
/// and the VarLocBasedLDV class are an implementation that explicitly tracks
/// locations, using the VarLoc class.
///
/// The canonical "available expressions" problem doesn't have expression
/// clobbering; instead, when a variable is re-assigned, any expressions using
/// that variable get invalidated. LiveDebugValues can map onto "available
/// expressions" by having every register represented by a variable, which is
/// used in an expression that becomes available at a DBG_VALUE instruction.
/// When the register is clobbered, its variable is effectively reassigned, and
/// expressions computed from it become unavailable. A similar construct is
/// needed when a DebugVariable has its location re-specified, to invalidate
/// all other locations for that DebugVariable.
///
/// Using the dataflow analysis to compute the available expressions, we create
/// a DBG_VALUE at the beginning of each block where the expression is
/// live-in. This propagates variable locations into every basic block where
/// the location can be determined, rather than only having DBG_VALUEs in blocks
/// where locations are specified due to an assignment or some optimization.
/// Movements of values between registers and spill slots are annotated with
/// DBG_VALUEs too, to track variable values between locations. All this allows
/// DbgEntityHistoryCalculator to focus on only the locations within individual
/// blocks, facilitating testing and improving modularity.
///
/// We follow an optimistic dataflow approach, with this lattice:
///
/// \verbatim
///  ⊤ "Unknown"
///  |
///  v
/// True
///  |
///  v
///  ⊥ False
/// \endverbatim With "True" signifying that the expression is available (and
/// thus a DebugVariable's location is the corresponding register), while
/// "False" signifies that the expression is unavailable. "Unknown"s never
/// survive to the end of the analysis (see below).
///
/// Formally, all DebugVariable locations that are live-out of a block are
/// initialized to \top. A block's live-in values take the meet of the lattice
/// values of every predecessor's live-outs, except for the entry block, where
/// all live-ins are \bot. The usual dataflow propagation occurs: the transfer
/// function for a block assigns an expression for a DebugVariable to be "True"
/// if a DBG_VALUE in the block specifies it; "False" if the location is
/// clobbered; or the live-in value if it is unaffected by the block. We
/// visit each block in reverse post order until a fixed point is reached. The
/// solution produced is maximal.
///
/// Intuitively, we start by assuming that every expression / variable location
/// is at least "True", and then propagate "False" from the entry block and any
/// clobbers until there are no more changes to make. This gives us an accurate
/// solution because all incorrect locations will have a "False" propagated into
/// them. It also gives us a solution that copes well with loops by assuming
/// that variable locations are live-through every loop, and then removing those
/// that are not through dataflow.
///
/// Within LiveDebugValues: each variable location is represented by a
/// VarLoc object that identifies the source variable, the set of
/// machine-locations that currently describe it (a single location for
/// DBG_VALUE or multiple for DBG_VALUE_LIST), and the DBG_VALUE inst that
/// specifies the location. Each VarLoc is indexed in the (function-scope) \p
/// VarLocMap, giving each VarLoc a set of unique indexes, each of which
/// corresponds to one of the VarLoc's machine-locations and can be used to
/// look up the VarLoc in the VarLocMap. Rather than operate directly on
/// machine locations, the dataflow analysis in this pass identifies locations
/// by their indices in the VarLocMap, meaning all the variable locations in a
/// block can be described by a sparse vector of VarLocMap indices.
///
/// All the storage for the dataflow analysis is local to the ExtendRanges
/// method and passed down to helper methods. "OutLocs" and "InLocs" record the
/// live-out and live-in lattice values for each block. "OpenRanges" maintains
/// a list of variable locations and, with the "process" method, evaluates the
/// transfer function of each block. "flushPendingLocs" installs debug value
/// instructions for each live-in location at the start of blocks, while
/// "Transfers" records transfers of values between machine-locations.
///
/// We avoid explicitly representing the "Unknown" (\top) lattice value in the
/// implementation. Instead, unvisited blocks implicitly have all lattice
/// values set as "Unknown". After being visited, there will be a path back to
/// the entry block where the lattice value is "False", and as the transfer
/// function cannot make new "Unknown" locations, there are no scenarios where
/// a block can have an "Unknown" location after being visited. Similarly, we
/// don't enumerate all possible variable locations before exploring the
/// function: when a new location is discovered, all blocks previously explored
/// were implicitly "False" but unrecorded, and become explicitly "False" when
/// a new VarLoc is created with its bit not set in predecessor InLocs or
/// OutLocs.
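///
/// For illustration only, the algorithm described above corresponds roughly
/// to the sketch below. The helper names here are invented for this comment;
/// the real implementation is spread across ExtendRanges, join and process
/// further down in this file.
///
/// \verbatim
///   bool Changed = true;
///   while (Changed) {
///     Changed = false;
///     for (MachineBasicBlock *MBB : RPOT) { // reverse post-order
///       // Meet: a location is live-in only if it is live-out of every
///       // (already visited) predecessor; the entry block has no live-ins.
///       VarLocSet In = joinPredecessorOutLocs(*MBB, OutLocs);
///       // Transfer: DBG_VALUEs make locations available; clobbers and
///       // re-specifications of the same DebugVariable remove them.
///       VarLocSet Out = applyBlockTransferFunction(*MBB, In);
///       Changed |= recordIfChanged(OutLocs, *MBB, Out);
///     }
///   }
/// \endverbatim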
///
//===----------------------------------------------------------------------===//

#include "LiveDebugValues.h"

#include "llvm/ADT/CoalescingBitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/LexicalScopes.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/TypeSize.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <functional>
#include <map>
#include <queue>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "livedebugvalues"

STATISTIC(NumInserted, "Number of DBG_VALUE instructions inserted");

/// If \p Op is a register other than the stack pointer or frame pointer,
/// return true, otherwise return false. This is used to avoid basing debug
/// entry values on the stack or frame registers, since we do not support
/// that at the moment.
static bool isRegOtherThanSPAndFP(const MachineOperand &Op,
                                  const MachineInstr &MI,
                                  const TargetRegisterInfo *TRI) {
  if (!Op.isReg())
    return false;

  const MachineFunction *MF = MI.getParent()->getParent();
  const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  Register SP = TLI->getStackPointerRegisterToSaveRestore();
  Register FP = TRI->getFrameRegister(*MF);
  Register Reg = Op.getReg();

  return Reg && Reg != SP && Reg != FP;
}

namespace {

// Max out the number of statically allocated elements in DefinedRegsSet, as
// this prevents fallback to std::set::count() operations.
using DefinedRegsSet = SmallSet<Register, 32>;

// The IDs in this set correspond to MachineLocs in VarLocs, as well as VarLocs
// that represent Entry Values; every VarLoc in the set will also appear
// exactly once at Location=0.
// As a result, each VarLoc may appear more than once in this "set", but each
// range corresponding to a Reg, SpillLoc, or EntryValue type will still be a
// "true" set (i.e. each VarLoc may appear only once), and the range Location=0
// is the set of all VarLocs.
using VarLocSet = CoalescingBitVector<uint64_t>;

/// A type-checked pair of {Register Location (or 0), Index}, used to index
/// into a \ref VarLocMap.
This can be efficiently converted to a 64-bit int 197 /// for insertion into a \ref VarLocSet, and efficiently converted back. The 198 /// type-checker helps ensure that the conversions aren't lossy. 199 /// 200 /// Why encode a location /into/ the VarLocMap index? This makes it possible 201 /// to find the open VarLocs killed by a register def very quickly. This is a 202 /// performance-critical operation for LiveDebugValues. 203 struct LocIndex { 204 using u32_location_t = uint32_t; 205 using u32_index_t = uint32_t; 206 207 u32_location_t Location; // Physical registers live in the range [1;2^30) (see 208 // \ref MCRegister), so we have plenty of range left 209 // here to encode non-register locations. 210 u32_index_t Index; 211 212 /// The location that has an entry for every VarLoc in the map. 213 static constexpr u32_location_t kUniversalLocation = 0; 214 215 /// The first location that is reserved for VarLocs with locations of kind 216 /// RegisterKind. 217 static constexpr u32_location_t kFirstRegLocation = 1; 218 219 /// The first location greater than 0 that is not reserved for VarLocs with 220 /// locations of kind RegisterKind. 221 static constexpr u32_location_t kFirstInvalidRegLocation = 1 << 30; 222 223 /// A special location reserved for VarLocs with locations of kind 224 /// SpillLocKind. 225 static constexpr u32_location_t kSpillLocation = kFirstInvalidRegLocation; 226 227 /// A special location reserved for VarLocs of kind EntryValueBackupKind and 228 /// EntryValueCopyBackupKind. 229 static constexpr u32_location_t kEntryValueBackupLocation = 230 kFirstInvalidRegLocation + 1; 231 232 LocIndex(u32_location_t Location, u32_index_t Index) 233 : Location(Location), Index(Index) {} 234 235 uint64_t getAsRawInteger() const { 236 return (static_cast<uint64_t>(Location) << 32) | Index; 237 } 238 239 template<typename IntT> static LocIndex fromRawInteger(IntT ID) { 240 static_assert(std::is_unsigned<IntT>::value && 241 sizeof(ID) == sizeof(uint64_t), 242 "Cannot convert raw integer to LocIndex"); 243 return {static_cast<u32_location_t>(ID >> 32), 244 static_cast<u32_index_t>(ID)}; 245 } 246 247 /// Get the start of the interval reserved for VarLocs of kind RegisterKind 248 /// which reside in \p Reg. The end is at rawIndexForReg(Reg+1)-1. 249 static uint64_t rawIndexForReg(Register Reg) { 250 return LocIndex(Reg, 0).getAsRawInteger(); 251 } 252 253 /// Return a range covering all set indices in the interval reserved for 254 /// \p Location in \p Set. 255 static auto indexRangeForLocation(const VarLocSet &Set, 256 u32_location_t Location) { 257 uint64_t Start = LocIndex(Location, 0).getAsRawInteger(); 258 uint64_t End = LocIndex(Location + 1, 0).getAsRawInteger(); 259 return Set.half_open_range(Start, End); 260 } 261 }; 262 263 // Simple Set for storing all the VarLoc Indices at a Location bucket. 264 using VarLocsInRange = SmallSet<LocIndex::u32_index_t, 32>; 265 // Vector of all `LocIndex`s for a given VarLoc; the same Location should not 266 // appear in any two of these, as each VarLoc appears at most once in any 267 // Location bucket. 
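// For illustration only (hypothetical values): LocIndex{42, 7}, for a VarLoc
// whose MachineLoc lives in register number 42, is stored in a VarLocSet as
// the raw integer (uint64_t(42) << 32) | 7. All VarLocs using register 42
// therefore occupy the half-open interval
// [rawIndexForReg(42), rawIndexForReg(43)), which is what
// indexRangeForLocation() scans when register 42 is clobbered. The LocIndices
// vector below collects one such LocIndex per bucket a VarLoc appears in.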
268 using LocIndices = SmallVector<LocIndex, 2>; 269 270 class VarLocBasedLDV : public LDVImpl { 271 private: 272 const TargetRegisterInfo *TRI; 273 const TargetInstrInfo *TII; 274 const TargetFrameLowering *TFI; 275 TargetPassConfig *TPC; 276 BitVector CalleeSavedRegs; 277 LexicalScopes LS; 278 VarLocSet::Allocator Alloc; 279 280 const MachineInstr *LastNonDbgMI; 281 282 enum struct TransferKind { TransferCopy, TransferSpill, TransferRestore }; 283 284 using FragmentInfo = DIExpression::FragmentInfo; 285 using OptFragmentInfo = Optional<DIExpression::FragmentInfo>; 286 287 /// A pair of debug variable and value location. 288 struct VarLoc { 289 // The location at which a spilled variable resides. It consists of a 290 // register and an offset. 291 struct SpillLoc { 292 unsigned SpillBase; 293 StackOffset SpillOffset; 294 bool operator==(const SpillLoc &Other) const { 295 return SpillBase == Other.SpillBase && SpillOffset == Other.SpillOffset; 296 } 297 bool operator!=(const SpillLoc &Other) const { 298 return !(*this == Other); 299 } 300 }; 301 302 /// Identity of the variable at this location. 303 const DebugVariable Var; 304 305 /// The expression applied to this location. 306 const DIExpression *Expr; 307 308 /// DBG_VALUE to clone var/expr information from if this location 309 /// is moved. 310 const MachineInstr &MI; 311 312 enum class MachineLocKind { 313 InvalidKind = 0, 314 RegisterKind, 315 SpillLocKind, 316 ImmediateKind 317 }; 318 319 enum class EntryValueLocKind { 320 NonEntryValueKind = 0, 321 EntryValueKind, 322 EntryValueBackupKind, 323 EntryValueCopyBackupKind 324 } EVKind = EntryValueLocKind::NonEntryValueKind; 325 326 /// The value location. Stored separately to avoid repeatedly 327 /// extracting it from MI. 328 union MachineLocValue { 329 uint64_t RegNo; 330 SpillLoc SpillLocation; 331 uint64_t Hash; 332 int64_t Immediate; 333 const ConstantFP *FPImm; 334 const ConstantInt *CImm; 335 MachineLocValue() : Hash(0) {} 336 }; 337 338 /// A single machine location; its Kind is either a register, spill 339 /// location, or immediate value. 340 /// If the VarLoc is not a NonEntryValueKind, then it will use only a 341 /// single MachineLoc of RegisterKind. 342 struct MachineLoc { 343 MachineLocKind Kind; 344 MachineLocValue Value; 345 bool operator==(const MachineLoc &Other) const { 346 if (Kind != Other.Kind) 347 return false; 348 switch (Kind) { 349 case MachineLocKind::SpillLocKind: 350 return Value.SpillLocation == Other.Value.SpillLocation; 351 case MachineLocKind::RegisterKind: 352 case MachineLocKind::ImmediateKind: 353 return Value.Hash == Other.Value.Hash; 354 default: 355 llvm_unreachable("Invalid kind"); 356 } 357 } 358 bool operator<(const MachineLoc &Other) const { 359 switch (Kind) { 360 case MachineLocKind::SpillLocKind: 361 return std::make_tuple( 362 Kind, Value.SpillLocation.SpillBase, 363 Value.SpillLocation.SpillOffset.getFixed(), 364 Value.SpillLocation.SpillOffset.getScalable()) < 365 std::make_tuple( 366 Other.Kind, Other.Value.SpillLocation.SpillBase, 367 Other.Value.SpillLocation.SpillOffset.getFixed(), 368 Other.Value.SpillLocation.SpillOffset.getScalable()); 369 case MachineLocKind::RegisterKind: 370 case MachineLocKind::ImmediateKind: 371 return std::tie(Kind, Value.Hash) < 372 std::tie(Other.Kind, Other.Value.Hash); 373 default: 374 llvm_unreachable("Invalid kind"); 375 } 376 } 377 }; 378 379 /// The set of machine locations used to determine the variable's value, in 380 /// conjunction with Expr. 
Initially populated with MI's debug operands, 381 /// but may be transformed independently afterwards. 382 SmallVector<MachineLoc, 8> Locs; 383 /// Used to map the index of each location in Locs back to the index of its 384 /// original debug operand in MI. Used when multiple location operands are 385 /// coalesced and the original MI's operands need to be accessed while 386 /// emitting a debug value. 387 SmallVector<unsigned, 8> OrigLocMap; 388 389 VarLoc(const MachineInstr &MI, LexicalScopes &LS) 390 : Var(MI.getDebugVariable(), MI.getDebugExpression(), 391 MI.getDebugLoc()->getInlinedAt()), 392 Expr(MI.getDebugExpression()), MI(MI) { 393 assert(MI.isDebugValue() && "not a DBG_VALUE"); 394 assert((MI.isDebugValueList() || MI.getNumOperands() == 4) && 395 "malformed DBG_VALUE"); 396 for (const MachineOperand &Op : MI.debug_operands()) { 397 MachineLoc ML = GetLocForOp(Op); 398 auto It = find(Locs, ML); 399 if (It == Locs.end()) { 400 Locs.push_back(ML); 401 OrigLocMap.push_back(MI.getDebugOperandIndex(&Op)); 402 } else { 403 // ML duplicates an element in Locs; replace references to Op 404 // with references to the duplicating element. 405 unsigned OpIdx = Locs.size(); 406 unsigned DuplicatingIdx = std::distance(Locs.begin(), It); 407 Expr = DIExpression::replaceArg(Expr, OpIdx, DuplicatingIdx); 408 } 409 } 410 411 // We create the debug entry values from the factory functions rather 412 // than from this ctor. 413 assert(EVKind != EntryValueLocKind::EntryValueKind && 414 !isEntryBackupLoc()); 415 } 416 417 static MachineLoc GetLocForOp(const MachineOperand &Op) { 418 MachineLocKind Kind; 419 MachineLocValue Loc; 420 if (Op.isReg()) { 421 Kind = MachineLocKind::RegisterKind; 422 Loc.RegNo = Op.getReg(); 423 } else if (Op.isImm()) { 424 Kind = MachineLocKind::ImmediateKind; 425 Loc.Immediate = Op.getImm(); 426 } else if (Op.isFPImm()) { 427 Kind = MachineLocKind::ImmediateKind; 428 Loc.FPImm = Op.getFPImm(); 429 } else if (Op.isCImm()) { 430 Kind = MachineLocKind::ImmediateKind; 431 Loc.CImm = Op.getCImm(); 432 } else 433 llvm_unreachable("Invalid Op kind for MachineLoc."); 434 return {Kind, Loc}; 435 } 436 437 /// Take the variable and machine-location in DBG_VALUE MI, and build an 438 /// entry location using the given expression. 439 static VarLoc CreateEntryLoc(const MachineInstr &MI, LexicalScopes &LS, 440 const DIExpression *EntryExpr, Register Reg) { 441 VarLoc VL(MI, LS); 442 assert(VL.Locs.size() == 1 && 443 VL.Locs[0].Kind == MachineLocKind::RegisterKind); 444 VL.EVKind = EntryValueLocKind::EntryValueKind; 445 VL.Expr = EntryExpr; 446 VL.Locs[0].Value.RegNo = Reg; 447 return VL; 448 } 449 450 /// Take the variable and machine-location from the DBG_VALUE (from the 451 /// function entry), and build an entry value backup location. The backup 452 /// location will turn into the normal location if the backup is valid at 453 /// the time of the primary location clobbering. 454 static VarLoc CreateEntryBackupLoc(const MachineInstr &MI, 455 LexicalScopes &LS, 456 const DIExpression *EntryExpr) { 457 VarLoc VL(MI, LS); 458 assert(VL.Locs.size() == 1 && 459 VL.Locs[0].Kind == MachineLocKind::RegisterKind); 460 VL.EVKind = EntryValueLocKind::EntryValueBackupKind; 461 VL.Expr = EntryExpr; 462 return VL; 463 } 464 465 /// Take the variable and machine-location from the DBG_VALUE (from the 466 /// function entry), and build a copy of an entry value backup location by 467 /// setting the register location to NewReg. 
468 static VarLoc CreateEntryCopyBackupLoc(const MachineInstr &MI, 469 LexicalScopes &LS, 470 const DIExpression *EntryExpr, 471 Register NewReg) { 472 VarLoc VL(MI, LS); 473 assert(VL.Locs.size() == 1 && 474 VL.Locs[0].Kind == MachineLocKind::RegisterKind); 475 VL.EVKind = EntryValueLocKind::EntryValueCopyBackupKind; 476 VL.Expr = EntryExpr; 477 VL.Locs[0].Value.RegNo = NewReg; 478 return VL; 479 } 480 481 /// Copy the register location in DBG_VALUE MI, updating the register to 482 /// be NewReg. 483 static VarLoc CreateCopyLoc(const VarLoc &OldVL, const MachineLoc &OldML, 484 Register NewReg) { 485 VarLoc VL = OldVL; 486 for (MachineLoc &ML : VL.Locs) 487 if (ML == OldML) { 488 ML.Kind = MachineLocKind::RegisterKind; 489 ML.Value.RegNo = NewReg; 490 return VL; 491 } 492 llvm_unreachable("Should have found OldML in new VarLoc."); 493 } 494 495 /// Take the variable described by DBG_VALUE* MI, and create a VarLoc 496 /// locating it in the specified spill location. 497 static VarLoc CreateSpillLoc(const VarLoc &OldVL, const MachineLoc &OldML, 498 unsigned SpillBase, StackOffset SpillOffset) { 499 VarLoc VL = OldVL; 500 for (MachineLoc &ML : VL.Locs) 501 if (ML == OldML) { 502 ML.Kind = MachineLocKind::SpillLocKind; 503 ML.Value.SpillLocation = {SpillBase, SpillOffset}; 504 return VL; 505 } 506 llvm_unreachable("Should have found OldML in new VarLoc."); 507 } 508 509 /// Create a DBG_VALUE representing this VarLoc in the given function. 510 /// Copies variable-specific information such as DILocalVariable and 511 /// inlining information from the original DBG_VALUE instruction, which may 512 /// have been several transfers ago. 513 MachineInstr *BuildDbgValue(MachineFunction &MF) const { 514 assert(!isEntryBackupLoc() && 515 "Tried to produce DBG_VALUE for backup VarLoc"); 516 const DebugLoc &DbgLoc = MI.getDebugLoc(); 517 bool Indirect = MI.isIndirectDebugValue(); 518 const auto &IID = MI.getDesc(); 519 const DILocalVariable *Var = MI.getDebugVariable(); 520 NumInserted++; 521 522 const DIExpression *DIExpr = Expr; 523 SmallVector<MachineOperand, 8> MOs; 524 for (unsigned I = 0, E = Locs.size(); I < E; ++I) { 525 MachineLocKind LocKind = Locs[I].Kind; 526 MachineLocValue Loc = Locs[I].Value; 527 const MachineOperand &Orig = MI.getDebugOperand(OrigLocMap[I]); 528 switch (LocKind) { 529 case MachineLocKind::RegisterKind: 530 // An entry value is a register location -- but with an updated 531 // expression. The register location of such DBG_VALUE is always the 532 // one from the entry DBG_VALUE, it does not matter if the entry value 533 // was copied in to another register due to some optimizations. 534 // Non-entry value register locations are like the source 535 // DBG_VALUE, but with the register number from this VarLoc. 536 MOs.push_back(MachineOperand::CreateReg( 537 EVKind == EntryValueLocKind::EntryValueKind ? Orig.getReg() 538 : Register(Loc.RegNo), 539 false)); 540 break; 541 case MachineLocKind::SpillLocKind: { 542 // Spills are indirect DBG_VALUEs, with a base register and offset. 543 // Use the original DBG_VALUEs expression to build the spilt location 544 // on top of. FIXME: spill locations created before this pass runs 545 // are not recognized, and not handled here. 546 unsigned Base = Loc.SpillLocation.SpillBase; 547 auto *TRI = MF.getSubtarget().getRegisterInfo(); 548 if (MI.isNonListDebugValue()) { 549 auto Deref = Indirect ? 
DIExpression::DerefAfter : 0; 550 DIExpr = TRI->prependOffsetExpression( 551 DIExpr, DIExpression::ApplyOffset | Deref, 552 Loc.SpillLocation.SpillOffset); 553 Indirect = true; 554 } else { 555 SmallVector<uint64_t, 4> Ops; 556 TRI->getOffsetOpcodes(Loc.SpillLocation.SpillOffset, Ops); 557 Ops.push_back(dwarf::DW_OP_deref); 558 DIExpr = DIExpression::appendOpsToArg(DIExpr, Ops, I); 559 } 560 MOs.push_back(MachineOperand::CreateReg(Base, false)); 561 break; 562 } 563 case MachineLocKind::ImmediateKind: { 564 MOs.push_back(Orig); 565 break; 566 } 567 case MachineLocKind::InvalidKind: 568 llvm_unreachable("Tried to produce DBG_VALUE for invalid VarLoc"); 569 } 570 } 571 return BuildMI(MF, DbgLoc, IID, Indirect, MOs, Var, DIExpr); 572 } 573 574 /// Is the Loc field a constant or constant object? 575 bool isConstant(MachineLocKind Kind) const { 576 return Kind == MachineLocKind::ImmediateKind; 577 } 578 579 /// Check if the Loc field is an entry backup location. 580 bool isEntryBackupLoc() const { 581 return EVKind == EntryValueLocKind::EntryValueBackupKind || 582 EVKind == EntryValueLocKind::EntryValueCopyBackupKind; 583 } 584 585 /// If this variable is described by register \p Reg holding the entry 586 /// value, return true. 587 bool isEntryValueBackupReg(Register Reg) const { 588 return EVKind == EntryValueLocKind::EntryValueBackupKind && usesReg(Reg); 589 } 590 591 /// If this variable is described by register \p Reg holding a copy of the 592 /// entry value, return true. 593 bool isEntryValueCopyBackupReg(Register Reg) const { 594 return EVKind == EntryValueLocKind::EntryValueCopyBackupKind && 595 usesReg(Reg); 596 } 597 598 /// If this variable is described in whole or part by \p Reg, return true. 599 bool usesReg(Register Reg) const { 600 MachineLoc RegML; 601 RegML.Kind = MachineLocKind::RegisterKind; 602 RegML.Value.RegNo = Reg; 603 return is_contained(Locs, RegML); 604 } 605 606 /// If this variable is described in whole or part by \p Reg, return the index of the MachineLoc using \p Reg. 607 unsigned getRegIdx(Register Reg) const { 608 for (unsigned Idx = 0; Idx < Locs.size(); ++Idx) 609 if (Locs[Idx].Kind == MachineLocKind::RegisterKind && 610 Register{static_cast<unsigned>(Locs[Idx].Value.RegNo)} == Reg) 611 return Idx; 612 llvm_unreachable("Could not find given Reg in Locs"); 613 } 614 615 /// If this variable is described in whole or part by 1 or more registers, 616 /// add each of them to \p Regs and return true. 617 bool getDescribingRegs(SmallVectorImpl<uint32_t> &Regs) const { 618 bool AnyRegs = false; 619 for (const auto &Loc : Locs) 620 if (Loc.Kind == MachineLocKind::RegisterKind) { 621 Regs.push_back(Loc.Value.RegNo); 622 AnyRegs = true; 623 } 624 return AnyRegs; 625 } 626 627 bool containsSpillLocs() const { 628 return any_of(Locs, [](VarLoc::MachineLoc ML) { 629 return ML.Kind == VarLoc::MachineLocKind::SpillLocKind; 630 }); 631 } 632 633 /// If this variable is described in whole or part by \p SpillLocation, 634 /// return true. 635 bool usesSpillLoc(SpillLoc SpillLocation) const { 636 MachineLoc SpillML; 637 SpillML.Kind = MachineLocKind::SpillLocKind; 638 SpillML.Value.SpillLocation = SpillLocation; 639 return is_contained(Locs, SpillML); 640 } 641 642 /// If this variable is described in whole or part by \p SpillLocation, 643 /// return the index of that MachineLoc. 
644 unsigned getSpillLocIdx(SpillLoc SpillLocation) const { 645 for (unsigned Idx = 0; Idx < Locs.size(); ++Idx) 646 if (Locs[Idx].Kind == MachineLocKind::SpillLocKind && 647 Locs[Idx].Value.SpillLocation == SpillLocation) 648 return Idx; 649 llvm_unreachable("Could not find given SpillLoc in Locs"); 650 } 651 652 /// Determine whether the lexical scope of this value's debug location 653 /// dominates MBB. 654 bool dominates(LexicalScopes &LS, MachineBasicBlock &MBB) const { 655 return LS.dominates(MI.getDebugLoc().get(), &MBB); 656 } 657 658 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) 659 // TRI can be null. 660 void dump(const TargetRegisterInfo *TRI, raw_ostream &Out = dbgs()) const { 661 Out << "VarLoc("; 662 for (const MachineLoc &MLoc : Locs) { 663 if (Locs.begin() != &MLoc) 664 Out << ", "; 665 switch (MLoc.Kind) { 666 case MachineLocKind::RegisterKind: 667 Out << printReg(MLoc.Value.RegNo, TRI); 668 break; 669 case MachineLocKind::SpillLocKind: 670 Out << printReg(MLoc.Value.SpillLocation.SpillBase, TRI); 671 Out << "[" << MLoc.Value.SpillLocation.SpillOffset.getFixed() << " + " 672 << MLoc.Value.SpillLocation.SpillOffset.getScalable() 673 << "x vscale" 674 << "]"; 675 break; 676 case MachineLocKind::ImmediateKind: 677 Out << MLoc.Value.Immediate; 678 break; 679 case MachineLocKind::InvalidKind: 680 llvm_unreachable("Invalid VarLoc in dump method"); 681 } 682 } 683 684 Out << ", \"" << Var.getVariable()->getName() << "\", " << *Expr << ", "; 685 if (Var.getInlinedAt()) 686 Out << "!" << Var.getInlinedAt()->getMetadataID() << ")\n"; 687 else 688 Out << "(null))"; 689 690 if (isEntryBackupLoc()) 691 Out << " (backup loc)\n"; 692 else 693 Out << "\n"; 694 } 695 #endif 696 697 bool operator==(const VarLoc &Other) const { 698 return std::tie(EVKind, Var, Expr, Locs) == 699 std::tie(Other.EVKind, Other.Var, Other.Expr, Other.Locs); 700 } 701 702 /// This operator guarantees that VarLocs are sorted by Variable first. 703 bool operator<(const VarLoc &Other) const { 704 return std::tie(Var, EVKind, Locs, Expr) < 705 std::tie(Other.Var, Other.EVKind, Other.Locs, Other.Expr); 706 } 707 }; 708 709 #ifndef NDEBUG 710 using VarVec = SmallVector<VarLoc, 32>; 711 #endif 712 713 /// VarLocMap is used for two things: 714 /// 1) Assigning LocIndices to a VarLoc. The LocIndices can be used to 715 /// virtually insert a VarLoc into a VarLocSet. 716 /// 2) Given a LocIndex, look up the unique associated VarLoc. 717 class VarLocMap { 718 /// Map a VarLoc to an index within the vector reserved for its location 719 /// within Loc2Vars. 720 std::map<VarLoc, LocIndices> Var2Indices; 721 722 /// Map a location to a vector which holds VarLocs which live in that 723 /// location. 724 SmallDenseMap<LocIndex::u32_location_t, std::vector<VarLoc>> Loc2Vars; 725 726 public: 727 /// Retrieve LocIndices for \p VL. 728 LocIndices insert(const VarLoc &VL) { 729 LocIndices &Indices = Var2Indices[VL]; 730 // If Indices is not empty, VL is already in the map. 731 if (!Indices.empty()) 732 return Indices; 733 SmallVector<LocIndex::u32_location_t, 4> Locations; 734 // LocIndices are determined by EVKind and MLs; each Register has a 735 // unique location, while all SpillLocs use a single bucket, and any EV 736 // VarLocs use only the Backup bucket or none at all (except the 737 // compulsory entry at the universal location index). LocIndices will 738 // always have an index at the universal location index as the last index. 
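// For example (with made-up index values): a NonEntryValueKind VarLoc located
// in register 42 and also in a spill slot would get Indices
// {42, N0}, {kSpillLocation, N1}, {kUniversalLocation, N2}, in that order,
// while an EntryValueBackupKind VarLoc would get
// {kEntryValueBackupLocation, M0}, {kUniversalLocation, M1}.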
739 if (VL.EVKind == VarLoc::EntryValueLocKind::NonEntryValueKind) { 740 VL.getDescribingRegs(Locations); 741 assert(all_of(Locations, 742 [](auto RegNo) { 743 return RegNo < LocIndex::kFirstInvalidRegLocation; 744 }) && 745 "Physreg out of range?"); 746 if (VL.containsSpillLocs()) { 747 LocIndex::u32_location_t Loc = LocIndex::kSpillLocation; 748 Locations.push_back(Loc); 749 } 750 } else if (VL.EVKind != VarLoc::EntryValueLocKind::EntryValueKind) { 751 LocIndex::u32_location_t Loc = LocIndex::kEntryValueBackupLocation; 752 Locations.push_back(Loc); 753 } 754 Locations.push_back(LocIndex::kUniversalLocation); 755 for (LocIndex::u32_location_t Location : Locations) { 756 auto &Vars = Loc2Vars[Location]; 757 Indices.push_back( 758 {Location, static_cast<LocIndex::u32_index_t>(Vars.size())}); 759 Vars.push_back(VL); 760 } 761 return Indices; 762 } 763 764 LocIndices getAllIndices(const VarLoc &VL) const { 765 auto IndIt = Var2Indices.find(VL); 766 assert(IndIt != Var2Indices.end() && "VarLoc not tracked"); 767 return IndIt->second; 768 } 769 770 /// Retrieve the unique VarLoc associated with \p ID. 771 const VarLoc &operator[](LocIndex ID) const { 772 auto LocIt = Loc2Vars.find(ID.Location); 773 assert(LocIt != Loc2Vars.end() && "Location not tracked"); 774 return LocIt->second[ID.Index]; 775 } 776 }; 777 778 using VarLocInMBB = 779 SmallDenseMap<const MachineBasicBlock *, std::unique_ptr<VarLocSet>>; 780 struct TransferDebugPair { 781 MachineInstr *TransferInst; ///< Instruction where this transfer occurs. 782 LocIndex LocationID; ///< Location number for the transfer dest. 783 }; 784 using TransferMap = SmallVector<TransferDebugPair, 4>; 785 // Types for recording Entry Var Locations emitted by a single MachineInstr, 786 // as well as recording MachineInstr which last defined a register. 787 using InstToEntryLocMap = std::multimap<const MachineInstr *, LocIndex>; 788 using RegDefToInstMap = DenseMap<Register, MachineInstr *>; 789 790 // Types for recording sets of variable fragments that overlap. For a given 791 // local variable, we record all other fragments of that variable that could 792 // overlap it, to reduce search time. 793 using FragmentOfVar = 794 std::pair<const DILocalVariable *, DIExpression::FragmentInfo>; 795 using OverlapMap = 796 DenseMap<FragmentOfVar, SmallVector<DIExpression::FragmentInfo, 1>>; 797 798 // Helper while building OverlapMap, a map of all fragments seen for a given 799 // DILocalVariable. 800 using VarToFragments = 801 DenseMap<const DILocalVariable *, SmallSet<FragmentInfo, 4>>; 802 803 /// Collects all VarLocs from \p CollectFrom. Each unique VarLoc is added 804 /// to \p Collected once, in order of insertion into \p VarLocIDs. 805 static void collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected, 806 const VarLocSet &CollectFrom, 807 const VarLocMap &VarLocIDs); 808 809 /// Get the registers which are used by VarLocs of kind RegisterKind tracked 810 /// by \p CollectFrom. 811 void getUsedRegs(const VarLocSet &CollectFrom, 812 SmallVectorImpl<Register> &UsedRegs) const; 813 814 /// This holds the working set of currently open ranges. For fast 815 /// access, this is done both as a set of VarLocIDs, and a map of 816 /// DebugVariable to recent VarLocID. Note that a DBG_VALUE ends all 817 /// previous open ranges for the same variable. In addition, we keep 818 /// two different maps (Vars/EntryValuesBackupVars), so erase/insert 819 /// methods act differently depending on whether a VarLoc is primary 820 /// location or backup one. 
In the case the VarLoc is backup location 821 /// we will erase/insert from the EntryValuesBackupVars map, otherwise 822 /// we perform the operation on the Vars. 823 class OpenRangesSet { 824 VarLocSet::Allocator &Alloc; 825 VarLocSet VarLocs; 826 // Map the DebugVariable to recent primary location ID. 827 SmallDenseMap<DebugVariable, LocIndices, 8> Vars; 828 // Map the DebugVariable to recent backup location ID. 829 SmallDenseMap<DebugVariable, LocIndices, 8> EntryValuesBackupVars; 830 OverlapMap &OverlappingFragments; 831 832 public: 833 OpenRangesSet(VarLocSet::Allocator &Alloc, OverlapMap &_OLapMap) 834 : Alloc(Alloc), VarLocs(Alloc), OverlappingFragments(_OLapMap) {} 835 836 const VarLocSet &getVarLocs() const { return VarLocs; } 837 838 // Fetches all VarLocs in \p VarLocIDs and inserts them into \p Collected. 839 // This method is needed to get every VarLoc once, as each VarLoc may have 840 // multiple indices in a VarLocMap (corresponding to each applicable 841 // location), but all VarLocs appear exactly once at the universal location 842 // index. 843 void getUniqueVarLocs(SmallVectorImpl<VarLoc> &Collected, 844 const VarLocMap &VarLocIDs) const { 845 collectAllVarLocs(Collected, VarLocs, VarLocIDs); 846 } 847 848 /// Terminate all open ranges for VL.Var by removing it from the set. 849 void erase(const VarLoc &VL); 850 851 /// Terminate all open ranges listed as indices in \c KillSet with 852 /// \c Location by removing them from the set. 853 void erase(const VarLocsInRange &KillSet, const VarLocMap &VarLocIDs, 854 LocIndex::u32_location_t Location); 855 856 /// Insert a new range into the set. 857 void insert(LocIndices VarLocIDs, const VarLoc &VL); 858 859 /// Insert a set of ranges. 860 void insertFromLocSet(const VarLocSet &ToLoad, const VarLocMap &Map); 861 862 llvm::Optional<LocIndices> getEntryValueBackup(DebugVariable Var); 863 864 /// Empty the set. 865 void clear() { 866 VarLocs.clear(); 867 Vars.clear(); 868 EntryValuesBackupVars.clear(); 869 } 870 871 /// Return whether the set is empty or not. 872 bool empty() const { 873 assert(Vars.empty() == EntryValuesBackupVars.empty() && 874 Vars.empty() == VarLocs.empty() && 875 "open ranges are inconsistent"); 876 return VarLocs.empty(); 877 } 878 879 /// Get an empty range of VarLoc IDs. 880 auto getEmptyVarLocRange() const { 881 return iterator_range<VarLocSet::const_iterator>(getVarLocs().end(), 882 getVarLocs().end()); 883 } 884 885 /// Get all set IDs for VarLocs with MLs of kind RegisterKind in \p Reg. 886 auto getRegisterVarLocs(Register Reg) const { 887 return LocIndex::indexRangeForLocation(getVarLocs(), Reg); 888 } 889 890 /// Get all set IDs for VarLocs with MLs of kind SpillLocKind. 891 auto getSpillVarLocs() const { 892 return LocIndex::indexRangeForLocation(getVarLocs(), 893 LocIndex::kSpillLocation); 894 } 895 896 /// Get all set IDs for VarLocs of EVKind EntryValueBackupKind or 897 /// EntryValueCopyBackupKind. 898 auto getEntryValueBackupVarLocs() const { 899 return LocIndex::indexRangeForLocation( 900 getVarLocs(), LocIndex::kEntryValueBackupLocation); 901 } 902 }; 903 904 /// Collect all VarLoc IDs from \p CollectFrom for VarLocs with MLs of kind 905 /// RegisterKind which are located in any reg in \p Regs. The IDs for each 906 /// VarLoc correspond to entries in the universal location bucket, which every 907 /// VarLoc has exactly 1 entry for. Insert collected IDs into \p Collected. 
908 static void collectIDsForRegs(VarLocsInRange &Collected, 909 const DefinedRegsSet &Regs, 910 const VarLocSet &CollectFrom, 911 const VarLocMap &VarLocIDs); 912 913 VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, VarLocInMBB &Locs) { 914 std::unique_ptr<VarLocSet> &VLS = Locs[MBB]; 915 if (!VLS) 916 VLS = std::make_unique<VarLocSet>(Alloc); 917 return *VLS; 918 } 919 920 const VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, 921 const VarLocInMBB &Locs) const { 922 auto It = Locs.find(MBB); 923 assert(It != Locs.end() && "MBB not in map"); 924 return *It->second; 925 } 926 927 /// Tests whether this instruction is a spill to a stack location. 928 bool isSpillInstruction(const MachineInstr &MI, MachineFunction *MF); 929 930 /// Decide if \p MI is a spill instruction and return true if it is. We use 2 931 /// criteria to make this decision: 932 /// - Is this instruction a store to a spill slot? 933 /// - Is there a register operand that is both used and killed? 934 /// TODO: Store optimization can fold spills into other stores (including 935 /// other spills). We do not handle this yet (more than one memory operand). 936 bool isLocationSpill(const MachineInstr &MI, MachineFunction *MF, 937 Register &Reg); 938 939 /// Returns true if the given machine instruction is a debug value which we 940 /// can emit entry values for. 941 /// 942 /// Currently, we generate debug entry values only for parameters that are 943 /// unmodified throughout the function and located in a register. 944 bool isEntryValueCandidate(const MachineInstr &MI, 945 const DefinedRegsSet &Regs) const; 946 947 /// If a given instruction is identified as a restore from a spill location, 948 /// return that spill location and set \p Reg to the restored register. 949 Optional<VarLoc::SpillLoc> isRestoreInstruction(const MachineInstr &MI, 950 MachineFunction *MF, 951 Register &Reg); 952 /// Given a spill instruction, extract the register and offset used to 953 /// address the spill location in a target independent way. 
954 VarLoc::SpillLoc extractSpillBaseRegAndOffset(const MachineInstr &MI); 955 void insertTransferDebugPair(MachineInstr &MI, OpenRangesSet &OpenRanges, 956 TransferMap &Transfers, VarLocMap &VarLocIDs, 957 LocIndex OldVarID, TransferKind Kind, 958 const VarLoc::MachineLoc &OldLoc, 959 Register NewReg = Register()); 960 961 void transferDebugValue(const MachineInstr &MI, OpenRangesSet &OpenRanges, 962 VarLocMap &VarLocIDs, 963 InstToEntryLocMap &EntryValTransfers, 964 RegDefToInstMap &RegSetInstrs); 965 void transferSpillOrRestoreInst(MachineInstr &MI, OpenRangesSet &OpenRanges, 966 VarLocMap &VarLocIDs, TransferMap &Transfers); 967 void cleanupEntryValueTransfers(const MachineInstr *MI, 968 OpenRangesSet &OpenRanges, 969 VarLocMap &VarLocIDs, const VarLoc &EntryVL, 970 InstToEntryLocMap &EntryValTransfers); 971 void removeEntryValue(const MachineInstr &MI, OpenRangesSet &OpenRanges, 972 VarLocMap &VarLocIDs, const VarLoc &EntryVL, 973 InstToEntryLocMap &EntryValTransfers, 974 RegDefToInstMap &RegSetInstrs); 975 void emitEntryValues(MachineInstr &MI, OpenRangesSet &OpenRanges, 976 VarLocMap &VarLocIDs, 977 InstToEntryLocMap &EntryValTransfers, 978 VarLocsInRange &KillSet); 979 void recordEntryValue(const MachineInstr &MI, 980 const DefinedRegsSet &DefinedRegs, 981 OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs); 982 void transferRegisterCopy(MachineInstr &MI, OpenRangesSet &OpenRanges, 983 VarLocMap &VarLocIDs, TransferMap &Transfers); 984 void transferRegisterDef(MachineInstr &MI, OpenRangesSet &OpenRanges, 985 VarLocMap &VarLocIDs, 986 InstToEntryLocMap &EntryValTransfers, 987 RegDefToInstMap &RegSetInstrs); 988 bool transferTerminator(MachineBasicBlock *MBB, OpenRangesSet &OpenRanges, 989 VarLocInMBB &OutLocs, const VarLocMap &VarLocIDs); 990 991 void process(MachineInstr &MI, OpenRangesSet &OpenRanges, 992 VarLocMap &VarLocIDs, TransferMap &Transfers, 993 InstToEntryLocMap &EntryValTransfers, 994 RegDefToInstMap &RegSetInstrs); 995 996 void accumulateFragmentMap(MachineInstr &MI, VarToFragments &SeenFragments, 997 OverlapMap &OLapMap); 998 999 bool join(MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs, 1000 const VarLocMap &VarLocIDs, 1001 SmallPtrSet<const MachineBasicBlock *, 16> &Visited, 1002 SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks); 1003 1004 /// Create DBG_VALUE insts for inlocs that have been propagated but 1005 /// had their instruction creation deferred. 1006 void flushPendingLocs(VarLocInMBB &PendingInLocs, VarLocMap &VarLocIDs); 1007 1008 bool ExtendRanges(MachineFunction &MF, MachineDominatorTree *DomTree, 1009 TargetPassConfig *TPC, unsigned InputBBLimit, 1010 unsigned InputDbgValLimit) override; 1011 1012 public: 1013 /// Default construct and initialize the pass. 1014 VarLocBasedLDV(); 1015 1016 ~VarLocBasedLDV(); 1017 1018 /// Print to ostream with a message. 1019 void printVarLocInMBB(const MachineFunction &MF, const VarLocInMBB &V, 1020 const VarLocMap &VarLocIDs, const char *msg, 1021 raw_ostream &Out) const; 1022 }; 1023 1024 } // end anonymous namespace 1025 1026 //===----------------------------------------------------------------------===// 1027 // Implementation 1028 //===----------------------------------------------------------------------===// 1029 1030 VarLocBasedLDV::VarLocBasedLDV() = default; 1031 1032 VarLocBasedLDV::~VarLocBasedLDV() = default; 1033 1034 /// Erase a variable from the set of open ranges, and additionally erase any 1035 /// fragments that may overlap it. 
If the VarLoc is a backup location, erase 1036 /// the variable from the EntryValuesBackupVars set, indicating we should stop 1037 /// tracking its backup entry location. Otherwise, if the VarLoc is primary 1038 /// location, erase the variable from the Vars set. 1039 void VarLocBasedLDV::OpenRangesSet::erase(const VarLoc &VL) { 1040 // Erasure helper. 1041 auto DoErase = [VL, this](DebugVariable VarToErase) { 1042 auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars; 1043 auto It = EraseFrom->find(VarToErase); 1044 if (It != EraseFrom->end()) { 1045 LocIndices IDs = It->second; 1046 for (LocIndex ID : IDs) 1047 VarLocs.reset(ID.getAsRawInteger()); 1048 EraseFrom->erase(It); 1049 } 1050 }; 1051 1052 DebugVariable Var = VL.Var; 1053 1054 // Erase the variable/fragment that ends here. 1055 DoErase(Var); 1056 1057 // Extract the fragment. Interpret an empty fragment as one that covers all 1058 // possible bits. 1059 FragmentInfo ThisFragment = Var.getFragmentOrDefault(); 1060 1061 // There may be fragments that overlap the designated fragment. Look them up 1062 // in the pre-computed overlap map, and erase them too. 1063 auto MapIt = OverlappingFragments.find({Var.getVariable(), ThisFragment}); 1064 if (MapIt != OverlappingFragments.end()) { 1065 for (auto Fragment : MapIt->second) { 1066 VarLocBasedLDV::OptFragmentInfo FragmentHolder; 1067 if (!DebugVariable::isDefaultFragment(Fragment)) 1068 FragmentHolder = VarLocBasedLDV::OptFragmentInfo(Fragment); 1069 DoErase({Var.getVariable(), FragmentHolder, Var.getInlinedAt()}); 1070 } 1071 } 1072 } 1073 1074 void VarLocBasedLDV::OpenRangesSet::erase(const VarLocsInRange &KillSet, 1075 const VarLocMap &VarLocIDs, 1076 LocIndex::u32_location_t Location) { 1077 VarLocSet RemoveSet(Alloc); 1078 for (LocIndex::u32_index_t ID : KillSet) { 1079 const VarLoc &VL = VarLocIDs[LocIndex(Location, ID)]; 1080 auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars; 1081 EraseFrom->erase(VL.Var); 1082 LocIndices VLI = VarLocIDs.getAllIndices(VL); 1083 for (LocIndex ID : VLI) 1084 RemoveSet.set(ID.getAsRawInteger()); 1085 } 1086 VarLocs.intersectWithComplement(RemoveSet); 1087 } 1088 1089 void VarLocBasedLDV::OpenRangesSet::insertFromLocSet(const VarLocSet &ToLoad, 1090 const VarLocMap &Map) { 1091 VarLocsInRange UniqueVarLocIDs; 1092 DefinedRegsSet Regs; 1093 Regs.insert(LocIndex::kUniversalLocation); 1094 collectIDsForRegs(UniqueVarLocIDs, Regs, ToLoad, Map); 1095 for (uint64_t ID : UniqueVarLocIDs) { 1096 LocIndex Idx = LocIndex::fromRawInteger(ID); 1097 const VarLoc &VarL = Map[Idx]; 1098 const LocIndices Indices = Map.getAllIndices(VarL); 1099 insert(Indices, VarL); 1100 } 1101 } 1102 1103 void VarLocBasedLDV::OpenRangesSet::insert(LocIndices VarLocIDs, 1104 const VarLoc &VL) { 1105 auto *InsertInto = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars; 1106 for (LocIndex ID : VarLocIDs) 1107 VarLocs.set(ID.getAsRawInteger()); 1108 InsertInto->insert({VL.Var, VarLocIDs}); 1109 } 1110 1111 /// Return the Loc ID of an entry value backup location, if it exists for the 1112 /// variable. 
1113 llvm::Optional<LocIndices> 1114 VarLocBasedLDV::OpenRangesSet::getEntryValueBackup(DebugVariable Var) { 1115 auto It = EntryValuesBackupVars.find(Var); 1116 if (It != EntryValuesBackupVars.end()) 1117 return It->second; 1118 1119 return llvm::None; 1120 } 1121 1122 void VarLocBasedLDV::collectIDsForRegs(VarLocsInRange &Collected, 1123 const DefinedRegsSet &Regs, 1124 const VarLocSet &CollectFrom, 1125 const VarLocMap &VarLocIDs) { 1126 assert(!Regs.empty() && "Nothing to collect"); 1127 SmallVector<Register, 32> SortedRegs; 1128 append_range(SortedRegs, Regs); 1129 array_pod_sort(SortedRegs.begin(), SortedRegs.end()); 1130 auto It = CollectFrom.find(LocIndex::rawIndexForReg(SortedRegs.front())); 1131 auto End = CollectFrom.end(); 1132 for (Register Reg : SortedRegs) { 1133 // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains 1134 // all possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which 1135 // live in Reg. 1136 uint64_t FirstIndexForReg = LocIndex::rawIndexForReg(Reg); 1137 uint64_t FirstInvalidIndex = LocIndex::rawIndexForReg(Reg + 1); 1138 It.advanceToLowerBound(FirstIndexForReg); 1139 1140 // Iterate through that half-open interval and collect all the set IDs. 1141 for (; It != End && *It < FirstInvalidIndex; ++It) { 1142 LocIndex ItIdx = LocIndex::fromRawInteger(*It); 1143 const VarLoc &VL = VarLocIDs[ItIdx]; 1144 LocIndices LI = VarLocIDs.getAllIndices(VL); 1145 // For now, the back index is always the universal location index. 1146 assert(LI.back().Location == LocIndex::kUniversalLocation && 1147 "Unexpected order of LocIndices for VarLoc; was it inserted into " 1148 "the VarLocMap correctly?"); 1149 Collected.insert(LI.back().Index); 1150 } 1151 1152 if (It == End) 1153 return; 1154 } 1155 } 1156 1157 void VarLocBasedLDV::getUsedRegs(const VarLocSet &CollectFrom, 1158 SmallVectorImpl<Register> &UsedRegs) const { 1159 // All register-based VarLocs are assigned indices greater than or equal to 1160 // FirstRegIndex. 1161 uint64_t FirstRegIndex = 1162 LocIndex::rawIndexForReg(LocIndex::kFirstRegLocation); 1163 uint64_t FirstInvalidIndex = 1164 LocIndex::rawIndexForReg(LocIndex::kFirstInvalidRegLocation); 1165 for (auto It = CollectFrom.find(FirstRegIndex), 1166 End = CollectFrom.find(FirstInvalidIndex); 1167 It != End;) { 1168 // We found a VarLoc ID for a VarLoc that lives in a register. Figure out 1169 // which register and add it to UsedRegs. 1170 uint32_t FoundReg = LocIndex::fromRawInteger(*It).Location; 1171 assert((UsedRegs.empty() || FoundReg != UsedRegs.back()) && 1172 "Duplicate used reg"); 1173 UsedRegs.push_back(FoundReg); 1174 1175 // Skip to the next /set/ register. Note that this finds a lower bound, so 1176 // even if there aren't any VarLocs living in `FoundReg+1`, we're still 1177 // guaranteed to move on to the next register (or to end()). 
1178 uint64_t NextRegIndex = LocIndex::rawIndexForReg(FoundReg + 1); 1179 It.advanceToLowerBound(NextRegIndex); 1180 } 1181 } 1182 1183 //===----------------------------------------------------------------------===// 1184 // Debug Range Extension Implementation 1185 //===----------------------------------------------------------------------===// 1186 1187 #ifndef NDEBUG 1188 void VarLocBasedLDV::printVarLocInMBB(const MachineFunction &MF, 1189 const VarLocInMBB &V, 1190 const VarLocMap &VarLocIDs, 1191 const char *msg, 1192 raw_ostream &Out) const { 1193 Out << '\n' << msg << '\n'; 1194 for (const MachineBasicBlock &BB : MF) { 1195 if (!V.count(&BB)) 1196 continue; 1197 const VarLocSet &L = getVarLocsInMBB(&BB, V); 1198 if (L.empty()) 1199 continue; 1200 SmallVector<VarLoc, 32> VarLocs; 1201 collectAllVarLocs(VarLocs, L, VarLocIDs); 1202 Out << "MBB: " << BB.getNumber() << ":\n"; 1203 for (const VarLoc &VL : VarLocs) { 1204 Out << " Var: " << VL.Var.getVariable()->getName(); 1205 Out << " MI: "; 1206 VL.dump(TRI, Out); 1207 } 1208 } 1209 Out << "\n"; 1210 } 1211 #endif 1212 1213 VarLocBasedLDV::VarLoc::SpillLoc 1214 VarLocBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) { 1215 assert(MI.hasOneMemOperand() && 1216 "Spill instruction does not have exactly one memory operand?"); 1217 auto MMOI = MI.memoperands_begin(); 1218 const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue(); 1219 assert(PVal->kind() == PseudoSourceValue::FixedStack && 1220 "Inconsistent memory operand in spill instruction"); 1221 int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex(); 1222 const MachineBasicBlock *MBB = MI.getParent(); 1223 Register Reg; 1224 StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg); 1225 return {Reg, Offset}; 1226 } 1227 1228 /// Do cleanup of \p EntryValTransfers created by \p TRInst, by removing the 1229 /// transfer that uses the to-be-deleted \p EntryVL. 1230 void VarLocBasedLDV::cleanupEntryValueTransfers( 1231 const MachineInstr *TRInst, OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs, 1232 const VarLoc &EntryVL, InstToEntryLocMap &EntryValTransfers) { 1233 if (EntryValTransfers.empty() || TRInst == nullptr) 1234 return; 1235 1236 auto TransRange = EntryValTransfers.equal_range(TRInst); 1237 for (auto TDPair : llvm::make_range(TransRange.first, TransRange.second)) { 1238 const VarLoc &EmittedEV = VarLocIDs[TDPair.second]; 1239 if (std::tie(EntryVL.Var, EntryVL.Locs[0].Value.RegNo, EntryVL.Expr) == 1240 std::tie(EmittedEV.Var, EmittedEV.Locs[0].Value.RegNo, 1241 EmittedEV.Expr)) { 1242 OpenRanges.erase(EmittedEV); 1243 EntryValTransfers.erase(TRInst); 1244 break; 1245 } 1246 } 1247 } 1248 1249 /// Try to salvage the debug entry value if we encounter a new debug value 1250 /// describing the same parameter, otherwise stop tracking the value. If we 1251 /// stop tracking the entry value, also clean up the Entry Value Transfers 1252 /// that were emitted for it. 1253 void VarLocBasedLDV::removeEntryValue(const MachineInstr &MI, 1254 OpenRangesSet &OpenRanges, 1255 VarLocMap &VarLocIDs, 1256 const VarLoc &EntryVL, 1257 InstToEntryLocMap &EntryValTransfers, 1258 RegDefToInstMap &RegSetInstrs) { 1259 // Skip the DBG_VALUE which is the debug entry value itself. 1260 if (&MI == &EntryVL.MI) 1261 return; 1262 1263 // If the parameter's location is not a register location, we cannot track 1264 // the entry value anymore. 
It doesn't have the TransferInst which defines 1265 // register, so no Entry Value Transfers have been emitted already. 1266 if (!MI.getDebugOperand(0).isReg()) 1267 return; 1268 1269 // Try to get non-debug instruction responsible for the DBG_VALUE. 1270 const MachineInstr *TransferInst = nullptr; 1271 Register Reg = MI.getDebugOperand(0).getReg(); 1272 if (Reg.isValid() && RegSetInstrs.find(Reg) != RegSetInstrs.end()) 1273 TransferInst = RegSetInstrs.find(Reg)->second; 1274 1275 // Case of the parameter's DBG_VALUE at the start of entry MBB. 1276 if (!TransferInst && !LastNonDbgMI && MI.getParent()->isEntryBlock()) 1277 return; 1278 1279 // If the debug expression from the DBG_VALUE is not empty, we can assume the 1280 // parameter's value has changed indicating that we should stop tracking its 1281 // entry value as well. 1282 if (MI.getDebugExpression()->getNumElements() == 0 && TransferInst) { 1283 // If the DBG_VALUE comes from a copy instruction that copies the entry 1284 // value, it means the parameter's value has not changed and we should be 1285 // able to use its entry value. 1286 // TODO: Try to keep tracking of an entry value if we encounter a propagated 1287 // DBG_VALUE describing the copy of the entry value. (Propagated entry value 1288 // does not indicate the parameter modification.) 1289 auto DestSrc = TII->isCopyInstr(*TransferInst); 1290 if (DestSrc) { 1291 const MachineOperand *SrcRegOp, *DestRegOp; 1292 SrcRegOp = DestSrc->Source; 1293 DestRegOp = DestSrc->Destination; 1294 if (Reg == DestRegOp->getReg()) { 1295 for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) { 1296 const VarLoc &VL = VarLocIDs[LocIndex::fromRawInteger(ID)]; 1297 if (VL.isEntryValueCopyBackupReg(Reg) && 1298 // Entry Values should not be variadic. 1299 VL.MI.getDebugOperand(0).getReg() == SrcRegOp->getReg()) 1300 return; 1301 } 1302 } 1303 } 1304 } 1305 1306 LLVM_DEBUG(dbgs() << "Deleting a DBG entry value because of: "; 1307 MI.print(dbgs(), /*IsStandalone*/ false, 1308 /*SkipOpers*/ false, /*SkipDebugLoc*/ false, 1309 /*AddNewLine*/ true, TII)); 1310 cleanupEntryValueTransfers(TransferInst, OpenRanges, VarLocIDs, EntryVL, 1311 EntryValTransfers); 1312 OpenRanges.erase(EntryVL); 1313 } 1314 1315 /// End all previous ranges related to @MI and start a new range from @MI 1316 /// if it is a DBG_VALUE instr. 1317 void VarLocBasedLDV::transferDebugValue(const MachineInstr &MI, 1318 OpenRangesSet &OpenRanges, 1319 VarLocMap &VarLocIDs, 1320 InstToEntryLocMap &EntryValTransfers, 1321 RegDefToInstMap &RegSetInstrs) { 1322 if (!MI.isDebugValue()) 1323 return; 1324 const DILocalVariable *Var = MI.getDebugVariable(); 1325 const DIExpression *Expr = MI.getDebugExpression(); 1326 const DILocation *DebugLoc = MI.getDebugLoc(); 1327 const DILocation *InlinedAt = DebugLoc->getInlinedAt(); 1328 assert(Var->isValidLocationForIntrinsic(DebugLoc) && 1329 "Expected inlined-at fields to agree"); 1330 1331 DebugVariable V(Var, Expr, InlinedAt); 1332 1333 // Check if this DBG_VALUE indicates a parameter's value changing. 1334 // If that is the case, we should stop tracking its entry value. 
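// Schematic illustration (simplified MIR; "!var" stands in for the variable's
// metadata): a parameter that enters the function in $rdi and is first
// described by something like
//   DBG_VALUE $rdi, $noreg, !var, !DIExpression()
// gets an entry value backup VarLoc. If a later DBG_VALUE shows the
// parameter's value being recomputed, removeEntryValue() below drops that
// backup; otherwise emitEntryValues() can later recover the location as
//   DBG_VALUE $rdi, $noreg, !var, !DIExpression(DW_OP_LLVM_entry_value, 1)
// once $rdi is clobbered.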
1335 auto EntryValBackupID = OpenRanges.getEntryValueBackup(V); 1336 if (Var->isParameter() && EntryValBackupID) { 1337 const VarLoc &EntryVL = VarLocIDs[EntryValBackupID->back()]; 1338 removeEntryValue(MI, OpenRanges, VarLocIDs, EntryVL, EntryValTransfers, 1339 RegSetInstrs); 1340 } 1341 1342 if (all_of(MI.debug_operands(), [](const MachineOperand &MO) { 1343 return (MO.isReg() && MO.getReg()) || MO.isImm() || MO.isFPImm() || 1344 MO.isCImm(); 1345 })) { 1346 // Use normal VarLoc constructor for registers and immediates. 1347 VarLoc VL(MI, LS); 1348 // End all previous ranges of VL.Var. 1349 OpenRanges.erase(VL); 1350 1351 LocIndices IDs = VarLocIDs.insert(VL); 1352 // Add the VarLoc to OpenRanges from this DBG_VALUE. 1353 OpenRanges.insert(IDs, VL); 1354 } else if (MI.memoperands().size() > 0) { 1355 llvm_unreachable("DBG_VALUE with mem operand encountered after regalloc?"); 1356 } else { 1357 // This must be an undefined location. If it has an open range, erase it. 1358 assert(MI.isUndefDebugValue() && 1359 "Unexpected non-undef DBG_VALUE encountered"); 1360 VarLoc VL(MI, LS); 1361 OpenRanges.erase(VL); 1362 } 1363 } 1364 1365 // This should be removed later, doesn't fit the new design. 1366 void VarLocBasedLDV::collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected, 1367 const VarLocSet &CollectFrom, 1368 const VarLocMap &VarLocIDs) { 1369 // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains all 1370 // possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which live 1371 // in Reg. 1372 uint64_t FirstIndex = LocIndex::rawIndexForReg(LocIndex::kUniversalLocation); 1373 uint64_t FirstInvalidIndex = 1374 LocIndex::rawIndexForReg(LocIndex::kUniversalLocation + 1); 1375 // Iterate through that half-open interval and collect all the set IDs. 1376 for (auto It = CollectFrom.find(FirstIndex), End = CollectFrom.end(); 1377 It != End && *It < FirstInvalidIndex; ++It) { 1378 LocIndex RegIdx = LocIndex::fromRawInteger(*It); 1379 Collected.push_back(VarLocIDs[RegIdx]); 1380 } 1381 } 1382 1383 /// Turn the entry value backup locations into primary locations. 1384 void VarLocBasedLDV::emitEntryValues(MachineInstr &MI, 1385 OpenRangesSet &OpenRanges, 1386 VarLocMap &VarLocIDs, 1387 InstToEntryLocMap &EntryValTransfers, 1388 VarLocsInRange &KillSet) { 1389 // Do not insert entry value locations after a terminator. 1390 if (MI.isTerminator()) 1391 return; 1392 1393 for (uint32_t ID : KillSet) { 1394 // The KillSet IDs are indices for the universal location bucket. 1395 LocIndex Idx = LocIndex(LocIndex::kUniversalLocation, ID); 1396 const VarLoc &VL = VarLocIDs[Idx]; 1397 if (!VL.Var.getVariable()->isParameter()) 1398 continue; 1399 1400 auto DebugVar = VL.Var; 1401 Optional<LocIndices> EntryValBackupIDs = 1402 OpenRanges.getEntryValueBackup(DebugVar); 1403 1404 // If the parameter has the entry value backup, it means we should 1405 // be able to use its entry value. 1406 if (!EntryValBackupIDs) 1407 continue; 1408 1409 const VarLoc &EntryVL = VarLocIDs[EntryValBackupIDs->back()]; 1410 VarLoc EntryLoc = VarLoc::CreateEntryLoc(EntryVL.MI, LS, EntryVL.Expr, 1411 EntryVL.Locs[0].Value.RegNo); 1412 LocIndices EntryValueIDs = VarLocIDs.insert(EntryLoc); 1413 assert(EntryValueIDs.size() == 1 && 1414 "EntryValue loc should not be variadic"); 1415 EntryValTransfers.insert({&MI, EntryValueIDs.back()}); 1416 OpenRanges.insert(EntryValueIDs, EntryLoc); 1417 } 1418 } 1419 1420 /// Create new TransferDebugPair and insert it in \p Transfers. 

/// Create new TransferDebugPair and insert it in \p Transfers. The VarLoc
/// with \p OldVarID should be deleted from \p OpenRanges and replaced with a
/// new VarLoc. If \p NewReg is different from the default zero value, then the
/// new location will be a register location created by the copy-like
/// instruction; otherwise it is the variable's location on the stack.
void VarLocBasedLDV::insertTransferDebugPair(
    MachineInstr &MI, OpenRangesSet &OpenRanges, TransferMap &Transfers,
    VarLocMap &VarLocIDs, LocIndex OldVarID, TransferKind Kind,
    const VarLoc::MachineLoc &OldLoc, Register NewReg) {
  const VarLoc &OldVarLoc = VarLocIDs[OldVarID];

  auto ProcessVarLoc = [&MI, &OpenRanges, &Transfers, &VarLocIDs](VarLoc &VL) {
    LocIndices LocIds = VarLocIDs.insert(VL);

    // Close this variable's previous location range.
    OpenRanges.erase(VL);

    // Record the new location as an open range, and a postponed transfer
    // inserting a DBG_VALUE for this location.
    OpenRanges.insert(LocIds, VL);
    assert(!MI.isTerminator() && "Cannot insert DBG_VALUE after terminator");
    TransferDebugPair MIP = {&MI, LocIds.back()};
    Transfers.push_back(MIP);
  };

  // End all previous ranges of VL.Var.
  OpenRanges.erase(VarLocIDs[OldVarID]);
  switch (Kind) {
  case TransferKind::TransferCopy: {
    assert(NewReg &&
           "No register supplied when handling a copy of a debug value");
    // Create a DBG_VALUE instruction to describe the Var in its new
    // register location.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for register copy:";
      VL.dump(TRI);
    });
    return;
  }
  case TransferKind::TransferSpill: {
    // Create a DBG_VALUE instruction to describe the Var in its spilled
    // location.
    VarLoc::SpillLoc SpillLocation = extractSpillBaseRegAndOffset(MI);
    VarLoc VL = VarLoc::CreateSpillLoc(
        OldVarLoc, OldLoc, SpillLocation.SpillBase, SpillLocation.SpillOffset);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for spill:";
      VL.dump(TRI);
    });
    return;
  }
  case TransferKind::TransferRestore: {
    assert(NewReg &&
           "No register supplied when handling a restore of a debug value");
    // DebugInstr refers to the pre-spill location, therefore we can reuse
    // its expression.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
    ProcessVarLoc(VL);
    LLVM_DEBUG({
      dbgs() << "Creating VarLoc for restore:";
      VL.dump(TRI);
    });
    return;
  }
  }
  llvm_unreachable("Invalid transfer kind");
}

/// A definition of a register may mark the end of a range.
void VarLocBasedLDV::transferRegisterDef(MachineInstr &MI,
                                         OpenRangesSet &OpenRanges,
                                         VarLocMap &VarLocIDs,
                                         InstToEntryLocMap &EntryValTransfers,
                                         RegDefToInstMap &RegSetInstrs) {

  // Meta Instructions do not affect the debug liveness of any register they
  // define.
  if (MI.isMetaInstruction())
    return;

  MachineFunction *MF = MI.getMF();
  const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  Register SP = TLI->getStackPointerRegisterToSaveRestore();

  // Find the regs killed by MI, and find regmasks of preserved regs.
  DefinedRegsSet DeadRegs;
  SmallVector<const uint32_t *, 4> RegMasks;
  for (const MachineOperand &MO : MI.operands()) {
    // Determine whether the operand is a register def.
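    // A def closes the ranges of the defined register and of every register
    // aliasing it; as a hedged x86-64 example, a def of $eax would also end
    // locations held in $ax, $ah, $al and $rax. (Defs of the stack pointer by
    // call instructions are deliberately ignored by the check below.)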
    if (MO.isReg() && MO.isDef() && MO.getReg() &&
        Register::isPhysicalRegister(MO.getReg()) &&
        !(MI.isCall() && MO.getReg() == SP)) {
      // Remove ranges of all aliased registers.
      for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid();
           ++RAI)
        // FIXME: Can we break out of this loop early if no insertion occurs?
        DeadRegs.insert(*RAI);
      RegSetInstrs.erase(MO.getReg());
      RegSetInstrs.insert({MO.getReg(), &MI});
    } else if (MO.isRegMask()) {
      RegMasks.push_back(MO.getRegMask());
    }
  }

  // Erase VarLocs which reside in one of the dead registers. For performance
  // reasons, it's critical to not iterate over the full set of open VarLocs.
  // Iterate over the set of dying/used regs instead.
  if (!RegMasks.empty()) {
    SmallVector<Register, 32> UsedRegs;
    getUsedRegs(OpenRanges.getVarLocs(), UsedRegs);
    for (Register Reg : UsedRegs) {
      // Remove ranges of all clobbered registers. Register masks don't usually
      // list SP as preserved. Assume that call instructions never clobber SP,
      // because some backends (e.g., AArch64) never list SP in the regmask.
      // While the debug info may be off for an instruction or two around
      // callee-cleanup calls, transferring the DEBUG_VALUE across the call is
      // still a better user experience.
      if (Reg == SP)
        continue;
      bool AnyRegMaskKillsReg =
          any_of(RegMasks, [Reg](const uint32_t *RegMask) {
            return MachineOperand::clobbersPhysReg(RegMask, Reg);
          });
      if (AnyRegMaskKillsReg) {
        DeadRegs.insert(Reg);
        RegSetInstrs.erase(Reg);
        RegSetInstrs.insert({Reg, &MI});
      }
    }
  }

  if (DeadRegs.empty())
    return;

  VarLocsInRange KillSet;
  collectIDsForRegs(KillSet, DeadRegs, OpenRanges.getVarLocs(), VarLocIDs);
  OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kUniversalLocation);

  if (TPC) {
    auto &TM = TPC->getTM<TargetMachine>();
    if (TM.Options.ShouldEmitDebugEntryValues())
      emitEntryValues(MI, OpenRanges, VarLocIDs, EntryValTransfers, KillSet);
  }
}

bool VarLocBasedLDV::isSpillInstruction(const MachineInstr &MI,
                                        MachineFunction *MF) {
  // TODO: Handle multiple stores folded into one.
  if (!MI.hasOneMemOperand())
    return false;

  if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII))
    return false; // This is not a spill instruction, since no valid size was
                  // returned from either function.

  return true;
}

bool VarLocBasedLDV::isLocationSpill(const MachineInstr &MI,
                                     MachineFunction *MF, Register &Reg) {
  if (!isSpillInstruction(MI, MF))
    return false;

  auto isKilledReg = [&](const MachineOperand &MO, Register &Reg) {
    if (!MO.isReg() || !MO.isUse()) {
      Reg = 0;
      return false;
    }
    Reg = MO.getReg();
    return MO.isKill();
  };

  for (const MachineOperand &MO : MI.operands()) {
    // In a spill instruction generated by the InlineSpiller, the spilled
    // register has its kill flag set.
    if (isKilledReg(MO, Reg))
      return true;
    if (Reg != 0) {
      // Check whether the next instruction kills the spilled register.
      // FIXME: The current solution does not cover searching for the killed
      // register in bundles or in instructions further down the chain.
      auto NextI = std::next(MI.getIterator());
      // Skip if the next instruction is the basic block's end iterator.
      if (MI.getParent()->end() == NextI)
        continue;
      Register RegNext;
      for (const MachineOperand &MONext : NextI->operands()) {
        // Return true if we came across the register from the
        // previous spill instruction that is killed in NextI.
        if (isKilledReg(MONext, RegNext) && RegNext == Reg)
          return true;
      }
    }
  }
  // Return false if we didn't find a spilled register.
  return false;
}

Optional<VarLocBasedLDV::VarLoc::SpillLoc>
VarLocBasedLDV::isRestoreInstruction(const MachineInstr &MI,
                                     MachineFunction *MF, Register &Reg) {
  if (!MI.hasOneMemOperand())
    return None;

  // FIXME: Handle folded restore instructions with more than one memory
  // operand.
  if (MI.getRestoreSize(TII)) {
    Reg = MI.getOperand(0).getReg();
    return extractSpillBaseRegAndOffset(MI);
  }
  return None;
}

/// A spilled register may indicate that we have to end the current range of
/// a variable and create a new one for the spill location.
/// A restored register may indicate the reverse situation.
/// We don't want to insert any instructions in process(), so we just create
/// the DBG_VALUE without inserting it and keep track of it in \p Transfers.
/// It will be inserted into the BB when we're done iterating over the
/// instructions.
void VarLocBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI,
                                                OpenRangesSet &OpenRanges,
                                                VarLocMap &VarLocIDs,
                                                TransferMap &Transfers) {
  MachineFunction *MF = MI.getMF();
  TransferKind TKind;
  Register Reg;
  Optional<VarLoc::SpillLoc> Loc;

  LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););

  // First, if there are any DBG_VALUEs pointing at a spill slot that is
  // written to, then close the variable location. The value in memory
  // will have changed.
  VarLocsInRange KillSet;
  if (isSpillInstruction(MI, MF)) {
    Loc = extractSpillBaseRegAndOffset(MI);
    for (uint64_t ID : OpenRanges.getSpillVarLocs()) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      const VarLoc &VL = VarLocIDs[Idx];
      assert(VL.containsSpillLocs() && "Broken VarLocSet?");
      if (VL.usesSpillLoc(*Loc)) {
        // This location is overwritten by the current instruction -- terminate
        // the open range, and insert an explicit DBG_VALUE $noreg.
        //
        // Doing this at a later stage would require re-interpreting all
        // DBG_VALUEs and DIExpressions to identify whether they point at
        // memory, and then analysing all memory writes to see if they
        // overwrite that memory, which is expensive.
        //
        // At this stage, we already know which DBG_VALUEs are for spills and
        // where they are located; it's best to handle overwrites now.
        KillSet.insert(ID);
        unsigned SpillLocIdx = VL.getSpillLocIdx(*Loc);
        VarLoc::MachineLoc OldLoc = VL.Locs[SpillLocIdx];
        VarLoc UndefVL = VarLoc::CreateCopyLoc(VL, OldLoc, 0);
        LocIndices UndefLocIDs = VarLocIDs.insert(UndefVL);
        Transfers.push_back({&MI, UndefLocIDs.back()});
      }
    }
    OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kSpillLocation);
  }

  // Try to recognise spill and restore instructions that may create a new
  // variable location.
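  // A hedged x86-64 illustration of the spill case: given
  //   DBG_VALUE $rdi, $noreg, !"x", !DIExpression()
  //   MOV64mr $rsp, 1, $noreg, -8, $noreg, killed renamable $rdi
  // the store is recognised as a spill of $rdi, and a pending transfer is
  // recorded that later materializes as a DBG_VALUE describing the stack slot
  // (spill base register plus offset) instead of $rdi. The exact operand
  // syntax and registers are illustrative, not target guarantees.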
  if (isLocationSpill(MI, MF, Reg)) {
    TKind = TransferKind::TransferSpill;
    LLVM_DEBUG(dbgs() << "Recognized as spill: "; MI.dump(););
    LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
                      << "\n");
  } else {
    if (!(Loc = isRestoreInstruction(MI, MF, Reg)))
      return;
    TKind = TransferKind::TransferRestore;
    LLVM_DEBUG(dbgs() << "Recognized as restore: "; MI.dump(););
    LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
                      << "\n");
  }
  // Check if the register or spill location is the location of a debug value.
  auto TransferCandidates = OpenRanges.getEmptyVarLocRange();
  if (TKind == TransferKind::TransferSpill)
    TransferCandidates = OpenRanges.getRegisterVarLocs(Reg);
  else if (TKind == TransferKind::TransferRestore)
    TransferCandidates = OpenRanges.getSpillVarLocs();
  for (uint64_t ID : TransferCandidates) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VL = VarLocIDs[Idx];
    unsigned LocIdx;
    if (TKind == TransferKind::TransferSpill) {
      assert(VL.usesReg(Reg) && "Broken VarLocSet?");
      LLVM_DEBUG(dbgs() << "Spilling Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getRegIdx(Reg);
    } else {
      assert(TKind == TransferKind::TransferRestore &&
             VL.containsSpillLocs() && "Broken VarLocSet?");
      if (!VL.usesSpillLoc(*Loc))
        // The spill location is not the location of a debug value.
        continue;
      LLVM_DEBUG(dbgs() << "Restoring Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getSpillLocIdx(*Loc);
    }
    VarLoc::MachineLoc MLoc = VL.Locs[LocIdx];
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx, TKind,
                            MLoc, Reg);
    // FIXME: A comment should explain why it's correct to return early here,
    // if that is in fact correct.
    return;
  }
}

/// If \p MI is a register copy instruction that copies a previously tracked
/// value from one register to another register that is callee saved, we
/// create a new DBG_VALUE instruction describing the value in the copy
/// destination register.
void VarLocBasedLDV::transferRegisterCopy(MachineInstr &MI,
                                          OpenRangesSet &OpenRanges,
                                          VarLocMap &VarLocIDs,
                                          TransferMap &Transfers) {
  auto DestSrc = TII->isCopyInstr(MI);
  if (!DestSrc)
    return;

  const MachineOperand *DestRegOp = DestSrc->Destination;
  const MachineOperand *SrcRegOp = DestSrc->Source;

  if (!DestRegOp->isDef())
    return;

  auto isCalleeSavedReg = [&](Register Reg) {
    for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
      if (CalleeSavedRegs.test(*RAI))
        return true;
    return false;
  };

  Register SrcReg = SrcRegOp->getReg();
  Register DestReg = DestRegOp->getReg();

  // We want to recognize instructions where the destination register is a
  // callee-saved register. If a register that could be clobbered by a call
  // were used instead, there would be a good chance of it being clobbered
  // soon. The callee-saved destination is more likely to stay unclobbered
  // for longer, even if the source register is killed.
  if (!isCalleeSavedReg(DestReg))
    return;
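
  // A hedged example of what follows: for "$rbx = COPY killed $rdi" with a
  // variable currently located in $rdi, $rbx being callee saved means a
  // transfer is recorded so that the variable is subsequently described by
  // $rbx. Register names are illustrative and target dependent.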

  // Remember an entry value movement. If we encounter a new debug value of
  // a parameter that only describes moving the value around, rather than
  // modifying it, we are still able to use the entry value if needed.
  if (isRegOtherThanSPAndFP(*DestRegOp, MI, TRI)) {
    for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      const VarLoc &VL = VarLocIDs[Idx];
      if (VL.isEntryValueBackupReg(SrcReg)) {
        LLVM_DEBUG(dbgs() << "Copy of the entry value: "; MI.dump(););
        VarLoc EntryValLocCopyBackup =
            VarLoc::CreateEntryCopyBackupLoc(VL.MI, LS, VL.Expr, DestReg);
        // Stop tracking the original entry value.
        OpenRanges.erase(VL);

        // Start tracking the entry value copy.
        LocIndices EntryValCopyLocIDs = VarLocIDs.insert(EntryValLocCopyBackup);
        OpenRanges.insert(EntryValCopyLocIDs, EntryValLocCopyBackup);
        break;
      }
    }
  }

  if (!SrcRegOp->isKill())
    return;

  for (uint64_t ID : OpenRanges.getRegisterVarLocs(SrcReg)) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    assert(VarLocIDs[Idx].usesReg(SrcReg) && "Broken VarLocSet?");
    VarLoc::MachineLocValue Loc;
    Loc.RegNo = SrcReg;
    VarLoc::MachineLoc MLoc{VarLoc::MachineLocKind::RegisterKind, Loc};
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx,
                            TransferKind::TransferCopy, MLoc, DestReg);
    // FIXME: A comment should explain why it's correct to return early here,
    // if that is in fact correct.
    return;
  }
}

/// Terminate all open ranges at the end of the current basic block.
bool VarLocBasedLDV::transferTerminator(MachineBasicBlock *CurMBB,
                                        OpenRangesSet &OpenRanges,
                                        VarLocInMBB &OutLocs,
                                        const VarLocMap &VarLocIDs) {
  bool Changed = false;
  LLVM_DEBUG({
    VarVec VarLocs;
    OpenRanges.getUniqueVarLocs(VarLocs, VarLocIDs);
    for (VarLoc &VL : VarLocs) {
      // Copy OpenRanges to OutLocs, if not already present.
      dbgs() << "Add to OutLocs in MBB #" << CurMBB->getNumber() << ": ";
      VL.dump(TRI);
    }
  });
  VarLocSet &VLS = getVarLocsInMBB(CurMBB, OutLocs);
  Changed = VLS != OpenRanges.getVarLocs();
  // The new OutLocs set may be different due to spill, restore or register
  // copy instruction processing.
  if (Changed)
    VLS = OpenRanges.getVarLocs();
  OpenRanges.clear();
  return Changed;
}

/// Accumulate a mapping between each DILocalVariable fragment and other
/// fragments of that DILocalVariable which overlap. This reduces work during
/// the data-flow stage from "Find any overlapping fragments" to "Check if the
/// known-to-overlap fragments are present".
/// \param MI A previously unprocessed DEBUG_VALUE instruction to analyze for
/// fragment usage.
/// \param SeenFragments Map from DILocalVariable to all fragments of that
/// Variable which are known to exist.
/// \param OverlappingFragments The overlap map being constructed, from one
/// Var/Fragment pair to a vector of fragments known to overlap.
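/// As a hedged illustration: if fragments (offset 0, size 32) and
/// (offset 16, size 32) of the same variable are both seen, they overlap, so
/// each is appended to the other's entry in \p OverlappingFragments.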
void VarLocBasedLDV::accumulateFragmentMap(MachineInstr &MI,
                                           VarToFragments &SeenFragments,
                                           OverlapMap &OverlappingFragments) {
  DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
                      MI.getDebugLoc()->getInlinedAt());
  FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();

  // If this is the first sighting of this variable, then we are guaranteed
  // there are currently no overlapping fragments either. Initialize the set
  // of seen fragments, record no overlaps for the current one, and return.
  auto SeenIt = SeenFragments.find(MIVar.getVariable());
  if (SeenIt == SeenFragments.end()) {
    SmallSet<FragmentInfo, 4> OneFragment;
    OneFragment.insert(ThisFragment);
    SeenFragments.insert({MIVar.getVariable(), OneFragment});

    OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
    return;
  }

  // If this particular Variable/Fragment pair already exists in the overlap
  // map, it has already been accounted for.
  auto IsInOLapMap =
      OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
  if (!IsInOLapMap.second)
    return;

  auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
  auto &AllSeenFragments = SeenIt->second;

  // Otherwise, examine all other seen fragments for this variable, with "this"
  // fragment being a previously unseen fragment. Record any pair of
  // overlapping fragments.
  for (const auto &ASeenFragment : AllSeenFragments) {
    // Does this previously seen fragment overlap?
    if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) {
      // Yes: Mark the current fragment as being overlapped.
      ThisFragmentsOverlaps.push_back(ASeenFragment);
      // Mark the previously seen fragment as being overlapped by the current
      // one.
      auto ASeenFragmentsOverlaps =
          OverlappingFragments.find({MIVar.getVariable(), ASeenFragment});
      assert(ASeenFragmentsOverlaps != OverlappingFragments.end() &&
             "Previously seen var fragment has no vector of overlaps");
      ASeenFragmentsOverlaps->second.push_back(ThisFragment);
    }
  }

  AllSeenFragments.insert(ThisFragment);
}

/// This routine processes a single instruction, updating OpenRanges and
/// recording pending location transfers.
void VarLocBasedLDV::process(MachineInstr &MI, OpenRangesSet &OpenRanges,
                             VarLocMap &VarLocIDs, TransferMap &Transfers,
                             InstToEntryLocMap &EntryValTransfers,
                             RegDefToInstMap &RegSetInstrs) {
  if (!MI.isDebugInstr())
    LastNonDbgMI = &MI;
  transferDebugValue(MI, OpenRanges, VarLocIDs, EntryValTransfers,
                     RegSetInstrs);
  transferRegisterDef(MI, OpenRanges, VarLocIDs, EntryValTransfers,
                      RegSetInstrs);
  transferRegisterCopy(MI, OpenRanges, VarLocIDs, Transfers);
  transferSpillOrRestoreInst(MI, OpenRanges, VarLocIDs, Transfers);
}

/// This routine joins the analysis results of all incoming edges in @MBB by
/// intersecting the live-out location sets of the visited predecessors: a
/// location is live-in to @MBB only if the same source variable resides in
/// that location in all of them.
bool VarLocBasedLDV::join(
    MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
    const VarLocMap &VarLocIDs,
    SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
    SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks) {
  LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n");

  VarLocSet InLocsT(Alloc); // Temporary incoming locations.
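
  // A hedged sketch of the meet performed below: with visited predecessors
  // P1..Pn, the candidate live-in set is OutLocs(P1) & ... & OutLocs(Pn).
  // Predecessors that have not been visited yet (back edges on the first
  // pass) are simply skipped, i.e. they do not constrain the result.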

  // For all predecessors of this MBB, find the set of VarLocs that
  // can be joined.
  int NumVisited = 0;
  for (auto *p : MBB.predecessors()) {
    // Ignore backedges if we have not visited the predecessor yet. As the
    // predecessor hasn't yet had locations propagated into it, most locations
    // will not yet be valid, so treat them as all being uninitialized and
    // potentially valid. If a location guessed to be correct here is
    // invalidated later, we will remove it when we revisit this block.
    if (!Visited.count(p)) {
      LLVM_DEBUG(dbgs() << "  ignoring unvisited pred MBB: " << p->getNumber()
                        << "\n");
      continue;
    }
    auto OL = OutLocs.find(p);
    // The join is empty in case of an empty OutLocs from any of the visited
    // predecessors.
    if (OL == OutLocs.end())
      return false;

    // Just copy over the Out locs to incoming locs for the first visited
    // predecessor, and for all other predecessors join the Out locs.
    VarLocSet &OutLocVLS = *OL->second;
    if (!NumVisited)
      InLocsT = OutLocVLS;
    else
      InLocsT &= OutLocVLS;

    LLVM_DEBUG({
      if (!InLocsT.empty()) {
        VarVec VarLocs;
        collectAllVarLocs(VarLocs, InLocsT, VarLocIDs);
        for (const VarLoc &VL : VarLocs)
          dbgs() << "  gathered candidate incoming var: "
                 << VL.Var.getVariable()->getName() << "\n";
      }
    });

    NumVisited++;
  }

  // Filter out DBG_VALUEs that are out of scope.
  VarLocSet KillSet(Alloc);
  bool IsArtificial = ArtificialBlocks.count(&MBB);
  if (!IsArtificial) {
    for (uint64_t ID : InLocsT) {
      LocIndex Idx = LocIndex::fromRawInteger(ID);
      if (!VarLocIDs[Idx].dominates(LS, MBB)) {
        KillSet.set(ID);
        LLVM_DEBUG({
          auto Name = VarLocIDs[Idx].Var.getVariable()->getName();
          dbgs() << "  killing " << Name << ", it doesn't dominate MBB\n";
        });
      }
    }
  }
  InLocsT.intersectWithComplement(KillSet);

  // As we are processing blocks in reverse post-order, we should have
  // processed at least one predecessor, unless it is the entry block, which
  // has no predecessors.
  assert((NumVisited || MBB.pred_empty()) &&
         "Should have processed at least one predecessor");

  VarLocSet &ILS = getVarLocsInMBB(&MBB, InLocs);
  bool Changed = false;
  if (ILS != InLocsT) {
    ILS = InLocsT;
    Changed = true;
  }

  return Changed;
}

void VarLocBasedLDV::flushPendingLocs(VarLocInMBB &PendingInLocs,
                                      VarLocMap &VarLocIDs) {
  // PendingInLocs records all locations propagated into blocks, which have
  // not yet had DBG_VALUE insts created. Go through and create those insts
  // now.
  for (auto &Iter : PendingInLocs) {
    // The map is keyed on a const pointer; unwrap it so we can insert insts.
    auto &MBB = const_cast<MachineBasicBlock &>(*Iter.first);
    VarLocSet &Pending = *Iter.second;

    SmallVector<VarLoc, 32> VarLocs;
    collectAllVarLocs(VarLocs, Pending, VarLocIDs);

    for (VarLoc DiffIt : VarLocs) {
      // The ID location is live-in to MBB -- work out what kind of machine
      // location it is and create a DBG_VALUE.
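      // (Entry-value backup locations are skipped below: they only become
      // real locations once promoted by emitEntryValues.)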
      if (DiffIt.isEntryBackupLoc())
        continue;
      MachineInstr *MI = DiffIt.BuildDbgValue(*MBB.getParent());
      MBB.insert(MBB.instr_begin(), MI);

      (void)MI;
      LLVM_DEBUG(dbgs() << "Inserted: "; MI->dump(););
    }
  }
}

bool VarLocBasedLDV::isEntryValueCandidate(
    const MachineInstr &MI, const DefinedRegsSet &DefinedRegs) const {
  assert(MI.isDebugValue() && "This must be DBG_VALUE.");

  // TODO: Add support for local variables that are expressed in terms of
  // parameters' entry values.
  // TODO: Add support for modified arguments that can be expressed by using
  // their entry values.
  auto *DIVar = MI.getDebugVariable();
  if (!DIVar->isParameter())
    return false;

  // Do not consider parameters that belong to an inlined function.
  if (MI.getDebugLoc()->getInlinedAt())
    return false;

  // Only consider parameters that are described using registers. Parameters
  // that are passed on the stack are not yet supported, so ignore debug
  // values that are described by the frame or stack pointer.
  if (!isRegOtherThanSPAndFP(MI.getDebugOperand(0), MI, TRI))
    return false;

  // If a parameter's value has been propagated from the caller, then the
  // parameter's DBG_VALUE may be described using a register defined by some
  // instruction in the entry block, in which case we shouldn't create an
  // entry value.
  if (DefinedRegs.count(MI.getDebugOperand(0).getReg()))
    return false;

  // TODO: Add support for parameters that have pre-existing debug expressions
  // (e.g. fragments).
  if (MI.getDebugExpression()->getNumElements() > 0)
    return false;

  return true;
}

/// Collect all register defines (including aliases) for the given instruction.
static void collectRegDefs(const MachineInstr &MI, DefinedRegsSet &Regs,
                           const TargetRegisterInfo *TRI) {
  for (const MachineOperand &MO : MI.operands())
    if (MO.isReg() && MO.isDef() && MO.getReg())
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Regs.insert(*AI);
}

/// This routine records the entry values of function parameters. The values
/// could be used as backup values. If we lose track of some unmodified
/// parameters, the backup values will be used as primary locations.
void VarLocBasedLDV::recordEntryValue(const MachineInstr &MI,
                                      const DefinedRegsSet &DefinedRegs,
                                      OpenRangesSet &OpenRanges,
                                      VarLocMap &VarLocIDs) {
  if (TPC) {
    auto &TM = TPC->getTM<TargetMachine>();
    if (!TM.Options.ShouldEmitDebugEntryValues())
      return;
  }

  DebugVariable V(MI.getDebugVariable(), MI.getDebugExpression(),
                  MI.getDebugLoc()->getInlinedAt());

  if (!isEntryValueCandidate(MI, DefinedRegs) ||
      OpenRanges.getEntryValueBackup(V))
    return;

  LLVM_DEBUG(dbgs() << "Creating the backup entry location: "; MI.dump(););

  // Create the entry value and use it as a backup location. It remains valid
  // as long as the parameter is not modified.
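  // For an empty incoming expression, the prepend below yields roughly
  // !DIExpression(DW_OP_LLVM_entry_value, 1); this is a hedged illustration,
  // and the exact operands are whatever DIExpression::prepend produces.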
  DIExpression *NewExpr =
      DIExpression::prepend(MI.getDebugExpression(), DIExpression::EntryValue);
  VarLoc EntryValLocAsBackup = VarLoc::CreateEntryBackupLoc(MI, LS, NewExpr);
  LocIndices EntryValLocIDs = VarLocIDs.insert(EntryValLocAsBackup);
  OpenRanges.insert(EntryValLocIDs, EntryValLocAsBackup);
}

/// Calculate the liveness information for the given machine function and
/// extend ranges across basic blocks.
bool VarLocBasedLDV::ExtendRanges(MachineFunction &MF,
                                  MachineDominatorTree *DomTree,
                                  TargetPassConfig *TPC, unsigned InputBBLimit,
                                  unsigned InputDbgValLimit) {
  (void)DomTree;
  LLVM_DEBUG(dbgs() << "\nDebug Range Extension\n");

  if (!MF.getFunction().getSubprogram())
    // VarLocBasedLDV will already have removed all DBG_VALUEs.
    return false;

  // Skip functions from NoDebug compilation units.
  if (MF.getFunction().getSubprogram()->getUnit()->getEmissionKind() ==
      DICompileUnit::NoDebug)
    return false;

  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  TFI = MF.getSubtarget().getFrameLowering();
  TFI->getCalleeSaves(MF, CalleeSavedRegs);
  this->TPC = TPC;
  LS.initialize(MF);

  bool Changed = false;
  bool OLChanged = false;
  bool MBBJoined = false;

  VarLocMap VarLocIDs;         // Map VarLoc<>unique ID for use in bitvectors.
  OverlapMap OverlapFragments; // Map of overlapping variable fragments.
  OpenRangesSet OpenRanges(Alloc, OverlapFragments);
                               // Ranges that are open until end of bb.
  VarLocInMBB OutLocs;         // Ranges that exist beyond bb.
  VarLocInMBB InLocs;          // Ranges that are incoming after joining.
  TransferMap Transfers;       // DBG_VALUEs associated with transfers (such as
                               // spills, copies and restores).
  // Map responsible MI to attached Transfer emitted from Backup Entry Value.
  InstToEntryLocMap EntryValTransfers;
  // Map a Register to the last MI which clobbered it.
  RegDefToInstMap RegSetInstrs;

  VarToFragments SeenFragments;

  // Blocks which are artificial, i.e. blocks which exclusively contain
  // instructions without locations, or with line 0 locations.
  SmallPtrSet<const MachineBasicBlock *, 16> ArtificialBlocks;

  DenseMap<unsigned int, MachineBasicBlock *> OrderToBB;
  DenseMap<MachineBasicBlock *, unsigned int> BBToOrder;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Worklist;
  std::priority_queue<unsigned int, std::vector<unsigned int>,
                      std::greater<unsigned int>>
      Pending;

  // Set of register defines that are seen when traversing the entry block
  // looking for debug entry value candidates.
  DefinedRegsSet DefinedRegs;

  // Only in the case of the entry MBB, collect DBG_VALUEs representing
  // function parameters in order to generate debug entry values for them.
  MachineBasicBlock &First_MBB = *(MF.begin());
  for (auto &MI : First_MBB) {
    collectRegDefs(MI, DefinedRegs, TRI);
    if (MI.isDebugValue())
      recordEntryValue(MI, DefinedRegs, OpenRanges, VarLocIDs);
  }

  // Initialize per-block structures and scan for fragment overlaps.
  for (auto &MBB : MF)
    for (auto &MI : MBB)
      if (MI.isDebugValue())
        accumulateFragmentMap(MI, SeenFragments, OverlapFragments);

  auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool {
    if (const DebugLoc &DL = MI.getDebugLoc())
      return DL.getLine() != 0;
    return false;
  };
  for (auto &MBB : MF)
    if (none_of(MBB.instrs(), hasNonArtificialLocation))
      ArtificialBlocks.insert(&MBB);

  LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
                              "OutLocs after initialization", dbgs()));

  ReversePostOrderTraversal<MachineFunction *> RPOT(&MF);
  unsigned int RPONumber = 0;
  for (MachineBasicBlock *MBB : RPOT) {
    OrderToBB[RPONumber] = MBB;
    BBToOrder[MBB] = RPONumber;
    Worklist.push(RPONumber);
    ++RPONumber;
  }

  if (RPONumber > InputBBLimit) {
    unsigned NumInputDbgValues = 0;
    for (auto &MBB : MF)
      for (auto &MI : MBB)
        if (MI.isDebugValue())
          ++NumInputDbgValues;
    if (NumInputDbgValues > InputDbgValLimit) {
      LLVM_DEBUG(dbgs() << "Disabling VarLocBasedLDV: " << MF.getName()
                        << " has " << RPONumber << " basic blocks and "
                        << NumInputDbgValues
                        << " input DBG_VALUEs, exceeding limits.\n");
      return false;
    }
  }

  // This is a standard "intersection of predecessor outs" dataflow problem.
  // To solve it, we perform join() and process() using the two worklist method
  // until the ranges converge.
  // Ranges have converged when both worklists are empty.
  SmallPtrSet<const MachineBasicBlock *, 16> Visited;
  while (!Worklist.empty() || !Pending.empty()) {
    // We track what is on the pending worklist to avoid inserting the same
    // thing twice. We could avoid this with a custom priority queue, but this
    // is probably not worth it.
    SmallPtrSet<MachineBasicBlock *, 16> OnPending;
    LLVM_DEBUG(dbgs() << "Processing Worklist\n");
    while (!Worklist.empty()) {
      MachineBasicBlock *MBB = OrderToBB[Worklist.top()];
      Worklist.pop();
      MBBJoined = join(*MBB, OutLocs, InLocs, VarLocIDs, Visited,
                       ArtificialBlocks);
      MBBJoined |= Visited.insert(MBB).second;
      if (MBBJoined) {
        MBBJoined = false;
        Changed = true;
        // Now that we have started to extend ranges across BBs we need to
        // examine spill, copy and restore instructions to see whether they
        // operate on registers that correspond to user variables.
        // First load any pending inlocs.
        OpenRanges.insertFromLocSet(getVarLocsInMBB(MBB, InLocs), VarLocIDs);
        LastNonDbgMI = nullptr;
        RegSetInstrs.clear();
        for (auto &MI : *MBB)
          process(MI, OpenRanges, VarLocIDs, Transfers, EntryValTransfers,
                  RegSetInstrs);
        OLChanged |= transferTerminator(MBB, OpenRanges, OutLocs, VarLocIDs);

        LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
                                    "OutLocs after propagating", dbgs()));
        LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs,
                                    "InLocs after propagating", dbgs()));

        if (OLChanged) {
          OLChanged = false;
          for (auto *s : MBB->successors())
            if (OnPending.insert(s).second) {
              Pending.push(BBToOrder[s]);
            }
        }
      }
    }
    Worklist.swap(Pending);
    // At this point, Pending must be empty, since it was just swapped with
    // the (now empty) Worklist.
    assert(Pending.empty() && "Pending should be empty");
  }

  // Add any DBG_VALUE instructions created by location transfers.
  for (auto &TR : Transfers) {
    assert(!TR.TransferInst->isTerminator() &&
           "Cannot insert DBG_VALUE after terminator");
    MachineBasicBlock *MBB = TR.TransferInst->getParent();
    const VarLoc &VL = VarLocIDs[TR.LocationID];
    MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TR.TransferInst->getIterator(), MI);
  }
  Transfers.clear();

  // Add DBG_VALUEs created using Backup Entry Value location.
  for (auto &TR : EntryValTransfers) {
    MachineInstr *TRInst = const_cast<MachineInstr *>(TR.first);
    assert(!TRInst->isTerminator() &&
           "Cannot insert DBG_VALUE after terminator");
    MachineBasicBlock *MBB = TRInst->getParent();
    const VarLoc &VL = VarLocIDs[TR.second];
    MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TRInst->getIterator(), MI);
  }
  EntryValTransfers.clear();

  // Deferred inlocs will not have had any DBG_VALUE insts created; do
  // that now.
  flushPendingLocs(InLocs, VarLocIDs);

  LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs, "Final OutLocs", dbgs()));
  LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs, "Final InLocs", dbgs()));
  return Changed;
}

LDVImpl *llvm::makeVarLocBasedLiveDebugValues() {
  return new VarLocBasedLDV();
}