//===-- Value.cpp - Implement the Value class ----------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Value, ValueHandle, and User classes.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Value.h"
#include "LLVMContextImpl.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DerivedUser.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/TypedPointerType.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>

using namespace llvm;

static cl::opt<unsigned> UseDerefAtPointSemantics(
    "use-dereferenceable-at-point-semantics", cl::Hidden, cl::init(false),
    cl::desc("Deref attributes and metadata infer facts at definition only"));

//===----------------------------------------------------------------------===//
//                                Value Class
//===----------------------------------------------------------------------===//
static inline Type *checkType(Type *Ty) {
  assert(Ty && "Value defined with a null type: Error!");
  assert(!isa<TypedPointerType>(Ty->getScalarType()) &&
         "Cannot have values with typed pointer types");
  return Ty;
}

Value::Value(Type *ty, unsigned scid)
    : VTy(checkType(ty)), UseList(nullptr), SubclassID(scid), HasValueHandle(0),
      SubclassOptionalData(0), SubclassData(0), NumUserOperands(0),
      IsUsedByMD(false), HasName(false), HasMetadata(false) {
  static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)");
  // FIXME: Why isn't this in the subclass gunk??
  // Note, we cannot call isa<CallInst> before the CallInst has been
  // constructed.
  unsigned OpCode = 0;
  if (SubclassID >= InstructionVal)
    OpCode = SubclassID - InstructionVal;
  if (OpCode == Instruction::Call || OpCode == Instruction::Invoke ||
      OpCode == Instruction::CallBr)
    assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) &&
           "invalid CallBase type!");
  else if (SubclassID != BasicBlockVal &&
           (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal))
    assert((VTy->isFirstClassType() || VTy->isVoidTy()) &&
           "Cannot create non-first-class values except for constants!");
  static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned),
                "Value too big");
}

Value::~Value() {
  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsDeleted(this);
  if (isUsedByMetadata())
    ValueAsMetadata::handleDeletion(this);

  // Remove associated metadata from context.
  if (HasMetadata)
    clearMetadata();

#ifndef NDEBUG // Only in -g mode...
  // Check to make sure that there are no uses of this value that are still
  // around when the value is destroyed.  If there are, then we have a dangling
  // reference and something is wrong.  This code is here to print out where
  // the value is still being referenced.
  //
  // Note that use_empty() cannot be called here, as it eventually downcasts
  // 'this' to GlobalValue (derived class of Value), but GlobalValue has already
  // been destructed, so accessing it is UB.
  //
  if (!materialized_use_empty()) {
    dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n";
    for (auto *U : users())
      dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n";
  }
#endif
  assert(materialized_use_empty() && "Uses remain when a value is destroyed!");

  // If this value is named, destroy the name. This should not be in a symtab
  // at this point.
  destroyValueName();
}

void Value::deleteValue() {
  switch (getValueID()) {
#define HANDLE_VALUE(Name)                                                     \
  case Value::Name##Val:                                                       \
    delete static_cast<Name *>(this);                                          \
    break;
#define HANDLE_MEMORY_VALUE(Name)                                              \
  case Value::Name##Val:                                                       \
    static_cast<DerivedUser *>(this)->DeleteValue(                             \
        static_cast<DerivedUser *>(this));                                     \
    break;
#define HANDLE_CONSTANT(Name)                                                  \
  case Value::Name##Val:                                                       \
    llvm_unreachable("constants should be destroyed with destroyConstant");    \
    break;
#define HANDLE_INSTRUCTION(Name) /* nothing */
#include "llvm/IR/Value.def"

#define HANDLE_INST(N, OPC, CLASS)                                             \
  case Value::InstructionVal + Instruction::OPC:                               \
    delete static_cast<CLASS *>(this);                                         \
    break;
#define HANDLE_USER_INST(N, OPC, CLASS)
#include "llvm/IR/Instruction.def"

  default:
    llvm_unreachable("attempting to delete unknown value kind");
  }
}

void Value::destroyValueName() {
  ValueName *Name = getValueName();
  if (Name) {
    MallocAllocator Allocator;
    Name->Destroy(Allocator);
  }
  setValueName(nullptr);
}

bool Value::hasNUses(unsigned N) const {
  return hasNItems(use_begin(), use_end(), N);
}

bool Value::hasNUsesOrMore(unsigned N) const {
  return hasNItemsOrMore(use_begin(), use_end(), N);
}

bool Value::hasOneUser() const {
  if (use_empty())
    return false;
  if (hasOneUse())
    return true;
  return std::equal(++user_begin(), user_end(), user_begin());
}

static bool isUnDroppableUser(const User *U) { return !U->isDroppable(); }

Use *Value::getSingleUndroppableUse() {
  Use *Result = nullptr;
  for (Use &U : uses()) {
    if (!U.getUser()->isDroppable()) {
      if (Result)
        return nullptr;
      Result = &U;
    }
  }
  return Result;
}

User *Value::getUniqueUndroppableUser() {
  User *Result = nullptr;
  for (auto *U : users()) {
    if (!U->isDroppable()) {
      if (Result && Result != U)
        return nullptr;
      Result = U;
    }
  }
  return Result;
}

bool Value::hasNUndroppableUses(unsigned int N) const {
  return hasNItems(user_begin(), user_end(), N, isUnDroppableUser);
}

bool Value::hasNUndroppableUsesOrMore(unsigned int N) const {
  return hasNItemsOrMore(user_begin(), user_end(), N, isUnDroppableUser);
}

void Value::dropDroppableUses(
    llvm::function_ref<bool(const Use *)> ShouldDrop) {
  SmallVector<Use *, 8> ToBeEdited;
  for (Use &U : uses())
    if (U.getUser()->isDroppable() && ShouldDrop(&U))
      ToBeEdited.push_back(&U);
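  // Edit the collected uses only after the scan above; dropDroppableUse
  // rewrites the use list and would otherwise invalidate the iteration.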
  for (Use *U : ToBeEdited)
    dropDroppableUse(*U);
}

void Value::dropDroppableUsesIn(User &Usr) {
  assert(Usr.isDroppable() && "Expected a droppable user!");
  for (Use &UsrOp : Usr.operands()) {
    if (UsrOp.get() == this)
      dropDroppableUse(UsrOp);
  }
}

void Value::dropDroppableUse(Use &U) {
  U.removeFromList();
  if (auto *Assume = dyn_cast<AssumeInst>(U.getUser())) {
    unsigned OpNo = U.getOperandNo();
    if (OpNo == 0)
      U.set(ConstantInt::getTrue(Assume->getContext()));
    else {
      U.set(UndefValue::get(U.get()->getType()));
      CallInst::BundleOpInfo &BOI = Assume->getBundleOpInfoForOperand(OpNo);
      BOI.Tag = Assume->getContext().pImpl->getOrInsertBundleTag("ignore");
    }
    return;
  }

  llvm_unreachable("unknown droppable use");
}

bool Value::isUsedInBasicBlock(const BasicBlock *BB) const {
  // This can be computed either by scanning the instructions in BB, or by
  // scanning the use list of this Value. Both lists can be very long, but
  // usually one is quite short.
  //
  // Scan both lists simultaneously until one is exhausted. This limits the
  // search to the shorter list.
  BasicBlock::const_iterator BI = BB->begin(), BE = BB->end();
  const_user_iterator UI = user_begin(), UE = user_end();
  for (; BI != BE && UI != UE; ++BI, ++UI) {
    // Scan basic block: Check if this Value is used by the instruction at BI.
    if (is_contained(BI->operands(), this))
      return true;
    // Scan use list: Check if the use at UI is in BB.
    const auto *User = dyn_cast<Instruction>(*UI);
    if (User && User->getParent() == BB)
      return true;
  }
  return false;
}

unsigned Value::getNumUses() const {
  return (unsigned)std::distance(use_begin(), use_end());
}

static bool getSymTab(Value *V, ValueSymbolTable *&ST) {
  ST = nullptr;
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    if (BasicBlock *P = I->getParent())
      if (Function *PP = P->getParent())
        ST = PP->getValueSymbolTable();
  } else if (BasicBlock *BB = dyn_cast<BasicBlock>(V)) {
    if (Function *P = BB->getParent())
      ST = P->getValueSymbolTable();
  } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    if (Module *P = GV->getParent())
      ST = &P->getValueSymbolTable();
  } else if (Argument *A = dyn_cast<Argument>(V)) {
    if (Function *P = A->getParent())
      ST = P->getValueSymbolTable();
  } else {
    assert(isa<Constant>(V) && "Unknown value type!");
    return true; // no name is settable for this.
  }
  return false;
}

ValueName *Value::getValueName() const {
  if (!HasName)
    return nullptr;

  LLVMContext &Ctx = getContext();
  auto I = Ctx.pImpl->ValueNames.find(this);
  assert(I != Ctx.pImpl->ValueNames.end() && "No name entry found!");

  return I->second;
}

void Value::setValueName(ValueName *VN) {
  LLVMContext &Ctx = getContext();

  assert(HasName == Ctx.pImpl->ValueNames.count(this) &&
         "HasName bit out of sync!");

  if (!VN) {
    if (HasName)
      Ctx.pImpl->ValueNames.erase(this);
    HasName = false;
    return;
  }

  HasName = true;
  Ctx.pImpl->ValueNames[this] = VN;
}

StringRef Value::getName() const {
  // Make sure the empty string is still a C string. For historical reasons,
  // some clients want to call .data() on the result and expect it to be null
  // terminated.
  if (!hasName())
    return StringRef("", 0);
  return getValueName()->getKey();
}

void Value::setNameImpl(const Twine &NewName) {
  // Fast path: LLVMContext can be set to strip out non-GlobalValue names.
  if (getContext().shouldDiscardValueNames() && !isa<GlobalValue>(this))
    return;

  // Fast path for common IRBuilder case of setName("") when there is no name.
  if (NewName.isTriviallyEmpty() && !hasName())
    return;

  SmallString<256> NameData;
  StringRef NameRef = NewName.toStringRef(NameData);
  assert(NameRef.find_first_of(0) == StringRef::npos &&
         "Null bytes are not allowed in names");

  // Name isn't changing?
  if (getName() == NameRef)
    return;

  assert(!getType()->isVoidTy() && "Cannot assign a name to void values!");

  // Get the symbol table to update for this object.
  ValueSymbolTable *ST;
  if (getSymTab(this, ST))
    return; // Cannot set a name on this value (e.g. constant).

  if (!ST) { // No symbol table to update? Just do the change.
    if (NameRef.empty()) {
      // Free the name for this value.
      destroyValueName();
      return;
    }

    // NOTE: Could optimize for the case the name is shrinking to not
    // deallocate then reallocate.
    destroyValueName();

    // Create the new name.
    MallocAllocator Allocator;
    setValueName(ValueName::create(NameRef, Allocator));
    getValueName()->setValue(this);
    return;
  }

  // NOTE: Could optimize for the case the name is shrinking to not deallocate
  // then reallocate.
  if (hasName()) {
    // Remove old name.
    ST->removeValueName(getValueName());
    destroyValueName();

    if (NameRef.empty())
      return;
  }

  // Name is changing to something new.
  setValueName(ST->createValueName(NameRef, this));
}

void Value::setName(const Twine &NewName) {
  setNameImpl(NewName);
  if (Function *F = dyn_cast<Function>(this))
    F->recalculateIntrinsicID();
}

void Value::takeName(Value *V) {
  assert(V != this && "Illegal call to this->takeName(this)!");
  ValueSymbolTable *ST = nullptr;
  // If this value has a name, drop it.
  if (hasName()) {
    // Get the symtab this is in.
    if (getSymTab(this, ST)) {
      // We can't set a name on this value, but we need to clear V's name if
      // it has one.
      if (V->hasName())
        V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }

    // Remove old name.
    if (ST)
      ST->removeValueName(getValueName());
    destroyValueName();
  }

  // Now we know that this has no name.

  // If V has no name either, we're done.
  if (!V->hasName())
    return;

  // Get this's symtab if we didn't before.
  if (!ST) {
    if (getSymTab(this, ST)) {
      // Clear V's name.
      V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }
  }

  // Get V's ST, this should always succeed, because V has a name.
  ValueSymbolTable *VST;
  bool Failure = getSymTab(V, VST);
  assert(!Failure && "V has a name, so it should have a ST!");
  (void)Failure;

  // If these values are both in the same symtab, we can do this very fast.
  // This works even if both values have no symtab yet.
  if (ST == VST) {
    // Take the name!
    setValueName(V->getValueName());
    V->setValueName(nullptr);
    getValueName()->setValue(this);
    return;
  }

  // Otherwise, things are slightly more complex.  Remove V's name from VST and
  // then reinsert it into ST.

  if (VST)
    VST->removeValueName(V->getValueName());
  setValueName(V->getValueName());
  V->setValueName(nullptr);
  getValueName()->setValue(this);

  if (ST)
    ST->reinsertValue(this);
}

#ifndef NDEBUG
std::string Value::getNameOrAsOperand() const {
  if (!getName().empty())
    return std::string(getName());

  std::string BBName;
  raw_string_ostream OS(BBName);
  printAsOperand(OS, false);
  return OS.str();
}
#endif

void Value::assertModuleIsMaterializedImpl() const {
#ifndef NDEBUG
  const GlobalValue *GV = dyn_cast<GlobalValue>(this);
  if (!GV)
    return;
  const Module *M = GV->getParent();
  if (!M)
    return;
  assert(M->isMaterialized());
#endif
}

#ifndef NDEBUG
static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr,
                     Constant *C) {
  if (!Cache.insert(Expr).second)
    return false;

  for (auto &O : Expr->operands()) {
    if (O == C)
      return true;
    auto *CE = dyn_cast<ConstantExpr>(O);
    if (!CE)
      continue;
    if (contains(Cache, CE, C))
      return true;
  }
  return false;
}

static bool contains(Value *Expr, Value *V) {
  if (Expr == V)
    return true;

  auto *C = dyn_cast<Constant>(V);
  if (!C)
    return false;

  auto *CE = dyn_cast<ConstantExpr>(Expr);
  if (!CE)
    return false;

  SmallPtrSet<ConstantExpr *, 4> Cache;
  return contains(Cache, CE, C);
}
#endif // NDEBUG

void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) {
  assert(New && "Value::replaceAllUsesWith(<null>) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceAllUsesWith(expr(this)) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceAllUses of value with new value of different type!");

  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsRAUWd(this, New);
  if (ReplaceMetaUses == ReplaceMetadataUses::Yes && isUsedByMetadata())
    ValueAsMetadata::handleRAUW(this, New);

  while (!materialized_use_empty()) {
    Use &U = *UseList;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        C->handleOperandChange(this, New);
        continue;
      }
    }

    U.set(New);
  }

  if (BasicBlock *BB = dyn_cast<BasicBlock>(this))
    BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New));
}

void Value::replaceAllUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::Yes);
}

void Value::replaceNonMetadataUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::No);
}

void Value::replaceUsesWithIf(Value *New,
                              llvm::function_ref<bool(Use &U)> ShouldReplace) {
  assert(New && "Value::replaceUsesWithIf(<null>) is invalid!");
  assert(New->getType() == getType() &&
         "replaceUses of value with new value of different type!");

  SmallVector<TrackingVH<Constant>, 8> Consts;
  SmallPtrSet<Constant *, 8> Visited;

  for (Use &U : llvm::make_early_inc_range(uses())) {
    if (!ShouldReplace(U))
      continue;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
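    // Non-global constant users are queued (deduplicated via Visited) and
    // rewritten after this scan, since handleOperandChange can rebuild the
    // constant and perturb the use list being traversed.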
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        if (Visited.insert(C).second)
          Consts.push_back(TrackingVH<Constant>(C));
        continue;
      }
    }
    U.set(New);
  }

  while (!Consts.empty()) {
    // FIXME: handleOperandChange() updates all the uses in a given Constant,
    // not just the one passed to ShouldReplace.
    Consts.pop_back_val()->handleOperandChange(this, New);
  }
}

/// Replace llvm.dbg.* uses of MetadataAsValue(ValueAsMetadata(V)) outside BB
/// with New.
static void replaceDbgUsesOutsideBlock(Value *V, Value *New, BasicBlock *BB) {
  SmallVector<DbgVariableIntrinsic *> DbgUsers;
  findDbgUsers(DbgUsers, V);
  for (auto *DVI : DbgUsers) {
    if (DVI->getParent() != BB)
      DVI->replaceVariableLocationOp(V, New);
  }
}

// Like replaceAllUsesWith except it does not handle constants or basic blocks.
// This routine leaves uses within BB.
void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) {
  assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceUses of value with new value of different type!");
  assert(BB && "Basic block that may contain a use of 'New' must be defined\n");

  replaceDbgUsesOutsideBlock(this, New, BB);
  replaceUsesWithIf(New, [BB](Use &U) {
    auto *I = dyn_cast<Instruction>(U.getUser());
    // Don't replace if it's an instruction in the BB basic block.
    return !I || I->getParent() != BB;
  });
}

namespace {
// Various metrics for how much to strip off of pointers.
enum PointerStripKind {
  PSK_ZeroIndices,
  PSK_ZeroIndicesAndAliases,
  PSK_ZeroIndicesSameRepresentation,
  PSK_ForAliasAnalysis,
  PSK_InBoundsConstantIndices,
  PSK_InBounds
};

template <PointerStripKind StripKind> static void NoopCallback(const Value *) {}

template <PointerStripKind StripKind>
static const Value *stripPointerCastsAndOffsets(
    const Value *V,
    function_ref<void(const Value *)> Func = NoopCallback<StripKind>) {
  if (!V->getType()->isPointerTy())
    return V;

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;

  Visited.insert(V);
  do {
    Func(V);
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      switch (StripKind) {
      case PSK_ZeroIndices:
      case PSK_ZeroIndicesAndAliases:
      case PSK_ZeroIndicesSameRepresentation:
      case PSK_ForAliasAnalysis:
        if (!GEP->hasAllZeroIndices())
          return V;
        break;
      case PSK_InBoundsConstantIndices:
        if (!GEP->hasAllConstantIndices())
          return V;
        [[fallthrough]];
      case PSK_InBounds:
        if (!GEP->isInBounds())
          return V;
        break;
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
      if (!V->getType()->isPointerTy())
        return V;
    } else if (StripKind != PSK_ZeroIndicesSameRepresentation &&
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      // TODO: If we know an address space cast will not change the
      // representation we could look through it here as well.
      V = cast<Operator>(V)->getOperand(0);
    } else if (StripKind == PSK_ZeroIndicesAndAliases && isa<GlobalAlias>(V)) {
      V = cast<GlobalAlias>(V)->getAliasee();
    } else if (StripKind == PSK_ForAliasAnalysis && isa<PHINode>(V) &&
               cast<PHINode>(V)->getNumIncomingValues() == 1) {
      V = cast<PHINode>(V)->getIncomingValue(0);
    } else {
      if (const auto *Call = dyn_cast<CallBase>(V)) {
        if (const Value *RV = Call->getReturnedArgOperand()) {
          V = RV;
          continue;
        }
        // The result of launder.invariant.group must alias its argument,
        // but it can't be marked with the returned attribute; that's why it
        // needs a special case.
        if (StripKind == PSK_ForAliasAnalysis &&
            (Call->getIntrinsicID() == Intrinsic::launder_invariant_group ||
             Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) {
          V = Call->getArgOperand(0);
          continue;
        }
      }
      return V;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  return V;
}
} // end anonymous namespace

const Value *Value::stripPointerCasts() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this);
}

const Value *Value::stripPointerCastsAndAliases() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this);
}

const Value *Value::stripPointerCastsSameRepresentation() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesSameRepresentation>(this);
}

const Value *Value::stripInBoundsConstantOffsets() const {
  return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this);
}

const Value *Value::stripPointerCastsForAliasAnalysis() const {
  return stripPointerCastsAndOffsets<PSK_ForAliasAnalysis>(this);
}

const Value *Value::stripAndAccumulateConstantOffsets(
    const DataLayout &DL, APInt &Offset, bool AllowNonInbounds,
    bool AllowInvariantGroup,
    function_ref<bool(Value &, APInt &)> ExternalAnalysis) const {
  if (!getType()->isPtrOrPtrVectorTy())
    return this;

  unsigned BitWidth = Offset.getBitWidth();
  assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) &&
         "The offset bit width does not match the DL specification.");

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;
  Visited.insert(this);
  const Value *V = this;
  do {
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      // If in-bounds was requested, we do not strip non-in-bounds GEPs.
      if (!AllowNonInbounds && !GEP->isInBounds())
        return V;

      // If one of the values we have visited is an addrspacecast, then
      // the pointer type of this GEP may be different from the type
      // of the Ptr parameter which was passed to this function. This
      // means when we construct GEPOffset, we need to use the size
      // of GEP's pointer type rather than the size of the original
      // pointer type.
      APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0);
      if (!GEP->accumulateConstantOffset(DL, GEPOffset, ExternalAnalysis))
        return V;

      // Stop traversal if the pointer offset wouldn't fit in the bit-width
      // provided by the Offset argument. This can happen due to AddrSpaceCast
      // stripping.
      if (GEPOffset.getMinSignedBits() > BitWidth)
        return V;

      // External Analysis can return a result higher/lower than the value
      // represents. We need to detect overflow/underflow.
      APInt GEPOffsetST = GEPOffset.sextOrTrunc(BitWidth);
      if (!ExternalAnalysis) {
        Offset += GEPOffsetST;
      } else {
        bool Overflow = false;
        APInt OldOffset = Offset;
        Offset = Offset.sadd_ov(GEPOffsetST, Overflow);
        if (Overflow) {
          Offset = OldOffset;
          return V;
        }
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast ||
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (auto *GA = dyn_cast<GlobalAlias>(V)) {
      if (!GA->isInterposable())
        V = GA->getAliasee();
    } else if (const auto *Call = dyn_cast<CallBase>(V)) {
      if (const Value *RV = Call->getReturnedArgOperand())
        V = RV;
      if (AllowInvariantGroup && Call->isLaunderOrStripInvariantGroup())
        V = Call->getArgOperand(0);
    }
    assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  return V;
}

const Value *
Value::stripInBoundsOffsets(function_ref<void(const Value *)> Func) const {
  return stripPointerCastsAndOffsets<PSK_InBounds>(this, Func);
}

bool Value::canBeFreed() const {
  assert(getType()->isPointerTy());

  // Cases that can simply never be deallocated:
  // *) Constants aren't allocated per se, thus not deallocated either.
  if (isa<Constant>(this))
    return false;

  // Handle byval/byref/sret/inalloca/preallocated arguments. The storage
  // lifetime is guaranteed to be longer than the callee's lifetime.
  if (auto *A = dyn_cast<Argument>(this)) {
    if (A->hasPointeeInMemoryValueAttr())
      return false;
    // A pointer to an object in a function which neither frees, nor can
    // arrange for another thread to free on its behalf, cannot be freed in
    // the scope of the function. Note that this logic is restricted to memory
    // allocations in existence before the call; a nofree function *is* allowed
    // to free memory it allocated.
    const Function *F = A->getParent();
    if (F->doesNotFreeMemory() && F->hasNoSync())
      return false;
  }

  const Function *F = nullptr;
  if (auto *I = dyn_cast<Instruction>(this))
    F = I->getFunction();
  if (auto *A = dyn_cast<Argument>(this))
    F = A->getParent();

  if (!F)
    return true;

  // With garbage collection, deallocation typically occurs solely at or after
  // safepoints. If we're compiling for a collector which uses the
  // gc.statepoint infrastructure, safepoints aren't explicitly present
  // in the IR until after lowering from abstract to physical machine model.
  // The collector could choose to mix explicit deallocation and gc'd objects,
  // which is why we need the explicit opt-in on a per-collector basis.
  if (!F->hasGC())
    return true;

  const auto &GCName = F->getGC();
  if (GCName == "statepoint-example") {
    auto *PT = cast<PointerType>(this->getType());
    if (PT->getAddressSpace() != 1)
      // For the sake of this example GC, we arbitrarily pick addrspace(1) as
      // our GC managed heap. This must match the same check in
      // RewriteStatepointsForGC (and probably needs to be better factored).
      return true;

    // It is cheaper to scan for a declaration than to scan for a use in this
    // function.
    // Note that gc.statepoint is a type-overloaded function, so the usual
    // trick of requesting a declaration of the intrinsic from the module
    // doesn't work.
    for (auto &Fn : *F->getParent())
      if (Fn.getIntrinsicID() == Intrinsic::experimental_gc_statepoint)
        return true;
    return false;
  }
  return true;
}

uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL,
                                               bool &CanBeNull,
                                               bool &CanBeFreed) const {
  assert(getType()->isPointerTy() && "must be pointer");

  uint64_t DerefBytes = 0;
  CanBeNull = false;
  CanBeFreed = UseDerefAtPointSemantics && canBeFreed();
  if (const Argument *A = dyn_cast<Argument>(this)) {
    DerefBytes = A->getDereferenceableBytes();
    if (DerefBytes == 0) {
      // Handle byval/byref/inalloca/preallocated arguments.
      if (Type *ArgMemTy = A->getPointeeInMemoryValueType()) {
        if (ArgMemTy->isSized()) {
          // FIXME: Why isn't this the type alloc size?
          DerefBytes = DL.getTypeStoreSize(ArgMemTy).getKnownMinValue();
        }
      }
    }

    if (DerefBytes == 0) {
      DerefBytes = A->getDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    DerefBytes = Call->getRetDereferenceableBytes();
    if (DerefBytes == 0) {
      DerefBytes = Call->getRetDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) {
    if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *AI = dyn_cast<AllocaInst>(this)) {
    if (!AI->isArrayAllocation()) {
      DerefBytes =
          DL.getTypeStoreSize(AI->getAllocatedType()).getKnownMinValue();
      CanBeNull = false;
      CanBeFreed = false;
    }
  } else if (auto *GV = dyn_cast<GlobalVariable>(this)) {
    if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()) {
      // TODO: Don't outright reject hasExternalWeakLinkage but set the
      // CanBeNull flag.
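      // A sized global that is not extern_weak is treated as dereferenceable
      // for its full store size, non-null, and not freeable.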
      DerefBytes = DL.getTypeStoreSize(GV->getValueType()).getFixedValue();
      CanBeNull = false;
      CanBeFreed = false;
    }
  }
  return DerefBytes;
}

Align Value::getPointerAlignment(const DataLayout &DL) const {
  assert(getType()->isPointerTy() && "must be pointer");
  if (auto *GO = dyn_cast<GlobalObject>(this)) {
    if (isa<Function>(GO)) {
      Align FunctionPtrAlign = DL.getFunctionPtrAlign().valueOrOne();
      switch (DL.getFunctionPtrAlignType()) {
      case DataLayout::FunctionPtrAlignType::Independent:
        return FunctionPtrAlign;
      case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign:
        return std::max(FunctionPtrAlign, GO->getAlign().valueOrOne());
      }
      llvm_unreachable("Unhandled FunctionPtrAlignType");
    }
    const MaybeAlign Alignment(GO->getAlign());
    if (!Alignment) {
      if (auto *GVar = dyn_cast<GlobalVariable>(GO)) {
        Type *ObjectType = GVar->getValueType();
        if (ObjectType->isSized()) {
          // If the object is defined in the current Module, we'll be giving
          // it the preferred alignment. Otherwise, we have to assume that it
          // may only have the minimum ABI alignment.
          if (GVar->isStrongDefinitionForLinker())
            return DL.getPreferredAlign(GVar);
          else
            return DL.getABITypeAlign(ObjectType);
        }
      }
    }
    return Alignment.valueOrOne();
  } else if (const Argument *A = dyn_cast<Argument>(this)) {
    const MaybeAlign Alignment = A->getParamAlign();
    if (!Alignment && A->hasStructRetAttr()) {
      // An sret parameter has at least the ABI alignment of the return type.
      Type *EltTy = A->getParamStructRetType();
      if (EltTy->isSized())
        return DL.getABITypeAlign(EltTy);
    }
    return Alignment.valueOrOne();
  } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) {
    return AI->getAlign();
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    MaybeAlign Alignment = Call->getRetAlign();
    if (!Alignment && Call->getCalledFunction())
      Alignment = Call->getCalledFunction()->getAttributes().getRetAlignment();
    return Alignment.valueOrOne();
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      return Align(CI->getLimitedValue());
    }
  } else if (auto *CstPtr = dyn_cast<Constant>(this)) {
    // Strip pointer casts to avoid creating unnecessary ptrtoint expression
    // if the only "reduction" is combining a bitcast + ptrtoint.
    CstPtr = CstPtr->stripPointerCasts();
    if (auto *CstInt = dyn_cast_or_null<ConstantInt>(ConstantExpr::getPtrToInt(
            const_cast<Constant *>(CstPtr), DL.getIntPtrType(getType()),
            /*OnlyIfReduced=*/true))) {
      size_t TrailingZeros = CstInt->getValue().countTrailingZeros();
      // While the actual alignment may be large, elsewhere we have
      // an arbitrary upper alignment limit, so let's clamp to it.
      return Align(TrailingZeros < Value::MaxAlignmentExponent
                       ? uint64_t(1) << TrailingZeros
                       : Value::MaximumAlignment);
    }
  }
  return Align(1);
}

const Value *Value::DoPHITranslation(const BasicBlock *CurBB,
                                     const BasicBlock *PredBB) const {
  auto *PN = dyn_cast<PHINode>(this);
  if (PN && PN->getParent() == CurBB)
    return PN->getIncomingValueForBlock(PredBB);
  return this;
}

LLVMContext &Value::getContext() const { return VTy->getContext(); }

void Value::reverseUseList() {
  if (!UseList || !UseList->Next)
    // No need to reverse 0 or 1 uses.
    return;

  Use *Head = UseList;
  Use *Current = UseList->Next;
  Head->Next = nullptr;
  while (Current) {
    Use *Next = Current->Next;
    Current->Next = Head;
    Head->Prev = &Current->Next;
    Head = Current;
    Current = Next;
  }
  UseList = Head;
  Head->Prev = &UseList;
}

bool Value::isSwiftError() const {
  auto *Arg = dyn_cast<Argument>(this);
  if (Arg)
    return Arg->hasSwiftErrorAttr();
  auto *Alloca = dyn_cast<AllocaInst>(this);
  if (!Alloca)
    return false;
  return Alloca->isSwiftError();
}

//===----------------------------------------------------------------------===//
//                             ValueHandleBase Class
//===----------------------------------------------------------------------===//

void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) {
  assert(List && "Handle list is null?");

  // Splice ourselves into the list.
  Next = *List;
  *List = this;
  setPrevPtr(List);
  if (Next) {
    Next->setPrevPtr(&Next);
    assert(getValPtr() == Next->getValPtr() && "Added to wrong list?");
  }
}

void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) {
  assert(List && "Must insert after existing node");

  Next = List->Next;
  setPrevPtr(&List->Next);
  List->Next = this;
  if (Next)
    Next->setPrevPtr(&Next);
}

void ValueHandleBase::AddToUseList() {
  assert(getValPtr() && "Null pointer doesn't have a use list!");

  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;

  if (getValPtr()->HasValueHandle) {
    // If this value already has a ValueHandle, then it must be in the
    // ValueHandles map already.
    ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()];
    assert(Entry && "Value doesn't have any handles?");
    AddToExistingUseList(&Entry);
    return;
  }

  // Ok, it doesn't have any handles yet, so we must insert it into the
  // DenseMap. However, doing this insertion could cause the DenseMap to
  // reallocate itself, which would invalidate all of the PrevP pointers that
  // point into the old table. Handle this by checking for reallocation and
  // updating the stale pointers only if needed.
  DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
  const void *OldBucketPtr = Handles.getPointerIntoBucketsArray();

  ValueHandleBase *&Entry = Handles[getValPtr()];
  assert(!Entry && "Value really did already have handles?");
  AddToExistingUseList(&Entry);
  getValPtr()->HasValueHandle = true;

  // If reallocation didn't happen or if this was the first insertion, don't
  // walk the table.
  if (Handles.isPointerIntoBucketsArray(OldBucketPtr) ||
      Handles.size() == 1) {
    return;
  }

  // Okay, reallocation did happen. Fix the Prev Pointers.
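  // Walk every map entry and repoint the head handle's Prev pointer at the
  // entry's new bucket slot.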
  for (DenseMap<Value*, ValueHandleBase*>::iterator I = Handles.begin(),
                                                    E = Handles.end();
       I != E; ++I) {
    assert(I->second && I->first == I->second->getValPtr() &&
           "List invariant broken!");
    I->second->setPrevPtr(&I->second);
  }
}

void ValueHandleBase::RemoveFromUseList() {
  assert(getValPtr() && getValPtr()->HasValueHandle &&
         "Pointer doesn't have a use list!");

  // Unlink this from its use list.
  ValueHandleBase **PrevPtr = getPrevPtr();
  assert(*PrevPtr == this && "List invariant broken");

  *PrevPtr = Next;
  if (Next) {
    assert(Next->getPrevPtr() == &Next && "List invariant broken");
    Next->setPrevPtr(PrevPtr);
    return;
  }

  // If the Next pointer was null, then it is possible that this was the last
  // ValueHandle watching VP. If so, delete its entry from the ValueHandles
  // map.
  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;
  DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
  if (Handles.isPointerIntoBucketsArray(PrevPtr)) {
    Handles.erase(getValPtr());
    getValPtr()->HasValueHandle = false;
  }
}

void ValueHandleBase::ValueIsDeleted(Value *V) {
  assert(V->HasValueHandle && "Should only be called if ValueHandles present");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = V->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[V];
  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that ValueHandles can add
  // and remove themselves from the list without breaking our iteration. This
  // is not really an AssertingVH; we just have to give ValueHandleBase a kind.
  // Note that we deliberately do not support the case when dropping a value
  // handle results in a new value handle being permanently added to the list
  // (as might occur in theory for CallbackVH's): the new value handle will not
  // be processed and the checking code will mete out righteous punishment if
  // the handle is still present once we have finished processing all the other
  // value handles (it is fine to momentarily add then remove a value handle).
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
      break;
    case Weak:
    case WeakTracking:
      // WeakTracking and Weak just go to null, which unlinks them
      // from the list.
      Entry->operator=(nullptr);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH*>(Entry)->deleted();
      break;
    }
  }

  // All callbacks, weak references, and assertingVHs should be dropped by now.
  if (V->HasValueHandle) {
#ifndef NDEBUG // Only in +Asserts mode...
1163 dbgs() << "While deleting: " << *V->getType() << " %" << V->getName() 1164 << "\n"; 1165 if (pImpl->ValueHandles[V]->getKind() == Assert) 1166 llvm_unreachable("An asserting value handle still pointed to this" 1167 " value!"); 1168 1169 #endif 1170 llvm_unreachable("All references to V were not removed?"); 1171 } 1172 } 1173 1174 void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) { 1175 assert(Old->HasValueHandle &&"Should only be called if ValueHandles present"); 1176 assert(Old != New && "Changing value into itself!"); 1177 assert(Old->getType() == New->getType() && 1178 "replaceAllUses of value with new value of different type!"); 1179 1180 // Get the linked list base, which is guaranteed to exist since the 1181 // HasValueHandle flag is set. 1182 LLVMContextImpl *pImpl = Old->getContext().pImpl; 1183 ValueHandleBase *Entry = pImpl->ValueHandles[Old]; 1184 1185 assert(Entry && "Value bit set but no entries exist"); 1186 1187 // We use a local ValueHandleBase as an iterator so that 1188 // ValueHandles can add and remove themselves from the list without 1189 // breaking our iteration. This is not really an AssertingVH; we 1190 // just have to give ValueHandleBase some kind. 1191 for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) { 1192 Iterator.RemoveFromUseList(); 1193 Iterator.AddToExistingUseListAfter(Entry); 1194 assert(Entry->Next == &Iterator && "Loop invariant broken."); 1195 1196 switch (Entry->getKind()) { 1197 case Assert: 1198 case Weak: 1199 // Asserting and Weak handles do not follow RAUW implicitly. 1200 break; 1201 case WeakTracking: 1202 // Weak goes to the new value, which will unlink it from Old's list. 1203 Entry->operator=(New); 1204 break; 1205 case Callback: 1206 // Forward to the subclass's implementation. 1207 static_cast<CallbackVH*>(Entry)->allUsesReplacedWith(New); 1208 break; 1209 } 1210 } 1211 1212 #ifndef NDEBUG 1213 // If any new weak value handles were added while processing the 1214 // list, then complain about it now. 1215 if (Old->HasValueHandle) 1216 for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next) 1217 switch (Entry->getKind()) { 1218 case WeakTracking: 1219 dbgs() << "After RAUW from " << *Old->getType() << " %" 1220 << Old->getName() << " to " << *New->getType() << " %" 1221 << New->getName() << "\n"; 1222 llvm_unreachable( 1223 "A weak tracking value handle still pointed to the old value!\n"); 1224 default: 1225 break; 1226 } 1227 #endif 1228 } 1229 1230 // Pin the vtable to this file. 1231 void CallbackVH::anchor() {} 1232