//===- MemoryBuiltins.cpp - Identify calls to memory builtins ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This family of functions identifies calls to builtin functions that allocate
// or free memory.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/TargetFolder.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/Utils/Local.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "memory-builtins"

enum AllocType : uint8_t {
  OpNewLike          = 1<<0, // allocates; never returns null
  MallocLike         = 1<<1 | OpNewLike, // allocates; may return null
  AlignedAllocLike   = 1<<2, // allocates with alignment; may return null
  CallocLike         = 1<<3, // allocates + bzero
  ReallocLike        = 1<<4, // reallocates
  StrDupLike         = 1<<5,
  MallocOrCallocLike = MallocLike | CallocLike | AlignedAllocLike,
  AllocLike          = MallocOrCallocLike | StrDupLike,
  AnyAlloc           = AllocLike | ReallocLike
};

struct AllocFnsTy {
  AllocType AllocTy;
  unsigned NumParams;
  // First and Second size parameters (or -1 if unused)
  int FstParam, SndParam;
};

// FIXME: certain users need more information. E.g., SimplifyLibCalls needs to
// know which functions are nounwind, noalias, nocapture parameters, etc.
static const std::pair<LibFunc, AllocFnsTy> AllocationFnData[] = {
    {LibFunc_malloc, {MallocLike, 1, 0, -1}},
    {LibFunc_vec_malloc, {MallocLike, 1, 0, -1}},
    {LibFunc_valloc, {MallocLike, 1, 0, -1}},
    {LibFunc_Znwj, {OpNewLike, 1, 0, -1}},                // new(unsigned int)
    {LibFunc_ZnwjRKSt9nothrow_t, {MallocLike, 2, 0, -1}}, // new(unsigned int, nothrow)
    {LibFunc_ZnwjSt11align_val_t, {OpNewLike, 2, 0, -1}}, // new(unsigned int, align_val_t)
    {LibFunc_ZnwjSt11align_val_tRKSt9nothrow_t,           // new(unsigned int, align_val_t, nothrow)
     {MallocLike, 3, 0, -1}},
    {LibFunc_Znwm, {OpNewLike, 1, 0, -1}},                // new(unsigned long)
    {LibFunc_ZnwmRKSt9nothrow_t, {MallocLike, 2, 0, -1}}, // new(unsigned long, nothrow)
    {LibFunc_ZnwmSt11align_val_t, {OpNewLike, 2, 0, -1}}, // new(unsigned long, align_val_t)
    {LibFunc_ZnwmSt11align_val_tRKSt9nothrow_t,           // new(unsigned long, align_val_t, nothrow)
     {MallocLike, 3, 0, -1}},
    {LibFunc_Znaj, {OpNewLike, 1, 0, -1}},                // new[](unsigned int)
    {LibFunc_ZnajRKSt9nothrow_t, {MallocLike, 2, 0, -1}}, // new[](unsigned int, nothrow)
    {LibFunc_ZnajSt11align_val_t, {OpNewLike, 2, 0, -1}}, // new[](unsigned int, align_val_t)
    {LibFunc_ZnajSt11align_val_tRKSt9nothrow_t,           // new[](unsigned int, align_val_t, nothrow)
     {MallocLike, 3, 0, -1}},
    {LibFunc_Znam, {OpNewLike, 1, 0, -1}},                // new[](unsigned long)
    {LibFunc_ZnamRKSt9nothrow_t, {MallocLike, 2, 0, -1}}, // new[](unsigned long, nothrow)
    {LibFunc_ZnamSt11align_val_t, {OpNewLike, 2, 0, -1}}, // new[](unsigned long, align_val_t)
    {LibFunc_ZnamSt11align_val_tRKSt9nothrow_t,           // new[](unsigned long, align_val_t, nothrow)
     {MallocLike, 3, 0, -1}},
    {LibFunc_msvc_new_int, {OpNewLike, 1, 0, -1}},                     // new(unsigned int)
    {LibFunc_msvc_new_int_nothrow, {MallocLike, 2, 0, -1}},            // new(unsigned int, nothrow)
    {LibFunc_msvc_new_longlong, {OpNewLike, 1, 0, -1}},                // new(unsigned long long)
    {LibFunc_msvc_new_longlong_nothrow, {MallocLike, 2, 0, -1}},       // new(unsigned long long, nothrow)
    {LibFunc_msvc_new_array_int, {OpNewLike, 1, 0, -1}},               // new[](unsigned int)
    {LibFunc_msvc_new_array_int_nothrow, {MallocLike, 2, 0, -1}},      // new[](unsigned int, nothrow)
    {LibFunc_msvc_new_array_longlong, {OpNewLike, 1, 0, -1}},          // new[](unsigned long long)
    {LibFunc_msvc_new_array_longlong_nothrow, {MallocLike, 2, 0, -1}}, // new[](unsigned long long, nothrow)
    {LibFunc_aligned_alloc, {AlignedAllocLike, 2, 1, -1}},
    {LibFunc_memalign, {AlignedAllocLike, 2, 1, -1}},
    {LibFunc_calloc, {CallocLike, 2, 0, 1}},
    {LibFunc_vec_calloc, {CallocLike, 2, 0, 1}},
    {LibFunc_realloc, {ReallocLike, 2, 1, -1}},
    {LibFunc_vec_realloc, {ReallocLike, 2, 1, -1}},
    {LibFunc_reallocf, {ReallocLike, 2, 1, -1}},
    {LibFunc_strdup, {StrDupLike, 1, -1, -1}},
    {LibFunc_strndup, {StrDupLike, 2, 1, -1}},
    {LibFunc___kmpc_alloc_shared, {MallocLike, 1, 0, -1}}
    // TODO: Handle "int posix_memalign(void **, size_t, size_t)"
};
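// Worked example (editor's illustration, not part of the upstream table): the
// calloc entry above, {CallocLike, 2, 0, 1}, says the function takes two
// parameters and that the allocation size is parameter 0 times parameter 1.
// A client can therefore reason about a call roughly like this, assuming TLI
// points at a valid TargetLibraryInfo for the enclosing function:
//
//   if (const auto *CI = dyn_cast<CallInst>(V))
//     if (isCallocLikeFn(CI, TLI))
//       ; // CI allocates getArgOperand(0) * getArgOperand(1) bytes, zeroed.
//
// Entries with -1 in a size slot (e.g. malloc's {MallocLike, 1, 0, -1}) use
// only the remaining parameter as the byte count.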
static const Function *getCalledFunction(const Value *V,
                                         bool LookThroughBitCast,
                                         bool &IsNoBuiltin) {
  // Don't care about intrinsics in this case.
  if (isa<IntrinsicInst>(V))
    return nullptr;

  if (LookThroughBitCast)
    V = V->stripPointerCasts();

  const auto *CB = dyn_cast<CallBase>(V);
  if (!CB)
    return nullptr;

  IsNoBuiltin = CB->isNoBuiltin();

  if (const Function *Callee = CB->getCalledFunction())
    return Callee;
  return nullptr;
}

/// Returns the allocation data for the given value if it's either a call to a
/// known allocation function, or a call to a function with the allocsize
/// attribute.
static Optional<AllocFnsTy>
getAllocationDataForFunction(const Function *Callee, AllocType AllocTy,
                             const TargetLibraryInfo *TLI) {
  // Make sure that the function is available.
  LibFunc TLIFn;
  if (!TLI || !TLI->getLibFunc(*Callee, TLIFn) || !TLI->has(TLIFn))
    return None;

  const auto *Iter = find_if(
      AllocationFnData, [TLIFn](const std::pair<LibFunc, AllocFnsTy> &P) {
        return P.first == TLIFn;
      });

  if (Iter == std::end(AllocationFnData))
    return None;

  const AllocFnsTy *FnData = &Iter->second;
  if ((FnData->AllocTy & AllocTy) != FnData->AllocTy)
    return None;

  // Check function prototype.
  int FstParam = FnData->FstParam;
  int SndParam = FnData->SndParam;
  FunctionType *FTy = Callee->getFunctionType();

  if (FTy->getReturnType() == Type::getInt8PtrTy(FTy->getContext()) &&
      FTy->getNumParams() == FnData->NumParams &&
      (FstParam < 0 ||
       (FTy->getParamType(FstParam)->isIntegerTy(32) ||
        FTy->getParamType(FstParam)->isIntegerTy(64))) &&
      (SndParam < 0 ||
       FTy->getParamType(SndParam)->isIntegerTy(32) ||
       FTy->getParamType(SndParam)->isIntegerTy(64)))
    return *FnData;
  return None;
}

static Optional<AllocFnsTy>
getAllocationData(const Value *V, AllocType AllocTy,
                  const TargetLibraryInfo *TLI,
                  bool LookThroughBitCast = false) {
  bool IsNoBuiltinCall;
  if (const Function *Callee =
          getCalledFunction(V, LookThroughBitCast, IsNoBuiltinCall))
    if (!IsNoBuiltinCall)
      return getAllocationDataForFunction(Callee, AllocTy, TLI);
  return None;
}

static Optional<AllocFnsTy>
getAllocationData(const Value *V, AllocType AllocTy,
                  function_ref<const TargetLibraryInfo &(Function &)> GetTLI,
                  bool LookThroughBitCast = false) {
  bool IsNoBuiltinCall;
  if (const Function *Callee =
          getCalledFunction(V, LookThroughBitCast, IsNoBuiltinCall))
    if (!IsNoBuiltinCall)
      return getAllocationDataForFunction(
          Callee, AllocTy, &GetTLI(const_cast<Function &>(*Callee)));
  return None;
}

static Optional<AllocFnsTy> getAllocationSize(const Value *V,
                                              const TargetLibraryInfo *TLI) {
  bool IsNoBuiltinCall;
  const Function *Callee =
      getCalledFunction(V, /*LookThroughBitCast=*/false, IsNoBuiltinCall);
  if (!Callee)
    return None;

  // Prefer to use existing information over allocsize. This will give us an
  // accurate AllocTy.
  if (!IsNoBuiltinCall)
    if (Optional<AllocFnsTy> Data =
            getAllocationDataForFunction(Callee, AnyAlloc, TLI))
      return Data;

  Attribute Attr = Callee->getFnAttribute(Attribute::AllocSize);
  if (Attr == Attribute())
    return None;

  std::pair<unsigned, Optional<unsigned>> Args = Attr.getAllocSizeArgs();

  AllocFnsTy Result;
  // Because allocsize only tells us how many bytes are allocated, we're not
  // really allowed to assume anything, so we use MallocLike.
  Result.AllocTy = MallocLike;
  Result.NumParams = Callee->getNumOperands();
  Result.FstParam = Args.first;
  Result.SndParam = Args.second.getValueOr(-1);
  return Result;
}
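// Illustrative note (added by the editor): for functions that are not in the
// table above but carry the allocsize attribute, e.g. a hypothetical
//
//   declare i8* @my_alloc(i32 %count, i32 %elt_size) allocsize(0, 1)
//
// getAllocationSize() falls back to Attribute::AllocSize and fills in
// FstParam = 0 and SndParam = 1 with a conservative MallocLike kind, so the
// object size is again computed as arg0 * arg1. @my_alloc is a made-up name
// used only for this sketch.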
static bool hasNoAliasAttr(const Value *V, bool LookThroughBitCast) {
  const auto *CB =
      dyn_cast<CallBase>(LookThroughBitCast ? V->stripPointerCasts() : V);
  return CB && CB->hasRetAttr(Attribute::NoAlias);
}

/// Tests if a value is a call or invoke to a library function that
/// allocates or reallocates memory (either malloc, calloc, realloc, or strdup
/// like).
bool llvm::isAllocationFn(const Value *V, const TargetLibraryInfo *TLI,
                          bool LookThroughBitCast) {
  return getAllocationData(V, AnyAlloc, TLI, LookThroughBitCast).hasValue();
}
bool llvm::isAllocationFn(
    const Value *V, function_ref<const TargetLibraryInfo &(Function &)> GetTLI,
    bool LookThroughBitCast) {
  return getAllocationData(V, AnyAlloc, GetTLI, LookThroughBitCast).hasValue();
}

/// Tests if a value is a call or invoke to a function that returns a
/// NoAlias pointer (including malloc/calloc/realloc/strdup-like functions).
bool llvm::isNoAliasFn(const Value *V, const TargetLibraryInfo *TLI,
                       bool LookThroughBitCast) {
  // it's safe to consider realloc as noalias since accessing the original
  // pointer is undefined behavior
  return isAllocationFn(V, TLI, LookThroughBitCast) ||
         hasNoAliasAttr(V, LookThroughBitCast);
}

/// Tests if a value is a call or invoke to a library function that
/// allocates uninitialized memory (such as malloc).
bool llvm::isMallocLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                          bool LookThroughBitCast) {
  return getAllocationData(V, MallocLike, TLI, LookThroughBitCast).hasValue();
}
bool llvm::isMallocLikeFn(
    const Value *V, function_ref<const TargetLibraryInfo &(Function &)> GetTLI,
    bool LookThroughBitCast) {
  return getAllocationData(V, MallocLike, GetTLI, LookThroughBitCast)
      .hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates uninitialized memory with alignment (such as aligned_alloc).
bool llvm::isAlignedAllocLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                                bool LookThroughBitCast) {
  return getAllocationData(V, AlignedAllocLike, TLI, LookThroughBitCast)
      .hasValue();
}
bool llvm::isAlignedAllocLikeFn(
    const Value *V, function_ref<const TargetLibraryInfo &(Function &)> GetTLI,
    bool LookThroughBitCast) {
  return getAllocationData(V, AlignedAllocLike, GetTLI, LookThroughBitCast)
      .hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates zero-filled memory (such as calloc).
bool llvm::isCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                          bool LookThroughBitCast) {
  return getAllocationData(V, CallocLike, TLI, LookThroughBitCast).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory similar to malloc or calloc.
bool llvm::isMallocOrCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                                  bool LookThroughBitCast) {
  return getAllocationData(V, MallocOrCallocLike, TLI, LookThroughBitCast)
      .hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory (either malloc, calloc, or strdup like).
bool llvm::isAllocLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                         bool LookThroughBitCast) {
  return getAllocationData(V, AllocLike, TLI, LookThroughBitCast).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// reallocates memory (e.g., realloc).
bool llvm::isReallocLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                           bool LookThroughBitCast) {
  return getAllocationData(V, ReallocLike, TLI, LookThroughBitCast).hasValue();
}

/// Tests if a function is a library function that reallocates memory
/// (e.g., realloc).
bool llvm::isReallocLikeFn(const Function *F, const TargetLibraryInfo *TLI) {
  return getAllocationDataForFunction(F, ReallocLike, TLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory and throws if an allocation failed (e.g., new).
bool llvm::isOpNewLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                         bool LookThroughBitCast) {
  return getAllocationData(V, OpNewLike, TLI, LookThroughBitCast).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory (strdup, strndup).
bool llvm::isStrdupLikeFn(const Value *V, const TargetLibraryInfo *TLI,
                          bool LookThroughBitCast) {
  return getAllocationData(V, StrDupLike, TLI, LookThroughBitCast).hasValue();
}

/// extractMallocCall - Returns the corresponding CallInst if the instruction
/// is a malloc call. Since CallInst::CreateMalloc() only creates calls, we
/// ignore InvokeInst here.
const CallInst *llvm::extractMallocCall(
    const Value *I,
    function_ref<const TargetLibraryInfo &(Function &)> GetTLI) {
  return isMallocLikeFn(I, GetTLI) ? dyn_cast<CallInst>(I) : nullptr;
}

static Value *computeArraySize(const CallInst *CI, const DataLayout &DL,
                               const TargetLibraryInfo *TLI,
                               bool LookThroughSExt = false) {
  if (!CI)
    return nullptr;

  // The size of the malloc's result type must be known to determine array size.
  Type *T = getMallocAllocatedType(CI, TLI);
  if (!T || !T->isSized())
    return nullptr;

  unsigned ElementSize = DL.getTypeAllocSize(T);
  if (StructType *ST = dyn_cast<StructType>(T))
    ElementSize = DL.getStructLayout(ST)->getSizeInBytes();

  // If malloc call's arg can be determined to be a multiple of ElementSize,
  // return the multiple. Otherwise, return NULL.
  Value *MallocArg = CI->getArgOperand(0);
  Value *Multiple = nullptr;
  if (ComputeMultiple(MallocArg, ElementSize, Multiple, LookThroughSExt))
    return Multiple;

  return nullptr;
}

/// getMallocType - Returns the PointerType resulting from the malloc call.
/// The PointerType depends on the number of bitcast uses of the malloc call:
///   0: PointerType is the call's return type.
///   1: PointerType is the bitcast's result type.
///  >1: Unique PointerType cannot be determined, return NULL.
PointerType *llvm::getMallocType(const CallInst *CI,
                                 const TargetLibraryInfo *TLI) {
  assert(isMallocLikeFn(CI, TLI) && "getMallocType and not malloc call");

  PointerType *MallocType = nullptr;
  unsigned NumOfBitCastUses = 0;

  // Determine if CallInst has a bitcast use.
  for (const User *U : CI->users())
    if (const BitCastInst *BCI = dyn_cast<BitCastInst>(U)) {
      MallocType = cast<PointerType>(BCI->getDestTy());
      NumOfBitCastUses++;
    }

  // Malloc call has 1 bitcast use, so type is the bitcast's destination type.
  if (NumOfBitCastUses == 1)
    return MallocType;

  // Malloc call was not bitcast, so type is the malloc function's return type.
  if (NumOfBitCastUses == 0)
    return cast<PointerType>(CI->getType());

  // Type could not be determined.
  return nullptr;
}

/// getMallocAllocatedType - Returns the Type allocated by malloc call.
/// The Type depends on the number of bitcast uses of the malloc call:
///   0: PointerType is the malloc call's return type.
///   1: PointerType is the bitcast's result type.
///  >1: Unique PointerType cannot be determined, return NULL.
Type *llvm::getMallocAllocatedType(const CallInst *CI,
                                   const TargetLibraryInfo *TLI) {
  PointerType *PT = getMallocType(CI, TLI);
  return PT ? PT->getElementType() : nullptr;
}

/// getMallocArraySize - Returns the array size of a malloc call. If the
/// argument passed to malloc is a multiple of the size of the malloced type,
/// then return that multiple. For non-array mallocs, the multiple is
/// constant 1. Otherwise, return NULL for mallocs whose array size cannot be
/// determined.
Value *llvm::getMallocArraySize(CallInst *CI, const DataLayout &DL,
                                const TargetLibraryInfo *TLI,
                                bool LookThroughSExt) {
  assert(isMallocLikeFn(CI, TLI) && "getMallocArraySize and not malloc call");
  return computeArraySize(CI, DL, TLI, LookThroughSExt);
}

/// extractCallocCall - Returns the corresponding CallInst if the instruction
/// is a calloc call.
const CallInst *llvm::extractCallocCall(const Value *I,
                                        const TargetLibraryInfo *TLI) {
  return isCallocLikeFn(I, TLI) ? cast<CallInst>(I) : nullptr;
}

/// isLibFreeFunction - Returns true if the function is a builtin free()
bool llvm::isLibFreeFunction(const Function *F, const LibFunc TLIFn) {
  unsigned ExpectedNumParams;
  if (TLIFn == LibFunc_free ||
      TLIFn == LibFunc_ZdlPv ||                   // operator delete(void*)
      TLIFn == LibFunc_ZdaPv ||                   // operator delete[](void*)
      TLIFn == LibFunc_msvc_delete_ptr32 ||       // operator delete(void*)
      TLIFn == LibFunc_msvc_delete_ptr64 ||       // operator delete(void*)
      TLIFn == LibFunc_msvc_delete_array_ptr32 || // operator delete[](void*)
      TLIFn == LibFunc_msvc_delete_array_ptr64)   // operator delete[](void*)
    ExpectedNumParams = 1;
  else if (TLIFn == LibFunc_ZdlPvj ||               // delete(void*, uint)
           TLIFn == LibFunc_ZdlPvm ||               // delete(void*, ulong)
           TLIFn == LibFunc_ZdlPvRKSt9nothrow_t ||  // delete(void*, nothrow)
           TLIFn == LibFunc_ZdlPvSt11align_val_t || // delete(void*, align_val_t)
           TLIFn == LibFunc_ZdaPvj ||               // delete[](void*, uint)
           TLIFn == LibFunc_ZdaPvm ||               // delete[](void*, ulong)
           TLIFn == LibFunc_ZdaPvRKSt9nothrow_t ||  // delete[](void*, nothrow)
           TLIFn == LibFunc_ZdaPvSt11align_val_t || // delete[](void*, align_val_t)
           TLIFn == LibFunc_msvc_delete_ptr32_int ||            // delete(void*, uint)
           TLIFn == LibFunc_msvc_delete_ptr64_longlong ||       // delete(void*, ulonglong)
           TLIFn == LibFunc_msvc_delete_ptr32_nothrow ||        // delete(void*, nothrow)
           TLIFn == LibFunc_msvc_delete_ptr64_nothrow ||        // delete(void*, nothrow)
           TLIFn == LibFunc_msvc_delete_array_ptr32_int ||      // delete[](void*, uint)
           TLIFn == LibFunc_msvc_delete_array_ptr64_longlong || // delete[](void*, ulonglong)
           TLIFn == LibFunc_msvc_delete_array_ptr32_nothrow ||  // delete[](void*, nothrow)
           TLIFn == LibFunc_msvc_delete_array_ptr64_nothrow ||  // delete[](void*, nothrow)
           TLIFn == LibFunc___kmpc_free_shared) // OpenMP Offloading RTL free
    ExpectedNumParams = 2;
  else if (TLIFn == LibFunc_ZdaPvSt11align_val_tRKSt9nothrow_t || // delete[](void*, align_val_t, nothrow)
           TLIFn == LibFunc_ZdlPvSt11align_val_tRKSt9nothrow_t || // delete(void*, align_val_t, nothrow)
           TLIFn == LibFunc_ZdlPvjSt11align_val_t || // delete(void*, unsigned int, align_val_t)
           TLIFn == LibFunc_ZdlPvmSt11align_val_t || // delete(void*, unsigned long, align_val_t)
           TLIFn == LibFunc_ZdaPvjSt11align_val_t || // delete[](void*, unsigned int, align_val_t)
           TLIFn == LibFunc_ZdaPvmSt11align_val_t)   // delete[](void*, unsigned long, align_val_t)
    ExpectedNumParams = 3;
  else
    return false;

  // Check free prototype.
  // FIXME: workaround for PR5130, this will be obsolete when a nobuiltin
  // attribute will exist.
  FunctionType *FTy = F->getFunctionType();
  if (!FTy->getReturnType()->isVoidTy())
    return false;
  if (FTy->getNumParams() != ExpectedNumParams)
    return false;
  if (FTy->getParamType(0) != Type::getInt8PtrTy(F->getContext()))
    return false;

  return true;
}

/// isFreeCall - Returns non-null if the value is a call to the builtin free()
const CallInst *llvm::isFreeCall(const Value *I, const TargetLibraryInfo *TLI) {
  bool IsNoBuiltinCall;
  const Function *Callee =
      getCalledFunction(I, /*LookThroughBitCast=*/false, IsNoBuiltinCall);
  if (Callee == nullptr || IsNoBuiltinCall)
    return nullptr;

  LibFunc TLIFn;
  if (!TLI || !TLI->getLibFunc(*Callee, TLIFn) || !TLI->has(TLIFn))
    return nullptr;

  return isLibFreeFunction(Callee, TLIFn) ? dyn_cast<CallInst>(I) : nullptr;
}
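// Example use (editor's sketch, not from the original file): isFreeCall()
// gives clients a typed handle on the deallocation call, which makes checks
// like "is this a free of a null pointer?" straightforward. TLI is assumed to
// describe the current target's runtime library:
//
//   if (const CallInst *FC = isFreeCall(I, TLI))
//     if (isa<ConstantPointerNull>(FC->getArgOperand(0)))
//       ; // free(NULL) is a no-op and is safe to delete.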
//===----------------------------------------------------------------------===//
//  Utility functions to compute size of objects.
//
static APInt getSizeWithOverflow(const SizeOffsetType &Data) {
  if (Data.second.isNegative() || Data.first.ult(Data.second))
    return APInt(Data.first.getBitWidth(), 0);
  return Data.first - Data.second;
}

/// Compute the size of the object pointed by Ptr. Returns true and the
/// object size in Size if successful, and false otherwise.
/// If RoundToAlign is true, then Size is rounded up to the alignment of
/// allocas, byval arguments, and global variables.
bool llvm::getObjectSize(const Value *Ptr, uint64_t &Size, const DataLayout &DL,
                         const TargetLibraryInfo *TLI, ObjectSizeOpts Opts) {
  ObjectSizeOffsetVisitor Visitor(DL, TLI, Ptr->getContext(), Opts);
  SizeOffsetType Data = Visitor.compute(const_cast<Value *>(Ptr));
  if (!Visitor.bothKnown(Data))
    return false;

  Size = getSizeWithOverflow(Data).getZExtValue();
  return true;
}

Value *llvm::lowerObjectSizeCall(IntrinsicInst *ObjectSize,
                                 const DataLayout &DL,
                                 const TargetLibraryInfo *TLI,
                                 bool MustSucceed) {
  assert(ObjectSize->getIntrinsicID() == Intrinsic::objectsize &&
         "ObjectSize must be a call to llvm.objectsize!");

  bool MaxVal = cast<ConstantInt>(ObjectSize->getArgOperand(1))->isZero();
  ObjectSizeOpts EvalOptions;
  // Unless we have to fold this to something, try to be as accurate as
  // possible.
  if (MustSucceed)
    EvalOptions.EvalMode =
        MaxVal ? ObjectSizeOpts::Mode::Max : ObjectSizeOpts::Mode::Min;
  else
    EvalOptions.EvalMode = ObjectSizeOpts::Mode::Exact;

  EvalOptions.NullIsUnknownSize =
      cast<ConstantInt>(ObjectSize->getArgOperand(2))->isOne();

  auto *ResultType = cast<IntegerType>(ObjectSize->getType());
  bool StaticOnly = cast<ConstantInt>(ObjectSize->getArgOperand(3))->isZero();
  if (StaticOnly) {
    // FIXME: Does it make sense to just return a failure value if the size
    // won't fit in the output and `!MustSucceed`?
    uint64_t Size;
    if (getObjectSize(ObjectSize->getArgOperand(0), Size, DL, TLI,
                      EvalOptions) &&
        isUIntN(ResultType->getBitWidth(), Size))
      return ConstantInt::get(ResultType, Size);
  } else {
    LLVMContext &Ctx = ObjectSize->getFunction()->getContext();
    ObjectSizeOffsetEvaluator Eval(DL, TLI, Ctx, EvalOptions);
    SizeOffsetEvalType SizeOffsetPair =
        Eval.compute(ObjectSize->getArgOperand(0));

    if (SizeOffsetPair != ObjectSizeOffsetEvaluator::unknown()) {
      IRBuilder<TargetFolder> Builder(Ctx, TargetFolder(DL));
      Builder.SetInsertPoint(ObjectSize);

      // If we're past the end of the object, then we can always access
      // exactly 0 bytes.
      Value *ResultSize =
          Builder.CreateSub(SizeOffsetPair.first, SizeOffsetPair.second);
      Value *UseZero =
          Builder.CreateICmpULT(SizeOffsetPair.first, SizeOffsetPair.second);
      ResultSize = Builder.CreateZExtOrTrunc(ResultSize, ResultType);
      Value *Ret = Builder.CreateSelect(
          UseZero, ConstantInt::get(ResultType, 0), ResultSize);

      // The non-constant size expression cannot evaluate to -1.
      if (!isa<Constant>(SizeOffsetPair.first) ||
          !isa<Constant>(SizeOffsetPair.second))
        Builder.CreateAssumption(
            Builder.CreateICmpNE(Ret, ConstantInt::get(ResultType, -1)));

      return Ret;
    }
  }

  if (!MustSucceed)
    return nullptr;

  return ConstantInt::get(ResultType, MaxVal ? -1ULL : 0);
}
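// Illustrative behavior (editor's note): when MustSucceed is true and nothing
// is known statically, a call such as
//
//   %sz = call i64 @llvm.objectsize.i64.p0i8(i8* %p, i1 false, i1 true, i1 false)
//
// folds to the "maximum size" sentinel -1, because the min flag (operand 1)
// is false; with the min flag set it would fold to 0 instead. When the size
// can be computed, the folded constant (or, in the dynamic case, the
// generated size expression) is returned.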
STATISTIC(ObjectVisitorArgument,
          "Number of arguments with unsolved size and offset");
STATISTIC(ObjectVisitorLoad,
          "Number of load instructions with unsolved size and offset");

APInt ObjectSizeOffsetVisitor::align(APInt Size, uint64_t Alignment) {
  if (Options.RoundToAlign && Alignment)
    return APInt(IntTyBits, alignTo(Size.getZExtValue(), Align(Alignment)));
  return Size;
}

ObjectSizeOffsetVisitor::ObjectSizeOffsetVisitor(const DataLayout &DL,
                                                 const TargetLibraryInfo *TLI,
                                                 LLVMContext &Context,
                                                 ObjectSizeOpts Options)
    : DL(DL), TLI(TLI), Options(Options) {
  // Pointer size must be rechecked for each object visited since it could have
  // a different address space.
}

SizeOffsetType ObjectSizeOffsetVisitor::compute(Value *V) {
  IntTyBits = DL.getIndexTypeSizeInBits(V->getType());
  Zero = APInt::getNullValue(IntTyBits);

  V = V->stripPointerCasts();
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    // If we have already seen this instruction, bail out. Cycles can happen in
    // unreachable code after constant propagation.
    if (!SeenInsts.insert(I).second)
      return unknown();

    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V))
      return visitGEPOperator(*GEP);
    return visit(*I);
  }
  if (Argument *A = dyn_cast<Argument>(V))
    return visitArgument(*A);
  if (ConstantPointerNull *P = dyn_cast<ConstantPointerNull>(V))
    return visitConstantPointerNull(*P);
  if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V))
    return visitGlobalAlias(*GA);
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    return visitGlobalVariable(*GV);
  if (UndefValue *UV = dyn_cast<UndefValue>(V))
    return visitUndefValue(*UV);
  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (CE->getOpcode() == Instruction::IntToPtr)
      return unknown(); // clueless
    if (CE->getOpcode() == Instruction::GetElementPtr)
      return visitGEPOperator(cast<GEPOperator>(*CE));
  }

  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor::compute() unhandled value: "
                    << *V << '\n');
  return unknown();
}

/// When we're compiling N-bit code, and the user uses parameters that are
/// greater than N bits (e.g. uint64_t on a 32-bit build), we can run into
/// trouble with APInt size issues. This function handles resizing + overflow
/// checks for us. Check and zext or trunc \p I depending on IntTyBits and
/// I's value.
bool ObjectSizeOffsetVisitor::CheckedZextOrTrunc(APInt &I) {
  // More bits than we can handle. Checking the bit width isn't necessary, but
  // it's faster than checking active bits, and should give `false` in the
  // vast majority of cases.
  if (I.getBitWidth() > IntTyBits && I.getActiveBits() > IntTyBits)
    return false;
  if (I.getBitWidth() != IntTyBits)
    I = I.zextOrTrunc(IntTyBits);
  return true;
}

SizeOffsetType ObjectSizeOffsetVisitor::visitAllocaInst(AllocaInst &I) {
  if (!I.getAllocatedType()->isSized())
    return unknown();

  if (isa<ScalableVectorType>(I.getAllocatedType()))
    return unknown();

  APInt Size(IntTyBits, DL.getTypeAllocSize(I.getAllocatedType()));
  if (!I.isArrayAllocation())
    return std::make_pair(align(Size, I.getAlignment()), Zero);

  Value *ArraySize = I.getArraySize();
  if (const ConstantInt *C = dyn_cast<ConstantInt>(ArraySize)) {
    APInt NumElems = C->getValue();
    if (!CheckedZextOrTrunc(NumElems))
      return unknown();

    bool Overflow;
    Size = Size.umul_ov(NumElems, Overflow);
    return Overflow ? unknown()
                    : std::make_pair(align(Size, I.getAlignment()), Zero);
  }
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitArgument(Argument &A) {
  Type *MemoryTy = A.getPointeeInMemoryValueType();
  // No interprocedural analysis is done at the moment.
  if (!MemoryTy || !MemoryTy->isSized()) {
    ++ObjectVisitorArgument;
    return unknown();
  }

  APInt Size(IntTyBits, DL.getTypeAllocSize(MemoryTy));
  return std::make_pair(align(Size, A.getParamAlignment()), Zero);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitCallBase(CallBase &CB) {
  Optional<AllocFnsTy> FnData = getAllocationSize(&CB, TLI);
  if (!FnData)
    return unknown();

  // Handle strdup-like functions separately.
  if (FnData->AllocTy == StrDupLike) {
    APInt Size(IntTyBits, GetStringLength(CB.getArgOperand(0)));
    if (!Size)
      return unknown();

    // Strndup limits strlen.
    if (FnData->FstParam > 0) {
      ConstantInt *Arg =
          dyn_cast<ConstantInt>(CB.getArgOperand(FnData->FstParam));
      if (!Arg)
        return unknown();

      APInt MaxSize = Arg->getValue().zextOrSelf(IntTyBits);
      if (Size.ugt(MaxSize))
        Size = MaxSize + 1;
    }
    return std::make_pair(Size, Zero);
  }

  ConstantInt *Arg = dyn_cast<ConstantInt>(CB.getArgOperand(FnData->FstParam));
  if (!Arg)
    return unknown();

  APInt Size = Arg->getValue();
  if (!CheckedZextOrTrunc(Size))
    return unknown();

  // Size is determined by just 1 parameter.
  if (FnData->SndParam < 0)
    return std::make_pair(Size, Zero);

  Arg = dyn_cast<ConstantInt>(CB.getArgOperand(FnData->SndParam));
  if (!Arg)
    return unknown();

  APInt NumElems = Arg->getValue();
  if (!CheckedZextOrTrunc(NumElems))
    return unknown();

  bool Overflow;
  Size = Size.umul_ov(NumElems, Overflow);
  return Overflow ? unknown() : std::make_pair(Size, Zero);

  // TODO: handle more standard functions (+ wchar cousins):
  // - strdup / strndup
  // - strcpy / strncpy
  // - strcat / strncat
  // - memcpy / memmove
  // - memset
}

SizeOffsetType
ObjectSizeOffsetVisitor::visitConstantPointerNull(ConstantPointerNull &CPN) {
  // If null is unknown, there's nothing we can do. Additionally, non-zero
  // address spaces can make use of null, so we don't presume to know anything
  // about that.
  //
  // TODO: How should this work with address space casts? We currently just drop
  // them on the floor, but it's unclear what we should do when a NULL from
  // addrspace(1) gets casted to addrspace(0) (or vice-versa).
  if (Options.NullIsUnknownSize || CPN.getType()->getAddressSpace())
    return unknown();
  return std::make_pair(Zero, Zero);
}

SizeOffsetType
ObjectSizeOffsetVisitor::visitExtractElementInst(ExtractElementInst &) {
  return unknown();
}

SizeOffsetType
ObjectSizeOffsetVisitor::visitExtractValueInst(ExtractValueInst &) {
  // Easy cases were already folded by previous passes.
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGEPOperator(GEPOperator &GEP) {
  SizeOffsetType PtrData = compute(GEP.getPointerOperand());
  APInt Offset(DL.getIndexTypeSizeInBits(GEP.getPointerOperand()->getType()),
               0);
  if (!bothKnown(PtrData) || !GEP.accumulateConstantOffset(DL, Offset))
    return unknown();

  return std::make_pair(PtrData.first, PtrData.second + Offset);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGlobalAlias(GlobalAlias &GA) {
  if (GA.isInterposable())
    return unknown();
  return compute(GA.getAliasee());
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGlobalVariable(GlobalVariable &GV) {
  if (!GV.hasDefinitiveInitializer())
    return unknown();

  APInt Size(IntTyBits, DL.getTypeAllocSize(GV.getValueType()));
  return std::make_pair(align(Size, GV.getAlignment()), Zero);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitIntToPtrInst(IntToPtrInst &) {
  // clueless
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitLoadInst(LoadInst &) {
  ++ObjectVisitorLoad;
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitPHINode(PHINode &) {
  // too complex to analyze statically.
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitSelectInst(SelectInst &I) {
  SizeOffsetType TrueSide = compute(I.getTrueValue());
  SizeOffsetType FalseSide = compute(I.getFalseValue());
  if (bothKnown(TrueSide) && bothKnown(FalseSide)) {
    if (TrueSide == FalseSide) {
      return TrueSide;
    }

    APInt TrueResult = getSizeWithOverflow(TrueSide);
    APInt FalseResult = getSizeWithOverflow(FalseSide);

    if (TrueResult == FalseResult) {
      return TrueSide;
    }
    if (Options.EvalMode == ObjectSizeOpts::Mode::Min) {
      if (TrueResult.slt(FalseResult))
        return TrueSide;
      return FalseSide;
    }
    if (Options.EvalMode == ObjectSizeOpts::Mode::Max) {
      if (TrueResult.sgt(FalseResult))
        return TrueSide;
      return FalseSide;
    }
  }
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitUndefValue(UndefValue &) {
  return std::make_pair(Zero, Zero);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitInstruction(Instruction &I) {
  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor unknown instruction:" << I
                    << '\n');
  return unknown();
}

ObjectSizeOffsetEvaluator::ObjectSizeOffsetEvaluator(
    const DataLayout &DL, const TargetLibraryInfo *TLI, LLVMContext &Context,
    ObjectSizeOpts EvalOpts)
    : DL(DL), TLI(TLI), Context(Context),
      Builder(Context, TargetFolder(DL),
              IRBuilderCallbackInserter(
                  [&](Instruction *I) { InsertedInstructions.insert(I); })),
      EvalOpts(EvalOpts) {
  // IntTy and Zero must be set for each compute() since the address space may
  // be different for later objects.
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::compute(Value *V) {
  // XXX - Are vectors of pointers possible here?
  IntTy = cast<IntegerType>(DL.getIndexType(V->getType()));
  Zero = ConstantInt::get(IntTy, 0);

  SizeOffsetEvalType Result = compute_(V);

  if (!bothKnown(Result)) {
    // Erase everything that was computed in this iteration from the cache, so
    // that no dangling references are left behind. We could be a bit smarter if
    // we kept a dependency graph. It's probably not worth the complexity.
    for (const Value *SeenVal : SeenVals) {
      CacheMapTy::iterator CacheIt = CacheMap.find(SeenVal);
      // non-computable results can be safely cached
      if (CacheIt != CacheMap.end() && anyKnown(CacheIt->second))
        CacheMap.erase(CacheIt);
    }

    // Erase any instructions we inserted as part of the traversal.
    for (Instruction *I : InsertedInstructions) {
      I->replaceAllUsesWith(UndefValue::get(I->getType()));
      I->eraseFromParent();
    }
  }

  SeenVals.clear();
  InsertedInstructions.clear();
  return Result;
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::compute_(Value *V) {
  ObjectSizeOffsetVisitor Visitor(DL, TLI, Context, EvalOpts);
  SizeOffsetType Const = Visitor.compute(V);
  if (Visitor.bothKnown(Const))
    return std::make_pair(ConstantInt::get(Context, Const.first),
                          ConstantInt::get(Context, Const.second));

  V = V->stripPointerCasts();

  // Check cache.
  CacheMapTy::iterator CacheIt = CacheMap.find(V);
  if (CacheIt != CacheMap.end())
    return CacheIt->second;

  // Always generate code immediately before the instruction being
  // processed, so that the generated code dominates the same BBs.
  BuilderTy::InsertPointGuard Guard(Builder);
  if (Instruction *I = dyn_cast<Instruction>(V))
    Builder.SetInsertPoint(I);

  // Now compute the size and offset.
  SizeOffsetEvalType Result;

  // Record the pointers that were handled in this run, so that they can be
  // cleaned later if something fails. We also use this set to break cycles that
  // can occur in dead code.
  if (!SeenVals.insert(V).second) {
    Result = unknown();
  } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    Result = visitGEPOperator(*GEP);
  } else if (Instruction *I = dyn_cast<Instruction>(V)) {
    Result = visit(*I);
  } else if (isa<Argument>(V) ||
             (isa<ConstantExpr>(V) &&
              cast<ConstantExpr>(V)->getOpcode() == Instruction::IntToPtr) ||
             isa<GlobalAlias>(V) || isa<GlobalVariable>(V)) {
    // Ignore values where we cannot do more than ObjectSizeVisitor.
    Result = unknown();
  } else {
    LLVM_DEBUG(
        dbgs() << "ObjectSizeOffsetEvaluator::compute() unhandled value: " << *V
               << '\n');
    Result = unknown();
  }

  // Don't reuse CacheIt since it may be invalid at this point.
  CacheMap[V] = Result;
  return Result;
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitAllocaInst(AllocaInst &I) {
  if (!I.getAllocatedType()->isSized())
    return unknown();

  // must be a VLA
  assert(I.isArrayAllocation());

  // If needed, adjust the alloca's operand size to match the pointer size.
  // Subsequent math operations expect the types to match.
  Value *ArraySize = Builder.CreateZExtOrTrunc(
      I.getArraySize(), DL.getIntPtrType(I.getContext()));
  assert(ArraySize->getType() == Zero->getType() &&
         "Expected zero constant to have pointer type");

  Value *Size = ConstantInt::get(ArraySize->getType(),
                                 DL.getTypeAllocSize(I.getAllocatedType()));
  Size = Builder.CreateMul(Size, ArraySize);
  return std::make_pair(Size, Zero);
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitCallBase(CallBase &CB) {
  Optional<AllocFnsTy> FnData = getAllocationSize(&CB, TLI);
  if (!FnData)
    return unknown();

  // Handle strdup-like functions separately.
  if (FnData->AllocTy == StrDupLike) {
    // TODO
    return unknown();
  }

  Value *FirstArg = CB.getArgOperand(FnData->FstParam);
  FirstArg = Builder.CreateZExtOrTrunc(FirstArg, IntTy);
  if (FnData->SndParam < 0)
    return std::make_pair(FirstArg, Zero);

  Value *SecondArg = CB.getArgOperand(FnData->SndParam);
  SecondArg = Builder.CreateZExtOrTrunc(SecondArg, IntTy);
  Value *Size = Builder.CreateMul(FirstArg, SecondArg);
  return std::make_pair(Size, Zero);

  // TODO: handle more standard functions (+ wchar cousins):
  // - strdup / strndup
  // - strcpy / strncpy
  // - strcat / strncat
  // - memcpy / memmove
  // - memset
}

SizeOffsetEvalType
ObjectSizeOffsetEvaluator::visitExtractElementInst(ExtractElementInst &) {
  return unknown();
}

SizeOffsetEvalType
ObjectSizeOffsetEvaluator::visitExtractValueInst(ExtractValueInst &) {
  return unknown();
}

SizeOffsetEvalType
ObjectSizeOffsetEvaluator::visitGEPOperator(GEPOperator &GEP) {
  SizeOffsetEvalType PtrData = compute_(GEP.getPointerOperand());
  if (!bothKnown(PtrData))
    return unknown();

  Value *Offset = EmitGEPOffset(&Builder, DL, &GEP, /*NoAssumptions=*/true);
  Offset = Builder.CreateAdd(PtrData.second, Offset);
  return std::make_pair(PtrData.first, Offset);
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitIntToPtrInst(IntToPtrInst &) {
  // clueless
  return unknown();
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitLoadInst(LoadInst &) {
  return unknown();
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitPHINode(PHINode &PHI) {
  // Create 2 PHIs: one for size and another for offset.
  PHINode *SizePHI = Builder.CreatePHI(IntTy, PHI.getNumIncomingValues());
  PHINode *OffsetPHI = Builder.CreatePHI(IntTy, PHI.getNumIncomingValues());

  // Insert right away in the cache to handle recursive PHIs.
  CacheMap[&PHI] = std::make_pair(SizePHI, OffsetPHI);

  // Compute offset/size for each PHI incoming pointer.
  for (unsigned i = 0, e = PHI.getNumIncomingValues(); i != e; ++i) {
    Builder.SetInsertPoint(&*PHI.getIncomingBlock(i)->getFirstInsertionPt());
    SizeOffsetEvalType EdgeData = compute_(PHI.getIncomingValue(i));

    if (!bothKnown(EdgeData)) {
      OffsetPHI->replaceAllUsesWith(UndefValue::get(IntTy));
      OffsetPHI->eraseFromParent();
      InsertedInstructions.erase(OffsetPHI);
      SizePHI->replaceAllUsesWith(UndefValue::get(IntTy));
      SizePHI->eraseFromParent();
      InsertedInstructions.erase(SizePHI);
      return unknown();
    }
    SizePHI->addIncoming(EdgeData.first, PHI.getIncomingBlock(i));
    OffsetPHI->addIncoming(EdgeData.second, PHI.getIncomingBlock(i));
  }

  Value *Size = SizePHI, *Offset = OffsetPHI;
  if (Value *Tmp = SizePHI->hasConstantValue()) {
    Size = Tmp;
    SizePHI->replaceAllUsesWith(Size);
    SizePHI->eraseFromParent();
    InsertedInstructions.erase(SizePHI);
  }
  if (Value *Tmp = OffsetPHI->hasConstantValue()) {
    Offset = Tmp;
    OffsetPHI->replaceAllUsesWith(Offset);
    OffsetPHI->eraseFromParent();
    InsertedInstructions.erase(OffsetPHI);
  }
  return std::make_pair(Size, Offset);
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitSelectInst(SelectInst &I) {
  SizeOffsetEvalType TrueSide = compute_(I.getTrueValue());
  SizeOffsetEvalType FalseSide = compute_(I.getFalseValue());

  if (!bothKnown(TrueSide) || !bothKnown(FalseSide))
    return unknown();
  if (TrueSide == FalseSide)
    return TrueSide;

  Value *Size = Builder.CreateSelect(I.getCondition(), TrueSide.first,
                                     FalseSide.first);
  Value *Offset = Builder.CreateSelect(I.getCondition(), TrueSide.second,
                                       FalseSide.second);
  return std::make_pair(Size, Offset);
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitInstruction(Instruction &I) {
  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetEvaluator unknown instruction:" << I
                    << '\n');
  return unknown();
}
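// Example usage (editor's sketch, not part of the original file): a pass that
// wants a conservative lower bound on the size of the object behind a pointer
// can use the static query defined above. DL, TLI, and Ptr are assumed to come
// from the caller's context:
//
//   uint64_t Size;
//   ObjectSizeOpts Opts;
//   Opts.EvalMode = ObjectSizeOpts::Mode::Min;
//   if (getObjectSize(Ptr, Size, DL, TLI, Opts))
//     ; // Size now holds a provable minimum object size in bytes.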