//===-- SPIRVEmitIntrinsics.cpp - emit SPIRV intrinsics ---------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The pass emits SPIRV intrinsics keeping essential high-level information for
// the translation of LLVM IR to SPIR-V.
//
//===----------------------------------------------------------------------===//

#include "SPIRV.h"
#include "SPIRVTargetMachine.h"
#include "SPIRVUtils.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicsSPIRV.h"

#include <queue>

// This pass performs the following transformations at the LLVM IR level,
// required for the subsequent translation to SPIR-V:
// - replaces direct usages of aggregate constants with target-specific
//   intrinsics;
// - replaces aggregate-related instructions (extract/insert, ld/st, etc.)
//   with target-specific intrinsics;
// - emits intrinsics for the global variable initializers since IRTranslator
//   doesn't handle them and it's not very convenient to translate them
//   ourselves;
// - emits intrinsics to keep track of the string names assigned to the values;
// - emits intrinsics to keep track of constants (this is necessary to have an
//   LLVM IR constant after IRTranslation is completed) for their further
//   deduplication;
// - emits intrinsics to keep track of the original LLVM types of the values
//   to be able to emit proper SPIR-V types eventually.
//
// TODO: consider removing spv.track.constant in favor of spv.assign.type.
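//
// For illustration only (intrinsic names are abbreviated; the exact overload
// suffixes and metadata encoding are omitted), a named alloca such as
//   %p = alloca i32
// ends up roughly as
//   %p = call ptr @llvm.spv.alloca(...)
//   call void @llvm.spv.assign.ptr.type(ptr %p, metadata i32 undef, i32 0)
//   call void @llvm.spv.assign.name(ptr %p, ...)
// so the backend can later recover the pointee type and the value name.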
using namespace llvm;

namespace llvm {
void initializeSPIRVEmitIntrinsicsPass(PassRegistry &);
} // namespace llvm

namespace {
class SPIRVEmitIntrinsics
    : public FunctionPass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  IRBuilder<> *IRB = nullptr;
  Function *F = nullptr;
  bool TrackConstants = true;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseSet<Instruction *> AggrStores;
  void preprocessCompositeConstants();
  void preprocessUndefs();
  CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
                            Value *Arg, Value *Arg2,
                            ArrayRef<Constant *> Imms) {
    ConstantAsMetadata *CM = ValueAsMetadata::getConstant(Arg);
    MDTuple *TyMD = MDNode::get(F->getContext(), CM);
    MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
    SmallVector<Value *, 4> Args;
    Args.push_back(Arg2);
    Args.push_back(VMD);
    for (auto *Imm : Imms)
      Args.push_back(Imm);
    return IRB->CreateIntrinsic(IntrID, {Types}, Args);
  }
  void replaceMemInstrUses(Instruction *Old, Instruction *New);
  void processInstrAfterVisit(Instruction *I);
  void insertAssignPtrTypeIntrs(Instruction *I);
  void insertAssignTypeIntrs(Instruction *I);
  void insertPtrCastInstr(Instruction *I);
  void processGlobalValue(GlobalVariable &GV);

public:
  static char ID;
  SPIRVEmitIntrinsics() : FunctionPass(ID) {
    initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
  }
  SPIRVEmitIntrinsics(SPIRVTargetMachine *_TM) : FunctionPass(ID), TM(_TM) {
    initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
  }
  Instruction *visitInstruction(Instruction &I) { return &I; }
  Instruction *visitSwitchInst(SwitchInst &I);
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitBitCastInst(BitCastInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitInsertValueInst(InsertValueInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitLoadInst(LoadInst &I);
  Instruction *visitStoreInst(StoreInst &I);
  Instruction *visitAllocaInst(AllocaInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  Instruction *visitUnreachableInst(UnreachableInst &I);
  bool runOnFunction(Function &F) override;
};
} // namespace

char SPIRVEmitIntrinsics::ID = 0;

INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
                false, false)

static inline bool isAssignTypeInstr(const Instruction *I) {
  return isa<IntrinsicInst>(I) &&
         cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
}

static bool isMemInstrToReplace(Instruction *I) {
  return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
         isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
}

static bool isAggrToReplace(const Value *V) {
  return isa<ConstantAggregate>(V) || isa<ConstantDataArray>(V) ||
         (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
}

static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  if (isa<PHINode>(I))
    B.SetInsertPoint(I->getParent(), I->getParent()->getFirstInsertionPt());
  else
    B.SetInsertPoint(I);
}
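// Returns true for pointer-producing instructions whose pointee type is known
// locally (currently allocas and GEPs); these get spv_assign_ptr_type instead
// of a plain spv_assign_type.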
static bool requireAssignPtrType(Instruction *I) {
  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I))
    return true;

  return false;
}

static bool requireAssignType(Instruction *I) {
  IntrinsicInst *Intr = dyn_cast<IntrinsicInst>(I);
  if (Intr) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      IRB->SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      IRB->CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  Old->eraseFromParent();
}

void SPIRVEmitIntrinsics::preprocessUndefs() {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      IRB->SetInsertPoint(I);
      auto *IntrUndef = IRB->CreateIntrinsic(Intrinsic::spv_undef, {}, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
    }
  }
}

void SPIRVEmitIntrinsics::preprocessCompositeConstants() {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    assert(I);
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      auto BuildCompositeIntrinsic = [&KeepInst, &Worklist, &I, &Op,
                                      this](Constant *AggrC,
                                            ArrayRef<Value *> Args) {
        IRB->SetInsertPoint(I);
        auto *CCI =
            IRB->CreateIntrinsic(Intrinsic::spv_const_composite, {}, {Args});
        Worklist.push(CCI);
        I->replaceUsesOfWith(Op, CCI);
        KeepInst = true;
        AggrConsts[CCI] = AggrC;
      };

      if (auto *AggrC = dyn_cast<ConstantAggregate>(Op)) {
        SmallVector<Value *> Args(AggrC->op_begin(), AggrC->op_end());
        BuildCompositeIntrinsic(AggrC, Args);
      } else if (auto *AggrC = dyn_cast<ConstantDataArray>(Op)) {
        SmallVector<Value *> Args;
        for (unsigned i = 0; i < AggrC->getNumElements(); ++i)
          Args.push_back(AggrC->getElementAsConstant(i));
        BuildCompositeIntrinsic(AggrC, Args);
      } else if (isa<ConstantAggregateZero>(Op) &&
                 !Op->getType()->isVectorTy()) {
        auto *AggrC = cast<ConstantAggregateZero>(Op);
        SmallVector<Value *> Args(AggrC->op_begin(), AggrC->op_end());
        BuildCompositeIntrinsic(AggrC, Args);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}

Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  SmallVector<Value *, 4> Args;
  for (auto &Op : I.operands())
    if (Op.get()->getType()->isSized())
      Args.push_back(Op);
  IRB->SetInsertPoint(&I);
  IRB->CreateIntrinsic(Intrinsic::spv_switch, {I.getOperand(0)->getType()},
                       {Args});
  return &I;
}
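// GEPs are folded into a single spv_gep intrinsic call: the first argument
// records the inbounds flag as an i1, the remaining arguments are the original
// GEP operands (base pointer followed by the indices).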
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(IRB->getInt1(I.isInBounds()));
  for (auto &Op : I.operands())
    Args.push_back(Op);
  auto *NewI = IRB->CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  Value *Source = I.getOperand(0);

  // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
  // varying element types. In the case of IR coming from older versions of
  // LLVM such bitcasts do not provide sufficient information; they are simply
  // skipped here and handled in insertPtrCastInstr instead.
  if (I.getType()->isPointerTy()) {
    I.replaceAllUsesWith(Source);
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = IRB->CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

void SPIRVEmitIntrinsics::insertPtrCastInstr(Instruction *I) {
  Value *Pointer;
  Type *ExpectedElementType;
  unsigned OperandToReplace;
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    Pointer = SI->getPointerOperand();
    ExpectedElementType = SI->getValueOperand()->getType();
    OperandToReplace = 1;
  } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Pointer = LI->getPointerOperand();
    ExpectedElementType = LI->getType();
    OperandToReplace = 0;
  } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    Pointer = GEPI->getPointerOperand();
    ExpectedElementType = GEPI->getSourceElementType();
    OperandToReplace = 0;
  } else {
    return;
  }

  // If Pointer is the result of a nop BitCastInst (ptr -> ptr), use the source
  // pointer instead. The BitCastInst will be removed later, when it is
  // visited.
  while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
    Pointer = BC->getOperand(0);

  // Do not emit spv_ptrcast if Pointer is a GlobalValue of the expected type.
  GlobalValue *GV = dyn_cast<GlobalValue>(Pointer);
  if (GV && GV->getValueType() == ExpectedElementType)
    return;

  // Do not emit spv_ptrcast if Pointer is the result of an alloca of the
  // expected type.
  AllocaInst *A = dyn_cast<AllocaInst>(Pointer);
  if (A && A->getAllocatedType() == ExpectedElementType)
    return;

  // Do not emit spv_ptrcast if Pointer is the result of a GEP of the expected
  // type.
  GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(Pointer);
  if (GEPI && GEPI->getResultElementType() == ExpectedElementType)
    return;

  setInsertPointSkippingPhis(*IRB, I);
  Constant *ExpectedElementTypeConst =
      Constant::getNullValue(ExpectedElementType);
  ConstantAsMetadata *CM =
      ValueAsMetadata::getConstant(ExpectedElementTypeConst);
  MDTuple *TyMD = MDNode::get(F->getContext(), CM);
  MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
  unsigned AddressSpace = Pointer->getType()->getPointerAddressSpace();
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit a new spv_ptrcast if an equivalent one already exists or when
  // a spv_assign_ptr_type already targets this pointer with the same element
  // type.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
    // pointer.
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
            AddressSpace)
      continue;

    // The spv_ptrcast/spv_assign_ptr_type targeting this pointer is of the
    // same element type and address space.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    // This must be a spv_ptrcast; do not emit a new one if this one is in the
    // same BB as I. Otherwise, keep searching for other
    // spv_ptrcast/spv_assign_ptr_type users.
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  // Do not emit spv_ptrcast if it would cast to the default pointer element
  // type (i8) of the same address space.
  if (ExpectedElementType->isIntegerTy(8))
    return;

  // If this would be the first spv_ptrcast and there is no spv_assign_ptr_type
  // for this pointer yet, do not emit spv_ptrcast; emit spv_assign_ptr_type
  // instead.
  if (FirstPtrCastOrAssignPtrType && isa<Instruction>(Pointer)) {
    buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {Pointer->getType()},
                    ExpectedElementTypeConst, Pointer,
                    {IRB->getInt32(AddressSpace)});
    return;
  } else {
    SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
    SmallVector<Value *, 2> Args = {Pointer, VMD, IRB->getInt32(AddressSpace)};
    auto *PtrCastI =
        IRB->CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
    I->setOperand(OperandToReplace, PtrCastI);
    return;
  }
}
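// Vector element accesses are mapped one-to-one onto spv_insertelt and
// spv_extractelt; the original instruction is erased and its name, if any, is
// carried over to the new intrinsic call.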
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = IRB->CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = IRB->CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    if (isa<UndefValue>(Op))
      Args.push_back(UndefValue::get(IRB->getInt32Ty()));
    else
      Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(IRB->getInt32(Op));
  Instruction *NewI =
      IRB->CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(IRB->getInt32(Op));
  auto *NewI =
      IRB->CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, F->getParent()->getDataLayout());
  auto *NewI =
      IRB->CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                           {I.getPointerOperand(), IRB->getInt16(Flags),
                            IRB->getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, F->getParent()->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = IRB->CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, IRB->getInt16(Flags),
       IRB->getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
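// Allocas are replaced with spv_alloca calls; the allocated element type
// itself is recorded separately by insertAssignPtrTypeIntrs via
// spv_assign_ptr_type.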
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI = IRB->CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  Args.push_back(IRB->getInt32(I.getSyncScopeID()));
  Args.push_back(IRB->getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(IRB->getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = IRB->CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                    {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRB->SetInsertPoint(&I);
  IRB->CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
  return &I;
}

void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV) {
  // Skip the special artificial variable llvm.global.annotations.
  if (GV.getName() == "llvm.global.annotations")
    return;
  if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
    Constant *Init = GV.getInitializer();
    Type *Ty = isAggrToReplace(Init) ? IRB->getInt32Ty() : Init->getType();
    Constant *Const = isAggrToReplace(Init) ? IRB->getInt32(1) : Init;
    auto *InitInst = IRB->CreateIntrinsic(Intrinsic::spv_init_global,
                                          {GV.getType(), Ty}, {&GV, Const});
    InitInst->setArgOperand(1, Init);
  }
  if ((!GV.hasInitializer() || isa<UndefValue>(GV.getInitializer())) &&
      GV.getNumUses() == 0)
    IRB->CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}

void SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I) {
  if (I->getType()->isVoidTy() || !requireAssignPtrType(I))
    return;

  setInsertPointSkippingPhis(*IRB, I->getNextNode());

  Constant *EltTyConst;
  unsigned AddressSpace = 0;
  if (auto *AI = dyn_cast<AllocaInst>(I)) {
    EltTyConst = UndefValue::get(AI->getAllocatedType());
    AddressSpace = AI->getAddressSpace();
  } else if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    EltTyConst = UndefValue::get(GEP->getResultElementType());
    AddressSpace = GEP->getPointerAddressSpace();
  } else {
    llvm_unreachable("Unexpected instruction!");
  }

  buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {I->getType()}, EltTyConst, I,
                  {IRB->getInt32(AddressSpace)});
}

void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I) {
  Type *Ty = I->getType();
  if (!Ty->isVoidTy() && requireAssignType(I) && !requireAssignPtrType(I)) {
    setInsertPointSkippingPhis(*IRB, I->getNextNode());
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto t = AggrConsts.find(II);
        assert(t != AggrConsts.end());
        TypeToAssign = t->second->getType();
      }
    }
    Constant *Const = UndefValue::get(TypeToAssign);
    buildIntrWithMD(Intrinsic::spv_assign_type, {Ty}, Const, I, {});
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        // Check the GetElementPtrConstantExpr case.
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      setInsertPointSkippingPhis(*IRB, I);
      if (isa<UndefValue>(Op) && Op->getType()->isAggregateType())
        buildIntrWithMD(Intrinsic::spv_assign_type, {IRB->getInt32Ty()}, Op,
                        UndefValue::get(IRB->getInt32Ty()), {});
      else
        buildIntrWithMD(Intrinsic::spv_assign_type, {Op->getType()}, Op, Op,
                        {});
    }
  }
}

void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
      TrackConstants) {
    IRB->SetInsertPoint(I->getNextNode());
    Type *Ty = IRB->getInt32Ty();
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant, {Ty, Ty},
                                  t->second, I, {});
    I->replaceAllUsesWith(NewOp);
    NewOp->setArgOperand(0, I);
  }
  for (const auto &Op : I->operands()) {
    if ((isa<ConstantAggregateZero>(Op) && Op->getType()->isVectorTy()) ||
        isa<PHINode>(I) || isa<SwitchInst>(I))
      TrackConstants = false;
    if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
      unsigned OpNo = Op.getOperandNo();
      if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
                 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
        continue;
      IRB->SetInsertPoint(I);
      auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant,
                                    {Op->getType(), Op->getType()}, Op, Op, {});
      I->setOperand(OpNo, NewOp);
    }
  }
  if (I->hasName()) {
    setInsertPointSkippingPhis(*IRB, I->getNextNode());
    std::vector<Value *> Args = {I};
    addStringImm(I->getName(), *IRB, Args);
    IRB->CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
  }
}

bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;
  F = &Func;
  IRB = new IRBuilder<>(Func.getContext());
  AggrConsts.clear();
  AggrStores.clear();

  // A StoreInst's operand types can be changed by the subsequent
  // transformations, so remember up front which stores deal with aggregate or
  // vector values.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  IRB->SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV);

  preprocessUndefs();
  preprocessCompositeConstants();
  SmallVector<Instruction *> Worklist;
  for (auto &I : instructions(Func))
    Worklist.push_back(&I);

  for (auto &I : Worklist) {
    insertAssignPtrTypeIntrs(I);
    insertAssignTypeIntrs(I);
    insertPtrCastInstr(I);
  }

  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      IRB->SetInsertPoint(I->getNextNode());
    // Visitors return either the original or a newly created instruction for
    // further processing, or nullptr when nothing more needs to be done.
    I = visit(*I);
    if (!I)
      continue;
    processInstrAfterVisit(I);
  }
  return true;
}

FunctionPass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}