xref: /freebsd/contrib/llvm-project/llvm/lib/Transforms/Instrumentation/InstrProfiling.cpp (revision 8ddb146abcdf061be9f2c0db7e391697dafad85c)
1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DIBuilder.h"
30 #include "llvm/IR/DerivedTypes.h"
31 #include "llvm/IR/DiagnosticInfo.h"
32 #include "llvm/IR/Dominators.h"
33 #include "llvm/IR/Function.h"
34 #include "llvm/IR/GlobalValue.h"
35 #include "llvm/IR/GlobalVariable.h"
36 #include "llvm/IR/IRBuilder.h"
37 #include "llvm/IR/Instruction.h"
38 #include "llvm/IR/Instructions.h"
39 #include "llvm/IR/IntrinsicInst.h"
40 #include "llvm/IR/Module.h"
41 #include "llvm/IR/Type.h"
42 #include "llvm/InitializePasses.h"
43 #include "llvm/Pass.h"
44 #include "llvm/ProfileData/InstrProf.h"
45 #include "llvm/ProfileData/InstrProfCorrelator.h"
46 #include "llvm/Support/Casting.h"
47 #include "llvm/Support/CommandLine.h"
48 #include "llvm/Support/Error.h"
49 #include "llvm/Support/ErrorHandling.h"
50 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
51 #include "llvm/Transforms/Utils/ModuleUtils.h"
52 #include "llvm/Transforms/Utils/SSAUpdater.h"
53 #include <algorithm>
54 #include <cassert>
55 #include <cstddef>
56 #include <cstdint>
57 #include <string>
58 
59 using namespace llvm;
60 
61 #define DEBUG_TYPE "instrprof"
62 
63 namespace llvm {
64 cl::opt<bool>
65     DebugInfoCorrelate("debug-info-correlate", cl::ZeroOrMore,
66                        cl::desc("Use debug info to correlate profiles."),
67                        cl::init(false));
68 } // namespace llvm
69 
70 namespace {
71 
72 cl::opt<bool> DoHashBasedCounterSplit(
73     "hash-based-counter-split",
74     cl::desc("Rename counter variable of a comdat function based on cfg hash"),
75     cl::init(true));
76 
77 cl::opt<bool>
78     RuntimeCounterRelocation("runtime-counter-relocation",
79                              cl::desc("Enable relocating counters at runtime."),
80                              cl::init(false));
81 
82 cl::opt<bool> ValueProfileStaticAlloc(
83     "vp-static-alloc",
84     cl::desc("Do static counter allocation for value profiler"),
85     cl::init(true));
86 
87 cl::opt<double> NumCountersPerValueSite(
88     "vp-counters-per-site",
89     cl::desc("The average number of profile counters allocated "
90              "per value profiling site."),
91     // This is set to a very small value because in real programs, only
92     // a very small percentage of value sites have non-zero targets, e.g., 1/30.
93     // For those sites with non-zero profile, the average number of targets
94     // is usually smaller than 2.
95     cl::init(1.0));
96 
97 cl::opt<bool> AtomicCounterUpdateAll(
98     "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
99     cl::desc("Make all profile counter updates atomic (for testing only)"),
100     cl::init(false));
101 
102 cl::opt<bool> AtomicCounterUpdatePromoted(
103     "atomic-counter-update-promoted", cl::ZeroOrMore,
104     cl::desc("Do counter update using atomic fetch add "
105              "for promoted counters only"),
106     cl::init(false));
107 
108 cl::opt<bool> AtomicFirstCounter(
109     "atomic-first-counter", cl::ZeroOrMore,
110     cl::desc("Use atomic fetch add for first counter in a function (usually "
111              "the entry counter)"),
112     cl::init(false));
113 
114 // If the option is not specified, whether counter promotion is done by
115 // default depends on how the instrumentation lowering pipeline is set up,
116 // i.e., the option's default value does not by itself determine whether
117 // the promotion will be done. Explicitly setting this option overrides
118 // the default behavior.
119 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
120                                  cl::desc("Do counter register promotion"),
121                                  cl::init(false));
122 cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
123     cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
124     cl::desc("Max number of counter promotions per loop to avoid"
125              " increasing register pressure too much"));
126 
127 // A debug option
128 cl::opt<int>
129     MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
130                        cl::desc("Max number of allowed counter promotions"));
131 
132 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
133     cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
134     cl::desc("The max number of exiting blocks of a loop to allow "
135              "speculative counter promotion"));
136 
137 cl::opt<bool> SpeculativeCounterPromotionToLoop(
138     cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
139     cl::desc("When the option is false, if the target block is in a loop, "
140              "the promotion will be disallowed unless the promoted counter "
141              "update can be further/iteratively promoted into an acyclic "
142              "region."));
143 
144 cl::opt<bool> IterativeCounterPromotion(
145     cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
146     cl::desc("Allow counter promotion across the whole loop nest."));
147 
148 cl::opt<bool> SkipRetExitBlock(
149     cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
150     cl::desc("Suppress counter promotion if exit blocks contain ret."));
151 
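// Legacy pass manager wrapper around the InstrProfiling implementation; the
// new pass manager entry point is InstrProfiling::run() further below.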
152 class InstrProfilingLegacyPass : public ModulePass {
153   InstrProfiling InstrProf;
154 
155 public:
156   static char ID;
157 
158   InstrProfilingLegacyPass() : ModulePass(ID) {}
159   InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
160       : ModulePass(ID), InstrProf(Options, IsCS) {
161     initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
162   }
163 
164   StringRef getPassName() const override {
165     return "Frontend instrumentation-based coverage lowering";
166   }
167 
168   bool runOnModule(Module &M) override {
169     auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
170       return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
171     };
172     return InstrProf.run(M, GetTLI);
173   }
174 
175   void getAnalysisUsage(AnalysisUsage &AU) const override {
176     AU.setPreservesCFG();
177     AU.addRequired<TargetLibraryInfoWrapperPass>();
178   }
179 };
180 
181 ///
182 /// A helper class to promote one counter RMW operation in the loop
183 /// into a register update.
184 ///
185 /// The RMW update for the counter will be sunk out of the loop after
186 /// the transformation.
187 ///
188 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
189 public:
190   PGOCounterPromoterHelper(
191       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
192       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
193       ArrayRef<Instruction *> InsertPts,
194       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
195       LoopInfo &LI)
196       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
197         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
198     assert(isa<LoadInst>(L));
199     assert(isa<StoreInst>(S));
200     SSA.AddAvailableValue(PH, Init);
201   }
202 
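  // On each loop exit, fold the promoted (in-register) count back into the
  // memory counter: either with an atomic RMW, or with a plain
  // load-add-store whose new load/store pair then becomes a promotion
  // candidate for the enclosing loop when iterative promotion is enabled.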
203   void doExtraRewritesBeforeFinalDeletion() override {
204     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
205       BasicBlock *ExitBlock = ExitBlocks[i];
206       Instruction *InsertPos = InsertPts[i];
207       // Get LiveIn value into the ExitBlock. If there are multiple
208       // predecessors, the value is defined by a PHI node in this
209       // block.
210       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
211       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
212       Type *Ty = LiveInValue->getType();
213       IRBuilder<> Builder(InsertPos);
214       if (AtomicCounterUpdatePromoted)
215         // Atomic updates currently can only be promoted across the current
216         // loop, not the whole loop nest.
217         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
218                                 MaybeAlign(),
219                                 AtomicOrdering::SequentiallyConsistent);
220       else {
221         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
222         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
223         auto *NewStore = Builder.CreateStore(NewVal, Addr);
224 
225         // Now update the parent loop's candidate list:
226         if (IterativeCounterPromotion) {
227           auto *TargetLoop = LI.getLoopFor(ExitBlock);
228           if (TargetLoop)
229             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
230         }
231       }
232     }
233   }
234 
235 private:
236   Instruction *Store;
237   ArrayRef<BasicBlock *> ExitBlocks;
238   ArrayRef<Instruction *> InsertPts;
239   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
240   LoopInfo &LI;
241 };
242 
243 /// A helper class to do register promotion for all profile counter
244 /// updates in a loop.
245 ///
246 class PGOCounterPromoter {
247 public:
248   PGOCounterPromoter(
249       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
250       Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
251       : LoopToCandidates(LoopToCands), L(CurLoop), LI(LI), BFI(BFI) {
252 
253     // Skip collection of ExitBlocks and InsertPts for loops that will not be
254     // able to have counters promoted.
255     SmallVector<BasicBlock *, 8> LoopExitBlocks;
256     SmallPtrSet<BasicBlock *, 8> BlockSet;
257 
258     L.getExitBlocks(LoopExitBlocks);
259     if (!isPromotionPossible(&L, LoopExitBlocks))
260       return;
261 
262     for (BasicBlock *ExitBlock : LoopExitBlocks) {
263       if (BlockSet.insert(ExitBlock).second) {
264         ExitBlocks.push_back(ExitBlock);
265         InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
266       }
267     }
268   }
269 
270   bool run(int64_t *NumPromoted) {
271     // Skip 'infinite' loops:
272     if (ExitBlocks.size() == 0)
273       return false;
274 
275     // Skip if any of the ExitBlocks contains a ret instruction.
276     // This is to prevent dumping of an incomplete profile -- if the
277     // loop is a long-running loop and dump is called in the middle
278     // of the loop, the resulting profile is incomplete.
279     // FIXME: add other heuristics to detect long running loops.
280     if (SkipRetExitBlock) {
281       for (auto BB : ExitBlocks)
282         if (isa<ReturnInst>(BB->getTerminator()))
283           return false;
284     }
285 
286     unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
287     if (MaxProm == 0)
288       return false;
289 
290     unsigned Promoted = 0;
291     for (auto &Cand : LoopToCandidates[&L]) {
292 
293       SmallVector<PHINode *, 4> NewPHIs;
294       SSAUpdater SSA(&NewPHIs);
295       Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);
296 
297       // If BFI is set, we will use it to guide the promotions.
298       if (BFI) {
299         auto *BB = Cand.first->getParent();
300         auto InstrCount = BFI->getBlockProfileCount(BB);
301         if (!InstrCount)
302           continue;
303         auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
304         // If the average loop trip count is not greater than 1.5, we skip
305         // promotion.
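        // (PreheaderCount * 3 >= InstrCount * 2 is the integer form of
        // InstrCount / PreheaderCount <= 1.5, i.e. the average trip count.)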
306         if (PreheaderCount &&
307             (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
308           continue;
309       }
310 
311       PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
312                                         L.getLoopPreheader(), ExitBlocks,
313                                         InsertPts, LoopToCandidates, LI);
314       Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
315       Promoted++;
316       if (Promoted >= MaxProm)
317         break;
318 
319       (*NumPromoted)++;
320       if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
321         break;
322     }
323 
324     LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
325                       << L.getLoopDepth() << ")\n");
326     return Promoted != 0;
327   }
328 
329 private:
330   bool allowSpeculativeCounterPromotion(Loop *LP) {
331     SmallVector<BasicBlock *, 8> ExitingBlocks;
332     L.getExitingBlocks(ExitingBlocks);
333     // Not considered speculative.
334     if (ExitingBlocks.size() == 1)
335       return true;
336     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
337       return false;
338     return true;
339   }
340 
341   // Check whether the loop satisfies the basic conditions needed to perform
342   // Counter Promotions.
343   bool
344   isPromotionPossible(Loop *LP,
345                       const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
346     // We can't insert into a catchswitch.
347     if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
348           return isa<CatchSwitchInst>(Exit->getTerminator());
349         }))
350       return false;
351 
352     if (!LP->hasDedicatedExits())
353       return false;
354 
355     BasicBlock *PH = LP->getLoopPreheader();
356     if (!PH)
357       return false;
358 
359     return true;
360   }
361 
362   // Returns the max number of Counter Promotions for LP.
363   unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
364     SmallVector<BasicBlock *, 8> LoopExitBlocks;
365     LP->getExitBlocks(LoopExitBlocks);
366     if (!isPromotionPossible(LP, LoopExitBlocks))
367       return 0;
368 
369     SmallVector<BasicBlock *, 8> ExitingBlocks;
370     LP->getExitingBlocks(ExitingBlocks);
371 
372     // If BFI is set, we do more aggressive promotions based on BFI.
373     if (BFI)
374       return (unsigned)-1;
375 
376     // Not considered speculative.
377     if (ExitingBlocks.size() == 1)
378       return MaxNumOfPromotionsPerLoop;
379 
380     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
381       return 0;
382 
383     // Whether the target block is in a loop does not matter:
384     if (SpeculativeCounterPromotionToLoop)
385       return MaxNumOfPromotionsPerLoop;
386 
387     // Now check the target block:
388     unsigned MaxProm = MaxNumOfPromotionsPerLoop;
389     for (auto *TargetBlock : LoopExitBlocks) {
390       auto *TargetLoop = LI.getLoopFor(TargetBlock);
391       if (!TargetLoop)
392         continue;
393       unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
394       unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
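      // Budget against the target loop: max(MaxPromForTarget,
      // PendingCandsInTarget) - PendingCandsInTarget is how many additional
      // promotions the target loop can still absorb (zero once its pending
      // candidates already fill its own budget).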
395       MaxProm =
396           std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
397                                 PendingCandsInTarget);
398     }
399     return MaxProm;
400   }
401 
402   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
403   SmallVector<BasicBlock *, 8> ExitBlocks;
404   SmallVector<Instruction *, 8> InsertPts;
405   Loop &L;
406   LoopInfo &LI;
407   BlockFrequencyInfo *BFI;
408 };
409 
410 enum class ValueProfilingCallType {
411   // Individual values are tracked. Currently used for indirect call target
412   // profiling.
413   Default,
414 
415   // MemOp: the memop size value profiling.
416   MemOp
417 };
418 
419 } // end anonymous namespace
420 
421 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
422   FunctionAnalysisManager &FAM =
423       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
424   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
425     return FAM.getResult<TargetLibraryAnalysis>(F);
426   };
427   if (!run(M, GetTLI))
428     return PreservedAnalyses::all();
429 
430   return PreservedAnalyses::none();
431 }
432 
433 char InstrProfilingLegacyPass::ID = 0;
434 INITIALIZE_PASS_BEGIN(InstrProfilingLegacyPass, "instrprof",
435                       "Frontend instrumentation-based coverage lowering.",
436                       false, false)
437 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
438 INITIALIZE_PASS_END(InstrProfilingLegacyPass, "instrprof",
439                     "Frontend instrumentation-based coverage lowering.", false,
440                     false)
441 
442 ModulePass *
443 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
444                                      bool IsCS) {
445   return new InstrProfilingLegacyPass(Options, IsCS);
446 }
447 
448 bool InstrProfiling::lowerIntrinsics(Function *F) {
449   bool MadeChange = false;
450   PromotionCandidates.clear();
451   for (BasicBlock &BB : *F) {
452     for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
453       if (auto *IPIS = dyn_cast<InstrProfIncrementInstStep>(&Instr)) {
454         lowerIncrement(IPIS);
455         MadeChange = true;
456       } else if (auto *IPI = dyn_cast<InstrProfIncrementInst>(&Instr)) {
457         lowerIncrement(IPI);
458         MadeChange = true;
459       } else if (auto *IPC = dyn_cast<InstrProfCoverInst>(&Instr)) {
460         lowerCover(IPC);
461         MadeChange = true;
462       } else if (auto *IPVP = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
463         lowerValueProfileInst(IPVP);
464         MadeChange = true;
465       }
466     }
467   }
468 
469   if (!MadeChange)
470     return false;
471 
472   promoteCounterLoadStores(F);
473   return true;
474 }
475 
476 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
477   // Mach-O doesn't support weak external references.
478   if (TT.isOSBinFormatMachO())
479     return false;
480 
481   if (RuntimeCounterRelocation.getNumOccurrences() > 0)
482     return RuntimeCounterRelocation;
483 
484   // Fuchsia uses runtime counter relocation by default.
485   return TT.isOSFuchsia();
486 }
487 
488 bool InstrProfiling::isCounterPromotionEnabled() const {
489   if (DoCounterPromotion.getNumOccurrences() > 0)
490     return DoCounterPromotion;
491 
492   return Options.DoCounterPromotion;
493 }
494 
495 void InstrProfiling::promoteCounterLoadStores(Function *F) {
496   if (!isCounterPromotionEnabled())
497     return;
498 
499   DominatorTree DT(*F);
500   LoopInfo LI(DT);
501   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;
502 
503   std::unique_ptr<BlockFrequencyInfo> BFI;
504   if (Options.UseBFIInPromotion) {
505     std::unique_ptr<BranchProbabilityInfo> BPI;
506     BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
507     BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
508   }
509 
510   for (const auto &LoadStore : PromotionCandidates) {
511     auto *CounterLoad = LoadStore.first;
512     auto *CounterStore = LoadStore.second;
513     BasicBlock *BB = CounterLoad->getParent();
514     Loop *ParentLoop = LI.getLoopFor(BB);
515     if (!ParentLoop)
516       continue;
517     LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
518   }
519 
520   SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();
521 
522   // Do a post-order traversal of the loops so that counter updates can be
523   // iteratively hoisted outside the loop nest.
524   for (auto *Loop : llvm::reverse(Loops)) {
525     PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
526     Promoter.run(&TotalCountersPromoted);
527   }
528 }
529 
530 static bool needsRuntimeHookUnconditionally(const Triple &TT) {
531   // On Fuchsia, we only need the runtime hook if any counters are present.
532   if (TT.isOSFuchsia())
533     return false;
534 
535   return true;
536 }
537 
538 /// Check if the module contains uses of any profiling intrinsics.
539 static bool containsProfilingIntrinsics(Module &M) {
540   auto containsIntrinsic = [&](int ID) {
541     if (auto *F = M.getFunction(Intrinsic::getName(ID)))
542       return !F->use_empty();
543     return false;
544   };
545   return containsIntrinsic(llvm::Intrinsic::instrprof_cover) ||
546          containsIntrinsic(llvm::Intrinsic::instrprof_increment) ||
547          containsIntrinsic(llvm::Intrinsic::instrprof_increment_step) ||
548          containsIntrinsic(llvm::Intrinsic::instrprof_value_profile);
549 }
550 
551 bool InstrProfiling::run(
552     Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
553   this->M = &M;
554   this->GetTLI = std::move(GetTLI);
555   NamesVar = nullptr;
556   NamesSize = 0;
557   ProfileDataMap.clear();
558   CompilerUsedVars.clear();
559   UsedVars.clear();
560   TT = Triple(M.getTargetTriple());
561 
562   bool MadeChange = false;
563 
564   // Emit the runtime hook even if no counters are present.
565   if (needsRuntimeHookUnconditionally(TT))
566     MadeChange = emitRuntimeHook();
567 
568   // Improve compile time by avoiding linear scans when there is no work.
569   GlobalVariable *CoverageNamesVar =
570       M.getNamedGlobal(getCoverageUnusedNamesVarName());
571   if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
572     return MadeChange;
573 
574   // The number of value sites inside an instrumented function is not known
575   // up front, so count the instrumented target value sites here and record
576   // the count as a field in the profile data variable.
577   for (Function &F : M) {
578     InstrProfIncrementInst *FirstProfIncInst = nullptr;
579     for (BasicBlock &BB : F)
580       for (auto I = BB.begin(), E = BB.end(); I != E; I++)
581         if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
582           computeNumValueSiteCounts(Ind);
583         else if (FirstProfIncInst == nullptr)
584           FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);
585 
586     // Value profiling intrinsic lowering requires the per-function profile data
587     // variable to be created first.
588     if (FirstProfIncInst != nullptr)
589       static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
590   }
591 
592   for (Function &F : M)
593     MadeChange |= lowerIntrinsics(&F);
594 
595   if (CoverageNamesVar) {
596     lowerCoverageData(CoverageNamesVar);
597     MadeChange = true;
598   }
599 
600   if (!MadeChange)
601     return false;
602 
603   emitVNodes();
604   emitNameData();
605   emitRuntimeHook();
606   emitRegistration();
607   emitUses();
608   emitInitialization();
609   return true;
610 }
611 
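// Declare (or look up) the runtime's value-profiling hook for the requested
// call type -- the default indirect-target hook or the memop-size hook --
// using the parameter list from InstrProfData.inc, and give the i32
// site-index parameter the target's preferred extension attribute.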
612 static FunctionCallee getOrInsertValueProfilingCall(
613     Module &M, const TargetLibraryInfo &TLI,
614     ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
615   LLVMContext &Ctx = M.getContext();
616   auto *ReturnTy = Type::getVoidTy(M.getContext());
617 
618   AttributeList AL;
619   if (auto AK = TLI.getExtAttrForI32Param(false))
620     AL = AL.addParamAttribute(M.getContext(), 2, AK);
621 
622   assert((CallType == ValueProfilingCallType::Default ||
623           CallType == ValueProfilingCallType::MemOp) &&
624          "Must be Default or MemOp");
625   Type *ParamTypes[] = {
626 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
627 #include "llvm/ProfileData/InstrProfData.inc"
628   };
629   auto *ValueProfilingCallTy =
630       FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
631   StringRef FuncName = CallType == ValueProfilingCallType::Default
632                            ? getInstrProfValueProfFuncName()
633                            : getInstrProfValueProfMemOpFuncName();
634   return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
635 }
636 
637 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
638   GlobalVariable *Name = Ind->getName();
639   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
640   uint64_t Index = Ind->getIndex()->getZExtValue();
641   auto &PD = ProfileDataMap[Name];
642   PD.NumValueSites[ValueKind] =
643       std::max(PD.NumValueSites[ValueKind], (uint32_t)(Index + 1));
644 }
645 
646 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
647   // TODO: Value profiling heavily depends on the data section which is omitted
648   // in lightweight mode. We need to move the value profile pointer to the
649   // Counter struct to get this working.
650   assert(
651       !DebugInfoCorrelate &&
652       "Value profiling is not yet supported with lightweight instrumentation");
653   GlobalVariable *Name = Ind->getName();
654   auto It = ProfileDataMap.find(Name);
655   assert(It != ProfileDataMap.end() && It->second.DataVar &&
656          "value profiling detected in function with no counter increment");
657 
658   GlobalVariable *DataVar = It->second.DataVar;
659   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
660   uint64_t Index = Ind->getIndex()->getZExtValue();
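  // Flatten (ValueKind, Index) into a single site index: sites are numbered
  // per value kind, in kind order, so skip over the sites of all preceding
  // kinds.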
661   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
662     Index += It->second.NumValueSites[Kind];
663 
664   IRBuilder<> Builder(Ind);
665   bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
666                       llvm::InstrProfValueKind::IPVK_MemOPSize);
667   CallInst *Call = nullptr;
668   auto *TLI = &GetTLI(*Ind->getFunction());
669 
670   // To support value profiling calls within Windows exception handlers, funclet
671   // information contained within operand bundles needs to be copied over to
672   // the library call. This is required for the IR to be processed by the
673   // WinEHPrepare pass.
674   SmallVector<OperandBundleDef, 1> OpBundles;
675   Ind->getOperandBundlesAsDefs(OpBundles);
676   if (!IsMemOpSize) {
677     Value *Args[3] = {Ind->getTargetValue(),
678                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
679                       Builder.getInt32(Index)};
680     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
681                               OpBundles);
682   } else {
683     Value *Args[3] = {Ind->getTargetValue(),
684                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
685                       Builder.getInt32(Index)};
686     Call = Builder.CreateCall(
687         getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
688         Args, OpBundles);
689   }
690   if (auto AK = TLI->getExtAttrForI32Param(false))
691     Call->addParamAttr(2, AK);
692   Ind->replaceAllUsesWith(Call);
693   Ind->eraseFromParent();
694 }
695 
696 Value *InstrProfiling::getCounterAddress(InstrProfInstBase *I) {
697   auto *Counters = getOrCreateRegionCounters(I);
698   IRBuilder<> Builder(I);
699 
700   auto *Addr = Builder.CreateConstInBoundsGEP2_32(
701       Counters->getValueType(), Counters, 0, I->getIndex()->getZExtValue());
702 
703   if (!isRuntimeCounterRelocationEnabled())
704     return Addr;
705 
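  // With runtime counter relocation, the real counter location is Addr plus a
  // per-process bias that the runtime publishes through the global returned by
  // getInstrProfCounterBiasVarName(); the bias is loaded once at the top of
  // the entry block and that load is reused for every counter in the function.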
706   Type *Int64Ty = Type::getInt64Ty(M->getContext());
707   Function *Fn = I->getParent()->getParent();
708   Instruction &EntryI = Fn->getEntryBlock().front();
709   LoadInst *LI = dyn_cast<LoadInst>(&EntryI);
710   if (!LI) {
711     IRBuilder<> EntryBuilder(&EntryI);
712     auto *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
713     if (!Bias) {
714       // The compiler must define this variable when runtime counter relocation
715       // is being used. The runtime has a weak external reference that is used
716       // to check whether that's the case or not.
717       Bias = new GlobalVariable(
718           *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
719           Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
720       Bias->setVisibility(GlobalVariable::HiddenVisibility);
721       // A definition that's weak (linkonce_odr) without being in a COMDAT
722       // section wouldn't lead to link errors, but it would lead to a dead
723       // data word from every TU but one. Putting it in COMDAT ensures there
724       // will be exactly one data slot in the link.
725       if (TT.supportsCOMDAT())
726         Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
727     }
728     LI = EntryBuilder.CreateLoad(Int64Ty, Bias);
729   }
730   auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
731   return Builder.CreateIntToPtr(Add, Addr->getType());
732 }
733 
734 void InstrProfiling::lowerCover(InstrProfCoverInst *CoverInstruction) {
735   auto *Addr = getCounterAddress(CoverInstruction);
736   IRBuilder<> Builder(CoverInstruction);
737   // We store zero to represent that this block is covered.
738   Builder.CreateStore(Builder.getInt8(0), Addr);
739   CoverInstruction->eraseFromParent();
740 }
741 
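// Lower an increment intrinsic into an update of its counter slot. The
// non-atomic lowering produces, illustratively (value names depend on the
// instrumented function):
//   %pgocount = load i64, i64* <counter address>
//   %1 = add i64 %pgocount, <step>
//   store i64 %1, i64* <counter address>
// The atomic form instead emits a single monotonic 'atomicrmw add'.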
742 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
743   auto *Addr = getCounterAddress(Inc);
744 
745   IRBuilder<> Builder(Inc);
746   if (Options.Atomic || AtomicCounterUpdateAll ||
747       (Inc->getIndex()->isZeroValue() && AtomicFirstCounter)) {
748     Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
749                             MaybeAlign(), AtomicOrdering::Monotonic);
750   } else {
751     Value *IncStep = Inc->getStep();
752     Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
753     auto *Count = Builder.CreateAdd(Load, Inc->getStep());
754     auto *Store = Builder.CreateStore(Count, Addr);
755     if (isCounterPromotionEnabled())
756       PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
757   }
758   Inc->eraseFromParent();
759 }
760 
761 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
762   ConstantArray *Names =
763       cast<ConstantArray>(CoverageNamesVar->getInitializer());
764   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
765     Constant *NC = Names->getOperand(I);
766     Value *V = NC->stripPointerCasts();
767     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
768     GlobalVariable *Name = cast<GlobalVariable>(V);
769 
770     Name->setLinkage(GlobalValue::PrivateLinkage);
771     ReferencedNames.push_back(Name);
772     NC->dropAllReferences();
773   }
774   CoverageNamesVar->eraseFromParent();
775 }
776 
777 /// Get the name of a profiling variable for a particular function.
778 static std::string getVarName(InstrProfInstBase *Inc, StringRef Prefix,
779                               bool &Renamed) {
780   StringRef NamePrefix = getInstrProfNameVarPrefix();
781   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
782   Function *F = Inc->getParent()->getParent();
783   Module *M = F->getParent();
784   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
785       !canRenameComdatFunc(*F)) {
786     Renamed = false;
787     return (Prefix + Name).str();
788   }
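  // Hash-based counter splitting (IR PGO, renamable comdat function): append
  // the function's CFG hash to the variable name, unless it is already
  // suffixed with it, so differently-hashed comdat copies get distinct
  // counter and data variables.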
789   Renamed = true;
790   uint64_t FuncHash = Inc->getHash()->getZExtValue();
791   SmallVector<char, 24> HashPostfix;
792   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
793     return (Prefix + Name).str();
794   return (Prefix + Name + "." + Twine(FuncHash)).str();
795 }
796 
797 static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
798   auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
799   if (!MD)
800     return 0;
801 
802   // If the flag is a ConstantAsMetadata, it should be an integer representable
803   // in 64-bits.
804   return cast<ConstantInt>(MD->getValue())->getZExtValue();
805 }
806 
807 static bool enablesValueProfiling(const Module &M) {
808   return isIRPGOFlagSet(&M) ||
809          getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
810 }
811 
812 // Conservatively returns true if data variables may be referenced by code.
813 static bool profDataReferencedByCode(const Module &M) {
814   return enablesValueProfiling(M);
815 }
816 
817 static inline bool shouldRecordFunctionAddr(Function *F) {
818   // Only record function addresses if IR PGO is enabled or if clang value
819   // profiling is enabled. Recording function addresses greatly increases object
820   // file size, because it prevents the inliner from deleting functions that
821   // have been inlined everywhere.
822   if (!profDataReferencedByCode(*F->getParent()))
823     return false;
824 
825   // Check the linkage
826   bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
827   if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
828       !HasAvailableExternallyLinkage)
829     return true;
830 
831   // A function marked 'alwaysinline' with available_externally linkage can't
832   // have its address taken. Doing so would create an undefined external ref to
833   // the function, which would fail to link.
834   if (HasAvailableExternallyLinkage &&
835       F->hasFnAttribute(Attribute::AlwaysInline))
836     return false;
837 
838   // Prohibit function address recording if the function is both internal and
839   // COMDAT. This avoids the profile data variable referencing internal symbols
840   // in COMDAT.
841   if (F->hasLocalLinkage() && F->hasComdat())
842     return false;
843 
844   // Check uses of this function for other than direct calls or invokes to it.
845   // Inline virtual functions have linkonce_odr linkage. When a key method
846   // exists, the vtable will only be emitted in the TU where the key method
847   // is defined. In a TU where the vtable is not available, the function won't
848   // be 'addresstaken'. If its address is not recorded here, the profile data
849   // with the missing address may be picked by the linker, leading to missing
850   // indirect call target info.
851   return F->hasAddressTaken() || F->hasLinkOnceLinkage();
852 }
853 
854 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
855   // Don't do this for Darwin; compiler-rt uses linker magic.
856   if (TT.isOSDarwin())
857     return false;
858   // Use linker script magic to get data/cnts/name start/end.
859   if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
860       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() || TT.isOSWindows())
861     return false;
862 
863   return true;
864 }
865 
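// Create the counter array for one function: i8 counters initialized to -1
// for coverage instrumentation (lowerCover later stores 0 to mark a block as
// covered), or zero-initialized i64 counters for ordinary increments.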
866 GlobalVariable *
867 InstrProfiling::createRegionCounters(InstrProfInstBase *Inc, StringRef Name,
868                                      GlobalValue::LinkageTypes Linkage) {
869   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
870   auto &Ctx = M->getContext();
871   GlobalVariable *GV;
872   if (isa<InstrProfCoverInst>(Inc)) {
873     auto *CounterTy = Type::getInt8Ty(Ctx);
874     auto *CounterArrTy = ArrayType::get(CounterTy, NumCounters);
875     // TODO: `Constant::getAllOnesValue()` does not yet accept an array type.
876     std::vector<Constant *> InitialValues(NumCounters,
877                                           Constant::getAllOnesValue(CounterTy));
878     GV = new GlobalVariable(*M, CounterArrTy, false, Linkage,
879                             ConstantArray::get(CounterArrTy, InitialValues),
880                             Name);
881     GV->setAlignment(Align(1));
882   } else {
883     auto *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
884     GV = new GlobalVariable(*M, CounterTy, false, Linkage,
885                             Constant::getNullValue(CounterTy), Name);
886     GV->setAlignment(Align(8));
887   }
888   return GV;
889 }
890 
891 GlobalVariable *
892 InstrProfiling::getOrCreateRegionCounters(InstrProfInstBase *Inc) {
893   GlobalVariable *NamePtr = Inc->getName();
894   auto &PD = ProfileDataMap[NamePtr];
895   if (PD.RegionCounters)
896     return PD.RegionCounters;
897 
898   // Match the linkage and visibility of the name global.
899   Function *Fn = Inc->getParent()->getParent();
900   GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
901   GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
902 
903   // Use internal rather than private linkage so the counter variable shows up
904   // in the symbol table when using debug info for correlation.
905   if (DebugInfoCorrelate && TT.isOSBinFormatMachO() &&
906       Linkage == GlobalValue::PrivateLinkage)
907     Linkage = GlobalValue::InternalLinkage;
908 
909   // Due to a limitation of the XCOFF binder as of 2021/09/28, duplicate weak
910   // symbols in the same csect won't be discarded. When there are duplicate weak
911   // symbols, we cannot guarantee that the relocations get resolved to the
912   // intended weak symbol, so we cannot ensure the correctness of the relative
913   // CounterPtr; thus we have to use private linkage for counter and data symbols.
914   if (TT.isOSBinFormatXCOFF()) {
915     Linkage = GlobalValue::PrivateLinkage;
916     Visibility = GlobalValue::DefaultVisibility;
917   }
918   // Move the name variable to the right section. Place them in a COMDAT group
919   // if the associated function is a COMDAT. This will make sure that only one
920   // copy of counters of the COMDAT function will be emitted after linking. Keep
921   // in mind that this pass may run before the inliner, so we need to create a
922   // new comdat group for the counters and profiling data. If we use the comdat
923   // of the parent function, that will result in relocations against discarded
924   // sections.
925   //
926   // If the data variable is referenced by code, counters and data have to be
927   // in different comdats for COFF because the Visual C++ linker will report
928   // duplicate symbol errors if there are multiple external symbols with the
929   // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
930   //
931   // For ELF, when not using COMDAT, put counters, data and values into a
932   // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
933   // allows -z start-stop-gc to discard the entire group when the function is
934   // discarded.
935   bool DataReferencedByCode = profDataReferencedByCode(*M);
936   bool NeedComdat = needsComdatForCounter(*Fn, *M);
937   bool Renamed;
938   std::string CntsVarName =
939       getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
940   std::string DataVarName =
941       getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
942   auto MaybeSetComdat = [&](GlobalVariable *GV) {
943     bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
944     if (UseComdat) {
945       StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
946                                 ? GV->getName()
947                                 : CntsVarName;
948       Comdat *C = M->getOrInsertComdat(GroupName);
949       if (!NeedComdat)
950         C->setSelectionKind(Comdat::NoDeduplicate);
951       GV->setComdat(C);
952     }
953   };
954 
955   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
956   LLVMContext &Ctx = M->getContext();
957 
958   auto *CounterPtr = createRegionCounters(Inc, CntsVarName, Linkage);
959   CounterPtr->setVisibility(Visibility);
960   CounterPtr->setSection(
961       getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
962   MaybeSetComdat(CounterPtr);
963   CounterPtr->setLinkage(Linkage);
964   PD.RegionCounters = CounterPtr;
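  // For debug-info correlation, attach the PGO function name, CFG hash, and
  // number of counters as annotations on a DIGlobalVariableExpression for the
  // counter variable, so the profile can later be correlated from debug info
  // instead of from a data section.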
965   if (DebugInfoCorrelate) {
966     if (auto *SP = Fn->getSubprogram()) {
967       DIBuilder DB(*M, true, SP->getUnit());
968       Metadata *FunctionNameAnnotation[] = {
969           MDString::get(Ctx, InstrProfCorrelator::FunctionNameAttributeName),
970           MDString::get(Ctx, getPGOFuncNameVarInitializer(NamePtr)),
971       };
972       Metadata *CFGHashAnnotation[] = {
973           MDString::get(Ctx, InstrProfCorrelator::CFGHashAttributeName),
974           ConstantAsMetadata::get(Inc->getHash()),
975       };
976       Metadata *NumCountersAnnotation[] = {
977           MDString::get(Ctx, InstrProfCorrelator::NumCountersAttributeName),
978           ConstantAsMetadata::get(Inc->getNumCounters()),
979       };
980       auto Annotations = DB.getOrCreateArray({
981           MDNode::get(Ctx, FunctionNameAnnotation),
982           MDNode::get(Ctx, CFGHashAnnotation),
983           MDNode::get(Ctx, NumCountersAnnotation),
984       });
985       auto *DICounter = DB.createGlobalVariableExpression(
986           SP, CounterPtr->getName(), /*LinkageName=*/StringRef(), SP->getFile(),
987           /*LineNo=*/0, DB.createUnspecifiedType("Profile Data Type"),
988           CounterPtr->hasLocalLinkage(), /*IsDefined=*/true, /*Expr=*/nullptr,
989           /*Decl=*/nullptr, /*TemplateParams=*/nullptr, /*AlignInBits=*/0,
990           Annotations);
991       CounterPtr->addDebugInfo(DICounter);
992       DB.finalize();
993     } else {
994       std::string Msg = ("Missing debug info for function " + Fn->getName() +
995                          "; required for profile correlation.")
996                             .str();
997       Ctx.diagnose(
998           DiagnosticInfoPGOProfile(M->getName().data(), Msg, DS_Warning));
999     }
1000   }
1001 
1002   auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
1003   // Allocate statically the array of pointers to value profile nodes for
1004   // the current function.
1005   Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
1006   uint64_t NS = 0;
1007   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1008     NS += PD.NumValueSites[Kind];
1009   if (NS > 0 && ValueProfileStaticAlloc &&
1010       !needsRuntimeRegistrationOfSectionRange(TT)) {
1011     ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
1012     auto *ValuesVar = new GlobalVariable(
1013         *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
1014         getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
1015     ValuesVar->setVisibility(Visibility);
1016     ValuesVar->setSection(
1017         getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
1018     ValuesVar->setAlignment(Align(8));
1019     MaybeSetComdat(ValuesVar);
1020     ValuesPtrExpr =
1021         ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
1022   }
1023 
1024   if (DebugInfoCorrelate) {
1025     // Mark the counter variable as used so that it isn't optimized out.
1026     CompilerUsedVars.push_back(PD.RegionCounters);
1027     return PD.RegionCounters;
1028   }
1029 
1030   // Create data variable.
1031   auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
1032   auto *Int16Ty = Type::getInt16Ty(Ctx);
1033   auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
1034   Type *DataTypes[] = {
1035 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
1036 #include "llvm/ProfileData/InstrProfData.inc"
1037   };
1038   auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));
1039 
1040   Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
1041                                ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
1042                                : ConstantPointerNull::get(Int8PtrTy);
1043 
1044   Constant *Int16ArrayVals[IPVK_Last + 1];
1045   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1046     Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);
1047 
1048   // If the data variable is not referenced by code (if we don't emit
1049   // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
1050   // data variable live under linker GC, the data variable can be private. This
1051   // optimization applies to ELF.
1052   //
1053   // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
1054   // to be false.
1055   //
1056   // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
1057   // that other copies must have the same CFG and cannot have value profiling.
1058   // If no hash suffix, other profd copies may be referenced by code.
1059   if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
1060       (TT.isOSBinFormatELF() ||
1061        (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
1062     Linkage = GlobalValue::PrivateLinkage;
1063     Visibility = GlobalValue::DefaultVisibility;
1064   }
1065   auto *Data =
1066       new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
1067   // Reference the counter variable with a label difference (link-time
1068   // constant).
1069   auto *RelativeCounterPtr =
1070       ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
1071                            ConstantExpr::getPtrToInt(Data, IntPtrTy));
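  // Being a label difference, CounterPtr is resolvable at link time and needs
  // no run-time relocation in the data section.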
1072 
1073   Constant *DataVals[] = {
1074 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
1075 #include "llvm/ProfileData/InstrProfData.inc"
1076   };
1077   Data->setInitializer(ConstantStruct::get(DataTy, DataVals));
1078 
1079   Data->setVisibility(Visibility);
1080   Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
1081   Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
1082   MaybeSetComdat(Data);
1083   Data->setLinkage(Linkage);
1084 
1085   PD.DataVar = Data;
1086 
1087   // Mark the data variable as used so that it isn't stripped out.
1088   CompilerUsedVars.push_back(Data);
1089   // Now that the linkage set by the FE has been passed to the data and counter
1090   // variables, reset Name variable's linkage and visibility to private so that
1091   // it can be removed later by the compiler.
1092   NamePtr->setLinkage(GlobalValue::PrivateLinkage);
1093   // Collect the referenced names to be used by emitNameData.
1094   ReferencedNames.push_back(NamePtr);
1095 
1096   return PD.RegionCounters;
1097 }
1098 
1099 void InstrProfiling::emitVNodes() {
1100   if (!ValueProfileStaticAlloc)
1101     return;
1102 
1103   // For now only support this on platforms that do
1104   // not require runtime registration to discover
1105   // named section start/end.
1106   if (needsRuntimeRegistrationOfSectionRange(TT))
1107     return;
1108 
1109   size_t TotalNS = 0;
1110   for (auto &PD : ProfileDataMap) {
1111     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1112       TotalNS += PD.second.NumValueSites[Kind];
1113   }
1114 
1115   if (!TotalNS)
1116     return;
1117 
1118   uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
1119 // Heuristic for small programs with very few total value sites.
1120 // The default value of vp-counters-per-site is chosen based on
1121 // the observation that large apps usually have a low percentage
1122 // of value sites that actually have any profile data, and thus
1123 // the average number of counters per site is low. For small
1124 // apps with very few sites, this may not be true. Bump up the
1125 // number of counters in this case.
1126 #define INSTR_PROF_MIN_VAL_COUNTS 10
1127   if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
1128     NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);
1129 
1130   auto &Ctx = M->getContext();
1131   Type *VNodeTypes[] = {
1132 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
1133 #include "llvm/ProfileData/InstrProfData.inc"
1134   };
1135   auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));
1136 
1137   ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
1138   auto *VNodesVar = new GlobalVariable(
1139       *M, VNodesTy, false, GlobalValue::PrivateLinkage,
1140       Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
1141   VNodesVar->setSection(
1142       getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
1143   // VNodesVar is used by runtime but not referenced via relocation by other
1144   // sections. Conservatively make it linker retained.
1145   UsedVars.push_back(VNodesVar);
1146 }
1147 
1148 void InstrProfiling::emitNameData() {
1149   std::string UncompressedData;
1150 
1151   if (ReferencedNames.empty())
1152     return;
1153 
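  // Serialize all referenced PGO name variables into a single string
  // (compressed when DoInstrProfNameCompression is set), emit it as one
  // private global in the names section, and erase the original name globals.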
1154   std::string CompressedNameStr;
1155   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
1156                                           DoInstrProfNameCompression)) {
1157     report_fatal_error(Twine(toString(std::move(E))), false);
1158   }
1159 
1160   auto &Ctx = M->getContext();
1161   auto *NamesVal =
1162       ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false);
1163   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
1164                                 GlobalValue::PrivateLinkage, NamesVal,
1165                                 getInstrProfNamesVarName());
1166   NamesSize = CompressedNameStr.size();
1167   NamesVar->setSection(
1168       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
1169   // On COFF, it's important to reduce the alignment down to 1 to prevent the
1170   // linker from inserting padding before the start of the names section or
1171   // between names entries.
1172   NamesVar->setAlignment(Align(1));
1173   // NamesVar is used by runtime but not referenced via relocation by other
1174   // sections. Conservatively make it linker retained.
1175   UsedVars.push_back(NamesVar);
1176 
1177   for (auto *NamePtr : ReferencedNames)
1178     NamePtr->eraseFromParent();
1179 }
1180 
1181 void InstrProfiling::emitRegistration() {
1182   if (!needsRuntimeRegistrationOfSectionRange(TT))
1183     return;
1184 
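  // On targets that need runtime registration (no linker or linker-script
  // magic for section bounds), build an internal registration function that
  // passes every profile global, and the names blob with its size, to the
  // runtime's registration entry points.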
1185   // Construct the function.
1186   auto *VoidTy = Type::getVoidTy(M->getContext());
1187   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
1188   auto *Int64Ty = Type::getInt64Ty(M->getContext());
1189   auto *RegisterFTy = FunctionType::get(VoidTy, false);
1190   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
1191                                      getInstrProfRegFuncsName(), M);
1192   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1193   if (Options.NoRedZone)
1194     RegisterF->addFnAttr(Attribute::NoRedZone);
1195 
1196   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
1197   auto *RuntimeRegisterF =
1198       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
1199                        getInstrProfRegFuncName(), M);
1200 
1201   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
1202   for (Value *Data : CompilerUsedVars)
1203     if (!isa<Function>(Data))
1204       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1205   for (Value *Data : UsedVars)
1206     if (Data != NamesVar && !isa<Function>(Data))
1207       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1208 
1209   if (NamesVar) {
1210     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
1211     auto *NamesRegisterTy =
1212         FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
1213     auto *NamesRegisterF =
1214         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
1215                          getInstrProfNamesRegFuncName(), M);
1216     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
1217                                     IRB.getInt64(NamesSize)});
1218   }
1219 
1220   IRB.CreateRetVoid();
1221 }
1222 
1223 bool InstrProfiling::emitRuntimeHook() {
1224   // We expect the linker to be invoked with the -u<hook_var> flag on Linux,
1225   // in which case there is no need to emit the external variable.
1226   if (TT.isOSLinux())
1227     return false;
1228 
1229   // If the module's provided its own runtime, we don't need to do anything.
1230   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
1231     return false;
1232 
1233   // Declare an external variable that will pull in the runtime initialization.
1234   auto *Int32Ty = Type::getInt32Ty(M->getContext());
1235   auto *Var =
1236       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
1237                          nullptr, getInstrProfRuntimeHookVarName());
1238 
1239   if (TT.isOSBinFormatELF()) {
1240     // Mark the hook variable as used so that it isn't stripped out.
1241     CompilerUsedVars.push_back(Var);
1242   } else {
1243     // Make a function that uses it.
1244     auto *User = Function::Create(FunctionType::get(Int32Ty, false),
1245                                   GlobalValue::LinkOnceODRLinkage,
1246                                   getInstrProfRuntimeHookVarUseFuncName(), M);
1247     User->addFnAttr(Attribute::NoInline);
1248     if (Options.NoRedZone)
1249       User->addFnAttr(Attribute::NoRedZone);
1250     User->setVisibility(GlobalValue::HiddenVisibility);
1251     if (TT.supportsCOMDAT())
1252       User->setComdat(M->getOrInsertComdat(User->getName()));
1253 
1254     IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1255     auto *Load = IRB.CreateLoad(Int32Ty, Var);
1256     IRB.CreateRet(Load);
1257 
1258     // Mark the function as used so that it isn't stripped out.
1259     CompilerUsedVars.push_back(User);
1260   }
1261   return true;
1262 }
1263 
1264 void InstrProfiling::emitUses() {
1265   // The metadata sections are parallel arrays. Optimizers (e.g.
1266   // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
1267   // we conservatively retain all unconditionally in the compiler.
1268   //
1269   // On ELF and Mach-O, the linker can guarantee the associated sections will be
1270   // retained or discarded as a unit, so llvm.compiler.used is sufficient.
1271   // Similarly on COFF, if prof data is not referenced by code we use one comdat
1272   // and ensure this GC property as well. Otherwise, we have to conservatively
1273   // make all of the sections retained by the linker.
1274   if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
1275       (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
1276     appendToCompilerUsed(*M, CompilerUsedVars);
1277   else
1278     appendToUsed(*M, CompilerUsedVars);
1279 
1280   // We do not add proper references from used metadata sections to NamesVar and
1281   // VNodesVar, so we have to be conservative and place them in llvm.used
1282   // regardless of the target.
1283   appendToUsed(*M, UsedVars);
1284 }
1285 
1286 void InstrProfiling::emitInitialization() {
1287   // Create the ProfileFileName variable. Don't do this for the
1288   // context-sensitive instrumentation lowering: that lowering runs after
1289   // LTO/ThinLTO linking, and the PGOInstrumentationGenCreateVar pass should
1290   // have already created the variable before LTO/ThinLTO linking.
1291   if (!IsCS)
1292     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1293   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1294   if (!RegisterF)
1295     return;
1296 
1297   // Create the initialization function.
1298   auto *VoidTy = Type::getVoidTy(M->getContext());
1299   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1300                              GlobalValue::InternalLinkage,
1301                              getInstrProfInitFuncName(), M);
1302   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1303   F->addFnAttr(Attribute::NoInline);
1304   if (Options.NoRedZone)
1305     F->addFnAttr(Attribute::NoRedZone);
1306 
1307   // Add the basic block and the necessary calls.
1308   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1309   IRB.CreateCall(RegisterF, {});
1310   IRB.CreateRetVoid();
1311 
1312   appendToGlobalCtors(*M, F, 0);
1313 }
1314