//===- ThreadSafetyTIL.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
#include "clang/Basic/LLVM.h"
#include "llvm/Support/Casting.h"
#include <cassert>
#include <cstddef>

using namespace clang;
using namespace threadSafety;
using namespace til;

StringRef til::getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return {};
}

StringRef til::getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_Cmp:      return "<=>";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return {};
}

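// Forces evaluation of this Future: computes the delayed value, caches it in
// Result, and marks the Future as done.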
SExpr* Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}

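// Adds Pred as a new predecessor of this block and appends a corresponding
// (initially null) argument slot to every Phi node, to be filled in by the
// caller.  Returns the index of the new predecessor.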
unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}

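// Reserves space for NumPreds predecessors, including room in the argument
// list of every Phi node, so that later calls to addPredecessor() do not
// reallocate.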
void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
const SExpr *til::getCanonicalVal(const SExpr *E) {
  while (true) {
    if (const auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    if (const auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *til::simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}

// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition.  If so, mark the Phi node as redundant.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg().
void til::simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // Eliminate infinite recursion -- assume that this node is not redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i = 1, n = Ph->values().size(); i < n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself.  Don't count.
    if (Ei != E0) {
      return;    // Status is already set to MultiVal.
    }
  }
  Ph->setStatus(Phi::PH_SingleVal);
}

// Renumbers the arguments and instructions to have unique, sequential IDs.
unsigned BasicBlock::renumberInstrs(unsigned ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

// Sorts the CFG's blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to its index in the array.  Sorting should start from the entry
// block, and ID should be the total number of blocks.
unsigned BasicBlock::topologicalSort(SimpleArray<BasicBlock *> &Blocks,
                                     unsigned ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);
  // Set the ID and update the block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}

// Performs a reverse topological traversal, starting from the exit block and
// following back-edges.  The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
unsigned BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock *> &Blocks,
                                          unsigned ID) {
  // Visited is assumed to have been set to true by topologicalSort().  This
  // pass resets it, so !Visited means the node has already been visited by
  // this pass (or was never reached by topologicalSort).
  if (!Visited) return ID;
  Visited = false;
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

// Computes the immediate dominator of the current block.  Assumes that all of
// its predecessors have already computed their dominators.  This is achieved
// by visiting the nodes in topological order.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges
    if (Pred->BlockID >= BlockID) continue;
    // If we don't have a candidate for the dominator yet, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
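    // Because BlockIDs are assigned in topological order, a block's dominator
    // always has a smaller BlockID; whichever of the two has the larger ID
    // cannot be the common ancestor, so we move it up its dominator chain.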
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}

// Computes the immediate post-dominator of the current block.  Assumes that
// all of its successors have already computed their post-dominators.  This is
// achieved by visiting the nodes in reverse topological order.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk back from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges
    if (Succ->BlockID <= BlockID) continue;
    // If we don't have a candidate for the post-dominator yet, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
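    // This mirrors the walk in computeDominator(): post-dominators always have
    // larger BlockIDs, so the block with the smaller ID is moved up its
    // post-dominator chain until the two meet.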
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}

// Renumber instructions in all blocks
void SCFG::renumberInstrs() {
  unsigned InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}

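// First of two passes that number the nodes of a dominator (or post-dominator)
// tree.  The caller visits children before parents, so each node's subtree
// size is complete before its parent's is updated; each node also records in
// NodeID its offset within its parent's subtree.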
static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set ID relative to the (as yet uncomputed) parent ID
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

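// Second pass: the caller visits parents before children, so each node can
// convert the offset stored by computeNodeSize() into an absolute ID by adding
// its parent's ID.  The result is a preorder numbering in which the IDs of a
// node's subtree form the contiguous range [NodeID, NodeID + SizeOfSubTree),
// so tree ancestry can be tested with two integer comparisons.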
static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;    // Fix NodeIDs relative to starting node.
  }
}

// Normalizes a CFG.  Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators.
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
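  // topologicalSort() fills Blocks from the back, decrementing ID once per
  // reachable block, so the value it returns equals the number of blocks that
  // were never reached from the entry.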
  unsigned NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks, shift everything down and delete them.
    for (unsigned I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      unsigned NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators.
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  unsigned NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

  // Compute post-dominators and compute the sizes of each node in the
  // dominator tree.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the sizes of each node in the post-dominator tree and assign IDs in
  // the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree.
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}
333