//===- ThreadSafetyTIL.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
#include "clang/Basic/LLVM.h"
#include "llvm/Support/Casting.h"
#include <cassert>
#include <cstddef>

using namespace clang;
using namespace threadSafety;
using namespace til;

StringRef til::getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return {};
}

StringRef til::getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_Cmp:      return "<=>";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return {};
}

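// Note: both opcode-to-string helpers return an empty StringRef for a value
// outside the enumeration (the trailing return also keeps compilers from
// warning about falling off the end of a non-void function); e.g.
// getBinaryOpcodeString(BOP_Add) yields "+".
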
SExpr* Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}

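// force() caches the result of compute() in Result and advances Status from
// FS_evaluating to FS_done.  It is not re-entrant: compute() must not force
// the same Future again, since Status only reaches FS_done after it returns.
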
unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
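  // Keep each Phi's argument list in sync with the predecessor list: add a
  // null placeholder for the new incoming edge, which the caller is expected
  // to fill in later.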
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}

void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
const SExpr *til::getCanonicalVal(const SExpr *E) {
  while (true) {
    if (const auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    if (const auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}

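// A worked example (hypothetical IR, for illustration only): given
//   let x = y;        // a Variable of kind VK_Let aliasing y
//   y = Phi(z, z)     // a Phi node already marked PH_SingleVal
// getCanonicalVal(x) traces x -> y -> z and returns z, stopping at the first
// expression that is neither a let-bound variable nor a single-valued Phi.
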
// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *til::simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}

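// For example, a trivial binding such as 'let x = 5' simplifies to the
// literal 5, while 'let x = a + b' is returned as the variable x, so that
// non-trivial definitions are not duplicated at every use.
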
// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition.  If so, mark the Phi node as redundant.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg().
void til::simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // Prevent infinite recursion: provisionally assume that this node is not
  // redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i = 1, n = Ph->values().size(); i < n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself.  Don't count.
    if (Ei != E0) {
      return;    // Status is already set to MultiVal.
    }
  }
  Ph->setStatus(Phi::PH_SingleVal);
}

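// For example, a loop-header Phi that was built incrementally as
//   Ph = Phi(a, a, Ph)
// has only the self-reference besides 'a', so it is marked PH_SingleVal, and
// subsequent canonicalization of Ph yields 'a'.
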
// Renumbers the arguments and instructions to have unique, sequential IDs.
unsigned BasicBlock::renumberInstrs(unsigned ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

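// E.g. a block with two Phi arguments and three instructions, renumbered with
// ID = 7, assigns 7-8 to the arguments, 9-11 to the instructions, and 12 to
// the terminator, and returns 13 as the next free ID.
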
// Sorts the CFG's blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to the index in the array.  Sorting should start from the entry
// block, and ID should be the total number of blocks.
unsigned BasicBlock::topologicalSort(SimpleArray<BasicBlock *> &Blocks,
                                     unsigned ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);
  // Set the ID and update the block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}

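// For the diamond CFG entry -> {b1, b2} -> exit, a call of
// entry->topologicalSort(Blocks, 4) fills Blocks so that every block is
// numbered before its successors: entry=0, exit=3, with b1 and b2 in between
// (their relative order depends on the successor visit order).  The return
// value is the number of unfilled slots at the front of the array, i.e. the
// number of unreachable blocks.
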
// Performs a reverse topological traversal, starting from the exit block and
// following back-edges.  The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
unsigned BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock *> &Blocks,
                                          unsigned ID) {
  // Visited is assumed to have been set to true by topologicalSort(); this
  // pass resets it to false, so !Visited means the block has already been
  // processed by this pass.
  if (!Visited) return ID;
  Visited = false;
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

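// After this pass, every block is ordered after its immediate dominator and
// before its immediate post-dominator, which is what lets
// computePostDominator() below be run in a single reverse pass over the
// Blocks array.
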
// Computes the immediate dominator of the current block.  Assumes that all of
// its predecessors have already computed their dominators.  This is achieved
// by visiting the nodes in topological order.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges.
    if (Pred->BlockID >= BlockID) continue;
    // If we don't yet have a candidate for the dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}

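// This is the two-finger "intersection" step used in the dominance algorithm
// of Cooper, Harvey, and Kennedy ("A Simple, Fast Dominance Algorithm"):
// whichever block has the larger BlockID is walked up its dominator chain,
// since topological IDs always decrease toward the entry block.  In the
// diamond CFG example, exit intersects b1 and b2 at entry, so entry is
// exit's immediate dominator.
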
// Computes the immediate post-dominator of the current block.  Assumes that
// all of its successors have already computed their post-dominators.  This is
// achieved by visiting the nodes in reverse topological order.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk back from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges.
    if (Succ->BlockID <= BlockID) continue;
    // If we don't yet have a candidate for the post-dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}

// Renumber instructions in all blocks.
void SCFG::renumberInstrs() {
  unsigned InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}

static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set the ID relative to the (as yet uncomputed) parent ID.
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;    // Fix NodeIDs relative to the starting node.
  }
}

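// Together these two passes assign preorder-style IDs: once both have run,
// the blocks in the subtree rooted at block A occupy the contiguous interval
// [A.NodeID, A.NodeID + A.SizeOfSubTree) of the (post-)dominator tree, so a
// dominance query needs only two integer comparisons.  A minimal sketch of
// the idea (this helper is illustrative, not part of this file):
//
//   bool dominates(const BasicBlock &A, const BasicBlock &B) {
//     return B.DominatorNode.NodeID >= A.DominatorNode.NodeID &&
//            B.DominatorNode.NodeID <
//                A.DominatorNode.NodeID + A.DominatorNode.SizeOfSubTree;
//   }
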
// Normalizes a CFG.  Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators.
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
  unsigned NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks, shift everything down and drop them.
    for (unsigned I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      unsigned NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators.
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  unsigned NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

  // Compute post-dominators and compute the sizes of each node in the
  // dominator tree.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the sizes of each node in the post-dominator tree and assign IDs
  // in the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree.
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}
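
// A note on the traversal directions above: a block's immediate dominator
// always has a smaller BlockID than the block itself, so iterating Blocks in
// reverse visits dominator-tree children before their parents (as
// computeNodeSize() requires), while iterating forward visits parents first
// (as computeNodeID() requires).  The post-dominator tree is oriented the
// opposite way, hence the mirrored loops.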