//===- ThreadSafetyTIL.cpp -------------------------------------*- C++ --*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT in the llvm repository for details.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
#include "clang/Analysis/Analyses/ThreadSafetyTraverse.h"

namespace clang {
namespace threadSafety {
namespace til {


StringRef getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return "";
}


StringRef getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return "";
}

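// Force evaluation of this Future, caching and returning the computed result.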
SExpr* Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}

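// Adds Pred as a predecessor of this block and returns its index in the
// predecessor list. Each Phi node in Args gets a matching (initially null)
// argument slot for the new edge.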
unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
  for (SExpr *E : Args) {
    if (Phi* Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}

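// Reserves space for NumPreds predecessors, including the corresponding
// argument slots in each Phi node.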
void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (SExpr *E : Args) {
    if (Phi* Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
const SExpr *getCanonicalVal(const SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    if (const Phi *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}

// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition. If so, mark the Phi node as redundant.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg().
void simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // Eliminate infinite recursion -- assume that this node is not redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i = 1, n = Ph->values().size(); i < n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself.  Don't count.
    if (Ei != E0) {
      return;    // Status is already set to MultiVal.
    }
  }
  Ph->setStatus(Phi::PH_SingleVal);
}

// Renumbers the arguments and instructions to have unique, sequential IDs.
int BasicBlock::renumberInstrs(int ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

// Sorts the CFG's blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to the index in the array. Sorting should start from the entry
// block, and ID should be the total number of blocks.
int BasicBlock::topologicalSort(SimpleArray<BasicBlock*>& Blocks, int ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);
  // Set ID and update block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}

// Performs a reverse topological traversal, starting from the exit block and
// following back-edges. The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
int BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock*>& Blocks, int ID) {
  // Visited is assumed to have been set by the topologicalSort. This pass
  // assumes !Visited means that we've visited this node before.
  if (!Visited) return ID;
  Visited = false;
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

// Computes the immediate dominator of the current block. Assumes that all of
// its predecessors have already computed their dominators. This is achieved
// by visiting the nodes in topological order.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges.
    if (Pred->BlockID >= BlockID) continue;
    // If we don't yet have a candidate for dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}

// Computes the immediate post-dominator of the current block. Assumes that all
// of its successors have already computed their post-dominators. This is
// achieved by visiting the nodes in reverse topological order.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk back from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges.
    if (Succ->BlockID <= BlockID) continue;
    // If we don't yet have a candidate for post-dominator, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}


// Renumber instructions in all blocks.
void SCFG::renumberInstrs() {
  int InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}

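// The two helpers below assign IDs within the dominator and post-dominator
// trees in two passes. computeNodeSize() must see children before parents: it
// records each node's offset within its parent (the parent's subtree size so
// far) and accumulates subtree sizes upward. computeNodeID() must then see
// parents before children: it adds the parent's final ID to that offset, so
// each node's subtree ends up occupying the contiguous ID range
// [NodeID, NodeID + SizeOfSubTree).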
static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set ID relative to the (as yet uncomputed) parent ID.
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;  // Fix NodeIDs relative to starting node.
  }
}


// Normalizes a CFG. Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators.
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
  int NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks, shift everything down and delete them.
    for (size_t I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      size_t NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators.
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  int NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

  // Compute post-dominators and compute the sizes of each node in the
  // dominator tree.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the sizes of each node in the post-dominator tree and assign IDs
  // in the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree.
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}


} // end namespace til
} // end namespace threadSafety
} // end namespace clang