//===- ThreadSafetyTIL.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
#include "clang/Basic/LLVM.h"
#include <cassert>
#include <cstddef>

using namespace clang;
using namespace threadSafety;
using namespace til;

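// Return the printable spelling of a unary opcode (e.g. "-" for UOP_Minus).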
StringRef til::getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return {};
}

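// Return the printable spelling of a binary opcode (e.g. "<=" for BOP_Leq).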
StringRef til::getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_Cmp:      return "<=>";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return {};
}

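// Force evaluation of a lazy Future: the status is set to FS_evaluating while
// compute() runs, then to FS_done once the result has been cached in Result.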
SExpr* Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}

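// Add a new predecessor to this block and return its index in Predecessors.
// A corresponding (initially null) argument slot is appended to every Phi node
// in Args, so Phi arguments stay in sync with the predecessor list.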
unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}

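// Reserve arena space for NumPreds predecessors, and for the matching argument
// slots in any Phi nodes, to avoid repeated reallocation.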
void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
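// For example, a let-bound variable is replaced by its definition, and a Phi
// node already marked PH_SingleVal is replaced by its single value; the walk
// stops at the first expression that is neither.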
const SExpr *til::getCanonicalVal(const SExpr *E) {
  while (true) {
    if (const auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    if (const auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *til::simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}

// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition. If so, mark the Phi node as redundant.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg().
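// For example, a Phi node whose arguments all canonicalize to the same value
// (ignoring arguments that refer back to the Phi node itself) is marked
// PH_SingleVal; otherwise it remains PH_MultiVal.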
void til::simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // eliminate infinite recursion -- assume that this node is not redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i = 1, n = Ph->values().size(); i < n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself. Don't count.
    if (Ei != E0) {
      return;    // Status is already set to MultiVal.
    }
  }
  Ph->setStatus(Phi::PH_SingleVal);
}

// Renumbers the arguments and instructions to have unique, sequential IDs.
unsigned BasicBlock::renumberInstrs(unsigned ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

// Sorts the CFG's blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to the index in the array. Sorting should start from the entry
// block, and ID should be the total number of blocks.
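// For example, on a diamond CFG Entry -> {A, B} -> Exit with ID initially 4,
// one possible numbering is Entry=0, B=1, A=2, Exit=3; the relative order of
// A and B depends on the order of Entry's successors.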
unsigned BasicBlock::topologicalSort(SimpleArray<BasicBlock *> &Blocks,
                                     unsigned ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);
  // Set ID and update block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}

// Performs a reverse topological traversal, starting from the exit block and
// following back-edges. The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
unsigned BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock *> &Blocks,
                                          unsigned ID) {
  // Visited is assumed to have been set by topologicalSort. This pass
  // assumes !Visited means that we've already visited this node before.
  if (!Visited) return ID;
  Visited = false;
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

// Computes the immediate dominator of the current block. Assumes that all of
// its predecessors have already computed their dominators. This is achieved
// by visiting the nodes in topological order.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges.
    if (Pred->BlockID >= BlockID) continue;
    // If we don't have a candidate for dominator yet, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}

// Computes the immediate post-dominator of the current block. Assumes that all
// of its successors have already computed their post-dominators. This is
// achieved by visiting the nodes in reverse topological order.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk back from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges.
    if (Succ->BlockID <= BlockID) continue;
    // If we don't have a candidate for post-dominator yet, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}

// Renumber instructions in all blocks.
void SCFG::renumberInstrs() {
  unsigned InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}

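// Helper for computeNormalForm(). Accumulates subtree sizes in the dominator
// or post-dominator tree selected by the member pointer TN: each node's NodeID
// is set to its offset within its parent's subtree, and the parent's
// SizeOfSubTree grows by the size of this node's subtree.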
static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set ID relative to the (as yet uncomputed) parent ID.
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

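// Helper for computeNormalForm(). Converts the parent-relative NodeIDs
// assigned by computeNodeSize() into absolute IDs by adding the parent's
// already-final NodeID.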
static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;  // Fix NodeIDs relative to starting node.
  }
}

// Normalizes a CFG. Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators.
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
  unsigned NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks, shift everything down and delete them.
    for (unsigned I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      unsigned NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators.
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  unsigned NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

  // Compute post-dominators and compute the sizes of each node in the
  // dominator tree.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the sizes of each node in the post-dominator tree and assign IDs
  // in the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree.
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}