//===-- EHScopeStack.h - Stack for cleanup IR generation --------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// These classes should be the minimum interface required for other parts of
// CodeGen to emit cleanups. The implementation is in CGCleanup.cpp; other
// implementation details that are not widely needed are in CGCleanup.h.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H
#define LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H

#include "clang/Basic/LLVM.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Value.h"

namespace clang {
namespace CodeGen {

class CodeGenFunction;

/// A branch fixup. These are required when emitting a goto to a
/// label which hasn't been emitted yet. The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup. When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, InitialBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  unsigned DestinationIndex;

  /// The initial branch of the fixup.
  llvm::BranchInst *InitialBranch;
};

template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  static bool needsSaving(type value) { return false; }
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};

/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.
template <class T> struct DominatingValue : InvariantValue<T> {};

template <class T, bool mightBeInstruction =
            std::is_base_of<llvm::Value, T>::value &&
            !std::is_base_of<llvm::Constant, T>::value &&
            !std::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
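
// A DominatingValue<T> specialization supplies a saved_type representation,
// needsSaving() to decide whether a value must be spilled, and save()/restore()
// to carry the value across a control-flow gap. A minimal illustrative use
// (sketch only; V and CGF are assumed to be in scope):
//
//   DominatingValue<llvm::Value *>::saved_type Saved =
//       DominatingValue<llvm::Value *>::save(CGF, V);       // before the gap
//   llvm::Value *Restored =
//       DominatingValue<llvm::Value *>::restore(CGF, Saved); // after the gap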

enum CleanupKind : unsigned {
  /// Denotes a cleanup that should run when a scope is exited using exceptional
  /// control flow (a throw statement leading to stack unwinding).
  EHCleanup = 0x1,

  /// Denotes a cleanup that should run when a scope is exited using normal
  /// control flow (falling off the end of the scope, return, goto, ...).
  NormalCleanup = 0x2,

  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  LifetimeMarker = 0x8,
  NormalEHLifetimeMarker = LifetimeMarker | NormalAndEHCleanup,
};

/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /* Should switch to alignof(uint64_t) instead of 8, when EHCleanupScope can */
  enum { ScopeStackAlignment = 8 };

  /// A saved depth on the scope stack. This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
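  ///
  /// A minimal usage sketch (illustrative only; EHStack names a
  /// CodeGenFunction's EHScopeStack member):
  /// \code
  ///   EHScopeStack::stable_iterator Depth = EHStack.stable_begin();
  ///   // ... pushing cleanups does not invalidate Depth ...
  ///   assert(Depth.encloses(EHStack.stable_begin()));
  /// \endcode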
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup. Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
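  ///
  /// A minimal sketch of a cleanup and how it would be pushed (the names
  /// here are purely illustrative, not existing cleanups):
  /// \code
  ///   namespace {
  ///   struct CallMyRelease final : EHScopeStack::Cleanup {
  ///     llvm::Value *Ptr;
  ///     CallMyRelease(llvm::Value *Ptr) : Ptr(Ptr) {}
  ///     void Emit(CodeGenFunction &CGF, Flags flags) override {
  ///       // ... emit the IR that releases Ptr ...
  ///     }
  ///   };
  ///   } // namespace
  ///
  ///   // In the emitter of the enclosing scope:
  ///   //   EHStack.pushCleanup<CallMyRelease>(NormalAndEHCleanup, Ptr);
  /// \endcode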
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();

  protected:
    ~Cleanup() = default;

  public:
    Cleanup(const Cleanup &) = default;
    Cleanup(Cleanup &&) {}

    // The copy and move assignment operators are defined as deleted pending
    // further motivation.
    Cleanup &operator=(const Cleanup &) = delete;
    Cleanup &operator=(Cleanup &&) = delete;

    Cleanup() = default;

    virtual bool isRedundantBeforeReturn() { return false; }

    /// Generation flags.
    class Flags {
      enum {
        F_IsForEH = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind = 0x4,
        F_HasExitSwitch = 0x8,
      };
      unsigned flags = 0;

    public:
      Flags() = default;

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }

      bool hasExitSwitch() const { return flags & F_HasExitSwitch; }
      void setHasExitSwitch() { flags |= F_HasExitSwitch; }
    };

    /// Emit the cleanup. For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope. For
    /// EH cleanups, this is run in a terminate context.
    ///
    // \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };

  /// ConditionalCleanup stores the saved form of its parameters,
  /// then restores them and performs the cleanup.
  template <class T, class... As>
  class ConditionalCleanup final : public Cleanup {
    typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
    SavedTuple Saved;

    template <std::size_t... Is>
    T restore(CodeGenFunction &CGF, std::index_sequence<Is...>) {
      // It's important that the restores are emitted in order. The braced init
      // list guarantees that.
      return T{DominatingValue<As>::restore(CGF, std::get<Is>(Saved))...};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      restore(CGF, std::index_sequence_for<As...>()).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup(typename DominatingValue<As>::saved_type... A)
        : Saved(A...) {}

    ConditionalCleanup(SavedTuple Tuple) : Saved(std::move(Tuple)) {}
  };

private:
  // The implementation for this class is in CGCleanup.h and
  // CGCleanup.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer. All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The CodeGenFunction this stack belongs to.
  CodeGenFunction *CGF;

  /// The current set of branch fixups. A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth. Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement. The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  char *allocate(size_t Size);
  void deallocate(size_t Size);

  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack()
      : StartOfBuffer(nullptr), EndOfBuffer(nullptr), StartOfData(nullptr),
        InnermostNormalCleanup(stable_end()), InnermostEHScope(stable_end()),
        CGF(nullptr) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  EHScopeStack(const EHScopeStack &) = delete;
  EHScopeStack &operator=(const EHScopeStack &) = delete;

  /// Push a lazily-created cleanup on the stack.
  template <class T, class... As> void pushCleanup(CleanupKind Kind, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(A...);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack. Tuple version.
  template <class T, class... As>
  void pushCleanupTuple(CleanupKind Kind, std::tuple<As...> A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(std::move(A));
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack. The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
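  ///
  /// A sketch of a conforming cleanup type (illustrative only):
  /// \code
  ///   struct DestroyTemporaries final : EHScopeStack::Cleanup {
  ///     size_t NumTemps;
  ///     // Extra storage for NumTemps pointers follows the object itself.
  ///     static size_t getExtraSize(size_t NumTemps) {
  ///       return NumTemps * sizeof(llvm::Value *);
  ///     }
  ///     DestroyTemporaries(size_t NumTemps) : NumTemps(NumTemps) {}
  ///     void Emit(CodeGenFunction &CGF, Flags flags) override {
  ///       // ... walk the trailing storage and emit the destructions ...
  ///     }
  ///   };
  ///
  ///   // DestroyTemporaries *DT =
  ///   //     EHStack.pushCleanupWithExtra<DestroyTemporaries>(NormalCleanup,
  ///   //                                                      NumTemps);
  /// \endcode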
  template <class T, class... As>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, A...);
  }

  void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) {
    void *Buffer = pushCleanup(Kind, Size);
    std::memcpy(Buffer, Cleanup, Size);
  }

  void setCGF(CodeGenFunction *inCGF) { CGF = inCGF; }

  /// Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack. The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack. This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  // Returns true iff the current scope is either empty or contains only
  // lifetime markers, i.e. no real cleanup code
  bool containsOnlyLifetimeMarkers(stable_iterator Old) const;

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const;

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }

  /// An unstable reference to a scope-stack depth. Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack. The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list. This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list. This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};

} // namespace CodeGen
} // namespace clang

#endif