xref: /freebsd/contrib/llvm-project/clang/lib/CodeGen/EHScopeStack.h (revision 0b57cec536236d46e3dba9bd041533462f33dbb7)
1*0b57cec5SDimitry Andric //===-- EHScopeStack.h - Stack for cleanup IR generation --------*- C++ -*-===//
2*0b57cec5SDimitry Andric //
3*0b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4*0b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
5*0b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6*0b57cec5SDimitry Andric //
7*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
8*0b57cec5SDimitry Andric //
9*0b57cec5SDimitry Andric // These classes should be the minimum interface required for other parts of
10*0b57cec5SDimitry Andric // CodeGen to emit cleanups.  The implementation is in CGCleanup.cpp and other
// implementation details that are not widely needed are in CGCleanup.h.
12*0b57cec5SDimitry Andric //
13*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
14*0b57cec5SDimitry Andric 
15*0b57cec5SDimitry Andric #ifndef LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H
16*0b57cec5SDimitry Andric #define LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H
17*0b57cec5SDimitry Andric 
18*0b57cec5SDimitry Andric #include "clang/Basic/LLVM.h"
19*0b57cec5SDimitry Andric #include "llvm/ADT/STLExtras.h"
20*0b57cec5SDimitry Andric #include "llvm/ADT/SmallVector.h"
21*0b57cec5SDimitry Andric #include "llvm/IR/BasicBlock.h"
22*0b57cec5SDimitry Andric #include "llvm/IR/Instructions.h"
23*0b57cec5SDimitry Andric #include "llvm/IR/Value.h"
24*0b57cec5SDimitry Andric 
25*0b57cec5SDimitry Andric namespace clang {
26*0b57cec5SDimitry Andric namespace CodeGen {
27*0b57cec5SDimitry Andric 
28*0b57cec5SDimitry Andric class CodeGenFunction;
29*0b57cec5SDimitry Andric 
/// A branch fixup.  These are required when emitting a goto to a
/// label which hasn't been emitted yet.  The goto is optimistically
/// emitted as a branch to the basic block for the label, and (if it
/// occurs in a scope with non-trivial cleanups) a fixup is added to
/// the innermost cleanup.  When a (normal) cleanup is popped, any
/// unresolved fixups in that scope are threaded through the cleanup.
struct BranchFixup {
  /// The block containing the terminator which needs to be modified
  /// into a switch if this fixup is resolved into the current scope.
  /// If null, LatestBranch points directly to the destination.
  llvm::BasicBlock *OptimisticBranchBlock;

  /// The ultimate destination of the branch.
  ///
  /// This can be set to null to indicate that this fixup was
  /// successfully resolved.
  llvm::BasicBlock *Destination;

  /// The destination index value.
  /// NOTE(review): presumably the case value used when the optimistic
  /// branch is rewritten into a switch — confirm against CGCleanup.cpp.
  unsigned DestinationIndex;

  /// The initial branch of the fixup, i.e. the optimistically-emitted
  /// branch instruction that may later be redirected.
  llvm::BranchInst *InitialBranch;
};
54*0b57cec5SDimitry Andric 
/// A save/restore policy for values that are invariant across emission:
/// the value is its own saved form, so saving and restoring are no-ops.
template <class T> struct InvariantValue {
  typedef T type;
  typedef T saved_type;
  /// Invariant values never need an explicit save.
  static bool needsSaving(type value) { return false; }
  /// Saving is the identity function.
  static saved_type save(CodeGenFunction &CGF, type value) { return value; }
  /// Restoring is the identity function.
  static type restore(CodeGenFunction &CGF, saved_type value) { return value; }
};
62*0b57cec5SDimitry Andric 
/// A metaprogramming class for ensuring that a value will dominate an
/// arbitrary position in a function.  By default values are treated as
/// invariant (no save/restore needed).
template <class T> struct DominatingValue : InvariantValue<T> {};

// A pointee type "might be an instruction" if it derives from llvm::Value
// but is neither a Constant nor a BasicBlock; only such pointers can name
// SSA values whose dominance depends on where they were emitted.
template <class T, bool mightBeInstruction =
            std::is_base_of<llvm::Value, T>::value &&
            !std::is_base_of<llvm::Constant, T>::value &&
            !std::is_base_of<llvm::BasicBlock, T>::value>
struct DominatingPointer;
// Pointers that cannot be instructions are invariant.
template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {};
// template <class T> struct DominatingPointer<T,true> at end of file

// Pointer values dispatch to the DominatingPointer policy above.
template <class T> struct DominatingValue<T*> : DominatingPointer<T> {};
76*0b57cec5SDimitry Andric 
/// Flags describing when a pushed cleanup applies and how it starts out.
enum CleanupKind : unsigned {
  /// Denotes a cleanup that should run when a scope is exited using
  /// exceptional control flow (a throw statement leading to stack
  /// unwinding, ...).
  EHCleanup = 0x1,

  /// Denotes a cleanup that should run when a scope is exited using normal
  /// control flow (falling off the end of the scope, return, goto, ...).
  NormalCleanup = 0x2,

  NormalAndEHCleanup = EHCleanup | NormalCleanup,

  /// Denotes a cleanup that starts out deactivated; it must be activated
  /// before it will be emitted.
  InactiveCleanup = 0x4,
  InactiveEHCleanup = EHCleanup | InactiveCleanup,
  InactiveNormalCleanup = NormalCleanup | InactiveCleanup,
  InactiveNormalAndEHCleanup = NormalAndEHCleanup | InactiveCleanup,

  /// Denotes a lifetime-marker cleanup: it contains no real cleanup code
  /// (see EHScopeStack::containsOnlyLifetimeMarkers).
  LifetimeMarker = 0x8,
  NormalEHLifetimeMarker = LifetimeMarker | NormalAndEHCleanup,
};
96*0b57cec5SDimitry Andric 
/// A stack of scopes which respond to exceptions, including cleanups
/// and catch blocks.
class EHScopeStack {
public:
  /* Should switch to alignof(uint64_t) instead of 8, when EHCleanupScope can */
  enum { ScopeStackAlignment = 8 };

  /// A saved depth on the scope stack.  This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.  Deeper (inner) scopes
    /// have larger offsets; the bottom of the stack (stable_end()) is
    /// offset 0, and -1 marks an invalid iterator.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    /// True unless this iterator was default-constructed or produced
    /// by invalid().
    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup.  Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();

  protected:
    // Non-virtual and protected: cleanups are never destroyed through
    // this interface (per the class comment, they are not destructed
    // at all).
    ~Cleanup() = default;

  public:
    Cleanup(const Cleanup &) = default;
    // Intentionally a no-op: Cleanup itself carries no state to move.
    Cleanup(Cleanup &&) {}
    Cleanup() = default;

    /// Generation flags.
    class Flags {
      enum {
        F_IsForEH             = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind     = 0x4
      };
      unsigned flags;

    public:
      Flags() : flags(0) {}

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      /// isNormalCleanupKind - true if the cleanup was pushed as a
      /// normal cleanup.
      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }
    };


    /// Emit the cleanup.  For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope.  For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };

  /// ConditionalCleanup stores the saved form of its parameters,
  /// then restores them and performs the cleanup.
  template <class T, class... As>
  class ConditionalCleanup final : public Cleanup {
    typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
    SavedTuple Saved;

    /// Reconstitute a T from the saved operands.
    template <std::size_t... Is>
    T restore(CodeGenFunction &CGF, llvm::index_sequence<Is...>) {
      // It's important that the restores are emitted in order. The braced init
      // list guarantees that.
      return T{DominatingValue<As>::restore(CGF, std::get<Is>(Saved))...};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      restore(CGF, llvm::index_sequence_for<As...>()).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup(typename DominatingValue<As>::saved_type... A)
        : Saved(A...) {}

    ConditionalCleanup(SavedTuple Tuple) : Saved(std::move(Tuple)) {}
  };

private:
  // The implementation for this class is in CGCleanup.h and
  // CGCleanup.cpp (see the file header); the definition is here
  // because it's used as a member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer.  All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The current set of branch fixups.  A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth.  Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement.  The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations;  e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  /// Allocate Size bytes on the stack; out-of-line (CGCleanup.cpp).
  char *allocate(size_t Size);
  /// Release the top Size bytes of the stack.
  void deallocate(size_t Size);

  /// Allocate and link a raw cleanup scope of DataSize bytes; callers
  /// placement-new the Cleanup object into the returned storage.
  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack() : StartOfBuffer(nullptr), EndOfBuffer(nullptr),
                   StartOfData(nullptr), InnermostNormalCleanup(stable_end()),
                   InnermostEHScope(stable_end()) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  /// Push a lazily-created cleanup on the stack.
  template <class T, class... As> void pushCleanup(CleanupKind Kind, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(A...);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack. Tuple version.
  template <class T, class... As>
  void pushCleanupTuple(CleanupKind Kind, std::tuple<As...> A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(std::move(A));
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack.  The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class... As>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, A...);
  }

  /// Push a byte-for-byte copy of an existing cleanup object.
  void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) {
    void *Buffer = pushCleanup(Kind, Size);
    std::memcpy(Buffer, Cleanup, Size);
  }

  /// Pops a cleanup scope off the stack.  This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack.  The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack.  This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  // Returns true iff the current scope is either empty or contains only
  // lifetime markers, i.e. no real cleanup code
  bool containsOnlyLifetimeMarkers(stable_iterator Old) const;

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const;

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }


  /// An unstable reference to a scope-stack depth.  Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack.  The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into a unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Add a branch fixup to the current cleanup scope.
  /// The returned reference is valid only until the fixup list is
  /// next modified.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  unsigned getNumBranchFixups() const { return BranchFixups.size(); }
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list.  This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list.  This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};
416*0b57cec5SDimitry Andric 
417*0b57cec5SDimitry Andric } // namespace CodeGen
418*0b57cec5SDimitry Andric } // namespace clang
419*0b57cec5SDimitry Andric 
420*0b57cec5SDimitry Andric #endif
421