1 //===-- EHScopeStack.h - Stack for cleanup IR generation --------*- C++ -*-===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // These classes should be the minimum interface required for other parts of 10 // CodeGen to emit cleanups. The implementation is in CGCleanup.cpp and other 11 // implemenentation details that are not widely needed are in CGCleanup.h. 12 // 13 //===----------------------------------------------------------------------===// 14 15 #ifndef LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H 16 #define LLVM_CLANG_LIB_CODEGEN_EHSCOPESTACK_H 17 18 #include "clang/Basic/LLVM.h" 19 #include "llvm/ADT/STLExtras.h" 20 #include "llvm/ADT/SmallVector.h" 21 #include "llvm/IR/BasicBlock.h" 22 #include "llvm/IR/Instructions.h" 23 #include "llvm/IR/Value.h" 24 25 namespace clang { 26 namespace CodeGen { 27 28 class CodeGenFunction; 29 30 /// A branch fixup. These are required when emitting a goto to a 31 /// label which hasn't been emitted yet. The goto is optimistically 32 /// emitted as a branch to the basic block for the label, and (if it 33 /// occurs in a scope with non-trivial cleanups) a fixup is added to 34 /// the innermost cleanup. When a (normal) cleanup is popped, any 35 /// unresolved fixups in that scope are threaded through the cleanup. 36 struct BranchFixup { 37 /// The block containing the terminator which needs to be modified 38 /// into a switch if this fixup is resolved into the current scope. 39 /// If null, LatestBranch points directly to the destination. 40 llvm::BasicBlock *OptimisticBranchBlock; 41 42 /// The ultimate destination of the branch. 43 /// 44 /// This can be set to null to indicate that this fixup was 45 /// successfully resolved. 
46 llvm::BasicBlock *Destination; 47 48 /// The destination index value. 49 unsigned DestinationIndex; 50 51 /// The initial branch of the fixup. 52 llvm::BranchInst *InitialBranch; 53 }; 54 55 template <class T> struct InvariantValue { 56 typedef T type; 57 typedef T saved_type; 58 static bool needsSaving(type value) { return false; } 59 static saved_type save(CodeGenFunction &CGF, type value) { return value; } 60 static type restore(CodeGenFunction &CGF, saved_type value) { return value; } 61 }; 62 63 /// A metaprogramming class for ensuring that a value will dominate an 64 /// arbitrary position in a function. 65 template <class T> struct DominatingValue : InvariantValue<T> {}; 66 67 template <class T, bool mightBeInstruction = 68 std::is_base_of<llvm::Value, T>::value && 69 !std::is_base_of<llvm::Constant, T>::value && 70 !std::is_base_of<llvm::BasicBlock, T>::value> 71 struct DominatingPointer; 72 template <class T> struct DominatingPointer<T,false> : InvariantValue<T*> {}; 73 // template <class T> struct DominatingPointer<T,true> at end of file 74 75 template <class T> struct DominatingValue<T*> : DominatingPointer<T> {}; 76 77 enum CleanupKind : unsigned { 78 /// Denotes a cleanup that should run when a scope is exited using exceptional 79 /// control flow (a throw statement leading to stack unwinding, ). 80 EHCleanup = 0x1, 81 82 /// Denotes a cleanup that should run when a scope is exited using normal 83 /// control flow (falling off the end of the scope, return, goto, ...). 84 NormalCleanup = 0x2, 85 86 NormalAndEHCleanup = EHCleanup | NormalCleanup, 87 88 LifetimeMarker = 0x8, 89 NormalEHLifetimeMarker = LifetimeMarker | NormalAndEHCleanup, 90 91 // FakeUse needs to be recognized as a special cleanup similar to lifetime 92 // markers chiefly to be ignored in most contexts. 93 FakeUse = 0x10, 94 NormalFakeUse = FakeUse | NormalCleanup, 95 }; 96 97 /// A stack of scopes which respond to exceptions, including cleanups 98 /// and catch blocks. 
class EHScopeStack {
public:
  /* Should switch to alignof(uint64_t) instead of 8, when EHCleanupScope can */
  enum { ScopeStackAlignment = 8 };

  /// A saved depth on the scope stack. This is necessary because
  /// pushing scopes onto the stack invalidates iterators.
  class stable_iterator {
    friend class EHScopeStack;

    /// Offset from StartOfData to EndOfBuffer.
    ptrdiff_t Size;

    stable_iterator(ptrdiff_t Size) : Size(Size) {}

  public:
    static stable_iterator invalid() { return stable_iterator(-1); }
    stable_iterator() : Size(-1) {}

    bool isValid() const { return Size >= 0; }

    /// Returns true if this scope encloses I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool encloses(stable_iterator I) const { return Size <= I.Size; }

    /// Returns true if this scope strictly encloses I: that is,
    /// if it encloses I and is not I.
    /// Returns false if I is invalid.
    /// This scope must be valid.
    bool strictlyEncloses(stable_iterator I) const { return Size < I.Size; }

    friend bool operator==(stable_iterator A, stable_iterator B) {
      return A.Size == B.Size;
    }
    friend bool operator!=(stable_iterator A, stable_iterator B) {
      return A.Size != B.Size;
    }
  };

  /// Information for lazily generating a cleanup. Subclasses must be
  /// POD-like: cleanups will not be destructed, and they will be
  /// allocated on the cleanup stack and freely copied and moved
  /// around.
  ///
  /// Cleanup implementations should generally be declared in an
  /// anonymous namespace.
  class Cleanup {
    // Anchor the construction vtable.
    virtual void anchor();

  protected:
    ~Cleanup() = default;

  public:
    Cleanup(const Cleanup &) = default;
    // Note: the move constructor intentionally copies nothing; cleanups
    // are POD-like (see the class comment above).
    Cleanup(Cleanup &&) {}

    // The copy and move assignment operators are defined as deleted pending
    // further motivation.
    Cleanup &operator=(const Cleanup &) = delete;
    Cleanup &operator=(Cleanup &&) = delete;

    Cleanup() = default;

    /// Returns true if this cleanup can be skipped when the scope is
    /// exited via a return. Subclasses may override.
    virtual bool isRedundantBeforeReturn() { return false; }

    /// Generation flags passed to Emit().
    class Flags {
      enum {
        F_IsForEH = 0x1,
        F_IsNormalCleanupKind = 0x2,
        F_IsEHCleanupKind = 0x4,
        F_HasExitSwitch = 0x8,
      };
      unsigned flags = 0;

    public:
      Flags() = default;

      /// isForEH - true if the current emission is for an EH cleanup.
      bool isForEHCleanup() const { return flags & F_IsForEH; }
      bool isForNormalCleanup() const { return !isForEHCleanup(); }
      void setIsForEHCleanup() { flags |= F_IsForEH; }

      /// true if the cleanup was pushed as a normal cleanup.
      bool isNormalCleanupKind() const { return flags & F_IsNormalCleanupKind; }
      void setIsNormalCleanupKind() { flags |= F_IsNormalCleanupKind; }

      /// isEHCleanupKind - true if the cleanup was pushed as an EH
      /// cleanup.
      bool isEHCleanupKind() const { return flags & F_IsEHCleanupKind; }
      void setIsEHCleanupKind() { flags |= F_IsEHCleanupKind; }

      bool hasExitSwitch() const { return flags & F_HasExitSwitch; }
      void setHasExitSwitch() { flags |= F_HasExitSwitch; }
    };

    /// Emit the cleanup. For normal cleanups, this is run in the
    /// same EH context as when the cleanup was pushed, i.e. the
    /// immediately-enclosing context of the cleanup scope. For
    /// EH cleanups, this is run in a terminate context.
    ///
    /// \param flags cleanup kind.
    virtual void Emit(CodeGenFunction &CGF, Flags flags) = 0;
  };

  /// ConditionalCleanup stores the saved form of its parameters,
  /// then restores them and performs the cleanup.
  template <class T, class... As>
  class ConditionalCleanup final : public Cleanup {
    typedef std::tuple<typename DominatingValue<As>::saved_type...> SavedTuple;
    SavedTuple Saved;

    /// Rebuild a T from the saved parameter tuple.
    template <std::size_t... Is>
    T restore(CodeGenFunction &CGF, std::index_sequence<Is...>) {
      // It's important that the restores are emitted in order. The braced init
      // list guarantees that.
      return T{DominatingValue<As>::restore(CGF, std::get<Is>(Saved))...};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      restore(CGF, std::index_sequence_for<As...>()).Emit(CGF, flags);
    }

  public:
    ConditionalCleanup(typename DominatingValue<As>::saved_type... A)
        : Saved(A...) {}

    ConditionalCleanup(SavedTuple Tuple) : Saved(std::move(Tuple)) {}
  };

private:
  // The implementation for this class is in CGException.h and
  // CGException.cpp; the definition is here because it's used as a
  // member of CodeGenFunction.

  /// The start of the scope-stack buffer, i.e. the allocated pointer
  /// for the buffer. All of these pointers are either simultaneously
  /// null or simultaneously valid.
  char *StartOfBuffer;

  /// The end of the buffer.
  char *EndOfBuffer;

  /// The first valid entry in the buffer.
  char *StartOfData;

  /// The innermost normal cleanup on the stack.
  stable_iterator InnermostNormalCleanup;

  /// The innermost EH scope on the stack.
  stable_iterator InnermostEHScope;

  /// The CGF this stack belongs to.
  CodeGenFunction* CGF;

  /// The current set of branch fixups. A branch fixup is a jump to
  /// an as-yet unemitted label, i.e. a label for which we don't yet
  /// know the EH stack depth. Whenever we pop a cleanup, we have
  /// to thread all the current branch fixups through it.
  ///
  /// Fixups are recorded as the Use of the respective branch or
  /// switch statement. The use points to the final destination.
  /// When popping out of a cleanup, these uses are threaded through
  /// the cleanup and adjusted to point to the new cleanup.
  ///
  /// Note that branches are allowed to jump into protected scopes
  /// in certain situations; e.g. the following code is legal:
  ///     struct A { ~A(); }; // trivial ctor, non-trivial dtor
  ///     goto foo;
  ///     A a;
  ///    foo:
  ///     bar();
  SmallVector<BranchFixup, 8> BranchFixups;

  char *allocate(size_t Size);
  void deallocate(size_t Size);

  /// Allocate DataSize bytes of cleanup storage on the stack;
  /// implemented out of line in CGCleanup.cpp.
  void *pushCleanup(CleanupKind K, size_t DataSize);

public:
  EHScopeStack()
      : StartOfBuffer(nullptr), EndOfBuffer(nullptr), StartOfData(nullptr),
        InnermostNormalCleanup(stable_end()), InnermostEHScope(stable_end()),
        CGF(nullptr) {}
  ~EHScopeStack() { delete[] StartOfBuffer; }

  EHScopeStack(const EHScopeStack &) = delete;
  EHScopeStack &operator=(const EHScopeStack &) = delete;

  /// Push a lazily-created cleanup on the stack.
  template <class T, class... As> void pushCleanup(CleanupKind Kind, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(A...);
    (void) Obj;
  }

  /// Push a lazily-created cleanup on the stack. Tuple version.
  template <class T, class... As>
  void pushCleanupTuple(CleanupKind Kind, std::tuple<As...> A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T));
    Cleanup *Obj = new (Buffer) T(std::move(A));
    (void) Obj;
  }

  // Feel free to add more variants of the following:

  /// Push a cleanup with non-constant storage requirements on the
  /// stack. The cleanup type must provide an additional static method:
  ///   static size_t getExtraSize(size_t);
  /// The argument to this method will be the value N, which will also
  /// be passed as the first argument to the constructor.
  ///
  /// The data stored in the extra storage must obey the same
  /// restrictions as normal cleanup member data.
  ///
  /// The pointer returned from this method is valid until the cleanup
  /// stack is modified.
  template <class T, class... As>
  T *pushCleanupWithExtra(CleanupKind Kind, size_t N, As... A) {
    static_assert(alignof(T) <= ScopeStackAlignment,
                  "Cleanup's alignment is too large.");
    void *Buffer = pushCleanup(Kind, sizeof(T) + T::getExtraSize(N));
    return new (Buffer) T(N, A...);
  }

  /// Push a byte-for-byte copy of an existing cleanup object of the
  /// given Size onto the stack.
  void pushCopyOfCleanup(CleanupKind Kind, const void *Cleanup, size_t Size) {
    void *Buffer = pushCleanup(Kind, Size);
    std::memcpy(Buffer, Cleanup, Size);
  }

  /// Associate this stack with a CodeGenFunction.
  void setCGF(CodeGenFunction *inCGF) { CGF = inCGF; }

  /// Pops a cleanup scope off the stack. This is private to CGCleanup.cpp.
  void popCleanup();

  /// Push a set of catch handlers on the stack. The catch is
  /// uninitialized and will need to have the given number of handlers
  /// set on it.
  class EHCatchScope *pushCatch(unsigned NumHandlers);

  /// Pops a catch scope off the stack. This is private to CGException.cpp.
  void popCatch();

  /// Push an exceptions filter on the stack.
  class EHFilterScope *pushFilter(unsigned NumFilters);

  /// Pops an exceptions filter off the stack.
  void popFilter();

  /// Push a terminate handler on the stack.
  void pushTerminate();

  /// Pops a terminate handler off the stack.
  void popTerminate();

  // Returns true iff the current scope is either empty or contains only
  // noop cleanups, i.e. lifetime markers and fake uses.
  bool containsOnlyNoopCleanups(stable_iterator Old) const;

  /// Determines whether the exception-scopes stack is empty.
  bool empty() const { return StartOfData == EndOfBuffer; }

  bool requiresLandingPad() const;

  /// Determines whether there are any normal cleanups on the stack.
  bool hasNormalCleanups() const {
    return InnermostNormalCleanup != stable_end();
  }

  /// Returns the innermost normal cleanup on the stack, or
  /// stable_end() if there are no normal cleanups.
  stable_iterator getInnermostNormalCleanup() const {
    return InnermostNormalCleanup;
  }
  stable_iterator getInnermostActiveNormalCleanup() const;

  /// Returns the innermost EH scope on the stack, or stable_end()
  /// if there are none.
  stable_iterator getInnermostEHScope() const {
    return InnermostEHScope;
  }

  /// An unstable reference to a scope-stack depth. Invalidated by
  /// pushes but not pops.
  class iterator;

  /// Returns an iterator pointing to the innermost EH scope.
  iterator begin() const;

  /// Returns an iterator pointing to the outermost EH scope.
  iterator end() const;

  /// Create a stable reference to the top of the EH stack. The
  /// returned reference is valid until that scope is popped off the
  /// stack.
  stable_iterator stable_begin() const {
    return stable_iterator(EndOfBuffer - StartOfData);
  }

  /// Create a stable reference to the bottom of the EH stack.
  static stable_iterator stable_end() {
    return stable_iterator(0);
  }

  /// Translates an iterator into a stable_iterator.
  stable_iterator stabilize(iterator it) const;

  /// Turn a stable reference to a scope depth into an unstable pointer
  /// to the EH stack.
  iterator find(stable_iterator save) const;

  /// Add a branch fixup to the current cleanup scope.
  BranchFixup &addBranchFixup() {
    assert(hasNormalCleanups() && "adding fixup in scope without cleanups");
    BranchFixups.push_back(BranchFixup());
    return BranchFixups.back();
  }

  /// Returns the number of pending branch fixups.
  unsigned getNumBranchFixups() const { return BranchFixups.size(); }

  /// Returns the I'th pending branch fixup; I must be in range.
  BranchFixup &getBranchFixup(unsigned I) {
    assert(I < getNumBranchFixups());
    return BranchFixups[I];
  }

  /// Pops lazily-removed fixups from the end of the list. This
  /// should only be called by procedures which have just popped a
  /// cleanup or resolved one or more fixups.
  void popNullFixups();

  /// Clears the branch-fixups list. This should only be called by
  /// ResolveAllBranchFixups.
  void clearFixups() { BranchFixups.clear(); }
};

} // namespace CodeGen
} // namespace clang

#endif