//===-- CGCleanup.h - Classes for cleanups IR generation --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// These classes support the generation of LLVM IR for cleanups.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H
#define LLVM_CLANG_LIB_CODEGEN_CGCLEANUP_H

#include "EHScopeStack.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"

namespace llvm {
class BasicBlock;
class Value;
class ConstantInt;
class AllocaInst;
}

namespace clang {
namespace CodeGen {

/// A protected scope for zero-cost EH handling.
///
/// This is the common base of all entries on the EH scope stack; the
/// concrete kind is recorded in the shared bitfield word (see the union
/// below) and queried via getKind() / the classof() helpers on the
/// derived classes.
class EHScope {
  /// The landing-pad block for this scope, created lazily and cached
  /// here; null until set via setCachedLandingPad().
  llvm::BasicBlock *CachedLandingPad;

  /// The EH dispatch block for this scope, likewise lazily created and
  /// cached; null until set via setCachedEHDispatchBlock().
  llvm::BasicBlock *CachedEHDispatchBlock;

  /// The next enclosing scope on the EH scope stack.
  EHScopeStack::stable_iterator EnclosingEHScope;

  class CommonBitFields {
    friend class EHScope;
    unsigned Kind : 2;
  };
  // Number of bits used by CommonBitFields; each per-kind bitfield class
  // below reserves this many anonymous bits so its own fields do not
  // overlap the Kind field when overlaid in the union.
  enum { NumCommonBits = 2 };

protected:
  class CatchBitFields {
    friend class EHCatchScope;
    unsigned : NumCommonBits;

    unsigned NumHandlers : 32 - NumCommonBits;
  };

  class CleanupBitFields {
    friend class EHCleanupScope;
    unsigned : NumCommonBits;

    /// Whether this cleanup needs to be run along normal edges.
    unsigned IsNormalCleanup : 1;

    /// Whether this cleanup needs to be run along exception edges.
    unsigned IsEHCleanup : 1;

    /// Whether this cleanup is currently active.
    unsigned IsActive : 1;

    /// Whether the normal cleanup should test the activation flag.
    unsigned TestFlagInNormalCleanup : 1;

    /// Whether the EH cleanup should test the activation flag.
    unsigned TestFlagInEHCleanup : 1;

    /// The amount of extra storage needed by the Cleanup.
    /// Always a multiple of the scope-stack alignment.
    /// 12 bits; EHCleanupScope's constructor asserts the requested size
    /// round-trips through this field without overflow.
    unsigned CleanupSize : 12;

    /// The number of fixups required by enclosing scopes (not including
    /// this one).  If this is the top cleanup scope, all the fixups
    /// from this index onwards belong to this scope.
    unsigned FixupDepth : 32 - 17 - NumCommonBits; // currently 13
  };

  class FilterBitFields {
    friend class EHFilterScope;
    unsigned : NumCommonBits;

    unsigned NumFilters : 32 - NumCommonBits;
  };

  // All kinds share one 32-bit word: the low NumCommonBits always hold
  // Kind, and each derived class interprets the remaining bits through
  // its own member of this union.
  union {
    CommonBitFields CommonBits;
    CatchBitFields CatchBits;
    CleanupBitFields CleanupBits;
    FilterBitFields FilterBits;
  };

public:
  enum Kind { Cleanup, Catch, Terminate, Filter };

  EHScope(Kind kind, EHScopeStack::stable_iterator enclosingEHScope)
    : CachedLandingPad(nullptr), CachedEHDispatchBlock(nullptr),
      EnclosingEHScope(enclosingEHScope) {
    CommonBits.Kind = kind;
  }

  Kind getKind() const { return static_cast<Kind>(CommonBits.Kind); }

  llvm::BasicBlock *getCachedLandingPad() const {
    return CachedLandingPad;
  }

  void setCachedLandingPad(llvm::BasicBlock *block) {
    CachedLandingPad = block;
  }

  llvm::BasicBlock *getCachedEHDispatchBlock() const {
    return CachedEHDispatchBlock;
  }

  void setCachedEHDispatchBlock(llvm::BasicBlock *block) {
    CachedEHDispatchBlock = block;
  }

  /// Returns true if the cached EH dispatch block exists and has users,
  /// i.e. some exceptional edge actually branches to this scope.
  bool hasEHBranches() const {
    if (llvm::BasicBlock *block = getCachedEHDispatchBlock())
      return !block->use_empty();
    return false;
  }

  EHScopeStack::stable_iterator getEnclosingEHScope() const {
    return EnclosingEHScope;
  }
};

/// A scope which attempts to handle some, possibly all, types of
/// exceptions.
///
/// Objective C \@finally blocks are represented using a cleanup scope
/// after the catch scope.
class EHCatchScope : public EHScope {
  // In effect, we have a flexible array member
  //   Handler Handlers[0];
  // But that's only standard in C99, not C++, so we have to do
  // annoying pointer arithmetic instead.

public:
  struct Handler {
    /// A type info value, or null (C++ null, not an LLVM null pointer)
    /// for a catch-all.
    llvm::Constant *Type;

    /// The catch handler for this type.
    llvm::BasicBlock *Block;

    bool isCatchAll() const { return Type == nullptr; }
  };

private:
  friend class EHScopeStack;

  // The Handler array lives immediately after this object in the scope
  // stack's buffer; getSizeForNumHandlers() accounts for that storage.
  Handler *getHandlers() {
    return reinterpret_cast<Handler*>(this+1);
  }

  const Handler *getHandlers() const {
    return reinterpret_cast<const Handler*>(this+1);
  }

public:
  /// Total allocation size for a catch scope with N handlers, including
  /// the trailing Handler array.
  static size_t getSizeForNumHandlers(unsigned N) {
    return sizeof(EHCatchScope) + N * sizeof(Handler);
  }

  EHCatchScope(unsigned numHandlers,
               EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Catch, enclosingEHScope) {
    CatchBits.NumHandlers = numHandlers;
  }

  unsigned getNumHandlers() const {
    return CatchBits.NumHandlers;
  }

  /// Install a catch-all handler (null type info) at index I.
  void setCatchAllHandler(unsigned I, llvm::BasicBlock *Block) {
    setHandler(I, /*catchall*/ nullptr, Block);
  }

  void setHandler(unsigned I, llvm::Constant *Type, llvm::BasicBlock *Block) {
    assert(I < getNumHandlers());
    getHandlers()[I].Type = Type;
    getHandlers()[I].Block = Block;
  }

  const Handler &getHandler(unsigned I) const {
    assert(I < getNumHandlers());
    return getHandlers()[I];
  }

  // Clear all handler blocks.
  // FIXME: it's better to always call clearHandlerBlocks in DTOR and have a
  // 'takeHandler' or some such function which removes ownership from the
  // EHCatchScope object if the handlers should live longer than EHCatchScope.
  void clearHandlerBlocks() {
    for (unsigned I = 0, N = getNumHandlers(); I != N; ++I)
      delete getHandler(I).Block;
  }

  typedef const Handler *iterator;
  iterator begin() const { return getHandlers(); }
  iterator end() const { return getHandlers() + getNumHandlers(); }

  static bool classof(const EHScope *Scope) {
    return Scope->getKind() == Catch;
  }
};

/// A cleanup scope which generates the cleanup blocks lazily.
class EHCleanupScope : public EHScope {
  /// The nearest normal cleanup scope enclosing this one.
  EHScopeStack::stable_iterator EnclosingNormal;

  /// The nearest EH scope enclosing this one.
  /// NOTE(review): this member appears unused here — it is never
  /// initialized or read in this file (the constructor forwards the
  /// enclosing EH scope to the EHScope base instead).  Candidate for
  /// removal; confirm against CGCleanup.cpp before touching it.
  EHScopeStack::stable_iterator EnclosingEH;

  /// The dual entry/exit block along the normal edge. This is lazily
  /// created if needed before the cleanup is popped.
  llvm::BasicBlock *NormalBlock;

  /// An optional i1 variable indicating whether this cleanup has been
  /// activated yet.
  llvm::AllocaInst *ActiveFlag;

  /// Extra information required for cleanups that have resolved
  /// branches through them. This has to be allocated on the side
  /// because everything on the cleanup stack has to be trivially
  /// movable.
  struct ExtInfo {
    /// The destinations of normal branch-afters and branch-throughs.
    llvm::SmallPtrSet<llvm::BasicBlock*, 4> Branches;

    /// Normal branch-afters.
    SmallVector<std::pair<llvm::BasicBlock*,llvm::ConstantInt*>, 4>
      BranchAfters;
  };
  // Allocated lazily on first use; mutable so the const accessor below
  // can create it on demand.  Freed by Destroy(), not a destructor.
  mutable struct ExtInfo *ExtInfo;

  struct ExtInfo &getExtInfo() {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

  const struct ExtInfo &getExtInfo() const {
    if (!ExtInfo) ExtInfo = new struct ExtInfo();
    return *ExtInfo;
  }

public:
  /// Gets the size required for a lazy cleanup scope with the given
  /// cleanup-data requirements.
258 static size_t getSizeForCleanupSize(size_t Size) { 259 return sizeof(EHCleanupScope) + Size; 260 } 261 262 size_t getAllocatedSize() const { 263 return sizeof(EHCleanupScope) + CleanupBits.CleanupSize; 264 } 265 266 EHCleanupScope(bool isNormal, bool isEH, bool isActive, 267 unsigned cleanupSize, unsigned fixupDepth, 268 EHScopeStack::stable_iterator enclosingNormal, 269 EHScopeStack::stable_iterator enclosingEH) 270 : EHScope(EHScope::Cleanup, enclosingEH), EnclosingNormal(enclosingNormal), 271 NormalBlock(nullptr), ActiveFlag(nullptr), ExtInfo(nullptr) { 272 CleanupBits.IsNormalCleanup = isNormal; 273 CleanupBits.IsEHCleanup = isEH; 274 CleanupBits.IsActive = isActive; 275 CleanupBits.TestFlagInNormalCleanup = false; 276 CleanupBits.TestFlagInEHCleanup = false; 277 CleanupBits.CleanupSize = cleanupSize; 278 CleanupBits.FixupDepth = fixupDepth; 279 280 assert(CleanupBits.CleanupSize == cleanupSize && "cleanup size overflow"); 281 } 282 283 void Destroy() { 284 delete ExtInfo; 285 } 286 // Objects of EHCleanupScope are not destructed. Use Destroy(). 
287 ~EHCleanupScope() LLVM_DELETED_FUNCTION; 288 289 bool isNormalCleanup() const { return CleanupBits.IsNormalCleanup; } 290 llvm::BasicBlock *getNormalBlock() const { return NormalBlock; } 291 void setNormalBlock(llvm::BasicBlock *BB) { NormalBlock = BB; } 292 293 bool isEHCleanup() const { return CleanupBits.IsEHCleanup; } 294 llvm::BasicBlock *getEHBlock() const { return getCachedEHDispatchBlock(); } 295 void setEHBlock(llvm::BasicBlock *BB) { setCachedEHDispatchBlock(BB); } 296 297 bool isActive() const { return CleanupBits.IsActive; } 298 void setActive(bool A) { CleanupBits.IsActive = A; } 299 300 llvm::AllocaInst *getActiveFlag() const { return ActiveFlag; } 301 void setActiveFlag(llvm::AllocaInst *Var) { ActiveFlag = Var; } 302 303 void setTestFlagInNormalCleanup() { 304 CleanupBits.TestFlagInNormalCleanup = true; 305 } 306 bool shouldTestFlagInNormalCleanup() const { 307 return CleanupBits.TestFlagInNormalCleanup; 308 } 309 310 void setTestFlagInEHCleanup() { 311 CleanupBits.TestFlagInEHCleanup = true; 312 } 313 bool shouldTestFlagInEHCleanup() const { 314 return CleanupBits.TestFlagInEHCleanup; 315 } 316 317 unsigned getFixupDepth() const { return CleanupBits.FixupDepth; } 318 EHScopeStack::stable_iterator getEnclosingNormalCleanup() const { 319 return EnclosingNormal; 320 } 321 322 size_t getCleanupSize() const { return CleanupBits.CleanupSize; } 323 void *getCleanupBuffer() { return this + 1; } 324 325 EHScopeStack::Cleanup *getCleanup() { 326 return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer()); 327 } 328 329 /// True if this cleanup scope has any branch-afters or branch-throughs. 330 bool hasBranches() const { return ExtInfo && !ExtInfo->Branches.empty(); } 331 332 /// Add a branch-after to this cleanup scope. A branch-after is a 333 /// branch from a point protected by this (normal) cleanup to a 334 /// point in the normal cleanup scope immediately containing it. 
335 /// For example, 336 /// for (;;) { A a; break; } 337 /// contains a branch-after. 338 /// 339 /// Branch-afters each have their own destination out of the 340 /// cleanup, guaranteed distinct from anything else threaded through 341 /// it. Therefore branch-afters usually force a switch after the 342 /// cleanup. 343 void addBranchAfter(llvm::ConstantInt *Index, 344 llvm::BasicBlock *Block) { 345 struct ExtInfo &ExtInfo = getExtInfo(); 346 if (ExtInfo.Branches.insert(Block).second) 347 ExtInfo.BranchAfters.push_back(std::make_pair(Block, Index)); 348 } 349 350 /// Return the number of unique branch-afters on this scope. 351 unsigned getNumBranchAfters() const { 352 return ExtInfo ? ExtInfo->BranchAfters.size() : 0; 353 } 354 355 llvm::BasicBlock *getBranchAfterBlock(unsigned I) const { 356 assert(I < getNumBranchAfters()); 357 return ExtInfo->BranchAfters[I].first; 358 } 359 360 llvm::ConstantInt *getBranchAfterIndex(unsigned I) const { 361 assert(I < getNumBranchAfters()); 362 return ExtInfo->BranchAfters[I].second; 363 } 364 365 /// Add a branch-through to this cleanup scope. A branch-through is 366 /// a branch from a scope protected by this (normal) cleanup to an 367 /// enclosing scope other than the immediately-enclosing normal 368 /// cleanup scope. 369 /// 370 /// In the following example, the branch through B's scope is a 371 /// branch-through, while the branch through A's scope is a 372 /// branch-after: 373 /// for (;;) { A a; B b; break; } 374 /// 375 /// All branch-throughs have a common destination out of the 376 /// cleanup, one possibly shared with the fall-through. Therefore 377 /// branch-throughs usually don't force a switch after the cleanup. 378 /// 379 /// \return true if the branch-through was new to this scope 380 bool addBranchThrough(llvm::BasicBlock *Block) { 381 return getExtInfo().Branches.insert(Block).second; 382 } 383 384 /// Determines if this cleanup scope has any branch throughs. 
385 bool hasBranchThroughs() const { 386 if (!ExtInfo) return false; 387 return (ExtInfo->BranchAfters.size() != ExtInfo->Branches.size()); 388 } 389 390 static bool classof(const EHScope *Scope) { 391 return (Scope->getKind() == Cleanup); 392 } 393 }; 394 395 /// An exceptions scope which filters exceptions thrown through it. 396 /// Only exceptions matching the filter types will be permitted to be 397 /// thrown. 398 /// 399 /// This is used to implement C++ exception specifications. 400 class EHFilterScope : public EHScope { 401 // Essentially ends in a flexible array member: 402 // llvm::Value *FilterTypes[0]; 403 404 llvm::Value **getFilters() { 405 return reinterpret_cast<llvm::Value**>(this+1); 406 } 407 408 llvm::Value * const *getFilters() const { 409 return reinterpret_cast<llvm::Value* const *>(this+1); 410 } 411 412 public: 413 EHFilterScope(unsigned numFilters) 414 : EHScope(Filter, EHScopeStack::stable_end()) { 415 FilterBits.NumFilters = numFilters; 416 } 417 418 static size_t getSizeForNumFilters(unsigned numFilters) { 419 return sizeof(EHFilterScope) + numFilters * sizeof(llvm::Value*); 420 } 421 422 unsigned getNumFilters() const { return FilterBits.NumFilters; } 423 424 void setFilter(unsigned i, llvm::Value *filterValue) { 425 assert(i < getNumFilters()); 426 getFilters()[i] = filterValue; 427 } 428 429 llvm::Value *getFilter(unsigned i) const { 430 assert(i < getNumFilters()); 431 return getFilters()[i]; 432 } 433 434 static bool classof(const EHScope *scope) { 435 return scope->getKind() == Filter; 436 } 437 }; 438 439 /// An exceptions scope which calls std::terminate if any exception 440 /// reaches it. 
class EHTerminateScope : public EHScope {
public:
  EHTerminateScope(EHScopeStack::stable_iterator enclosingEHScope)
    : EHScope(Terminate, enclosingEHScope) {}
  static size_t getSize() { return sizeof(EHTerminateScope); }

  static bool classof(const EHScope *scope) {
    return scope->getKind() == Terminate;
  }
};

/// A non-stable pointer into the scope stack.
///
/// Wraps a raw pointer into the stack's character buffer; advancing it
/// steps over one scope at a time by adding that scope's allocated
/// size.  Unlike stable_iterator, it is invalidated when the buffer is
/// reallocated or scopes are pushed/popped.
class EHScopeStack::iterator {
  char *Ptr;

  friend class EHScopeStack;
  explicit iterator(char *Ptr) : Ptr(Ptr) {}

public:
  iterator() : Ptr(nullptr) {}

  EHScope *get() const {
    return reinterpret_cast<EHScope*>(Ptr);
  }

  EHScope *operator->() const { return get(); }
  EHScope &operator*() const { return *get(); }

  /// Advance to the next (enclosing) scope by skipping the current
  /// scope's full allocation, including any trailing variable-length
  /// storage (handlers, filters, or cleanup data).
  iterator &operator++() {
    switch (get()->getKind()) {
    case EHScope::Catch:
      Ptr += EHCatchScope::getSizeForNumHandlers(
          static_cast<const EHCatchScope*>(get())->getNumHandlers());
      break;

    case EHScope::Filter:
      Ptr += EHFilterScope::getSizeForNumFilters(
          static_cast<const EHFilterScope*>(get())->getNumFilters());
      break;

    case EHScope::Cleanup:
      Ptr += static_cast<const EHCleanupScope*>(get())
        ->getAllocatedSize();
      break;

    case EHScope::Terminate:
      Ptr += EHTerminateScope::getSize();
      break;
    }

    return *this;
  }

  /// Returns an iterator to the scope after this one, leaving *this
  /// unchanged.
  iterator next() {
    iterator copy = *this;
    ++copy;
    return copy;
  }

  iterator operator++(int) {
    iterator copy = *this;
    operator++();
    return copy;
  }

  // Inner (more recently pushed) scopes live at lower addresses, so a
  // scope at a higher address encloses one at a lower address.
  bool encloses(iterator other) const { return Ptr >= other.Ptr; }
  bool strictlyEncloses(iterator other) const { return Ptr > other.Ptr; }

  bool operator==(iterator other) const { return Ptr == other.Ptr; }
  bool operator!=(iterator other) const { return Ptr != other.Ptr; }
};

inline EHScopeStack::iterator EHScopeStack::begin() const {
  return iterator(StartOfData);
}

inline EHScopeStack::iterator EHScopeStack::end() const {
  return iterator(EndOfBuffer);
}

/// Pop the innermost scope, which must be a catch scope, adjusting
/// StartOfData past its allocation (including the handler array).
inline void EHScopeStack::popCatch() {
  assert(!empty() && "popping exception stack when not empty");

  EHCatchScope &scope = cast<EHCatchScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  StartOfData += EHCatchScope::getSizeForNumHandlers(scope.getNumHandlers());
}

/// Pop the innermost scope, which must be a terminate scope.
inline void EHScopeStack::popTerminate() {
  assert(!empty() && "popping exception stack when not empty");

  EHTerminateScope &scope = cast<EHTerminateScope>(*begin());
  InnermostEHScope = scope.getEnclosingEHScope();
  StartOfData += EHTerminateScope::getSize();
}

// A stable_iterator records a scope as a byte offset from EndOfBuffer,
// which survives buffer reallocation; find() and stabilize() convert
// between that offset form and the raw-pointer iterator form.
inline EHScopeStack::iterator EHScopeStack::find(stable_iterator sp) const {
  assert(sp.isValid() && "finding invalid savepoint");
  assert(sp.Size <= stable_begin().Size && "finding savepoint after pop");
  return iterator(EndOfBuffer - sp.Size);
}

inline EHScopeStack::stable_iterator
EHScopeStack::stabilize(iterator ir) const {
  assert(StartOfData <= ir.Ptr && ir.Ptr <= EndOfBuffer);
  return stable_iterator(EndOfBuffer - ir.Ptr);
}

} // namespace CodeGen
} // namespace clang

#endif