//===-- WebAssemblyCFGStackify.cpp - CFG Stackification -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements a CFG stackification pass.
///
/// This pass inserts BLOCK, LOOP, and TRY markers to mark the start of scopes,
/// since scope boundaries serve as the labels for WebAssembly's control
/// transfers.
///
/// This is sufficient to convert arbitrary CFGs into a form that works on
/// WebAssembly, provided that all loops are single-entry.
///
/// In case we use exceptions, this pass also fixes mismatches in unwind
/// destinations created while transforming the CFG into wasm's structured
/// format.
///
//===----------------------------------------------------------------------===//

#include "WebAssembly.h"
#include "WebAssemblyExceptionInfo.h"
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/MC/MCAsmInfo.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-cfg-stackify"

STATISTIC(NumUnwindMismatches, "Number of EH pad unwind mismatches found");

namespace {
class WebAssemblyCFGStackify final : public MachineFunctionPass {
  StringRef getPassName() const override { return "WebAssembly CFG Stackify"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<MachineLoopInfo>();
    AU.addRequired<WebAssemblyExceptionInfo>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  // For each block whose label represents the end of a scope, record the block
  // which holds the beginning of the scope. This will allow us to quickly skip
  // over scoped regions when walking blocks.
  SmallVector<MachineBasicBlock *, 8> ScopeTops;

  // Placing markers.
  void placeMarkers(MachineFunction &MF);
  void placeBlockMarker(MachineBasicBlock &MBB);
  void placeLoopMarker(MachineBasicBlock &MBB);
  void placeTryMarker(MachineBasicBlock &MBB);
  void removeUnnecessaryInstrs(MachineFunction &MF);
  bool fixUnwindMismatches(MachineFunction &MF);
  void rewriteDepthImmediates(MachineFunction &MF);
  void fixEndsAtEndOfFunction(MachineFunction &MF);

  // For each BLOCK|LOOP|TRY, the corresponding END_(BLOCK|LOOP|TRY).
  DenseMap<const MachineInstr *, MachineInstr *> BeginToEnd;
  // For each END_(BLOCK|LOOP|TRY), the corresponding BLOCK|LOOP|TRY.
  DenseMap<const MachineInstr *, MachineInstr *> EndToBegin;
  // <TRY marker, EH pad> map
  DenseMap<const MachineInstr *, MachineBasicBlock *> TryToEHPad;
  // <EH pad, TRY marker> map
  DenseMap<const MachineBasicBlock *, MachineInstr *> EHPadToTry;

  // There can be an appendix block at the end of each function, shared for:
  // - creating a correct signature for fallthrough returns
  // - target for rethrows that need to unwind to the caller, but are trapped
  //   inside another try/catch
  MachineBasicBlock *AppendixBB = nullptr;
  MachineBasicBlock *getAppendixBlock(MachineFunction &MF) {
    if (!AppendixBB) {
      AppendixBB = MF.CreateMachineBasicBlock();
      // Give it a fake predecessor so that AsmPrinter prints its label.
      AppendixBB->addSuccessor(AppendixBB);
      MF.push_back(AppendixBB);
    }
    return AppendixBB;
  }

  // Helper functions to register / unregister scope information created by
  // marker instructions.
  void registerScope(MachineInstr *Begin, MachineInstr *End);
  void registerTryScope(MachineInstr *Begin, MachineInstr *End,
                        MachineBasicBlock *EHPad);
  void unregisterScope(MachineInstr *Begin);

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyCFGStackify() : MachineFunctionPass(ID) {}
  ~WebAssemblyCFGStackify() override { releaseMemory(); }
  void releaseMemory() override;
};
} // end anonymous namespace

char WebAssemblyCFGStackify::ID = 0;
INITIALIZE_PASS(WebAssemblyCFGStackify, DEBUG_TYPE,
                "Insert BLOCK/LOOP/TRY markers for WebAssembly scopes", false,
                false)

FunctionPass *llvm::createWebAssemblyCFGStackify() {
  return new WebAssemblyCFGStackify();
}

/// Test whether Pred has any terminators explicitly branching to MBB, as
/// opposed to falling through. Note that it's possible (e.g. in unoptimized
/// code) for a branch instruction to both branch to a block and fall through
/// to it, so we check the actual branch operands to see if there are any
/// explicit mentions.
static bool explicitlyBranchesTo(MachineBasicBlock *Pred,
                                 MachineBasicBlock *MBB) {
  for (MachineInstr &MI : Pred->terminators())
    for (MachineOperand &MO : MI.explicit_operands())
      if (MO.isMBB() && MO.getMBB() == MBB)
        return true;
  return false;
}

// Returns an iterator to the earliest position possible within the MBB,
// satisfying the restrictions given by BeforeSet and AfterSet. BeforeSet
// contains instructions that should go before the marker, and AfterSet
// contains ones that should go after the marker. In this function, AfterSet is
// only used for sanity checking.
static MachineBasicBlock::iterator
getEarliestInsertPos(MachineBasicBlock *MBB,
                     const SmallPtrSet<const MachineInstr *, 4> &BeforeSet,
                     const SmallPtrSet<const MachineInstr *, 4> &AfterSet) {
  auto InsertPos = MBB->end();
  while (InsertPos != MBB->begin()) {
    if (BeforeSet.count(&*std::prev(InsertPos))) {
#ifndef NDEBUG
      // Sanity check
      for (auto Pos = InsertPos, E = MBB->begin(); Pos != E; --Pos)
        assert(!AfterSet.count(&*std::prev(Pos)));
#endif
      break;
    }
    --InsertPos;
  }
  return InsertPos;
}

// Returns an iterator to the latest position possible within the MBB,
// satisfying the restrictions given by BeforeSet and AfterSet. BeforeSet
// contains instructions that should go before the marker, and AfterSet
// contains ones that should go after the marker. In this function, BeforeSet
// is only used for sanity checking.
static MachineBasicBlock::iterator
getLatestInsertPos(MachineBasicBlock *MBB,
                   const SmallPtrSet<const MachineInstr *, 4> &BeforeSet,
                   const SmallPtrSet<const MachineInstr *, 4> &AfterSet) {
  auto InsertPos = MBB->begin();
  while (InsertPos != MBB->end()) {
    if (AfterSet.count(&*InsertPos)) {
#ifndef NDEBUG
      // Sanity check
      for (auto Pos = InsertPos, E = MBB->end(); Pos != E; ++Pos)
        assert(!BeforeSet.count(&*Pos));
#endif
      break;
    }
    ++InsertPos;
  }
  return InsertPos;
}

void WebAssemblyCFGStackify::registerScope(MachineInstr *Begin,
                                           MachineInstr *End) {
  BeginToEnd[Begin] = End;
  EndToBegin[End] = Begin;
}

void WebAssemblyCFGStackify::registerTryScope(MachineInstr *Begin,
                                              MachineInstr *End,
                                              MachineBasicBlock *EHPad) {
  registerScope(Begin, End);
  TryToEHPad[Begin] = EHPad;
  EHPadToTry[EHPad] = Begin;
}

void WebAssemblyCFGStackify::unregisterScope(MachineInstr *Begin) {
  assert(BeginToEnd.count(Begin));
  MachineInstr *End = BeginToEnd[Begin];
  assert(EndToBegin.count(End));
  BeginToEnd.erase(Begin);
  EndToBegin.erase(End);
  MachineBasicBlock *EHPad = TryToEHPad.lookup(Begin);
  if (EHPad) {
    assert(EHPadToTry.count(EHPad));
    TryToEHPad.erase(Begin);
    EHPadToTry.erase(EHPad);
  }
}

/// Insert a BLOCK marker for branches to MBB (if needed).
// TODO Consider a more generalized way of handling block (and also loop and
// try) signatures when we implement the multi-value proposal later.
void WebAssemblyCFGStackify::placeBlockMarker(MachineBasicBlock &MBB) {
  assert(!MBB.isEHPad());
  MachineFunction &MF = *MBB.getParent();
  auto &MDT = getAnalysis<MachineDominatorTree>();
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();

  // First compute the nearest common dominator of all forward non-fallthrough
  // predecessors so that we minimize the time that the BLOCK is on the stack,
  // which reduces overall stack height.
  MachineBasicBlock *Header = nullptr;
  bool IsBranchedTo = false;
  bool IsBrOnExn = false;
  MachineInstr *BrOnExn = nullptr;
  int MBBNumber = MBB.getNumber();
  for (MachineBasicBlock *Pred : MBB.predecessors()) {
    if (Pred->getNumber() < MBBNumber) {
      Header = Header ? MDT.findNearestCommonDominator(Header, Pred) : Pred;
      if (explicitlyBranchesTo(Pred, &MBB)) {
        IsBranchedTo = true;
        if (Pred->getFirstTerminator()->getOpcode() == WebAssembly::BR_ON_EXN) {
          IsBrOnExn = true;
          assert(!BrOnExn && "There should be only one br_on_exn per block");
          BrOnExn = &*Pred->getFirstTerminator();
        }
      }
    }
  }
  if (!Header)
    return;
  if (!IsBranchedTo)
    return;

  assert(&MBB != &MF.front() && "Header blocks shouldn't have predecessors");
  MachineBasicBlock *LayoutPred = MBB.getPrevNode();

  // If the nearest common dominator is inside a more deeply nested context,
  // walk out to the nearest scope which isn't more deeply nested.
  for (MachineFunction::iterator I(LayoutPred), E(Header); I != E; --I) {
    if (MachineBasicBlock *ScopeTop = ScopeTops[I->getNumber()]) {
      if (ScopeTop->getNumber() > Header->getNumber()) {
        // Skip over an intervening scope.
        I = std::next(ScopeTop->getIterator());
      } else {
        // We found a scope level at an appropriate depth.
        Header = ScopeTop;
        break;
      }
    }
  }

  // Decide where in Header to put the BLOCK.

  // Instructions that should go before the BLOCK.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  // Instructions that should go after the BLOCK.
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : *Header) {
    // If there is a previously placed LOOP marker and the bottom block of the
    // loop is above MBB, it should be after the BLOCK, because the loop is
    // nested in this BLOCK. Otherwise it should be before the BLOCK.
    if (MI.getOpcode() == WebAssembly::LOOP) {
      auto *LoopBottom = BeginToEnd[&MI]->getParent()->getPrevNode();
      if (MBB.getNumber() > LoopBottom->getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

    // If there is a previously placed BLOCK/TRY marker and its corresponding
    // END marker is before the current BLOCK's END marker, that should be
    // placed after this BLOCK. Otherwise it should be placed before this BLOCK
    // marker.
    if (MI.getOpcode() == WebAssembly::BLOCK ||
        MI.getOpcode() == WebAssembly::TRY) {
      if (BeginToEnd[&MI]->getParent()->getNumber() <= MBB.getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

#ifndef NDEBUG
    // All END_(BLOCK|LOOP|TRY) markers should be before the BLOCK.
    if (MI.getOpcode() == WebAssembly::END_BLOCK ||
        MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY)
      BeforeSet.insert(&MI);
#endif

    // Terminators should go after the BLOCK.
    if (MI.isTerminator())
      AfterSet.insert(&MI);
  }

  // Local expression tree should go after the BLOCK.
  for (auto I = Header->getFirstTerminator(), E = Header->begin(); I != E;
       --I) {
    if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
      continue;
    if (WebAssembly::isChild(*std::prev(I), MFI))
      AfterSet.insert(&*std::prev(I));
    else
      break;
  }

  // Add the BLOCK.

  // 'br_on_exn' extracts the exnref object and pushes a variable number of
  // values depending on its tag. For a C++ exception, it's a single i32 value,
  // and the generated code will be in the form of:
  // block i32
  //   br_on_exn 0, $__cpp_exception
  //   rethrow
  // end_block
  WebAssembly::BlockType ReturnType = WebAssembly::BlockType::Void;
  if (IsBrOnExn) {
    const char *TagName = BrOnExn->getOperand(1).getSymbolName();
    if (std::strcmp(TagName, "__cpp_exception") != 0)
      llvm_unreachable("Only C++ exception is supported");
    ReturnType = WebAssembly::BlockType::I32;
  }

  auto InsertPos = getLatestInsertPos(Header, BeforeSet, AfterSet);
  MachineInstr *Begin =
      BuildMI(*Header, InsertPos, Header->findDebugLoc(InsertPos),
              TII.get(WebAssembly::BLOCK))
          .addImm(int64_t(ReturnType));

  // Decide where in MBB to put the END_BLOCK.
  BeforeSet.clear();
  AfterSet.clear();
  for (auto &MI : MBB) {
#ifndef NDEBUG
    // END_BLOCK should precede existing LOOP and TRY markers.
    if (MI.getOpcode() == WebAssembly::LOOP ||
        MI.getOpcode() == WebAssembly::TRY)
      AfterSet.insert(&MI);
#endif

    // If there is a previously placed END_LOOP marker and the header of the
    // loop is above this block's header, the END_LOOP should be placed after
    // the BLOCK, because the loop contains this block. Otherwise the END_LOOP
    // should be placed before the BLOCK. The same for END_TRY.
    if (MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY) {
      if (EndToBegin[&MI]->getParent()->getNumber() >= Header->getNumber())
        BeforeSet.insert(&MI);
#ifndef NDEBUG
      else
        AfterSet.insert(&MI);
#endif
    }
  }

  // Mark the end of the block.
  InsertPos = getEarliestInsertPos(&MBB, BeforeSet, AfterSet);
  MachineInstr *End = BuildMI(MBB, InsertPos, MBB.findPrevDebugLoc(InsertPos),
                              TII.get(WebAssembly::END_BLOCK));
  registerScope(Begin, End);

  // Track the farthest-spanning scope that ends at this point.
  int Number = MBB.getNumber();
  if (!ScopeTops[Number] ||
      ScopeTops[Number]->getNumber() > Header->getNumber())
    ScopeTops[Number] = Header;
}

/// Insert a LOOP marker for a loop starting at MBB (if it's a loop header).
void WebAssemblyCFGStackify::placeLoopMarker(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  const auto &MLI = getAnalysis<MachineLoopInfo>();
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();

  MachineLoop *Loop = MLI.getLoopFor(&MBB);
  if (!Loop || Loop->getHeader() != &MBB)
    return;

  // The operand of a LOOP is the first block after the loop. If the loop is
  // the bottom of the function, insert a dummy block at the end.
  MachineBasicBlock *Bottom = WebAssembly::getBottom(Loop);
  auto Iter = std::next(Bottom->getIterator());
  if (Iter == MF.end()) {
    getAppendixBlock(MF);
    Iter = std::next(Bottom->getIterator());
  }
  MachineBasicBlock *AfterLoop = &*Iter;

  // Decide where in MBB to put the LOOP.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : MBB) {
    // The LOOP marker should be after any existing loop that ends here.
    // Otherwise we assume the instruction belongs to the loop.
    if (MI.getOpcode() == WebAssembly::END_LOOP)
      BeforeSet.insert(&MI);
#ifndef NDEBUG
    else
      AfterSet.insert(&MI);
#endif
  }

  // Mark the beginning of the loop.
  auto InsertPos = getEarliestInsertPos(&MBB, BeforeSet, AfterSet);
  MachineInstr *Begin = BuildMI(MBB, InsertPos, MBB.findDebugLoc(InsertPos),
                                TII.get(WebAssembly::LOOP))
                            .addImm(int64_t(WebAssembly::BlockType::Void));

  // Decide where in AfterLoop to put the END_LOOP.
  BeforeSet.clear();
  AfterSet.clear();
#ifndef NDEBUG
  for (const auto &MI : MBB)
    // Existing END_LOOP markers belong to parent loops of this loop.
    if (MI.getOpcode() == WebAssembly::END_LOOP)
      AfterSet.insert(&MI);
#endif

  // Mark the end of the loop (using an arbitrary debug location that branched
  // to the loop end as its location).
  InsertPos = getEarliestInsertPos(AfterLoop, BeforeSet, AfterSet);
  DebugLoc EndDL = AfterLoop->pred_empty()
                       ? DebugLoc()
                       : (*AfterLoop->pred_rbegin())->findBranchDebugLoc();
  MachineInstr *End =
      BuildMI(*AfterLoop, InsertPos, EndDL, TII.get(WebAssembly::END_LOOP));
  registerScope(Begin, End);

  assert((!ScopeTops[AfterLoop->getNumber()] ||
          ScopeTops[AfterLoop->getNumber()]->getNumber() < MBB.getNumber()) &&
         "With block sorting the outermost loop for a block should be first.");
  if (!ScopeTops[AfterLoop->getNumber()])
    ScopeTops[AfterLoop->getNumber()] = &MBB;
}

void WebAssemblyCFGStackify::placeTryMarker(MachineBasicBlock &MBB) {
  assert(MBB.isEHPad());
  MachineFunction &MF = *MBB.getParent();
  auto &MDT = getAnalysis<MachineDominatorTree>();
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto &WEI = getAnalysis<WebAssemblyExceptionInfo>();
  const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();

  // Compute the nearest common dominator of all unwind predecessors.
  MachineBasicBlock *Header = nullptr;
  int MBBNumber = MBB.getNumber();
  for (auto *Pred : MBB.predecessors()) {
    if (Pred->getNumber() < MBBNumber) {
      Header = Header ? MDT.findNearestCommonDominator(Header, Pred) : Pred;
      assert(!explicitlyBranchesTo(Pred, &MBB) &&
             "Explicit branch to an EH pad!");
    }
  }
  if (!Header)
    return;

  // If this try is at the bottom of the function, insert a dummy block at the
  // end.
  WebAssemblyException *WE = WEI.getExceptionFor(&MBB);
  assert(WE);
  MachineBasicBlock *Bottom = WebAssembly::getBottom(WE);

  auto Iter = std::next(Bottom->getIterator());
  if (Iter == MF.end()) {
    getAppendixBlock(MF);
    Iter = std::next(Bottom->getIterator());
  }
  MachineBasicBlock *Cont = &*Iter;

  assert(Cont != &MF.front());
  MachineBasicBlock *LayoutPred = Cont->getPrevNode();

  // If the nearest common dominator is inside a more deeply nested context,
  // walk out to the nearest scope which isn't more deeply nested.
  for (MachineFunction::iterator I(LayoutPred), E(Header); I != E; --I) {
    if (MachineBasicBlock *ScopeTop = ScopeTops[I->getNumber()]) {
      if (ScopeTop->getNumber() > Header->getNumber()) {
        // Skip over an intervening scope.
        I = std::next(ScopeTop->getIterator());
      } else {
        // We found a scope level at an appropriate depth.
        Header = ScopeTop;
        break;
      }
    }
  }

  // Decide where in Header to put the TRY.

  // Instructions that should go before the TRY.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  // Instructions that should go after the TRY.
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : *Header) {
    // If there is a previously placed LOOP marker and the bottom block of the
    // loop is above MBB, it should be after the TRY, because the loop is
    // nested in this TRY. Otherwise it should be before the TRY.
    if (MI.getOpcode() == WebAssembly::LOOP) {
      auto *LoopBottom = BeginToEnd[&MI]->getParent()->getPrevNode();
      if (MBB.getNumber() > LoopBottom->getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

    // All previously inserted BLOCK/TRY markers should be after the TRY
    // because they are all nested tries.
    if (MI.getOpcode() == WebAssembly::BLOCK ||
        MI.getOpcode() == WebAssembly::TRY)
      AfterSet.insert(&MI);

#ifndef NDEBUG
    // All END_(BLOCK/LOOP/TRY) markers should be before the TRY.
    if (MI.getOpcode() == WebAssembly::END_BLOCK ||
        MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY)
      BeforeSet.insert(&MI);
#endif

    // Terminators should go after the TRY.
    if (MI.isTerminator())
      AfterSet.insert(&MI);
  }

  // If Header unwinds to MBB (= Header contains 'invoke'), the try block
  // should contain the call within it. So the call should go after the TRY.
  // The exception is when the header's terminator is a rethrow instruction, in
  // which case that instruction, not a call instruction before it, is going to
  // throw.
  MachineInstr *ThrowingCall = nullptr;
  if (MBB.isPredecessor(Header)) {
    auto TermPos = Header->getFirstTerminator();
    if (TermPos == Header->end() ||
        TermPos->getOpcode() != WebAssembly::RETHROW) {
      for (auto &MI : reverse(*Header)) {
        if (MI.isCall()) {
          AfterSet.insert(&MI);
          ThrowingCall = &MI;
          // Possibly throwing calls are usually wrapped by EH_LABEL
          // instructions. We don't want to split them and the call.
          if (MI.getIterator() != Header->begin() &&
              std::prev(MI.getIterator())->isEHLabel()) {
            AfterSet.insert(&*std::prev(MI.getIterator()));
            ThrowingCall = &*std::prev(MI.getIterator());
          }
          break;
        }
      }
    }
  }

  // Local expression tree should go after the TRY.
  // For BLOCK placement, we start the search from the previous instruction of
  // a BB's terminator, but in TRY's case, we should start from the previous
  // instruction of a call that can throw, or an EH_LABEL that precedes the
  // call, because the return values of the call's previous instructions can be
  // stackified and consumed by the throwing call.
  auto SearchStartPt = ThrowingCall ? MachineBasicBlock::iterator(ThrowingCall)
                                    : Header->getFirstTerminator();
  for (auto I = SearchStartPt, E = Header->begin(); I != E; --I) {
    if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
      continue;
    if (WebAssembly::isChild(*std::prev(I), MFI))
      AfterSet.insert(&*std::prev(I));
    else
      break;
  }

  // Add the TRY.
  auto InsertPos = getLatestInsertPos(Header, BeforeSet, AfterSet);
  MachineInstr *Begin =
      BuildMI(*Header, InsertPos, Header->findDebugLoc(InsertPos),
              TII.get(WebAssembly::TRY))
          .addImm(int64_t(WebAssembly::BlockType::Void));

  // Decide where in Cont to put the END_TRY.
  BeforeSet.clear();
  AfterSet.clear();
  for (const auto &MI : *Cont) {
#ifndef NDEBUG
    // END_TRY should precede existing LOOP and BLOCK markers.
    if (MI.getOpcode() == WebAssembly::LOOP ||
        MI.getOpcode() == WebAssembly::BLOCK)
      AfterSet.insert(&MI);

    // All END_TRY markers placed earlier belong to exceptions that contain
    // this one.
    if (MI.getOpcode() == WebAssembly::END_TRY)
      AfterSet.insert(&MI);
#endif

    // If there is a previously placed END_LOOP marker and its header is after
    // where the TRY marker is, this loop is contained within the 'catch' part,
    // so the END_TRY marker should go after that. Otherwise, the whole
    // try-catch is contained within this loop, so the END_TRY should go before
    // that.
    if (MI.getOpcode() == WebAssembly::END_LOOP) {
      // For a LOOP to be after TRY, LOOP's BB should be after TRY's BB; if
      // they are in the same BB, LOOP is always before TRY.
      if (EndToBegin[&MI]->getParent()->getNumber() > Header->getNumber())
        BeforeSet.insert(&MI);
#ifndef NDEBUG
      else
        AfterSet.insert(&MI);
#endif
    }

    // It is not possible for an END_BLOCK to be already in this block.
  }

  // Mark the end of the TRY.
  InsertPos = getEarliestInsertPos(Cont, BeforeSet, AfterSet);
  MachineInstr *End =
      BuildMI(*Cont, InsertPos, Bottom->findBranchDebugLoc(),
              TII.get(WebAssembly::END_TRY));
  registerTryScope(Begin, End, &MBB);

  // Track the farthest-spanning scope that ends at this point. We create two
  // mappings: (BB with 'end_try' -> BB with 'try') and (BB with 'catch' -> BB
  // with 'try'). We need to create the 'catch' -> 'try' mapping here too
  // because markers should not span across 'catch'. For example, this should
  // not happen:
  //
  // try
  //   block     --|  (X)
  // catch         |
  //   end_block --|
  // end_try
  for (int Number : {Cont->getNumber(), MBB.getNumber()}) {
    if (!ScopeTops[Number] ||
        ScopeTops[Number]->getNumber() > Header->getNumber())
      ScopeTops[Number] = Header;
  }
}

void WebAssemblyCFGStackify::removeUnnecessaryInstrs(MachineFunction &MF) {
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();

  // When there is an unconditional branch right before a catch instruction and
  // it branches to the end of the end_try marker, we don't need the branch,
  // because if there is no exception, the control flow transfers to that point
  // anyway.
  // bb0:
  //   try
  //   ...
  //   br bb2      <- Not necessary
  // bb1:
  //   catch
  //   ...
  // bb2:
  //   end
  for (auto &MBB : MF) {
    if (!MBB.isEHPad())
      continue;

    MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
    SmallVector<MachineOperand, 4> Cond;
    MachineBasicBlock *EHPadLayoutPred = MBB.getPrevNode();
    MachineBasicBlock *Cont = BeginToEnd[EHPadToTry[&MBB]]->getParent();
    bool Analyzable = !TII.analyzeBranch(*EHPadLayoutPred, TBB, FBB, Cond);
    if (Analyzable && ((Cond.empty() && TBB && TBB == Cont) ||
                       (!Cond.empty() && FBB && FBB == Cont)))
      TII.removeBranch(*EHPadLayoutPred);
  }

  // When there are block / end_block markers that overlap with try / end_try
  // markers, and the block and try markers' return types are the same, the
  // block / end_block markers are not necessary, because try / end_try markers
  // also can serve as boundaries for branches.
  // block         <- Not necessary
  //   try
  //     ...
  //   catch
  //     ...
  //   end
  // end           <- Not necessary
  SmallVector<MachineInstr *, 32> ToDelete;
  for (auto &MBB : MF) {
    for (auto &MI : MBB) {
      if (MI.getOpcode() != WebAssembly::TRY)
        continue;

      MachineInstr *Try = &MI, *EndTry = BeginToEnd[Try];
      MachineBasicBlock *TryBB = Try->getParent();
      MachineBasicBlock *Cont = EndTry->getParent();
      int64_t RetType = Try->getOperand(0).getImm();
      for (auto B = Try->getIterator(), E = std::next(EndTry->getIterator());
           B != TryBB->begin() && E != Cont->end() &&
           std::prev(B)->getOpcode() == WebAssembly::BLOCK &&
           E->getOpcode() == WebAssembly::END_BLOCK &&
           std::prev(B)->getOperand(0).getImm() == RetType;
           --B, ++E) {
        ToDelete.push_back(&*std::prev(B));
        ToDelete.push_back(&*E);
      }
    }
  }
  for (auto *MI : ToDelete) {
    if (MI->getOpcode() == WebAssembly::BLOCK)
      unregisterScope(MI);
    MI->eraseFromParent();
  }
}

// When MBB is split into MBB and Split, we should unstackify defs in MBB that
// have their uses in Split.
static void unstackifyVRegsUsedInSplitBB(MachineBasicBlock &MBB,
                                         MachineBasicBlock &Split,
                                         WebAssemblyFunctionInfo &MFI,
                                         MachineRegisterInfo &MRI) {
  for (auto &MI : Split) {
    for (auto &MO : MI.explicit_uses()) {
      if (!MO.isReg() || Register::isPhysicalRegister(MO.getReg()))
        continue;
      if (MachineInstr *Def = MRI.getUniqueVRegDef(MO.getReg()))
        if (Def->getParent() == &MBB)
          MFI.unstackifyVReg(MO.getReg());
    }
  }
}

bool WebAssemblyCFGStackify::fixUnwindMismatches(MachineFunction &MF) {
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  MachineRegisterInfo &MRI = MF.getRegInfo();

  // Linearizing the control flow by placing TRY / END_TRY markers can create
  // mismatches in unwind destinations. There are two kinds of mismatches we
  // try to solve here.

  // 1. When an instruction may throw, but the EH pad it will unwind to can be
  //    different from the original CFG.
  //
  // Example: we have the following CFG:
  // bb0:
  //   call @foo (if it throws, unwind to bb2)
  // bb1:
  //   call @bar (if it throws, unwind to bb3)
  // bb2 (ehpad):
  //   catch
  //   ...
  // bb3 (ehpad):
  //   catch
  //   handler body
  //
  // And the CFG is sorted in this order. Then after placing TRY markers, it
  // will look like: (BB markers are omitted)
  // try $label1
  //   try
  //     call @foo
  //     call @bar (if it throws, unwind to bb3)
  //   catch       <- ehpad (bb2)
  //     ...
  //   end_try
  // catch         <- ehpad (bb3)
  //   handler body
  // end_try
  //
  // Now if bar() throws, it is going to end up in bb2, not bb3, where it is
  // supposed to end up. We solve this problem by
  // a. Split the target unwind EH pad (here bb3) so that the handler body is
  //    right after 'end_try', which means we extract the handler body out of
  //    the catch block. We do this because this handler body should be
  //    somewhere branchable from the inner scope.
  // b. Wrap the call that has an incorrect unwind destination ('call @bar'
  //    here) with a nested try/catch/end_try scope, and within the new catch
  //    block, branch to the handler body.
  // c. Place a branch after the newly inserted nested end_try so it can bypass
  //    the handler body, which is now outside of a catch block.
  //
  // The result will look like the following. (new: a) means this instruction
  // is
  // newly created in the process of doing 'a' above.
  //
  // block $label0                 (new: placeBlockMarker)
  //   try $label1
  //     try
  //       call @foo
  //       try                     (new: b)
  //         call @bar
  //       catch                   (new: b)
  //         local.set n / drop    (new: b)
  //         br $label1            (new: b)
  //       end_try                 (new: b)
  //     catch                     <- ehpad (bb2)
  //     end_try
  //     br $label0                (new: c)
  //   catch                       <- ehpad (bb3)
  //   end_try                     (hoisted: a)
  //   handler body
  // end_block                     (new: placeBlockMarker)
  //
  // Note that the new wrapping block/end_block will be generated later in
  // placeBlockMarker.
  //
  // TODO Currently local.sets and local.gets are generated to move the exnref
  // value created by catches. That's because we don't support yielding values
  // from a block in LLVM machine IR yet, even though it is supported by wasm.
  // Delete unnecessary local.get/local.sets once yielding values from a block
  // is supported. The full EH spec requires multi-value support to do this,
  // but for C++ we don't yet need it because we only throw a single i32.
  //
  // ---
  // 2. The same as 1, but in this case an instruction unwinds to a caller
  //    function and not another EH pad.
  //
  // Example: we have the following CFG:
  // bb0:
  //   call @foo (if it throws, unwind to bb2)
  // bb1:
  //   call @bar (if it throws, unwind to caller)
  // bb2 (ehpad):
  //   catch
  //   ...
  //
  // And the CFG is sorted in this order. Then after placing TRY markers, it
  // will look like:
  // try
  //   call @foo
  //   call @bar (if it throws, unwind to caller)
  // catch         <- ehpad (bb2)
  //   ...
  // end_try
  //
  // Now if bar() throws, it is going to end up in bb2, when it is supposed to
  // throw up to the caller. We solve this problem by
  // a. Create a new 'appendix' BB at the end of the function and put a single
  //    'rethrow' instruction (+ local.get) in there.
  // b. Wrap the call that has an incorrect unwind destination ('call @bar'
  //    here) with a nested try/catch/end_try scope, and within the new catch
  //    block, branch to the new appendix block.
  //
  // block $label0                 (new: placeBlockMarker)
  // try
  //   call @foo
  //   try                         (new: b)
  //     call @bar
  //   catch                       (new: b)
  //     local.set n               (new: b)
  //     br $label0                (new: b)
  //   end_try                     (new: b)
  // catch                         <- ehpad (bb2)
  //   ...
  // end_try
  // ...
  // end_block                     (new: placeBlockMarker)
  // local.get n                   (new: a)  <- appendix block
  // rethrow                       (new: a)
  //
  // In case there are multiple calls in a BB that may throw to the caller,
  // they can be wrapped together in one nested try scope. (In 1, this couldn't
  // happen, because the may-throwing instruction there had an unwind
  // destination, i.e., it was an invoke before, and there could be only one
  // invoke within a BB.)
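  //
  // A rough outline of the steps taken below to fix these mismatches:
  //  1. Gather calls whose unwind destination in the linearized code differs
  //     from the EH pad recorded in the original CFG (UnwindDestToTryRanges).
  //  2. Gather ranges of calls that should unwind to the caller but are
  //     currently nested inside some try scope.
  //  3. If needed, create the appendix BB containing a 'rethrow', which ranges
  //     that unwind to the caller will branch to.
  //  4. Split each mismatched unwind destination EH pad so that its handler
  //     body follows the hoisted 'end_try' and can be branched to.
  //  5. Wrap each recorded range in a nested try/catch whose catch branches to
  //     the correct destination.
  //  6. Renumber BBs, recompute ScopeTops and the dominator tree, and place
  //     BLOCK markers for the newly added branch destinations.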

  SmallVector<const MachineBasicBlock *, 8> EHPadStack;
  // Range of instructions to be wrapped in a new nested try/catch.
  using TryRange = std::pair<MachineInstr *, MachineInstr *>;
  // In original CFG, <unwind destination BB, a vector of try ranges>
  DenseMap<MachineBasicBlock *, SmallVector<TryRange, 4>> UnwindDestToTryRanges;
  // In new CFG, <destination to branch to, a vector of try ranges>
  DenseMap<MachineBasicBlock *, SmallVector<TryRange, 4>> BrDestToTryRanges;
  // In new CFG, <destination to branch to, register containing exnref>
  DenseMap<MachineBasicBlock *, unsigned> BrDestToExnReg;

  // Destinations for branches that will be newly added, for which new
  // BLOCK/END_BLOCK markers are necessary.
  SmallVector<MachineBasicBlock *, 8> BrDests;

  // Gather possibly throwing calls (i.e., previously invokes) whose current
  // unwind destination is not the same as in the original CFG.
  for (auto &MBB : reverse(MF)) {
    bool SeenThrowableInstInBB = false;
    for (auto &MI : reverse(MBB)) {
      if (MI.getOpcode() == WebAssembly::TRY)
        EHPadStack.pop_back();
      else if (MI.getOpcode() == WebAssembly::CATCH)
        EHPadStack.push_back(MI.getParent());

      // In this loop we only gather calls that have an EH pad to unwind to. So
      // there will be at most 1 such call (= invoke) in a BB, so after we've
      // seen one, we can skip the rest of the BB. Also if MBB has no EH pad
      // successor or MI does not throw, this is not an invoke.
      if (SeenThrowableInstInBB || !MBB.hasEHPadSuccessor() ||
          !WebAssembly::mayThrow(MI))
        continue;
      SeenThrowableInstInBB = true;

      // If the EH pad on the stack top is where this instruction should unwind
      // next, we're good.
      MachineBasicBlock *UnwindDest = nullptr;
      for (auto *Succ : MBB.successors()) {
        if (Succ->isEHPad()) {
          UnwindDest = Succ;
          break;
        }
      }
      if (EHPadStack.back() == UnwindDest)
        continue;

      // If not, record the range.
      UnwindDestToTryRanges[UnwindDest].push_back(TryRange(&MI, &MI));
    }
  }

  assert(EHPadStack.empty());

  // Gather possibly throwing calls that are supposed to unwind up to the
  // caller if they throw, but currently unwind to an incorrect destination.
  // Unlike the loop above, there can be multiple calls within a BB that unwind
  // to the caller, which we should group together in a range.
  bool NeedAppendixBlock = false;
  for (auto &MBB : reverse(MF)) {
    MachineInstr *RangeBegin = nullptr, *RangeEnd = nullptr; // inclusive
    for (auto &MI : reverse(MBB)) {
      if (MI.getOpcode() == WebAssembly::TRY)
        EHPadStack.pop_back();
      else if (MI.getOpcode() == WebAssembly::CATCH)
        EHPadStack.push_back(MI.getParent());

      // If MBB has an EH pad successor, this inst does not unwind to the
      // caller.
      if (MBB.hasEHPadSuccessor())
        continue;

      // We wrap up the current range when we see a marker even if we haven't
      // finished a BB.
      if (RangeEnd && WebAssembly::isMarker(MI.getOpcode())) {
        NeedAppendixBlock = true;
        // Record the range. nullptr here means the unwind destination is the
        // caller.
        UnwindDestToTryRanges[nullptr].push_back(
            TryRange(RangeBegin, RangeEnd));
        RangeBegin = RangeEnd = nullptr; // Reset range pointers
      }

      // If EHPadStack is empty, that means it correctly unwinds to the caller
      // if it throws, so we're good. If MI does not throw, we're good too.
      if (EHPadStack.empty() || !WebAssembly::mayThrow(MI))
        continue;

      // We found an instruction that unwinds to the caller but currently has
      // an incorrect unwind destination. Create a new range or extend the
      // currently existing range.
      if (!RangeEnd)
        RangeBegin = RangeEnd = &MI;
      else
        RangeBegin = &MI;
    }

    if (RangeEnd) {
      NeedAppendixBlock = true;
      // Record the range. nullptr here means the unwind destination is the
      // caller.
      UnwindDestToTryRanges[nullptr].push_back(TryRange(RangeBegin, RangeEnd));
      RangeBegin = RangeEnd = nullptr; // Reset range pointers
    }
  }

  assert(EHPadStack.empty());
  // We don't have any unwind destination mismatches to resolve.
  if (UnwindDestToTryRanges.empty())
    return false;

  // If we found instructions that should unwind to the caller but currently
  // have an incorrect unwind destination, we create an appendix block at the
  // end of the function with a local.get and a rethrow instruction.
  if (NeedAppendixBlock) {
    auto *AppendixBB = getAppendixBlock(MF);
    Register ExnReg = MRI.createVirtualRegister(&WebAssembly::EXNREFRegClass);
    BuildMI(AppendixBB, DebugLoc(), TII.get(WebAssembly::RETHROW))
        .addReg(ExnReg);
    // These instruction ranges should branch to this appendix BB.
    for (auto Range : UnwindDestToTryRanges[nullptr])
      BrDestToTryRanges[AppendixBB].push_back(Range);
    BrDestToExnReg[AppendixBB] = ExnReg;
  }

  // We loop through unwind destination EH pads that are targeted from some
  // inner scopes. Because these EH pads are destinations of more than one
  // scope now, we split them so that the handler body is after 'end_try'.
  // - Before
  // ehpad:
  //   catch
  //   local.set n / drop
  //   handler body
  // ...
  // cont:
  //   end_try
  //
  // - After
  // ehpad:
  //   catch
  //   local.set n / drop
  // brdest:               (new)
  //   end_try             (hoisted from 'cont' BB)
  //   handler body        (taken from 'ehpad')
  // ...
  // cont:
  for (auto &P : UnwindDestToTryRanges) {
    NumUnwindMismatches += P.second.size();

    // This means the destination is the appendix BB, which was separately
    // handled above.
    if (!P.first)
      continue;

    MachineBasicBlock *EHPad = P.first;

    // Find 'catch' and the 'local.set' or 'drop' instruction that follows the
    // 'catch'. If -wasm-disable-explicit-locals is not set, 'catch' should
    // always be followed by either a 'local.set' or a 'drop', because
    // 'br_on_exn' is generated after 'catch' in LateEHPrepare and we don't
    // support blocks taking values yet.
    MachineInstr *Catch = nullptr;
    unsigned ExnReg = 0;
    for (auto &MI : *EHPad) {
      switch (MI.getOpcode()) {
      case WebAssembly::CATCH:
        Catch = &MI;
        ExnReg = Catch->getOperand(0).getReg();
        break;
      }
    }
    assert(Catch && "EH pad does not have a catch");
    assert(ExnReg != 0 && "Invalid register");

    auto SplitPos = std::next(Catch->getIterator());

    // Create a new BB that's going to be the destination for branches from the
    // inner mismatched scope.
    MachineInstr *BeginTry = EHPadToTry[EHPad];
    MachineInstr *EndTry = BeginToEnd[BeginTry];
    MachineBasicBlock *Cont = EndTry->getParent();
    auto *BrDest = MF.CreateMachineBasicBlock();
    MF.insert(std::next(EHPad->getIterator()), BrDest);
    // Hoist up the existing 'end_try'.
    BrDest->insert(BrDest->end(), EndTry->removeFromParent());
    // Take out the handler body from the EH pad to the new branch destination
    // BB.
    BrDest->splice(BrDest->end(), EHPad, SplitPos, EHPad->end());
    unstackifyVRegsUsedInSplitBB(*EHPad, *BrDest, MFI, MRI);
    // Fix predecessor-successor relationship.
    BrDest->transferSuccessors(EHPad);
    EHPad->addSuccessor(BrDest);

    // All try ranges that were supposed to unwind to this EH pad now have to
    // branch to this new branch dest BB.
    for (auto Range : UnwindDestToTryRanges[EHPad])
      BrDestToTryRanges[BrDest].push_back(Range);
    BrDestToExnReg[BrDest] = ExnReg;

    // In case we fall through to the continuation BB after the catch block, we
    // now have to add a branch to it.
    // - Before
    // try
    //   ...
    //   (falls through to 'cont')
    // catch
    //   handler body
    // end
    //           <-- cont
    //
    // - After
    // try
    //   ...
    //   br %cont (new)
    // catch
    // end
    // handler body
    //           <-- cont
    MachineBasicBlock *EHPadLayoutPred = &*std::prev(EHPad->getIterator());
    MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
    SmallVector<MachineOperand, 4> Cond;
    bool Analyzable = !TII.analyzeBranch(*EHPadLayoutPred, TBB, FBB, Cond);
    if (Analyzable && !TBB && !FBB) {
      DebugLoc DL = EHPadLayoutPred->empty()
                        ? DebugLoc()
                        : EHPadLayoutPred->rbegin()->getDebugLoc();
      BuildMI(EHPadLayoutPred, DL, TII.get(WebAssembly::BR)).addMBB(Cont);
      BrDests.push_back(Cont);
    }
  }

  // For possibly throwing calls whose unwind destinations are currently
  // incorrect because of CFG linearization, we wrap them with a nested
  // try/catch/end_try, and within the new catch block, we branch to the
  // correct handler.
  // - Before
  // mbb:
  //   call @foo       <- Unwind destination mismatch!
  // ehpad:
  //   ...
  //
  // - After
  // mbb:
  //   try                  (new)
  //   call @foo
  // nested-ehpad:          (new)
  //   catch                (new)
  //   local.set n / drop   (new)
  //   br %brdest           (new)
  // nested-end:            (new)
  //   end_try              (new)
  // ehpad:
  //   ...
  for (auto &P : BrDestToTryRanges) {
    MachineBasicBlock *BrDest = P.first;
    auto &TryRanges = P.second;
    unsigned ExnReg = BrDestToExnReg[BrDest];

    for (auto Range : TryRanges) {
      MachineInstr *RangeBegin = nullptr, *RangeEnd = nullptr;
      std::tie(RangeBegin, RangeEnd) = Range;
      auto *MBB = RangeBegin->getParent();
      // Store the first function call from this range, because RangeBegin can
      // be moved to point to the EH_LABEL before the call.
      MachineInstr *RangeBeginCall = RangeBegin;

      // Include possible EH_LABELs in the range.
      if (RangeBegin->getIterator() != MBB->begin() &&
          std::prev(RangeBegin->getIterator())->isEHLabel())
        RangeBegin = &*std::prev(RangeBegin->getIterator());
      if (std::next(RangeEnd->getIterator()) != MBB->end() &&
          std::next(RangeEnd->getIterator())->isEHLabel())
        RangeEnd = &*std::next(RangeEnd->getIterator());

      MachineBasicBlock *EHPad = nullptr;
      for (auto *Succ : MBB->successors()) {
        if (Succ->isEHPad()) {
          EHPad = Succ;
          break;
        }
      }

      // Local expression tree before the first call of this range should go
      // after the nested TRY.
      SmallPtrSet<const MachineInstr *, 4> AfterSet;
      AfterSet.insert(RangeBegin);
      AfterSet.insert(RangeBeginCall);
      for (auto I = MachineBasicBlock::iterator(RangeBeginCall),
                E = MBB->begin();
           I != E; --I) {
        if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
          continue;
        if (WebAssembly::isChild(*std::prev(I), MFI))
          AfterSet.insert(&*std::prev(I));
        else
          break;
      }

      // Create the nested try instruction.
      auto InsertPos = getLatestInsertPos(
          MBB, SmallPtrSet<const MachineInstr *, 4>(), AfterSet);
      MachineInstr *NestedTry =
          BuildMI(*MBB, InsertPos, RangeBegin->getDebugLoc(),
                  TII.get(WebAssembly::TRY))
              .addImm(int64_t(WebAssembly::BlockType::Void));

      // Create the nested EH pad and fill instructions in.
      MachineBasicBlock *NestedEHPad = MF.CreateMachineBasicBlock();
      MF.insert(std::next(MBB->getIterator()), NestedEHPad);
      NestedEHPad->setIsEHPad();
      NestedEHPad->setIsEHScopeEntry();
      BuildMI(NestedEHPad, RangeEnd->getDebugLoc(), TII.get(WebAssembly::CATCH),
              ExnReg);
      BuildMI(NestedEHPad, RangeEnd->getDebugLoc(), TII.get(WebAssembly::BR))
          .addMBB(BrDest);

      // Create the nested continuation BB and end_try instruction.
      MachineBasicBlock *NestedCont = MF.CreateMachineBasicBlock();
      MF.insert(std::next(NestedEHPad->getIterator()), NestedCont);
      MachineInstr *NestedEndTry =
          BuildMI(*NestedCont, NestedCont->begin(), RangeEnd->getDebugLoc(),
                  TII.get(WebAssembly::END_TRY));
      // In case MBB has more instructions after the try range, move them to
      // the new nested continuation BB.
      NestedCont->splice(NestedCont->end(), MBB,
                         std::next(RangeEnd->getIterator()), MBB->end());
      unstackifyVRegsUsedInSplitBB(*MBB, *NestedCont, MFI, MRI);
      registerTryScope(NestedTry, NestedEndTry, NestedEHPad);

      // Fix predecessor-successor relationship.
      NestedCont->transferSuccessors(MBB);
      if (EHPad) {
        NestedCont->removeSuccessor(EHPad);
        // If EHPad does not have any predecessors left after removing the
        // NestedCont predecessor, remove its successor too, because this EHPad
        // is not reachable from the entry BB anyway. We can't remove the EHPad
        // BB itself because it can contain 'catch' or 'end', which are
        // necessary for keeping the try-catch-end structure.
        if (EHPad->pred_empty())
          EHPad->removeSuccessor(BrDest);
      }
      MBB->addSuccessor(NestedEHPad);
      MBB->addSuccessor(NestedCont);
      NestedEHPad->addSuccessor(BrDest);
    }
  }

  // Renumber BBs and recalculate ScopeTop info because new BBs might have been
  // created and inserted above.
  MF.RenumberBlocks();
  ScopeTops.clear();
  ScopeTops.resize(MF.getNumBlockIDs());
  for (auto &MBB : reverse(MF)) {
    for (auto &MI : reverse(MBB)) {
      if (ScopeTops[MBB.getNumber()])
        break;
      switch (MI.getOpcode()) {
      case WebAssembly::END_BLOCK:
      case WebAssembly::END_LOOP:
      case WebAssembly::END_TRY:
        ScopeTops[MBB.getNumber()] = EndToBegin[&MI]->getParent();
        break;
      case WebAssembly::CATCH:
        ScopeTops[MBB.getNumber()] = EHPadToTry[&MBB]->getParent();
        break;
      }
    }
  }

  // Recompute the dominator tree.
  getAnalysis<MachineDominatorTree>().runOnMachineFunction(MF);

  // Place block markers for newly added branches, if necessary.

  // If we've created an appendix BB and a branch to it, place a block/end_block
  // marker for that. For some new branches, those branch destination BBs start
  // with a hoisted end_try marker, so we don't need a new marker there.
  if (AppendixBB)
    BrDests.push_back(AppendixBB);

  llvm::sort(BrDests,
             [&](const MachineBasicBlock *A, const MachineBasicBlock *B) {
               auto ANum = A->getNumber();
               auto BNum = B->getNumber();
               return ANum < BNum;
             });
  for (auto *Dest : BrDests)
    placeBlockMarker(*Dest);

  return true;
}

static unsigned
getDepth(const SmallVectorImpl<const MachineBasicBlock *> &Stack,
         const MachineBasicBlock *MBB) {
  unsigned Depth = 0;
  for (auto X : reverse(Stack)) {
    if (X == MBB)
      break;
    ++Depth;
  }
  assert(Depth < Stack.size() && "Branch destination should be in scope");
  return Depth;
}

/// In normal assembly languages, when the end of a function is unreachable,
/// because the function ends in an infinite loop or a noreturn call or
/// similar, it isn't necessary to worry about the function return type at the
/// end of the function, because it's never reached. However, in WebAssembly,
/// blocks that end at the function end need to have a return type signature
/// that matches the function signature, even though it's unreachable. This
/// function checks for such cases and fixes up the signatures.
void WebAssemblyCFGStackify::fixEndsAtEndOfFunction(MachineFunction &MF) {
  const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();

  if (MFI.getResults().empty())
    return;

  // MCInstLower will add the proper types to multivalue signatures based on
  // the function return type.
  WebAssembly::BlockType RetType =
      MFI.getResults().size() > 1
          ? WebAssembly::BlockType::Multivalue
          : WebAssembly::BlockType(
                WebAssembly::toValType(MFI.getResults().front()));

  for (MachineBasicBlock &MBB : reverse(MF)) {
    for (MachineInstr &MI : reverse(MBB)) {
      if (MI.isPosition() || MI.isDebugInstr())
        continue;
      switch (MI.getOpcode()) {
      case WebAssembly::END_BLOCK:
      case WebAssembly::END_LOOP:
      case WebAssembly::END_TRY:
        EndToBegin[&MI]->getOperand(0).setImm(int32_t(RetType));
        continue;
      default:
        // Something other than an `end`. We're done.
        return;
      }
    }
  }
}

// WebAssembly functions end with an end instruction, as if the function body
// were a block.
static void appendEndToFunction(MachineFunction &MF,
                                const WebAssemblyInstrInfo &TII) {
  BuildMI(MF.back(), MF.back().end(),
          MF.back().findPrevDebugLoc(MF.back().end()),
          TII.get(WebAssembly::END_FUNCTION));
}

/// Insert LOOP/TRY/BLOCK markers at appropriate places.
void WebAssemblyCFGStackify::placeMarkers(MachineFunction &MF) {
  // We allocate one more than the number of blocks in the function to
  // accommodate the possible fake block we may insert at the end.
  ScopeTops.resize(MF.getNumBlockIDs() + 1);
  // Place the LOOP for MBB if MBB is the header of a loop.
  for (auto &MBB : MF)
    placeLoopMarker(MBB);

  const MCAsmInfo *MCAI = MF.getTarget().getMCAsmInfo();
  for (auto &MBB : MF) {
    if (MBB.isEHPad()) {
      // Place the TRY for MBB if MBB is the EH pad of an exception.
      if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm &&
          MF.getFunction().hasPersonalityFn())
        placeTryMarker(MBB);
    } else {
      // Place the BLOCK for MBB if MBB is branched to from above.
      placeBlockMarker(MBB);
    }
  }
  // Fix mismatches in unwind destinations induced by linearizing the code.
  if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm &&
      MF.getFunction().hasPersonalityFn())
    fixUnwindMismatches(MF);
}

void WebAssemblyCFGStackify::rewriteDepthImmediates(MachineFunction &MF) {
  // Now rewrite references to basic blocks to be depth immediates.
  SmallVector<const MachineBasicBlock *, 8> Stack;
  for (auto &MBB : reverse(MF)) {
    for (auto I = MBB.rbegin(), E = MBB.rend(); I != E; ++I) {
      MachineInstr &MI = *I;
      switch (MI.getOpcode()) {
      case WebAssembly::BLOCK:
      case WebAssembly::TRY:
        assert(ScopeTops[Stack.back()->getNumber()]->getNumber() <=
                   MBB.getNumber() &&
               "Block/try marker should be balanced");
        Stack.pop_back();
        break;

      case WebAssembly::LOOP:
        assert(Stack.back() == &MBB && "Loop top should be balanced");
        Stack.pop_back();
        break;

      case WebAssembly::END_BLOCK:
      case WebAssembly::END_TRY:
        Stack.push_back(&MBB);
        break;

      case WebAssembly::END_LOOP:
        Stack.push_back(EndToBegin[&MI]->getParent());
        break;

      default:
        if (MI.isTerminator()) {
          // Rewrite MBB operands to be depth immediates.
          SmallVector<MachineOperand, 4> Ops(MI.operands());
          while (MI.getNumOperands() > 0)
            MI.RemoveOperand(MI.getNumOperands() - 1);
          for (auto MO : Ops) {
            if (MO.isMBB())
              MO = MachineOperand::CreateImm(getDepth(Stack, MO.getMBB()));
            MI.addOperand(MF, MO);
          }
        }
        break;
      }
    }
  }
  assert(Stack.empty() && "Control flow should be balanced");
}

void WebAssemblyCFGStackify::releaseMemory() {
  ScopeTops.clear();
  BeginToEnd.clear();
  EndToBegin.clear();
  TryToEHPad.clear();
  EHPadToTry.clear();
  AppendixBB = nullptr;
}

bool WebAssemblyCFGStackify::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** CFG Stackifying **********\n"
                       "********** Function: "
                    << MF.getName() << '\n');
  const MCAsmInfo *MCAI = MF.getTarget().getMCAsmInfo();

  releaseMemory();

  // Liveness is not tracked for VALUE_STACK physreg.
  MF.getRegInfo().invalidateLiveness();

  // Place the BLOCK/LOOP/TRY markers to indicate the beginnings of scopes.
  placeMarkers(MF);

  // Remove unnecessary instructions possibly introduced by try/end_trys.
  if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm &&
      MF.getFunction().hasPersonalityFn())
    removeUnnecessaryInstrs(MF);

  // Convert MBB operands in terminators to relative depth immediates.
  rewriteDepthImmediates(MF);

  // Fix up block/loop/try signatures at the end of the function to conform to
  // WebAssembly's rules.
  fixEndsAtEndOfFunction(MF);

  // Add an end instruction at the end of the function body.
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  if (!MF.getSubtarget<WebAssemblySubtarget>()
           .getTargetTriple()
           .isOSBinFormatELF())
    appendEndToFunction(MF, TII);

  MF.getInfo<WebAssemblyFunctionInfo>()->setCFGStackified();
  return true;
}