//===-- WebAssemblyCFGStackify.cpp - CFG Stackification -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements a CFG stackification pass.
///
/// This pass inserts BLOCK, LOOP, and TRY markers to mark the start of scopes,
/// since scope boundaries serve as the labels for WebAssembly's control
/// transfers.
///
/// This is sufficient to convert arbitrary CFGs into a form that works on
/// WebAssembly, provided that all loops are single-entry.
///
/// When exceptions are used, this pass also fixes mismatches in unwind
/// destinations created while transforming the CFG into wasm's structured
/// format.
///
//===----------------------------------------------------------------------===//

#include "WebAssembly.h"
#include "WebAssemblyExceptionInfo.h"
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Target/TargetMachine.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-cfg-stackify"

STATISTIC(NumUnwindMismatches, "Number of EH pad unwind mismatches found");

namespace {
class WebAssemblyCFGStackify final : public MachineFunctionPass {
  StringRef getPassName() const override { return "WebAssembly CFG Stackify"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<MachineLoopInfo>();
    AU.addRequired<WebAssemblyExceptionInfo>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  // For each block whose label represents the end of a scope, record the block
  // which holds the beginning of the scope. This will allow us to quickly skip
  // over scoped regions when walking blocks.
  SmallVector<MachineBasicBlock *, 8> ScopeTops;

  // Placing markers.
  void placeMarkers(MachineFunction &MF);
  void placeBlockMarker(MachineBasicBlock &MBB);
  void placeLoopMarker(MachineBasicBlock &MBB);
  void placeTryMarker(MachineBasicBlock &MBB);
  void removeUnnecessaryInstrs(MachineFunction &MF);
  bool fixUnwindMismatches(MachineFunction &MF);
  void rewriteDepthImmediates(MachineFunction &MF);
  void fixEndsAtEndOfFunction(MachineFunction &MF);

  // For each BLOCK|LOOP|TRY, the corresponding END_(BLOCK|LOOP|TRY).
  DenseMap<const MachineInstr *, MachineInstr *> BeginToEnd;
  // For each END_(BLOCK|LOOP|TRY), the corresponding BLOCK|LOOP|TRY.
  DenseMap<const MachineInstr *, MachineInstr *> EndToBegin;
  // <TRY marker, EH pad> map
  DenseMap<const MachineInstr *, MachineBasicBlock *> TryToEHPad;
  // <EH pad, TRY marker> map
  DenseMap<const MachineBasicBlock *, MachineInstr *> EHPadToTry;

  // There can be an appendix block at the end of each function, shared for:
  // - creating a correct signature for fallthrough returns
  // - target for rethrows that need to unwind to the caller, but are trapped
  //   inside another try/catch
  MachineBasicBlock *AppendixBB = nullptr;
  MachineBasicBlock *getAppendixBlock(MachineFunction &MF) {
    if (!AppendixBB) {
      AppendixBB = MF.CreateMachineBasicBlock();
      // Give it a fake predecessor so that AsmPrinter prints its label.
      AppendixBB->addSuccessor(AppendixBB);
      MF.push_back(AppendixBB);
    }
    return AppendixBB;
  }

  // Helper functions to register / unregister scope information created by
  // marker instructions.
  void registerScope(MachineInstr *Begin, MachineInstr *End);
  void registerTryScope(MachineInstr *Begin, MachineInstr *End,
                        MachineBasicBlock *EHPad);
  void unregisterScope(MachineInstr *Begin);

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyCFGStackify() : MachineFunctionPass(ID) {}
  ~WebAssemblyCFGStackify() override { releaseMemory(); }
  void releaseMemory() override;
};
} // end anonymous namespace

char WebAssemblyCFGStackify::ID = 0;
INITIALIZE_PASS(WebAssemblyCFGStackify, DEBUG_TYPE,
                "Insert BLOCK/LOOP/TRY markers for WebAssembly scopes", false,
                false)

FunctionPass *llvm::createWebAssemblyCFGStackify() {
  return new WebAssemblyCFGStackify();
}

/// Test whether Pred has any terminators explicitly branching to MBB, as
/// opposed to falling through. Note that it's possible (e.g. in unoptimized
/// code) for a branch instruction to both branch to a block and fall through
/// to it, so we check the actual branch operands to see if there are any
/// explicit mentions.
static bool explicitlyBranchesTo(MachineBasicBlock *Pred,
                                 MachineBasicBlock *MBB) {
  for (MachineInstr &MI : Pred->terminators())
    for (MachineOperand &MO : MI.explicit_operands())
      if (MO.isMBB() && MO.getMBB() == MBB)
        return true;
  return false;
}

// Returns an iterator to the earliest position possible within the MBB,
// satisfying the restrictions given by BeforeSet and AfterSet. BeforeSet
// contains instructions that should go before the marker, and AfterSet
// contains ones that should go after the marker. In this function, AfterSet is
// only used for sanity checking.
static MachineBasicBlock::iterator
getEarliestInsertPos(MachineBasicBlock *MBB,
                     const SmallPtrSet<const MachineInstr *, 4> &BeforeSet,
                     const SmallPtrSet<const MachineInstr *, 4> &AfterSet) {
  auto InsertPos = MBB->end();
  while (InsertPos != MBB->begin()) {
    if (BeforeSet.count(&*std::prev(InsertPos))) {
#ifndef NDEBUG
      // Sanity check
      for (auto Pos = InsertPos, E = MBB->begin(); Pos != E; --Pos)
        assert(!AfterSet.count(&*std::prev(Pos)));
#endif
      break;
    }
    --InsertPos;
  }
  return InsertPos;
}

// Returns an iterator to the latest position possible within the MBB,
// satisfying the restrictions given by BeforeSet and AfterSet. BeforeSet
// contains instructions that should go before the marker, and AfterSet
// contains ones that should go after the marker. In this function, BeforeSet
// is only used for sanity checking.
static MachineBasicBlock::iterator
getLatestInsertPos(MachineBasicBlock *MBB,
                   const SmallPtrSet<const MachineInstr *, 4> &BeforeSet,
                   const SmallPtrSet<const MachineInstr *, 4> &AfterSet) {
  auto InsertPos = MBB->begin();
  while (InsertPos != MBB->end()) {
    if (AfterSet.count(&*InsertPos)) {
#ifndef NDEBUG
      // Sanity check
      for (auto Pos = InsertPos, E = MBB->end(); Pos != E; ++Pos)
        assert(!BeforeSet.count(&*Pos));
#endif
      break;
    }
    ++InsertPos;
  }
  return InsertPos;
}

void WebAssemblyCFGStackify::registerScope(MachineInstr *Begin,
                                           MachineInstr *End) {
  BeginToEnd[Begin] = End;
  EndToBegin[End] = Begin;
}

void WebAssemblyCFGStackify::registerTryScope(MachineInstr *Begin,
                                              MachineInstr *End,
                                              MachineBasicBlock *EHPad) {
  registerScope(Begin, End);
  TryToEHPad[Begin] = EHPad;
  EHPadToTry[EHPad] = Begin;
}

void WebAssemblyCFGStackify::unregisterScope(MachineInstr *Begin) {
  assert(BeginToEnd.count(Begin));
  MachineInstr *End = BeginToEnd[Begin];
  assert(EndToBegin.count(End));
  BeginToEnd.erase(Begin);
  EndToBegin.erase(End);
  MachineBasicBlock *EHPad = TryToEHPad.lookup(Begin);
  if (EHPad) {
    assert(EHPadToTry.count(EHPad));
    TryToEHPad.erase(Begin);
    EHPadToTry.erase(EHPad);
  }
}

/// Insert a BLOCK marker for branches to MBB (if needed).
// TODO Consider a more generalized way of handling block (and also loop and
// try) signatures when we implement the multi-value proposal later.
void WebAssemblyCFGStackify::placeBlockMarker(MachineBasicBlock &MBB) {
  assert(!MBB.isEHPad());
  MachineFunction &MF = *MBB.getParent();
  auto &MDT = getAnalysis<MachineDominatorTree>();
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();

  // First compute the nearest common dominator of all forward non-fallthrough
  // predecessors so that we minimize the time that the BLOCK is on the stack,
  // which reduces overall stack height.
  MachineBasicBlock *Header = nullptr;
  bool IsBranchedTo = false;
  bool IsBrOnExn = false;
  MachineInstr *BrOnExn = nullptr;
  int MBBNumber = MBB.getNumber();
  for (MachineBasicBlock *Pred : MBB.predecessors()) {
    if (Pred->getNumber() < MBBNumber) {
      Header = Header ? MDT.findNearestCommonDominator(Header, Pred) : Pred;
      if (explicitlyBranchesTo(Pred, &MBB)) {
        IsBranchedTo = true;
        if (Pred->getFirstTerminator()->getOpcode() == WebAssembly::BR_ON_EXN) {
          IsBrOnExn = true;
          assert(!BrOnExn && "There should be only one br_on_exn per block");
          BrOnExn = &*Pred->getFirstTerminator();
        }
      }
    }
  }
  if (!Header)
    return;
  if (!IsBranchedTo)
    return;

  assert(&MBB != &MF.front() && "Header blocks shouldn't have predecessors");
  MachineBasicBlock *LayoutPred = MBB.getPrevNode();

  // If the nearest common dominator is inside a more deeply nested context,
  // walk out to the nearest scope which isn't more deeply nested.
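  // ScopeTops lets us hop over an already-placed scope in a single step
  // instead of walking block by block.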
  for (MachineFunction::iterator I(LayoutPred), E(Header); I != E; --I) {
    if (MachineBasicBlock *ScopeTop = ScopeTops[I->getNumber()]) {
      if (ScopeTop->getNumber() > Header->getNumber()) {
        // Skip over an intervening scope.
        I = std::next(ScopeTop->getIterator());
      } else {
        // We found a scope level at an appropriate depth.
        Header = ScopeTop;
        break;
      }
    }
  }

  // Decide where in Header to put the BLOCK.

  // Instructions that should go before the BLOCK.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  // Instructions that should go after the BLOCK.
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : *Header) {
    // If there is a previously placed LOOP marker and the bottom block of the
    // loop is above MBB, it should be after the BLOCK, because the loop is
    // nested in this BLOCK. Otherwise it should be before the BLOCK.
    if (MI.getOpcode() == WebAssembly::LOOP) {
      auto *LoopBottom = BeginToEnd[&MI]->getParent()->getPrevNode();
      if (MBB.getNumber() > LoopBottom->getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

    // If there is a previously placed BLOCK/TRY marker and its corresponding
    // END marker is before the current BLOCK's END marker, that should be
    // placed after this BLOCK. Otherwise it should be placed before this BLOCK
    // marker.
    if (MI.getOpcode() == WebAssembly::BLOCK ||
        MI.getOpcode() == WebAssembly::TRY) {
      if (BeginToEnd[&MI]->getParent()->getNumber() <= MBB.getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

#ifndef NDEBUG
    // All END_(BLOCK|LOOP|TRY) markers should be before the BLOCK.
    if (MI.getOpcode() == WebAssembly::END_BLOCK ||
        MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY)
      BeforeSet.insert(&MI);
#endif

    // Terminators should go after the BLOCK.
    if (MI.isTerminator())
      AfterSet.insert(&MI);
  }

  // Local expression tree should go after the BLOCK.
  for (auto I = Header->getFirstTerminator(), E = Header->begin(); I != E;
       --I) {
    if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
      continue;
    if (WebAssembly::isChild(*std::prev(I), MFI))
      AfterSet.insert(&*std::prev(I));
    else
      break;
  }

  // Add the BLOCK.

  // 'br_on_exn' extracts the exnref object and pushes a variable number of
  // values depending on its tag. For a C++ exception, it is a single i32
  // value, and the generated code will be in the form of:
  // block i32
  //   br_on_exn 0, $__cpp_exception
  //   rethrow
  // end_block
  WebAssembly::BlockType ReturnType = WebAssembly::BlockType::Void;
  if (IsBrOnExn) {
    const char *TagName = BrOnExn->getOperand(1).getSymbolName();
    if (std::strcmp(TagName, "__cpp_exception") != 0)
      llvm_unreachable("Only C++ exception is supported");
    ReturnType = WebAssembly::BlockType::I32;
  }

  auto InsertPos = getLatestInsertPos(Header, BeforeSet, AfterSet);
  MachineInstr *Begin =
      BuildMI(*Header, InsertPos, Header->findDebugLoc(InsertPos),
              TII.get(WebAssembly::BLOCK))
          .addImm(int64_t(ReturnType));

  // Decide where in MBB to put the END_BLOCK.
  BeforeSet.clear();
  AfterSet.clear();
  for (auto &MI : MBB) {
#ifndef NDEBUG
    // END_BLOCK should precede existing LOOP and TRY markers.
    if (MI.getOpcode() == WebAssembly::LOOP ||
        MI.getOpcode() == WebAssembly::TRY)
      AfterSet.insert(&MI);
#endif

    // If there is a previously placed END_LOOP marker and the header of the
    // loop is above this block's header, the END_LOOP should be placed after
    // the BLOCK, because the loop contains this block. Otherwise the END_LOOP
    // should be placed before the BLOCK. The same goes for END_TRY.
    if (MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY) {
      if (EndToBegin[&MI]->getParent()->getNumber() >= Header->getNumber())
        BeforeSet.insert(&MI);
#ifndef NDEBUG
      else
        AfterSet.insert(&MI);
#endif
    }
  }

  // Mark the end of the block.
  InsertPos = getEarliestInsertPos(&MBB, BeforeSet, AfterSet);
  MachineInstr *End = BuildMI(MBB, InsertPos, MBB.findPrevDebugLoc(InsertPos),
                              TII.get(WebAssembly::END_BLOCK));
  registerScope(Begin, End);

  // Track the farthest-spanning scope that ends at this point.
  int Number = MBB.getNumber();
  if (!ScopeTops[Number] ||
      ScopeTops[Number]->getNumber() > Header->getNumber())
    ScopeTops[Number] = Header;
}

/// Insert a LOOP marker for a loop starting at MBB (if it's a loop header).
void WebAssemblyCFGStackify::placeLoopMarker(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  const auto &MLI = getAnalysis<MachineLoopInfo>();
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();

  MachineLoop *Loop = MLI.getLoopFor(&MBB);
  if (!Loop || Loop->getHeader() != &MBB)
    return;

  // The operand of a LOOP is the first block after the loop. If the loop ends
  // at the bottom of the function, insert a dummy block at the end.
  MachineBasicBlock *Bottom = WebAssembly::getBottom(Loop);
  auto Iter = std::next(Bottom->getIterator());
  if (Iter == MF.end()) {
    getAppendixBlock(MF);
    Iter = std::next(Bottom->getIterator());
  }
  MachineBasicBlock *AfterLoop = &*Iter;

  // Decide where in MBB to put the LOOP.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : MBB) {
    // The LOOP marker should be after any existing loop that ends here.
    // Otherwise we assume the instruction belongs to the loop.
    if (MI.getOpcode() == WebAssembly::END_LOOP)
      BeforeSet.insert(&MI);
#ifndef NDEBUG
    else
      AfterSet.insert(&MI);
#endif
  }

  // Mark the beginning of the loop.
  auto InsertPos = getEarliestInsertPos(&MBB, BeforeSet, AfterSet);
  MachineInstr *Begin = BuildMI(MBB, InsertPos, MBB.findDebugLoc(InsertPos),
                                TII.get(WebAssembly::LOOP))
                            .addImm(int64_t(WebAssembly::BlockType::Void));

  // Decide where in AfterLoop to put the END_LOOP.
  BeforeSet.clear();
  AfterSet.clear();
#ifndef NDEBUG
  for (const auto &MI : MBB)
    // Existing END_LOOP markers belong to parent loops of this loop.
    if (MI.getOpcode() == WebAssembly::END_LOOP)
      AfterSet.insert(&MI);
#endif

  // Mark the end of the loop (using an arbitrary debug location from a branch
  // to the loop end as its location).
  InsertPos = getEarliestInsertPos(AfterLoop, BeforeSet, AfterSet);
  DebugLoc EndDL = AfterLoop->pred_empty()
                       ? DebugLoc()
                       : (*AfterLoop->pred_rbegin())->findBranchDebugLoc();
  MachineInstr *End =
      BuildMI(*AfterLoop, InsertPos, EndDL, TII.get(WebAssembly::END_LOOP));
  registerScope(Begin, End);

  assert((!ScopeTops[AfterLoop->getNumber()] ||
          ScopeTops[AfterLoop->getNumber()]->getNumber() < MBB.getNumber()) &&
         "With block sorting the outermost loop for a block should be first.");
  if (!ScopeTops[AfterLoop->getNumber()])
    ScopeTops[AfterLoop->getNumber()] = &MBB;
}

void WebAssemblyCFGStackify::placeTryMarker(MachineBasicBlock &MBB) {
  assert(MBB.isEHPad());
  MachineFunction &MF = *MBB.getParent();
  auto &MDT = getAnalysis<MachineDominatorTree>();
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto &WEI = getAnalysis<WebAssemblyExceptionInfo>();
  const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();

  // Compute the nearest common dominator of all unwind predecessors.
  MachineBasicBlock *Header = nullptr;
  int MBBNumber = MBB.getNumber();
  for (auto *Pred : MBB.predecessors()) {
    if (Pred->getNumber() < MBBNumber) {
      Header = Header ? MDT.findNearestCommonDominator(Header, Pred) : Pred;
      assert(!explicitlyBranchesTo(Pred, &MBB) &&
             "Explicit branch to an EH pad!");
    }
  }
  if (!Header)
    return;

  // If this try is at the bottom of the function, insert a dummy block at the
  // end.
  WebAssemblyException *WE = WEI.getExceptionFor(&MBB);
  assert(WE);
  MachineBasicBlock *Bottom = WebAssembly::getBottom(WE);

  auto Iter = std::next(Bottom->getIterator());
  if (Iter == MF.end()) {
    getAppendixBlock(MF);
    Iter = std::next(Bottom->getIterator());
  }
  MachineBasicBlock *Cont = &*Iter;

  assert(Cont != &MF.front());
  MachineBasicBlock *LayoutPred = Cont->getPrevNode();

  // If the nearest common dominator is inside a more deeply nested context,
  // walk out to the nearest scope which isn't more deeply nested.
  for (MachineFunction::iterator I(LayoutPred), E(Header); I != E; --I) {
    if (MachineBasicBlock *ScopeTop = ScopeTops[I->getNumber()]) {
      if (ScopeTop->getNumber() > Header->getNumber()) {
        // Skip over an intervening scope.
        I = std::next(ScopeTop->getIterator());
      } else {
        // We found a scope level at an appropriate depth.
        Header = ScopeTop;
        break;
      }
    }
  }

  // Decide where in Header to put the TRY.

  // Instructions that should go before the TRY.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  // Instructions that should go after the TRY.
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : *Header) {
    // If there is a previously placed LOOP marker and the bottom block of the
    // loop is above MBB, it should be after the TRY, because the loop is
    // nested in this TRY. Otherwise it should be before the TRY.
    if (MI.getOpcode() == WebAssembly::LOOP) {
      auto *LoopBottom = BeginToEnd[&MI]->getParent()->getPrevNode();
      if (MBB.getNumber() > LoopBottom->getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

    // All previously inserted BLOCK/TRY markers should be after the TRY
    // because they are all nested trys.
    if (MI.getOpcode() == WebAssembly::BLOCK ||
        MI.getOpcode() == WebAssembly::TRY)
      AfterSet.insert(&MI);

#ifndef NDEBUG
    // All END_(BLOCK/LOOP/TRY) markers should be before the TRY.
    if (MI.getOpcode() == WebAssembly::END_BLOCK ||
        MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY)
      BeforeSet.insert(&MI);
#endif

    // Terminators should go after the TRY.
    if (MI.isTerminator())
      AfterSet.insert(&MI);
  }

  // If Header unwinds to MBB (= Header contains 'invoke'), the try block
  // should contain the call within it. So the call should go after the TRY.
  // The exception is when the header's terminator is a rethrow instruction, in
  // which case that instruction, not a call instruction before it, is going to
  // throw.
  MachineInstr *ThrowingCall = nullptr;
  if (MBB.isPredecessor(Header)) {
    auto TermPos = Header->getFirstTerminator();
    if (TermPos == Header->end() ||
        TermPos->getOpcode() != WebAssembly::RETHROW) {
      for (auto &MI : reverse(*Header)) {
        if (MI.isCall()) {
          AfterSet.insert(&MI);
          ThrowingCall = &MI;
          // Possibly throwing calls are usually wrapped by EH_LABEL
          // instructions. We don't want to split them and the call.
          if (MI.getIterator() != Header->begin() &&
              std::prev(MI.getIterator())->isEHLabel()) {
            AfterSet.insert(&*std::prev(MI.getIterator()));
            ThrowingCall = &*std::prev(MI.getIterator());
          }
          break;
        }
      }
    }
  }

  // Local expression tree should go after the TRY.
  // For BLOCK placement, we start the search from the previous instruction of
  // a BB's terminator, but in TRY's case, we should start from the previous
  // instruction of a call that can throw, or an EH_LABEL that precedes the
  // call, because the return values of the call's previous instructions can be
  // stackified and consumed by the throwing call.
  auto SearchStartPt = ThrowingCall ? MachineBasicBlock::iterator(ThrowingCall)
                                    : Header->getFirstTerminator();
  for (auto I = SearchStartPt, E = Header->begin(); I != E; --I) {
    if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
      continue;
    if (WebAssembly::isChild(*std::prev(I), MFI))
      AfterSet.insert(&*std::prev(I));
    else
      break;
  }

  // Add the TRY.
  auto InsertPos = getLatestInsertPos(Header, BeforeSet, AfterSet);
  MachineInstr *Begin =
      BuildMI(*Header, InsertPos, Header->findDebugLoc(InsertPos),
              TII.get(WebAssembly::TRY))
          .addImm(int64_t(WebAssembly::BlockType::Void));

  // Decide where in Cont to put the END_TRY.
  BeforeSet.clear();
  AfterSet.clear();
  for (const auto &MI : *Cont) {
#ifndef NDEBUG
    // END_TRY should precede existing LOOP and BLOCK markers.
    if (MI.getOpcode() == WebAssembly::LOOP ||
        MI.getOpcode() == WebAssembly::BLOCK)
      AfterSet.insert(&MI);

    // All END_TRY markers placed earlier belong to exceptions that contain
    // this one.
    if (MI.getOpcode() == WebAssembly::END_TRY)
      AfterSet.insert(&MI);
#endif

    // If there is a previously placed END_LOOP marker and its header is after
    // where the TRY marker is, this loop is contained within the 'catch' part,
    // so the END_TRY marker should go after that. Otherwise, the whole
    // try-catch is contained within this loop, so the END_TRY should go before
    // that.
    if (MI.getOpcode() == WebAssembly::END_LOOP) {
      // For a LOOP to be after TRY, LOOP's BB should be after TRY's BB; if
      // they are in the same BB, LOOP is always before TRY.
      if (EndToBegin[&MI]->getParent()->getNumber() > Header->getNumber())
        BeforeSet.insert(&MI);
#ifndef NDEBUG
      else
        AfterSet.insert(&MI);
#endif
    }

    // It is not possible for an END_BLOCK to be already in this block.
  }

  // Mark the end of the TRY.
  InsertPos = getEarliestInsertPos(Cont, BeforeSet, AfterSet);
  MachineInstr *End =
      BuildMI(*Cont, InsertPos, Bottom->findBranchDebugLoc(),
              TII.get(WebAssembly::END_TRY));
  registerTryScope(Begin, End, &MBB);

  // Track the farthest-spanning scope that ends at this point. We create two
  // mappings: (BB with 'end_try' -> BB with 'try') and (BB with 'catch' -> BB
  // with 'try'). We need to create the 'catch' -> 'try' mapping here too
  // because markers should not span across 'catch'. For example, this should
  // not happen:
  //
  // try
  //   block     --|  (X)
  // catch         |
  //   end_block --|
  // end_try
  for (int Number : {Cont->getNumber(), MBB.getNumber()}) {
    if (!ScopeTops[Number] ||
        ScopeTops[Number]->getNumber() > Header->getNumber())
      ScopeTops[Number] = Header;
  }
}

void WebAssemblyCFGStackify::removeUnnecessaryInstrs(MachineFunction &MF) {
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();

  // When there is an unconditional branch right before a catch instruction and
  // it branches to the block containing the end_try marker, we don't need the
  // branch, because if there is no exception, control flow transfers to that
  // point anyway.
  // bb0:
  //   try
  //     ...
  //     br bb2      <- Not necessary
  // bb1:
  //   catch
  //     ...
  // bb2:
  //   end
  for (auto &MBB : MF) {
    if (!MBB.isEHPad())
      continue;

    MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
    SmallVector<MachineOperand, 4> Cond;
    MachineBasicBlock *EHPadLayoutPred = MBB.getPrevNode();
    MachineBasicBlock *Cont = BeginToEnd[EHPadToTry[&MBB]]->getParent();
    bool Analyzable = !TII.analyzeBranch(*EHPadLayoutPred, TBB, FBB, Cond);
    // This condition means either
    // 1. This BB ends with a single unconditional branch whose destination is
    //    Cont.
    // 2. This BB ends with a conditional branch followed by an unconditional
    //    branch, and the unconditional branch's destination is Cont.
    // In both cases, we want to remove the last (= unconditional) branch.
    if (Analyzable && ((Cond.empty() && TBB && TBB == Cont) ||
                       (!Cond.empty() && FBB && FBB == Cont))) {
      bool ErasedUncondBr = false;
      (void)ErasedUncondBr;
      for (auto I = EHPadLayoutPred->end(), E = EHPadLayoutPred->begin();
           I != E; --I) {
        auto PrevI = std::prev(I);
        if (PrevI->isTerminator()) {
          assert(PrevI->getOpcode() == WebAssembly::BR);
          PrevI->eraseFromParent();
          ErasedUncondBr = true;
          break;
        }
      }
      assert(ErasedUncondBr && "Unconditional branch not erased!");
    }
  }

  // When there are block / end_block markers that overlap with try / end_try
  // markers, and the block and try markers' return types are the same, the
  // block / end_block markers are not necessary, because try / end_try markers
  // can also serve as boundaries for branches.
  // block         <- Not necessary
  //   try
  //     ...
  //   catch
  //     ...
  //   end
  // end           <- Not necessary
  SmallVector<MachineInstr *, 32> ToDelete;
  for (auto &MBB : MF) {
    for (auto &MI : MBB) {
      if (MI.getOpcode() != WebAssembly::TRY)
        continue;

      MachineInstr *Try = &MI, *EndTry = BeginToEnd[Try];
      MachineBasicBlock *TryBB = Try->getParent();
      MachineBasicBlock *Cont = EndTry->getParent();
      int64_t RetType = Try->getOperand(0).getImm();
      for (auto B = Try->getIterator(), E = std::next(EndTry->getIterator());
           B != TryBB->begin() && E != Cont->end() &&
           std::prev(B)->getOpcode() == WebAssembly::BLOCK &&
           E->getOpcode() == WebAssembly::END_BLOCK &&
           std::prev(B)->getOperand(0).getImm() == RetType;
           --B, ++E) {
        ToDelete.push_back(&*std::prev(B));
        ToDelete.push_back(&*E);
      }
    }
  }
  for (auto *MI : ToDelete) {
    if (MI->getOpcode() == WebAssembly::BLOCK)
      unregisterScope(MI);
    MI->eraseFromParent();
  }
}

// When MBB is split into MBB and Split, we should unstackify defs in MBB that
// have their uses in Split.
static void unstackifyVRegsUsedInSplitBB(MachineBasicBlock &MBB,
                                         MachineBasicBlock &Split,
                                         WebAssemblyFunctionInfo &MFI,
                                         MachineRegisterInfo &MRI) {
  for (auto &MI : Split) {
    for (auto &MO : MI.explicit_uses()) {
      if (!MO.isReg() || Register::isPhysicalRegister(MO.getReg()))
        continue;
      if (MachineInstr *Def = MRI.getUniqueVRegDef(MO.getReg()))
        if (Def->getParent() == &MBB)
          MFI.unstackifyVReg(MO.getReg());
    }
  }
}

bool WebAssemblyCFGStackify::fixUnwindMismatches(MachineFunction &MF) {
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  MachineRegisterInfo &MRI = MF.getRegInfo();

  // Linearizing the control flow by placing TRY / END_TRY markers can create
  // mismatches in unwind destinations. There are two kinds of mismatches we
  // try to solve here.

  // 1. When an instruction may throw, but the EH pad it will unwind to can be
  //    different from the original CFG.
  //
  // Example: we have the following CFG:
  // bb0:
  //   call @foo (if it throws, unwind to bb2)
  // bb1:
  //   call @bar (if it throws, unwind to bb3)
  // bb2 (ehpad):
  //   catch
  //   ...
  // bb3 (ehpad)
  //   catch
  //   handler body
  //
  // And the CFG is sorted in this order. Then after placing TRY markers, it
  // will look like: (BB markers are omitted)
  // try $label1
  //   try
  //     call @foo
  //     call @bar (if it throws, unwind to bb3)
  //   catch <- ehpad (bb2)
  //     ...
  //   end_try
  // catch <- ehpad (bb3)
  //   handler body
  // end_try
  //
  // Now if bar() throws, it is going to end up in bb2, not bb3, where it is
  // supposed to end up. We solve this problem as follows:
  // a. Split the target unwind EH pad (here bb3) so that the handler body is
  //    right after 'end_try', which means we extract the handler body out of
  //    the catch block. We do this because this handler body should be
  //    somewhere branchable from the inner scope.
  // b. Wrap the call that has an incorrect unwind destination ('call @bar'
  //    here) with a nested try/catch/end_try scope, and within the new catch
  //    block, branch to the handler body.
  // c. Place a branch after the newly inserted nested end_try so it can bypass
  //    the handler body, which is now outside of a catch block.
  //
  // The result will look like the following.
  // Here '(new: a)' means the instruction is newly created in the process of
  // doing 'a' above.
  //
  // block $label0                 (new: placeBlockMarker)
  //   try $label1
  //     try
  //       call @foo
  //       try                     (new: b)
  //         call @bar
  //       catch                   (new: b)
  //         local.set n / drop    (new: b)
  //         br $label1            (new: b)
  //       end_try                 (new: b)
  //     catch <- ehpad (bb2)
  //     end_try
  //     br $label0                (new: c)
  //   catch <- ehpad (bb3)
  //   end_try                     (hoisted: a)
  //   handler body
  // end_block                     (new: placeBlockMarker)
  //
  // Note that the new wrapping block/end_block will be generated later in
  // placeBlockMarker.
  //
  // TODO Currently local.set and local.gets are generated to move the exnref
  // value created by catches. That's because we don't support yielding values
  // from a block in LLVM machine IR yet, even though it is supported by wasm.
  // Delete unnecessary local.get/local.sets once yielding values from a block
  // is supported. The full EH spec requires multi-value support to do this,
  // but for C++ we don't yet need it because we only throw a single i32.
  //
  // ---
  // 2. The same as 1, but in this case an instruction unwinds to the caller
  //    function and not another EH pad.
  //
  // Example: we have the following CFG:
  // bb0:
  //   call @foo (if it throws, unwind to bb2)
  // bb1:
  //   call @bar (if it throws, unwind to caller)
  // bb2 (ehpad):
  //   catch
  //   ...
  //
  // And the CFG is sorted in this order. Then after placing TRY markers, it
  // will look like:
  // try
  //   call @foo
  //   call @bar (if it throws, unwind to caller)
  // catch <- ehpad (bb2)
  //   ...
  // end_try
  //
  // Now if bar() throws, it is going to end up in bb2, when it is supposed to
  // throw up to the caller.
  // We solve this problem as follows:
  // a. Create a new 'appendix' BB at the end of the function and put a single
  //    'rethrow' instruction (+ local.get) in there.
  // b. Wrap the call that has an incorrect unwind destination ('call @bar'
  //    here) with a nested try/catch/end_try scope, and within the new catch
  //    block, branch to the new appendix block.
  //
  // block $label0                 (new: placeBlockMarker)
  //   try
  //     call @foo
  //     try                       (new: b)
  //       call @bar
  //     catch                     (new: b)
  //       local.set n             (new: b)
  //       br $label0              (new: b)
  //     end_try                   (new: b)
  //   catch <- ehpad (bb2)
  //     ...
  //   end_try
  //   ...
  // end_block                     (new: placeBlockMarker)
  // local.get n                   (new: a)  <- appendix block
  // rethrow                       (new: a)
  //
  // In case there are multiple calls in a BB that may throw to the caller,
  // they can be wrapped together in one nested try scope. (In 1, this couldn't
  // happen, because a may-throwing instruction there had an unwind
  // destination, i.e., it was an invoke before, and there could be only one
  // invoke within a BB.)
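
  // EH pads of the 'try' scopes enclosing the current position, innermost
  // last, maintained while walking the function in reverse in the loops below.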
  SmallVector<const MachineBasicBlock *, 8> EHPadStack;
  // Range of instructions to be wrapped in a new nested try/catch.
  using TryRange = std::pair<MachineInstr *, MachineInstr *>;
  // In the original CFG, <unwind destination BB, a vector of try ranges>.
  DenseMap<MachineBasicBlock *, SmallVector<TryRange, 4>>
      UnwindDestToTryRanges;
  // In the new CFG, <destination to branch to, a vector of try ranges>.
  DenseMap<MachineBasicBlock *, SmallVector<TryRange, 4>> BrDestToTryRanges;
  // In the new CFG, <destination to branch to, register containing exnref>.
  DenseMap<MachineBasicBlock *, unsigned> BrDestToExnReg;

  // Destinations for branches that will be newly added, for which new
  // BLOCK/END_BLOCK markers are necessary.
  SmallVector<MachineBasicBlock *, 8> BrDests;

  // Gather possibly throwing calls (i.e., previously invokes) whose current
  // unwind destination is not the same as in the original CFG.
  for (auto &MBB : reverse(MF)) {
    bool SeenThrowableInstInBB = false;
    for (auto &MI : reverse(MBB)) {
      if (MI.getOpcode() == WebAssembly::TRY)
        EHPadStack.pop_back();
      else if (MI.getOpcode() == WebAssembly::CATCH)
        EHPadStack.push_back(MI.getParent());

      // In this loop we only gather calls that have an EH pad to unwind to. So
      // there will be at most one such call (= invoke) in a BB, so after we've
      // seen one, we can skip the rest of the BB. Also if MBB has no EH pad
      // successor or MI does not throw, this is not an invoke.
      if (SeenThrowableInstInBB || !MBB.hasEHPadSuccessor() ||
          !WebAssembly::mayThrow(MI))
        continue;
      SeenThrowableInstInBB = true;

      // If the EH pad on the stack top is where this instruction should unwind
      // next, we're good.
      MachineBasicBlock *UnwindDest = nullptr;
      for (auto *Succ : MBB.successors()) {
        if (Succ->isEHPad()) {
          UnwindDest = Succ;
          break;
        }
      }
      if (EHPadStack.back() == UnwindDest)
        continue;

      // If not, record the range.
      UnwindDestToTryRanges[UnwindDest].push_back(TryRange(&MI, &MI));
    }
  }

  assert(EHPadStack.empty());

  // Gather possibly throwing calls that are supposed to unwind up to the
  // caller if they throw, but currently unwind to an incorrect destination.
  // Unlike the loop above, there can be multiple calls within a BB that unwind
  // to the caller, which we should group together in a range.
  bool NeedAppendixBlock = false;
  for (auto &MBB : reverse(MF)) {
    MachineInstr *RangeBegin = nullptr, *RangeEnd = nullptr; // inclusive
    for (auto &MI : reverse(MBB)) {
      if (MI.getOpcode() == WebAssembly::TRY)
        EHPadStack.pop_back();
      else if (MI.getOpcode() == WebAssembly::CATCH)
        EHPadStack.push_back(MI.getParent());

      // If MBB has an EH pad successor, this inst does not unwind to caller.
      if (MBB.hasEHPadSuccessor())
        continue;

      // We wrap up the current range when we see a marker even if we haven't
      // finished a BB.
      if (RangeEnd && WebAssembly::isMarker(MI.getOpcode())) {
        NeedAppendixBlock = true;
        // Record the range. nullptr here means the unwind destination is the
        // caller.
        UnwindDestToTryRanges[nullptr].push_back(
            TryRange(RangeBegin, RangeEnd));
        RangeBegin = RangeEnd = nullptr; // Reset range pointers
      }

      // If EHPadStack is empty, this instruction correctly unwinds to the
      // caller if it throws, so we're good. If MI does not throw, we're good
      // too.
      if (EHPadStack.empty() || !WebAssembly::mayThrow(MI))
        continue;

      // We found an instruction that unwinds to the caller but currently has
      // an incorrect unwind destination. Create a new range or extend the
      // currently existing range.
      if (!RangeEnd)
        RangeBegin = RangeEnd = &MI;
      else
        RangeBegin = &MI;
    }

    if (RangeEnd) {
      NeedAppendixBlock = true;
      // Record the range. nullptr here means the unwind destination is the
      // caller.
      UnwindDestToTryRanges[nullptr].push_back(TryRange(RangeBegin, RangeEnd));
      RangeBegin = RangeEnd = nullptr; // Reset range pointers
    }
  }

  assert(EHPadStack.empty());
  // We don't have any unwind destination mismatches to resolve.
  if (UnwindDestToTryRanges.empty())
    return false;

  // If we found instructions that should unwind to the caller but currently
  // have an incorrect unwind destination, we create an appendix block at the
  // end of the function with a local.get and a rethrow instruction.
  if (NeedAppendixBlock) {
    auto *AppendixBB = getAppendixBlock(MF);
    Register ExnReg = MRI.createVirtualRegister(&WebAssembly::EXNREFRegClass);
    BuildMI(AppendixBB, DebugLoc(), TII.get(WebAssembly::RETHROW))
        .addReg(ExnReg);
    // These instruction ranges should branch to this appendix BB.
    for (auto Range : UnwindDestToTryRanges[nullptr])
      BrDestToTryRanges[AppendixBB].push_back(Range);
    BrDestToExnReg[AppendixBB] = ExnReg;
  }

  // We loop through unwind destination EH pads that are targeted from some
  // inner scopes. Because these EH pads are now the destination of more than
  // one scope, we split them so that the handler body is after 'end_try'.
  // - Before
  // ehpad:
  //   catch
  //   local.set n / drop
  //   handler body
  // ...
  // cont:
  //   end_try
  //
  // - After
  // ehpad:
  //   catch
  //   local.set n / drop
  // brdest:               (new)
  //   end_try             (hoisted from 'cont' BB)
  //   handler body        (taken from 'ehpad')
  // ...
  // cont:
  for (auto &P : UnwindDestToTryRanges) {
    NumUnwindMismatches += P.second.size();

    // This means the destination is the appendix BB, which was separately
    // handled above.
    if (!P.first)
      continue;

    MachineBasicBlock *EHPad = P.first;

    // Find the 'catch' instruction and the 'local.set' or 'drop' that follows
    // it. If -wasm-disable-explicit-locals is not set, 'catch' should always
    // be followed by either 'local.set' or a 'drop', because 'br_on_exn' is
    // generated after 'catch' in LateEHPrepare and we don't support blocks
    // taking values yet.
    MachineInstr *Catch = nullptr;
    unsigned ExnReg = 0;
    for (auto &MI : *EHPad) {
      switch (MI.getOpcode()) {
      case WebAssembly::CATCH:
        Catch = &MI;
        ExnReg = Catch->getOperand(0).getReg();
        break;
      }
    }
    assert(Catch && "EH pad does not have a catch");
    assert(ExnReg != 0 && "Invalid register");

    auto SplitPos = std::next(Catch->getIterator());

    // Create a new BB that will be the destination for branches from the inner
    // mismatched scope.
    MachineInstr *BeginTry = EHPadToTry[EHPad];
    MachineInstr *EndTry = BeginToEnd[BeginTry];
    MachineBasicBlock *Cont = EndTry->getParent();
    auto *BrDest = MF.CreateMachineBasicBlock();
    MF.insert(std::next(EHPad->getIterator()), BrDest);
    // Hoist up the existing 'end_try'.
    BrDest->insert(BrDest->end(), EndTry->removeFromParent());
    // Take out the handler body from the EH pad and move it to the new branch
    // destination BB.
    BrDest->splice(BrDest->end(), EHPad, SplitPos, EHPad->end());
    unstackifyVRegsUsedInSplitBB(*EHPad, *BrDest, MFI, MRI);
    // Fix predecessor-successor relationship.
    BrDest->transferSuccessors(EHPad);
    EHPad->addSuccessor(BrDest);

    // All try ranges that were supposed to unwind to this EH pad now have to
    // branch to this new branch dest BB.
    for (auto Range : UnwindDestToTryRanges[EHPad])
      BrDestToTryRanges[BrDest].push_back(Range);
    BrDestToExnReg[BrDest] = ExnReg;

    // In case we fall through to the continuation BB after the catch block, we
    // now have to add a branch to it.
    // - Before
    // try
    //   ...
    //   (falls through to 'cont')
    // catch
    //   handler body
    // end
    //           <-- cont
    //
    // - After
    // try
    //   ...
    //   br %cont (new)
    // catch
    // end
    // handler body
    //           <-- cont
    MachineBasicBlock *EHPadLayoutPred = &*std::prev(EHPad->getIterator());
    MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
    SmallVector<MachineOperand, 4> Cond;
    bool Analyzable = !TII.analyzeBranch(*EHPadLayoutPred, TBB, FBB, Cond);
    if (Analyzable && !TBB && !FBB) {
      DebugLoc DL = EHPadLayoutPred->empty()
                        ? DebugLoc()
                        : EHPadLayoutPred->rbegin()->getDebugLoc();
      BuildMI(EHPadLayoutPred, DL, TII.get(WebAssembly::BR)).addMBB(Cont);
      BrDests.push_back(Cont);
    }
  }

  // For possibly throwing calls whose unwind destinations are currently
  // incorrect because of CFG linearization, we wrap them with a nested
  // try/catch/end_try, and within the new catch block, we branch to the
  // correct handler.
  // - Before
  // mbb:
  //   call @foo       <- Unwind destination mismatch!
  // ehpad:
  //   ...
  //
  // - After
  // mbb:
  //   try                  (new)
  //   call @foo
  // nested-ehpad:          (new)
  //   catch                (new)
  //   local.set n / drop   (new)
  //   br %brdest           (new)
  // nested-end:            (new)
  //   end_try              (new)
  // ehpad:
  //   ...
  for (auto &P : BrDestToTryRanges) {
    MachineBasicBlock *BrDest = P.first;
    auto &TryRanges = P.second;
    unsigned ExnReg = BrDestToExnReg[BrDest];

    for (auto Range : TryRanges) {
      MachineInstr *RangeBegin = nullptr, *RangeEnd = nullptr;
      std::tie(RangeBegin, RangeEnd) = Range;
      auto *MBB = RangeBegin->getParent();
      // Store the first function call from this range, because RangeBegin can
      // be moved to point to the EH_LABEL before the call.
      MachineInstr *RangeBeginCall = RangeBegin;

      // Include possible EH_LABELs in the range.
      if (RangeBegin->getIterator() != MBB->begin() &&
          std::prev(RangeBegin->getIterator())->isEHLabel())
        RangeBegin = &*std::prev(RangeBegin->getIterator());
      if (std::next(RangeEnd->getIterator()) != MBB->end() &&
          std::next(RangeEnd->getIterator())->isEHLabel())
        RangeEnd = &*std::next(RangeEnd->getIterator());

      MachineBasicBlock *EHPad = nullptr;
      for (auto *Succ : MBB->successors()) {
        if (Succ->isEHPad()) {
          EHPad = Succ;
          break;
        }
      }

      // Local expression tree before the first call of this range should go
      // after the nested TRY.
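      // (Their results can be stackified operands consumed by the call, so the
      //  nested TRY must not come between them and the call.)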
      SmallPtrSet<const MachineInstr *, 4> AfterSet;
      AfterSet.insert(RangeBegin);
      AfterSet.insert(RangeBeginCall);
      for (auto I = MachineBasicBlock::iterator(RangeBeginCall),
                E = MBB->begin();
           I != E; --I) {
        if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
          continue;
        if (WebAssembly::isChild(*std::prev(I), MFI))
          AfterSet.insert(&*std::prev(I));
        else
          break;
      }

      // Create the nested try instruction.
      auto InsertPos = getLatestInsertPos(
          MBB, SmallPtrSet<const MachineInstr *, 4>(), AfterSet);
      MachineInstr *NestedTry =
          BuildMI(*MBB, InsertPos, RangeBegin->getDebugLoc(),
                  TII.get(WebAssembly::TRY))
              .addImm(int64_t(WebAssembly::BlockType::Void));

      // Create the nested EH pad and fill instructions in.
      MachineBasicBlock *NestedEHPad = MF.CreateMachineBasicBlock();
      MF.insert(std::next(MBB->getIterator()), NestedEHPad);
      NestedEHPad->setIsEHPad();
      NestedEHPad->setIsEHScopeEntry();
      BuildMI(NestedEHPad, RangeEnd->getDebugLoc(), TII.get(WebAssembly::CATCH),
              ExnReg);
      BuildMI(NestedEHPad, RangeEnd->getDebugLoc(), TII.get(WebAssembly::BR))
          .addMBB(BrDest);

      // Create the nested continuation BB and end_try instruction.
      MachineBasicBlock *NestedCont = MF.CreateMachineBasicBlock();
      MF.insert(std::next(NestedEHPad->getIterator()), NestedCont);
      MachineInstr *NestedEndTry =
          BuildMI(*NestedCont, NestedCont->begin(), RangeEnd->getDebugLoc(),
                  TII.get(WebAssembly::END_TRY));
      // In case MBB has more instructions after the try range, move them to
      // the new nested continuation BB.
      NestedCont->splice(NestedCont->end(), MBB,
                         std::next(RangeEnd->getIterator()), MBB->end());
      unstackifyVRegsUsedInSplitBB(*MBB, *NestedCont, MFI, MRI);
      registerTryScope(NestedTry, NestedEndTry, NestedEHPad);

      // Fix predecessor-successor relationship.
      NestedCont->transferSuccessors(MBB);
      if (EHPad) {
        NestedCont->removeSuccessor(EHPad);
        // If EHPad does not have any predecessors left after removing the
        // NestedCont predecessor, remove its successor too, because this EHPad
        // is not reachable from the entry BB anyway. We can't remove the EHPad
        // BB itself because it can contain 'catch' or 'end', which are
        // necessary for keeping the try-catch-end structure.
        if (EHPad->pred_empty())
          EHPad->removeSuccessor(BrDest);
      }
      MBB->addSuccessor(NestedEHPad);
      MBB->addSuccessor(NestedCont);
      NestedEHPad->addSuccessor(BrDest);
    }
  }

  // Renumber BBs and recalculate ScopeTop info because new BBs might have been
  // created and inserted above.
  MF.RenumberBlocks();
  ScopeTops.clear();
  ScopeTops.resize(MF.getNumBlockIDs());
  for (auto &MBB : reverse(MF)) {
    for (auto &MI : reverse(MBB)) {
      if (ScopeTops[MBB.getNumber()])
        break;
      switch (MI.getOpcode()) {
      case WebAssembly::END_BLOCK:
      case WebAssembly::END_LOOP:
      case WebAssembly::END_TRY:
        ScopeTops[MBB.getNumber()] = EndToBegin[&MI]->getParent();
        break;
      case WebAssembly::CATCH:
        ScopeTops[MBB.getNumber()] = EHPadToTry[&MBB]->getParent();
        break;
      }
    }
  }

  // Recompute the dominator tree.
  getAnalysis<MachineDominatorTree>().runOnMachineFunction(MF);

  // Place block markers for newly added branches, if necessary.

  // If we've created an appendix BB and a branch to it, place a
  // block/end_block marker for that. For some new branches, those branch
  // destination BBs start with a hoisted end_try marker, so we don't need a
  // new marker there.
  if (AppendixBB)
    BrDests.push_back(AppendixBB);

  llvm::sort(BrDests,
             [&](const MachineBasicBlock *A, const MachineBasicBlock *B) {
               auto ANum = A->getNumber();
               auto BNum = B->getNumber();
               return ANum < BNum;
             });
  for (auto *Dest : BrDests)
    placeBlockMarker(*Dest);

  return true;
}

static unsigned
getDepth(const SmallVectorImpl<const MachineBasicBlock *> &Stack,
         const MachineBasicBlock *MBB) {
  unsigned Depth = 0;
  for (auto X : reverse(Stack)) {
    if (X == MBB)
      break;
    ++Depth;
  }
  assert(Depth < Stack.size() && "Branch destination should be in scope");
  return Depth;
}

/// In normal assembly languages, when the end of a function is unreachable,
/// because the function ends in an infinite loop or a noreturn call or
/// similar, it isn't necessary to worry about the function return type at the
/// end of the function, because it's never reached. However, in WebAssembly,
/// blocks that end at the function end need to have a return type signature
/// that matches the function signature, even though it's unreachable. This
/// function checks for such cases and fixes up the signatures.
void WebAssemblyCFGStackify::fixEndsAtEndOfFunction(MachineFunction &MF) {
  const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();

  if (MFI.getResults().empty())
    return;

  // MCInstLower will add the proper types to multivalue signatures based on
  // the function return type.
  WebAssembly::BlockType RetType =
      MFI.getResults().size() > 1
          ? WebAssembly::BlockType::Multivalue
          : WebAssembly::BlockType(
                WebAssembly::toValType(MFI.getResults().front()));

  for (MachineBasicBlock &MBB : reverse(MF)) {
    for (MachineInstr &MI : reverse(MBB)) {
      if (MI.isPosition() || MI.isDebugInstr())
        continue;
      switch (MI.getOpcode()) {
      case WebAssembly::END_BLOCK:
      case WebAssembly::END_LOOP:
      case WebAssembly::END_TRY:
        EndToBegin[&MI]->getOperand(0).setImm(int32_t(RetType));
        continue;
      default:
        // Something other than an `end`. We're done.
        return;
      }
    }
  }
}

// WebAssembly functions end with an end instruction, as if the function body
// were a block.
static void appendEndToFunction(MachineFunction &MF,
                                const WebAssemblyInstrInfo &TII) {
  BuildMI(MF.back(), MF.back().end(),
          MF.back().findPrevDebugLoc(MF.back().end()),
          TII.get(WebAssembly::END_FUNCTION));
}

/// Insert LOOP/TRY/BLOCK markers at appropriate places.
void WebAssemblyCFGStackify::placeMarkers(MachineFunction &MF) {
  // We allocate one more than the number of blocks in the function to
  // accommodate the possible fake block we may insert at the end.
  ScopeTops.resize(MF.getNumBlockIDs() + 1);
  // Place the LOOP for MBB if MBB is the header of a loop.
  for (auto &MBB : MF)
    placeLoopMarker(MBB);

  const MCAsmInfo *MCAI = MF.getTarget().getMCAsmInfo();
  for (auto &MBB : MF) {
    if (MBB.isEHPad()) {
      // Place the TRY for MBB if MBB is the EH pad of an exception.
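      // TRY markers are only emitted when wasm exception handling is enabled
      // and this function actually has a personality function.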
      if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm &&
          MF.getFunction().hasPersonalityFn())
        placeTryMarker(MBB);
    } else {
      // Place the BLOCK for MBB if MBB is branched to from above.
      placeBlockMarker(MBB);
    }
  }
  // Fix mismatches in unwind destinations induced by linearizing the code.
  if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm &&
      MF.getFunction().hasPersonalityFn())
    fixUnwindMismatches(MF);
}

void WebAssemblyCFGStackify::rewriteDepthImmediates(MachineFunction &MF) {
  // Now rewrite references to basic blocks to be depth immediates.
  SmallVector<const MachineBasicBlock *, 8> Stack;
  for (auto &MBB : reverse(MF)) {
    for (auto I = MBB.rbegin(), E = MBB.rend(); I != E; ++I) {
      MachineInstr &MI = *I;
      switch (MI.getOpcode()) {
      case WebAssembly::BLOCK:
      case WebAssembly::TRY:
        assert(ScopeTops[Stack.back()->getNumber()]->getNumber() <=
                   MBB.getNumber() &&
               "Block/try marker should be balanced");
        Stack.pop_back();
        break;

      case WebAssembly::LOOP:
        assert(Stack.back() == &MBB && "Loop top should be balanced");
        Stack.pop_back();
        break;

      case WebAssembly::END_BLOCK:
      case WebAssembly::END_TRY:
        Stack.push_back(&MBB);
        break;

      case WebAssembly::END_LOOP:
        Stack.push_back(EndToBegin[&MI]->getParent());
        break;

      default:
        if (MI.isTerminator()) {
          // Rewrite MBB operands to be depth immediates.
          SmallVector<MachineOperand, 4> Ops(MI.operands());
          while (MI.getNumOperands() > 0)
            MI.RemoveOperand(MI.getNumOperands() - 1);
          for (auto MO : Ops) {
            if (MO.isMBB())
              MO = MachineOperand::CreateImm(getDepth(Stack, MO.getMBB()));
            MI.addOperand(MF, MO);
          }
        }
        break;
      }
    }
  }
  assert(Stack.empty() && "Control flow should be balanced");
}

void WebAssemblyCFGStackify::releaseMemory() {
  ScopeTops.clear();
  BeginToEnd.clear();
  EndToBegin.clear();
  TryToEHPad.clear();
  EHPadToTry.clear();
  AppendixBB = nullptr;
}

bool WebAssemblyCFGStackify::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** CFG Stackifying **********\n"
                       "********** Function: "
                    << MF.getName() << '\n');
  const MCAsmInfo *MCAI = MF.getTarget().getMCAsmInfo();

  releaseMemory();

  // Liveness is not tracked for VALUE_STACK physreg.
  MF.getRegInfo().invalidateLiveness();

  // Place the BLOCK/LOOP/TRY markers to indicate the beginnings of scopes.
  placeMarkers(MF);

  // Remove unnecessary instructions possibly introduced by try/end_trys.
  if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm &&
      MF.getFunction().hasPersonalityFn())
    removeUnnecessaryInstrs(MF);

  // Convert MBB operands in terminators to relative depth immediates.
  rewriteDepthImmediates(MF);

  // Fix up block/loop/try signatures at the end of the function to conform to
  // WebAssembly's rules.
  fixEndsAtEndOfFunction(MF);

  // Add an end instruction at the end of the function body.
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  if (!MF.getSubtarget<WebAssemblySubtarget>()
           .getTargetTriple()
           .isOSBinFormatELF())
    appendEndToFunction(MF, TII);

  MF.getInfo<WebAssemblyFunctionInfo>()->setCFGStackified();
  return true;
}