//===-- WebAssemblyCFGStackify.cpp - CFG Stackification -------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements a CFG stackification pass.
///
/// This pass inserts BLOCK, LOOP, and TRY markers to mark the start of scopes,
/// since scope boundaries serve as the labels for WebAssembly's control
/// transfers.
///
/// This is sufficient to convert arbitrary CFGs into a form that works on
/// WebAssembly, provided that all loops are single-entry.
///
/// When exceptions are used, this pass also fixes mismatches in unwind
/// destinations created while transforming the CFG into wasm's structured
/// format.
///
//===----------------------------------------------------------------------===//

#include "WebAssembly.h"
#include "WebAssemblyExceptionInfo.h"
#include "WebAssemblyMachineFunctionInfo.h"
#include "WebAssemblySubtarget.h"
#include "WebAssemblyUtilities.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Target/TargetMachine.h"
using namespace llvm;

#define DEBUG_TYPE "wasm-cfg-stackify"

STATISTIC(NumUnwindMismatches, "Number of EH pad unwind mismatches found");

namespace {
class WebAssemblyCFGStackify final : public MachineFunctionPass {
  StringRef getPassName() const override { return "WebAssembly CFG Stackify"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<MachineLoopInfo>();
    AU.addRequired<WebAssemblyExceptionInfo>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  // For each block whose label represents the end of a scope, record the block
  // which holds the beginning of the scope. This will allow us to quickly skip
  // over scoped regions when walking blocks.
  SmallVector<MachineBasicBlock *, 8> ScopeTops;

  // Placing markers.
  void placeMarkers(MachineFunction &MF);
  void placeBlockMarker(MachineBasicBlock &MBB);
  void placeLoopMarker(MachineBasicBlock &MBB);
  void placeTryMarker(MachineBasicBlock &MBB);
  void removeUnnecessaryInstrs(MachineFunction &MF);
  bool fixUnwindMismatches(MachineFunction &MF);
  void rewriteDepthImmediates(MachineFunction &MF);
  void fixEndsAtEndOfFunction(MachineFunction &MF);

  // For each BLOCK|LOOP|TRY, the corresponding END_(BLOCK|LOOP|TRY).
  DenseMap<const MachineInstr *, MachineInstr *> BeginToEnd;
  // For each END_(BLOCK|LOOP|TRY), the corresponding BLOCK|LOOP|TRY.
72 DenseMap<const MachineInstr *, MachineInstr *> EndToBegin; 73 // <TRY marker, EH pad> map 74 DenseMap<const MachineInstr *, MachineBasicBlock *> TryToEHPad; 75 // <EH pad, TRY marker> map 76 DenseMap<const MachineBasicBlock *, MachineInstr *> EHPadToTry; 77 78 // There can be an appendix block at the end of each function, shared for: 79 // - creating a correct signature for fallthrough returns 80 // - target for rethrows that need to unwind to the caller, but are trapped 81 // inside another try/catch 82 MachineBasicBlock *AppendixBB = nullptr; 83 MachineBasicBlock *getAppendixBlock(MachineFunction &MF) { 84 if (!AppendixBB) { 85 AppendixBB = MF.CreateMachineBasicBlock(); 86 // Give it a fake predecessor so that AsmPrinter prints its label. 87 AppendixBB->addSuccessor(AppendixBB); 88 MF.push_back(AppendixBB); 89 } 90 return AppendixBB; 91 } 92 93 // Helper functions to register / unregister scope information created by 94 // marker instructions. 95 void registerScope(MachineInstr *Begin, MachineInstr *End); 96 void registerTryScope(MachineInstr *Begin, MachineInstr *End, 97 MachineBasicBlock *EHPad); 98 void unregisterScope(MachineInstr *Begin); 99 100 public: 101 static char ID; // Pass identification, replacement for typeid 102 WebAssemblyCFGStackify() : MachineFunctionPass(ID) {} 103 ~WebAssemblyCFGStackify() override { releaseMemory(); } 104 void releaseMemory() override; 105 }; 106 } // end anonymous namespace 107 108 char WebAssemblyCFGStackify::ID = 0; 109 INITIALIZE_PASS(WebAssemblyCFGStackify, DEBUG_TYPE, 110 "Insert BLOCK/LOOP/TRY markers for WebAssembly scopes", false, 111 false) 112 113 FunctionPass *llvm::createWebAssemblyCFGStackify() { 114 return new WebAssemblyCFGStackify(); 115 } 116 117 /// Test whether Pred has any terminators explicitly branching to MBB, as 118 /// opposed to falling through. Note that it's possible (eg. in unoptimized 119 /// code) for a branch instruction to both branch to a block and fallthrough 120 /// to it, so we check the actual branch operands to see if there are any 121 /// explicit mentions. 122 static bool explicitlyBranchesTo(MachineBasicBlock *Pred, 123 MachineBasicBlock *MBB) { 124 for (MachineInstr &MI : Pred->terminators()) 125 for (MachineOperand &MO : MI.explicit_operands()) 126 if (MO.isMBB() && MO.getMBB() == MBB) 127 return true; 128 return false; 129 } 130 131 // Returns an iterator to the earliest position possible within the MBB, 132 // satisfying the restrictions given by BeforeSet and AfterSet. BeforeSet 133 // contains instructions that should go before the marker, and AfterSet contains 134 // ones that should go after the marker. In this function, AfterSet is only 135 // used for sanity checking. 136 static MachineBasicBlock::iterator 137 getEarliestInsertPos(MachineBasicBlock *MBB, 138 const SmallPtrSet<const MachineInstr *, 4> &BeforeSet, 139 const SmallPtrSet<const MachineInstr *, 4> &AfterSet) { 140 auto InsertPos = MBB->end(); 141 while (InsertPos != MBB->begin()) { 142 if (BeforeSet.count(&*std::prev(InsertPos))) { 143 #ifndef NDEBUG 144 // Sanity check 145 for (auto Pos = InsertPos, E = MBB->begin(); Pos != E; --Pos) 146 assert(!AfterSet.count(&*std::prev(Pos))); 147 #endif 148 break; 149 } 150 --InsertPos; 151 } 152 return InsertPos; 153 } 154 155 // Returns an iterator to the latest position possible within the MBB, 156 // satisfying the restrictions given by BeforeSet and AfterSet. 
BeforeSet 157 // contains instructions that should go before the marker, and AfterSet contains 158 // ones that should go after the marker. In this function, BeforeSet is only 159 // used for sanity checking. 160 static MachineBasicBlock::iterator 161 getLatestInsertPos(MachineBasicBlock *MBB, 162 const SmallPtrSet<const MachineInstr *, 4> &BeforeSet, 163 const SmallPtrSet<const MachineInstr *, 4> &AfterSet) { 164 auto InsertPos = MBB->begin(); 165 while (InsertPos != MBB->end()) { 166 if (AfterSet.count(&*InsertPos)) { 167 #ifndef NDEBUG 168 // Sanity check 169 for (auto Pos = InsertPos, E = MBB->end(); Pos != E; ++Pos) 170 assert(!BeforeSet.count(&*Pos)); 171 #endif 172 break; 173 } 174 ++InsertPos; 175 } 176 return InsertPos; 177 } 178 179 void WebAssemblyCFGStackify::registerScope(MachineInstr *Begin, 180 MachineInstr *End) { 181 BeginToEnd[Begin] = End; 182 EndToBegin[End] = Begin; 183 } 184 185 void WebAssemblyCFGStackify::registerTryScope(MachineInstr *Begin, 186 MachineInstr *End, 187 MachineBasicBlock *EHPad) { 188 registerScope(Begin, End); 189 TryToEHPad[Begin] = EHPad; 190 EHPadToTry[EHPad] = Begin; 191 } 192 193 void WebAssemblyCFGStackify::unregisterScope(MachineInstr *Begin) { 194 assert(BeginToEnd.count(Begin)); 195 MachineInstr *End = BeginToEnd[Begin]; 196 assert(EndToBegin.count(End)); 197 BeginToEnd.erase(Begin); 198 EndToBegin.erase(End); 199 MachineBasicBlock *EHPad = TryToEHPad.lookup(Begin); 200 if (EHPad) { 201 assert(EHPadToTry.count(EHPad)); 202 TryToEHPad.erase(Begin); 203 EHPadToTry.erase(EHPad); 204 } 205 } 206 207 /// Insert a BLOCK marker for branches to MBB (if needed). 208 // TODO Consider a more generalized way of handling block (and also loop and 209 // try) signatures when we implement the multi-value proposal later. 210 void WebAssemblyCFGStackify::placeBlockMarker(MachineBasicBlock &MBB) { 211 assert(!MBB.isEHPad()); 212 MachineFunction &MF = *MBB.getParent(); 213 auto &MDT = getAnalysis<MachineDominatorTree>(); 214 const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo(); 215 const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>(); 216 217 // First compute the nearest common dominator of all forward non-fallthrough 218 // predecessors so that we minimize the time that the BLOCK is on the stack, 219 // which reduces overall stack height. 220 MachineBasicBlock *Header = nullptr; 221 bool IsBranchedTo = false; 222 bool IsBrOnExn = false; 223 MachineInstr *BrOnExn = nullptr; 224 int MBBNumber = MBB.getNumber(); 225 for (MachineBasicBlock *Pred : MBB.predecessors()) { 226 if (Pred->getNumber() < MBBNumber) { 227 Header = Header ? MDT.findNearestCommonDominator(Header, Pred) : Pred; 228 if (explicitlyBranchesTo(Pred, &MBB)) { 229 IsBranchedTo = true; 230 if (Pred->getFirstTerminator()->getOpcode() == WebAssembly::BR_ON_EXN) { 231 IsBrOnExn = true; 232 assert(!BrOnExn && "There should be only one br_on_exn per block"); 233 BrOnExn = &*Pred->getFirstTerminator(); 234 } 235 } 236 } 237 } 238 if (!Header) 239 return; 240 if (!IsBranchedTo) 241 return; 242 243 assert(&MBB != &MF.front() && "Header blocks shouldn't have predecessors"); 244 MachineBasicBlock *LayoutPred = MBB.getPrevNode(); 245 246 // If the nearest common dominator is inside a more deeply nested context, 247 // walk out to the nearest scope which isn't more deeply nested. 
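  //
  // Illustrative sketch (hypothetical block numbers, not from any real
  // function): suppose Header is bb1, MBB is bb5, LayoutPred is bb4, and an
  // earlier marker recorded ScopeTops[bb4] == bb2. That scope is nested deeper
  // than Header, so the iterator hops from bb4 straight to bb2, skipping bb3.
  // If instead ScopeTops[bb4] were bb0, which is above Header, Header is
  // hoisted to bb0 and the walk stops, so the new BLOCK encloses that existing
  // scope rather than crossing it.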
  for (MachineFunction::iterator I(LayoutPred), E(Header); I != E; --I) {
    if (MachineBasicBlock *ScopeTop = ScopeTops[I->getNumber()]) {
      if (ScopeTop->getNumber() > Header->getNumber()) {
        // Skip over an intervening scope.
        I = std::next(ScopeTop->getIterator());
      } else {
        // We found a scope level at an appropriate depth.
        Header = ScopeTop;
        break;
      }
    }
  }

  // Decide where in Header to put the BLOCK.

  // Instructions that should go before the BLOCK.
  SmallPtrSet<const MachineInstr *, 4> BeforeSet;
  // Instructions that should go after the BLOCK.
  SmallPtrSet<const MachineInstr *, 4> AfterSet;
  for (const auto &MI : *Header) {
    // If there is a previously placed LOOP marker and the bottom block of the
    // loop is above MBB, it should be after the BLOCK, because the loop is
    // nested in this BLOCK. Otherwise it should be before the BLOCK.
    if (MI.getOpcode() == WebAssembly::LOOP) {
      auto *LoopBottom = BeginToEnd[&MI]->getParent()->getPrevNode();
      if (MBB.getNumber() > LoopBottom->getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

    // If there is a previously placed BLOCK/TRY marker and its corresponding
    // END marker is before the current BLOCK's END marker, that should be
    // placed after this BLOCK. Otherwise it should be placed before this BLOCK
    // marker.
    if (MI.getOpcode() == WebAssembly::BLOCK ||
        MI.getOpcode() == WebAssembly::TRY) {
      if (BeginToEnd[&MI]->getParent()->getNumber() <= MBB.getNumber())
        AfterSet.insert(&MI);
#ifndef NDEBUG
      else
        BeforeSet.insert(&MI);
#endif
    }

#ifndef NDEBUG
    // All END_(BLOCK|LOOP|TRY) markers should be before the BLOCK.
    if (MI.getOpcode() == WebAssembly::END_BLOCK ||
        MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY)
      BeforeSet.insert(&MI);
#endif

    // Terminators should go after the BLOCK.
    if (MI.isTerminator())
      AfterSet.insert(&MI);
  }

  // Local expression tree should go after the BLOCK.
  for (auto I = Header->getFirstTerminator(), E = Header->begin(); I != E;
       --I) {
    if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
      continue;
    if (WebAssembly::isChild(*std::prev(I), MFI))
      AfterSet.insert(&*std::prev(I));
    else
      break;
  }

  // Add the BLOCK.

  // 'br_on_exn' extracts the exnref object and pushes a variable number of
  // values, depending on its tag. For a C++ exception, it's a single i32
  // value, and the generated code will be in the form of:
  //   block i32
  //     br_on_exn 0, $__cpp_exception
  //     rethrow
  //   end_block
  WebAssembly::BlockType ReturnType = WebAssembly::BlockType::Void;
  if (IsBrOnExn) {
    const char *TagName = BrOnExn->getOperand(1).getSymbolName();
    if (std::strcmp(TagName, "__cpp_exception") != 0)
      llvm_unreachable("Only C++ exception is supported");
    ReturnType = WebAssembly::BlockType::I32;
  }

  auto InsertPos = getLatestInsertPos(Header, BeforeSet, AfterSet);
  MachineInstr *Begin =
      BuildMI(*Header, InsertPos, Header->findDebugLoc(InsertPos),
              TII.get(WebAssembly::BLOCK))
          .addImm(int64_t(ReturnType));

  // Decide where in MBB to put the END_BLOCK.
  BeforeSet.clear();
  AfterSet.clear();
  for (auto &MI : MBB) {
#ifndef NDEBUG
    // END_BLOCK should precede existing LOOP and TRY markers.
348 if (MI.getOpcode() == WebAssembly::LOOP || 349 MI.getOpcode() == WebAssembly::TRY) 350 AfterSet.insert(&MI); 351 #endif 352 353 // If there is a previously placed END_LOOP marker and the header of the 354 // loop is above this block's header, the END_LOOP should be placed after 355 // the BLOCK, because the loop contains this block. Otherwise the END_LOOP 356 // should be placed before the BLOCK. The same for END_TRY. 357 if (MI.getOpcode() == WebAssembly::END_LOOP || 358 MI.getOpcode() == WebAssembly::END_TRY) { 359 if (EndToBegin[&MI]->getParent()->getNumber() >= Header->getNumber()) 360 BeforeSet.insert(&MI); 361 #ifndef NDEBUG 362 else 363 AfterSet.insert(&MI); 364 #endif 365 } 366 } 367 368 // Mark the end of the block. 369 InsertPos = getEarliestInsertPos(&MBB, BeforeSet, AfterSet); 370 MachineInstr *End = BuildMI(MBB, InsertPos, MBB.findPrevDebugLoc(InsertPos), 371 TII.get(WebAssembly::END_BLOCK)); 372 registerScope(Begin, End); 373 374 // Track the farthest-spanning scope that ends at this point. 375 int Number = MBB.getNumber(); 376 if (!ScopeTops[Number] || 377 ScopeTops[Number]->getNumber() > Header->getNumber()) 378 ScopeTops[Number] = Header; 379 } 380 381 /// Insert a LOOP marker for a loop starting at MBB (if it's a loop header). 382 void WebAssemblyCFGStackify::placeLoopMarker(MachineBasicBlock &MBB) { 383 MachineFunction &MF = *MBB.getParent(); 384 const auto &MLI = getAnalysis<MachineLoopInfo>(); 385 const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo(); 386 387 MachineLoop *Loop = MLI.getLoopFor(&MBB); 388 if (!Loop || Loop->getHeader() != &MBB) 389 return; 390 391 // The operand of a LOOP is the first block after the loop. If the loop is the 392 // bottom of the function, insert a dummy block at the end. 393 MachineBasicBlock *Bottom = WebAssembly::getBottom(Loop); 394 auto Iter = std::next(Bottom->getIterator()); 395 if (Iter == MF.end()) { 396 getAppendixBlock(MF); 397 Iter = std::next(Bottom->getIterator()); 398 } 399 MachineBasicBlock *AfterLoop = &*Iter; 400 401 // Decide where in Header to put the LOOP. 402 SmallPtrSet<const MachineInstr *, 4> BeforeSet; 403 SmallPtrSet<const MachineInstr *, 4> AfterSet; 404 for (const auto &MI : MBB) { 405 // LOOP marker should be after any existing loop that ends here. Otherwise 406 // we assume the instruction belongs to the loop. 407 if (MI.getOpcode() == WebAssembly::END_LOOP) 408 BeforeSet.insert(&MI); 409 #ifndef NDEBUG 410 else 411 AfterSet.insert(&MI); 412 #endif 413 } 414 415 // Mark the beginning of the loop. 416 auto InsertPos = getEarliestInsertPos(&MBB, BeforeSet, AfterSet); 417 MachineInstr *Begin = BuildMI(MBB, InsertPos, MBB.findDebugLoc(InsertPos), 418 TII.get(WebAssembly::LOOP)) 419 .addImm(int64_t(WebAssembly::BlockType::Void)); 420 421 // Decide where in Header to put the END_LOOP. 422 BeforeSet.clear(); 423 AfterSet.clear(); 424 #ifndef NDEBUG 425 for (const auto &MI : MBB) 426 // Existing END_LOOP markers belong to parent loops of this loop 427 if (MI.getOpcode() == WebAssembly::END_LOOP) 428 AfterSet.insert(&MI); 429 #endif 430 431 // Mark the end of the loop (using arbitrary debug location that branched to 432 // the loop end as its location). 433 InsertPos = getEarliestInsertPos(AfterLoop, BeforeSet, AfterSet); 434 DebugLoc EndDL = AfterLoop->pred_empty() 435 ? 
DebugLoc() 436 : (*AfterLoop->pred_rbegin())->findBranchDebugLoc(); 437 MachineInstr *End = 438 BuildMI(*AfterLoop, InsertPos, EndDL, TII.get(WebAssembly::END_LOOP)); 439 registerScope(Begin, End); 440 441 assert((!ScopeTops[AfterLoop->getNumber()] || 442 ScopeTops[AfterLoop->getNumber()]->getNumber() < MBB.getNumber()) && 443 "With block sorting the outermost loop for a block should be first."); 444 if (!ScopeTops[AfterLoop->getNumber()]) 445 ScopeTops[AfterLoop->getNumber()] = &MBB; 446 } 447 448 void WebAssemblyCFGStackify::placeTryMarker(MachineBasicBlock &MBB) { 449 assert(MBB.isEHPad()); 450 MachineFunction &MF = *MBB.getParent(); 451 auto &MDT = getAnalysis<MachineDominatorTree>(); 452 const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo(); 453 const auto &WEI = getAnalysis<WebAssemblyExceptionInfo>(); 454 const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>(); 455 456 // Compute the nearest common dominator of all unwind predecessors 457 MachineBasicBlock *Header = nullptr; 458 int MBBNumber = MBB.getNumber(); 459 for (auto *Pred : MBB.predecessors()) { 460 if (Pred->getNumber() < MBBNumber) { 461 Header = Header ? MDT.findNearestCommonDominator(Header, Pred) : Pred; 462 assert(!explicitlyBranchesTo(Pred, &MBB) && 463 "Explicit branch to an EH pad!"); 464 } 465 } 466 if (!Header) 467 return; 468 469 // If this try is at the bottom of the function, insert a dummy block at the 470 // end. 471 WebAssemblyException *WE = WEI.getExceptionFor(&MBB); 472 assert(WE); 473 MachineBasicBlock *Bottom = WebAssembly::getBottom(WE); 474 475 auto Iter = std::next(Bottom->getIterator()); 476 if (Iter == MF.end()) { 477 getAppendixBlock(MF); 478 Iter = std::next(Bottom->getIterator()); 479 } 480 MachineBasicBlock *Cont = &*Iter; 481 482 assert(Cont != &MF.front()); 483 MachineBasicBlock *LayoutPred = Cont->getPrevNode(); 484 485 // If the nearest common dominator is inside a more deeply nested context, 486 // walk out to the nearest scope which isn't more deeply nested. 487 for (MachineFunction::iterator I(LayoutPred), E(Header); I != E; --I) { 488 if (MachineBasicBlock *ScopeTop = ScopeTops[I->getNumber()]) { 489 if (ScopeTop->getNumber() > Header->getNumber()) { 490 // Skip over an intervening scope. 491 I = std::next(ScopeTop->getIterator()); 492 } else { 493 // We found a scope level at an appropriate depth. 494 Header = ScopeTop; 495 break; 496 } 497 } 498 } 499 500 // Decide where in Header to put the TRY. 501 502 // Instructions that should go before the TRY. 503 SmallPtrSet<const MachineInstr *, 4> BeforeSet; 504 // Instructions that should go after the TRY. 505 SmallPtrSet<const MachineInstr *, 4> AfterSet; 506 for (const auto &MI : *Header) { 507 // If there is a previously placed LOOP marker and the bottom block of the 508 // loop is above MBB, it should be after the TRY, because the loop is nested 509 // in this TRY. Otherwise it should be before the TRY. 510 if (MI.getOpcode() == WebAssembly::LOOP) { 511 auto *LoopBottom = BeginToEnd[&MI]->getParent()->getPrevNode(); 512 if (MBB.getNumber() > LoopBottom->getNumber()) 513 AfterSet.insert(&MI); 514 #ifndef NDEBUG 515 else 516 BeforeSet.insert(&MI); 517 #endif 518 } 519 520 // All previously inserted BLOCK/TRY markers should be after the TRY because 521 // they are all nested trys. 522 if (MI.getOpcode() == WebAssembly::BLOCK || 523 MI.getOpcode() == WebAssembly::TRY) 524 AfterSet.insert(&MI); 525 526 #ifndef NDEBUG 527 // All END_(BLOCK/LOOP/TRY) markers should be before the TRY. 
    if (MI.getOpcode() == WebAssembly::END_BLOCK ||
        MI.getOpcode() == WebAssembly::END_LOOP ||
        MI.getOpcode() == WebAssembly::END_TRY)
      BeforeSet.insert(&MI);
#endif

    // Terminators should go after the TRY.
    if (MI.isTerminator())
      AfterSet.insert(&MI);
  }

  // If Header unwinds to MBB (= Header contains 'invoke'), the try block
  // should contain the call within it. So the call should go after the TRY.
  // The exception is when the header's terminator is a rethrow instruction, in
  // which case that instruction, not a call instruction before it, is going to
  // throw.
  MachineInstr *ThrowingCall = nullptr;
  if (MBB.isPredecessor(Header)) {
    auto TermPos = Header->getFirstTerminator();
    if (TermPos == Header->end() ||
        TermPos->getOpcode() != WebAssembly::RETHROW) {
      for (auto &MI : reverse(*Header)) {
        if (MI.isCall()) {
          AfterSet.insert(&MI);
          ThrowingCall = &MI;
          // Possibly throwing calls are usually wrapped by EH_LABEL
          // instructions. We don't want to split them and the call.
          if (MI.getIterator() != Header->begin() &&
              std::prev(MI.getIterator())->isEHLabel()) {
            AfterSet.insert(&*std::prev(MI.getIterator()));
            ThrowingCall = &*std::prev(MI.getIterator());
          }
          break;
        }
      }
    }
  }

  // Local expression tree should go after the TRY.
  // For BLOCK placement, we start the search from the previous instruction of
  // a BB's terminator, but in TRY's case, we should start from the previous
  // instruction of a call that can throw, or an EH_LABEL that precedes the
  // call, because the return values of the call's previous instructions can be
  // stackified and consumed by the throwing call.
  auto SearchStartPt = ThrowingCall ? MachineBasicBlock::iterator(ThrowingCall)
                                    : Header->getFirstTerminator();
  for (auto I = SearchStartPt, E = Header->begin(); I != E; --I) {
    if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition())
      continue;
    if (WebAssembly::isChild(*std::prev(I), MFI))
      AfterSet.insert(&*std::prev(I));
    else
      break;
  }

  // Add the TRY.
  auto InsertPos = getLatestInsertPos(Header, BeforeSet, AfterSet);
  MachineInstr *Begin =
      BuildMI(*Header, InsertPos, Header->findDebugLoc(InsertPos),
              TII.get(WebAssembly::TRY))
          .addImm(int64_t(WebAssembly::BlockType::Void));

  // Decide where in Cont to put the END_TRY.
  BeforeSet.clear();
  AfterSet.clear();
  for (const auto &MI : *Cont) {
#ifndef NDEBUG
    // END_TRY should precede existing LOOP and BLOCK markers.
    if (MI.getOpcode() == WebAssembly::LOOP ||
        MI.getOpcode() == WebAssembly::BLOCK)
      AfterSet.insert(&MI);

    // All END_TRY markers placed earlier belong to exceptions that contain
    // this one.
    if (MI.getOpcode() == WebAssembly::END_TRY)
      AfterSet.insert(&MI);
#endif

    // If there is a previously placed END_LOOP marker and its header is after
    // where the TRY marker is, this loop is contained within the 'catch' part,
    // so the END_TRY marker should go after that. Otherwise, the whole
    // try-catch is contained within this loop, so the END_TRY should go before
    // that.
    if (MI.getOpcode() == WebAssembly::END_LOOP) {
      // For a LOOP to be after TRY, LOOP's BB should be after TRY's BB; if
      // they are in the same BB, LOOP is always before TRY.
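      //
      // For illustration (hypothetical nesting, not from any real function):
      // when the loop lives in the 'catch' part, the markers must nest as
      //   try ... catch ... loop ... end_loop ... end_try
      // so END_LOOP goes before END_TRY; when the loop encloses the whole
      // try-catch, they must nest as
      //   loop ... try ... catch ... end_try ... end_loop
      // so END_TRY goes before END_LOOP.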
      if (EndToBegin[&MI]->getParent()->getNumber() > Header->getNumber())
        BeforeSet.insert(&MI);
#ifndef NDEBUG
      else
        AfterSet.insert(&MI);
#endif
    }

    // It is not possible for an END_BLOCK to be already in this block.
  }

  // Mark the end of the TRY.
  InsertPos = getEarliestInsertPos(Cont, BeforeSet, AfterSet);
  MachineInstr *End =
      BuildMI(*Cont, InsertPos, Bottom->findBranchDebugLoc(),
              TII.get(WebAssembly::END_TRY));
  registerTryScope(Begin, End, &MBB);

  // Track the farthest-spanning scope that ends at this point. We create two
  // mappings: (BB with 'end_try' -> BB with 'try') and (BB with 'catch' -> BB
  // with 'try'). We need to create 'catch' -> 'try' mapping here too because
  // markers should not span across 'catch'. For example, this should not
  // happen:
  //
  // try
  //   block     --|  (X)
  // catch         |
  //   end_block --|
  // end_try
  for (int Number : {Cont->getNumber(), MBB.getNumber()}) {
    if (!ScopeTops[Number] ||
        ScopeTops[Number]->getNumber() > Header->getNumber())
      ScopeTops[Number] = Header;
  }
}

void WebAssemblyCFGStackify::removeUnnecessaryInstrs(MachineFunction &MF) {
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();

  // When there is an unconditional branch right before a catch instruction and
  // it branches to the end of the end_try marker, we don't need the branch,
  // because if there is no exception, the control flow transfers to that point
  // anyway.
  // bb0:
  //   try
  //     ...
  //     br bb2      <- Not necessary
  // bb1:
  //   catch
  //     ...
  // bb2:
  //   end
  for (auto &MBB : MF) {
    if (!MBB.isEHPad())
      continue;

    MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
    SmallVector<MachineOperand, 4> Cond;
    MachineBasicBlock *EHPadLayoutPred = MBB.getPrevNode();
    MachineBasicBlock *Cont = BeginToEnd[EHPadToTry[&MBB]]->getParent();
    bool Analyzable = !TII.analyzeBranch(*EHPadLayoutPred, TBB, FBB, Cond);
    // This condition means either
    // 1. This BB ends with a single unconditional branch whose destination is
    //    Cont.
    // 2. This BB ends with a conditional branch followed by an unconditional
    //    branch, and the unconditional branch's destination is Cont.
    // In both cases, we want to remove the last (= unconditional) branch.
    if (Analyzable && ((Cond.empty() && TBB && TBB == Cont) ||
                       (!Cond.empty() && FBB && FBB == Cont))) {
      bool ErasedUncondBr = false;
      (void)ErasedUncondBr;
      for (auto I = EHPadLayoutPred->end(), E = EHPadLayoutPred->begin();
           I != E; --I) {
        auto PrevI = std::prev(I);
        if (PrevI->isTerminator()) {
          assert(PrevI->getOpcode() == WebAssembly::BR);
          PrevI->eraseFromParent();
          ErasedUncondBr = true;
          break;
        }
      }
      assert(ErasedUncondBr && "Unconditional branch not erased!");
    }
  }

  // When there are block / end_block markers that overlap with try / end_try
  // markers, and the block and try markers' return types are the same, the
  // block / end_block markers are not necessary, because try / end_try markers
  // also can serve as boundaries for branches.
  // block         <- Not necessary
  // try
  //   ...
  // catch
  //   ...
706 // end 707 // end <- Not necessary 708 SmallVector<MachineInstr *, 32> ToDelete; 709 for (auto &MBB : MF) { 710 for (auto &MI : MBB) { 711 if (MI.getOpcode() != WebAssembly::TRY) 712 continue; 713 714 MachineInstr *Try = &MI, *EndTry = BeginToEnd[Try]; 715 MachineBasicBlock *TryBB = Try->getParent(); 716 MachineBasicBlock *Cont = EndTry->getParent(); 717 int64_t RetType = Try->getOperand(0).getImm(); 718 for (auto B = Try->getIterator(), E = std::next(EndTry->getIterator()); 719 B != TryBB->begin() && E != Cont->end() && 720 std::prev(B)->getOpcode() == WebAssembly::BLOCK && 721 E->getOpcode() == WebAssembly::END_BLOCK && 722 std::prev(B)->getOperand(0).getImm() == RetType; 723 --B, ++E) { 724 ToDelete.push_back(&*std::prev(B)); 725 ToDelete.push_back(&*E); 726 } 727 } 728 } 729 for (auto *MI : ToDelete) { 730 if (MI->getOpcode() == WebAssembly::BLOCK) 731 unregisterScope(MI); 732 MI->eraseFromParent(); 733 } 734 } 735 736 // Get the appropriate copy opcode for the given register class. 737 static unsigned getCopyOpcode(const TargetRegisterClass *RC) { 738 if (RC == &WebAssembly::I32RegClass) 739 return WebAssembly::COPY_I32; 740 if (RC == &WebAssembly::I64RegClass) 741 return WebAssembly::COPY_I64; 742 if (RC == &WebAssembly::F32RegClass) 743 return WebAssembly::COPY_F32; 744 if (RC == &WebAssembly::F64RegClass) 745 return WebAssembly::COPY_F64; 746 if (RC == &WebAssembly::V128RegClass) 747 return WebAssembly::COPY_V128; 748 if (RC == &WebAssembly::EXNREFRegClass) 749 return WebAssembly::COPY_EXNREF; 750 llvm_unreachable("Unexpected register class"); 751 } 752 753 // When MBB is split into MBB and Split, we should unstackify defs in MBB that 754 // have their uses in Split. 755 static void unstackifyVRegsUsedInSplitBB(MachineBasicBlock &MBB, 756 MachineBasicBlock &Split, 757 WebAssemblyFunctionInfo &MFI, 758 MachineRegisterInfo &MRI, 759 const WebAssemblyInstrInfo &TII) { 760 for (auto &MI : Split) { 761 for (auto &MO : MI.explicit_uses()) { 762 if (!MO.isReg() || Register::isPhysicalRegister(MO.getReg())) 763 continue; 764 if (MachineInstr *Def = MRI.getUniqueVRegDef(MO.getReg())) 765 if (Def->getParent() == &MBB) 766 MFI.unstackifyVReg(MO.getReg()); 767 } 768 } 769 770 // In RegStackify, when a register definition is used multiple times, 771 // Reg = INST ... 772 // INST ..., Reg, ... 773 // INST ..., Reg, ... 774 // INST ..., Reg, ... 775 // 776 // we introduce a TEE, which has the following form: 777 // DefReg = INST ... 778 // TeeReg, Reg = TEE_... DefReg 779 // INST ..., TeeReg, ... 780 // INST ..., Reg, ... 781 // INST ..., Reg, ... 782 // with DefReg and TeeReg stackified but Reg not stackified. 783 // 784 // But the invariant that TeeReg should be stackified can be violated while we 785 // unstackify registers in the split BB above. In this case, we convert TEEs 786 // into two COPYs. This COPY will be eventually eliminated in ExplicitLocals. 787 // DefReg = INST ... 788 // TeeReg = COPY DefReg 789 // Reg = COPY DefReg 790 // INST ..., TeeReg, ... 791 // INST ..., Reg, ... 792 // INST ..., Reg, ... 
  for (auto I = MBB.begin(), E = MBB.end(); I != E;) {
    MachineInstr &MI = *I++;
    if (!WebAssembly::isTee(MI.getOpcode()))
      continue;
    Register TeeReg = MI.getOperand(0).getReg();
    Register Reg = MI.getOperand(1).getReg();
    Register DefReg = MI.getOperand(2).getReg();
    if (!MFI.isVRegStackified(TeeReg)) {
      // Now we are not using TEE anymore, so unstackify DefReg too
      MFI.unstackifyVReg(DefReg);
      unsigned CopyOpc = getCopyOpcode(MRI.getRegClass(DefReg));
      BuildMI(MBB, &MI, MI.getDebugLoc(), TII.get(CopyOpc), TeeReg)
          .addReg(DefReg);
      BuildMI(MBB, &MI, MI.getDebugLoc(), TII.get(CopyOpc), Reg).addReg(DefReg);
      MI.eraseFromParent();
    }
  }
}

bool WebAssemblyCFGStackify::fixUnwindMismatches(MachineFunction &MF) {
  const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  MachineRegisterInfo &MRI = MF.getRegInfo();

  // Linearizing the control flow by placing TRY / END_TRY markers can create
  // mismatches in unwind destinations. There are two kinds of mismatches we
  // try to solve here.

  // 1. When an instruction may throw, but the EH pad it will unwind to can be
  //    different from the original CFG.
  //
  // Example: we have the following CFG:
  // bb0:
  //   call @foo (if it throws, unwind to bb2)
  // bb1:
  //   call @bar (if it throws, unwind to bb3)
  // bb2 (ehpad):
  //   catch
  //   ...
  // bb3 (ehpad):
  //   catch
  //   handler body
  //
  // And the CFG is sorted in this order. Then after placing TRY markers, it
  // will look like: (BB markers are omitted)
  // try $label1
  //   try
  //     call @foo
  //     call @bar (if it throws, unwind to bb3)
  //   catch          <- ehpad (bb2)
  //     ...
  //   end_try
  // catch            <- ehpad (bb3)
  //   handler body
  // end_try
  //
  // Now if bar() throws, it is going to end up in bb2, not bb3, where it is
  // supposed to end up. We solve this problem by
  // a. Splitting the target unwind EH pad (here bb3) so that the handler body
  //    is right after 'end_try', which means we extract the handler body out
  //    of the catch block. We do this because this handler body should be
  //    somewhere branchable from the inner scope.
  // b. Wrapping the call that has an incorrect unwind destination ('call @bar'
  //    here) with a nested try/catch/end_try scope, and within the new catch
  //    block, branching to the handler body.
  // c. Placing a branch after the newly inserted nested end_try so it can
  //    bypass the handler body, which is now outside of a catch block.
  //
  // The result will look like the following. (new: a) means this instruction
  // is newly created in the process of doing 'a' above.
  //
  // block $label0                 (new: placeBlockMarker)
  //   try $label1
  //     try
  //       call @foo
  //       try                     (new: b)
  //         call @bar
  //       catch                   (new: b)
  //         local.set n / drop    (new: b)
  //         br $label1            (new: b)
  //       end_try                 (new: b)
  //     catch                     <- ehpad (bb2)
  //     end_try
  //     br $label0                (new: c)
  //   catch                       <- ehpad (bb3)
  //   end_try                     (hoisted: a)
  //   handler body
  // end_block                     (new: placeBlockMarker)
  //
  // Note that the new wrapping block/end_block will be generated later in
  // placeBlockMarker.
  //
  // TODO Currently local.set and local.gets are generated to move the exnref
  // value created by catches.
  // That's because we don't support yielding values from a block in LLVM
  // machine IR yet, even though it is supported by wasm. Delete unnecessary
  // local.get/local.sets once yielding values from a block is supported. The
  // full EH spec requires multi-value support to do this, but for C++ we don't
  // yet need it because we only throw a single i32.
  //
  // ---
  // 2. The same as 1, but in this case an instruction unwinds to the caller
  //    function and not another EH pad.
  //
  // Example: we have the following CFG:
  // bb0:
  //   call @foo (if it throws, unwind to bb2)
  // bb1:
  //   call @bar (if it throws, unwind to caller)
  // bb2 (ehpad):
  //   catch
  //   ...
  //
  // And the CFG is sorted in this order. Then after placing TRY markers, it
  // will look like:
  // try
  //   call @foo
  //   call @bar (if it throws, unwind to caller)
  // catch          <- ehpad (bb2)
  //   ...
  // end_try
  //
  // Now if bar() throws, it is going to end up in bb2, when it is supposed to
  // throw up to the caller.
  // We solve this problem by
  // a. Creating a new 'appendix' BB at the end of the function and putting a
  //    single 'rethrow' instruction (+ local.get) in there.
  // b. Wrapping the call that has an incorrect unwind destination ('call @bar'
  //    here) with a nested try/catch/end_try scope, and within the new catch
  //    block, branching to the new appendix block.
  //
  // block $label0          (new: placeBlockMarker)
  //   try
  //     call @foo
  //     try                (new: b)
  //       call @bar
  //     catch              (new: b)
  //       local.set n      (new: b)
  //       br $label0       (new: b)
  //     end_try            (new: b)
  //   catch                <- ehpad (bb2)
  //     ...
  //   end_try
  //   ...
  // end_block              (new: placeBlockMarker)
  // local.get n            (new: a)  <- appendix block
  // rethrow                (new: a)
  //
  // In case there are multiple calls in a BB that may throw to the caller,
  // they can be wrapped together in one nested try scope. (In 1, this couldn't
  // happen, because a may-throwing instruction there had an unwind
  // destination, i.e., it was an invoke before, and there could be only one
  // invoke within a BB.)

  SmallVector<const MachineBasicBlock *, 8> EHPadStack;
  // Range of instructions to be wrapped in a new nested try/catch
  using TryRange = std::pair<MachineInstr *, MachineInstr *>;
  // In original CFG, <unwind destination BB, a vector of try ranges>
  DenseMap<MachineBasicBlock *, SmallVector<TryRange, 4>> UnwindDestToTryRanges;
  // In new CFG, <destination to branch to, a vector of try ranges>
  DenseMap<MachineBasicBlock *, SmallVector<TryRange, 4>> BrDestToTryRanges;
  // In new CFG, <destination to branch to, register containing exnref>
  DenseMap<MachineBasicBlock *, unsigned> BrDestToExnReg;

  // Destinations for branches that will be newly added, for which new
  // BLOCK/END_BLOCK markers are necessary.
  SmallVector<MachineBasicBlock *, 8> BrDests;

  // Gather possibly throwing calls (i.e., previously invokes) whose current
  // unwind destination is not the same as the original CFG.
  for (auto &MBB : reverse(MF)) {
    bool SeenThrowableInstInBB = false;
    for (auto &MI : reverse(MBB)) {
      if (MI.getOpcode() == WebAssembly::TRY)
        EHPadStack.pop_back();
      else if (MI.getOpcode() == WebAssembly::CATCH)
        EHPadStack.push_back(MI.getParent());

      // In this loop we only gather calls that have an EH pad to unwind. So
      // there will be at most one such call (= invoke) in a BB, so after we've
      // seen one, we can skip the rest of the BB. Also if MBB has no EH pad
      // successor or MI does not throw, this is not an invoke.
      if (SeenThrowableInstInBB || !MBB.hasEHPadSuccessor() ||
          !WebAssembly::mayThrow(MI))
        continue;
      SeenThrowableInstInBB = true;

      // If the EH pad on the stack top is where this instruction should unwind
      // next, we're good.
      MachineBasicBlock *UnwindDest = nullptr;
      for (auto *Succ : MBB.successors()) {
        if (Succ->isEHPad()) {
          UnwindDest = Succ;
          break;
        }
      }
      if (EHPadStack.back() == UnwindDest)
        continue;

      // If not, record the range.
      UnwindDestToTryRanges[UnwindDest].push_back(TryRange(&MI, &MI));
    }
  }

  assert(EHPadStack.empty());

  // Gather possibly throwing calls that are supposed to unwind up to the
  // caller if they throw, but currently unwind to an incorrect destination.
  // Unlike the loop above, there can be multiple calls within a BB that unwind
  // to the caller, which we should group together in a range.
  bool NeedAppendixBlock = false;
  for (auto &MBB : reverse(MF)) {
    MachineInstr *RangeBegin = nullptr, *RangeEnd = nullptr; // inclusive
    for (auto &MI : reverse(MBB)) {
      if (MI.getOpcode() == WebAssembly::TRY)
        EHPadStack.pop_back();
      else if (MI.getOpcode() == WebAssembly::CATCH)
        EHPadStack.push_back(MI.getParent());

      // If MBB has an EH pad successor, this inst does not unwind to the
      // caller.
      if (MBB.hasEHPadSuccessor())
        continue;

      // We wrap up the current range when we see a marker even if we haven't
      // finished a BB.
      if (RangeEnd && WebAssembly::isMarker(MI.getOpcode())) {
        NeedAppendixBlock = true;
        // Record the range. nullptr here means the unwind destination is the
        // caller.
        UnwindDestToTryRanges[nullptr].push_back(
            TryRange(RangeBegin, RangeEnd));
        RangeBegin = RangeEnd = nullptr; // Reset range pointers
      }

      // If EHPadStack is empty, that means this instruction correctly unwinds
      // to the caller if it throws, so we're good. If MI does not throw, we're
      // good too.
      if (EHPadStack.empty() || !WebAssembly::mayThrow(MI))
        continue;

      // We found an instruction that should unwind to the caller but currently
      // has an incorrect unwind destination. Create a new range or extend the
      // currently existing range.
      if (!RangeEnd)
        RangeBegin = RangeEnd = &MI;
      else
        RangeBegin = &MI;
    }

    if (RangeEnd) {
      NeedAppendixBlock = true;
      // Record the range. nullptr here means the unwind destination is the
      // caller.
      UnwindDestToTryRanges[nullptr].push_back(TryRange(RangeBegin, RangeEnd));
      RangeBegin = RangeEnd = nullptr; // Reset range pointers
    }
  }

  assert(EHPadStack.empty());
  // We don't have any unwind destination mismatches to resolve.
  if (UnwindDestToTryRanges.empty())
    return false;

  // If we found instructions that should unwind to the caller but currently
  // have an incorrect unwind destination, we create an appendix block at the
  // end of the function with a local.get and a rethrow instruction.
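  //
  // Rough sketch of the appendix block as built just below (register name
  // hypothetical): it holds only
  //   RETHROW %exn
  // where %exn is defined by the CATCH of each nested try that branches here;
  // the 'local.get n; rethrow' form mentioned above appears once registers are
  // lowered to locals by a later pass.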
  if (NeedAppendixBlock) {
    auto *AppendixBB = getAppendixBlock(MF);
    Register ExnReg = MRI.createVirtualRegister(&WebAssembly::EXNREFRegClass);
    BuildMI(AppendixBB, DebugLoc(), TII.get(WebAssembly::RETHROW))
        .addReg(ExnReg);
    // These instruction ranges should branch to this appendix BB.
    for (auto Range : UnwindDestToTryRanges[nullptr])
      BrDestToTryRanges[AppendixBB].push_back(Range);
    BrDestToExnReg[AppendixBB] = ExnReg;
  }

  // We loop through unwind destination EH pads that are targeted from some
  // inner scopes. Because these EH pads are destinations of more than one
  // scope now, we split them so that the handler body is after 'end_try'.
  // - Before
  // ehpad:
  //   catch
  //   local.set n / drop
  //   handler body
  // ...
  // cont:
  //   end_try
  //
  // - After
  // ehpad:
  //   catch
  //   local.set n / drop
  // brdest:              (new)
  //   end_try            (hoisted from 'cont' BB)
  //   handler body       (taken from 'ehpad')
  // ...
  // cont:
  for (auto &P : UnwindDestToTryRanges) {
    NumUnwindMismatches += P.second.size();

    // This means the destination is the appendix BB, which was separately
    // handled above.
    if (!P.first)
      continue;

    MachineBasicBlock *EHPad = P.first;

    // Find the 'catch' instruction and the 'local.set' or 'drop' instruction
    // that follows the 'catch'. If -wasm-disable-explicit-locals is not set,
    // 'catch' should always be followed by either 'local.set' or a 'drop',
    // because 'br_on_exn' is generated after 'catch' in LateEHPrepare and we
    // don't support blocks taking values yet.
    MachineInstr *Catch = nullptr;
    unsigned ExnReg = 0;
    for (auto &MI : *EHPad) {
      switch (MI.getOpcode()) {
      case WebAssembly::CATCH:
        Catch = &MI;
        ExnReg = Catch->getOperand(0).getReg();
        break;
      }
    }
    assert(Catch && "EH pad does not have a catch");
    assert(ExnReg != 0 && "Invalid register");

    auto SplitPos = std::next(Catch->getIterator());

    // Create a new BB that's going to be the destination for branches from the
    // inner mismatched scope.
    MachineInstr *BeginTry = EHPadToTry[EHPad];
    MachineInstr *EndTry = BeginToEnd[BeginTry];
    MachineBasicBlock *Cont = EndTry->getParent();
    auto *BrDest = MF.CreateMachineBasicBlock();
    MF.insert(std::next(EHPad->getIterator()), BrDest);
    // Hoist up the existing 'end_try'.
    BrDest->insert(BrDest->end(), EndTry->removeFromParent());
    // Take out the handler body from the EH pad to the new branch destination
    // BB.
    BrDest->splice(BrDest->end(), EHPad, SplitPos, EHPad->end());
    unstackifyVRegsUsedInSplitBB(*EHPad, *BrDest, MFI, MRI, TII);
    // Fix predecessor-successor relationship.
    BrDest->transferSuccessors(EHPad);
    EHPad->addSuccessor(BrDest);

    // All try ranges that were supposed to unwind to this EH pad now have to
    // branch to this new branch dest BB.
    for (auto Range : UnwindDestToTryRanges[EHPad])
      BrDestToTryRanges[BrDest].push_back(Range);
    BrDestToExnReg[BrDest] = ExnReg;

    // In case we fall through to the continuation BB after the catch block, we
    // now have to add a branch to it.
    // - Before
    // try
    //   ...
    //   (falls through to 'cont')
    // catch
    //   handler body
    // end
    //              <-- cont
    //
    // - After
    // try
    //   ...
1155 // br %cont (new) 1156 // catch 1157 // end 1158 // handler body 1159 // <-- cont 1160 MachineBasicBlock *EHPadLayoutPred = &*std::prev(EHPad->getIterator()); 1161 MachineBasicBlock *TBB = nullptr, *FBB = nullptr; 1162 SmallVector<MachineOperand, 4> Cond; 1163 bool Analyzable = !TII.analyzeBranch(*EHPadLayoutPred, TBB, FBB, Cond); 1164 if (Analyzable && !TBB && !FBB) { 1165 DebugLoc DL = EHPadLayoutPred->empty() 1166 ? DebugLoc() 1167 : EHPadLayoutPred->rbegin()->getDebugLoc(); 1168 BuildMI(EHPadLayoutPred, DL, TII.get(WebAssembly::BR)).addMBB(Cont); 1169 BrDests.push_back(Cont); 1170 } 1171 } 1172 1173 // For possibly throwing calls whose unwind destinations are currently 1174 // incorrect because of CFG linearization, we wrap them with a nested 1175 // try/catch/end_try, and within the new catch block, we branch to the correct 1176 // handler. 1177 // - Before 1178 // mbb: 1179 // call @foo <- Unwind destination mismatch! 1180 // ehpad: 1181 // ... 1182 // 1183 // - After 1184 // mbb: 1185 // try (new) 1186 // call @foo 1187 // nested-ehpad: (new) 1188 // catch (new) 1189 // local.set n / drop (new) 1190 // br %brdest (new) 1191 // nested-end: (new) 1192 // end_try (new) 1193 // ehpad: 1194 // ... 1195 for (auto &P : BrDestToTryRanges) { 1196 MachineBasicBlock *BrDest = P.first; 1197 auto &TryRanges = P.second; 1198 unsigned ExnReg = BrDestToExnReg[BrDest]; 1199 1200 for (auto Range : TryRanges) { 1201 MachineInstr *RangeBegin = nullptr, *RangeEnd = nullptr; 1202 std::tie(RangeBegin, RangeEnd) = Range; 1203 auto *MBB = RangeBegin->getParent(); 1204 // Store the first function call from this range, because RangeBegin can 1205 // be moved to point EH_LABEL before the call 1206 MachineInstr *RangeBeginCall = RangeBegin; 1207 1208 // Include possible EH_LABELs in the range 1209 if (RangeBegin->getIterator() != MBB->begin() && 1210 std::prev(RangeBegin->getIterator())->isEHLabel()) 1211 RangeBegin = &*std::prev(RangeBegin->getIterator()); 1212 if (std::next(RangeEnd->getIterator()) != MBB->end() && 1213 std::next(RangeEnd->getIterator())->isEHLabel()) 1214 RangeEnd = &*std::next(RangeEnd->getIterator()); 1215 1216 MachineBasicBlock *EHPad = nullptr; 1217 for (auto *Succ : MBB->successors()) { 1218 if (Succ->isEHPad()) { 1219 EHPad = Succ; 1220 break; 1221 } 1222 } 1223 1224 // Local expression tree before the first call of this range should go 1225 // after the nested TRY. 1226 SmallPtrSet<const MachineInstr *, 4> AfterSet; 1227 AfterSet.insert(RangeBegin); 1228 AfterSet.insert(RangeBeginCall); 1229 for (auto I = MachineBasicBlock::iterator(RangeBeginCall), 1230 E = MBB->begin(); 1231 I != E; --I) { 1232 if (std::prev(I)->isDebugInstr() || std::prev(I)->isPosition()) 1233 continue; 1234 if (WebAssembly::isChild(*std::prev(I), MFI)) 1235 AfterSet.insert(&*std::prev(I)); 1236 else 1237 break; 1238 } 1239 1240 // Create the nested try instruction. 1241 auto InsertPos = getLatestInsertPos( 1242 MBB, SmallPtrSet<const MachineInstr *, 4>(), AfterSet); 1243 MachineInstr *NestedTry = 1244 BuildMI(*MBB, InsertPos, RangeBegin->getDebugLoc(), 1245 TII.get(WebAssembly::TRY)) 1246 .addImm(int64_t(WebAssembly::BlockType::Void)); 1247 1248 // Create the nested EH pad and fill instructions in. 
      MachineBasicBlock *NestedEHPad = MF.CreateMachineBasicBlock();
      MF.insert(std::next(MBB->getIterator()), NestedEHPad);
      NestedEHPad->setIsEHPad();
      NestedEHPad->setIsEHScopeEntry();
      BuildMI(NestedEHPad, RangeEnd->getDebugLoc(), TII.get(WebAssembly::CATCH),
              ExnReg);
      BuildMI(NestedEHPad, RangeEnd->getDebugLoc(), TII.get(WebAssembly::BR))
          .addMBB(BrDest);

      // Create the nested continuation BB and end_try instruction.
      MachineBasicBlock *NestedCont = MF.CreateMachineBasicBlock();
      MF.insert(std::next(NestedEHPad->getIterator()), NestedCont);
      MachineInstr *NestedEndTry =
          BuildMI(*NestedCont, NestedCont->begin(), RangeEnd->getDebugLoc(),
                  TII.get(WebAssembly::END_TRY));
      // In case MBB has more instructions after the try range, move them to
      // the new nested continuation BB.
      NestedCont->splice(NestedCont->end(), MBB,
                         std::next(RangeEnd->getIterator()), MBB->end());
      unstackifyVRegsUsedInSplitBB(*MBB, *NestedCont, MFI, MRI, TII);
      registerTryScope(NestedTry, NestedEndTry, NestedEHPad);

      // Fix predecessor-successor relationship.
      NestedCont->transferSuccessors(MBB);
      if (EHPad) {
        NestedCont->removeSuccessor(EHPad);
        // If EHPad does not have any predecessors left after removing the
        // NestedCont predecessor, remove its successor too, because this EHPad
        // is not reachable from the entry BB anyway. We can't remove EHPad BB
        // itself because it can contain 'catch' or 'end', which are necessary
        // for keeping the try-catch-end structure.
        if (EHPad->pred_empty())
          EHPad->removeSuccessor(BrDest);
      }
      MBB->addSuccessor(NestedEHPad);
      MBB->addSuccessor(NestedCont);
      NestedEHPad->addSuccessor(BrDest);
    }
  }

  // Renumber BBs and recalculate ScopeTop info because new BBs might have been
  // created and inserted above.
  MF.RenumberBlocks();
  ScopeTops.clear();
  ScopeTops.resize(MF.getNumBlockIDs());
  for (auto &MBB : reverse(MF)) {
    for (auto &MI : reverse(MBB)) {
      if (ScopeTops[MBB.getNumber()])
        break;
      switch (MI.getOpcode()) {
      case WebAssembly::END_BLOCK:
      case WebAssembly::END_LOOP:
      case WebAssembly::END_TRY:
        ScopeTops[MBB.getNumber()] = EndToBegin[&MI]->getParent();
        break;
      case WebAssembly::CATCH:
        ScopeTops[MBB.getNumber()] = EHPadToTry[&MBB]->getParent();
        break;
      }
    }
  }

  // Recompute the dominator tree.
  getAnalysis<MachineDominatorTree>().runOnMachineFunction(MF);

  // Place block markers for newly added branches, if necessary.

  // If we've created an appendix BB and a branch to it, place a block/end_block
  // marker for that. For some new branches, those branch destination BBs start
  // with a hoisted end_try marker, so we don't need a new marker there.
1319 if (AppendixBB) 1320 BrDests.push_back(AppendixBB); 1321 1322 llvm::sort(BrDests, 1323 [&](const MachineBasicBlock *A, const MachineBasicBlock *B) { 1324 auto ANum = A->getNumber(); 1325 auto BNum = B->getNumber(); 1326 return ANum < BNum; 1327 }); 1328 for (auto *Dest : BrDests) 1329 placeBlockMarker(*Dest); 1330 1331 return true; 1332 } 1333 1334 static unsigned 1335 getDepth(const SmallVectorImpl<const MachineBasicBlock *> &Stack, 1336 const MachineBasicBlock *MBB) { 1337 unsigned Depth = 0; 1338 for (auto X : reverse(Stack)) { 1339 if (X == MBB) 1340 break; 1341 ++Depth; 1342 } 1343 assert(Depth < Stack.size() && "Branch destination should be in scope"); 1344 return Depth; 1345 } 1346 1347 /// In normal assembly languages, when the end of a function is unreachable, 1348 /// because the function ends in an infinite loop or a noreturn call or similar, 1349 /// it isn't necessary to worry about the function return type at the end of 1350 /// the function, because it's never reached. However, in WebAssembly, blocks 1351 /// that end at the function end need to have a return type signature that 1352 /// matches the function signature, even though it's unreachable. This function 1353 /// checks for such cases and fixes up the signatures. 1354 void WebAssemblyCFGStackify::fixEndsAtEndOfFunction(MachineFunction &MF) { 1355 const auto &MFI = *MF.getInfo<WebAssemblyFunctionInfo>(); 1356 1357 if (MFI.getResults().empty()) 1358 return; 1359 1360 // MCInstLower will add the proper types to multivalue signatures based on the 1361 // function return type 1362 WebAssembly::BlockType RetType = 1363 MFI.getResults().size() > 1 1364 ? WebAssembly::BlockType::Multivalue 1365 : WebAssembly::BlockType( 1366 WebAssembly::toValType(MFI.getResults().front())); 1367 1368 for (MachineBasicBlock &MBB : reverse(MF)) { 1369 for (MachineInstr &MI : reverse(MBB)) { 1370 if (MI.isPosition() || MI.isDebugInstr()) 1371 continue; 1372 switch (MI.getOpcode()) { 1373 case WebAssembly::END_BLOCK: 1374 case WebAssembly::END_LOOP: 1375 case WebAssembly::END_TRY: 1376 EndToBegin[&MI]->getOperand(0).setImm(int32_t(RetType)); 1377 continue; 1378 default: 1379 // Something other than an `end`. We're done. 1380 return; 1381 } 1382 } 1383 } 1384 } 1385 1386 // WebAssembly functions end with an end instruction, as if the function body 1387 // were a block. 1388 static void appendEndToFunction(MachineFunction &MF, 1389 const WebAssemblyInstrInfo &TII) { 1390 BuildMI(MF.back(), MF.back().end(), 1391 MF.back().findPrevDebugLoc(MF.back().end()), 1392 TII.get(WebAssembly::END_FUNCTION)); 1393 } 1394 1395 /// Insert LOOP/TRY/BLOCK markers at appropriate places. 1396 void WebAssemblyCFGStackify::placeMarkers(MachineFunction &MF) { 1397 // We allocate one more than the number of blocks in the function to 1398 // accommodate for the possible fake block we may insert at the end. 1399 ScopeTops.resize(MF.getNumBlockIDs() + 1); 1400 // Place the LOOP for MBB if MBB is the header of a loop. 1401 for (auto &MBB : MF) 1402 placeLoopMarker(MBB); 1403 1404 const MCAsmInfo *MCAI = MF.getTarget().getMCAsmInfo(); 1405 for (auto &MBB : MF) { 1406 if (MBB.isEHPad()) { 1407 // Place the TRY for MBB if MBB is the EH pad of an exception. 1408 if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm && 1409 MF.getFunction().hasPersonalityFn()) 1410 placeTryMarker(MBB); 1411 } else { 1412 // Place the BLOCK for MBB if MBB is branched to from above. 
1413 placeBlockMarker(MBB); 1414 } 1415 } 1416 // Fix mismatches in unwind destinations induced by linearizing the code. 1417 if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm && 1418 MF.getFunction().hasPersonalityFn()) 1419 fixUnwindMismatches(MF); 1420 } 1421 1422 void WebAssemblyCFGStackify::rewriteDepthImmediates(MachineFunction &MF) { 1423 // Now rewrite references to basic blocks to be depth immediates. 1424 SmallVector<const MachineBasicBlock *, 8> Stack; 1425 for (auto &MBB : reverse(MF)) { 1426 for (auto I = MBB.rbegin(), E = MBB.rend(); I != E; ++I) { 1427 MachineInstr &MI = *I; 1428 switch (MI.getOpcode()) { 1429 case WebAssembly::BLOCK: 1430 case WebAssembly::TRY: 1431 assert(ScopeTops[Stack.back()->getNumber()]->getNumber() <= 1432 MBB.getNumber() && 1433 "Block/try marker should be balanced"); 1434 Stack.pop_back(); 1435 break; 1436 1437 case WebAssembly::LOOP: 1438 assert(Stack.back() == &MBB && "Loop top should be balanced"); 1439 Stack.pop_back(); 1440 break; 1441 1442 case WebAssembly::END_BLOCK: 1443 case WebAssembly::END_TRY: 1444 Stack.push_back(&MBB); 1445 break; 1446 1447 case WebAssembly::END_LOOP: 1448 Stack.push_back(EndToBegin[&MI]->getParent()); 1449 break; 1450 1451 default: 1452 if (MI.isTerminator()) { 1453 // Rewrite MBB operands to be depth immediates. 1454 SmallVector<MachineOperand, 4> Ops(MI.operands()); 1455 while (MI.getNumOperands() > 0) 1456 MI.RemoveOperand(MI.getNumOperands() - 1); 1457 for (auto MO : Ops) { 1458 if (MO.isMBB()) 1459 MO = MachineOperand::CreateImm(getDepth(Stack, MO.getMBB())); 1460 MI.addOperand(MF, MO); 1461 } 1462 } 1463 break; 1464 } 1465 } 1466 } 1467 assert(Stack.empty() && "Control flow should be balanced"); 1468 } 1469 1470 void WebAssemblyCFGStackify::releaseMemory() { 1471 ScopeTops.clear(); 1472 BeginToEnd.clear(); 1473 EndToBegin.clear(); 1474 TryToEHPad.clear(); 1475 EHPadToTry.clear(); 1476 AppendixBB = nullptr; 1477 } 1478 1479 bool WebAssemblyCFGStackify::runOnMachineFunction(MachineFunction &MF) { 1480 LLVM_DEBUG(dbgs() << "********** CFG Stackifying **********\n" 1481 "********** Function: " 1482 << MF.getName() << '\n'); 1483 const MCAsmInfo *MCAI = MF.getTarget().getMCAsmInfo(); 1484 1485 releaseMemory(); 1486 1487 // Liveness is not tracked for VALUE_STACK physreg. 1488 MF.getRegInfo().invalidateLiveness(); 1489 1490 // Place the BLOCK/LOOP/TRY markers to indicate the beginnings of scopes. 1491 placeMarkers(MF); 1492 1493 // Remove unnecessary instructions possibly introduced by try/end_trys. 1494 if (MCAI->getExceptionHandlingType() == ExceptionHandling::Wasm && 1495 MF.getFunction().hasPersonalityFn()) 1496 removeUnnecessaryInstrs(MF); 1497 1498 // Convert MBB operands in terminators to relative depth immediates. 1499 rewriteDepthImmediates(MF); 1500 1501 // Fix up block/loop/try signatures at the end of the function to conform to 1502 // WebAssembly's rules. 1503 fixEndsAtEndOfFunction(MF); 1504 1505 // Add an end instruction at the end of the function body. 1506 const auto &TII = *MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo(); 1507 if (!MF.getSubtarget<WebAssemblySubtarget>() 1508 .getTargetTriple() 1509 .isOSBinFormatELF()) 1510 appendEndToFunction(MF, TII); 1511 1512 MF.getInfo<WebAssemblyFunctionInfo>()->setCFGStackified(); 1513 return true; 1514 } 1515