//===-- LiveRangeEdit.cpp - Basic tools for editing a register live range -===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The LiveRangeEdit class represents changes done to a virtual register when it
// is spilled or split.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/LiveRangeEdit.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumDCEDeleted, "Number of instructions deleted by DCE");
STATISTIC(NumDCEFoldedLoads, "Number of single use loads folded after DCE");
STATISTIC(NumFracRanges, "Number of live ranges fractured by DCE");

void LiveRangeEdit::Delegate::anchor() { }

LiveInterval &LiveRangeEdit::createEmptyIntervalFrom(Register OldReg,
                                                     bool createSubRanges) {
  Register VReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
  if (VRM)
    VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));

  LiveInterval &LI = LIS.createEmptyInterval(VReg);
  if (Parent && !Parent->isSpillable())
    LI.markNotSpillable();
  if (createSubRanges) {
    // Create empty subranges if the OldReg's interval has them. Do not create
    // the main range here---it will be constructed later after the subranges
    // have been finalized.
    LiveInterval &OldLI = LIS.getInterval(OldReg);
    VNInfo::Allocator &Alloc = LIS.getVNInfoAllocator();
    for (LiveInterval::SubRange &S : OldLI.subranges())
      LI.createSubRange(Alloc, S.LaneMask);
  }
  return LI;
}

Register LiveRangeEdit::createFrom(Register OldReg) {
  Register VReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
  if (VRM) {
    VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));
  }
  // FIXME: Getting the interval here actually computes it.
  // In theory, this may not be what we want, but in practice
  // the createEmptyIntervalFrom API is used when this is not
  // the case. Generally speaking we just want to annotate the
  // LiveInterval when it gets created but we cannot do that at
  // the moment.
  if (Parent && !Parent->isSpillable())
    LIS.getInterval(VReg).markNotSpillable();
  return VReg;
}

bool LiveRangeEdit::checkRematerializable(VNInfo *VNI,
                                          const MachineInstr *DefMI) {
  assert(DefMI && "Missing instruction");
  ScannedRemattable = true;
  if (!TII.isTriviallyReMaterializable(*DefMI))
    return false;
  Remattable.insert(VNI);
  return true;
}

void LiveRangeEdit::scanRemattable() {
  for (VNInfo *VNI : getParent().valnos) {
    if (VNI->isUnused())
      continue;
    unsigned Original = VRM->getOriginal(getReg());
    LiveInterval &OrigLI = LIS.getInterval(Original);
    VNInfo *OrigVNI = OrigLI.getVNInfoAt(VNI->def);
    if (!OrigVNI)
      continue;
    MachineInstr *DefMI = LIS.getInstructionFromIndex(OrigVNI->def);
    if (!DefMI)
      continue;
    checkRematerializable(OrigVNI, DefMI);
  }
  ScannedRemattable = true;
}

bool LiveRangeEdit::anyRematerializable() {
  if (!ScannedRemattable)
    scanRemattable();
  return !Remattable.empty();
}

/// allUsesAvailableAt - Return true if all registers used by OrigMI at
/// OrigIdx are also available with the same value at UseIdx.
bool LiveRangeEdit::allUsesAvailableAt(const MachineInstr *OrigMI,
                                       SlotIndex OrigIdx,
                                       SlotIndex UseIdx) const {
  OrigIdx = OrigIdx.getRegSlot(true);
  UseIdx = std::max(UseIdx, UseIdx.getRegSlot(true));
  for (const MachineOperand &MO : OrigMI->operands()) {
    if (!MO.isReg() || !MO.getReg() || !MO.readsReg())
      continue;

    // We can't remat physreg uses, unless it is a constant or target wants
    // to ignore this use.
    if (Register::isPhysicalRegister(MO.getReg())) {
      if (MRI.isConstantPhysReg(MO.getReg()) || TII.isIgnorableUse(MO))
        continue;
      return false;
    }

    LiveInterval &li = LIS.getInterval(MO.getReg());
    const VNInfo *OVNI = li.getVNInfoAt(OrigIdx);
    if (!OVNI)
      continue;

    // Don't allow rematerialization immediately after the original def.
    // It would be incorrect if OrigMI redefines the register.
    // See PR14098.
    if (SlotIndex::isSameInstr(OrigIdx, UseIdx))
      return false;

    if (OVNI != li.getVNInfoAt(UseIdx))
      return false;

    // Check that subrange is live at UseIdx.
    if (li.hasSubRanges()) {
      const TargetRegisterInfo *TRI = MRI.getTargetRegisterInfo();
      unsigned SubReg = MO.getSubReg();
      LaneBitmask LM = SubReg ? TRI->getSubRegIndexLaneMask(SubReg)
                              : MRI.getMaxLaneMaskForVReg(MO.getReg());
      for (LiveInterval::SubRange &SR : li.subranges()) {
        if ((SR.LaneMask & LM).none())
          continue;
        if (!SR.liveAt(UseIdx))
          return false;
        // Early exit if all used lanes are checked. No need to continue.
        LM &= ~SR.LaneMask;
        if (LM.none())
          break;
      }
    }
  }
  return true;
}

bool LiveRangeEdit::canRematerializeAt(Remat &RM, VNInfo *OrigVNI,
                                       SlotIndex UseIdx, bool cheapAsAMove) {
  assert(ScannedRemattable && "Call anyRematerializable first");

  // Use scanRemattable info.
  if (!Remattable.count(OrigVNI))
    return false;

  // No defining instruction provided.
  SlotIndex DefIdx;
  assert(RM.OrigMI && "No defining instruction for remattable value");
  DefIdx = LIS.getInstructionIndex(*RM.OrigMI);

  // If only cheap remats were requested, bail out early.
  if (cheapAsAMove && !TII.isAsCheapAsAMove(*RM.OrigMI))
    return false;

  // Verify that all used registers are available with the same values.
  if (!allUsesAvailableAt(RM.OrigMI, DefIdx, UseIdx))
    return false;

  return true;
}

SlotIndex LiveRangeEdit::rematerializeAt(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MI,
                                         unsigned DestReg,
                                         const Remat &RM,
                                         const TargetRegisterInfo &tri,
                                         bool Late) {
  assert(RM.OrigMI && "Invalid remat");
  TII.reMaterialize(MBB, MI, DestReg, 0, *RM.OrigMI, tri);
  // DestReg of the cloned instruction cannot be Dead. Set isDead of DestReg
  // to false anyway in case the isDead flag of RM.OrigMI's dest register
  // is true.
  (*--MI).getOperand(0).setIsDead(false);
  Rematted.insert(RM.ParentVNI);
  return LIS.getSlotIndexes()->insertMachineInstrInMaps(*MI, Late).getRegSlot();
}

void LiveRangeEdit::eraseVirtReg(Register Reg) {
  if (TheDelegate && TheDelegate->LRE_CanEraseVirtReg(Reg))
    LIS.removeInterval(Reg);
}

bool LiveRangeEdit::foldAsLoad(LiveInterval *LI,
                               SmallVectorImpl<MachineInstr*> &Dead) {
  MachineInstr *DefMI = nullptr, *UseMI = nullptr;

  // Check that there is a single def and a single use.
  for (MachineOperand &MO : MRI.reg_nodbg_operands(LI->reg())) {
    MachineInstr *MI = MO.getParent();
    if (MO.isDef()) {
      if (DefMI && DefMI != MI)
        return false;
      if (!MI->canFoldAsLoad())
        return false;
      DefMI = MI;
    } else if (!MO.isUndef()) {
      if (UseMI && UseMI != MI)
        return false;
      // FIXME: Targets don't know how to fold subreg uses.
      if (MO.getSubReg())
        return false;
      UseMI = MI;
    }
  }
  if (!DefMI || !UseMI)
    return false;

  // Since we're moving the DefMI load, make sure we're not extending any live
  // ranges.
  if (!allUsesAvailableAt(DefMI, LIS.getInstructionIndex(*DefMI),
                          LIS.getInstructionIndex(*UseMI)))
    return false;

  // We also need to make sure it is safe to move the load.
  // Assume there are stores between DefMI and UseMI.
  bool SawStore = true;
  if (!DefMI->isSafeToMove(nullptr, SawStore))
    return false;

  LLVM_DEBUG(dbgs() << "Try to fold single def: " << *DefMI
                    << " into single use: " << *UseMI);

  SmallVector<unsigned, 8> Ops;
  if (UseMI->readsWritesVirtualRegister(LI->reg(), &Ops).second)
    return false;

  MachineInstr *FoldMI = TII.foldMemoryOperand(*UseMI, Ops, *DefMI, &LIS);
  if (!FoldMI)
    return false;
  LLVM_DEBUG(dbgs() << " folded: " << *FoldMI);
  LIS.ReplaceMachineInstrInMaps(*UseMI, *FoldMI);
  // Update the call site info.
  if (UseMI->shouldUpdateCallSiteInfo())
    UseMI->getMF()->moveCallSiteInfo(UseMI, FoldMI);
  UseMI->eraseFromParent();
  DefMI->addRegisterDead(LI->reg(), nullptr);
  Dead.push_back(DefMI);
  ++NumDCEFoldedLoads;
  return true;
}

bool LiveRangeEdit::useIsKill(const LiveInterval &LI,
                              const MachineOperand &MO) const {
  const MachineInstr &MI = *MO.getParent();
  SlotIndex Idx = LIS.getInstructionIndex(MI).getRegSlot();
  if (LI.Query(Idx).isKill())
    return true;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  unsigned SubReg = MO.getSubReg();
  LaneBitmask LaneMask = TRI.getSubRegIndexLaneMask(SubReg);
  for (const LiveInterval::SubRange &S : LI.subranges()) {
    if ((S.LaneMask & LaneMask).any() && S.Query(Idx).isKill())
      return true;
  }
  return false;
}

/// Find all live intervals that need to shrink, then remove the instruction.
void LiveRangeEdit::eliminateDeadDef(MachineInstr *MI, ToShrinkSet &ToShrink) {
  assert(MI->allDefsAreDead() && "Def isn't really dead");
  SlotIndex Idx = LIS.getInstructionIndex(*MI).getRegSlot();

  // Never delete a bundled instruction.
  if (MI->isBundled()) {
    return;
  }
  // Never delete inline asm.
  if (MI->isInlineAsm()) {
    LLVM_DEBUG(dbgs() << "Won't delete: " << Idx << '\t' << *MI);
    return;
  }

  // Use the same criteria as DeadMachineInstructionElim.
  bool SawStore = false;
  if (!MI->isSafeToMove(nullptr, SawStore)) {
    LLVM_DEBUG(dbgs() << "Can't delete: " << Idx << '\t' << *MI);
    return;
  }

  LLVM_DEBUG(dbgs() << "Deleting dead def " << Idx << '\t' << *MI);

  // Collect virtual registers to be erased after MI is gone.
  SmallVector<unsigned, 8> RegsToErase;
  bool ReadsPhysRegs = false;
  bool isOrigDef = false;
  Register Dest;
  unsigned DestSubReg;
  // Only optimize rematerialize case when the instruction has one def, since
  // otherwise we could leave some dead defs in the code. This case is
  // extremely rare.
  if (VRM && MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
      MI->getDesc().getNumDefs() == 1) {
    Dest = MI->getOperand(0).getReg();
    DestSubReg = MI->getOperand(0).getSubReg();
    unsigned Original = VRM->getOriginal(Dest);
    LiveInterval &OrigLI = LIS.getInterval(Original);
    VNInfo *OrigVNI = OrigLI.getVNInfoAt(Idx);
    // The original live-range may have been shrunk to
    // an empty live-range. It happens when it is dead, but
    // we still keep it around to be able to rematerialize
    // other values that depend on it.
    if (OrigVNI)
      isOrigDef = SlotIndex::isSameInstr(OrigVNI->def, Idx);
  }

  bool HasLiveVRegUses = false;

  // Check for live intervals that may shrink
  for (const MachineOperand &MO : MI->operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isVirtualRegister(Reg)) {
      // Check if MI reads any unreserved physregs.
      if (Reg && MO.readsReg() && !MRI.isReserved(Reg))
        ReadsPhysRegs = true;
      else if (MO.isDef())
        LIS.removePhysRegDefAt(Reg.asMCReg(), Idx);
      continue;
    }
    LiveInterval &LI = LIS.getInterval(Reg);

    // Shrink read registers, unless it is likely to be expensive and
    // unlikely to change anything. We typically don't want to shrink the
    // PIC base register that has lots of uses everywhere.
    // Always shrink COPY uses that probably come from live range splitting.
    if ((MI->readsVirtualRegister(Reg) && (MI->isCopy() || MO.isDef())) ||
        (MO.readsReg() && (MRI.hasOneNonDBGUse(Reg) || useIsKill(LI, MO))))
      ToShrink.insert(&LI);
    else if (MO.readsReg())
      HasLiveVRegUses = true;

    // Remove defined value.
    if (MO.isDef()) {
      if (TheDelegate && LI.getVNInfoAt(Idx) != nullptr)
        TheDelegate->LRE_WillShrinkVirtReg(LI.reg());
      LIS.removeVRegDefAt(LI, Idx);
      if (LI.empty())
        RegsToErase.push_back(Reg);
    }
  }

  // Currently, we don't support DCE of physreg live ranges. If MI reads
  // any unreserved physregs, don't erase the instruction, but turn it into
  // a KILL instead. This way, the physreg live ranges don't end up
  // dangling.
  // FIXME: It would be better to have something like shrinkToUses() for
  // physregs. That could potentially enable more DCE and it would free up
  // the physreg. It would not happen often, though.
  if (ReadsPhysRegs) {
    MI->setDesc(TII.get(TargetOpcode::KILL));
    // Remove all operands that aren't physregs.
    for (unsigned i = MI->getNumOperands(); i; --i) {
      const MachineOperand &MO = MI->getOperand(i-1);
      if (MO.isReg() && Register::isPhysicalRegister(MO.getReg()))
        continue;
      MI->removeOperand(i-1);
    }
    LLVM_DEBUG(dbgs() << "Converted physregs to:\t" << *MI);
  } else {
    // If the dest of MI is an original reg and MI is reMaterializable,
    // don't delete the inst. Replace the dest with a new reg, and keep
    // the inst for remat of other siblings. The inst is saved in
    // LiveRangeEdit::DeadRemats and will be deleted after all the
    // allocations of the func are done.
    // However, immediately delete instructions which have unshrunk virtual
    // register uses. That may provoke RA to split an interval at the KILL
    // and later result in an invalid live segment end.
    if (isOrigDef && DeadRemats && !HasLiveVRegUses &&
        TII.isTriviallyReMaterializable(*MI)) {
      LiveInterval &NewLI = createEmptyIntervalFrom(Dest, false);
      VNInfo::Allocator &Alloc = LIS.getVNInfoAllocator();
      VNInfo *VNI = NewLI.getNextValue(Idx, Alloc);
      NewLI.addSegment(LiveInterval::Segment(Idx, Idx.getDeadSlot(), VNI));

      if (DestSubReg) {
        const TargetRegisterInfo *TRI = MRI.getTargetRegisterInfo();
        auto *SR = NewLI.createSubRange(
            Alloc, TRI->getSubRegIndexLaneMask(DestSubReg));
        SR->addSegment(LiveInterval::Segment(Idx, Idx.getDeadSlot(),
                                             SR->getNextValue(Idx, Alloc)));
      }

      pop_back();
      DeadRemats->insert(MI);
      const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
      MI->substituteRegister(Dest, NewLI.reg(), 0, TRI);
      MI->getOperand(0).setIsDead(true);
    } else {
      if (TheDelegate)
        TheDelegate->LRE_WillEraseInstruction(MI);
      LIS.RemoveMachineInstrFromMaps(*MI);
      MI->eraseFromParent();
      ++NumDCEDeleted;
    }
  }

  // Erase any virtregs that are now empty and unused. There may be <undef>
  // uses around. Keep the empty live range in that case.
  for (unsigned i = 0, e = RegsToErase.size(); i != e; ++i) {
    Register Reg = RegsToErase[i];
    if (LIS.hasInterval(Reg) && MRI.reg_nodbg_empty(Reg)) {
      ToShrink.remove(&LIS.getInterval(Reg));
      eraseVirtReg(Reg);
    }
  }
}

void LiveRangeEdit::eliminateDeadDefs(SmallVectorImpl<MachineInstr *> &Dead,
                                      ArrayRef<Register> RegsBeingSpilled) {
  ToShrinkSet ToShrink;

  for (;;) {
    // Erase all dead defs.
    while (!Dead.empty())
      eliminateDeadDef(Dead.pop_back_val(), ToShrink);

    if (ToShrink.empty())
      break;

    // Shrink just one live interval. Then delete new dead defs.
    LiveInterval *LI = ToShrink.pop_back_val();
    if (foldAsLoad(LI, Dead))
      continue;
    unsigned VReg = LI->reg();
    if (TheDelegate)
      TheDelegate->LRE_WillShrinkVirtReg(VReg);
    if (!LIS.shrinkToUses(LI, &Dead))
      continue;

    // Don't create new intervals for a register being spilled.
    // The new intervals would have to be spilled anyway so it's not worth it.
    // Also they currently aren't spilled so creating them and not spilling
    // them results in incorrect code.
    if (llvm::is_contained(RegsBeingSpilled, VReg))
      continue;

    // LI may have been separated, create new intervals.
    LI->RenumberValues();
    SmallVector<LiveInterval*, 8> SplitLIs;
    LIS.splitSeparateComponents(*LI, SplitLIs);
    if (!SplitLIs.empty())
      ++NumFracRanges;

    Register Original = VRM ? VRM->getOriginal(VReg) : Register();
    for (const LiveInterval *SplitLI : SplitLIs) {
      // If LI is an original interval that hasn't been split yet, make the new
      // intervals their own originals instead of referring to LI. The original
      // interval must contain all the split products, and LI doesn't.
      if (Original != VReg && Original != 0)
        VRM->setIsSplitFromReg(SplitLI->reg(), Original);
      if (TheDelegate)
        TheDelegate->LRE_DidCloneVirtReg(SplitLI->reg(), VReg);
    }
  }
}

// Keep track of new virtual registers created via
// MachineRegisterInfo::createVirtualRegister.
void
LiveRangeEdit::MRI_NoteNewVirtualRegister(Register VReg) {
  if (VRM)
    VRM->grow();

  NewRegs.push_back(VReg);
}

void LiveRangeEdit::calculateRegClassAndHint(MachineFunction &MF,
                                             VirtRegAuxInfo &VRAI) {
  for (unsigned I = 0, Size = size(); I < Size; ++I) {
    LiveInterval &LI = LIS.getInterval(get(I));
    if (MRI.recomputeRegClass(LI.reg()))
      LLVM_DEBUG({
        const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
        dbgs() << "Inflated " << printReg(LI.reg()) << " to "
               << TRI->getRegClassName(MRI.getRegClass(LI.reg())) << '\n';
      });
    VRAI.calculateSpillWeightAndHint(LI);
  }
}