//===- InterferenceCache.cpp - Caching per-block interference ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// InterferenceCache remembers per-block interference in LiveIntervalUnions.
//
//===----------------------------------------------------------------------===//

#include "InterferenceCache.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveIntervalUnion.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

// Static member used for null interference cursors.
const InterferenceCache::BlockInterference
    InterferenceCache::Cursor::NoInterference;

// Initializes PhysRegEntries (instead of a SmallVector, PhysRegEntries is a
// buffer of size NumPhysRegs to speed up alloc/clear for targets with large
// reg files). Calloced memory is used for good form, and quiets tools like
// Valgrind too, but zero initialized memory is not required by the algorithm:
// this is because PhysRegEntries works like a SparseSet and its entries are
// only valid when there is a corresponding CacheEntries assignment. There is
// also support for when pass managers are reused for targets with different
// numbers of PhysRegs: in this case PhysRegEntries is freed and reinitialized.
void InterferenceCache::reinitPhysRegEntries() {
  if (PhysRegEntriesCount == TRI->getNumRegs()) return;
  free(PhysRegEntries);
  PhysRegEntriesCount = TRI->getNumRegs();
  PhysRegEntries = (unsigned char*)
    calloc(PhysRegEntriesCount, sizeof(unsigned char));
}

void InterferenceCache::init(MachineFunction *mf,
                             LiveIntervalUnion *liuarray,
                             SlotIndexes *indexes,
                             LiveIntervals *lis,
                             const TargetRegisterInfo *tri) {
  MF = mf;
  LIUArray = liuarray;
  TRI = tri;
  reinitPhysRegEntries();
  for (unsigned i = 0; i != CacheEntries; ++i)
    Entries[i].clear(mf, indexes, lis);
}

InterferenceCache::Entry *InterferenceCache::get(unsigned PhysReg) {
  unsigned E = PhysRegEntries[PhysReg];
  if (E < CacheEntries && Entries[E].getPhysReg() == PhysReg) {
    if (!Entries[E].valid(LIUArray, TRI))
      Entries[E].revalidate(LIUArray, TRI);
    return &Entries[E];
  }
  // No valid entry exists, pick the next round-robin entry.
  E = RoundRobin;
  if (++RoundRobin == CacheEntries)
    RoundRobin = 0;
  for (unsigned i = 0; i != CacheEntries; ++i) {
    // Skip entries that are in use.
    if (Entries[E].hasRefs()) {
      if (++E == CacheEntries)
        E = 0;
      continue;
    }
    Entries[E].reset(PhysReg, LIUArray, TRI, MF);
    PhysRegEntries[PhysReg] = E;
    return &Entries[E];
  }
  llvm_unreachable("Ran out of interference cache entries.");
}

/// revalidate - LIU contents have changed, update tags.
void InterferenceCache::Entry::revalidate(LiveIntervalUnion *LIUArray,
                                          const TargetRegisterInfo *TRI) {
  // Invalidate all block entries.
  ++Tag;
  // Invalidate all iterators.
  PrevPos = SlotIndex();
  unsigned i = 0;
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i)
    RegUnits[i].VirtTag = LIUArray[*Units].getTag();
}

void InterferenceCache::Entry::reset(unsigned physReg,
                                     LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI,
                                     const MachineFunction *MF) {
  assert(!hasRefs() && "Cannot reset cache entry with references");
  // LIU's changed, invalidate cache.
  ++Tag;
  PhysReg = physReg;
  Blocks.resize(MF->getNumBlockIDs());

  // Reset iterators.
  PrevPos = SlotIndex();
  RegUnits.clear();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) {
    RegUnits.push_back(LIUArray[*Units]);
    RegUnits.back().Fixed = &LIS->getRegUnit(*Units);
  }
}

bool InterferenceCache::Entry::valid(LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI) {
  unsigned i = 0, e = RegUnits.size();
  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units, ++i) {
    if (i == e)
      return false;
    if (LIUArray[*Units].changedSince(RegUnits[i].VirtTag))
      return false;
  }
  return i == e;
}

void InterferenceCache::Entry::update(unsigned MBBNum) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);

  // Use advanceTo only when possible.
  if (PrevPos != Start) {
    if (!PrevPos.isValid() || Start < PrevPos) {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.find(Start);
        RUI.FixedI = RUI.Fixed->find(Start);
      }
    } else {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.advanceTo(Start);
        if (RUI.FixedI != RUI.Fixed->end())
          RUI.FixedI = RUI.Fixed->advanceTo(RUI.FixedI, Start);
      }
    }
    PrevPos = Start;
  }

  MachineFunction::const_iterator MFI =
      MF->getBlockNumbered(MBBNum)->getIterator();
  BlockInterference *BI = &Blocks[MBBNum];
  ArrayRef<SlotIndex> RegMaskSlots;
  ArrayRef<const uint32_t*> RegMaskBits;
  while (true) {
    BI->Tag = Tag;
    BI->First = BI->Last = SlotIndex();

    // Check for first interference from virtregs.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
      if (!I.valid())
        continue;
      SlotIndex StartI = I.start();
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Same thing for fixed interference.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveInterval::const_iterator I = RegUnits[i].FixedI;
      LiveInterval::const_iterator E = RegUnits[i].Fixed->end();
      if (I == E)
        continue;
      SlotIndex StartI = I->start;
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Also check for register mask interference.
    RegMaskSlots = LIS->getRegMaskSlotsInBlock(MBBNum);
    RegMaskBits = LIS->getRegMaskBitsInBlock(MBBNum);
    SlotIndex Limit = BI->First.isValid() ? BI->First : Stop;
    for (unsigned i = 0, e = RegMaskSlots.size();
         i != e && RegMaskSlots[i] < Limit; ++i)
      if (MachineOperand::clobbersPhysReg(RegMaskBits[i], PhysReg)) {
        // Register mask i clobbers PhysReg before the LIU interference.
        BI->First = RegMaskSlots[i];
        break;
      }

    PrevPos = Stop;
    if (BI->First.isValid())
      break;

    // No interference in this block? Go ahead and precompute the next block.
    if (++MFI == MF->end())
      return;
    MBBNum = MFI->getNumber();
    BI = &Blocks[MBBNum];
    if (BI->Tag == Tag)
      return;
    std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);
  }

  // Check for last interference in block.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
    if (!I.valid() || I.start() >= Stop)
      continue;
    I.advanceTo(Stop);
    bool Backup = !I.valid() || I.start() >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I.stop();
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Fixed interference.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveInterval::iterator &I = RegUnits[i].FixedI;
    LiveRange *LR = RegUnits[i].Fixed;
    if (I == LR->end() || I->start >= Stop)
      continue;
    I = LR->advanceTo(I, Stop);
    bool Backup = I == LR->end() || I->start >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I->end;
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Also check for register mask interference.
  SlotIndex Limit = BI->Last.isValid() ? BI->Last : Start;
  for (unsigned i = RegMaskSlots.size();
       i && RegMaskSlots[i-1].getDeadSlot() > Limit; --i)
    if (MachineOperand::clobbersPhysReg(RegMaskBits[i-1], PhysReg)) {
      // Register mask i-1 clobbers PhysReg after the LIU interference.
      // Model the regmask clobber as a dead def.
      BI->Last = RegMaskSlots[i-1].getDeadSlot();
      break;
    }
}