References to LiveInterval::reg
include/llvm/CodeGen/LiveInterval.h 847 return std::tie(thisIndex, reg) < std::tie(otherIndex, other.reg);
include/llvm/CodeGen/LiveRangeEdit.h 155 unsigned getReg() const { return getParent().reg; }
lib/CodeGen/CalcSpillWeights.cpp 89 unsigned Reg = LI.reg;
162 std::pair<unsigned, unsigned> TargetHint = mri.getRegAllocationHint(li.reg);
207 I = mri.reg_instr_begin(li.reg), E = mri.reg_instr_end();
234 std::tie(reads, writes) = mi->readsWritesVirtualRegister(li.reg);
247 Register hint = copyHint(mi, li.reg, tri, mri);
266 mri.clearSimpleHint(li.reg);
274 mri.addRegAllocationHint(li.reg, Hint.Reg);
lib/CodeGen/InlineSpiller.cpp 291 RI = MRI.reg_instr_nodbg_begin(SnipLI.reg),
301 if (SnipLI.reg == TII.isLoadFromStackSlot(MI, FI) && FI == StackSlot)
305 if (SnipLI.reg == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot)
434 unsigned Reg = LI->reg;
513 assert(isRegToSpill(SnipLI.reg) && "Unexpected register in copy");
547 MIBundleOperands(MI).analyzeVirtReg(VirtReg.reg, &Ops);
559 if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg)
599 if (!canGuaranteeAssignmentAfterRemat(VirtReg.reg, MI)) {
624 if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg) {
1148 auto LI = std::make_unique<LiveInterval>(OrigLI.reg, OrigLI.weight);
1176 unsigned OrigReg = OrigLI.reg;
lib/CodeGen/LiveDebugVariables.cpp 768 if (!Register::isVirtualRegister(LI->reg))
773 for (MachineOperand &MO : MRI.use_nodbg_operands(LI->reg)) {
1043 MachineOperand MO = MachineOperand::CreateReg(LI->reg, false);
lib/CodeGen/LiveInterval.cpp 947 stripValuesNotDefiningMask(reg, *MatchingRange, Matching, Indexes, TRI);
948 stripValuesNotDefiningMask(reg, SR, SR.LaneMask, Indexes, TRI);
971 assert(Register::isVirtualRegister(reg));
972 LaneBitmask VRegMask = MRI.getMaxLaneMaskForVReg(reg);
975 for (const MachineOperand &MO : MRI.def_operands(reg)) {
1037 OS << printReg(reg) << ' ';
1081 LaneBitmask MaxMask = MRI != nullptr ? MRI->getMaxLaneMaskForVReg(reg)
1355 for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(LI.reg),
1376 MO.setReg(LIV[EqClass-1]->reg);
lib/CodeGen/LiveIntervalUnion.cpp 89 << printReg(SI.value()->reg, TRI);
98 VisitedVRegs.set(SI.value()->reg);
lib/CodeGen/LiveIntervals.cpp 198 LRCalc->calculate(LI, MRI->shouldTrackSubRegLiveness(LI.reg));
444 assert(Register::isVirtualRegister(li->reg) &&
450 shrinkToUses(S, li->reg);
461 unsigned Reg = li->reg;
512 unsigned VReg = LI.reg;
532 MI->addRegisterDead(LI.reg, TRI);
1659 unsigned Reg = LI.reg;
lib/CodeGen/LiveRangeCalc.cpp 81 unsigned Reg = LI.reg;
144 extendToUses(MainRange, LI.reg, LaneBitmask::getAll(), &LI);
lib/CodeGen/LiveRangeEdit.cpp 190 for (MachineOperand &MO : MRI.reg_nodbg_operands(LI->reg)) {
226 if (UseMI->readsWritesVirtualRegister(LI->reg, &Ops).second)
237 DefMI->addRegisterDead(LI->reg, nullptr);
333 TheDelegate->LRE_WillShrinkVirtReg(LI.reg);
370 MI->substituteRegister(Dest, NewLI.reg, 0, TRI);
410 unsigned VReg = LI->reg;
443 VRM->setIsSplitFromReg(SplitLI->reg, Original);
445 TheDelegate->LRE_DidCloneVirtReg(SplitLI->reg, VReg);
468 if (MRI.recomputeRegClass(LI.reg))
471 dbgs() << "Inflated " << printReg(LI.reg) << " to "
472 << TRI->getRegClassName(MRI.getRegClass(LI.reg)) << '\n';
lib/CodeGen/LiveRegMatrix.cpp 104 LLVM_DEBUG(dbgs() << "assigning " << printReg(VirtReg.reg, TRI) << " to "
106 assert(!VRM->hasPhys(VirtReg.reg) && "Duplicate VirtReg assignment");
107 VRM->assignVirt2Phys(VirtReg.reg, PhysReg);
121 Register PhysReg = VRM->getPhys(VirtReg.reg);
122 LLVM_DEBUG(dbgs() << "unassigning " << printReg(VirtReg.reg, TRI) << " from "
124 VRM->clearVirt(VirtReg.reg);
150 if (RegMaskVirtReg != VirtReg.reg || RegMaskTag != UserTag) {
151 RegMaskVirtReg = VirtReg.reg;
167 CoalescerPair CP(VirtReg.reg, PhysReg, *TRI);
lib/CodeGen/MachineVerifier.cpp 2354 assert(Reg == LI.reg && "Invalid reg to interval mapping");
2681 unsigned Reg = LI.reg;
2698 report_context(SR, LI.reg, SR.LaneMask);
2701 verifyLiveRange(SR, LI.reg, SR.LaneMask);
lib/CodeGen/RegAllocBase.cpp 90 assert(!VRM->hasPhys(VirtReg->reg) && "Register already assigned");
93 if (MRI->reg_nodbg_empty(VirtReg->reg)) {
96 LIS->removeInterval(VirtReg->reg);
107 << TRI->getRegClassName(MRI->getRegClass(VirtReg->reg))
120 I = MRI->reg_instr_begin(VirtReg->reg), E = MRI->reg_instr_end();
136 VRM->assignVirt2Phys(VirtReg->reg,
137 RegClassInfo.getOrder(MRI->getRegClass(VirtReg->reg)).front());
148 assert(!VRM->hasPhys(SplitVirtReg->reg) && "Register already assigned");
149 if (MRI->reg_nodbg_empty(SplitVirtReg->reg)) {
153 LIS->removeInterval(SplitVirtReg->reg);
157 assert(Register::isVirtualRegister(SplitVirtReg->reg) &&
lib/CodeGen/RegAllocBasic.cpp 230 if (!VRM->hasPhys(Spill.reg))
262 AllocationOrder Order(VirtReg.reg, *VRM, RegClassInfo, Matrix);
lib/CodeGen/RegAllocGreedy.cpp 256 return ExtraRegInfo[VirtReg.reg].Stage;
261 ExtraRegInfo[VirtReg.reg].Stage = Stage;
687 const unsigned Reg = LI->reg;
778 if (unsigned Hint = MRI->getSimpleHint(VirtReg.reg))
810 AllocationOrder Order(VirtReg.reg, *VRM, RegClassInfo, Matrix);
888 unsigned Cascade = ExtraRegInfo[VirtReg.reg].Cascade;
902 assert(Register::isVirtualRegister(Intf->reg) &&
908 if (FixedRegisters.count(Intf->reg))
922 RegClassInfo.getNumAllocatableRegs(MRI->getRegClass(VirtReg.reg)) <
923 RegClassInfo.getNumAllocatableRegs(MRI->getRegClass(Intf->reg)));
925 unsigned IntfCascade = ExtraRegInfo[Intf->reg].Cascade;
934 bool BreaksHint = VRM->hasPreferredPhys(Intf->reg);
987 if (!Register::isVirtualRegister(Intf->reg))
994 bool BreaksHint = VRM->hasPreferredPhys(Intf->reg);
1053 unsigned Cascade = ExtraRegInfo[VirtReg.reg].Cascade;
1055 Cascade = ExtraRegInfo[VirtReg.reg].Cascade = NextCascade++;
1077 if (!VRM->hasPhys(Intf->reg))
1080 LastEvicted.addEviction(PhysReg, VirtReg.reg, Intf->reg);
1083 assert((ExtraRegInfo[Intf->reg].Cascade < Cascade ||
1086 ExtraRegInfo[Intf->reg].Cascade = Cascade;
1088 NewVRegs.push_back(Intf->reg);
1127 const TargetRegisterClass *RC = MRI->getRegClass(VirtReg.reg);
1588 unsigned VirtRegToSplit = SA->getParent().reg;
1689 unsigned Reg = SA->getParent().reg;
1822 MachineInstr *MI = MRI->getUniqueVRegDef(VirtReg.reg);
1963 << printReg(VirtReg.reg, TRI) << " may ");
2022 unsigned Reg = VirtReg.reg;
2088 const TargetRegisterClass *CurRC = MRI->getRegClass(VirtReg.reg);
2116 getNumAllocatableRegsForConstraints(MI, VirtReg.reg, SuperRC, TII,
2134 DebugVars->splitRegister(VirtReg.reg, LREdit.regs(), *LIS);
2430 DebugVars->splitRegister(VirtReg.reg, LREdit.regs(), *LIS);
2532 const TargetRegisterClass *CurRC = MRI->getRegClass(VirtReg.reg);
2551 MRI->getRegClass(Intf->reg) == CurRC) &&
2552 !(hasTiedDef(MRI, VirtReg.reg) && !hasTiedDef(MRI, Intf->reg))) ||
2553 FixedRegisters.count(Intf->reg)) {
2629 assert(!FixedRegisters.count(VirtReg.reg));
2630 FixedRegisters.insert(VirtReg.reg);
2665 unsigned ItVirtReg = (*It)->reg;
2718 unsigned ItVirtReg = (*It)->reg;
2764 FixedRegisters.insert(LI->reg);
2921 unsigned Reg = VirtReg.reg;
3024 assert(Register::isVirtualRegister(LI->reg) &&
3028 if (!VRM->hasPhys(LI->reg))
3040 AllocationOrder Order(VirtReg.reg, *VRM, RegClassInfo, Matrix);
3043 LastEvicted.clearEvicteeInfo(VirtReg.reg);
3061 << ExtraRegInfo[VirtReg.reg].Cascade << '\n');
3070 unsigned Hint = MRI->getSimpleHint(VirtReg.reg);
3080 LastEvicted.clearEvicteeInfo(VirtReg.reg);
3092 NewVRegs.push_back(VirtReg.reg);
3102 LastEvicted.clearEvicteeInfo(VirtReg.reg);
3121 NewVRegs.push_back(VirtReg.reg);
lib/CodeGen/RegAllocPBQP.cpp 293 return std::get<0>(I1)->reg < std::get<0>(I2)->reg;
599 EmptyIntervalVRegs.insert(VRegLI.reg);
600 VRegsToAlloc.erase(VRegLI.reg);
696 LLVM_DEBUG(dbgs() << printReg(LI.reg, &TRI) << " ");
697 VRegsToAlloc.insert(LI.reg);
753 unsigned PReg = MRI.getSimpleHint(LI.reg);
756 const TargetRegisterClass &RC = *MRI.getRegClass(LI.reg);
768 VRM.assignVirt2Phys(LI.reg, PReg);
lib/CodeGen/RegisterCoalescer.cpp 623 LLVM_DEBUG(dbgs() << "Extending: " << printReg(IntB.reg, TRI));
660 int UIdx = ValSEndInst->findRegisterUseOperandIdx(IntB.reg, true);
666 CopyMI->substituteRegister(IntA.reg, IntB.reg, 0, *TRI);
785 int DefIdx = DefMI->findRegisterDefOperandIdx(IntA.reg);
806 if (NewReg != IntB.reg || !IntB.Query(AValNo->def).isKill())
816 for (MachineOperand &MO : MRI->use_nodbg_operands(IntA.reg)) {
838 if (Register::isVirtualRegister(IntA.reg) &&
839 Register::isVirtualRegister(IntB.reg) &&
840 !MRI->constrainRegClass(IntB.reg, MRI->getRegClass(IntA.reg)))
859 for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(IntA.reg),
888 if (UseMI->getOperand(0).getReg() != IntB.reg ||
919 LaneBitmask Mask = MRI->getMaxLaneMaskForVReg(IntA.reg);
922 LaneBitmask Mask = MRI->getMaxLaneMaskForVReg(IntB.reg);
1066 if (DefMI->getOperand(0).getReg() != IntA.reg ||
1067 DefMI->getOperand(1).getReg() != IntB.reg ||
1124 TII->get(TargetOpcode::COPY), IntB.reg)
1125 .addReg(IntA.reg);
1708 LaneBitmask Mask = MRI->getMaxLaneMaskForVReg(DstInt->reg);
1940 LIS->shrinkToUses(S, LI.reg);
3294 auto &Counter = LargeLIVisitCounter[LI.reg];
3394 MRI->clearKillFlags(LHS.reg);
3395 MRI->clearKillFlags(RHS.reg);
lib/CodeGen/RenameIndependentSubregs.cpp 132 unsigned Reg = LI.reg;
177 unsigned Reg = LI.reg;
214 unsigned Reg = Intervals[0]->reg;
244 unsigned VReg = Intervals[ID]->reg;
306 unsigned Reg = LI.reg;
lib/CodeGen/SplitKit.cpp 176 for (MachineOperand &MO : MRI.use_nodbg_operands(CurLI->reg))
341 unsigned OrigReg = VRM.getOriginal(CurLI->reg);
441 if (R != LI.reg)
644 unsigned Reg = LI->reg;
1337 MO.setReg(LI.reg);
1419 MI->addRegisterDead(LI->reg, &TRI);
1539 VRM.setIsSplitFromReg(SplitLI->reg, Original);
lib/CodeGen/StackSlotColoring.cpp 224 int FI = Register::stackSlot2Index(li.reg);
271 int FI = Register::stackSlot2Index(li->reg);
333 int SS = Register::stackSlot2Index(li->reg);
346 int SS = Register::stackSlot2Index(li->reg);
lib/Target/AMDGPU/GCNNSAReassign.cpp 116 if (VRM->hasPhys(Intervals[N]->reg))
298 dbgs() << " " << llvm::printReg((VRM->getPhys(LI->reg)), TRI);
304 if (VRM->hasPhys(Intervals.back()->reg)) // Did not change allocation.
323 if (VRM->hasPhys(Intervals[I]->reg))
335 << llvm::printReg((VRM->getPhys(Intervals.front()->reg)), TRI)
337 << llvm::printReg((VRM->getPhys(Intervals.back()->reg)), TRI)
lib/Target/AMDGPU/GCNRegBankReassign.cpp 591 const TargetRegisterClass *RC = MRI->getRegClass(LI.reg);
lib/Target/WebAssembly/WebAssemblyRegColoring.cpp 121 if (MRI->isLiveIn(LHS->reg) != MRI->isLiveIn(RHS->reg))
122 return MRI->isLiveIn(LHS->reg);
138 unsigned Old = LI->reg;
145 if (MRI->getRegClass(SortedIntervals[C]->reg) != RC)
155 unsigned New = SortedIntervals[Color]->reg;
160 LLVM_DEBUG(dbgs() << "Assigning vreg" << Register::virtReg2Index(LI->reg)
168 unsigned Old = SortedIntervals[I]->reg;