#define DEBUG_TYPE "regalloc"

STATISTIC(NumCoalesced, "Number of copies coalesced");
class InstrPosIndexes {
  void unsetInitialized() { IsInitialized = false; }

  void init(const MachineBasicBlock &MBB) {
    Instr2PosIndex.clear();
    uint64_t LastIndex = 0;
    for (const MachineInstr &MI : MBB) {
      LastIndex += InstrDist;
      Instr2PosIndex[&MI] = LastIndex;
  bool getIndex(const MachineInstr &MI, uint64_t &Index) {
    assert(MI.getParent() == CurMBB && "MI is not in CurMBB");
    auto It = Instr2PosIndex.find(&MI);
    if (It != Instr2PosIndex.end()) {

    unsigned Distance = 1;
    End = std::next(Start);
    while (Start != CurMBB->begin() &&
           !Instr2PosIndex.count(&*std::prev(Start))) {
    while (End != CurMBB->end() && !Instr2PosIndex.count(&*(End))) {

        Start == CurMBB->begin() ? 0 : Instr2PosIndex.at(&*std::prev(Start));
    if (End == CurMBB->end())
      Step = static_cast<uint64_t>(InstrDist);
      uint64_t EndIndex = Instr2PosIndex.at(&*End);
      assert(EndIndex > LastIndex && "Index must be ascending order");
      unsigned NumAvailableIndexes = EndIndex - LastIndex - 1;
      Step = (NumAvailableIndexes + 1) / (Distance + 1);
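      // E.g. with LastIndex == 1024, EndIndex == 2048 and two unindexed
      // instructions (Distance == 2), NumAvailableIndexes is 1023 and Step
      // becomes 1024 / 3 == 341, spreading the new indexes evenly in the gap.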
    if (LLVM_UNLIKELY(!Step || (!LastIndex && Step == InstrDist))) {
      Index = Instr2PosIndex.at(&MI);

    for (auto I = Start; I != End; ++I) {
      Instr2PosIndex[&*I] = LastIndex;
    Index = Instr2PosIndex.at(&MI);
  bool IsInitialized = false;
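  // Consecutive instructions are spaced InstrDist apart, leaving room to give
  // later-inserted instructions an index between their neighbors without
  // renumbering the whole block in the common case.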
  enum { InstrDist = 1024 };
  const MachineBasicBlock *CurMBB = nullptr;
  DenseMap<const MachineInstr *, uint64_t> Instr2PosIndex;
};
class RegAllocFastImpl {
public:
  RegAllocFastImpl(const RegAllocFilterFunc F = nullptr,
                   bool ClearVirtRegs_ = true)
      : ShouldAllocateRegisterImpl(F), StackSlotForVirtReg(-1),
        ClearVirtRegs(ClearVirtRegs_) {}
  MachineFrameInfo *MFI = nullptr;
  MachineRegisterInfo *MRI = nullptr;
  const TargetRegisterInfo *TRI = nullptr;
  const TargetInstrInfo *TII = nullptr;
  RegisterClassInfo RegClassInfo;
  MachineBasicBlock *MBB = nullptr;

  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
    MachineInstr *LastUse = nullptr;
    bool LiveOut = false;
    bool Reloaded = false;

    explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}
    explicit LiveReg() {}

    unsigned getSparseSetIndex() const { return VirtReg.virtRegIndex(); }
  using LiveRegMap = SparseSet<LiveReg, identity<unsigned>, uint16_t>;
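  // LiveVirtRegs maps each currently live virtual register (keyed by its
  // 0-based virtual register index via LiveReg::getSparseSetIndex) to its
  // current physical register assignment and spill/reload state.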
  LiveRegMap LiveVirtRegs;

  DenseMap<Register, LiveReg> BundleVirtRegsMap;
  DenseMap<Register, SmallVector<MachineOperand *, 2>> LiveDbgValueMap;
  DenseMap<Register, SmallVector<MachineInstr *, 1>> DanglingDbgValues;

  BitVector MayLiveAcrossBlocks;
  std::vector<unsigned> RegUnitStates;
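  // UsedInInstr records, per register unit, the generation number of the last
  // instruction that used it. Ordinary uses are tagged InstrGen | 1 and pure
  // physreg uses InstrGen, so isRegUsedInInstr() can optionally ignore the
  // latter (see markRegUsedInInstr / markPhysRegUsedInInstr below).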
  SmallVector<unsigned, 0> UsedInInstr;
  SmallVector<unsigned, 8> DefOperandIndexes;

  InstrPosIndexes PosIndexes;
  void setPhysRegState(MCRegister PhysReg, unsigned NewState);
  bool isPhysRegFree(MCRegister PhysReg) const;

  void markRegUsedInInstr(MCPhysReg PhysReg) {
    for (MCRegUnit Unit : TRI->regunits(PhysReg))
      UsedInInstr[Unit] = InstrGen | 1;

  bool isClobberedByRegMasks(MCRegister PhysReg) const {
    return llvm::any_of(RegMasks, [PhysReg](const uint32_t *Mask) {

  bool isRegUsedInInstr(MCRegister PhysReg, bool LookAtPhysRegUses) const {
    if (LookAtPhysRegUses && isClobberedByRegMasks(PhysReg))
    for (MCRegUnit Unit : TRI->regunits(PhysReg))
      if (UsedInInstr[Unit] >= (InstrGen | !LookAtPhysRegUses))

  void markPhysRegUsedInInstr(MCRegister PhysReg) {
    for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
      assert(UsedInInstr[Unit] <= InstrGen && "non-phys use before phys use?");
      UsedInInstr[Unit] = InstrGen;

  void unmarkRegUsedInInstr(MCRegister PhysReg) {
    for (MCRegUnit Unit : TRI->regunits(PhysReg))
      UsedInInstr[Unit] = 0;
    spillImpossible = ~0u
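  // spillImpossible marks candidates that cannot be evicted at all, e.g.
  // register units holding a pre-assigned physreg or a virtreg already used
  // by the current instruction (see calcSpillCost below).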
  bool runOnMachineFunction(MachineFunction &MF);

  void allocateBasicBlock(MachineBasicBlock &MBB);

  void findAndSortDefOperandIndexes(const MachineInstr &MI);

  void allocateInstruction(MachineInstr &MI);
  void handleDebugValue(MachineInstr &MI);
  void handleBundle(MachineInstr &MI);

  bool usePhysReg(MachineInstr &MI, MCRegister PhysReg);
  bool definePhysReg(MachineInstr &MI, MCRegister PhysReg);
  bool displacePhysReg(MachineInstr &MI, MCRegister PhysReg);
  void freePhysReg(MCRegister PhysReg);

  unsigned calcSpillCost(MCPhysReg PhysReg) const;

  void assignVirtToPhysReg(MachineInstr &MI, LiveReg &, MCRegister PhysReg);
  void allocVirtReg(MachineInstr &MI, LiveReg &LR, Register Hint,
                    bool LookAtPhysRegUses = false);
  void allocVirtRegUndef(MachineOperand &MO);
  void assignDanglingDebugValues(MachineInstr &Def, Register VirtReg,
  bool defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
  bool defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
                     bool LookAtPhysRegUses = false);
  bool useVirtReg(MachineInstr &MI, MachineOperand &MO, Register VirtReg);

  MCPhysReg getErrorAssignment(const LiveReg &LR, MachineInstr &MI,
                               const TargetRegisterClass &RC);

  getMBBBeginInsertionPoint(MachineBasicBlock &MBB,
                            SmallSet<Register, 2> &PrologLiveIns) const;
  void reloadAtBegin(MachineBasicBlock &MBB);
  bool setPhysReg(MachineInstr &MI, MachineOperand &MO,
                  const LiveReg &Assignment);

  bool shouldAllocateRegister(const Register Reg) const;
  int getStackSpaceFor(Register VirtReg);
             MCPhysReg AssignedReg, bool Kill, bool LiveOut);

  bool mayBeSpillFromInlineAsmBr(const MachineInstr &MI) const;

  void dumpState() const;
  RegAllocFastImpl Impl;

      : MachineFunctionPass(ID), Impl(F, ClearVirtRegs_) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    return Impl.runOnMachineFunction(MF);

  StringRef getPassName() const override { return "Fast Register Allocator"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().setNoPHIs();

  MachineFunctionProperties getSetProperties() const override {
    if (Impl.ClearVirtRegs) {
      return MachineFunctionProperties().setNoVRegs();

    return MachineFunctionProperties();

  MachineFunctionProperties getClearedProperties() const override {
    return MachineFunctionProperties().setIsSSA();
char RegAllocFast::ID = 0;

  if (!ShouldAllocateRegisterImpl)
  return ShouldAllocateRegisterImpl(*TRI, *MRI, Reg);
void RegAllocFastImpl::setPhysRegState(MCRegister PhysReg, unsigned NewState) {
    RegUnitStates[Unit] = NewState;

bool RegAllocFastImpl::isPhysRegFree(MCRegister PhysReg) const {
    if (RegUnitStates[Unit] != regFree)
int RegAllocFastImpl::getStackSpaceFor(Register VirtReg) {
  int SS = StackSlotForVirtReg[VirtReg];

  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  unsigned Size = TRI->getSpillSize(RC);
  Align Alignment = TRI->getSpillAlign(RC);

  const MachineFunction &MF = MRI->getMF();
  Align CurrentAlign = ST.getFrameLowering()->getStackAlign();
  if (Alignment > CurrentAlign && !TRI->canRealignStack(MF))
    Alignment = CurrentAlign;

  StackSlotForVirtReg[VirtReg] = FrameIdx;
  PosIndexes.getIndex(A, IndexA);
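  // Looking up B's index may force a renumbering of the block, which would
  // invalidate IndexA, so A's index is re-read in that case.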
  if (LLVM_UNLIKELY(PosIndexes.getIndex(B, IndexB)))
    PosIndexes.getIndex(A, IndexA);
  return IndexA < IndexB;
bool RegAllocFastImpl::mayBeSpillFromInlineAsmBr(const MachineInstr &MI) const {
  auto *MBB = MI.getParent();

  for (const auto &Op : MI.operands())
bool RegAllocFastImpl::mayLiveOut(Register VirtReg) {
  const MachineInstr *SelfLoopDef = nullptr;

  for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
    if (DefInst.getParent() != MBB) {

    if (!SelfLoopDef || dominates(PosIndexes, DefInst, *SelfLoopDef))
      SelfLoopDef = &DefInst;

  static const unsigned Limit = 8;
  for (const MachineInstr &UseInst : MRI->use_nodbg_instructions(VirtReg)) {
    if (UseInst.getParent() != MBB || ++C >= Limit) {

    if (SelfLoopDef == &UseInst ||
        !dominates(PosIndexes, *SelfLoopDef, UseInst)) {
bool RegAllocFastImpl::mayLiveIn(Register VirtReg) {
  static const unsigned Limit = 8;
  for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
    if (DefInst.getParent() != MBB || ++C >= Limit) {
  int FI = getStackSpaceFor(VirtReg);
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);

  SmallVectorImpl<MachineOperand *> &LRIDbgOperands = LiveDbgValueMap[VirtReg];
  SmallMapVector<MachineInstr *, SmallVector<const MachineOperand *>, 2>
  for (MachineOperand *MO : LRIDbgOperands)
    SpilledOperandsMap[MO->getParent()].push_back(MO);
  for (const auto &MISpilledOperands : SpilledOperandsMap) {
    MachineInstr &DBG = *MISpilledOperands.first;
        *MBB, Before, *MISpilledOperands.first, FI, MISpilledOperands.second);
    LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);

      MachineInstr *ClonedDV = MBB->getParent()->CloneMachineInstr(NewDV);
      LLVM_DEBUG(dbgs() << "Cloning debug info due to live out spill\n");

  LRIDbgOperands.clear();
  int FI = getStackSpaceFor(VirtReg);
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);

    MachineBasicBlock &MBB, SmallSet<Register, 2> &PrologLiveIns) const {

    if (!TII->isBasicBlockPrologue(*I) && !mayBeSpillFromInlineAsmBr(*I))

    for (MachineOperand &MO : I->operands()) {
void RegAllocFastImpl::reloadAtBegin(MachineBasicBlock &MBB) {
  if (LiveVirtRegs.empty())

  for (MachineBasicBlock::RegisterMaskPair P : MBB.liveins()) {
    MCRegister Reg = P.PhysReg;
    setPhysRegState(Reg, regLiveIn);

  SmallSet<Register, 2> PrologLiveIns;
      getMBBBeginInsertionPoint(MBB, PrologLiveIns);
  for (const LiveReg &LR : LiveVirtRegs) {
    if (PhysReg == 0 || LR.Error)

    if (RegUnitStates[FirstUnit] == regLiveIn)
           "no reload in start block. Missing vreg def?");

    if (PrologLiveIns.count(PhysReg)) {
      reload(MBB.begin(), LR.VirtReg, PhysReg);
      reload(InsertBefore, LR.VirtReg, PhysReg);

  LiveVirtRegs.clear();
bool RegAllocFastImpl::usePhysReg(MachineInstr &MI, MCRegister Reg) {
  assert(Register::isPhysicalRegister(Reg) && "expected physreg");
  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);
  markRegUsedInInstr(Reg);

bool RegAllocFastImpl::definePhysReg(MachineInstr &MI, MCRegister Reg) {
  bool displacedAny = displacePhysReg(MI, Reg);
  setPhysRegState(Reg, regPreAssigned);
bool RegAllocFastImpl::displacePhysReg(MachineInstr &MI, MCRegister PhysReg) {
  bool displacedAny = false;

    switch (unsigned VirtReg = RegUnitStates[Unit]) {
      LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
      assert(LRI != LiveVirtRegs.end() && "datastructures in sync");

      while (mayBeSpillFromInlineAsmBr(*ReloadBefore))
      reload(ReloadBefore, VirtReg, LRI->PhysReg);

      setPhysRegState(LRI->PhysReg, regFree);
      LRI->Reloaded = true;

      RegUnitStates[Unit] = regFree;
void RegAllocFastImpl::freePhysReg(MCRegister PhysReg) {
  switch (unsigned VirtReg = RegUnitStates[FirstUnit]) {
    setPhysRegState(PhysReg, regFree);

    LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
    assert(LRI != LiveVirtRegs.end());
    setPhysRegState(LRI->PhysReg, regFree);
unsigned RegAllocFastImpl::calcSpillCost(MCPhysReg PhysReg) const {
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
      return spillImpossible;
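      // Evicting a virtreg that already has a stack slot or is live-out is
      // "clean": it would have to be spilled anyway. Otherwise eviction forces
      // an extra spill, which is the more expensive "dirty" case.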
      bool SureSpill = StackSlotForVirtReg[VirtReg] != -1 ||
                       findLiveVirtReg(VirtReg)->LiveOut;
      return SureSpill ? spillClean : spillDirty;
void RegAllocFastImpl::assignDanglingDebugValues(MachineInstr &Definition,
  auto UDBGValIter = DanglingDbgValues.find(VirtReg);
  if (UDBGValIter == DanglingDbgValues.end())

  SmallVectorImpl<MachineInstr *> &Dangling = UDBGValIter->second;
  for (MachineInstr *DbgValue : Dangling) {
    assert(DbgValue->isDebugValue());
    if (!DbgValue->hasDebugOperandForReg(VirtReg))

                                     E = DbgValue->getIterator();
      if (I->modifiesRegister(Reg, TRI) || --Limit == 0) {

    for (MachineOperand &MO : DbgValue->getDebugOperandsForReg(VirtReg)) {
void RegAllocFastImpl::assignVirtToPhysReg(MachineInstr &AtMI, LiveReg &LR,
                                           MCRegister PhysReg) {
  assert(LR.PhysReg == 0 && "Already assigned a physreg");
  assert(PhysReg != 0 && "Trying to assign no register");
  LR.PhysReg = PhysReg;
  setPhysRegState(PhysReg, VirtReg.id());

  assignDanglingDebugValues(AtMI, VirtReg, PhysReg);
  static const unsigned ChainLengthLimit = 3;
    MachineInstr *VRegDef = MRI->getUniqueVRegDef(Reg);
  } while (++C <= ChainLengthLimit);

  static const unsigned DefLimit = 3;
  for (const MachineInstr &MI : MRI->def_instructions(VirtReg)) {
      Reg = traceCopyChain(Reg);
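/// Assign the virtual register in LR to a physical register: try the copy
/// hint Hint0 first, then a hint traced through copy chains (traceCopies),
/// then walk the allocation order taking the first free register, otherwise
/// evict the cheapest occupant according to calcSpillCost();
/// getErrorAssignment() is the last resort when nothing can be evicted.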
void RegAllocFastImpl::allocVirtReg(MachineInstr &MI, LiveReg &LR,
                                    Register Hint0, bool LookAtPhysRegUses) {
  const Register VirtReg = LR.VirtReg;

  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
                    << " in class " << TRI->getRegClassName(&RC)

      !isRegUsedInInstr(Hint0, LookAtPhysRegUses)) {
    if (isPhysRegFree(Hint0)) {
      assignVirtToPhysReg(MI, LR, Hint0);

  Register Hint1 = traceCopies(VirtReg);
      !isRegUsedInInstr(Hint1, LookAtPhysRegUses)) {
    if (isPhysRegFree(Hint1)) {
      assignVirtToPhysReg(MI, LR, Hint1);

  unsigned BestCost = spillImpossible;
  for (MCPhysReg PhysReg : AllocationOrder) {
    if (isRegUsedInInstr(PhysReg, LookAtPhysRegUses)) {

    unsigned Cost = calcSpillCost(PhysReg);
      assignVirtToPhysReg(MI, LR, PhysReg);

    if (PhysReg == Hint0 || PhysReg == Hint1)
      Cost -= spillPrefBonus;

    if (Cost < BestCost) {

    LR.PhysReg = getErrorAssignment(LR, MI, RC);

  displacePhysReg(MI, BestReg);
  assignVirtToPhysReg(MI, LR, BestReg);
void RegAllocFastImpl::allocVirtRegUndef(MachineOperand &MO) {
  if (!shouldAllocateRegister(VirtReg))

  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  bool IsRenamable = true;
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    PhysReg = LRI->PhysReg;
    const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
    if (AllocationOrder.empty()) {
      PhysReg = getErrorAssignment(*LRI, *MO.getParent(), RC);
      IsRenamable = false;
      PhysReg = AllocationOrder.front();

  if (SubRegIdx != 0) {
    PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
bool RegAllocFastImpl::defineLiveThroughVirtReg(MachineInstr &MI,
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end()) {
    if (PrevReg != 0 && isRegUsedInInstr(PrevReg, true)) {
                        << " (tied/earlyclobber resolution)\n");
      freePhysReg(PrevReg);
      allocVirtReg(MI, *LRI, 0, true);

              TII->get(TargetOpcode::COPY), PrevReg)
      MachineOperand &MO = MI.getOperand(OpNum);

  return defineVirtReg(MI, OpNum, VirtReg, true);
bool RegAllocFastImpl::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                     Register VirtReg, bool LookAtPhysRegUses) {
  if (!shouldAllocateRegister(VirtReg))
  MachineOperand &MO = MI.getOperand(OpNum);
  LiveRegMap::iterator LRI;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
    if (mayLiveOut(VirtReg)) {
      LRI->LiveOut = true;

  if (LRI->PhysReg == 0) {
    allocVirtReg(MI, *LRI, 0, LookAtPhysRegUses);
    assert((!isRegUsedInInstr(LRI->PhysReg, LookAtPhysRegUses) || LRI->Error) &&
           "TODO: preassign mismatch");
                      << " use existing assignment to "
  if (LRI->Reloaded || LRI->LiveOut) {
    if (!MI.isImplicitDef()) {
                        << " RL: " << LRI->Reloaded << '\n');
      bool Kill = LRI->LastUse == nullptr;
      spill(SpillBefore, VirtReg, PhysReg, Kill, LRI->LiveOut);

      if (MI.getOpcode() == TargetOpcode::INLINEASM_BR) {
        int FI = StackSlotForVirtReg[VirtReg];
        const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
        for (MachineOperand &MO : MI.operands()) {
          MachineBasicBlock *Succ = MO.getMBB();

      LRI->LastUse = nullptr;

    LRI->LiveOut = false;
    LRI->Reloaded = false;

  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = *LRI;

  markRegUsedInInstr(PhysReg);
  return setPhysReg(MI, MO, *LRI);
bool RegAllocFastImpl::useVirtReg(MachineInstr &MI, MachineOperand &MO,
  if (!shouldAllocateRegister(VirtReg))
  LiveRegMap::iterator LRI;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
    if (mayLiveOut(VirtReg)) {
      LRI->LiveOut = true;

    assert((!MO.isKill() || LRI->LastUse == &MI) && "Invalid kill flag");

  if (LRI->PhysReg == 0) {
    if (MI.isCopy() && MI.getOperand(1).getSubReg() == 0) {
      Hint = MI.getOperand(0).getReg();
      if (Hint.isVirtual()) {
        assert(!shouldAllocateRegister(Hint));
               "Copy destination should already be assigned");

    allocVirtReg(MI, *LRI, Hint, false);

  if (MI.getOpcode() == TargetOpcode::BUNDLE) {
    BundleVirtRegsMap[VirtReg] = *LRI;

  markRegUsedInInstr(LRI->PhysReg);
  return setPhysReg(MI, MO, *LRI);
MCPhysReg RegAllocFastImpl::getErrorAssignment(const LiveReg &LR,
                                               MachineInstr &MI,
                                               const TargetRegisterClass &RC) {
  MachineFunction &MF = *MI.getMF();

  if (AllocationOrder.empty()) {
        "no registers from class available to allocate", Fn,
    assert(!RawRegs.empty() && "register classes cannot have no registers");
    return RawRegs.front();

  if (!LR.Error && EmitError) {
    if (MI.isInlineAsm()) {
      MI.emitInlineAsmError(
          "inline assembly requires more registers than available");
        "ran out of registers during register allocation", Fn,

  return AllocationOrder.front();
bool RegAllocFastImpl::setPhysReg(MachineInstr &MI, MachineOperand &MO,
                                  const LiveReg &Assignment) {
  assert(PhysReg && "assignments should always be to a valid physreg");

      MI.addRegisterKilled(PhysReg, TRI, true);

      MI.addRegisterDead(PhysReg, TRI, true);
      MI.addRegisterDefined(PhysReg, TRI);
void RegAllocFastImpl::dumpState() const {
  for (unsigned Unit = 1, UnitE = TRI->getNumRegUnits(); Unit != UnitE;
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
    case regPreAssigned:

      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
      if (I->LiveOut || I->Reloaded) {

      assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");

  for (const LiveReg &LR : LiveVirtRegs) {
    assert(Register::isPhysicalRegister(PhysReg) && "mapped to physreg");
      assert(RegUnitStates[Unit] == VirtReg && "inverse map valid");
void RegAllocFastImpl::addRegClassDefCounts(
  assert(RegClassDefCounts.size() == TRI->getNumRegClasses());

    if (!shouldAllocateRegister(Reg))
    const TargetRegisterClass *OpRC = MRI->getRegClass(Reg);
    for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
         RCIdx != RCIdxEnd; ++RCIdx) {
      const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
        ++RegClassDefCounts[RCIdx];

    for (unsigned RCIdx = 0, RCIdxEnd = TRI->getNumRegClasses();
         RCIdx != RCIdxEnd; ++RCIdx) {
      const TargetRegisterClass *IdxRC = TRI->getRegClass(RCIdx);
      for (MCRegAliasIterator Alias(Reg, TRI, true); Alias.isValid(); ++Alias) {
          ++RegClassDefCounts[RCIdx];
void RegAllocFastImpl::findAndSortDefOperandIndexes(const MachineInstr &MI) {
  DefOperandIndexes.clear();

  for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
      markPhysRegUsedInInstr(Reg);

  if (DefOperandIndexes.size() <= 1)

  SmallVector<unsigned> RegClassDefCounts(TRI->getNumRegClasses(), 0);
  for (const MachineOperand &MO : MI.all_defs())
    addRegClassDefCounts(RegClassDefCounts, MO.getReg());
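  // Sort heuristic: put defs of "small" classes (more defs in this
  // instruction than the class has allocatable registers) and live-through
  // defs first, so the most constrained defs are allocated while the most
  // registers are still free.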
  llvm::sort(DefOperandIndexes, [&](unsigned I0, unsigned I1) {
    const MachineOperand &MO0 = MI.getOperand(I0);
    const MachineOperand &MO1 = MI.getOperand(I1);
    const TargetRegisterClass &RC0 = *MRI->getRegClass(Reg0);
    const TargetRegisterClass &RC1 = *MRI->getRegClass(Reg1);

    unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
    unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();

    bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
    bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
    if (SmallClass0 > SmallClass1)
    if (SmallClass0 < SmallClass1)

    if (Livethrough0 > Livethrough1)
    if (Livethrough0 < Livethrough1)

    unsigned TiedIdx = MI.findTiedOperandIdx(MI.getOperandNo(&MO));
void RegAllocFastImpl::allocateInstruction(MachineInstr &MI) {
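  // Per-instruction allocation proceeds in phases: scan the operands to
  // classify them, allocate the defs (live-through/earlyclobber defs first
  // when needed), apply register-mask clobbers, handle pre-assigned physreg
  // uses, then allocate the remaining virtual register uses, and finally
  // recognize trivially coalescable COPYs whose source and destination ended
  // up in the same register.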
  BundleVirtRegsMap.clear();

  bool HasPhysRegUse = false;
  bool HasRegMask = false;
  bool HasVRegDef = false;
  bool HasDef = false;
  bool HasEarlyClobber = false;
  bool NeedToAssignLiveThroughs = false;
  for (MachineOperand &MO : MI.operands()) {
      if (!shouldAllocateRegister(Reg))
          HasEarlyClobber = true;
          NeedToAssignLiveThroughs = true;
          NeedToAssignLiveThroughs = true;
        if (!MRI->isReserved(Reg)) {
            bool displacedAny = definePhysReg(MI, Reg);
            HasEarlyClobber = true;
          HasPhysRegUse = true;
  bool ReArrangedImplicitOps = true;

    if (NeedToAssignLiveThroughs) {
      while (ReArrangedImplicitOps) {
        ReArrangedImplicitOps = false;
        findAndSortDefOperandIndexes(MI);
        for (unsigned OpIdx : DefOperandIndexes) {
          MachineOperand &MO = MI.getOperand(OpIdx);
            ReArrangedImplicitOps = defineLiveThroughVirtReg(MI, OpIdx, Reg);
            ReArrangedImplicitOps = defineVirtReg(MI, OpIdx, Reg);
          if (ReArrangedImplicitOps)

      while (ReArrangedImplicitOps) {
        ReArrangedImplicitOps = false;
        for (MachineOperand &MO : MI.all_defs()) {
            ReArrangedImplicitOps =
                defineVirtReg(MI, MI.getOperandNo(&MO), Reg);
            if (ReArrangedImplicitOps)

    for (MachineOperand &MO : reverse(MI.all_defs())) {
             "tied def assigned to clobbered register");
      if (MRI->isReserved(Reg))
      unmarkRegUsedInInstr(Reg);

    for (const auto *RM : RegMasks)
      MRI->addPhysRegsUsedFromRegMask(RM);

    for (const LiveReg &LR : LiveVirtRegs) {
      if (PhysReg != 0 && isClobberedByRegMasks(PhysReg))
        displacePhysReg(MI, PhysReg);

  if (HasPhysRegUse) {
    for (MachineOperand &MO : MI.operands()) {
      if (MRI->isReserved(Reg))
      if (!usePhysReg(MI, Reg))
  bool HasUndefUse = false;
  bool ReArrangedImplicitMOs = true;
  while (ReArrangedImplicitMOs) {
    ReArrangedImplicitMOs = false;
    for (MachineOperand &MO : MI.operands()) {
        ReArrangedImplicitMOs = useVirtReg(MI, MO, Reg);
        if (ReArrangedImplicitMOs)

    for (MachineOperand &MO : MI.all_uses()) {
      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);

  if (HasEarlyClobber) {
    for (MachineOperand &MO : reverse(MI.all_defs())) {
        assert(!MO.getSubReg() && "should be already handled in def processing");

  if (MI.isCopy() && MI.getOperand(0).getReg() == MI.getOperand(1).getReg() &&
      MI.getNumOperands() == 2) {
void RegAllocFastImpl::handleDebugValue(MachineInstr &MI) {
  assert(MI.isDebugValue() && "not a DBG_VALUE*");
  for (const auto &MO : MI.debug_operands()) {
    if (!shouldAllocateRegister(Reg))

    int SS = StackSlotForVirtReg[Reg];

    LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
    if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
      for (auto &RegMO : DbgOps)
        setPhysReg(MI, *RegMO, *LRI);
      DanglingDbgValues[Reg].push_back(&MI);

    LiveDbgValueMap[Reg].append(DbgOps.begin(), DbgOps.end());
void RegAllocFastImpl::handleBundle(MachineInstr &MI) {
  while (BundledMI->isBundledWithPred()) {
    for (MachineOperand &MO : BundledMI->operands()) {

      DenseMap<Register, LiveReg>::iterator DI = BundleVirtRegsMap.find(Reg);
      assert(DI != BundleVirtRegsMap.end() && "Unassigned virtual register");

      setPhysReg(MI, MO, DI->second);
void RegAllocFastImpl::allocateBasicBlock(MachineBasicBlock &MBB) {
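  // The block is allocated bottom-up: instructions are visited in reverse
  // order, live-in virtual registers are reloaded at the block start
  // afterwards ("Loading live registers at begin of block"), and identity
  // COPYs collected in Coalesced are erased at the end.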
  PosIndexes.unsetInitialized();
  RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

    setPhysRegState(LiveReg.PhysReg, regPreAssigned);

    if (MI.isDebugValue()) {
      handleDebugValue(MI);

    allocateInstruction(MI);

    if (MI.getOpcode() == TargetOpcode::BUNDLE) {

  LLVM_DEBUG(dbgs() << "Loading live registers at begin of block.\n");

  for (MachineInstr *MI : Coalesced)
  NumCoalesced += Coalesced.size();

  for (auto &UDBGPair : DanglingDbgValues) {
    for (MachineInstr *DbgValue : UDBGPair.second) {
      LLVM_DEBUG(dbgs() << "Register did not survive for " << *DbgValue

  DanglingDbgValues.clear();
bool RegAllocFastImpl::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');

  MRI->freezeReservedRegs();

  unsigned NumRegUnits = TRI->getNumRegUnits();
  UsedInInstr.assign(NumRegUnits, 0);

  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);
  MayLiveAcrossBlocks.clear();
  MayLiveAcrossBlocks.resize(NumVirtRegs);

  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  if (ClearVirtRegs) {
    MRI->clearVirtRegs();

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  RegAllocFastImpl Impl(Opts.Filter, Opts.ClearVRegs);
  bool Changed = Impl.runOnMachineFunction(MF);

  bool PrintFilterName = Opts.FilterName != "all";
  bool PrintNoClearVRegs = !Opts.ClearVRegs;
  bool PrintSemicolon = PrintFilterName && PrintNoClearVRegs;

  OS << "regallocfast";
  if (PrintFilterName || PrintNoClearVRegs) {
    if (PrintFilterName)
      OS << "filter=" << Opts.FilterName;
    if (PrintNoClearVRegs)
      OS << "no-clear-vregs";

                                        bool ClearVirtRegs) {
  return new RegAllocFast(Ftor, ClearVirtRegs);
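// The fast allocator is the register allocator used by -O0 code generation;
// it can also be requested explicitly with `llc -regalloc=fast`, or created
// programmatically through createFastRegisterAllocator() when assembling a
// legacy pass pipeline.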