66#define DEBUG_TYPE "regalloc"
68STATISTIC(numJoins,
"Number of interval joins performed");
69STATISTIC(numCrossRCs,
"Number of cross class joins performed");
70STATISTIC(numCommutes,
"Number of instruction commuting performed");
72STATISTIC(NumReMats,
"Number of instructions re-materialized");
73STATISTIC(NumInflated,
"Number of register classes inflated");
74STATISTIC(NumLaneConflicts,
"Number of dead lane conflicts tested");
75STATISTIC(NumLaneResolves,
"Number of dead lane conflicts resolved");
76STATISTIC(NumShrinkToUses,
"Number of shrinkToUses called");
79 cl::desc(
"Coalesce copies (default=true)"),
94 cl::desc(
"Coalesce copies that span blocks (default=subtarget)"),
99 cl::desc(
"Verify machine instrs before and after register coalescing"),
104 cl::desc(
"During rematerialization for a copy, if the def instruction has "
105 "many other copy uses to be rematerialized, delay the multiple "
106 "separate live interval update work and do them all at once after "
107 "all those rematerialization are done. It will save a lot of "
113 cl::desc(
"If the valnos size of an interval is larger than the threshold, "
114 "it is regarded as a large interval. "),
119 cl::desc(
"For a large interval, if it is coalesced with other live "
120 "intervals many times more than the threshold, stop its "
121 "coalescing to control the compile time. "),
146 DenseMap<unsigned, PHIValPos> PHIValToPos;
150 DenseMap<Register, SmallVector<unsigned, 2>> RegToPHIIdx;
155 using DbgValueLoc = std::pair<SlotIndex, MachineInstr *>;
156 DenseMap<Register, std::vector<DbgValueLoc>> DbgVRegToValues;
160 LaneBitmask ShrinkMask;
164 bool ShrinkMainRange =
false;
168 bool JoinGlobalCopies =
false;
172 bool JoinSplitEdges =
false;
175 SmallVector<MachineInstr *, 8> WorkList;
176 SmallVector<MachineInstr *, 8> LocalWorkList;
180 SmallPtrSet<MachineInstr *, 8> ErasedInstrs;
183 SmallVector<MachineInstr *, 8> DeadDefs;
191 DenseSet<Register> ToBeUpdated;
195 DenseMap<Register, unsigned long> LargeLIVisitCounter;
198 void eliminateDeadDefs(LiveRangeEdit *Edit =
nullptr);
201 void LRE_WillEraseInstruction(MachineInstr *
MI)
override;
204 void coalesceLocals();
207 void joinAllIntervals();
211 void copyCoalesceInMBB(MachineBasicBlock *
MBB);
222 void lateLiveIntervalUpdate();
227 bool copyValueUndefInPredecessors(
LiveRange &S,
const MachineBasicBlock *
MBB,
228 LiveQueryResult SLRQ);
232 void setUndefOnPrunedSubRegUses(LiveInterval &LI,
Register Reg,
233 LaneBitmask PrunedLanes);
240 bool joinCopy(MachineInstr *CopyMI,
bool &Again,
241 SmallPtrSetImpl<MachineInstr *> &CurrentErasedInstrs);
246 bool joinIntervals(CoalescerPair &CP);
249 bool joinVirtRegs(CoalescerPair &CP);
254 bool isHighCostLiveInterval(LiveInterval &LI);
257 bool joinReservedPhysReg(CoalescerPair &CP);
264 void mergeSubRangeInto(LiveInterval &LI,
const LiveRange &ToMerge,
265 LaneBitmask LaneMask, CoalescerPair &CP,
271 LaneBitmask LaneMask,
const CoalescerPair &CP);
277 bool adjustCopiesBackFrom(
const CoalescerPair &CP, MachineInstr *CopyMI);
281 bool hasOtherReachingDefs(LiveInterval &IntA, LiveInterval &IntB,
282 VNInfo *AValNo, VNInfo *BValNo);
292 std::pair<bool, bool> removeCopyByCommutingDef(
const CoalescerPair &CP,
293 MachineInstr *CopyMI);
296 bool removePartialRedundancy(
const CoalescerPair &CP, MachineInstr &CopyMI);
300 bool reMaterializeDef(
const CoalescerPair &CP, MachineInstr *CopyMI,
304 bool canJoinPhys(
const CoalescerPair &CP);
319 void addUndefFlag(
const LiveInterval &
Int, SlotIndex UseIdx,
320 MachineOperand &MO,
unsigned SubRegIdx);
326 MachineInstr *eliminateUndefCopy(MachineInstr *CopyMI);
340 bool applyTerminalRule(
const MachineInstr &Copy)
const;
346 SmallVectorImpl<MachineInstr *> *
Dead =
nullptr) {
348 if (LIS->shrinkToUses(LI,
Dead)) {
352 LIS->splitSeparateComponents(*LI, SplitLIs);
360 void deleteInstr(MachineInstr *
MI) {
361 ErasedInstrs.insert(
MI);
362 LIS->RemoveMachineInstrFromMaps(*
MI);
363 MI->eraseFromParent();
372 void checkMergingChangesDbgValues(CoalescerPair &CP,
LiveRange &
LHS,
381 RegisterCoalescer() =
default;
382 RegisterCoalescer &operator=(RegisterCoalescer &&
Other) =
default;
384 RegisterCoalescer(LiveIntervals *LIS, SlotIndexes *SI,
385 const MachineLoopInfo *Loops)
386 : LIS(LIS), SI(SI), Loops(Loops) {}
388 bool run(MachineFunction &MF);
395 RegisterCoalescerLegacy() : MachineFunctionPass(ID) {}
397 void getAnalysisUsage(AnalysisUsage &AU)
const override;
399 MachineFunctionProperties getClearedProperties()
const override {
400 return MachineFunctionProperties().setIsSSA();
404 bool runOnMachineFunction(MachineFunction &)
override;
409char RegisterCoalescerLegacy::ID = 0;
414 "Register Coalescer",
false,
false)
426 Dst = MI->getOperand(0).getReg();
427 DstSub = MI->getOperand(0).getSubReg();
428 Src = MI->getOperand(1).getReg();
429 SrcSub = MI->getOperand(1).getSubReg();
430 }
else if (
MI->isSubregToReg()) {
431 Dst = MI->getOperand(0).getReg();
432 DstSub = tri.composeSubRegIndices(MI->getOperand(0).getSubReg(),
433 MI->getOperand(2).getImm());
434 Src = MI->getOperand(1).getReg();
435 SrcSub = MI->getOperand(1).getSubReg();
447 if (
MBB->pred_size() != 1 ||
MBB->succ_size() != 1)
450 for (
const auto &
MI : *
MBB) {
451 if (!
MI.isCopyLike() && !
MI.isUnconditionalBranch())
461 Flipped = CrossClass =
false;
464 unsigned SrcSub = 0, DstSub = 0;
467 Partial = SrcSub || DstSub;
470 if (Src.isPhysical()) {
471 if (Dst.isPhysical())
481 if (Dst.isPhysical()) {
484 Dst = TRI.getSubReg(Dst, DstSub);
492 Dst = TRI.getMatchingSuperReg(Dst, SrcSub, SrcRC);
503 if (SrcSub && DstSub) {
505 if (Src == Dst && SrcSub != DstSub)
508 NewRC = TRI.getCommonSuperRegClass(SrcRC, SrcSub, DstRC, DstSub, SrcIdx,
515 NewRC = TRI.getMatchingSuperRegClass(DstRC, SrcRC, DstSub);
519 NewRC = TRI.getMatchingSuperRegClass(SrcRC, DstRC, SrcSub);
522 NewRC = TRI.getCommonSubClass(DstRC, SrcRC);
531 if (DstIdx && !SrcIdx) {
537 CrossClass = NewRC != DstRC || NewRC != SrcRC;
540 assert(Src.isVirtual() &&
"Src must be virtual");
541 assert(!(Dst.isPhysical() && DstSub) &&
"Cannot have a physical SubIdx");
548 if (DstReg.isPhysical())
560 unsigned SrcSub = 0, DstSub = 0;
568 }
else if (Src != SrcReg) {
573 if (DstReg.isPhysical()) {
574 if (!Dst.isPhysical())
576 assert(!DstIdx && !SrcIdx &&
"Inconsistent CoalescerPair state.");
579 Dst = TRI.getSubReg(Dst, DstSub);
582 return DstReg == Dst;
584 return Register(TRI.getSubReg(DstReg, SrcSub)) == Dst;
591 return TRI.composeSubRegIndices(SrcIdx, SrcSub) ==
592 TRI.composeSubRegIndices(DstIdx, DstSub);
595void RegisterCoalescerLegacy::getAnalysisUsage(
AnalysisUsage &AU)
const {
607void RegisterCoalescer::eliminateDeadDefs(
LiveRangeEdit *Edit) {
617void RegisterCoalescer::LRE_WillEraseInstruction(
MachineInstr *
MI) {
622bool RegisterCoalescer::adjustCopiesBackFrom(
const CoalescerPair &CP,
625 assert(!CP.
isPhys() &&
"This doesn't work for physreg copies.");
650 if (BS == IntB.
end())
652 VNInfo *BValNo = BS->valno;
657 if (BValNo->
def != CopyIdx)
664 if (AS == IntA.
end())
666 VNInfo *AValNo = AS->valno;
678 if (ValS == IntB.
end())
696 SlotIndex FillerStart = ValS->end, FillerEnd = BS->start;
700 BValNo->
def = FillerStart;
708 if (BValNo != ValS->valno)
717 S.removeSegment(*SS,
true);
721 if (!S.getVNInfoAt(FillerStart)) {
724 S.extendInBlock(BBStart, FillerStart);
726 VNInfo *SubBValNo = S.getVNInfoAt(CopyIdx);
729 if (SubBValNo != SubValSNo)
730 S.MergeValueNumberInto(SubBValNo, SubValSNo);
747 bool RecomputeLiveRange = AS->end == CopyIdx;
748 if (!RecomputeLiveRange) {
751 if (SS != S.end() &&
SS->end == CopyIdx) {
752 RecomputeLiveRange =
true;
757 if (RecomputeLiveRange)
764bool RegisterCoalescer::hasOtherReachingDefs(
LiveInterval &IntA,
773 if (ASeg.
valno != AValNo)
776 if (BI != IntB.
begin())
778 for (; BI != IntB.
end() && ASeg.
end >= BI->start; ++BI) {
779 if (BI->valno == BValNo)
781 if (BI->start <= ASeg.
start && BI->end > ASeg.
start)
783 if (BI->start > ASeg.
start && BI->start < ASeg.
end)
797 bool MergedWithDead =
false;
799 if (S.
valno != SrcValNo)
810 MergedWithDead =
true;
813 return std::make_pair(
Changed, MergedWithDead);
817RegisterCoalescer::removeCopyByCommutingDef(
const CoalescerPair &CP,
850 assert(BValNo !=
nullptr && BValNo->
def == CopyIdx);
856 return {
false,
false};
859 return {
false,
false};
861 return {
false,
false};
868 return {
false,
false};
874 return DefMO.getReg() == IntA.reg() && !DefMO.getSubReg();
876 return {
false,
false};
888 if (!
TII->findCommutedOpIndices(*
DefMI, UseOpIdx, NewDstIdx))
889 return {
false,
false};
894 return {
false,
false};
898 if (hasOtherReachingDefs(IntA, IntB, AValNo, BValNo))
899 return {
false,
false};
908 if (US == IntA.
end() || US->valno != AValNo)
912 return {
false,
false};
922 TII->commuteInstruction(*
DefMI,
false, UseOpIdx, NewDstIdx);
924 return {
false,
false};
927 return {
false,
false};
928 if (NewMI !=
DefMI) {
953 UseMO.setReg(NewReg);
958 assert(US != IntA.
end() &&
"Use must be live");
959 if (US->valno != AValNo)
962 UseMO.setIsKill(
false);
964 UseMO.substPhysReg(NewReg, *
TRI);
966 UseMO.setReg(NewReg);
985 VNInfo *SubDVNI = S.getVNInfoAt(DefIdx);
988 VNInfo *SubBValNo = S.getVNInfoAt(CopyIdx);
990 S.MergeValueNumberInto(SubDVNI, SubBValNo);
998 bool ShrinkB =
false;
1012 VNInfo *ASubValNo = SA.getVNInfoAt(AIdx);
1021 MaskA |= SA.LaneMask;
1027 VNInfo *BSubValNo = SR.empty() ? SR.getNextValue(CopyIdx, Allocator)
1028 : SR.getVNInfoAt(CopyIdx);
1029 assert(BSubValNo != nullptr);
1030 auto P = addSegmentsWithValNo(SR, BSubValNo, SA, ASubValNo);
1031 ShrinkB |= P.second;
1033 BSubValNo->def = ASubValNo->def;
1041 if ((SB.LaneMask & MaskA).any())
1045 SB.removeSegment(*S,
true);
1049 BValNo->
def = AValNo->
def;
1051 ShrinkB |=
P.second;
1058 return {
true, ShrinkB};
1108bool RegisterCoalescer::removePartialRedundancy(
const CoalescerPair &CP,
1141 bool FoundReverseCopy =
false;
1160 bool ValB_Changed =
false;
1161 for (
auto *VNI : IntB.
valnos) {
1162 if (VNI->isUnused())
1165 ValB_Changed =
true;
1173 FoundReverseCopy =
true;
1177 if (!FoundReverseCopy)
1187 if (CopyLeftBB && CopyLeftBB->
succ_size() > 1)
1198 if (InsPos != CopyLeftBB->
end()) {
1204 LLVM_DEBUG(
dbgs() <<
"\tremovePartialRedundancy: Move the copy to "
1209 TII->get(TargetOpcode::COPY), IntB.
reg())
1220 ErasedInstrs.
erase(NewCopyMI);
1222 LLVM_DEBUG(
dbgs() <<
"\tremovePartialRedundancy: Remove the copy from "
1233 deleteInstr(&CopyMI);
1249 if (!IntB.
liveAt(UseIdx))
1250 MO.setIsUndef(
true);
1260 VNInfo *BValNo = SR.Query(CopyIdx).valueOutOrDead();
1261 assert(BValNo &&
"All sublanes should be live");
1270 for (
unsigned I = 0;
I != EndPoints.
size();) {
1272 EndPoints[
I] = EndPoints.
back();
1291bool RegisterCoalescer::reMaterializeDef(
const CoalescerPair &CP,
1319 if (!
TII->isReMaterializable(*
DefMI))
1322 bool SawStore =
false;
1326 if (
MCID.getNumDefs() != 1)
1334 if (SrcIdx && DstIdx)
1350 for (MCRegUnit Unit :
TRI->regunits(DstReg)) {
1369 unsigned NewDstIdx =
TRI->composeSubRegIndices(CP.
getSrcIdx(), DefSubIdx);
1371 NewDstReg =
TRI->getSubReg(DstReg, NewDstIdx);
1381 "Only expect to deal with virtual or physical registers");
1395 LiveRangeEdit Edit(&SrcInt, NewRegs, *MF, *LIS,
nullptr,
this);
1410 "Shouldn't have SrcIdx+DstIdx at this point");
1413 TRI->getCommonSubClass(DefRC, DstRC);
1414 if (CommonRC !=
nullptr) {
1422 if (MO.isReg() && MO.getReg() == DstReg && MO.getSubReg() == DstIdx) {
1444 "No explicit operands after implicit operands.");
1447 "unexpected implicit virtual register def");
1453 ErasedInstrs.
insert(CopyMI);
1477 ((
TRI->getSubReg(MO.
getReg(), DefSubIdx) ==
1491 "subrange update for implicit-def of super register may not be "
1492 "properly handled");
1500 if (DefRC !=
nullptr) {
1502 NewRC =
TRI->getMatchingSuperRegClass(NewRC, DefRC, NewIdx);
1504 NewRC =
TRI->getCommonSubClass(NewRC, DefRC);
1505 assert(NewRC &&
"subreg chosen for remat incompatible with instruction");
1511 SR.LaneMask =
TRI->composeSubRegIndexLaneMask(DstIdx, SR.LaneMask);
1516 updateRegDefsUses(DstReg, DstReg, DstIdx);
1565 if (!SR.liveAt(DefIndex))
1566 SR.createDeadDef(DefIndex,
Alloc);
1567 MaxMask &= ~SR.LaneMask;
1569 if (MaxMask.
any()) {
1587 bool UpdatedSubRanges =
false;
1602 if (!SR.
liveAt(DefIndex))
1608 if ((SR.
LaneMask & DstMask).none()) {
1610 <<
"Removing undefined SubRange "
1623 UpdatedSubRanges =
true;
1626 if (UpdatedSubRanges)
1633 "Only expect virtual or physical registers in remat");
1641 bool HasDefMatchingCopy =
false;
1648 if (DstReg != CopyDstReg)
1651 HasDefMatchingCopy =
true;
1655 if (!HasDefMatchingCopy)
1657 CopyDstReg,
true ,
true ,
false ));
1705 UseMO.substPhysReg(DstReg, *
TRI);
1707 UseMO.setReg(DstReg);
1716 if (ToBeUpdated.
count(SrcReg))
1719 unsigned NumCopyUses = 0;
1721 if (UseMO.getParent()->isCopyLike())
1727 if (!DeadDefs.
empty())
1728 eliminateDeadDefs(&Edit);
1730 ToBeUpdated.
insert(SrcReg);
1748 unsigned SrcSubIdx = 0, DstSubIdx = 0;
1749 if (!
isMoveInstr(*
TRI, CopyMI, SrcReg, DstReg, SrcSubIdx, DstSubIdx))
1758 if ((SR.
LaneMask & SrcMask).none())
1763 }
else if (SrcLI.
liveAt(Idx))
1771 assert(Seg !=
nullptr &&
"No segment for defining instruction");
1776 if (((V &&
V->isPHIDef()) || (!V && !DstLI.
liveAt(Idx)))) {
1784 CopyMI->
getOpcode() == TargetOpcode::SUBREG_TO_REG);
1789 CopyMI->
setDesc(
TII->get(TargetOpcode::IMPLICIT_DEF));
1806 if ((SR.
LaneMask & DstMask).none())
1828 if ((SR.
LaneMask & UseMask).none())
1836 isLive = DstLI.
liveAt(UseIdx);
1849 if (MO.
getReg() == DstReg)
1861 bool IsUndef =
true;
1863 if ((S.LaneMask & Mask).none())
1865 if (S.liveAt(UseIdx)) {
1878 ShrinkMainRange =
true;
1887 if (DstInt && DstReg != SrcReg) {
1893 if (SubReg == 0 && MO.
isDef())
1899 addUndefFlag(*DstInt, UseIdx, MO, SubReg);
1900 }
else if (MO.
isUse() && SubReg == 0 && !DstInt->
liveAt(UseIdx)) {
1922 if (SrcReg == DstReg && !Visited.
insert(
UseMI).second)
1935 for (
unsigned Op :
Ops) {
1941 if (SubIdx && MO.
isDef())
1947 unsigned SubUseIdx =
TRI->composeSubRegIndices(SubIdx, MO.
getSubReg());
1965 addUndefFlag(*DstInt, UseIdx, MO, SubUseIdx);
1976 dbgs() <<
"\t\tupdated: ";
1984bool RegisterCoalescer::canJoinPhys(
const CoalescerPair &CP) {
1989 LLVM_DEBUG(
dbgs() <<
"\tCan only merge into reserved registers.\n");
1998 dbgs() <<
"\tCannot join complex intervals into reserved register.\n");
2002bool RegisterCoalescer::copyValueUndefInPredecessors(
2016void RegisterCoalescer::setUndefOnPrunedSubRegUses(
LiveInterval &LI,
2023 if (SubRegIdx == 0 || MO.
isUndef())
2029 if (!S.
liveAt(Pos) && (PrunedLanes & SubRegMask).any()) {
2045bool RegisterCoalescer::joinCopy(
2060 <<
"are available for allocation\n");
2072 if (!
TRI->shouldCoalesce(CopyMI, SrcRC, SrcIdx, DstRC, DstIdx,
2085 eliminateDeadDefs();
2092 if (
MachineInstr *UndefMI = eliminateUndefCopy(CopyMI)) {
2093 if (UndefMI->isImplicitDef())
2095 deleteInstr(CopyMI);
2105 LLVM_DEBUG(
dbgs() <<
"\tCopy already coalesced: " << LI <<
'\n');
2110 assert(ReadVNI &&
"No value before copy and no <undef> flag.");
2111 assert(ReadVNI != DefVNI &&
"Cannot read and define the same value.");
2126 if (copyValueUndefInPredecessors(S,
MBB, SLRQ)) {
2127 LLVM_DEBUG(
dbgs() <<
"Incoming sublane value is undef at copy\n");
2128 PrunedLanes |= S.LaneMask;
2135 if (PrunedLanes.
any()) {
2136 LLVM_DEBUG(
dbgs() <<
"Pruning undef incoming lanes: " << PrunedLanes
2138 setUndefOnPrunedSubRegUses(LI, CP.
getSrcReg(), PrunedLanes);
2143 deleteInstr(CopyMI);
2152 if (!canJoinPhys(CP)) {
2155 bool IsDefCopy =
false;
2156 if (reMaterializeDef(CP, CopyMI, IsDefCopy))
2169 dbgs() <<
"\tConsidering merging to "
2170 <<
TRI->getRegClassName(CP.
getNewRC()) <<
" with ";
2183 ShrinkMainRange =
false;
2189 if (!joinIntervals(CP)) {
2193 bool IsDefCopy =
false;
2194 if (reMaterializeDef(CP, CopyMI, IsDefCopy))
2200 bool Changed = adjustCopiesBackFrom(CP, CopyMI);
2201 bool Shrink =
false;
2203 std::tie(
Changed, Shrink) = removeCopyByCommutingDef(CP, CopyMI);
2205 deleteInstr(CopyMI);
2220 if (removePartialRedundancy(CP, *CopyMI))
2244 if (ErasedInstrs.
erase(CopyMI))
2246 CurrentErasedInstrs.
insert(CopyMI);
2255 if (ShrinkMask.
any()) {
2258 if ((S.LaneMask & ShrinkMask).none())
2263 ShrinkMainRange =
true;
2272 ShrinkMainRange =
true;
2274 if (ShrinkMainRange) {
2289 dbgs() <<
"\tResult = ";
2301bool RegisterCoalescer::joinReservedPhysReg(
CoalescerPair &CP) {
2309 assert(
RHS.containsOneValue() &&
"Invalid join with reserved register");
2319 for (MCRegUnit Unit :
TRI->regunits(DstReg)) {
2335 !RegMaskUsable.
test(DstReg.
id())) {
2357 deleteInstr(CopyMI);
2389 if (
MI->readsRegister(DstReg,
TRI)) {
2399 <<
printReg(DstReg,
TRI) <<
" at " << CopyRegIdx <<
"\n");
2402 deleteInstr(CopyMI);
2405 for (MCRegUnit Unit :
TRI->regunits(DstReg)) {
2497 const unsigned SubIdx;
2501 const LaneBitmask LaneMask;
2505 const bool SubRangeJoin;
2508 const bool TrackSubRegLiveness;
2511 SmallVectorImpl<VNInfo *> &NewVNInfo;
2513 const CoalescerPair &CP;
2515 SlotIndexes *Indexes;
2516 const TargetRegisterInfo *
TRI;
2520 SmallVector<int, 8> Assignments;
2524 enum ConflictResolution {
2556 ConflictResolution Resolution = CR_Keep;
2559 LaneBitmask WriteLanes;
2563 LaneBitmask ValidLanes;
2566 VNInfo *RedefVNI =
nullptr;
2569 VNInfo *OtherVNI =
nullptr;
2582 bool ErasableImplicitDef =
false;
2586 bool Pruned =
false;
2589 bool PrunedComputed =
false;
2596 bool Identical =
false;
2600 bool isAnalyzed()
const {
return WriteLanes.
any(); }
2604 void mustKeepImplicitDef(
const TargetRegisterInfo &
TRI,
2605 const MachineInstr &ImpDef) {
2607 ErasableImplicitDef =
false;
2618 LaneBitmask computeWriteLanes(
const MachineInstr *
DefMI,
bool &Redef)
const;
2621 std::pair<const VNInfo *, Register> followCopyChain(
const VNInfo *VNI)
const;
2623 bool valuesIdentical(VNInfo *Value0, VNInfo *Value1,
2624 const JoinVals &
Other)
const;
2633 ConflictResolution analyzeValue(
unsigned ValNo, JoinVals &
Other);
2638 void computeAssignment(
unsigned ValNo, JoinVals &
Other);
2656 taintExtent(
unsigned ValNo, LaneBitmask TaintedLanes, JoinVals &
Other,
2657 SmallVectorImpl<std::pair<SlotIndex, LaneBitmask>> &TaintExtent);
2661 bool usesLanes(
const MachineInstr &
MI,
Register,
unsigned, LaneBitmask)
const;
2669 bool isPrunedValue(
unsigned ValNo, JoinVals &
Other);
2673 SmallVectorImpl<VNInfo *> &newVNInfo,
const CoalescerPair &cp,
2674 LiveIntervals *lis,
const TargetRegisterInfo *
TRI,
bool SubRangeJoin,
2675 bool TrackSubRegLiveness)
2676 : LR(LR),
Reg(
Reg), SubIdx(SubIdx), LaneMask(LaneMask),
2677 SubRangeJoin(SubRangeJoin), TrackSubRegLiveness(TrackSubRegLiveness),
2678 NewVNInfo(newVNInfo), CP(cp), LIS(lis), Indexes(LIS->getSlotIndexes()),
2679 TRI(
TRI), Assignments(LR.getNumValNums(), -1),
2680 Vals(LR.getNumValNums()) {}
2684 bool mapValues(JoinVals &
Other);
2688 bool resolveConflicts(JoinVals &
Other);
2693 void pruneValues(JoinVals &
Other, SmallVectorImpl<SlotIndex> &EndPoints,
2699 void pruneSubRegValues(LiveInterval &LI, LaneBitmask &ShrinkMask);
2708 void pruneMainSegments(LiveInterval &LI,
bool &ShrinkMainRange);
2714 void eraseInstrs(SmallPtrSetImpl<MachineInstr *> &ErasedInstrs,
2715 SmallVectorImpl<Register> &ShrinkRegs,
2716 LiveInterval *LI =
nullptr);
2719 void removeImplicitDefs();
2722 const int *getAssignments()
const {
return Assignments.
data(); }
2725 ConflictResolution getResolution(
unsigned Num)
const {
2726 return Vals[Num].Resolution;
2733 bool &Redef)
const {
2738 L |=
TRI->getSubRegIndexLaneMask(
2746std::pair<const VNInfo *, Register>
2747JoinVals::followCopyChain(
const VNInfo *VNI)
const {
2753 assert(
MI &&
"No defining instruction");
2754 if (!
MI->isFullCopy())
2755 return std::make_pair(VNI, TrackReg);
2756 Register SrcReg =
MI->getOperand(1).getReg();
2758 return std::make_pair(VNI, TrackReg);
2772 LaneBitmask SMask =
TRI->composeSubRegIndexLaneMask(SubIdx, S.LaneMask);
2773 if ((SMask & LaneMask).
none())
2781 return std::make_pair(VNI, TrackReg);
2784 if (ValueIn ==
nullptr) {
2791 return std::make_pair(
nullptr, SrcReg);
2796 return std::make_pair(VNI, TrackReg);
2799bool JoinVals::valuesIdentical(
VNInfo *Value0,
VNInfo *Value1,
2800 const JoinVals &
Other)
const {
2803 std::tie(Orig0, Reg0) = followCopyChain(Value0);
2804 if (Orig0 == Value1 && Reg0 ==
Other.Reg)
2809 std::tie(Orig1, Reg1) =
Other.followCopyChain(Value1);
2813 if (Orig0 ==
nullptr || Orig1 ==
nullptr)
2814 return Orig0 == Orig1 && Reg0 == Reg1;
2820 return Orig0->
def == Orig1->
def && Reg0 == Reg1;
2823JoinVals::ConflictResolution JoinVals::analyzeValue(
unsigned ValNo,
2825 Val &
V = Vals[ValNo];
2826 assert(!
V.isAnalyzed() &&
"Value has already been analyzed!");
2838 :
TRI->getSubRegIndexLaneMask(SubIdx);
2839 V.ValidLanes =
V.WriteLanes = Lanes;
2848 V.ErasableImplicitDef =
true;
2852 V.ValidLanes =
V.WriteLanes = computeWriteLanes(
DefMI, Redef);
2871 assert((TrackSubRegLiveness ||
V.RedefVNI) &&
2872 "Instruction is reading nonexistent value");
2873 if (
V.RedefVNI !=
nullptr) {
2874 computeAssignment(
V.RedefVNI->id,
Other);
2875 V.ValidLanes |= Vals[
V.RedefVNI->id].ValidLanes;
2887 V.ErasableImplicitDef =
true;
2904 if (OtherVNI->
def < VNI->
def)
2905 Other.computeAssignment(OtherVNI->
id, *
this);
2910 return CR_Impossible;
2912 V.OtherVNI = OtherVNI;
2913 Val &OtherV =
Other.Vals[OtherVNI->
id];
2917 if (!OtherV.isAnalyzed() ||
Other.Assignments[OtherVNI->
id] == -1)
2924 if ((
V.ValidLanes & OtherV.ValidLanes).any())
2926 return CR_Impossible;
2940 Other.computeAssignment(
V.OtherVNI->id, *
this);
2941 Val &OtherV =
Other.Vals[
V.OtherVNI->id];
2943 if (OtherV.ErasableImplicitDef) {
2963 <<
", keeping it.\n");
2964 OtherV.mustKeepImplicitDef(*
TRI, *OtherImpDef);
2971 dbgs() <<
"IMPLICIT_DEF defined at " <<
V.OtherVNI->def
2972 <<
" may be live into EH pad successors, keeping it.\n");
2973 OtherV.mustKeepImplicitDef(*
TRI, *OtherImpDef);
2976 OtherV.ValidLanes &= ~OtherV.WriteLanes;
2994 V.ValidLanes &= ~V.WriteLanes | OtherV.ValidLanes;
3009 valuesIdentical(VNI,
V.OtherVNI,
Other)) {
3032 if ((
V.WriteLanes & OtherV.ValidLanes).none())
3045 "Only early clobber defs can overlap a kill");
3046 return CR_Impossible;
3053 if ((
TRI->getSubRegIndexLaneMask(
Other.SubIdx) & ~
V.WriteLanes).none())
3054 return CR_Impossible;
3056 if (TrackSubRegLiveness) {
3061 if (!OtherLI.hasSubRanges()) {
3063 return (OtherMask &
V.WriteLanes).none() ? CR_Replace : CR_Impossible;
3071 TRI->composeSubRegIndexLaneMask(
Other.SubIdx, OtherSR.LaneMask);
3072 if ((OtherMask &
V.WriteLanes).none())
3075 auto OtherSRQ = OtherSR.Query(VNI->
def);
3076 if (OtherSRQ.valueIn() && OtherSRQ.endPoint() > VNI->
def) {
3078 return CR_Impossible;
3091 return CR_Impossible;
3100 return CR_Unresolved;
3103void JoinVals::computeAssignment(
unsigned ValNo, JoinVals &
Other) {
3104 Val &
V = Vals[ValNo];
3105 if (
V.isAnalyzed()) {
3108 assert(Assignments[ValNo] != -1 &&
"Bad recursion?");
3111 switch ((
V.Resolution = analyzeValue(ValNo,
Other))) {
3115 assert(
V.OtherVNI &&
"OtherVNI not assigned, can't merge.");
3116 assert(
Other.Vals[
V.OtherVNI->id].isAnalyzed() &&
"Missing recursion");
3117 Assignments[ValNo] =
Other.Assignments[
V.OtherVNI->id];
3121 <<
V.OtherVNI->def <<
" --> @"
3122 << NewVNInfo[Assignments[ValNo]]->def <<
'\n');
3125 case CR_Unresolved: {
3127 assert(
V.OtherVNI &&
"OtherVNI not assigned, can't prune");
3128 Val &OtherV =
Other.Vals[
V.OtherVNI->id];
3129 OtherV.Pruned =
true;
3134 Assignments[ValNo] = NewVNInfo.
size();
3140bool JoinVals::mapValues(JoinVals &
Other) {
3142 computeAssignment(i,
Other);
3143 if (Vals[i].Resolution == CR_Impossible) {
3152bool JoinVals::taintExtent(
3161 assert(OtherI !=
Other.LR.end() &&
"No conflict?");
3166 if (End >= MBBEnd) {
3168 << OtherI->valno->id <<
'@' << OtherI->start <<
'\n');
3172 << OtherI->valno->id <<
'@' << OtherI->start <<
" to "
3177 TaintExtent.push_back(std::make_pair(End, TaintedLanes));
3180 if (++OtherI ==
Other.LR.end() || OtherI->start >= MBBEnd)
3184 const Val &OV =
Other.Vals[OtherI->valno->id];
3185 TaintedLanes &= ~OV.WriteLanes;
3188 }
while (TaintedLanes.
any());
3194 if (
MI.isDebugOrPseudoInstr())
3201 unsigned S =
TRI->composeSubRegIndices(SubIdx, MO.
getSubReg());
3202 if ((Lanes &
TRI->getSubRegIndexLaneMask(S)).any())
3208bool JoinVals::resolveConflicts(JoinVals &
Other) {
3211 assert(
V.Resolution != CR_Impossible &&
"Unresolvable conflict");
3212 if (
V.Resolution != CR_Unresolved)
3221 assert(
V.OtherVNI &&
"Inconsistent conflict resolution.");
3223 const Val &OtherV =
Other.Vals[
V.OtherVNI->id];
3228 LaneBitmask TaintedLanes =
V.WriteLanes & OtherV.ValidLanes;
3230 if (!taintExtent(i, TaintedLanes,
Other, TaintExtent))
3234 assert(!TaintExtent.
empty() &&
"There should be at least one conflict.");
3247 "Interference ends on VNI->def. Should have been handled earlier");
3250 assert(LastMI &&
"Range must end at a proper instruction");
3251 unsigned TaintNum = 0;
3254 if (usesLanes(*
MI,
Other.Reg,
Other.SubIdx, TaintedLanes)) {
3259 if (&*
MI == LastMI) {
3260 if (++TaintNum == TaintExtent.
size())
3263 assert(LastMI &&
"Range must end at a proper instruction");
3264 TaintedLanes = TaintExtent[TaintNum].second;
3270 V.Resolution = CR_Replace;
3276bool JoinVals::isPrunedValue(
unsigned ValNo, JoinVals &
Other) {
3277 Val &
V = Vals[ValNo];
3278 if (
V.Pruned ||
V.PrunedComputed)
3281 if (
V.Resolution != CR_Erase &&
V.Resolution != CR_Merge)
3286 V.PrunedComputed =
true;
3287 V.Pruned =
Other.isPrunedValue(
V.OtherVNI->id, *
this);
3291void JoinVals::pruneValues(JoinVals &
Other,
3293 bool changeInstrs) {
3296 switch (Vals[i].Resolution) {
3306 Val &OtherV =
Other.Vals[Vals[i].OtherVNI->id];
3308 OtherV.ErasableImplicitDef && OtherV.Resolution == CR_Keep;
3309 if (!
Def.isBlock()) {
3329 <<
": " <<
Other.LR <<
'\n');
3334 if (isPrunedValue(i,
Other)) {
3339 Val &OtherV =
Other.Vals[Vals[i].OtherVNI->id];
3341 OtherV.ErasableImplicitDef && OtherV.Resolution == CR_Keep;
3344 LIS->
pruneValue(LR, Def, EraseImpDef ?
nullptr : &EndPoints);
3346 << Def <<
": " << LR <<
'\n');
3404 bool DidPrune =
false;
3409 if (
V.Resolution != CR_Erase &&
3410 (
V.Resolution != CR_Keep || !
V.ErasableImplicitDef || !
V.Pruned))
3417 OtherDef =
V.OtherVNI->def;
3420 LLVM_DEBUG(
dbgs() <<
"\t\tExpecting instruction removal at " << Def
3428 if (ValueOut !=
nullptr &&
3430 (
V.Identical &&
V.Resolution == CR_Erase && ValueOut->
def == Def))) {
3432 <<
" at " << Def <<
"\n");
3437 if (ValueOut->
def == Def)
3440 if (
V.Identical && S.Query(OtherDef).valueOutOrDead()) {
3450 ShrinkMask |= S.LaneMask;
3464 ShrinkMask |= S.LaneMask;
3476 if (VNI->
def == Def)
3482void JoinVals::pruneMainSegments(
LiveInterval &LI,
bool &ShrinkMainRange) {
3486 if (Vals[i].Resolution != CR_Keep)
3491 Vals[i].Pruned =
true;
3492 ShrinkMainRange =
true;
3496void JoinVals::removeImplicitDefs() {
3499 if (
V.Resolution != CR_Keep || !
V.ErasableImplicitDef || !
V.Pruned)
3515 switch (Vals[i].Resolution) {
3520 if (!Vals[i].ErasableImplicitDef || !Vals[i].Pruned)
3532 if (LI !=
nullptr) {
3557 ED = ED.
isValid() ? std::min(ED,
I->start) :
I->start;
3559 LE =
LE.isValid() ? std::max(LE,
I->end) :
I->
end;
3562 NewEnd = std::min(NewEnd, LE);
3564 NewEnd = std::min(NewEnd, ED);
3570 if (S != LR.
begin())
3571 std::prev(S)->end = NewEnd;
3575 dbgs() <<
"\t\tremoved " << i <<
'@' <<
Def <<
": " << LR <<
'\n';
3577 dbgs() <<
"\t\t LHS = " << *LI <<
'\n';
3584 assert(
MI &&
"No instruction to erase");
3593 MI->eraseFromParent();
3607 CP, LIS,
TRI,
true,
true);
3609 CP, LIS,
TRI,
true,
true);
3616 if (!LHSVals.mapValues(RHSVals) || !RHSVals.mapValues(LHSVals)) {
3621 if (!LHSVals.resolveConflicts(RHSVals) ||
3622 !RHSVals.resolveConflicts(LHSVals)) {
3633 LHSVals.pruneValues(RHSVals, EndPoints,
false);
3634 RHSVals.pruneValues(LHSVals, EndPoints,
false);
3636 LHSVals.removeImplicitDefs();
3637 RHSVals.removeImplicitDefs();
3642 LRange.
join(RRange, LHSVals.getAssignments(), RHSVals.getAssignments(),
3647 if (EndPoints.
empty())
3653 dbgs() <<
"\t\trestoring liveness to " << EndPoints.
size() <<
" points: ";
3654 for (
unsigned i = 0, n = EndPoints.
size(); i != n; ++i) {
3655 dbgs() << EndPoints[i];
3659 dbgs() <<
": " << LRange <<
'\n';
3664void RegisterCoalescer::mergeSubRangeInto(
LiveInterval &LI,
3668 unsigned ComposeSubRegIdx) {
3678 joinSubRegRanges(SR, RangeCopy, SR.
LaneMask, CP);
3684bool RegisterCoalescer::isHighCostLiveInterval(
LiveInterval &LI) {
3687 auto &Counter = LargeLIVisitCounter[LI.
reg()];
3701 NewVNInfo, CP, LIS,
TRI,
false, TrackSubRegLiveness);
3703 NewVNInfo, CP, LIS,
TRI,
false, TrackSubRegLiveness);
3707 if (isHighCostLiveInterval(
LHS) || isHighCostLiveInterval(
RHS))
3712 if (!LHSVals.mapValues(RHSVals) || !RHSVals.mapValues(LHSVals))
3716 if (!LHSVals.resolveConflicts(RHSVals) || !RHSVals.resolveConflicts(LHSVals))
3720 if (
RHS.hasSubRanges() ||
LHS.hasSubRanges()) {
3726 if (!
LHS.hasSubRanges()) {
3728 :
TRI->getSubRegIndexLaneMask(DstIdx);
3732 }
else if (DstIdx != 0) {
3744 if (!
RHS.hasSubRanges()) {
3746 :
TRI->getSubRegIndexLaneMask(SrcIdx);
3747 mergeSubRangeInto(
LHS,
RHS, Mask, CP, DstIdx);
3752 mergeSubRangeInto(
LHS, R, Mask, CP, DstIdx);
3759 LHSVals.pruneMainSegments(
LHS, ShrinkMainRange);
3761 LHSVals.pruneSubRegValues(
LHS, ShrinkMask);
3762 RHSVals.pruneSubRegValues(
LHS, ShrinkMask);
3768 LHSVals.pruneMainSegments(
LHS, ShrinkMainRange);
3769 LHSVals.pruneSubRegValues(
LHS, ShrinkMask);
3777 LHSVals.pruneValues(RHSVals, EndPoints,
true);
3778 RHSVals.pruneValues(LHSVals, EndPoints,
true);
3783 LHSVals.eraseInstrs(ErasedInstrs, ShrinkRegs, &
LHS);
3784 RHSVals.eraseInstrs(ErasedInstrs, ShrinkRegs);
3785 while (!ShrinkRegs.
empty())
3789 checkMergingChangesDbgValues(CP,
LHS, LHSVals,
RHS, RHSVals);
3794 if (RegIt != RegToPHIIdx.
end()) {
3796 for (
unsigned InstID : RegIt->second) {
3797 auto PHIIt = PHIValToPos.
find(InstID);
3802 auto LII =
RHS.find(
SI);
3803 if (LII ==
RHS.end() || LII->start >
SI)
3821 if (PHIIt->second.SubReg && PHIIt->second.SubReg != CP.
getSrcIdx())
3836 auto InstrNums = RegIt->second;
3837 RegToPHIIdx.
erase(RegIt);
3842 if (RegIt != RegToPHIIdx.
end())
3849 LHS.join(
RHS, LHSVals.getAssignments(), RHSVals.getAssignments(), NewVNInfo);
3857 if (!EndPoints.
empty()) {
3861 dbgs() <<
"\t\trestoring liveness to " << EndPoints.
size() <<
" points: ";
3862 for (
unsigned i = 0, n = EndPoints.
size(); i != n; ++i) {
3863 dbgs() << EndPoints[i];
3867 dbgs() <<
": " <<
LHS <<
'\n';
3876 return CP.
isPhys() ? joinReservedPhysReg(CP) : joinVirtRegs(CP);
3886 for (
auto *
X : ToInsert) {
3887 for (
const auto &
Op :
X->debug_operands()) {
3888 if (
Op.isReg() &&
Op.getReg().isVirtual())
3889 DbgVRegToValues[
Op.getReg()].push_back({
Slot,
X});
3899 for (
auto &
MBB : MF) {
3902 for (
auto &
MI :
MBB) {
3903 if (
MI.isDebugValue()) {
3905 return MO.isReg() && MO.getReg().isVirtual();
3907 ToInsert.push_back(&
MI);
3908 }
else if (!
MI.isDebugOrPseudoInstr()) {
3910 CloseNewDVRange(CurrentSlot);
3919 for (
auto &Pair : DbgVRegToValues)
3923void RegisterCoalescer::checkMergingChangesDbgValues(
CoalescerPair &CP,
3927 JoinVals &RHSVals) {
3929 checkMergingChangesDbgValuesImpl(
Reg,
RHS,
LHS, LHSVals);
3933 checkMergingChangesDbgValuesImpl(
Reg,
LHS,
RHS, RHSVals);
3941void RegisterCoalescer::checkMergingChangesDbgValuesImpl(
Register Reg,
3944 JoinVals &RegVals) {
3946 auto VRegMapIt = DbgVRegToValues.
find(
Reg);
3947 if (VRegMapIt == DbgVRegToValues.
end())
3950 auto &DbgValueSet = VRegMapIt->second;
3951 auto DbgValueSetIt = DbgValueSet.begin();
3952 auto SegmentIt = OtherLR.
begin();
3954 bool LastUndefResult =
false;
3959 auto ShouldUndef = [&RegVals, &
RegLR, &LastUndefResult,
3964 if (LastUndefIdx == Idx)
3965 return LastUndefResult;
3971 auto OtherIt =
RegLR.find(Idx);
3972 if (OtherIt ==
RegLR.end())
3981 auto Resolution = RegVals.getResolution(OtherIt->valno->id);
3983 Resolution != JoinVals::CR_Keep && Resolution != JoinVals::CR_Erase;
3985 return LastUndefResult;
3991 while (DbgValueSetIt != DbgValueSet.end() && SegmentIt != OtherLR.
end()) {
3992 if (DbgValueSetIt->first < SegmentIt->end) {
3995 if (DbgValueSetIt->first >= SegmentIt->start) {
3996 bool HasReg = DbgValueSetIt->second->hasDebugOperandForReg(
Reg);
3997 bool ShouldUndefReg = ShouldUndef(DbgValueSetIt->first);
3998 if (HasReg && ShouldUndefReg) {
4000 DbgValueSetIt->second->setDebugValueUndef();
4014struct MBBPriorityInfo {
4015 MachineBasicBlock *
MBB;
4019 MBBPriorityInfo(MachineBasicBlock *mbb,
unsigned depth,
bool issplit)
4020 :
MBB(mbb),
Depth(depth), IsSplit(issplit) {}
4030 const MBBPriorityInfo *
RHS) {
4032 if (
LHS->Depth !=
RHS->Depth)
4033 return LHS->Depth >
RHS->Depth ? -1 : 1;
4036 if (
LHS->IsSplit !=
RHS->IsSplit)
4037 return LHS->IsSplit ? -1 : 1;
4041 unsigned cl =
LHS->MBB->pred_size() +
LHS->MBB->succ_size();
4042 unsigned cr =
RHS->MBB->pred_size() +
RHS->MBB->succ_size();
4044 return cl > cr ? -1 : 1;
4047 return LHS->MBB->getNumber() <
RHS->MBB->getNumber() ? -1 : 1;
4052 if (!Copy->isCopy())
4055 if (Copy->getOperand(1).isUndef())
4058 Register SrcReg = Copy->getOperand(1).getReg();
4059 Register DstReg = Copy->getOperand(0).getReg();
4067void RegisterCoalescer::lateLiveIntervalUpdate() {
4073 if (!DeadDefs.
empty())
4074 eliminateDeadDefs();
4076 ToBeUpdated.
clear();
4079bool RegisterCoalescer::copyCoalesceWorkList(
4081 bool Progress =
false;
4093 bool Success = joinCopy(
MI, Again, CurrentErasedInstrs);
4099 if (!CurrentErasedInstrs.
empty()) {
4101 if (
MI && CurrentErasedInstrs.
count(
MI))
4105 if (
MI && CurrentErasedInstrs.
count(
MI))
4116 assert(Copy.isCopyLike());
4119 if (&
MI != &Copy &&
MI.isCopyLike())
4124bool RegisterCoalescer::applyTerminalRule(
const MachineInstr &Copy)
const {
4129 unsigned SrcSubReg = 0, DstSubReg = 0;
4130 if (!
isMoveInstr(*
TRI, &Copy, SrcReg, DstReg, SrcSubReg, DstSubReg))
4151 if (&
MI == &Copy || !
MI.isCopyLike() ||
MI.getParent() != OrigBB)
4154 unsigned OtherSrcSubReg = 0, OtherSubReg = 0;
4158 if (OtherReg == SrcReg)
4159 OtherReg = OtherSrcReg;
4178 const unsigned PrevSize = WorkList.
size();
4179 if (JoinGlobalCopies) {
4185 if (!
MI.isCopyLike())
4187 bool ApplyTerminalRule = applyTerminalRule(
MI);
4189 if (ApplyTerminalRule)
4194 if (ApplyTerminalRule)
4201 LocalWorkList.
append(LocalTerminals.
begin(), LocalTerminals.
end());
4208 if (MII.isCopyLike()) {
4209 if (applyTerminalRule(MII))
4222 if (copyCoalesceWorkList(CurrList))
4224 std::remove(WorkList.
begin() + PrevSize, WorkList.
end(),
nullptr),
4228void RegisterCoalescer::coalesceLocals() {
4229 copyCoalesceWorkList(LocalWorkList);
4234 LocalWorkList.clear();
4237void RegisterCoalescer::joinAllIntervals() {
4238 LLVM_DEBUG(
dbgs() <<
"********** JOINING INTERVALS ***********\n");
4239 assert(WorkList.
empty() && LocalWorkList.empty() &&
"Old data still around.");
4241 std::vector<MBBPriorityInfo> MBBs;
4242 MBBs.reserve(MF->size());
4244 MBBs.push_back(MBBPriorityInfo(&
MBB,
Loops->getLoopDepth(&
MBB),
4250 unsigned CurrDepth = std::numeric_limits<unsigned>::max();
4251 for (MBBPriorityInfo &
MBB : MBBs) {
4253 if (JoinGlobalCopies &&
MBB.Depth < CurrDepth) {
4255 CurrDepth =
MBB.Depth;
4257 copyCoalesceInMBB(
MBB.MBB);
4259 lateLiveIntervalUpdate();
4264 while (copyCoalesceWorkList(WorkList))
4266 lateLiveIntervalUpdate();
4276 RegisterCoalescer Impl(&LIS,
SI, &
Loops);
4288bool RegisterCoalescerLegacy::runOnMachineFunction(
MachineFunction &MF) {
4289 auto *LIS = &getAnalysis<LiveIntervalsWrapperPass>().getLIS();
4290 auto *
Loops = &getAnalysis<MachineLoopInfoWrapperPass>().getLI();
4291 auto *SIWrapper = getAnalysisIfAvailable<SlotIndexesWrapperPass>();
4292 SlotIndexes *
SI = SIWrapper ? &SIWrapper->getSI() :
nullptr;
4293 RegisterCoalescer Impl(LIS,
SI,
Loops);
4294 return Impl.run(MF);
4298 LLVM_DEBUG(
dbgs() <<
"********** REGISTER COALESCER **********\n"
4299 <<
"********** Function: " << fn.
getName() <<
'\n');
4311 dbgs() <<
"* Skipped as it exposes functions that returns twice.\n");
4331 unsigned SubReg = DebugPHI.second.SubReg;
4333 PHIValPos
P = {
SI,
Reg, SubReg};
4334 PHIValToPos.
insert(std::make_pair(DebugPHI.first,
P));
4335 RegToPHIIdx[
Reg].push_back(DebugPHI.first);
4344 MF->
verify(LIS,
SI,
"Before register coalescing", &
errs());
4346 DbgVRegToValues.
clear();
4382 assert((S.LaneMask & ~MaxMask).none());
4393 auto it = PHIValToPos.
find(
p.first);
4395 p.second.Reg = it->second.Reg;
4396 p.second.SubReg = it->second.SubReg;
4399 PHIValToPos.
clear();
4400 RegToPHIIdx.
clear();
4405 MF->
verify(LIS,
SI,
"After register coalescing", &
errs());
MachineInstrBuilder & UseMI
MachineInstrBuilder MachineInstrBuilder & DefMI
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file implements the BitVector class.
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file defines the DenseSet and SmallDenseSet classes.
const HexagonInstrInfo * TII
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
A common definition of LaneBitmask for use in TableGen and CodeGen.
Register const TargetRegisterInfo * TRI
Promote Memory to Register
#define INITIALIZE_PASS_DEPENDENCY(depName)
#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)
#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)
static cl::opt< bool > UseTerminalRule("terminal-rule", cl::desc("Apply the terminal rule"), cl::init(true), cl::Hidden)
static cl::opt< cl::boolOrDefault > EnableGlobalCopies("join-globalcopies", cl::desc("Coalesce copies that span blocks (default=subtarget)"), cl::init(cl::BOU_UNSET), cl::Hidden)
Temporary flag to test global copy optimization.
static bool isLocalCopy(MachineInstr *Copy, const LiveIntervals *LIS)
static bool isSplitEdge(const MachineBasicBlock *MBB)
Return true if this block should be vacated by the coalescer to eliminate branches.
static int compareMBBPriority(const MBBPriorityInfo *LHS, const MBBPriorityInfo *RHS)
C-style comparator that sorts first based on the loop depth of the basic block (the unsigned),...
static cl::opt< unsigned > LargeIntervalSizeThreshold("large-interval-size-threshold", cl::Hidden, cl::desc("If the valnos size of an interval is larger than the threshold, " "it is regarded as a large interval. "), cl::init(100))
static bool isDefInSubRange(LiveInterval &LI, SlotIndex Def)
Check if any of the subranges of LI contain a definition at Def.
static std::pair< bool, bool > addSegmentsWithValNo(LiveRange &Dst, VNInfo *DstValNo, const LiveRange &Src, const VNInfo *SrcValNo)
Copy segments with value number SrcValNo from liverange Src to live range @Dst and use value number D...
static bool isLiveThrough(const LiveQueryResult Q)
static bool isTerminalReg(Register DstReg, const MachineInstr &Copy, const MachineRegisterInfo *MRI)
Check if DstReg is a terminal node.
static cl::opt< bool > VerifyCoalescing("verify-coalescing", cl::desc("Verify machine instrs before and after register coalescing"), cl::Hidden)
register Register static false bool isMoveInstr(const TargetRegisterInfo &tri, const MachineInstr *MI, Register &Src, Register &Dst, unsigned &SrcSub, unsigned &DstSub)
static cl::opt< bool > EnableJoinSplits("join-splitedges", cl::desc("Coalesce copies on split edges (default=subtarget)"), cl::Hidden)
Temporary flag to test critical edge unsplitting.
static cl::opt< bool > EnableJoining("join-liveintervals", cl::desc("Coalesce copies (default=true)"), cl::init(true), cl::Hidden)
static cl::opt< unsigned > LargeIntervalFreqThreshold("large-interval-freq-threshold", cl::Hidden, cl::desc("For a large interval, if it is coalesced with other live " "intervals many times more than the threshold, stop its " "coalescing to control the compile time. "), cl::init(256))
static cl::opt< unsigned > LateRematUpdateThreshold("late-remat-update-threshold", cl::Hidden, cl::desc("During rematerialization for a copy, if the def instruction has " "many other copy uses to be rematerialized, delay the multiple " "separate live interval update work and do them all at once after " "all those rematerialization are done. It will save a lot of " "repeated work. "), cl::init(100))
SI Optimize VGPR LiveRange
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static DenseMap< Register, std::vector< std::pair< SlotIndex, MachineInstr * > > > buildVRegToDbgValueMap(MachineFunction &MF, const LiveIntervals *Liveness)
static void shrinkToUses(LiveInterval &LI, LiveIntervals &LIS)
PassT::Result * getCachedResult(IRUnitT &IR) const
Get the cached result of an analysis pass for a given IR unit.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
Represent the analysis usage information of a pass.
AnalysisUsage & addPreservedID(const void *ID)
AnalysisUsage & addUsedIfAvailable()
Add the specified Pass class to the set of analyses used by this pass.
AnalysisUsage & addRequired()
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
LLVM_ABI void setPreservesCFG()
This function should be called by the pass, iff they do not:
bool test(unsigned Idx) const
Returns true if bit Idx is set.
Represents analyses that only rely on functions' control flow.
A helper class for register coalescers.
unsigned getDstIdx() const
Return the subregister index that DstReg will be coalesced into, or 0.
bool isFlipped() const
Return true when getSrcReg is the register being defined by the original copy instruction.
bool isPartial() const
Return true if the original copy instruction did not copy the full register, but was a subreg operati...
bool flip()
Swap SrcReg and DstReg.
bool isPhys() const
Return true if DstReg is a physical register.
bool isCrossClass() const
Return true if DstReg is virtual and NewRC is a smaller register class than DstReg's.
Register getDstReg() const
Return the register (virtual or physical) that will remain after coalescing.
bool isCoalescable(const MachineInstr *) const
Return true if MI is a copy instruction that will become an identity copy after coalescing.
const TargetRegisterClass * getNewRC() const
Return the register class of the coalesced register.
bool setRegisters(const MachineInstr *)
Set registers to match the copy instruction MI.
unsigned getSrcIdx() const
Return the subregister index that SrcReg will be coalesced into, or 0.
Register getSrcReg() const
Return the virtual register that will be coalesced away.
iterator find(const_arg_type_t< KeyT > Val)
bool erase(const KeyT &Val)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
bool isAsCheapAsAMove(const MachineInstr &MI) const override
A live range for subregisters.
LiveInterval - This class represents the liveness of a register, or stack slot.
LLVM_ABI void removeEmptySubRanges()
Removes all subranges without any segments (subranges without segments are not considered valid and s...
bool hasSubRanges() const
Returns true if subregister liveness information is available.
SubRange * createSubRangeFrom(BumpPtrAllocator &Allocator, LaneBitmask LaneMask, const LiveRange &CopyFrom)
Like createSubRange() but the new range is filled with a copy of the liveness information in CopyFrom...
iterator_range< subrange_iterator > subranges()
LLVM_ABI void refineSubRanges(BumpPtrAllocator &Allocator, LaneBitmask LaneMask, std::function< void(LiveInterval::SubRange &)> Apply, const SlotIndexes &Indexes, const TargetRegisterInfo &TRI, unsigned ComposeSubRegIdx=0)
Refines the subranges to support LaneMask.
LLVM_ABI void computeSubRangeUndefs(SmallVectorImpl< SlotIndex > &Undefs, LaneBitmask LaneMask, const MachineRegisterInfo &MRI, const SlotIndexes &Indexes) const
For a given lane mask LaneMask, compute indexes at which the lane is marked undefined by subregister ...
SubRange * createSubRange(BumpPtrAllocator &Allocator, LaneBitmask LaneMask)
Creates a new empty subregister live range.
LLVM_ABI void clearSubRanges()
Removes all subregister liveness information.
bool hasInterval(Register Reg) const
SlotIndex getMBBStartIdx(const MachineBasicBlock *mbb) const
Return the first index in the given basic block.
MachineInstr * getInstructionFromIndex(SlotIndex index) const
Returns the instruction associated with the given index.
LLVM_ABI bool hasPHIKill(const LiveInterval &LI, const VNInfo *VNI) const
Returns true if VNI is killed by any PHI-def values in LI.
SlotIndex InsertMachineInstrInMaps(MachineInstr &MI)
LLVM_ABI bool checkRegMaskInterference(const LiveInterval &LI, BitVector &UsableRegs)
Test if LI is live across any register mask instructions, and compute a bit mask of physical register...
SlotIndexes * getSlotIndexes() const
SlotIndex getInstructionIndex(const MachineInstr &Instr) const
Returns the base index of the given instruction.
void RemoveMachineInstrFromMaps(MachineInstr &MI)
VNInfo::Allocator & getVNInfoAllocator()
SlotIndex getMBBEndIdx(const MachineBasicBlock *mbb) const
Return the last index in the given basic block.
LiveInterval & getInterval(Register Reg)
LLVM_ABI void pruneValue(LiveRange &LR, SlotIndex Kill, SmallVectorImpl< SlotIndex > *EndPoints)
If LR has a live value at Kill, prune its live range by removing any liveness reachable from Kill.
void removeInterval(Register Reg)
Interval removal.
LiveRange & getRegUnit(MCRegUnit Unit)
Return the live range for register unit Unit.
LLVM_ABI MachineBasicBlock * intervalIsInOneMBB(const LiveInterval &LI) const
If LI is confined to a single basic block, return a pointer to that block.
LiveRange * getCachedRegUnit(MCRegUnit Unit)
Return the live range for register unit Unit if it has already been computed, or nullptr if it hasn't...
LLVM_ABI void removeVRegDefAt(LiveInterval &LI, SlotIndex Pos)
Remove value number and related live segments of LI and its subranges that start at position Pos.
LLVM_ABI bool shrinkToUses(LiveInterval *li, SmallVectorImpl< MachineInstr * > *dead=nullptr)
After removing some uses of a register, shrink its live range to just the remaining uses.
LLVM_ABI void extendToIndices(LiveRange &LR, ArrayRef< SlotIndex > Indices, ArrayRef< SlotIndex > Undefs)
Extend the live range LR to reach all points in Indices.
LLVM_ABI void dump() const
LLVM_ABI void removePhysRegDefAt(MCRegister Reg, SlotIndex Pos)
Remove value numbers and related live segments starting at position Pos that are part of any liverang...
MachineBasicBlock * getMBBFromIndex(SlotIndex index) const
bool isLiveInToMBB(const LiveRange &LR, const MachineBasicBlock *mbb) const
SlotIndex ReplaceMachineInstrInMaps(MachineInstr &MI, MachineInstr &NewMI)
Result of a LiveRange query.
VNInfo * valueOutOrDead() const
Returns the value alive at the end of the instruction, if any.
VNInfo * valueIn() const
Return the value that is live-in to the instruction.
VNInfo * valueOut() const
Return the value leaving the instruction, if any.
VNInfo * valueDefined() const
Return the value defined by this instruction, if any.
SlotIndex endPoint() const
Return the end point of the last live range segment to interact with the instruction,...
bool isKill() const
Return true if the live-in value is killed by this instruction.
Callback methods for LiveRangeEdit owners.
SlotIndex rematerializeAt(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI, Register DestReg, const Remat &RM, const TargetRegisterInfo &, bool Late=false, unsigned SubIdx=0, MachineInstr *ReplaceIndexMI=nullptr, LaneBitmask UsedLanes=LaneBitmask::getAll())
rematerializeAt - Rematerialize RM.ParentVNI into DestReg by inserting an instruction into MBB before...
void eliminateDeadDefs(SmallVectorImpl< MachineInstr * > &Dead, ArrayRef< Register > RegsBeingSpilled={})
eliminateDeadDefs - Try to delete machine instructions that are now dead (allDefsAreDead returns true...
This class represents the liveness of a register, stack slot, etc.
VNInfo * getValNumInfo(unsigned ValNo)
getValNumInfo - Returns pointer to the specified val#.
LLVM_ABI iterator addSegment(Segment S)
Add the specified Segment to this range, merging segments as appropriate.
Segments::iterator iterator
const Segment * getSegmentContaining(SlotIndex Idx) const
Return the segment that contains the specified index, or null if there is none.
LLVM_ABI void join(LiveRange &Other, const int *ValNoAssignments, const int *RHSValNoAssignments, SmallVectorImpl< VNInfo * > &NewVNInfo)
join - Join two live ranges (this, and other) together.
bool liveAt(SlotIndex index) const
LLVM_ABI VNInfo * createDeadDef(SlotIndex Def, VNInfo::Allocator &VNIAlloc)
createDeadDef - Make sure the range has a value defined at Def.
LLVM_ABI void removeValNo(VNInfo *ValNo)
removeValNo - Remove all the segments defined by the specified value#.
bool overlaps(const LiveRange &other) const
overlaps - Return true if the intersection of the two live ranges is not empty.
LiveQueryResult Query(SlotIndex Idx) const
Query Liveness at Idx.
VNInfo * getVNInfoBefore(SlotIndex Idx) const
getVNInfoBefore - Return the VNInfo that is live up to but not necessarily including Idx,...
bool verify() const
Walk the range and assert if any invariants fail to hold.
LLVM_ABI VNInfo * MergeValueNumberInto(VNInfo *V1, VNInfo *V2)
MergeValueNumberInto - This method is called when two value numbers are found to be equivalent.
unsigned getNumValNums() const
bool containsOneValue() const
iterator FindSegmentContaining(SlotIndex Idx)
Return an iterator to the segment that contains the specified index, or end() if there is none.
void assign(const LiveRange &Other, BumpPtrAllocator &Allocator)
Copies values numbers and live segments from Other into this range.
VNInfo * getVNInfoAt(SlotIndex Idx) const
getVNInfoAt - Return the VNInfo that is live at Idx, or NULL.
LLVM_ABI iterator find(SlotIndex Pos)
find - Return an iterator pointing to the first segment that ends after Pos, or end().
Describe properties that are true of each instruction in the target description file.
unsigned getNumOperands() const
Return the number of declared MachineOperands for this MachineInstruction.
MCRegUnitRootIterator enumerates the root registers of a register unit.
bool isValid() const
Check if the iterator is at the end of the list.
Wrapper class representing physical registers. Should be passed by value.
An RAII based helper class to modify MachineFunctionProperties when running pass.
bool isInlineAsmBrIndirectTarget() const
Returns true if this is the indirect dest of an INLINEASM_BR.
unsigned pred_size() const
LLVM_ABI bool hasEHPadSuccessor() const
bool isEHPad() const
Returns true if the block is a landing pad.
LLVM_ABI instr_iterator insert(instr_iterator I, MachineInstr *M)
Insert MI into the instruction list before I, possibly inside a bundle.
LLVM_ABI iterator getFirstTerminator()
Returns an iterator to the first terminator instruction of this basic block.
unsigned succ_size() const
LLVM_ABI instr_iterator erase(instr_iterator I)
Remove an instruction from the instruction list and delete it.
iterator_range< pred_iterator > predecessors()
void splice(iterator Where, MachineBasicBlock *Other, iterator From)
Take an instruction from MBB 'Other' at the position From, and insert it into this MBB right before '...
MachineInstrBundleIterator< MachineInstr > iterator
LLVM_ABI StringRef getName() const
Return the name of the corresponding LLVM basic block, or an empty string.
Analysis pass which computes a MachineDominatorTree.
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
StringRef getName() const
getName - Return the name of the corresponding LLVM function.
bool exposesReturnsTwice() const
exposesReturnsTwice - Returns true if the function calls setjmp or any other similar functions with a...
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
bool verify(Pass *p=nullptr, const char *Banner=nullptr, raw_ostream *OS=nullptr, bool AbortOnError=true) const
Run the current MachineFunction through the machine code verifier, useful for debugger use.
DenseMap< unsigned, DebugPHIRegallocPos > DebugPHIPositions
Map of debug instruction numbers to the position of their PHI instructions during register allocation...
const MachineInstrBuilder & addReg(Register RegNo, RegState Flags={}, unsigned SubReg=0) const
Add a new virtual register operand.
Representation of each machine instruction.
unsigned getOpcode() const
Returns the opcode of this MachineInstr.
LLVM_ABI void setRegisterDefReadUndef(Register Reg, bool IsUndef=true)
Mark all subregister defs of register Reg with the undef flag.
bool isImplicitDef() const
const MachineBasicBlock * getParent() const
bool isCopyLike() const
Return true if the instruction behaves like a copy.
filtered_mop_range all_defs()
Returns an iterator range over all operands that are (explicit or implicit) register defs.
LLVM_ABI std::pair< bool, bool > readsWritesVirtualRegister(Register Reg, SmallVectorImpl< unsigned > *Ops=nullptr) const
Return a pair of bools (reads, writes) indicating if this instruction reads or writes Reg.
bool isRegTiedToDefOperand(unsigned UseOpIdx, unsigned *DefOpIdx=nullptr) const
Return true if the use operand of the specified index is tied to a def operand.
LLVM_ABI bool isSafeToMove(bool &SawStore) const
Return true if it is safe to move this instruction.
bool isDebugInstr() const
unsigned getNumOperands() const
Retuns the total number of operands.
LLVM_ABI void addOperand(MachineFunction &MF, const MachineOperand &Op)
Add the specified operand to the instruction.
bool isRegTiedToUseOperand(unsigned DefOpIdx, unsigned *UseOpIdx=nullptr) const
Given the index of a register def operand, check if the register def is tied to a source operand,...
LLVM_ABI int findRegisterUseOperandIdx(Register Reg, const TargetRegisterInfo *TRI, bool isKill=false) const
Returns the operand index that is a use of the specific register or -1 if it is not found.
const MCInstrDesc & getDesc() const
Returns the target instruction descriptor of this MachineInstr.
bool isCommutable(QueryType Type=IgnoreBundle) const
Return true if this may be a 2- or 3-address instruction (of the form "X = op Y, Z,...
LLVM_ABI void setDesc(const MCInstrDesc &TID)
Replace the instruction descriptor (thus opcode) of the current instruction with a new one.
LLVM_ABI void substituteRegister(Register FromReg, Register ToReg, unsigned SubIdx, const TargetRegisterInfo &RegInfo)
Replace all occurrences of FromReg with ToReg:SubIdx, properly composing subreg indices where necessa...
const DebugLoc & getDebugLoc() const
Returns the debug location id of this MachineInstr.
LLVM_ABI void removeOperand(unsigned OpNo)
Erase an operand from an instruction, leaving it with one fewer operand than it started with.
const MachineOperand & getOperand(unsigned i) const
LLVM_ABI int findRegisterDefOperandIdx(Register Reg, const TargetRegisterInfo *TRI, bool isDead=false, bool Overlap=false) const
Returns the operand index that is a def of the specified register or -1 if it is not found.
LLVM_ABI MachineInstrBundleIterator< MachineInstr > eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
void setDebugLoc(DebugLoc DL)
Replace current source information with new such.
LLVM_ABI bool allDefsAreDead() const
Return true if all the defs of this instruction are dead.
Analysis pass that exposes the MachineLoopInfo for a machine function.
MachineOperand class - Representation of each machine instruction operand.
void setSubReg(unsigned subReg)
unsigned getSubReg() const
LLVM_ABI void substVirtReg(Register Reg, unsigned SubIdx, const TargetRegisterInfo &)
substVirtReg - Substitute the current register with the virtual subregister Reg:SubReg.
bool readsReg() const
readsReg - Returns true if this operand reads the previous value of its register.
bool isReg() const
isReg - Tests if this is a MO_Register operand.
void setIsDead(bool Val=true)
bool isImm() const
isImm - Tests if this is a MO_Immediate operand.
void setIsKill(bool Val=true)
MachineInstr * getParent()
getParent - Return the instruction that this operand belongs to.
LLVM_ABI void substPhysReg(MCRegister Reg, const TargetRegisterInfo &)
substPhysReg - Substitute the current register with the physical register Reg, taking any existing Su...
void setIsUndef(bool Val=true)
bool isEarlyClobber() const
Register getReg() const
getReg - Returns the register number.
static MachineOperand CreateReg(Register Reg, bool isDef, bool isImp=false, bool isKill=false, bool isDead=false, bool isUndef=false, bool isEarlyClobber=false, unsigned SubReg=0, bool isDebug=false, bool isInternalRead=false, bool isRenamable=false)
MachineRegisterInfo - Keep track of information for virtual and physical registers,...
LLVM_ABI bool hasOneNonDBGUse(Register RegNo) const
hasOneNonDBGUse - Return true if there is exactly one non-Debug use of the specified register.
LLVM_ABI bool recomputeRegClass(Register Reg)
recomputeRegClass - Try to find a legal super-class of Reg's register class that still satisfies the ...
reg_instr_iterator reg_instr_begin(Register RegNo) const
const TargetRegisterClass * getRegClass(Register Reg) const
Return the register class of the specified virtual register.
LLVM_ABI void clearKillFlags(Register Reg) const
clearKillFlags - Iterate over all the uses of the given register and clear the kill flag from the Mac...
LLVM_ABI MachineInstr * getVRegDef(Register Reg) const
getVRegDef - Return the machine instr that defines the specified virtual register or null if none is ...
iterator_range< use_nodbg_iterator > use_nodbg_operands(Register Reg) const
static reg_instr_iterator reg_instr_end()
bool use_nodbg_empty(Register RegNo) const
use_nodbg_empty - Return true if there are no non-Debug instructions using the specified register.
bool isReserved(MCRegister PhysReg) const
isReserved - Returns true when PhysReg is a reserved register.
bool reg_nodbg_empty(Register RegNo) const
reg_nodbg_empty - Return true if the only instructions using or defining Reg are Debug instructions.
use_instr_nodbg_iterator use_instr_nodbg_begin(Register RegNo) const
bool shouldTrackSubRegLiveness(const TargetRegisterClass &RC) const
Returns true if liveness for register class RC should be tracked at the subregister level.
LLVM_ABI void setRegClass(Register Reg, const TargetRegisterClass *RC)
setRegClass - Set the register class of the specified virtual register.
LLVM_ABI LaneBitmask getMaxLaneMaskForVReg(Register Reg) const
Returns a mask covering all bits that can appear in lane masks of subregisters of the virtual registe...
LLVM_ABI bool isConstantPhysReg(MCRegister PhysReg) const
Returns true if PhysReg is unallocatable and constant throughout the function.
iterator_range< reg_nodbg_iterator > reg_nodbg_operands(Register Reg) const
defusechain_instr_iterator< true, true, false, true > reg_instr_iterator
reg_instr_iterator/reg_instr_begin/reg_instr_end - Walk all defs and uses of the specified register,...
LLVM_ABI const TargetRegisterClass * constrainRegClass(Register Reg, const TargetRegisterClass *RC, unsigned MinNumRegs=0)
constrainRegClass - Constrain the register class of the specified virtual register to be a common sub...
iterator_range< use_iterator > use_operands(Register Reg) const
iterator_range< reg_instr_nodbg_iterator > reg_nodbg_instructions(Register Reg) const
Represent a mutable reference to an array (0 or more elements consecutively in memory),...
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
bool isProperSubClass(const TargetRegisterClass *RC) const
isProperSubClass - Returns true if RC has a legal super-class with more allocatable registers.
unsigned getNumAllocatableRegs(const TargetRegisterClass *RC) const
getNumAllocatableRegs - Returns the number of actually allocatable registers in RC in the current fun...
LLVM_ABI void runOnMachineFunction(const MachineFunction &MF, bool Rev=false)
runOnFunction - Prepare to answer questions about MF.
PreservedAnalyses run(MachineFunction &MF, MachineFunctionAnalysisManager &MFAM)
Wrapper class representing virtual and physical registers.
MCRegister asMCReg() const
Utility to check-convert this value to a MCRegister.
constexpr bool isVirtual() const
Return true if the specified register number is in the virtual register namespace.
constexpr unsigned id() const
constexpr bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
SlotIndex - An opaque wrapper around machine indexes.
static bool isSameInstr(SlotIndex A, SlotIndex B)
isSameInstr - Return true if A and B refer to the same instruction.
bool isEarlyClobber() const
isEarlyClobber - Returns true if this is an early-clobber slot.
bool isValid() const
Returns true if this is a valid index.
SlotIndex getBaseIndex() const
Returns the base index for associated with this index.
SlotIndex getPrevSlot() const
Returns the previous slot in the index list.
SlotIndex getRegSlot(bool EC=false) const
Returns the register use/def slot in the current instruction for a normal or early-clobber def.
bool isDead() const
isDead - Returns true if this is a dead def kill slot.
MachineBasicBlock * getMBBFromIndex(SlotIndex index) const
Returns the basic block which the given index falls in.
SlotIndex getMBBEndIdx(unsigned Num) const
Returns the index past the last valid index in the given basic block.
SlotIndex getNextNonNullIndex(SlotIndex Index)
Returns the next non-null index, if one exists.
SlotIndex getInstructionIndex(const MachineInstr &MI, bool IgnoreBundle=false) const
Returns the base index for the given instruction.
SlotIndex getMBBStartIdx(unsigned Num) const
Returns the first index in the given basic block number.
SlotIndex getIndexBefore(const MachineInstr &MI) const
getIndexBefore - Returns the index of the last indexed instruction before MI, or the start index of i...
MachineInstr * getInstructionFromIndex(SlotIndex index) const
Returns the instruction for the given index, or null if the given index has no instruction associated...
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void reserve(size_type N)
iterator erase(const_iterator CI)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
pointer data()
Return a pointer to the vector's buffer, even if empty().
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
TargetInstrInfo - Interface to description of machine instruction set.
static const unsigned CommuteAnyOperandIndex
bool contains(Register Reg) const
Return true if the specified register is included in this register class.
LaneBitmask getLaneMask() const
Returns the combination of all lane masks of register in this class.
TargetRegisterInfo base class - We assume that the target defines a static array of TargetRegisterDes...
TargetSubtargetInfo - Generic base class for all target subtargets.
virtual bool enableJoinGlobalCopies() const
True if the subtarget should enable joining global copies.
virtual const TargetInstrInfo * getInstrInfo() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
VNInfo - Value Number Information.
void markUnused()
Mark this value as unused.
BumpPtrAllocator Allocator
bool isUnused() const
Returns true if this value is unused.
unsigned id
The ID number of this value.
SlotIndex def
The index of the defining instruction.
bool isPHIDef() const
Returns true if this value is defined by a PHI instruction (or was, PHI instructions may have been eliminated).
static bool allUsesAvailableAt(const MachineInstr *MI, SlotIndex UseIdx, const LiveIntervals &LIS, const MachineRegisterInfo &MRI, const TargetInstrInfo &TII)
std::pair< iterator, bool > insert(const ValueT &V)
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
self_iterator getIterator()
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
This namespace contains all of the command line option processing machinery.
initializer< Ty > init(const Ty &Val)
DXILDebugInfoMap run(Module &M)
NodeAddr< DefNode * > Def
This is an optimization pass for GlobalISel generic memory operations.
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
LLVM_ABI char & RegisterCoalescerID
RegisterCoalescer - This pass merges live ranges to eliminate copies.
LLVM_ABI char & MachineDominatorsID
MachineDominators - This pass is a machine dominators analysis pass.
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting iteration.
Printable PrintLaneMask(LaneBitmask LaneMask)
Create Printable object to print LaneBitmasks on a raw_ostream.
LLVM_ABI Printable printRegUnit(MCRegUnit Unit, const TargetRegisterInfo *TRI)
Create Printable object to print register units on a raw_ostream.
AnalysisManager< MachineFunction > MachineFunctionAnalysisManager
auto unique(Range &&R, Predicate P)
auto upper_bound(R &&Range, T &&Value)
Provide wrappers to std::upper_bound which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI PreservedAnalyses getMachineFunctionPassPreservedAnalyses()
Returns the minimum set of Analyses that all machine function passes must preserve.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool none_of(R &&Range, UnaryPredicate P)
Provide wrappers to std::none_of which take ranges instead of having to pass begin/end explicitly.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference sizeof(SmallVector&lt;T, 0&gt;).
@ Success
The lock was released successfully.
MutableArrayRef(T &OneElt) -> MutableArrayRef< T >
LLVM_ABI raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
DWARFExpression::Operation Op
auto make_second_range(ContainerTy &&c)
Given a container of pairs, return a range over the second elements.
bool is_contained(R &&Range, const E &Element)
Returns true if Element is found in Range.
LLVM_ABI void eraseInstrs(ArrayRef< MachineInstr * > DeadInstrs, MachineRegisterInfo &MRI, LostDebugLocObserver *LocObserver=nullptr)
void array_pod_sort(IteratorTy Start, IteratorTy End)
array_pod_sort - This sorts an array with the specified start and end extent.
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
LLVM_ABI Printable printReg(Register Reg, const TargetRegisterInfo *TRI=nullptr, unsigned SubIdx=0, const MachineRegisterInfo *MRI=nullptr)
Prints virtual and physical registers with or without a TRI instance.
LLVM_ABI Printable printMBBReference(const MachineBasicBlock &MBB)
Prints a machine basic block reference.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
static constexpr LaneBitmask getLane(unsigned Lane)
static constexpr LaneBitmask getAll()
constexpr bool any() const
static constexpr LaneBitmask getNone()
Remat - Information needed to rematerialize at a specific location.
This represents a simple continuous liveness interval for a value.