103#define DEBUG_TYPE "peephole-opt"
107 cl::desc(
"Aggressive extension optimization"));
111 cl::desc(
"Disable the peephole optimizer"));
118 cl::desc(
"Disable advanced copy optimization"));
122 cl::desc(
"Disable non-allocatable physical register copy optimization"));
128 cl::desc(
"Limit the length of PHI chains to lookup"));
134 cl::desc(
"Maximum length of recurrence chain when evaluating the benefit "
135 "of commuting operands"));
137STATISTIC(NumReuse,
"Number of extension results reused");
139STATISTIC(NumImmFold,
"Number of move immediate folded");
142STATISTIC(NumUncoalescableCopies,
"Number of uncoalescable copies optimized");
143STATISTIC(NumRewrittenCopies,
"Number of copies rewritten");
144STATISTIC(NumNAPhysCopies,
"Number of non-allocatable physical copies removed");
148class ValueTrackerResult;
149class RecurrenceInstr;
155 int CurrentSrcIdx = 0;
158 virtual ~Rewriter() =
default;
190 virtual bool RewriteCurrentSource(
Register NewReg,
unsigned NewSubReg) = 0;
194class CopyRewriter :
public Rewriter {
197 assert(
MI.isCopy() &&
"Expected copy instruction");
199 ~CopyRewriter()
override =
default;
203 if (++CurrentSrcIdx > 1)
207 const MachineOperand &MOSrc = CopyLike.getOperand(CurrentSrcIdx);
210 const MachineOperand &MODef = CopyLike.getOperand(0);
215 bool RewriteCurrentSource(
Register NewReg,
unsigned NewSubReg)
override {
216 MachineOperand &MOSrc = CopyLike.getOperand(CurrentSrcIdx);
225class UncoalescableRewriter :
public Rewriter {
229 UncoalescableRewriter(MachineInstr &
MI) :
Rewriter(
MI) {
230 NumDefs =
MI.getDesc().getNumDefs();
240 if (CurrentSrcIdx == NumDefs)
243 while (CopyLike.getOperand(CurrentSrcIdx).isDead()) {
245 if (CurrentSrcIdx == NumDefs)
251 const MachineOperand &MODef = CopyLike.getOperand(CurrentSrcIdx);
258 bool RewriteCurrentSource(
Register NewReg,
unsigned NewSubReg)
override {
264class InsertSubregRewriter :
public Rewriter {
267 assert(
MI.isInsertSubreg() &&
"Invalid instruction");
284 if (CurrentSrcIdx == 2)
288 const MachineOperand &MOInsertedReg = CopyLike.getOperand(2);
290 const MachineOperand &MODef = CopyLike.getOperand(0);
298 (
unsigned)CopyLike.getOperand(3).getImm());
302 bool RewriteCurrentSource(
Register NewReg,
unsigned NewSubReg)
override {
303 if (CurrentSrcIdx != 2)
306 MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx);
314class ExtractSubregRewriter :
public Rewriter {
315 const TargetInstrInfo &TII;
318 ExtractSubregRewriter(MachineInstr &
MI,
const TargetInstrInfo &TII)
320 assert(
MI.isExtractSubreg() &&
"Invalid instruction");
331 if (CurrentSrcIdx == 1)
335 const MachineOperand &MOExtractedReg = CopyLike.getOperand(1);
344 const MachineOperand &MODef = CopyLike.getOperand(0);
349 bool RewriteCurrentSource(
Register NewReg,
unsigned NewSubReg)
override {
351 if (CurrentSrcIdx != 1)
354 CopyLike.getOperand(CurrentSrcIdx).setReg(NewReg);
365 CopyLike.removeOperand(2);
367 CopyLike.setDesc(TII.get(TargetOpcode::COPY));
370 CopyLike.getOperand(CurrentSrcIdx + 1).setImm(NewSubReg);
376class RegSequenceRewriter :
public Rewriter {
379 assert(
MI.isRegSequence() &&
"Invalid instruction");
403 if (
static_cast<unsigned>(CurrentSrcIdx) >= CopyLike.getNumOperands())
406 const MachineOperand &MOInsertedReg = CopyLike.getOperand(CurrentSrcIdx);
407 Src.Reg = MOInsertedReg.
getReg();
412 Dst.SubReg = CopyLike.getOperand(CurrentSrcIdx + 1).getImm();
414 const MachineOperand &MODef = CopyLike.getOperand(0);
416 assert(MODef.
getSubReg() == 0 &&
"cannot have subregister def in SSA");
420 bool RewriteCurrentSource(
Register NewReg,
unsigned NewSubReg)
override {
421 MachineOperand &MO = CopyLike.getOperand(CurrentSrcIdx);
429 const TargetInstrInfo *TII =
nullptr;
430 const TargetRegisterInfo *TRI =
nullptr;
431 MachineRegisterInfo *MRI =
nullptr;
432 MachineDominatorTree *DT =
nullptr;
433 MachineLoopInfo *MLI =
nullptr;
436 PeepholeOptimizer(MachineDominatorTree *DT, MachineLoopInfo *MLI)
437 : DT(DT), MLI(MLI) {}
439 bool run(MachineFunction &MF);
441 using RewriteMapTy = SmallDenseMap<RegSubRegPair, ValueTrackerResult>;
444 using RecurrenceCycle = SmallVector<RecurrenceInstr, 4>;
447 bool optimizeCmpInstr(MachineInstr &
MI, MachineFunction &MF,
448 SmallPtrSet<MachineInstr *, 16> &LocalMIs);
449 bool optimizeExtInstr(MachineInstr &
MI, MachineBasicBlock &
MBB,
450 SmallPtrSetImpl<MachineInstr *> &LocalMIs);
451 bool optimizeSelect(MachineInstr &
MI,
452 SmallPtrSetImpl<MachineInstr *> &LocalMIs);
453 bool optimizeCondBranch(MachineInstr &
MI);
455 bool optimizeCoalescableCopyImpl(
Rewriter &&CpyRewriter);
456 bool optimizeCoalescableCopy(MachineInstr &
MI);
457 bool optimizeUncoalescableCopy(MachineInstr &
MI,
458 SmallPtrSetImpl<MachineInstr *> &LocalMIs);
459 bool optimizeRecurrence(MachineInstr &
PHI);
460 bool findNextSource(
const TargetRegisterClass *DefRC,
unsigned DefSubReg,
462 bool isMoveImmediate(MachineInstr &
MI, SmallSet<Register, 4> &ImmDefRegs,
463 DenseMap<Register, MachineInstr *> &ImmDefMIs);
464 bool foldImmediate(MachineInstr &
MI, SmallSet<Register, 4> &ImmDefRegs,
465 DenseMap<Register, MachineInstr *> &ImmDefMIs,
473 const SmallSet<Register, 2> &TargetReg,
474 RecurrenceCycle &RC);
481 bool foldRedundantCopy(MachineInstr &
MI);
492 foldRedundantNAPhysCopy(MachineInstr &
MI,
493 DenseMap<Register, MachineInstr *> &NAPhysToVirtMIs);
495 bool isLoadFoldable(MachineInstr &
MI,
496 SmallSet<Register, 16> &FoldAsLoadDefCandidates);
501 MachineInstr *foldLoadInto(MachineFunction &MF, MachineInstr &
MI,
503 SmallPtrSet<MachineInstr *, 16> &LocalMIs);
507 static bool isCoalescableCopy(
const MachineInstr &
MI) {
510 return MI.isCopy() ||
512 MI.isExtractSubreg()));
517 static bool isUncoalescableCopy(
const MachineInstr &
MI) {
519 MI.isInsertSubregLike() ||
520 MI.isExtractSubregLike()));
523 MachineInstr &rewriteSource(MachineInstr &CopyLike,
RegSubRegPair Def,
524 RewriteMapTy &RewriteMap);
528 DenseMap<RegSubRegPair, MachineInstr *> CopySrcMIs;
531 void MF_HandleInsertion(MachineInstr &
MI)
override {}
538 unsigned SrcSubReg =
MI.getOperand(1).getSubReg();
539 if (!SrcReg.
isVirtual() && !MRI->isConstantPhysReg(SrcReg))
548 void deleteChangedCopy(MachineInstr &
MI) {
550 if (!getCopySrc(
MI, SrcPair))
553 auto It = CopySrcMIs.find(SrcPair);
554 if (It != CopySrcMIs.end() && It->second == &
MI)
555 CopySrcMIs.erase(It);
558 void MF_HandleRemoval(MachineInstr &
MI)
override { deleteChangedCopy(
MI); }
560 void MF_HandleChangeDesc(MachineInstr &
MI,
const MCInstrDesc &TID)
override {
561 deleteChangedCopy(
MI);
569 PeepholeOptimizerLegacy() : MachineFunctionPass(ID) {}
571 bool runOnMachineFunction(MachineFunction &MF)
override;
573 void getAnalysisUsage(AnalysisUsage &AU)
const override {
584 MachineFunctionProperties getRequiredProperties()
const override {
585 return MachineFunctionProperties().setIsSSA();
595class RecurrenceInstr {
597 using IndexPair = std::pair<unsigned, unsigned>;
599 RecurrenceInstr(MachineInstr *MI) : MI(MI) {}
600 RecurrenceInstr(MachineInstr *MI,
unsigned Idx1,
unsigned Idx2)
601 : MI(MI), CommutePair(std::make_pair(Idx1, Idx2)) {}
603 MachineInstr *getMI()
const {
return MI; }
604 std::optional<IndexPair> getCommutePair()
const {
return CommutePair; }
608 std::optional<IndexPair> CommutePair;
614class ValueTrackerResult {
620 const MachineInstr *Inst =
nullptr;
623 ValueTrackerResult() =
default;
625 ValueTrackerResult(
Register Reg,
unsigned SubReg) { addSource(
Reg, SubReg); }
627 bool isValid()
const {
return getNumSources() > 0; }
629 void setInst(
const MachineInstr *
I) { Inst =
I; }
630 const MachineInstr *getInst()
const {
return Inst; }
637 void addSource(
Register SrcReg,
unsigned SrcSubReg) {
641 void setSource(
int Idx,
Register SrcReg,
unsigned SrcSubReg) {
642 assert(Idx < getNumSources() &&
"Reg pair source out of index");
646 int getNumSources()
const {
return RegSrcs.size(); }
651 assert(Idx < getNumSources() &&
"Reg source out of index");
652 return RegSrcs[Idx].Reg;
655 unsigned getSrcSubReg(
int Idx)
const {
656 assert(Idx < getNumSources() &&
"SubReg source out of index");
657 return RegSrcs[Idx].SubReg;
661 if (
Other.getInst() != getInst())
664 if (
Other.getNumSources() != getNumSources())
667 for (
int i = 0, e =
Other.getNumSources(); i != e; ++i)
668 if (
Other.getSrcReg(i) != getSrcReg(i) ||
669 Other.getSrcSubReg(i) != getSrcSubReg(i))
694 const MachineInstr *Def =
nullptr;
706 const MachineRegisterInfo &MRI;
709 const TargetInstrInfo *TII;
712 ValueTrackerResult getNextSourceImpl();
715 ValueTrackerResult getNextSourceFromCopy();
718 ValueTrackerResult getNextSourceFromBitcast();
721 ValueTrackerResult getNextSourceFromRegSequence();
724 ValueTrackerResult getNextSourceFromInsertSubreg();
727 ValueTrackerResult getNextSourceFromExtractSubreg();
730 ValueTrackerResult getNextSourceFromSubregToReg();
733 ValueTrackerResult getNextSourceFromPHI();
745 ValueTracker(
Register Reg,
unsigned DefSubReg,
const MachineRegisterInfo &MRI,
746 const TargetInstrInfo *TII =
nullptr)
747 : DefSubReg(DefSubReg), Reg(Reg), MRI(MRI), TII(TII) {
748 if (!Reg.isPhysical()) {
749 Def = MRI.getVRegDef(Reg);
750 DefIdx = MRI.def_begin(Reg).getOperandNo();
759 ValueTrackerResult getNextSource();
764char PeepholeOptimizerLegacy::ID = 0;
769 "Peephole Optimizations",
false,
false)
783bool PeepholeOptimizer::optimizeExtInstr(
788 if (!
TII->isCoalescableExtInstr(
MI, SrcReg, DstReg, SubIdx))
801 DstRC =
TRI->getSubClassWithSubReg(DstRC, SubIdx);
811 TRI->getSubClassWithSubReg(MRI->
getRegClass(SrcReg), SubIdx) !=
nullptr;
817 ReachedBBs.insert(UI.getParent());
825 bool ExtendLife =
true;
827 MachineInstr *UseMI = UseMO.getParent();
831 if (UseMI->isPHI()) {
837 if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
857 if (
UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
861 if (UseMBB == &
MBB) {
863 if (!LocalMIs.count(
UseMI))
864 Uses.push_back(&UseMO);
865 }
else if (ReachedBBs.count(UseMBB)) {
868 Uses.push_back(&UseMO);
872 ExtendedUses.push_back(&UseMO);
881 if (ExtendLife && !ExtendedUses.empty())
883 Uses.append(ExtendedUses.begin(), ExtendedUses.end());
888 SmallPtrSet<MachineBasicBlock *, 4> PHIBBs;
893 for (MachineInstr &UI : MRI->use_nodbg_instructions(DstReg))
895 PHIBBs.insert(UI.getParent());
897 const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
898 for (MachineOperand *UseMO : Uses) {
899 MachineInstr *UseMI = UseMO->getParent();
900 MachineBasicBlock *UseMBB = UseMI->getParent();
901 if (PHIBBs.count(UseMBB))
906 MRI->clearKillFlags(DstReg);
907 MRI->constrainRegClass(DstReg, DstRC);
925 RC = MRI->getRegClass(UseMI->getOperand(0).getReg());
927 Register NewVR = MRI->createVirtualRegister(RC);
928 BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
929 TII->get(TargetOpcode::COPY), NewVR)
930 .addReg(DstReg, {}, SubIdx);
934 UseMO->setReg(NewVR);
947bool PeepholeOptimizer::optimizeCmpInstr(
953 int64_t CmpMask, CmpValue;
960 if (!
TII->optimizeCompareInstr(
MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI))
970 MachineInstr *LoadMI = MRI->
getVRegDef(SrcReg);
973 foldLoadInto(MF, *FlagProducer, SrcReg, LocalMIs);
980bool PeepholeOptimizer::optimizeSelect(
981 MachineInstr &
MI, SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
982 assert(
MI.isSelect() &&
"Should only be called when MI->isSelect() is true");
983 if (!
TII->optimizeSelect(
MI, LocalMIs))
986 MI.eraseFromParent();
992bool PeepholeOptimizer::optimizeCondBranch(MachineInstr &
MI) {
993 return TII->optimizeCondBranch(
MI);
1009bool PeepholeOptimizer::findNextSource(
const TargetRegisterClass *DefRC,
1012 RewriteMapTy &RewriteMap) {
1021 unsigned PHICount = 0;
1028 ValueTracker ValTracker(CurSrcPair.
Reg, CurSrcPair.
SubReg, *MRI,
TII);
1033 ValueTrackerResult Res = ValTracker.getNextSource();
1039 auto [InsertPt, WasInserted] = RewriteMap.try_emplace(CurSrcPair, Res);
1042 const ValueTrackerResult &CurSrcRes = InsertPt->second;
1044 assert(CurSrcRes == Res &&
"ValueTrackerResult found must match");
1047 if (CurSrcRes.getNumSources() > 1) {
1049 <<
"findNextSource: found PHI cycle, aborting...\n");
1057 unsigned NumSrcs = Res.getNumSources();
1065 for (
unsigned i = 0; i < NumSrcs; ++i)
1070 CurSrcPair = Res.getSrc(0);
1079 const TargetRegisterClass *SrcRC = MRI->
getRegClass(CurSrcPair.
Reg);
1080 if (!
TRI->shouldRewriteCopySrc(DefRC, DefSubReg, SrcRC,
1086 if (PHICount > 0 && CurSrcPair.
SubReg != 0)
1092 }
while (!SrcToLook.
empty());
1095 return CurSrcPair.
Reg !=
Reg;
1107 assert(!SrcRegs.
empty() &&
"No sources to create a PHI instruction?");
1112 assert(SrcRegs[0].SubReg == 0 &&
"should not have subreg operand");
1116 TII.get(TargetOpcode::PHI), NewVR);
1118 unsigned MBBOpIdx = 2;
1120 MIB.
addReg(RegPair.Reg, {}, RegPair.SubReg);
1141 const PeepholeOptimizer::RewriteMapTy &RewriteMap,
1142 bool HandleMultipleSources =
true) {
1145 ValueTrackerResult Res = RewriteMap.
lookup(LookupSrc);
1151 unsigned NumSrcs = Res.getNumSources();
1153 LookupSrc.
Reg = Res.getSrcReg(0);
1154 LookupSrc.
SubReg = Res.getSrcSubReg(0);
1159 if (!HandleMultipleSources)
1165 for (
unsigned i = 0; i < NumSrcs; ++i) {
1166 RegSubRegPair PHISrc(Res.getSrcReg(i), Res.getSrcSubReg(i));
1184bool PeepholeOptimizer::optimizeCoalescableCopyImpl(
Rewriter &&CpyRewriter) {
1190 while (CpyRewriter.getNextRewritableSource(TrackPair, Dst)) {
1191 if (Dst.Reg.isPhysical()) {
1199 const TargetRegisterClass *DefRC = MRI->
getRegClass(Dst.Reg);
1202 RewriteMapTy RewriteMap;
1205 if (!findNextSource(DefRC, Dst.SubReg, TrackPair, RewriteMap))
1213 "should not rewrite source to original value");
1222 const TargetRegisterClass *WithSubRC =
1223 TRI->getSubClassWithSubReg(RC, NewSrc.
SubReg);
1230 if (CpyRewriter.RewriteCurrentSource(NewSrc.
Reg, NewSrc.
SubReg)) {
1242 NumRewrittenCopies +=
Changed;
1257bool PeepholeOptimizer::optimizeCoalescableCopy(MachineInstr &
MI) {
1258 assert(isCoalescableCopy(
MI) &&
"Invalid argument");
1259 assert(
MI.getDesc().getNumDefs() == 1 &&
1260 "Coalescer can understand multiple defs?!");
1261 const MachineOperand &MODef =
MI.getOperand(0);
1266 switch (
MI.getOpcode()) {
1267 case TargetOpcode::COPY:
1268 return optimizeCoalescableCopyImpl(CopyRewriter(
MI));
1269 case TargetOpcode::INSERT_SUBREG:
1270 return optimizeCoalescableCopyImpl(InsertSubregRewriter(
MI));
1271 case TargetOpcode::EXTRACT_SUBREG:
1272 return optimizeCoalescableCopyImpl(ExtractSubregRewriter(
MI, *
TII));
1273 case TargetOpcode::REG_SEQUENCE:
1274 return optimizeCoalescableCopyImpl(RegSequenceRewriter(
MI));
1277 if (
MI.isBitcast() ||
MI.isRegSequenceLike() ||
MI.isInsertSubregLike() ||
1278 MI.isExtractSubregLike())
1279 return optimizeCoalescableCopyImpl(UncoalescableRewriter(
MI));
1289MachineInstr &PeepholeOptimizer::rewriteSource(MachineInstr &CopyLike,
1291 RewriteMapTy &RewriteMap) {
1292 assert(!
Def.Reg.isPhysical() &&
"We do not rewrite physical registers");
1302 const TargetRegisterClass *NewSrcRC = MRI->
getRegClass(NewSrc.
Reg);
1303 const TargetRegisterClass *WithSubRC =
1304 TRI->getSubClassWithSubReg(NewSrcRC, NewSrc.
SubReg);
1313 MachineInstr *NewCopy =
1315 TII->get(TargetOpcode::COPY), NewVReg)
1347bool PeepholeOptimizer::optimizeUncoalescableCopy(
1348 MachineInstr &
MI, SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
1349 assert(isUncoalescableCopy(
MI) &&
"Invalid argument");
1350 UncoalescableRewriter CpyRewriter(
MI);
1355 RewriteMapTy RewriteMap;
1359 while (CpyRewriter.getNextRewritableSource(Src, Def)) {
1362 if (
Def.Reg.isPhysical())
1372 if (!findNextSource(DefRC,
Def.SubReg, Def, RewriteMap))
1381 MachineInstr &NewCopy = rewriteSource(
MI, Def, RewriteMap);
1382 LocalMIs.
insert(&NewCopy);
1387 MI.eraseFromParent();
1388 ++NumUncoalescableCopies;
1395bool PeepholeOptimizer::isLoadFoldable(
1396 MachineInstr &
MI, SmallSet<Register, 16> &FoldAsLoadDefCandidates) {
1397 if (!
MI.canFoldAsLoad() || !
MI.mayLoad())
1399 const MCInstrDesc &MCID =
MI.getDesc();
1416PeepholeOptimizer::foldLoadInto(MachineFunction &MF, MachineInstr &
MI,
1418 SmallPtrSet<MachineInstr *, 16> &LocalMIs) {
1420 MachineInstr *
DefMI =
nullptr;
1421 MachineInstr *CopyMI =
nullptr;
1422 MachineInstr *FoldMI =
TII->optimizeLoadInstr(
MI, MRI,
Reg,
DefMI, CopyMI);
1431 if (
MI.shouldUpdateAdditionalCallInfo())
1433 MI.eraseFromParent();
1440bool PeepholeOptimizer::isMoveImmediate(
1441 MachineInstr &
MI, SmallSet<Register, 4> &ImmDefRegs,
1442 DenseMap<Register, MachineInstr *> &ImmDefMIs) {
1443 const MCInstrDesc &MCID =
MI.getDesc();
1444 if (MCID.
getNumDefs() != 1 || !
MI.getOperand(0).isReg())
1451 if (!
MI.isMoveImmediate() && !
TII->getConstValDefinedInReg(
MI,
Reg, ImmVal))
1462bool PeepholeOptimizer::foldImmediate(
1463 MachineInstr &
MI, SmallSet<Register, 4> &ImmDefRegs,
1464 DenseMap<Register, MachineInstr *> &ImmDefMIs,
bool &
Deleted) {
1466 for (
unsigned i = 0, e =
MI.getDesc().getNumOperands(); i != e; ++i) {
1467 MachineOperand &MO =
MI.getOperand(i);
1476 assert(
II != ImmDefMIs.
end() &&
"couldn't find immediate definition");
1477 if (
TII->foldImmediate(
MI, *
II->second,
Reg, MRI)) {
1489 MI.eraseFromParent();
1513bool PeepholeOptimizer::foldRedundantCopy(MachineInstr &
MI) {
1514 assert(
MI.isCopy() &&
"expected a COPY machine instruction");
1517 if (!getCopySrc(
MI, SrcPair))
1524 if (CopySrcMIs.
insert(std::make_pair(SrcPair, &
MI)).second) {
1529 MachineInstr *PrevCopy = CopySrcMIs.
find(SrcPair)->second;
1532 "Unexpected mismatching subreg!");
1550bool PeepholeOptimizer::isNAPhysCopy(
Register Reg) {
1554bool PeepholeOptimizer::foldRedundantNAPhysCopy(
1555 MachineInstr &
MI, DenseMap<Register, MachineInstr *> &NAPhysToVirtMIs) {
1556 assert(
MI.isCopy() &&
"expected a COPY machine instruction");
1563 if (isNAPhysCopy(SrcReg) && DstReg.
isVirtual()) {
1567 NAPhysToVirtMIs.
insert({SrcReg, &
MI});
1571 if (!(SrcReg.
isVirtual() && isNAPhysCopy(DstReg)))
1575 auto PrevCopy = NAPhysToVirtMIs.
find(DstReg);
1576 if (PrevCopy == NAPhysToVirtMIs.
end()) {
1579 LLVM_DEBUG(
dbgs() <<
"NAPhysCopy: intervening clobber forbids erasing "
1585 if (PrevDstReg == SrcReg) {
1598 NAPhysToVirtMIs.
erase(PrevCopy);
1607bool PeepholeOptimizer::findTargetRecurrence(
1608 Register Reg,
const SmallSet<Register, 2> &TargetRegs,
1609 RecurrenceCycle &RC) {
1627 unsigned Idx =
MI.findRegisterUseOperandIdx(
Reg,
nullptr);
1631 if (
MI.getDesc().getNumDefs() != 1)
1634 MachineOperand &DefOp =
MI.getOperand(0);
1641 unsigned TiedUseIdx;
1642 if (!
MI.isRegTiedToUseOperand(0, &TiedUseIdx))
1645 if (Idx == TiedUseIdx) {
1646 RC.push_back(RecurrenceInstr(&
MI));
1647 return findTargetRecurrence(DefOp.
getReg(), TargetRegs, RC);
1651 if (
TII->findCommutedOpIndices(
MI, Idx, CommIdx) && CommIdx == TiedUseIdx) {
1652 RC.push_back(RecurrenceInstr(&
MI, Idx, CommIdx));
1653 return findTargetRecurrence(DefOp.
getReg(), TargetRegs, RC);
1678bool PeepholeOptimizer::optimizeRecurrence(MachineInstr &
PHI) {
1679 SmallSet<Register, 2> TargetRegs;
1680 for (
unsigned Idx = 1; Idx <
PHI.getNumOperands(); Idx += 2) {
1681 MachineOperand &MO =
PHI.getOperand(Idx);
1688 if (findTargetRecurrence(
PHI.getOperand(0).getReg(), TargetRegs, RC)) {
1692 for (
auto &RI : RC) {
1694 auto CP = RI.getCommutePair();
1697 TII->commuteInstruction(*(RI.getMI()),
false, (*CP).first,
1714 PeepholeOptimizer Impl(DT, MLI);
1726bool PeepholeOptimizerLegacy::runOnMachineFunction(
MachineFunction &MF) {
1730 ? &getAnalysis<MachineDominatorTreeWrapperPass>().getDomTree()
1732 auto *MLI = &getAnalysis<MachineLoopInfoWrapperPass>().getLI();
1733 PeepholeOptimizer Impl(DT, MLI);
1734 return Impl.run(MF);
1739 LLVM_DEBUG(
dbgs() <<
"********** PEEPHOLE OPTIMIZER **********\n");
1753 bool SeenMoveImm =
false;
1786 if (
MI->isDebugInstr())
1789 if (
MI->isPosition())
1792 if (IsLoopHeader &&
MI->isPHI()) {
1793 if (optimizeRecurrence(*
MI)) {
1799 if (!
MI->isCopy()) {
1800 for (
const MachineOperand &MO :
MI->operands()) {
1804 if (MO.
isDef() && isNAPhysCopy(
Reg)) {
1806 if (Def != NAPhysToVirtMIs.
end()) {
1810 <<
"NAPhysCopy: invalidating because of " << *
MI);
1811 NAPhysToVirtMIs.
erase(Def);
1816 for (
auto &RegMI : NAPhysToVirtMIs) {
1820 <<
"NAPhysCopy: invalidating because of " << *
MI);
1821 NAPhysToVirtMIs.erase(Def);
1828 if (
MI->isImplicitDef() ||
MI->isKill())
1831 if (
MI->isInlineAsm() ||
MI->hasUnmodeledSideEffects()) {
1838 NAPhysToVirtMIs.clear();
1841 if (
MI->isCompare() && optimizeCmpInstr(*
MI, MF, LocalMIs)) {
1847 if ((isUncoalescableCopy(*
MI) &&
1848 optimizeUncoalescableCopy(*
MI, LocalMIs)) ||
1849 (
MI->isSelect() && optimizeSelect(*
MI, LocalMIs))) {
1856 if (
MI->isConditionalBranch() && optimizeCondBranch(*
MI)) {
1861 if (isCoalescableCopy(*
MI) && optimizeCoalescableCopy(*
MI)) {
1867 if (
MI->isCopy() && (foldRedundantCopy(*
MI) ||
1868 foldRedundantNAPhysCopy(*
MI, NAPhysToVirtMIs))) {
1871 MI->eraseFromParent();
1876 if (isMoveImmediate(*
MI, ImmDefRegs, ImmDefMIs)) {
1898 if (!isLoadFoldable(*
MI, FoldAsLoadDefCandidates) &&
1899 !FoldAsLoadDefCandidates.
empty()) {
1906 const MCInstrDesc &MIDesc =
MI->getDesc();
1907 for (
unsigned i = MIDesc.
getNumDefs(); i !=
MI->getNumOperands(); ++i) {
1908 const MachineOperand &MOp =
MI->getOperand(i);
1912 if (FoldAsLoadDefCandidates.
count(FoldAsLoadDefReg)) {
1915 Register FoldedReg = FoldAsLoadDefReg;
1916 if (MachineInstr *FoldMI =
1917 foldLoadInto(MF, *
MI, FoldAsLoadDefReg, LocalMIs)) {
1918 FoldAsLoadDefCandidates.
erase(FoldedReg);
1930 if (
MI->isLoadFoldBarrier()) {
1932 FoldAsLoadDefCandidates.
clear();
1937 MF.resetDelegate(
this);
1941ValueTrackerResult ValueTracker::getNextSourceFromCopy() {
1942 assert(
Def->isCopy() &&
"Invalid definition");
1947 assert(
Def->getNumOperands() -
Def->getNumImplicitOperands() == 2 &&
1948 "Invalid number of operands");
1949 assert(!
Def->hasImplicitDef() &&
"Only implicit uses are allowed");
1950 assert(!
Def->getOperand(DefIdx).getSubReg() &&
"no subregister defs in SSA");
1953 const MachineOperand &Src =
Def->getOperand(1);
1955 return ValueTrackerResult();
1958 unsigned SubReg = Src.getSubReg();
1961 SubReg =
TRI->composeSubRegIndices(SubReg, DefSubReg);
1965 const TargetRegisterClass *RegRC = MRI.
getRegClass(SrcReg);
1966 if (!
TRI->isSubRegValidForRegClass(RegRC, SubReg))
1967 return ValueTrackerResult();
1969 if (!
TRI->getSubReg(SrcReg, SubReg))
1970 return ValueTrackerResult();
1974 return ValueTrackerResult(SrcReg, SubReg);
1977ValueTrackerResult ValueTracker::getNextSourceFromBitcast() {
1978 assert(
Def->isBitcast() &&
"Invalid definition");
1981 if (
Def->mayRaiseFPException() ||
Def->hasUnmodeledSideEffects())
1982 return ValueTrackerResult();
1985 if (
Def->getDesc().getNumDefs() != 1)
1986 return ValueTrackerResult();
1988 assert(!
Def->getOperand(DefIdx).getSubReg() &&
"no subregister defs in SSA");
1990 unsigned SrcIdx =
Def->getNumOperands();
1991 for (
unsigned OpIdx = DefIdx + 1, EndOpIdx = SrcIdx;
OpIdx != EndOpIdx;
1993 const MachineOperand &MO =
Def->getOperand(
OpIdx);
1999 assert(!MO.
isDef() &&
"We should have skipped all the definitions by now");
2000 if (SrcIdx != EndOpIdx)
2002 return ValueTrackerResult();
2008 if (SrcIdx >=
Def->getNumOperands())
2009 return ValueTrackerResult();
2011 const MachineOperand &DefOp =
Def->getOperand(DefIdx);
2016 if (
UseMI.isSubregToReg())
2017 return ValueTrackerResult();
2020 const MachineOperand &Src =
Def->getOperand(SrcIdx);
2022 return ValueTrackerResult();
2023 return ValueTrackerResult(Src.getReg(), Src.getSubReg());
2026ValueTrackerResult ValueTracker::getNextSourceFromRegSequence() {
2027 assert((
Def->isRegSequence() ||
Def->isRegSequenceLike()) &&
2028 "Invalid definition");
2030 assert(!
Def->getOperand(DefIdx).getSubReg() &&
"illegal subregister def");
2033 if (!
TII->getRegSequenceInputs(*Def, DefIdx, RegSeqInputRegs))
2034 return ValueTrackerResult();
2042 if (RegSeqInput.SubIdx == DefSubReg)
2043 return ValueTrackerResult(RegSeqInput.Reg, RegSeqInput.SubReg);
2051 LaneBitmask DefMask =
TRI->getSubRegIndexLaneMask(DefSubReg);
2052 LaneBitmask ThisOpRegMask =
TRI->getSubRegIndexLaneMask(RegSeqInput.SubIdx);
2058 if ((DefMask & ThisOpRegMask) != DefMask)
2061 unsigned ReverseDefCompose =
2062 TRI->reverseComposeSubRegIndices(RegSeqInput.SubIdx, DefSubReg);
2063 if (!ReverseDefCompose)
2066 unsigned ComposedDefInSrcReg1 =
2067 TRI->composeSubRegIndices(RegSeqInput.SubReg, ReverseDefCompose);
2073 const TargetRegisterClass *SrcRC = MRI.
getRegClass(RegSeqInput.Reg);
2074 if (!
TRI->isSubRegValidForRegClass(SrcRC, ComposedDefInSrcReg1))
2075 return ValueTrackerResult();
2077 return ValueTrackerResult(RegSeqInput.Reg, ComposedDefInSrcReg1);
2083 return ValueTrackerResult();
2086ValueTrackerResult ValueTracker::getNextSourceFromInsertSubreg() {
2087 assert((
Def->isInsertSubreg() ||
Def->isInsertSubregLike()) &&
2088 "Invalid definition");
2089 assert(!
Def->getOperand(DefIdx).getSubReg() &&
"no subreg defs in SSA");
2093 if (!
TII->getInsertSubregInputs(*Def, DefIdx, BaseReg, InsertedReg))
2094 return ValueTrackerResult();
2103 if (InsertedReg.
SubIdx == DefSubReg) {
2104 return ValueTrackerResult(InsertedReg.
Reg, InsertedReg.
SubReg);
2109 const MachineOperand &MODef =
Def->getOperand(DefIdx);
2115 return ValueTrackerResult();
2120 if ((
TRI->getSubRegIndexLaneMask(DefSubReg) &
2121 TRI->getSubRegIndexLaneMask(InsertedReg.
SubIdx))
2123 return ValueTrackerResult();
2126 return ValueTrackerResult(
BaseReg.Reg, DefSubReg);
2129ValueTrackerResult ValueTracker::getNextSourceFromExtractSubreg() {
2130 assert((
Def->isExtractSubreg() ||
Def->isExtractSubregLike()) &&
2131 "Invalid definition");
2138 return ValueTrackerResult();
2141 if (!
TII->getExtractSubregInputs(*Def, DefIdx, ExtractSubregInputReg))
2142 return ValueTrackerResult();
2146 if (ExtractSubregInputReg.
SubReg)
2147 return ValueTrackerResult();
2149 return ValueTrackerResult(ExtractSubregInputReg.
Reg,
2150 ExtractSubregInputReg.
SubIdx);
2153ValueTrackerResult ValueTracker::getNextSourceFromSubregToReg() {
2154 assert(
Def->isSubregToReg() &&
"Invalid definition");
2162 if (DefSubReg !=
Def->getOperand(2).getImm())
2163 return ValueTrackerResult();
2166 if (
Def->getOperand(1).getSubReg())
2167 return ValueTrackerResult();
2169 return ValueTrackerResult(
Def->getOperand(1).getReg(),
2170 Def->getOperand(2).getImm());
2174ValueTrackerResult ValueTracker::getNextSourceFromPHI() {
2175 assert(
Def->isPHI() &&
"Invalid definition");
2176 ValueTrackerResult Res;
2179 for (
unsigned i = 1, e =
Def->getNumOperands(); i < e; i += 2) {
2180 const MachineOperand &MO =
Def->getOperand(i);
2185 return ValueTrackerResult();
2192ValueTrackerResult ValueTracker::getNextSourceImpl() {
2193 assert(Def &&
"This method needs a valid definition");
2195 assert(((
Def->getOperand(DefIdx).isDef() &&
2196 (DefIdx < Def->
getDesc().getNumDefs() ||
2197 Def->getDesc().isVariadic())) ||
2198 Def->getOperand(DefIdx).isImplicit()) &&
2201 return getNextSourceFromCopy();
2202 if (
Def->isBitcast())
2203 return getNextSourceFromBitcast();
2207 return ValueTrackerResult();
2208 if (
Def->isRegSequence() ||
Def->isRegSequenceLike())
2209 return getNextSourceFromRegSequence();
2210 if (
Def->isInsertSubreg() ||
Def->isInsertSubregLike())
2211 return getNextSourceFromInsertSubreg();
2212 if (
Def->isExtractSubreg() ||
Def->isExtractSubregLike())
2213 return getNextSourceFromExtractSubreg();
2214 if (
Def->isSubregToReg())
2215 return getNextSourceFromSubregToReg();
2217 return getNextSourceFromPHI();
2218 return ValueTrackerResult();
2221ValueTrackerResult ValueTracker::getNextSource() {
2225 return ValueTrackerResult();
2227 ValueTrackerResult Res = getNextSourceImpl();
2228 if (Res.isValid()) {
2232 bool OneRegSrc = Res.getNumSources() == 1;
2234 Reg = Res.getSrcReg(0);
2246 DefSubReg = Res.getSrcSubReg(0);
for(const MachineOperand &MO :llvm::drop_begin(OldMI.operands(), Desc.getNumOperands()))
MachineInstrBuilder & UseMI
MachineInstrBuilder MachineInstrBuilder & DefMI
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
This file defines the DenseMap class.
const HexagonInstrInfo * TII
A common definition of LaneBitmask for use in TableGen and CodeGen.
TargetInstrInfo::RegSubRegPair RegSubRegPair
Register const TargetRegisterInfo * TRI
Promote Memory to Register
MachineInstr unsigned OpIdx
uint64_t IntrinsicInst * II
#define INITIALIZE_PASS_DEPENDENCY(depName)
#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)
#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)
static cl::opt< unsigned > RewritePHILimit("rewrite-phi-limit", cl::Hidden, cl::init(10), cl::desc("Limit the length of PHI chains to lookup"))
static cl::opt< bool > DisablePeephole("disable-peephole", cl::Hidden, cl::init(false), cl::desc("Disable the peephole optimizer"))
static cl::opt< unsigned > MaxRecurrenceChain("recurrence-chain-limit", cl::Hidden, cl::init(3), cl::desc("Maximum length of recurrence chain when evaluating the benefit " "of commuting operands"))
static cl::opt< bool > DisableNAPhysCopyOpt("disable-non-allocatable-phys-copy-opt", cl::Hidden, cl::init(false), cl::desc("Disable non-allocatable physical register copy optimization"))
static bool isVirtualRegisterOperand(MachineOperand &MO)
\bried Returns true if MO is a virtual register operand.
static MachineInstr & insertPHI(MachineRegisterInfo &MRI, const TargetInstrInfo &TII, const SmallVectorImpl< RegSubRegPair > &SrcRegs, MachineInstr &OrigPHI)
Insert a PHI instruction with incoming edges SrcRegs that are guaranteed to have the same register cl...
static cl::opt< bool > Aggressive("aggressive-ext-opt", cl::Hidden, cl::desc("Aggressive extension optimization"))
static cl::opt< bool > DisableAdvCopyOpt("disable-adv-copy-opt", cl::Hidden, cl::init(false), cl::desc("Disable advanced copy optimization"))
Specifiy whether or not the value tracking looks through complex instructions.
TargetInstrInfo::RegSubRegPairAndIdx RegSubRegPairAndIdx
static RegSubRegPair getNewSource(MachineRegisterInfo *MRI, const TargetInstrInfo *TII, RegSubRegPair Def, const PeepholeOptimizer::RewriteMapTy &RewriteMap, bool HandleMultipleSources=true)
Given a Def.Reg and Def.SubReg pair, use RewriteMap to find the new source to use for rewrite.
Remove Loads Into Fake Uses
static bool isValid(const char C)
Returns true if C is a valid mangled character: <0-9a-zA-Z_>.
This file defines the SmallPtrSet class.
This file defines the SmallSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Virtual Register Rewriter
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
AnalysisUsage & addRequired()
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
LLVM_ABI void setPreservesCFG()
This function should be called by the pass, iff they do not:
Represents analyses that only rely on functions' control flow.
ValueT lookup(const_arg_type_t< KeyT > Val) const
Return the entry for the specified key, or a default constructed value if no such entry exists.
iterator find(const_arg_type_t< KeyT > Val)
bool erase(const KeyT &Val)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
bool analyzeCompare(const MachineInstr &MI, Register &SrcReg, Register &SrcReg2, int64_t &Mask, int64_t &Value) const override
For a comparison instruction, return the source registers in SrcReg and SrcReg2 if having two registe...
bool isLoopHeader(const BlockT *BB) const
unsigned getNumDefs() const
Return the number of MachineOperands that are register definitions.
An RAII based helper class to modify MachineFunctionProperties when running pass.
MachineInstrBundleIterator< MachineInstr > iterator
Analysis pass which computes a MachineDominatorTree.
Analysis pass which computes a MachineDominatorTree.
bool dominates(const MachineInstr *A, const MachineInstr *B) const
MachineFunctionPass - This class adapts the FunctionPass interface to allow convenient creation of pa...
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - Subclasses that override getAnalysisUsage must call this.
void moveAdditionalCallInfo(const MachineInstr *Old, const MachineInstr *New)
Move the call site info from Old to New call site info.
const TargetSubtargetInfo & getSubtarget() const
getSubtarget - Return the subtarget for which this machine code is being compiled.
StringRef getName() const
getName - Return the name of the corresponding LLVM function.
MachineRegisterInfo & getRegInfo()
getRegInfo - Return information about the registers currently in use.
Function & getFunction()
Return the LLVM function that this machine code represents.
void setDelegate(Delegate *delegate)
Set the delegate.
const MachineInstrBuilder & addReg(Register RegNo, RegState Flags={}, unsigned SubReg=0) const
Add a new virtual register operand.
const MachineInstrBuilder & addMBB(MachineBasicBlock *MBB, unsigned TargetFlags=0) const
Representation of each machine instruction.
const MachineBasicBlock * getParent() const
bool mayLoad(QueryType Type=AnyInBundle) const
Return true if this instruction could possibly read memory.
const DebugLoc & getDebugLoc() const
Returns the debug location id of this MachineInstr.
const MachineOperand & getOperand(unsigned i) const
LLVM_ABI MachineInstrBundleIterator< MachineInstr > eraseFromParent()
Unlink 'this' from the containing basic block and delete it.
bool canFoldAsLoad(QueryType Type=IgnoreBundle) const
Return true for instructions that can be folded as memory operands in other instructions.
Analysis pass that exposes the MachineLoopInfo for a machine function.
MachineOperand class - Representation of each machine instruction operand.
void setSubReg(unsigned subReg)
unsigned getSubReg() const
bool isReg() const
isReg - Tests if this is a MO_Register operand.
bool isRegMask() const
isRegMask - Tests if this is a MO_RegisterMask operand.
MachineBasicBlock * getMBB() const
LLVM_ABI void setReg(Register Reg)
Change the register this operand corresponds to.
MachineInstr * getParent()
getParent - Return the instruction that this operand belongs to.
void setIsUndef(bool Val=true)
Register getReg() const
getReg - Returns the register number.
static bool clobbersPhysReg(const uint32_t *RegMask, MCRegister PhysReg)
clobbersPhysReg - Returns true if this RegMask clobbers PhysReg.
const uint32_t * getRegMask() const
getRegMask - Returns a bit mask of registers preserved by this RegMask operand.
unsigned getOperandNo() const
getOperandNo - Return the operand # of this MachineOperand in its MachineInstr.
MachineRegisterInfo - Keep track of information for virtual and physical registers,...
LLVM_ABI bool hasOneNonDBGUse(Register RegNo) const
hasOneNonDBGUse - Return true if there is exactly one non-Debug use of the specified register.
use_nodbg_iterator use_nodbg_begin(Register RegNo) const
LLVM_ABI void markUsesInDebugValueAsUndef(Register Reg) const
markUsesInDebugValueAsUndef - Mark every DBG_VALUE referencing the specified register as undefined wh...
const TargetRegisterClass * getRegClass(Register Reg) const
Return the register class of the specified virtual register.
LLVM_ABI void clearKillFlags(Register Reg) const
clearKillFlags - Iterate over all the uses of the given register and clear the kill flag from the MachineOperand.
LLVM_ABI MachineInstr * getVRegDef(Register Reg) const
getVRegDef - Return the machine instr that defines the specified virtual register or null if none is found.
iterator_range< use_nodbg_iterator > use_nodbg_operands(Register Reg) const
def_iterator def_begin(Register RegNo) const
LLVM_ABI Register createVirtualRegister(const TargetRegisterClass *RegClass, StringRef Name="")
createVirtualRegister - Create and return a new virtual register in the function with the specified register class.
use_instr_nodbg_iterator use_instr_nodbg_begin(Register RegNo) const
LLVM_ABI bool hasOneNonDBGUser(Register RegNo) const
hasOneNonDBGUser - Return true if there is exactly one non-Debug instruction using the specified register.
bool isAllocatable(MCRegister PhysReg) const
isAllocatable - Returns true when PhysReg belongs to an allocatable register class and it hasn't been reserved.
defusechain_iterator< false, true, false, true, false > def_iterator
def_iterator/def_begin/def_end - Walk all defs of the specified register.
iterator_range< use_instr_nodbg_iterator > use_nodbg_instructions(Register Reg) const
static def_iterator def_end()
const TargetRegisterInfo * getTargetRegisterInfo() const
LLVM_ABI const TargetRegisterClass * constrainRegClass(Register Reg, const TargetRegisterClass *RC, unsigned MinNumRegs=0)
constrainRegClass - Constrain the register class of the specified virtual register to be a common sub...
LLVM_ABI void replaceRegWith(Register FromReg, Register ToReg)
replaceRegWith - Replace all instances of FromReg with ToReg in the machine function.
PreservedAnalyses run(MachineFunction &MF, MachineFunctionAnalysisManager &MFAM)
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Wrapper class representing virtual and physical registers.
constexpr bool isVirtual() const
Return true if the specified register number is in the virtual register namespace.
constexpr bool isPhysical() const
Return true if the specified register number is in the physical register namespace.
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less than N).
size_type count(const T &V) const
count - Return 1 if the element is in the set, 0 otherwise.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
TargetInstrInfo - Interface to description of machine instruction set.
static const unsigned CommuteAnyOperandIndex
virtual const TargetInstrInfo * getInstrInfo() const
virtual const TargetRegisterInfo * getRegisterInfo() const =0
Return the target's register information.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
MCInstrDesc const & getDesc(MCInstrInfo const &MCII, MCInst const &MCI)
initializer< Ty > init(const Ty &Val)
DXILDebugInfoMap run(Module &M)
NodeAddr< DefNode * > Def
BaseReg
Stack frame base register. Bit 0 of FREInfo.Info.
This is an optimization pass for GlobalISel generic memory operations.
MachineInstrBuilder BuildMI(MachineFunction &MF, const MIMetadata &MIMD, const MCInstrDesc &MCID)
Builder interface. Specify how to create the initial instruction itself.
AnalysisManager< MachineFunction > MachineFunctionAnalysisManager
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI char & PeepholeOptimizerLegacyID
PeepholeOptimizer - This pass performs peephole optimizations - like extension and comparison elimination.
LLVM_ABI PreservedAnalyses getMachineFunctionPassPreservedAnalyses()
Returns the minimum set of Analyses that all machine function passes must preserve.
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
A pair composed of a pair of a register and a sub-register index, and another sub-register index.
A pair composed of a register and a sub-register index.