// Excerpts from the AArch64 prologue/epilogue emission code (the
// AArch64PrologueEmitter and AArch64EpilogueEmitter classes). Elided
// source is marked with "// ..." or "/* ... */".

#define DEBUG_TYPE "frame-info"

STATISTIC(NumRedZoneFunctions, "Number of functions using red zone");
// In AArch64PrologueEpilogueCommon::requiresGetVGCall:
  return AFI->hasStreamingModeChanges() &&
         // ...
// In AArch64PrologueEpilogueCommon::isVGInstruction:
  unsigned Opc = MBBI->getOpcode();
  if (Opc == AArch64::CNTD_XPiI)
    // ...
  // ...
  if (Opc == AArch64::BL)
    // ...
  // ...
  return Opc == TargetOpcode::COPY;
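// NOTE: VG (the vector granule count) must be spilled so the unwinder can
// restore it in functions that change streaming mode. The save sequence is
// either a CNTD (reading the current VG directly), or -- when VG has to be
// obtained via the SME support routine -- a BL to that routine followed by a
// COPY of its result, which is why all three opcodes count as part of the
// VG save here.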
// In isPartOfZPRCalleeSaves:
  switch (I->getOpcode()) {
  // ...
  case AArch64::LD1B_2Z_IMM:
  case AArch64::ST1B_2Z_IMM:
  case AArch64::STR_ZXI:
  case AArch64::LDR_ZXI:
  case AArch64::PTRUE_C_B:
    // ...
  case AArch64::SEH_SaveZReg:
    // ...
  }
// In isPartOfPPRCalleeSaves:
  switch (I->getOpcode()) {
  // ...
  case AArch64::STR_PXI:
  case AArch64::LDR_PXI:
    // ...
  case AArch64::SEH_SavePReg:
    // ...
  }
// In the AArch64PrologueEpilogueCommon constructor, selecting the SVE stack
// layout:
  if (Subtarget.isTargetWindows() && AFI->getSVECalleeSavedStackSize()) {
    if (AFI->hasStackHazardSlotIndex())
      reportFatalUsageError("SME hazard padding is not supported on Windows");
    SVELayout = SVEStackLayout::CalleeSavesAboveFrameRecord;
  } else if (AFI->hasSplitSVEObjects()) {
    SVELayout = SVEStackLayout::Split;
    // ...
  }
// In convertCalleeSaveRestoreToSPPrePostIncDec: move past any VG save
// instructions before folding the SP update into the first callee save.
  if (AFL.requiresSaveVG(MF)) {
    auto &TLI = *Subtarget.getTargetLowering();
    // ...
  }
  unsigned NewOpc;
  switch (MBBI->getOpcode()) {
  // ...
  case AArch64::STPXi:
    NewOpc = AArch64::STPXpre;
    break;
  case AArch64::STPDi:
    NewOpc = AArch64::STPDpre;
    break;
  case AArch64::STPQi:
    NewOpc = AArch64::STPQpre;
    break;
  case AArch64::STRXui:
    NewOpc = AArch64::STRXpre;
    break;
  case AArch64::STRDui:
    NewOpc = AArch64::STRDpre;
    break;
  case AArch64::STRQui:
    NewOpc = AArch64::STRQpre;
    break;
  case AArch64::LDPXi:
    NewOpc = AArch64::LDPXpost;
    break;
  case AArch64::LDPDi:
    NewOpc = AArch64::LDPDpost;
    break;
  case AArch64::LDPQi:
    NewOpc = AArch64::LDPQpost;
    break;
  case AArch64::LDRXui:
    NewOpc = AArch64::LDRXpost;
    break;
  case AArch64::LDRDui:
    NewOpc = AArch64::LDRDpost;
    break;
  case AArch64::LDRQui:
    NewOpc = AArch64::LDRQpost;
    break;
  }
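// NOTE: the switch above maps each offset-based callee-save opcode to its
// SP pre-indexed (stores) or post-indexed (loads) form, so the stack bump is
// folded into the memory access itself. Illustrative effect on the assembly:
//   sub  sp, sp, #16            // separate SP update
//   stp  x29, x30, [sp]         // save at [sp]
// becomes:
//   stp  x29, x30, [sp, #-16]!  // allocate and save in one instruction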
  int64_t MinOffset, MaxOffset;
  bool Success = static_cast<const AArch64InstrInfo *>(TII)->getMemOpInfo(
      NewOpc, Scale, Width, MinOffset, MaxOffset);
  // ...
  // If the first access isn't right where we want SP, or the stack bump does
  // not fit the new opcode's immediate range, emit a separate SP update.
  if (MBBI->getOperand(MBBI->getNumOperands() - 1).getImm() != 0 ||
      CSStackSizeInc < MinOffset * (int64_t)Scale.getFixedValue() ||
      CSStackSizeInc > MaxOffset * (int64_t)Scale.getFixedValue()) {
    // ...
    return std::prev(MBBI);
  }
  // ...
  // Drop the SEH pseudo paired with the old instruction.
  auto SEH = std::next(MBBI);
  if (AArch64InstrInfo::isSEHInstruction(*SEH))
    SEH->eraseFromParent();
  unsigned OpndIdx = 0;
  for (unsigned OpndEnd = MBBI->getNumOperands() - 1; OpndIdx < OpndEnd;
       ++OpndIdx)
    MIB.add(MBBI->getOperand(OpndIdx));

  assert(MBBI->getOperand(OpndIdx).getImm() == 0 &&
         "Unexpected immediate offset in first/last callee-save save/restore "
         "instruction!");
  assert(MBBI->getOperand(OpndIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");
  assert(CSStackSizeInc % Scale == 0);
  MIB.addImm(CSStackSizeInc / (int)Scale);
  // ...
  AFL.insertSEH(*MIB, *TII, FrameFlag);
  // ...
  return std::prev(MBB.erase(MBBI));
static void fixupSEHOpcode(MachineBasicBlock::iterator MBBI,
                           unsigned LocalStackSize) {
  MachineOperand *ImmOpnd = nullptr;
  unsigned ImmIdx = MBBI->getNumOperands() - 1;
  switch (MBBI->getOpcode()) {
  // ...
  case AArch64::SEH_SaveFPLR:
  case AArch64::SEH_SaveRegP:
  case AArch64::SEH_SaveReg:
  case AArch64::SEH_SaveFRegP:
  case AArch64::SEH_SaveFReg:
  case AArch64::SEH_SaveAnyRegI:
  case AArch64::SEH_SaveAnyRegIP:
  case AArch64::SEH_SaveAnyRegQP:
  case AArch64::SEH_SaveAnyRegQPX:
    ImmOpnd = &MBBI->getOperand(ImmIdx);
    break;
  }
  if (ImmOpnd)
    ImmOpnd->setImm(ImmOpnd->getImm() + LocalStackSize);
}
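// NOTE: unlike the load/store opcodes fixed up below, SEH pseudo
// instructions carry unscaled byte offsets, so the local stack size is added
// to the immediate directly rather than being divided by an access scale.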
// In AArch64PrologueEpilogueCommon::fixupCalleeSaveRestoreStackOffset:
  if (AArch64InstrInfo::isSEHInstruction(MI))
    return fixupSEHOpcode(MI.getIterator(), LocalStackSize);

  unsigned Opc = MI.getOpcode();
  unsigned Scale;
  switch (Opc) {
  case AArch64::STPXi:
  case AArch64::STRXui:
  case AArch64::STPDi:
  case AArch64::STRDui:
  case AArch64::LDPXi:
  case AArch64::LDRXui:
  case AArch64::LDPDi:
  case AArch64::LDRDui:
    Scale = 8;
    break;
  case AArch64::STPQi:
  case AArch64::STRQui:
  case AArch64::LDPQi:
  case AArch64::LDRQui:
    Scale = 16;
    break;
  default:
    llvm_unreachable("Unexpected callee-save save/restore opcode!");
  }

  unsigned OffsetIdx = MI.getNumExplicitOperands() - 1;
  assert(MI.getOperand(OffsetIdx - 1).getReg() == AArch64::SP &&
         "Unexpected base register in callee-save save/restore instruction!");
  // The last operand is the scaled immediate offset that needs fixing.
  MachineOperand &OffsetOpnd = MI.getOperand(OffsetIdx);
  assert(LocalStackSize % Scale == 0);
  OffsetOpnd.setImm(OffsetOpnd.getImm() + LocalStackSize / Scale);
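// NOTE: when the callee-save area and the locals are allocated with one
// combined SP bump, the callee-save accesses were generated as if SP pointed
// at the callee-save area, so each scaled immediate is biased by
// LocalStackSize / Scale. Illustrative: with 32 bytes of locals,
//   stp x19, x20, [sp, #16]   becomes   stp x19, x20, [sp, #48]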
  // ...
  assert(MBBI != MI.getParent()->end() && "Expecting a valid instruction");
  assert(AArch64InstrInfo::isSEHInstruction(*MBBI) &&
         "Expecting a SEH instruction");
// In AArch64PrologueEpilogueCommon::shouldCombineCSRLocalStackBump:
  if (AFL.homogeneousPrologEpilog(MF))
    return false;
  if (AFI->getLocalStackSize() == 0)
    // ...
  // ...
  if (AFL.needsWinCFI(MF) && AFI->getCalleeSavedStackSize() > 0 &&
      MF.getFunction().hasOptSize())
    // ...
  // 512 is the maximum immediate for stp/ldp that will be used for
  // callee-save save/restores.
  if (StackBumpBytes >= 512 ||
      AFL.windowsRequiresStackProbe(MF, StackBumpBytes))
    // ...
  if (MFI.hasVarSizedObjects())
    // ...
  // ...
  if (AFL.canUseRedZone(MF))
    // ...
  // When there is an SVE area on the stack, always allocate the
  // callee-saves and spills/locals separately.
  if (AFI->hasSVEStackSize())
    // ...
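// NOTE: each early exit above rejects the combined callee-save + locals SP
// bump; it is only used for small, simple frames. Homogeneous prolog/epilog
// helpers, WinCFI-with-optsize, bumps too large for the pre/post-indexed
// immediate, variable-sized objects, red-zone functions, and SVE frames all
// force the two adjustments to stay separate.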
// In getSVEStackFrameSizes: with split SVE objects the PPR and ZPR areas
// keep their own locals; otherwise all SVE locals are attributed to ZPRs.
  return {{PPRCalleeSavesSize, PPRLocalsSize},
          {ZPRCalleeSavesSize, ZPRLocalsSize}};
  // ...
  return {{PPRCalleeSavesSize, /* ... */},
          {ZPRCalleeSavesSize, PPRLocalsSize + ZPRLocalsSize}};
// In getSVEStackAllocations:
  BeforePPRs = SVE.PPR.CalleeSavesSize;
  // ...
  if (SVE.ZPR.CalleeSavesSize)
    AfterPPRs += SVE.PPR.LocalsSize + SVE.ZPR.CalleeSavesSize;
  else
    AfterZPRs += SVE.PPR.LocalsSize;
  // ...
  return {BeforePPRs, AfterPPRs, AfterZPRs};
// In partitionSVECS:
  // ...
      IsEpilogue ? MBB.begin() : MBB.getFirstTerminator();
  auto AdjustI = [&](auto MBBI) { return IsEpilogue ? std::prev(MBBI) : MBBI; };
  // ...
  if (PPRCalleeSavesSize) {
    PPRsI = AdjustI(PPRsI);
    while (/* ... */)
      IsEpilogue ? (--PPRsI) : (++PPRsI);
    // ...
  }
  if (ZPRCalleeSavesSize) {
    ZPRsI = AdjustI(ZPRsI);
    while (/* ... */)
      IsEpilogue ? (--ZPRsI) : (++ZPRsI);
    // ...
  }
  if (IsEpilogue)
    return {{PPRsI, MBBI}, {ZPRsI, PPRsI}};
  return {{MBBI, PPRsI}, {PPRsI, ZPRsI}};
// In the AArch64PrologueEmitter constructor:
  EmitAsyncCFI = AFI->needsAsyncDwarfUnwindInfo(MF);
  // ...
  collectBlockLiveins();
void AArch64PrologueEmitter::collectBlockLiveins() {
  // ...
  PrologueEndI = MBB.begin();
  while (PrologueEndI != MBB.end() &&
         // ...
  if (PrologueEndI != MBB.end()) {
    // ...
void AArch64PrologueEmitter::verifyPrologueClobbers() const {
  if (PrologueEndI == MBB.end())
    return;
  // ...
  for (MachineInstr &MI :
       make_range(MBB.instr_begin(), PrologueEndI->getIterator())) {
    for (auto &Op : MI.operands())
      if (Op.isReg() && Op.isDef())
        assert(!LiveRegs.contains(Op.getReg()) &&
               "live register clobbered by inserted prologue instructions");
  }
}
void AArch64PrologueEmitter::determineLocalsStackSize(
    uint64_t StackSize, uint64_t PrologueSaveSize) {
  AFI->setLocalStackSize(StackSize - PrologueSaveSize);
  // ...
}
static int64_t upperBound(StackOffset Size) {
  static const int64_t MAX_BYTES_PER_SCALABLE_BYTE = 16;
  return Size.getScalable() * MAX_BYTES_PER_SCALABLE_BYTE + Size.getFixed();
}
void AArch64PrologueEmitter::allocateStackSpace(
    MachineBasicBlock::iterator MBBI, int64_t RealignmentPadding,
    StackOffset AllocSize, bool EmitCFI, StackOffset InitialOffset,
    bool FollowupAllocs) {
  // ...
  const uint64_t AndMask = ~(MaxAlign - 1);
  // ...
  Register TargetReg = RealignmentPadding
                           ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                           : /* ... */;
  // ...
  if (RealignmentPadding) {
    // ...
  }

  if (AllocSize.getScalable() == 0 && RealignmentPadding == 0) {
    // ...
    assert(ScratchReg != AArch64::NoRegister);
    // ...
        .addImm(AllocSize.getFixed())
        .addImm(InitialOffset.getFixed())
        .addImm(InitialOffset.getScalable());
    // ...
    if (FollowupAllocs) {
      // ...
    }
    // ...
  }

  // ...
  int64_t ProbeSize = AFI->getStackProbeSize();
  if (upperBound(AllocSize) + RealignmentPadding <= ProbeSize) {
    Register ScratchReg = RealignmentPadding
                              ? AFL.findScratchNonCalleeSaveRegister(&MBB)
                              : /* ... */;
    assert(ScratchReg != AArch64::NoRegister);
    // ...
    if (RealignmentPadding) {
      // ...
      AFI->setStackRealigned(true);
    }
    if (FollowupAllocs || upperBound(AllocSize) + RealignmentPadding >
                              /* ... */) {
      // ...
    }
    // ...
  }

  // ...
  assert(TargetReg != AArch64::NoRegister);
  // ...
  if (RealignmentPadding) {
    // ...
  }
  // ...
      .buildDefCFARegister(AArch64::SP);
  // ...
  if (RealignmentPadding)
    AFI->setStackRealigned(true);
}
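// NOTE: allocateStackSpace picks between three strategies: a single SUB for
// fixed-size allocations that need no probing, an inline probed allocation
// when the worst-case size still fits within one stack-probe interval
// (ProbeSize), and an explicit probing loop otherwise, so that the amount of
// stack left unprobed above SP stays within the permitted bound.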
// AArch64PrologueEmitter::emitPrologue (excerpts):
  // Assume there is no red zone unless this path later proves otherwise.
  AFI->setHasRedZone(false);
  // ...
  if (AFI->getArgumentStackToRestore())
    // ...
  if (AFI->shouldSignReturnAddress(MF)) {
    // ...
    if (!AFL.shouldSignReturnAddressEverywhere(MF)) {
      // ...
    }
    // ...
  }
  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackPrologue(PrologueBeginI, DL);
    // ...
  }
  if (HasFP && AFI->hasSwiftAsyncContext())
    emitSwiftAsyncContextFramePointer(PrologueBeginI, DL);
  // ...
  if (std::optional<int> TBPI = AFI->getTaggedBasePointerIndex())
    AFI->setTaggedBasePointerOffset(-MFI.getObjectOffset(*TBPI));
  else
    AFI->setTaggedBasePointerOffset(MFI.getStackSize());
  // ...
  if (!AFI->hasStackFrame() && !AFL.windowsRequiresStackProbe(MF, NumBytes))
    return emitEmptyStackFramePrologue(NumBytes, PrologueBeginI, DL);
  bool IsWin64 = Subtarget.isCallingConvWin64(F.getCallingConv(), F.isVarArg());
  // ...
  auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
  // ...
  determineLocalsStackSize(NumBytes, PrologueSaveSize);
  // ...
  if (SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord) {
    assert(/* ... */ &&
           "unexpected SVE allocs after PPRs with CalleeSavesAboveFrameRecord");
    // ...
    allocateStackSpace(PrologueBeginI, 0, SaveSize, false, StackOffset{},
                       /* ... */);
    NumBytes -= FixedObject;
    // ...
        MBBI, DL, -AFI->getCalleeSavedStackSize(), EmitAsyncCFI);
    NumBytes -= AFI->getCalleeSavedStackSize();
  } else if (CombineSPBump) {
    assert(!AFL.getSVEStackSize(MF) && "Cannot combine SP bump with SVE");
    // ...
    NumBytes -= PrologueSaveSize;
  } else if (PrologueSaveSize != 0) {
    // ...
        PrologueBeginI, DL, -PrologueSaveSize, EmitAsyncCFI);
    NumBytes -= PrologueSaveSize;
  }
  assert(NumBytes >= 0 && "Negative stack allocation size!?");
  // ...
  auto &TLI = *Subtarget.getTargetLowering();
  // Move past the GPR callee-save instructions (skipping any VG saves),
  // fixing up their SP-relative offsets when the bump was combined.
  while (AfterGPRSavesI != EndI &&
         // ...
    // ...
    fixupCalleeSaveRestoreStackOffset(*AfterGPRSavesI,
                                      AFI->getLocalStackSize());
  // ...
  if (HasFP)
    emitFramePointerSetup(AfterGPRSavesI, DL, FixedObject);
  // ...
  emitCalleeSavedGPRLocations(AfterGPRSavesI);

  // Alignment is required for the parent frame, not the funclet.
  const bool NeedsRealignment =
      // ...
  const int64_t RealignmentPadding =
      (NeedsRealignment && MFI.getMaxAlign() > Align(16))
          ? MFI.getMaxAlign().value() - 16
          : 0;

  if (AFL.windowsRequiresStackProbe(MF, NumBytes + RealignmentPadding))
    emitWindowsStackProbe(AfterGPRSavesI, DL, NumBytes, RealignmentPadding);
  // ...
  auto [PPRRange, ZPRRange] = partitionSVECS(
      MBB, AfterGPRSavesI, PPR.CalleeSavesSize,
      ZPR.CalleeSavesSize, false);
  AfterSVESavesI = ZPRRange.End;
  // ...
  emitCalleeSavedSVELocations(AfterSVESavesI);
  // ...
  allocateStackSpace(PPRRange.Begin, 0, SVEAllocs.BeforePPRs,
                     EmitAsyncCFI && !HasFP, CFAOffset,
                     /* ... */);
  // ...
  assert(PPRRange.End == ZPRRange.Begin &&
         "Expected ZPR callee saves after PPR locals");
  allocateStackSpace(PPRRange.End, 0, SVEAllocs.AfterPPRs,
                     EmitAsyncCFI && !HasFP, CFAOffset,
                     /* ... */);
  // ...
  assert(!(AFL.canUseRedZone(MF) && NeedsRealignment) &&
         "Cannot use redzone with stack realignment");
  if (!AFL.canUseRedZone(MF)) {
    // ...
    allocateStackSpace(AfterSVESavesI, RealignmentPadding, SVEAllocs.AfterZPRs,
                       EmitAsyncCFI && !HasFP, CFAOffset,
                       MFI.hasVarSizedObjects());
  }
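// NOTE: with split SVE objects the prologue interleaves allocation with the
// callee-save stores: SVEAllocs.BeforePPRs is allocated ahead of the
// predicate saves, AfterPPRs between the predicate and vector saves, and
// AfterZPRs (plus any realignment padding) after all SVE saves, once the
// red-zone check above has ruled out skipping the final bump.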
  // ...
  MBB.addLiveIn(AArch64::X1);
  // ...
  if (EmitCFI && !EmitAsyncCFI) {
    // ...
    emitDefineCFAWithFP(AfterSVESavesI, FixedObject);
    // ...
        AFL.getSVEStackSize(MF) +
        // ...
    emitCalleeSavedGPRLocations(AfterSVESavesI);
    emitCalleeSavedSVELocations(AfterSVESavesI);
  }
}
void AArch64PrologueEmitter::emitShadowCallStackPrologue(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  // Shadow call stack prolog: str x30, [x18], #8
  // ...
  MBB.addLiveIn(AArch64::X18);
  // ...
  // Emit a CFI instruction that causes 8 to be subtracted from the value of
  // x18 when unwinding past this frame.
  static const char CFIInst[] = {
      dwarf::DW_CFA_val_expression,
      18, // register
      2,  // length
      static_cast<char>(unsigned(dwarf::DW_OP_breg18)),
      static_cast<char>(-8) & 0x7f, // addend (sleb128)
  };
  // ...
      .buildEscape(StringRef(CFIInst, sizeof(CFIInst)));
}
void AArch64PrologueEmitter::emitSwiftAsyncContextFramePointer(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  case SwiftAsyncFramePointerMode::DeploymentBased:
    if (Subtarget.swiftAsyncContextIsDynamicallySet()) {
      // ...
    }
    // ...
  // ...
  }
}
void AArch64PrologueEmitter::emitEmptyStackFramePrologue(
    int64_t NumBytes, MachineBasicBlock::iterator PrologueBeginI,
    const DebugLoc &DL) const {
  assert(!HasFP && "unexpected function without stack frame but with FP");
  assert(/* ... */ &&
         "unexpected function without stack frame but with SVE objects");
  // ...
  AFI->setLocalStackSize(NumBytes);
  // ...
  if (AFL.canUseRedZone(MF)) {
    AFI->setHasRedZone(true);
    ++NumRedZoneFunctions;
    // ...
  }
  // ...
  MCSymbol *FrameLabel = MF.getContext().createTempSymbol();
  // ...
      .buildDefCFAOffset(NumBytes, FrameLabel);
  // ...
}
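// NOTE: a red-zone function keeps its small stack data in the 128 bytes
// below SP that ABIs supporting a red zone (e.g. Darwin's AArch64 ABI)
// guarantee will not be clobbered asynchronously, so this path can skip the
// SP adjustment entirely and only emit unwind bookkeeping.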
void AArch64PrologueEmitter::emitFramePointerSetup(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL,
    unsigned FixedObject) {
  int64_t FPOffset = AFI->getCalleeSaveBaseToFrameRecordOffset();
  // ...
  FPOffset += AFI->getLocalStackSize();

  if (AFI->hasSwiftAsyncContext()) {
    // ...
    const auto &Attrs = MF.getFunction().getAttributes();
    bool HaveInitialContext = Attrs.hasAttrSomewhere(Attribute::SwiftAsync);
    if (HaveInitialContext)
      MBB.addLiveIn(AArch64::X22);
    Register Reg = HaveInitialContext ? AArch64::X22 : AArch64::XZR;
    // ...
  }
  // ...
  emitDefineCFAWithFP(MBBI, FixedObject);
}
void AArch64PrologueEmitter::emitDefineCFAWithFP(
    MachineBasicBlock::iterator MBBI, unsigned FixedObject) const {
  const int OffsetToFirstCalleeSaveFromFP =
      AFI->getCalleeSaveBaseToFrameRecordOffset() -
      AFI->getCalleeSavedStackSize();
  // ...
      .buildDefCFA(FramePtr, FixedObject - OffsetToFirstCalleeSaveFromFP);
}
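// NOTE: once the frame pointer is established, the CFA can be described as
// FP plus a small constant; unlike an SP-based rule this stays valid even
// when SP later moves (dynamic allocas, realignment), which is why the
// FP-based def_cfa is preferred whenever a frame pointer exists.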
void AArch64PrologueEmitter::emitWindowsStackProbe(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL, int64_t NumBytes,
    int64_t RealignmentPadding) const {
  if (AFI->getSVECalleeSavedStackSize())
    // ...

  // If X15 is live (e.g. it carries an argument), park it in a scratch
  // register for the duration of the probe sequence.
  unsigned X15Scratch = AArch64::NoRegister;
  if (any_of(MBB.liveins(),
             [this](const MachineBasicBlock::RegisterMaskPair &LiveIn) {
               return RegInfo.isSuperOrSubRegisterEq(AArch64::X15,
                                                     LiveIn.PhysReg);
             })) {
    X15Scratch = AFL.findScratchNonCalleeSaveRegister(&MBB, true);
    assert(X15Scratch != AArch64::NoRegister &&
           (X15Scratch < AArch64::X15 || X15Scratch > AArch64::X17));
    // ...
    LiveRegs.removeReg(AArch64::X15);
    // ...
  }

  uint64_t NumWords = (NumBytes + RealignmentPadding) >> 4;
  // ...
  if (NumBytes >= (1 << 28))
    report_fatal_error("Stack size cannot exceed 256MB for stack "
                       "unwinding purposes");

  uint32_t LowNumWords = NumWords & 0xFFFF;
  // ...
  if ((NumWords & 0xFFFF0000) != 0) {
    // ...
        .addImm((NumWords & 0xFFFF0000) >> 16)
    // ...
  }
  // ...
  const char *ChkStk = Subtarget.getChkStkName();
  switch (MF.getTarget().getCodeModel()) {
  // ...
  }
  // ...
  if (RealignmentPadding > 0) {
    if (RealignmentPadding >= 4096) {
      // ...
          .addImm(RealignmentPadding)
      // ...
    } else {
      // ...
          .addImm(RealignmentPadding)
      // ...
    }
    // ...
    uint64_t AndMask = ~(MFI.getMaxAlign().value() - 1);
    // ...
    AFI->setStackRealigned(true);
  }
  // ...
  if (X15Scratch != AArch64::NoRegister) {
    // ...
  }
}
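// NOTE: the Windows AArch64 stack-probe protocol passes the allocation size
// in 16-byte units in x15 (hence the >> 4 above), then calls the __chkstk
// helper, which touches each page and preserves x15 so the prologue can
// finally drop SP by the full amount. The 256MB cap mirrors the fatal error
// above: larger frames cannot be described for unwinding.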
void AArch64PrologueEmitter::emitCalleeSavedGPRLocations(
    MachineBasicBlock::iterator MBBI) {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  for (const auto &Info : CSI) {
    unsigned FrameIdx = Info.getFrameIdx();
    if (MFI.hasScalableStackID(FrameIdx))
      continue;

    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    int64_t Offset = MFI.getObjectOffset(FrameIdx) - AFL.getOffsetOfLocalArea();
    CFIBuilder.buildOffset(Info.getReg(), Offset);
  }
}
void AArch64PrologueEmitter::emitCalleeSavedSVELocations(
    MachineBasicBlock::iterator MBBI) {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  std::optional<int64_t> IncomingVGOffsetFromDefCFA;
  if (AFL.requiresSaveVG(MF)) {
    auto IncomingVG = *find_if(
        reverse(CSI), [](auto &Info) { return Info.getReg() == AArch64::VG; });
    IncomingVGOffsetFromDefCFA = MFI.getObjectOffset(IncomingVG.getFrameIdx()) -
                                 AFL.getOffsetOfLocalArea();
  }

  StackOffset PPRStackSize = AFL.getPPRStackSize(MF);
  for (const auto &Info : CSI) {
    int FI = Info.getFrameIdx();
    if (!MFI.hasScalableStackID(FI))
      continue;

    // ...
    assert(!Info.isSpilledToReg() && "Spilling to registers not implemented");
    MCRegister Reg = Info.getReg();
    // ...
    CFIBuilder.insertCFIInst(
        /* ... */);
  }
}
static bool isFuncletReturnInstr(const MachineInstr &MI) {
  switch (MI.getOpcode()) {
  default:
    return false;
  case AArch64::CATCHRET:
  case AArch64::CLEANUPRET:
    return true;
  }
}

// In the AArch64EpilogueEmitter constructor:
  // ...
  SEHEpilogueStartI = MBB.end();
1327 "expected negative offset (with optional fixed portion)");
1329 if (int64_t FixedOffset =
Offset.getFixed()) {
1345 if (
MBB.end() != EpilogueEndI) {
1346 DL = EpilogueEndI->getDebugLoc();
  int64_t ArgumentStackToRestore = AFL.getArgumentStackToRestore(MF, MBB);
  bool IsWin64 = Subtarget.isCallingConvWin64(MF.getFunction().getCallingConv(),
                                              MF.getFunction().isVarArg());
  // ...
  int64_t AfterCSRPopSize = ArgumentStackToRestore;
  auto PrologueSaveSize = AFI->getCalleeSavedStackSize() + FixedObject;
  // ...
  if (MF.hasEHFunclets())
    AFI->setLocalStackSize(NumBytes - PrologueSaveSize);
  // ...
  auto FirstHomogenousEpilogI = MBB.getFirstTerminator();
  if (FirstHomogenousEpilogI != MBB.begin()) {
    auto HomogeneousEpilog = std::prev(FirstHomogenousEpilogI);
    if (HomogeneousEpilog->getOpcode() == AArch64::HOM_Epilog)
      FirstHomogenousEpilogI = HomogeneousEpilog;
  }
  // ...
  assert(AfterCSRPopSize == 0);
  // ...
  bool CombineSPBump = shouldCombineCSRLocalStackBump(NumBytes);
  unsigned ProloguePopSize = PrologueSaveSize;
  if (/* ... */) {
    // ...
    ProloguePopSize -= FixedObject;
    AfterCSRPopSize += FixedObject;
  }
  // ...
  if (!CombineSPBump && ProloguePopSize != 0) {
    // ...
    while (Pop->getOpcode() == TargetOpcode::CFI_INSTRUCTION ||
           AArch64InstrInfo::isSEHInstruction(*Pop) ||
           // ...
      Pop = std::prev(Pop);
    // ...
    if (OffsetOp.getImm() == 0 && AfterCSRPopSize >= 0) {
      // ...
      if (AArch64InstrInfo::isSEHInstruction(*AfterLastPop))
        // ...
    } else {
      // ...
      AfterCSRPopSize += ProloguePopSize;
    }
  }
  while (FirstGPRRestoreI != Begin) {
    // ...
    } else if (CombineSPBump)
      fixupCalleeSaveRestoreStackOffset(*FirstGPRRestoreI,
                                        AFI->getLocalStackSize());
  }
  // ...
  BuildMI(MBB, FirstGPRRestoreI, DL, TII->get(AArch64::SEH_EpilogStart))
      // ...
  SEHEpilogueStartI = FirstGPRRestoreI;
  --SEHEpilogueStartI;
  // ...
  auto [PPRRange, ZPRRange] = partitionSVECS(
      MBB,
      /* ... */ ? MBB.getFirstTerminator()
                : /* ... */,
      PPR.CalleeSavesSize, ZPR.CalleeSavesSize, true);
  if (HasFP && AFI->hasSwiftAsyncContext())
    emitSwiftAsyncContextFramePointer(EpilogueEndI, DL);

  if (CombineSPBump) {
    assert(!AFI->hasSVEStackSize() && "Cannot combine SP bump with SVE");
    // ...
    return;
  }

  NumBytes -= PrologueSaveSize;
  assert(NumBytes >= 0 && "Negative stack allocation size!?");

  StackOffset SVECalleeSavesSize = ZPR.CalleeSavesSize + PPR.CalleeSavesSize;
  // ...
  if (SVELayout == SVEStackLayout::CalleeSavesAboveFrameRecord) {
    assert(/* ... */ &&
           "unexpected SVE allocs after PPRs with CalleeSavesAboveFrameRecord");
    // ...
    if (!AFI->isStackRealigned() && !MFI.hasVarSizedObjects()) {
      // ...
    }
    // ...
  } else if (AFI->hasSVEStackSize()) {
    // Deallocate the SVE area.
    auto BaseForSVEDealloc =
        (AFI->isStackRealigned() || MFI.hasVarSizedObjects()) ? AArch64::FP
                                                              : AArch64::SP;
    if (SVECalleeSavesSize && BaseForSVEDealloc == AArch64::FP) {
      StackOffset FPOffsetZPR =
          // ...
          -SVECalleeSavesSize - PPR.LocalsSize -
          /* ... */;
      moveSPBelowFP(ZPRRange.Begin, FPOffsetZPR);
      // ...
      assert(!FPOffsetPPR.getFixed() && "expected only scalable offset");
      // ...
    } else if (BaseForSVEDealloc == AArch64::SP) {
      // ...
      NumBytes -= NonSVELocals.getFixed();
      // ...
    }
    // ...
    assert(PPRRange.Begin == ZPRRange.End &&
           "Expected PPR restores after ZPR");
    // ...
  }
  // ...
  emitCalleeSavedSVERestores(
      /* ... */);
  // ...
  bool RedZone = AFL.canUseRedZone(MF);
  // A red-zone leaf function doesn't need its stack pointer restored (but it
  // may still need to pop stack args, e.g. for fastcc).
  if (RedZone && AfterCSRPopSize == 0)
    return;
  // ...
  bool NoCalleeSaveRestore = PrologueSaveSize == 0;
  int64_t StackRestoreBytes = RedZone ? 0 : NumBytes;
  if (NoCalleeSaveRestore)
    StackRestoreBytes += AfterCSRPopSize;
  // ...
  emitFrameOffset(
      MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::SP,
      /* ... */);
  // ...
  if (NoCalleeSaveRestore || AfterCSRPopSize == 0)
    // ...
  // Restore SP from FP when SP-relative offsets are unreliable.
  if (!IsFunclet && (MFI.hasVarSizedObjects() || AFI->isStackRealigned())) {
    emitFrameOffset(
        MBB, FirstGPRRestoreI, DL, AArch64::SP, AArch64::FP,
        /* ... */);
  } else if (NumBytes)
    // ...
  // ...
  if (AfterCSRPopSize) {
    assert(AfterCSRPopSize > 0 && "attempting to reallocate arg stack that an "
                                  "interrupt may have clobbered");
    // ...
    emitFrameOffset(
        MBB, MBB.getFirstTerminator(), DL, AArch64::SP, AArch64::SP,
        /* ... */);
  }
}
bool AArch64EpilogueEmitter::shouldCombineCSRLocalStackBump(
    uint64_t StackBumpBytes) const {
  // ...
  while (LastI != Begin) {
    // ...
    if (LastI->isTransient())
      // ...
  }
  // ...
  switch (LastI->getOpcode()) {
  case AArch64::STGloop:
  case AArch64::STZGloop:
  case AArch64::STGi:
  case AArch64::STZGi:
  case AArch64::ST2Gi:
  case AArch64::STZ2Gi:
    return false;
  // ...
  }
}
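// NOTE: the memory-tagging stores above (STG/STZG and friends) can fold an
// SP adjustment themselves, so when one of them ends the function it is
// usually better to let it absorb the deallocation than to emit the combined
// callee-save + locals SP bump here.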
void AArch64EpilogueEmitter::emitSwiftAsyncContextFramePointer(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  switch (MF.getTarget().Options.SwiftAsyncFramePointer) {
  // ...
  }
}
void AArch64EpilogueEmitter::emitShadowCallStackEpilogue(
    MachineBasicBlock::iterator MBBI, const DebugLoc &DL) const {
  // Shadow call stack epilog: ldr x30, [x18, #-8]!
  // ...
  if (AFI->needsAsyncDwarfUnwindInfo(MF))
    // ...
        .buildRestore(AArch64::X18);
}
void AArch64EpilogueEmitter::emitCalleeSavedRestores(
    MachineBasicBlock::iterator MBBI, bool SVE) const {
  const std::vector<CalleeSavedInfo> &CSI = MFI.getCalleeSavedInfo();
  // ...
  for (const auto &Info : CSI) {
    if (SVE != MFI.hasScalableStackID(Info.getFrameIdx()))
      continue;

    MCRegister Reg = Info.getReg();
    // ...
    CFIBuilder.buildRestore(Info.getReg());
  }
}
void AArch64EpilogueEmitter::finalizeEpilogue() const {
  if (AFI->needsShadowCallStackPrologueEpilogue(MF)) {
    emitShadowCallStackEpilogue(MBB.getFirstTerminator(), DL);
    // ...
  }
  // ...
  emitCalleeSavedGPRRestores(MBB.getFirstTerminator());
  if (AFI->shouldSignReturnAddress(MF)) {
    // ...
    if (!AFL.shouldSignReturnAddressEverywhere(MF)) {
      // ...
          TII->get(AArch64::PAUTH_EPILOGUE))
      // ...
    }
  }
  // ...
  BuildMI(MBB, MBB.getFirstTerminator(), DL, TII->get(AArch64::SEH_EpilogEnd))
      // ...
  if (!MF.hasWinCFI())
    MF.setHasWinCFI(true);
  // ...
    MBB.erase(SEHEpilogueStartI);
}