LLVM 22.0.0git
StackProtector.cpp
Go to the documentation of this file.
1//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This pass inserts stack protectors into functions which need them. A variable
10// with a random value in it is stored onto the stack before the local variables
11// are allocated. Upon exiting the block, the stored value is checked. If it's
12// changed, then there was some sort of violation and the program aborts.
13//
14//===----------------------------------------------------------------------===//
15
18#include "llvm/ADT/Statistic.h"
23#include "llvm/CodeGen/Passes.h"
27#include "llvm/IR/Attributes.h"
28#include "llvm/IR/BasicBlock.h"
29#include "llvm/IR/Constants.h"
30#include "llvm/IR/DataLayout.h"
32#include "llvm/IR/Dominators.h"
34#include "llvm/IR/Function.h"
35#include "llvm/IR/IRBuilder.h"
36#include "llvm/IR/Instruction.h"
39#include "llvm/IR/Intrinsics.h"
40#include "llvm/IR/MDBuilder.h"
41#include "llvm/IR/Module.h"
42#include "llvm/IR/Type.h"
43#include "llvm/IR/User.h"
45#include "llvm/Pass.h"
51#include <optional>
52
53using namespace llvm;
54
55#define DEBUG_TYPE "stack-protector"
56
57STATISTIC(NumFunProtected, "Number of functions protected");
58STATISTIC(NumAddrTaken, "Number of local variables that have their address"
59 " taken.");
60
// When true (the default), prefer emitting the stack-guard check during
// SelectionDAG instead of as IR-level instrumentation, on targets that
// support it (see SupportsSelectionDAGSP in InsertStackProtectors).
61static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
62 cl::init(true), cl::Hidden);
// When true, suppress placing a guard check before noreturn calls that may
// throw (e.g. __cxa_throw); see the CheckLoc search in InsertStackProtectors.
63static cl::opt<bool> DisableCheckNoReturn("disable-check-noreturn-call",
64 cl::init(false), cl::Hidden);
65
66/// InsertStackProtectors - Insert code into the prologue and epilogue of the
67/// function.
68///
69/// - The prologue code loads and stores the stack guard onto the stack.
70/// - The epilogue checks the value stored in the prologue against the original
71/// value. It calls __stack_chk_fail if they differ.
72static bool InsertStackProtectors(const TargetMachine *TM, Function *F,
73 DomTreeUpdater *DTU, bool &HasPrologue,
74 bool &HasIRCheck);
75
76/// CreateFailBB - Create a basic block to jump to when the stack protector
77/// check fails.
78static BasicBlock *CreateFailBB(Function *F, const TargetLowering &TLI);
79
// SSPLayoutInfo::shouldEmitSDCheck (signature line not visible in this
// doxygen-rendered view): SelectionDAG should emit its own guard check for BB
// only when the IR prologue exists, no IR-level epilogue check was emitted,
// and BB ends in a return.
81 return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
82}
83
// SSPLayoutInfo::copyToMachineFrameInfo (signature line not visible in this
// doxygen-rendered view): propagate the IR-level SSP classification of each
// alloca onto the matching MachineFrameInfo stack objects via
// setObjectSSPLayout, so later frame lowering can use it.
85 if (Layout.empty())
86 return;
87
// Walk the frame objects in [0, getObjectIndexEnd()); skip dead slots.
88 for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
89 if (MFI.isDeadObjectIndex(I))
90 continue;
91
// Only objects that originate from an alloca can appear in the layout map.
92 const AllocaInst *AI = MFI.getObjectAllocation(I);
93 if (!AI)
94 continue;
95
96 SSPLayoutMap::const_iterator LI = Layout.find(AI);
97 if (LI == Layout.end())
98 continue;
99
100 MFI.setObjectSSPLayout(I, LI->second);
101 }
102}
103
106
// SSPLayoutAnalysis::run (signature lines not visible in this
// doxygen-rendered view): build the SSPLayoutInfo result for F.
// NOTE(review): line 109 — the expression completing the
// RequireStackProtector assignment (presumably the requiresStackProtector
// call) — is missing from this rendered view; confirm against the real file.
107 SSPLayoutInfo Info;
108 Info.RequireStackProtector =
110 Info.SSPBufferSize = F.getFnAttributeAsParsedInteger(
// Front ends may override the minimum protect-worthy buffer size via the
// "stack-protector-buffer-size" function attribute.
111 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
112 return Info;
113}
114
115AnalysisKey SSPLayoutAnalysis::Key;
116
// StackProtectorPass::run (signature lines not visible in this
// doxygen-rendered view): new-pass-manager driver. Queries the layout
// analysis, bails out early when no protector is required or the personality
// is funclet-based, then inserts prologue/epilogue instrumentation.
119 auto &Info = FAM.getResult<SSPLayoutAnalysis>(F);
// Use only a cached DominatorTree — do not force its construction here.
120 auto *DT = FAM.getCachedResult<DominatorTreeAnalysis>(F);
121 DomTreeUpdater DTU(DT, DomTreeUpdater::UpdateStrategy::Lazy);
122
123 if (!Info.RequireStackProtector)
124 return PreservedAnalyses::all();
125
126 // TODO(etienneb): Functions with funclets are not correctly supported now.
127 // Do nothing if this is funclet-based personality.
128 if (F.hasPersonalityFn()) {
129 EHPersonality Personality = classifyEHPersonality(F.getPersonalityFn());
130 if (isFuncletEHPersonality(Personality))
131 return PreservedAnalyses::all();
132 }
133
134 ++NumFunProtected;
// Only pass the DomTreeUpdater through when a cached DT actually exists.
135 bool Changed = InsertStackProtectors(TM, &F, DT ? &DTU : nullptr,
136 Info.HasPrologue, Info.HasIRCheck);
137#ifdef EXPENSIVE_CHECKS
138 assert((!DT ||
139 DTU.getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
140 "Failed to maintain validity of domtree!");
141#endif
142
143 if (!Changed)
144 return PreservedAnalyses::all();
// NOTE(review): lines 145-147 — constructing the PreservedAnalyses PA
// returned below and marking preserved analyses — are missing from this
// doxygen-rendered view.
148 return PA;
149}
150
151char StackProtector::ID = 0;
152
156
158 "Insert stack protectors", false, true)
162 "Insert stack protectors", false, true)
163
165
170
// StackProtector::runOnFunction (signature line not visible in this
// doxygen-rendered view): legacy-pass-manager driver mirroring
// StackProtectorPass::run above.
172 F = &Fn;
173 M = F->getParent();
// NOTE(review): line 174 — presumably obtaining DTWP (the
// DominatorTreeWrapperPass) used on the next line — is missing from this
// doxygen-rendered view.
175 DTU.emplace(DTWP->getDomTree(), DomTreeUpdater::UpdateStrategy::Lazy);
177 LayoutInfo.HasPrologue = false;
178 LayoutInfo.HasIRCheck = false;
179
180 LayoutInfo.SSPBufferSize = Fn.getFnAttributeAsParsedInteger(
181 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
182 if (!requiresStackProtector(F, &LayoutInfo.Layout))
183 return false;
184
185 // TODO(etienneb): Functions with funclets are not correctly supported now.
186 // Do nothing if this is funclet-based personality.
187 if (Fn.hasPersonalityFn()) {
// NOTE(review): line 188 — classifying the personality into the
// Personality variable tested below — is missing from this rendered view.
189 if (isFuncletEHPersonality(Personality))
190 return false;
191 }
192
193 ++NumFunProtected;
194 bool Changed =
195 InsertStackProtectors(TM, F, DTU ? &*DTU : nullptr,
196 LayoutInfo.HasPrologue, LayoutInfo.HasIRCheck);
197#ifdef EXPENSIVE_CHECKS
198 assert((!DTU ||
199 DTU->getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
200 "Failed to maintain validity of domtree!");
201#endif
// Drop the updater so any pending lazy domtree updates are flushed.
202 DTU.reset();
203 return Changed;
204}
205
206/// \param [out] IsLarge is set to true if a protectable array is found and
207/// it is "large" ( >= ssp-buffer-size). In the case of a structure with
208/// multiple arrays, this gets set if any of them is large.
209static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize,
210 bool &IsLarge, bool Strong,
211 bool InStruct) {
212 if (!Ty)
213 return false;
214 if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
215 if (!AT->getElementType()->isIntegerTy(8)) {
216 // If we're on a non-Darwin platform or we're inside of a structure, don't
217 // add stack protectors unless the array is a character array.
218 // However, in strong mode any array, regardless of type and size,
219 // triggers a protector.
220 if (!Strong && (InStruct || !M->getTargetTriple().isOSDarwin()))
221 return false;
222 }
223
224 // If an array has more than SSPBufferSize bytes of allocated space, then we
225 // emit stack protectors.
226 if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
227 IsLarge = true;
228 return true;
229 }
230
231 if (Strong)
232 // Require a protector for all arrays in strong mode
233 return true;
234 }
235
236 const StructType *ST = dyn_cast<StructType>(Ty);
237 if (!ST)
238 return false;
239
240 bool NeedsProtector = false;
241 for (Type *ET : ST->elements())
242 if (ContainsProtectableArray(ET, M, SSPBufferSize, IsLarge, Strong, true)) {
243 // If the element is a protectable array and is large (>= SSPBufferSize)
244 // then we are done. If the protectable array is not large, then
245 // keep looking in case a subsequent element is a large array.
246 if (IsLarge)
247 return true;
248 NeedsProtector = true;
249 }
250
251 return NeedsProtector;
252}
253
254/// Maximum remaining allocation size observed for a phi node, and how often
255/// the allocation size has already been decreased. We only allow a limited
256/// number of decreases.
257struct PhiInfo {
// NOTE(review): line 258 — the remaining-allocation-size member that
// HasAddressTaken below reads/writes as It->second.AllocSize — is missing
// from this doxygen-rendered view.
// How many times the tracked size for this phi has already been shrunk.
259 unsigned NumDecreased = 0;
// Hard cap on shrink operations; past this, HasAddressTaken gives up and
// conservatively reports the address as taken.
260 static constexpr unsigned MaxNumDecreased = 3;
262};
264
265/// Check whether a stack allocation has its address taken.
266static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize,
267 Module *M,
268 PhiMap &VisitedPHIs) {
269 const DataLayout &DL = M->getDataLayout();
// Walk all direct users of the allocation (and, via recursion, of pointers
// derived from it) looking for anything that escapes it or may overrun it.
270 for (const User *U : AI->users()) {
271 const auto *I = cast<Instruction>(U);
272 // If this instruction accesses memory make sure it doesn't access beyond
273 // the bounds of the allocated object.
274 std::optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
275 if (MemLoc && MemLoc->Size.hasValue() &&
276 !TypeSize::isKnownGE(AllocSize, MemLoc->Size.getValue()))
277 return true;
278 switch (I->getOpcode()) {
279 case Instruction::Store:
// Storing the pointer itself (as the *value* operand) publishes the address;
// storing *to* the slot is harmless.
280 if (AI == cast<StoreInst>(I)->getValueOperand())
281 return true;
282 break;
283 case Instruction::AtomicCmpXchg:
284 // cmpxchg conceptually includes both a load and store from the same
285 // location. So, like store, the value being stored is what matters.
286 if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
287 return true;
288 break;
289 case Instruction::AtomicRMW:
290 if (AI == cast<AtomicRMWInst>(I)->getValOperand())
291 return true;
292 break;
293 case Instruction::PtrToInt:
// Converting the address to an integer lets it escape untracked.
294 if (AI == cast<PtrToIntInst>(I)->getOperand(0))
295 return true;
296 break;
297 case Instruction::Call: {
298 // Ignore intrinsics that do not become real instructions.
299 // TODO: Narrow this to intrinsics that have store-like effects.
300 const auto *CI = cast<CallInst>(I);
301 if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
302 return true;
303 break;
304 }
305 case Instruction::Invoke:
306 return true;
307 case Instruction::GetElementPtr: {
308 // If the GEP offset is out-of-bounds, or is non-constant and so has to be
309 // assumed to be potentially out-of-bounds, then any memory access that
310 // would use it could also be out-of-bounds meaning stack protection is
311 // required.
// NOTE(review): line 312 — the cast of I to GetElementPtrInst declaring the
// GEP variable used below — is missing from this doxygen-rendered view.
313 unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
314 APInt Offset(IndexSize, 0);
315 if (!GEP->accumulateConstantOffset(DL, Offset))
316 return true;
317 TypeSize OffsetSize = TypeSize::getFixed(Offset.getLimitedValue());
318 if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
319 return true;
320 // Adjust AllocSize to be the space remaining after this offset.
321 // We can't subtract a fixed size from a scalable one, so in that case
322 // assume the scalable value is of minimum size.
323 TypeSize NewAllocSize =
324 TypeSize::getFixed(AllocSize.getKnownMinValue()) - OffsetSize;
// Recurse on the derived pointer with the shrunken remaining size.
325 if (HasAddressTaken(I, NewAllocSize, M, VisitedPHIs))
326 return true;
327 break;
328 }
329 case Instruction::BitCast:
330 case Instruction::Select:
331 case Instruction::AddrSpaceCast:
// These forward the same address unchanged; recurse with the same size.
332 if (HasAddressTaken(I, AllocSize, M, VisitedPHIs))
333 return true;
334 break;
335 case Instruction::PHI: {
336 // Keep track of what PHI nodes we have already visited to ensure
337 // they are only visited once.
338 const auto *PN = cast<PHINode>(I);
339 auto [It, Inserted] = VisitedPHIs.try_emplace(PN, AllocSize);
340 if (!Inserted) {
// Already visited with a size no larger than this one: nothing new to learn.
341 if (TypeSize::isKnownGE(AllocSize, It->second.AllocSize))
342 break;
343
344 // Check again with smaller size.
// Bound the number of re-visits: after MaxNumDecreased shrinks,
// conservatively report the address as taken rather than keep iterating.
345 if (It->second.NumDecreased == PhiInfo::MaxNumDecreased)
346 return true;
347
348 It->second.AllocSize = AllocSize;
349 ++It->second.NumDecreased;
350 }
351 if (HasAddressTaken(PN, AllocSize, M, VisitedPHIs))
352 return true;
353 break;
354 }
355 case Instruction::Load:
356 case Instruction::Ret:
357 // These instructions take an address operand, but have load-like or
358 // other innocuous behavior that should not trigger a stack protector.
359 break;
360 default:
361 // Conservatively return true for any instruction that takes an address
362 // operand, but is not handled above.
363 return true;
364 }
365 }
366 return false;
367}
368
369/// Search for the first call to the llvm.stackprotector intrinsic and return it
370/// if present.
// findStackProtectorIntrinsic (signature line not visible in this
// doxygen-rendered view): linear scan over all instructions in F; returns the
// first llvm.stackprotector call found, or nullptr if there is none.
372 for (const BasicBlock &BB : F)
373 for (const Instruction &I : BB)
374 if (const auto *II = dyn_cast<IntrinsicInst>(&I))
375 if (II->getIntrinsicID() == Intrinsic::stackprotector)
376 return II;
377 return nullptr;
378}
379
380/// Check whether or not this function needs a stack protector based
381/// upon the stack protector level.
382///
383/// We use two heuristics: a standard (ssp) and strong (sspstrong).
384/// The standard heuristic which will add a guard variable to functions that
385/// call alloca with a either a variable size or a size >= SSPBufferSize,
386/// functions with character buffers larger than SSPBufferSize, and functions
387/// with aggregates containing character buffers larger than SSPBufferSize. The
388/// strong heuristic will add a guard variables to functions that call alloca
389/// regardless of size, functions with any buffer regardless of type and size,
390/// functions with aggregates that contain any buffer regardless of type and
391/// size, and functions that contain stack-based variables that have had their
392/// address taken.
// requiresStackProtector (first signature line not visible in this
// doxygen-rendered view). When Layout is null the caller only wants a
// yes/no answer, so each protect-worthy finding returns true immediately;
// otherwise every protected alloca is recorded with its SSPLayoutKind.
394 SSPLayoutMap *Layout) {
395 Module *M = F->getParent();
396 bool Strong = false;
397 bool NeedsProtector = false;
398
399 // The set of PHI nodes visited when determining if a variable's reference has
400 // been taken. This set is maintained to ensure we don't visit the same PHI
401 // node multiple times.
402 PhiMap VisitedPHIs;
403
404 unsigned SSPBufferSize = F->getFnAttributeAsParsedInteger(
405 "stack-protector-buffer-size", SSPLayoutInfo::DefaultSSPBufferSize);
406
// SafeStack-instrumented functions opt out of SSP entirely.
407 if (F->hasFnAttribute(Attribute::SafeStack))
408 return false;
409
410 // We are constructing the OptimizationRemarkEmitter on the fly rather than
411 // using the analysis pass to avoid building DominatorTree and LoopInfo which
412 // are not available this late in the IR pipeline.
// NOTE(review): line 413 — the ORE construction itself — is missing from
// this doxygen-rendered view.
414
// sspreq: always protect; also use the strong heuristic for layout kinds.
415 if (F->hasFnAttribute(Attribute::StackProtectReq)) {
416 if (!Layout)
417 return true;
418 ORE.emit([&]() {
419 return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
420 << "Stack protection applied to function "
421 << ore::NV("Function", F)
422 << " due to a function attribute or command-line switch";
423 });
424 NeedsProtector = true;
425 Strong = true; // Use the same heuristic as strong to determine SSPLayout
426 } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
427 Strong = true;
428 else if (!F->hasFnAttribute(Attribute::StackProtect))
429 return false;
430
431 for (const BasicBlock &BB : *F) {
432 for (const Instruction &I : BB) {
433 if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
// Explicit "stack-protector" metadata with a zero operand opts this
// alloca out of protection.
434 if (const MDNode *MD = AI->getMetadata("stack-protector")) {
435 const auto *CI = mdconst::dyn_extract<ConstantInt>(MD->getOperand(0));
436 if (CI->isZero())
437 continue;
438 }
439 if (AI->isArrayAllocation()) {
440 auto RemarkBuilder = [&]() {
441 return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
442 &I)
443 << "Stack protection applied to function "
444 << ore::NV("Function", F)
445 << " due to a call to alloca or use of a variable length "
446 "array";
447 };
448 if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
449 if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
450 // A call to alloca with size >= SSPBufferSize requires
451 // stack protectors.
452 if (!Layout)
453 return true;
454 Layout->insert(
455 std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
456 ORE.emit(RemarkBuilder);
457 NeedsProtector = true;
458 } else if (Strong) {
459 // Require protectors for all alloca calls in strong mode.
460 if (!Layout)
461 return true;
462 Layout->insert(
463 std::make_pair(AI, MachineFrameInfo::SSPLK_SmallArray));
464 ORE.emit(RemarkBuilder);
465 NeedsProtector = true;
466 }
467 } else {
468 // A call to alloca with a variable size requires protectors.
469 if (!Layout)
470 return true;
471 Layout->insert(
472 std::make_pair(AI, MachineFrameInfo::SSPLK_LargeArray));
473 ORE.emit(RemarkBuilder);
474 NeedsProtector = true;
475 }
476 continue;
477 }
478
479 bool IsLarge = false;
480 if (ContainsProtectableArray(AI->getAllocatedType(), M, SSPBufferSize,
481 IsLarge, Strong, false)) {
482 if (!Layout)
483 return true;
484 Layout->insert(std::make_pair(
// NOTE(review): lines 485-486 — the remaining make_pair arguments,
// presumably choosing SSPLK_LargeArray vs SSPLK_SmallArray from IsLarge —
// are missing from this doxygen-rendered view.
487 ORE.emit([&]() {
488 return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
489 << "Stack protection applied to function "
490 << ore::NV("Function", F)
491 << " due to a stack allocated buffer or struct containing a "
492 "buffer";
493 });
494 NeedsProtector = true;
495 continue;
496 }
497
// Strong mode additionally protects any local whose address escapes.
498 if (Strong &&
// NOTE(review): line 499 — the opening of the HasAddressTaken call whose
// arguments follow on the next two lines — is missing from this view.
500 AI, M->getDataLayout().getTypeAllocSize(AI->getAllocatedType()),
501 M, VisitedPHIs)) {
502 ++NumAddrTaken;
503 if (!Layout)
504 return true;
505 Layout->insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
506 ORE.emit([&]() {
507 return OptimizationRemark(DEBUG_TYPE, "StackProtectorAddressTaken",
508 &I)
509 << "Stack protection applied to function "
510 << ore::NV("Function", F)
511 << " due to the address of a local variable being taken";
512 });
513 NeedsProtector = true;
514 }
515 // Clear any PHIs that we visited, to make sure we examine all uses of
516 // any subsequent allocas that we look at.
517 VisitedPHIs.clear();
518 }
519 }
520 }
521
522 return NeedsProtector;
523}
524
525/// Create a stack guard loading and populate whether SelectionDAG SSP is
526/// supported.
// getStackGuard (first signature line not visible in this doxygen-rendered
// view): load the stack guard value, preferring a target-provided IR-level
// guard location when the guard mode allows it.
528 IRBuilder<> &B,
529 bool *SupportsSelectionDAGSP = nullptr) {
530 Value *Guard = TLI->getIRStackGuard(B);
531 StringRef GuardMode = M->getStackProtectorGuard();
// Volatile load so the guard read cannot be folded away or reordered.
532 if ((GuardMode == "tls" || GuardMode.empty()) && Guard)
533 return B.CreateLoad(B.getPtrTy(), Guard, true, "StackGuard");
534
535 // Use SelectionDAG SSP handling, since there isn't an IR guard.
536 //
537 // This is more or less weird, since we optionally output whether we
538 // should perform a SelectionDAG SP here. The reason is that it's strictly
539 // defined as !TLI->getIRStackGuard(B), where getIRStackGuard is also
540 // mutating. There is no way to get this bit without mutating the IR, so
541 // getting this bit has to happen in this right time.
542 //
543 // We could have define a new function TLI::supportsSelectionDAGSP(), but that
544 // will put more burden on the backends' overriding work, especially when it
545 // actually conveys the same information getIRStackGuard() already gives.
546 if (SupportsSelectionDAGSP)
547 *SupportsSelectionDAGSP = true;
548 TLI->insertSSPDeclarations(*M);
549 return B.CreateIntrinsic(Intrinsic::stackguard, {});
550}
551
552/// Insert code into the entry block that stores the stack guard
553/// variable onto the stack:
554///
555/// entry:
556/// StackGuardSlot = alloca i8*
557/// StackGuard = <stack guard>
558/// call void @llvm.stackprotector(StackGuard, StackGuardSlot)
559///
560/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
561/// node.
562static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc,
563 const TargetLoweringBase *TLI, AllocaInst *&AI) {
564 bool SupportsSelectionDAGSP = false;
565 IRBuilder<> B(&F->getEntryBlock().front());
566 PointerType *PtrTy = PointerType::getUnqual(CheckLoc->getContext());
567 AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");
568
569 Value *GuardSlot = getStackGuard(TLI, M, B, &SupportsSelectionDAGSP);
570 B.CreateIntrinsic(Intrinsic::stackprotector, {GuardSlot, AI});
571 return SupportsSelectionDAGSP;
572}
573
// InsertStackProtectors (first signature line not visible in this
// doxygen-rendered view; the forward declaration above gives the full
// signature).
575 DomTreeUpdater *DTU, bool &HasPrologue,
576 bool &HasIRCheck) {
577 auto *M = F->getParent();
578 auto *TLI = TM->getSubtargetImpl(*F)->getTargetLowering();
579
580 // If the target wants to XOR the frame pointer into the guard value, it's
581 // impossible to emit the check in IR, so the target *must* support stack
582 // protection in SDAG.
583 bool SupportsSelectionDAGSP =
584 TLI->useStackGuardMixCookie() ||
// NOTE(review): line 585 — the second operand of this ||, presumably
// involving the EnableSelectionDAGSP flag declared above — is missing from
// this doxygen-rendered view.
586 AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
587 BasicBlock *FailBB = nullptr;
588
// NOTE(review): line 589 — the loop header iterating over the function's
// basic blocks (binding BB used below) — is missing from this view.
590 // This is stack protector auto generated check BB, skip it.
591 if (&BB == FailBB)
592 continue;
// Default check location: the block's return instruction, if any.
593 Instruction *CheckLoc = dyn_cast<ReturnInst>(BB.getTerminator());
594 if (!CheckLoc && !DisableCheckNoReturn)
595 for (auto &Inst : BB) {
596 if (IntrinsicInst *IB = dyn_cast<IntrinsicInst>(&Inst);
597 IB && (IB->getIntrinsicID() == Intrinsic::eh_sjlj_callsite)) {
598 // eh_sjlj_callsite has to be in same BB as the
599 // bb terminator. Don't insert within this range.
600 CheckLoc = IB;
601 break;
602 }
603 if (auto *CB = dyn_cast<CallBase>(&Inst))
604 // Do stack check before noreturn calls that aren't nounwind (e.g:
605 // __cxa_throw).
606 if (CB->doesNotReturn() && !CB->doesNotThrow()) {
607 CheckLoc = CB;
608 break;
609 }
610 }
611
612 if (!CheckLoc)
613 continue;
614
615 // Generate prologue instrumentation if not already generated.
616 if (!HasPrologue) {
617 HasPrologue = true;
618 SupportsSelectionDAGSP &= CreatePrologue(F, M, CheckLoc, TLI, AI);
619 }
620
621 // SelectionDAG based code generation. Nothing else needs to be done here.
622 // The epilogue instrumentation is postponed to SelectionDAG.
623 if (SupportsSelectionDAGSP)
624 break;
625
626 // Find the stack guard slot if the prologue was not created by this pass
627 // itself via a previous call to CreatePrologue().
628 if (!AI) {
629 const CallInst *SPCall = findStackProtectorIntrinsic(*F);
630 assert(SPCall && "Call to llvm.stackprotector is missing");
631 AI = cast<AllocaInst>(SPCall->getArgOperand(1));
632 }
633
634 // Set HasIRCheck to true, so that SelectionDAG will not generate its own
635 // version. SelectionDAG called 'shouldEmitSDCheck' to check whether
636 // instrumentation has already been generated.
637 HasIRCheck = true;
638
639 // If we're instrumenting a block with a tail call, the check has to be
640 // inserted before the call rather than between it and the return.
641 Instruction *Prev = CheckLoc->getPrevNode();
642 if (auto *CI = dyn_cast_if_present<CallInst>(Prev))
643 if (CI->isTailCall() && isInTailCallPosition(*CI, *TM))
644 CheckLoc = Prev;
645
646 // Generate epilogue instrumentation. The epilogue intrumentation can be
647 // function-based or inlined depending on which mechanism the target is
648 // providing.
649 if (Function *GuardCheck = TLI->getSSPStackGuardCheck(*M)) {
650 // Generate the function-based epilogue instrumentation.
651 // The target provides a guard check function, generate a call to it.
652 IRBuilder<> B(CheckLoc);
653 LoadInst *Guard = B.CreateLoad(B.getPtrTy(), AI, true, "Guard");
654 CallInst *Call = B.CreateCall(GuardCheck, {Guard});
// Mirror the guard-check function's own attributes and calling convention
// on the generated call.
655 Call->setAttributes(GuardCheck->getAttributes());
656 Call->setCallingConv(GuardCheck->getCallingConv());
657 } else {
658 // Generate the epilogue with inline instrumentation.
659 // If we do not support SelectionDAG based calls, generate IR level
660 // calls.
661 //
662 // For each block with a return instruction, convert this:
663 //
664 // return:
665 // ...
666 // ret ...
667 //
668 // into this:
669 //
670 // return:
671 // ...
672 // %1 = <stack guard>
673 // %2 = load StackGuardSlot
674 // %3 = icmp ne i1 %1, %2
675 // br i1 %3, label %CallStackCheckFailBlk, label %SP_return
676 //
677 // SP_return:
678 // ret ...
679 //
680 // CallStackCheckFailBlk:
681 // call void @__stack_chk_fail()
682 // unreachable
683
684 // Create the FailBB. We duplicate the BB every time since the MI tail
685 // merge pass will merge together all of the various BB into one including
686 // fail BB generated by the stack protector pseudo instruction.
687 if (!FailBB)
688 FailBB = CreateFailBB(F, *TLI);
689
690 IRBuilder<> B(CheckLoc);
691 Value *Guard = getStackGuard(TLI, M, B);
692 LoadInst *LI2 = B.CreateLoad(B.getPtrTy(), AI, true);
693 auto *Cmp = cast<ICmpInst>(B.CreateICmpNE(Guard, LI2));
694 auto SuccessProb =
// NOTE(review): lines 695 and 697 — the getBranchProbStackProtector calls
// producing the success/failure probabilities used below — are missing from
// this doxygen-rendered view.
696 auto FailureProb =
698 MDNode *Weights = MDBuilder(F->getContext())
699 .createBranchWeights(FailureProb.getNumerator(),
700 SuccessProb.getNumerator());
701
702 SplitBlockAndInsertIfThen(Cmp, CheckLoc,
703 /*Unreachable=*/false, Weights, DTU,
704 /*LI=*/nullptr, /*ThenBlock=*/FailBB);
705
706 auto *BI = cast<BranchInst>(Cmp->getParent()->getTerminator());
707 BasicBlock *NewBB = BI->getSuccessor(1);
708 NewBB->setName("SP_return");
709 NewBB->moveAfter(&BB);
710
// Invert the compare and swap the successors so the common (pass) path is
// the fall-through edge.
711 Cmp->setPredicate(Cmp->getInversePredicate());
712 BI->swapSuccessors();
713 }
714 }
715
716 // Return if we didn't modify any basic blocks. i.e., there are no return
717 // statements in the function.
718 return HasPrologue;
719}
720
// CreateFailBB (signature line not visible in this doxygen-rendered view):
// build the block that calls the failure handler and ends in unreachable.
722 auto *M = F->getParent();
723 LLVMContext &Context = F->getContext();
724 BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
725 IRBuilder<> B(FailBB);
// Use a line-0 location so the failure path is not attributed to an
// arbitrary user source line in debug info.
726 if (F->getSubprogram())
727 B.SetCurrentDebugLocation(
728 DILocation::get(Context, 0, 0, F->getSubprogram()));
729 FunctionCallee StackChkFail;
// NOTE(review): line 730 — the declaration of the Args vector used below
// (a SmallVector per the doxygen cross-references) — is missing from this
// rendered view.
731
// Prefer the standard __stack_chk_fail-style libcall; otherwise fall back to
// a stack-smash handler that takes the function's name as an argument.
732 if (const char *ChkFailName =
733 TLI.getLibcallName(RTLIB::STACKPROTECTOR_CHECK_FAIL)) {
734 StackChkFail =
735 M->getOrInsertFunction(ChkFailName, Type::getVoidTy(Context));
736 } else if (const char *SSHName =
737 TLI.getLibcallName(RTLIB::STACK_SMASH_HANDLER)) {
738 StackChkFail = M->getOrInsertFunction(SSHName, Type::getVoidTy(Context),
739 PointerType::getUnqual(Context));
740 Args.push_back(B.CreateGlobalString(F->getName(), "SSH"));
741 } else {
742 Context.emitError("no libcall available for stack protector");
743 }
744
745 if (StackChkFail) {
746 CallInst *Call = B.CreateCall(StackChkFail, Args);
747 Call->addFnAttr(Attribute::NoReturn);
748 }
749
750 B.CreateUnreachable();
751 return FailBB;
752}
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
This file contains the declarations for the subclasses of Constant, which represent the different fla...
#define DEBUG_TYPE
Hexagon Common GEP
Module.h This file contains the declarations for the Module class.
#define F(x, y, z)
Definition MD5.cpp:54
#define I(x, y, z)
Definition MD5.cpp:57
This file provides utility analysis objects describing memory locations.
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
#define INITIALIZE_PASS_DEPENDENCY(depName)
Definition PassSupport.h:42
#define INITIALIZE_PASS_END(passName, arg, name, cfg, analysis)
Definition PassSupport.h:44
#define INITIALIZE_PASS_BEGIN(passName, arg, name, cfg, analysis)
Definition PassSupport.h:39
This file defines the SmallVector class.
static Value * getStackGuard(const TargetLoweringBase *TLI, Module *M, IRBuilder<> &B, bool *SupportsSelectionDAGSP=nullptr)
Create a stack guard loading and populate whether SelectionDAG SSP is supported.
static BasicBlock * CreateFailBB(Function *F, const TargetLowering &TLI)
CreateFailBB - Create a basic block to jump to when the stack protector check fails.
static bool InsertStackProtectors(const TargetMachine *TM, Function *F, DomTreeUpdater *DTU, bool &HasPrologue, bool &HasIRCheck)
InsertStackProtectors - Insert code into the prologue and epilogue of the function.
static bool HasAddressTaken(const Instruction *AI, TypeSize AllocSize, Module *M, PhiMap &VisitedPHIs)
Check whether a stack allocation has its address taken.
static cl::opt< bool > DisableCheckNoReturn("disable-check-noreturn-call", cl::init(false), cl::Hidden)
static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc, const TargetLoweringBase *TLI, AllocaInst *&AI)
Insert code into the entry block that stores the stack guard variable onto the stack:
SmallDenseMap< const PHINode *, PhiInfo, 16 > PhiMap
static bool ContainsProtectableArray(Type *Ty, Module *M, unsigned SSPBufferSize, bool &IsLarge, bool Strong, bool InStruct)
static cl::opt< bool > EnableSelectionDAGSP("enable-selectiondag-sp", cl::init(true), cl::Hidden)
static const CallInst * findStackProtectorIntrinsic(Function &F)
Search for the first call to the llvm.stackprotector intrinsic and return it if present.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Definition Statistic.h:171
This file describes how to lower LLVM code to machine code.
Target-Independent Code Generator Pass Configuration Options pass.
Class for arbitrary precision integers.
Definition APInt.h:78
an instruction to allocate memory on the stack
Represent the analysis usage information of a pass.
AnalysisUsage & addRequired()
AnalysisUsage & addPreserved()
Add the specified Pass class to the set of analyses preserved by this pass.
LLVM Basic Block Representation.
Definition BasicBlock.h:62
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
Definition BasicBlock.h:206
LLVM_ABI void moveAfter(BasicBlock *MovePos)
Unlink this basic block from its current function and insert it right after MovePos in the function M...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
Definition BasicBlock.h:233
static BranchProbability getBranchProbStackProtector(bool IsLikely)
Value * getArgOperand(unsigned i) const
This class represents a function call, abstracting a target machine's calling convention.
A parsed version of the target data layout string in and methods for querying it.
Definition DataLayout.h:64
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
Definition DenseMap.h:256
DenseMapIterator< KeyT, ValueT, KeyInfoT, BucketT, true > const_iterator
Definition DenseMap.h:75
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Definition DenseMap.h:241
Analysis pass which computes a DominatorTree.
Definition Dominators.h:283
Legacy analysis pass which computes a DominatorTree.
Definition Dominators.h:321
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
FunctionPass class - This class is used to implement most global optimizations.
Definition Pass.h:314
FunctionPass(char &pid)
Definition Pass.h:316
uint64_t getFnAttributeAsParsedInteger(StringRef Kind, uint64_t Default=0) const
For a string attribute Kind, parse attribute as an integer.
Definition Function.cpp:777
bool hasPersonalityFn() const
Check whether this function has a personality function.
Definition Function.h:903
Constant * getPersonalityFn() const
Get the personality function associated with this function.
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
Module * getParent()
Get the module that this global value is contained inside of...
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Definition IRBuilder.h:2788
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Definition LLVMContext.h:68
An instruction for reading from memory.
LLVM_ABI MDNode * createBranchWeights(uint32_t TrueWeight, uint32_t FalseWeight, bool IsExpected=false)
Return metadata containing two branch weights.
Definition MDBuilder.cpp:38
Metadata node.
Definition Metadata.h:1078
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
Definition Metadata.h:1569
The MachineFrameInfo class represents an abstract stack frame until prolog/epilog code is inserted.
const AllocaInst * getObjectAllocation(int ObjectIdx) const
Return the underlying Alloca of the specified stack object if it exists.
@ SSPLK_SmallArray
Array or nested array < SSP-buffer-size.
@ SSPLK_LargeArray
Array or nested array >= SSP-buffer-size.
@ SSPLK_AddrOf
The address of this allocation is exposed and triggered protection.
void setObjectSSPLayout(int ObjectIdx, SSPLayoutKind Kind)
int getObjectIndexEnd() const
Return one past the maximum frame object index.
bool isDeadObjectIndex(int ObjectIdx) const
Returns true if the specified index corresponds to a dead object.
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Definition Module.h:67
The optimization diagnostic interface.
LLVM_ABI void emit(DiagnosticInfoOptimizationBase &OptDiag)
Output the remark via the diagnostic handler and to the optimization record file.
Diagnostic information for applied optimization remarks.
static LLVM_ABI PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
AnalysisType & getAnalysis() const
getAnalysis<AnalysisType>() - This function is used by subclasses to get to the analysis information ...
AnalysisType * getAnalysisIfAvailable() const
getAnalysisIfAvailable<AnalysisType>() - Subclasses use this function to get analysis information tha...
static PointerType * getUnqual(Type *ElementType)
This constructs a pointer to an object of the specified type in the default address space (address sp...
A set of analyses that are preserved following a run of a transformation pass.
Definition Analysis.h:112
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition Analysis.h:118
PreservedAnalyses & preserve()
Mark an analysis as preserved.
Definition Analysis.h:132
static bool requiresStackProtector(Function *F, SSPLayoutMap *Layout=nullptr)
Check whether or not F needs a stack protector based upon the stack protector level.
Result run(Function &F, FunctionAnalysisManager &FAM)
void copyToMachineFrameInfo(MachineFrameInfo &MFI) const
bool shouldEmitSDCheck(const BasicBlock &BB) const
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
PreservedAnalyses run(Function &F, FunctionAnalysisManager &FAM)
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - This function should be overriden by passes that need analysis information to do t...
static bool requiresStackProtector(Function *F, SSPLayoutMap *Layout=nullptr)
Check whether or not F needs a stack protector based upon the stack protector level.
bool runOnFunction(Function &Fn) override
runOnFunction - Virtual method overriden by subclasses to do the per-function processing of the pass.
StringRef - Represent a constant reference to a string, i.e.
Definition StringRef.h:55
constexpr bool empty() const
empty - Check if the string is empty.
Definition StringRef.h:143
Class to represent struct types.
This base class for TargetLowering contains the SelectionDAG-independent parts that can be used from ...
virtual Value * getIRStackGuard(IRBuilderBase &IRB) const
If the target has a standard location for the stack protector guard, returns the address of that loca...
const char * getLibcallName(RTLIB::Libcall Call) const
Get the libcall routine name for the specified libcall.
virtual bool useStackGuardMixCookie() const
If this function returns true, stack protection checks should mix the stack guard value before checki...
virtual void insertSSPDeclarations(Module &M) const
Inserts necessary declarations for SSP (stack protection) purpose.
This class defines information used to lower LLVM code to legal SelectionDAG operators that the targe...
Primary interface to the complete machine description for the target machine.
virtual const TargetSubtargetInfo * getSubtargetImpl(const Function &) const
Virtual method implemented by subclasses that returns a reference to that target's TargetSubtargetInf...
TargetOptions Options
unsigned EnableFastISel
EnableFastISel - This flag enables fast-path instruction selection which trades away generated code q...
Target-Independent Code Generator Pass Configuration Options.
virtual const TargetLowering * getTargetLowering() const
static constexpr TypeSize getFixed(ScalarTy ExactSize)
Definition TypeSize.h:343
The instances of the Type class are immutable: once they are created, they are never changed.
Definition Type.h:45
static LLVM_ABI Type * getVoidTy(LLVMContext &C)
Definition Type.cpp:280
LLVM Value Representation.
Definition Value.h:75
LLVM_ABI void setName(const Twine &Name)
Change the name of the value.
Definition Value.cpp:397
iterator_range< user_iterator > users()
Definition Value.h:426
constexpr ScalarTy getKnownMinValue() const
Returns the minimum value this quantity can represent.
Definition TypeSize.h:165
static constexpr bool isKnownGT(const FixedOrScalableQuantity &LHS, const FixedOrScalableQuantity &RHS)
Definition TypeSize.h:223
static constexpr bool isKnownGE(const FixedOrScalableQuantity &LHS, const FixedOrScalableQuantity &RHS)
Definition TypeSize.h:237
CallInst * Call
Changed
initializer< Ty > init(const Ty &Val)
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract(Y &&MD)
Extract a Value from Metadata, if any.
Definition Metadata.h:695
DiagnosticInfoOptimizationBase::Argument NV
This is an optimization pass for GlobalISel generic memory operations.
@ Offset
Definition DWP.cpp:532
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
Definition Casting.h:643
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
Definition Casting.h:732
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
Definition STLExtras.h:632
LLVM_ABI FunctionPass * createStackProtectorPass()
createStackProtectorPass - This pass adds stack protectors to functions.
LLVM_ABI void initializeStackProtectorPass(PassRegistry &)
LLVM_ABI EHPersonality classifyEHPersonality(const Value *Pers)
See if the given exception handling personality function is one that we understand.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
Definition Casting.h:547
bool isFuncletEHPersonality(EHPersonality Pers)
Returns true if this is a personality function that invokes handler funclets (which must return to it...
bool isInTailCallPosition(const CallBase &Call, const TargetMachine &TM, bool ReturnsFirstArg=false)
Test if the given instruction is in a position to be optimized with a tail-call.
Definition Analysis.cpp:543
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
Definition Casting.h:559
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
AnalysisManager< Function > FunctionAnalysisManager
Convenience typedef for the Function analysis manager.
PhiInfo(TypeSize AllocSize)
unsigned NumDecreased
static constexpr unsigned MaxNumDecreased
TypeSize AllocSize
A special type used by analysis passes to provide an address that identifies that particular analysis...
Definition Analysis.h:29