#include "llvm/IR/IntrinsicsSPIRV.h"
#include <unordered_set>
// ...
#define GET_BuiltinGroup_DECL
#include "SPIRVGenTables.inc"
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  Function *CurrF = nullptr;
  bool TrackConstants = true;
  bool HaveFunPtrs = false;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;
  std::unordered_set<Value *> Named;

  // Pointer arguments of function declarations: <argument index, element type>.
  DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;

  // Deferred-type bookkeeping: values whose pointee type is not yet known.
  bool CanTodoType = true;
  unsigned TodoTypeSz = 0;
  DenseMap<Value *, bool> TodoType;
  void insertTodoType(Value *Op) {
    // ...
    auto It = TodoType.try_emplace(Op, true);
    if (It.second)
      ++TodoTypeSz;
  }
  void eraseTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    if (It != TodoType.end() && It->second) {
      It->second = false;
      --TodoTypeSz;
    }
  }
  bool isTodoType(Value *Op) {
    // ...
    auto It = TodoType.find(Op);
    return It != TodoType.end() && It->second;
  }
  // Instructions whose operand types have already been validated.
  std::unordered_set<Instruction *> TypeValidated;
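  // Aside: a minimal, self-contained sketch of the deferred-type bookkeeping
  // idiom above, with hypothetical names (TodoTypeTracker is not part of this
  // pass). Values whose pointee type is still unknown are parked in a map;
  // the flag flips to false once resolved, and a counter tracks how many
  // remain unresolved.
#if 0
  #include <unordered_map>
  struct TodoTypeTracker {
    std::unordered_map<void *, bool> Todo;
    unsigned Pending = 0;
    void insert(void *V) {
      auto It = Todo.try_emplace(V, true);
      if (It.second)
        ++Pending; // first time we see V
    }
    void erase(void *V) {
      auto It = Todo.find(V);
      if (It != Todo.end() && It->second) {
        It->second = false; // keep the entry, mark it resolved
        --Pending;
      }
    }
    bool pending(void *V) const {
      auto It = Todo.find(V);
      return It != Todo.end() && It->second;
    }
  };
#endif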
  enum WellKnownTypes { Event };

  // Deduction of the pointee (element) type for pointer values.
  Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
                                bool UnknownElemTypeI8,
                                bool IgnoreKnownType = false);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  // ...

  // Deduction of a composite (nested) type.
  Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
  Type *deduceNestedTypeHelper(User *U, Type *OrigTy,
                               std::unordered_set<Value *> &Visited,
                               bool UnknownElemTypeI8);

  // Deduction of operand element types from the instruction that uses them.
  void deduceOperandElementType(Instruction *I,
                                SmallPtrSet<Instruction *, 4> *IncompleteRets,
                                const SmallPtrSet<Value *, 4> *AskOps = nullptr,
                                bool IsPostprocessing = false);

  Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
                        bool IsPostprocessing);
  // ...
  void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
                                    IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  bool shouldTryToAddMemAliasingDecoration(Instruction *Inst);
  void insertConstantsForFPFastMathDefault(Module &M);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);

  bool deduceOperandElementTypeCalledFunction(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool &Incomplete);
  void deduceOperandElementTypeFunctionPointer(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool IsPostprocessing);
  bool deduceOperandElementTypeFunctionRet(
      Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
      const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
      Type *&KnownElemTy, Value *Op, Function *F);

  void replaceUsesOfWithSpvPtrcast(Value *Op, Type *ElemTy, Instruction *I,
                                   DenseMap<Function *, CallInst *> Ptrcasts);
  void propagateElemType(Value *Op, Type *ElemTy,
                         DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void
  propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                       DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                            DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
                            std::unordered_set<Value *> &Visited,
                            DenseMap<Function *, CallInst *> Ptrcasts);

  GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP);
  bool postprocessTypes(Module &M);
  bool processFunctionPointers(Module &M);
  void parseFunDeclarations(Module &M);
  void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);

  // Walk an i8-typed GEP as a logical access chain, invoking a callback for
  // each recovered literal or dynamic index.
  bool walkLogicalAccessChain(
      GetElementPtrInst &GEP,
      const std::function<void(Type *PointedType, uint64_t Index)>
          &OnLiteralIndexing,
      const std::function<void(Type *PointedType, Value *Index)>
          &OnDynamicIndexing);
  Type *getGEPType(GetElementPtrInst *GEP);
  Type *getGEPTypeLogical(GetElementPtrInst *GEP);
  Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP);

public:
  static char ID;
  SPIRVEmitIntrinsics(SPIRVTargetMachine *TM = nullptr)
      : ModulePass(ID), TM(TM) {}
  Instruction *visitSwitchInst(SwitchInst &I);
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitBitCastInst(BitCastInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitInsertValueInst(InsertValueInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitLoadInst(LoadInst &I);
  Instruction *visitStoreInst(StoreInst &I);
  Instruction *visitAllocaInst(AllocaInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  Instruction *visitUnreachableInst(UnreachableInst &I);
  // ...

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;
  bool runOnFunction(Function &Func);

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
static bool isConvergenceIntrinsic(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
}

bool expectIgnoredInIRTranslation(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  switch (II->getIntrinsicID()) {
  case Intrinsic::invariant_start:
  case Intrinsic::spv_resource_handlefrombinding:
  case Intrinsic::spv_resource_getpointer:
    return true;
  default:
    return false;
  }
}

// Walk up through spv_ptrcast intrinsics to the original pointer value.
static Value *getPointerRoot(Value *V) {
  if (auto *II = dyn_cast<IntrinsicInst>(V)) {
    if (II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
      Value *V = II->getArgOperand(0);
      return getPointerRoot(V);
    }
  }
  return V;
}

char SPIRVEmitIntrinsics::ID = 0;
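// Aside: how the pass is typically instantiated. The factory
// createSPIRVEmitIntrinsicsPass(TM), defined at the bottom of this file, is
// what the SPIR-V target's pass pipeline calls; the legacy-PassManager lines
// below are illustrative usage only, not code from this file.
//
//   legacy::PassManager PM;
//   PM.add(createSPIRVEmitIntrinsicsPass(&TM));
//   PM.run(M);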
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
}

static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I) {
  B.SetCurrentDebugLocation(I->getDebugLoc());
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}

static bool requireAssignType(Instruction *I) {
  if (auto *Intr = dyn_cast<IntrinsicInst>(I)) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

static void emitAssignName(Instruction *I, IRBuilder<> &B) {
  if (!I->hasName() || I->getType()->isAggregateType() ||
      expectIgnoredInIRTranslation(I))
    return;
  // ...
  std::vector<Value *> Args = {
      // ... (the IR name wrapped as a metadata operand)
  };
  B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
}
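// Aside: schematically, for a named scalar value emitAssignName() produces an
// intrinsic call of roughly this shape (operand encoding illustrative):
//
//   %v = add i32 %a, %b
//   call void @llvm.spv.assign.name(i32 %v, metadata !"v")
//
// so the IR-level name survives until instruction selection, where it can be
// turned into an OpName decoration.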
void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
                                             bool DeleteOld) {
  GR->replaceAllUsesWith(Src, Dest, DeleteOld);
  // Update the deferred-type bookkeeping for the replaced value.
  if (isTodoType(Src)) {
    eraseTodoType(Src);
    insertTodoType(Dest);
  }
}

void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
                                                     Instruction *Src,
                                                     Instruction *Dest) {
  // ...
  std::string Name = Src->hasName() ? Src->getName().str() : "";
  Src->eraseFromParent();
  if (!Name.empty()) {
    Dest->setName(Name);
    if (Named.insert(Dest).second)
      emitAssignName(Dest, B);
  }
}
Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
                                           bool IsPostprocessing) {
  // ...
  if (UnknownElemTypeI8) {
    if (!IsPostprocessing)
      insertTodoType(Op);
    // ...
  }
  // ...
}

CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
                                               Type *ElemTy) {
  IRBuilder<> B(Op->getContext());
  if (auto *OpA = dyn_cast<Argument>(Op)) {
    B.SetInsertPointPastAllocas(OpA->getParent());
    // ...
  } else {
    B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
  }
  Type *OpTy = Op->getType();
  // ...
  CallInst *PtrCasted =
      B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  // ...
  return PtrCasted;
}
void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
    Value *Op, Type *ElemTy, Instruction *I,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  Function *F = I->getParent()->getParent();
  // Reuse one spv_ptrcast per function for the same <Op, ElemTy> pair.
  CallInst *PtrCastedI = nullptr;
  auto It = Ptrcasts.find(F);
  if (It == Ptrcasts.end()) {
    PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
    Ptrcasts[F] = PtrCastedI;
  } else {
    PtrCastedI = It->second;
  }
  I->replaceUsesOfWith(Op, PtrCastedI);
}
void SPIRVEmitIntrinsics::propagateElemType(
    Value *Op, Type *ElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    // ...
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    Instruction *UI = cast<Instruction>(U);
    // Keep already-validated users valid by casting Op back for them.
    if (/* ... */
        TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
  }
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  std::unordered_set<Value *> Visited;
  DenseMap<Function *, CallInst *> Ptrcasts;
  propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
                       std::move(Ptrcasts));
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
    std::unordered_set<Value *> &Visited,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  if (!Visited.insert(Op).second)
    return;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    // ...
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    Instruction *UI = cast<Instruction>(U);
    // ...
    if (/* ... */
        TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
  }
}
Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
                                      UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  Type *Ty = ValueTy;
  if (Operand) {
    // ...
    if (Type *NestedTy =
            deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
      Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(Ty));
    // ...
  }
  return Ty;
}

// Traverse the operand's users to deduce its element pointer type.
Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
  // ...
  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
        return Ty;
    }
  }
  return nullptr;
}

// Implements what is known in advance about intrinsics and builtin calls.
static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       // ...
       false) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
                                                   bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
}

void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                                             bool UnknownElemTypeI8) {
  if (isUntypedPointerTy(RefTy)) {
    if (!UnknownElemTypeI8)
      return;
    insertTodoType(Op);
  }
  Ty = RefTy;
}
bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
    GetElementPtrInst &GEP,
    const std::function<void(Type *PointedType, uint64_t Index)>
        &OnLiteralIndexing,
    const std::function<void(Type *PointedType, Value *Index)>
        &OnDynamicIndexing) {
  // ...
  Value *Src = getPointerRoot(GEP.getPointerOperand());
  Type *CurType = deduceElementType(Src, true);
  // ...
  // Dynamic index: only an array element type can be stepped through.
  OnDynamicIndexing(AT->getElementType(), Operand);
  return AT == nullptr;
  // ...
  // Literal index into an array: divide the byte offset by the element size.
  uint32_t EltTypeSize = DL.getTypeSizeInBits(AT->getElementType()) / 8;
  // ...
  CurType = AT->getElementType();
  OnLiteralIndexing(CurType, Index);
  // ...
  // Literal index into a struct: use the struct layout to locate the field
  // containing the remaining byte offset.
  uint32_t StructSize = DL.getTypeSizeInBits(ST) / 8;
  // ...
  const auto &STL = DL.getStructLayout(ST);
  unsigned Element = STL->getElementContainingOffset(Offset);
  Offset -= STL->getElementOffset(Element);
  CurType = ST->getElementType(Element);
  OnLiteralIndexing(CurType, Element);
  // ...
}
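// Aside: the byte-offset arithmetic used by walkLogicalAccessChain(), as a
// self-contained sketch (plain C++, no LLVM types; names hypothetical). The
// caller supplies field offsets the way DataLayout's StructLayout would.
#if 0
#include <cstdint>
#include <utility>
#include <vector>
// Struct case: find the field containing Offset and the remainder within it,
// the analogue of StructLayout::getElementContainingOffset().
inline std::pair<unsigned, uint64_t>
fieldForOffset(const std::vector<uint64_t> &FieldOffsets, uint64_t Offset) {
  unsigned Elt = 0;
  for (unsigned I = 0; I < FieldOffsets.size(); ++I)
    if (FieldOffsets[I] <= Offset)
      Elt = I; // offsets are sorted, so the last match contains Offset
  return {Elt, Offset - FieldOffsets[Elt]};
}
// Array case: the element index is the byte offset divided by element size.
inline std::pair<uint64_t, uint64_t> arrayIndexForOffset(uint64_t EltSize,
                                                         uint64_t Offset) {
  return {Offset / EltSize, Offset % EltSize};
}
#endif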
Instruction *
SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP) {
  // ...
  B.SetInsertPoint(&GEP);

  std::vector<Value *> Indices;
  Indices.push_back(ConstantInt::get(
      IntegerType::getInt32Ty(CurrF->getContext()), 0, false));
  walkLogicalAccessChain(
      GEP,
      [&Indices, &B](Type *EltType, uint64_t Index) {
        Indices.push_back(
            ConstantInt::get(B.getInt64Ty(), Index, false));
      },
      [&](Type *EltType, Value *Offset) {
        // Dynamic case: translate the byte offset into an element index.
        uint32_t EltTypeSize = DL.getTypeSizeInBits(EltType) / 8;
        Value *Index = B.CreateUDiv(
            Offset, ConstantInt::get(Offset->getType(), EltTypeSize,
                                     /*IsSigned=*/false));
        Indices.push_back(Index);
      });

  // ...
  Args.push_back(B.getInt1(GEP.isInBounds()));
  Args.push_back(GEP.getOperand(0));
  // ...
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &GEP, NewI);
  return NewI;
}
Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *GEP) {
  Type *CurType = GEP->getResultElementType();
  // ...
  bool Interrupted = walkLogicalAccessChain(
      *GEP, [&CurType](Type *EltType, uint64_t Index) { CurType = EltType; },
      [&CurType](Type *EltType, Value *Index) { CurType = EltType; });
  // If the walk had to stop early, fall back to the GEP's own result type.
  return Interrupted ? GEP->getResultElementType() : CurType;
}

Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *Ref) {
  // On logical SPIR-V targets an i8 GEP encodes a byte offset; recover the
  // indexed type by walking the access chain instead.
  if (Ref->getSourceElementType() ==
          IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV())
    return getGEPTypeLogical(Ref);

  Type *Ty = nullptr;
  // ...
  Ty = Ref->getSourceElementType();
  // ...
  Ty = Ref->getResultElementType();
  // ...
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
    bool IgnoreKnownType) {
  // ...
  // Return an already deduced type unless the caller asks to re-deduce it.
  if (!IgnoreKnownType)
    if (Type *KnownTy = GR->findDeducedElementType(I))
      return KnownTy;
  // Guard against cycles in the use-def web.
  if (!Visited.insert(I).second)
    return nullptr;

  Type *Ty = nullptr;
  // Step through the known ways to deduce the element type.
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = getGEPType(Ref);
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    Value *Op = Ref->getPointerOperand();
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy)
      KnownTy = Op->getType();
    if (Type *ElemTy = getPointeeType(KnownTy))
      maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
        UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
                                          UnknownElemTypeI8);
    maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getDestTy(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<CastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
                                   UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<FreezeInst>(I)) {
    Value *Op = Ref->getOperand(0);
    Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    // Pick the most frequently deduced type among the incoming values.
    Type *BestTy = nullptr;
    unsigned MaxN = 1;
    DenseMap<Type *, unsigned> PhiTys;
    for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
                                        UnknownElemTypeI8);
      if (!Ty)
        continue;
      auto It = PhiTys.try_emplace(Ty, 1);
      if (!It.second) {
        ++It.first->second;
        if (It.first->second > MaxN) {
          MaxN = It.first->second;
          BestTy = Ty;
        }
      }
    }
    if (BestTy)
      Ty = BestTy;
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
      if (Ty)
        break;
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    static StringMap<unsigned> ResTypeByArg = {
        // ...
        {"__spirv_GenericCastToPtr_ToGlobal", 0},
        {"__spirv_GenericCastToPtr_ToLocal", 0},
        {"__spirv_GenericCastToPtr_ToPrivate", 0},
        {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
    auto *II = dyn_cast<IntrinsicInst>(I);
    if (II && II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
      auto *HandleType = cast<TargetExtType>(CI->getArgOperand(0)->getType());
      if (HandleType->getTargetExtName() == "spirv.Image" ||
          HandleType->getTargetExtName() == "spirv.SignedImage") {
        for (User *U : II->users()) {
          // ...
        }
      } else if (HandleType->getTargetExtName() == "spirv.VulkanBuffer") {
        // This call is supposed to index into an array.
        Ty = HandleType->getTypeParameter(0);
        // ...
      }
      // ...
    } else if (II && II->getIntrinsicID() ==
                         Intrinsic::spv_generic_cast_to_ptr_explicit) {
      Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
                                   UnknownElemTypeI8);
    } else if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0)
        DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
      auto AsArgIt = ResTypeByArg.find(DemangledName);
      if (AsArgIt != ResTypeByArg.end())
        Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
                                     Visited, UnknownElemTypeI8);
      // ...
    }
  }

  // Remember the found relationship.
  if (Ty && !IgnoreKnownType) {
    // ...
  }

  return Ty;
}
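// Aside: an illustrative example of what the deduction above recovers. For
//
//   %p = alloca i32
//   %q = getelementptr inbounds i32, ptr %p, i64 1
//
// the element type of %p comes from the alloca (i32) and the element type of
// %q from the GEP's source element type (i32). When no case applies and
// UnknownElemTypeI8 is set, the caller falls back to i8 and records the value
// as a "todo" for later refinement.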
869 if (Ty && !IgnoreKnownType) {
880Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
881 bool UnknownElemTypeI8) {
882 std::unordered_set<Value *> Visited;
883 return deduceNestedTypeHelper(U,
U->getType(), Visited, UnknownElemTypeI8);
886Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
887 User *U,
Type *OrigTy, std::unordered_set<Value *> &Visited,
888 bool UnknownElemTypeI8) {
897 if (!Visited.insert(U).second)
903 for (
unsigned i = 0; i <
U->getNumOperands(); ++i) {
905 assert(
Op &&
"Operands should not be null.");
906 Type *OpTy =
Op->getType();
910 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
917 Change |= Ty != OpTy;
925 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
926 Type *OpTy = ArrTy->getElementType();
930 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
937 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
943 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
944 Type *OpTy = VecTy->getElementType();
948 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
955 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
Type *SPIRVEmitIntrinsics::deduceElementType(Value *I,
                                             bool UnknownElemTypeI8) {
  if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
    return Ty;
  if (!UnknownElemTypeI8)
    return nullptr;
  // Nothing could be deduced: fall back to i8 and mark for later refinement.
  insertTodoType(I);
  return IntegerType::getInt8Ty(I->getContext());
}
static Type *getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I,
                             Value *PointerOperand) {
  // ...
}

bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool &Incomplete) {
  Function *CalledF = CI->getCalledFunction();
  if (!CalledF)
    return false;
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CalledF->getName());
  if (DemangledName.length() > 0 &&
      !StringRef(DemangledName).starts_with("llvm.")) {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(*CalledF);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode == SPIRV::OpGroupAsyncCopy) {
      for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
        Value *Op = CI->getArgOperand(i);
        if (!isPointerTy(Op->getType()))
          continue;
        ++PtrCnt;
        if (Type *ElemTy = GR->findDeducedElementType(Op))
          KnownElemTy = ElemTy; // src will rewrite dst if both are defined
        Ops.push_back(std::make_pair(Op, i));
      }
    } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
      // ...
      Value *Op = CI->getArgOperand(0);
      // ...
      switch (Opcode) {
      case SPIRV::OpAtomicFAddEXT:
      case SPIRV::OpAtomicFMinEXT:
      case SPIRV::OpAtomicFMaxEXT:
      case SPIRV::OpAtomicLoad:
      case SPIRV::OpAtomicCompareExchangeWeak:
      case SPIRV::OpAtomicCompareExchange:
      case SPIRV::OpAtomicExchange:
      case SPIRV::OpAtomicIAdd:
      case SPIRV::OpAtomicISub:
      case SPIRV::OpAtomicOr:
      case SPIRV::OpAtomicXor:
      case SPIRV::OpAtomicAnd:
      case SPIRV::OpAtomicUMin:
      case SPIRV::OpAtomicUMax:
      case SPIRV::OpAtomicSMin:
      case SPIRV::OpAtomicSMax: {
        // ...
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      case SPIRV::OpAtomicStore: {
        // ...
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      default:
        break;
      }
    }
  }
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool IsPostprocessing) {
  Value *Op = CI->getCalledOperand();
  if (!Op || !isPointerTy(Op->getType()))
    return;
  Ops.push_back(std::make_pair(Op, std::numeric_limits<unsigned>::max()));
  FunctionType *FTy = CI->getFunctionType();
  bool IsNewFTy = false, IsIncomplete = false;
  SmallVector<Type *, 4> ArgTys;
  for (Value *Arg : CI->args()) {
    Type *ArgTy = Arg->getType();
    if (ArgTy->isPointerTy()) {
      if (Type *ElemTy = GR->findDeducedElementType(Arg)) {
        IsNewFTy = true;
        ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
        if (isTodoType(Arg))
          IsIncomplete = true;
      } else {
        IsIncomplete = true;
      }
    }
    ArgTys.push_back(ArgTy);
  }
  Type *RetTy = FTy->getReturnType();
  if (CI->getType()->isPointerTy()) {
    if (Type *ElemTy = GR->findDeducedElementType(CI)) {
      IsNewFTy = true;
      RetTy = getTypedPointerWrapper(ElemTy,
                                     getPointerAddressSpace(CI->getType()));
      if (isTodoType(CI))
        IsIncomplete = true;
    } else {
      IsIncomplete = true;
    }
  }
  if (!IsPostprocessing && IsIncomplete)
    insertTodoType(Op);
  KnownElemTy =
      IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
    Type *&KnownElemTy, Value *Op, Function *F) {
  KnownElemTy = GR->findDeducedElementType(F);
  if (KnownElemTy)
    return false;
  if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
    GR->addDeducedElementType(F, normalizeType(OpElemTy));
    // ...
    // Non-recursive update of types in the function's uses.
    DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
    for (User *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      // ...
      propagateElemType(CI, PrevElemTy, VisitedSubst);
    }
    // ...
    // Running deduction again may unblock rets that were incomplete before.
    for (Instruction *IncompleteRetI : *IncompleteRets)
      deduceOperandElementType(IncompleteRetI, nullptr, AskOps,
                               IsPostprocessing);
  } else if (IncompleteRets) {
    IncompleteRets->insert(I);
  }
  TypeValidated.insert(I);
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementType(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  bool Incomplete = false;
  // Look for known basic patterns of type inference.
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    KnownElemTy = GR->findDeducedElementType(I);
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    // ...
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    // ...
    KnownElemTy = Ref->getSourceElementType();
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 GetElementPtrInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    KnownElemTy = I->getType();
    // ...
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 LoadInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<StoreInst>(I)) {
    if (!(KnownElemTy =
              reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
      return;
    // ...
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 StoreInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicCmpXchgInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicRMWInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    // ...
    Value *Op = Ref->getReturnValue();
    if (!Op)
      return;
    if (deduceOperandElementTypeFunctionRet(I, IncompleteRets, AskOps,
                                            IsPostprocessing, KnownElemTy, Op,
                                            CurrF))
      return;
    Incomplete = isTodoType(CurrF);
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    // For a pointer comparison, propagate the element type known for one
    // operand to the other one.
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    // ...
    bool Incomplete0 = isTodoType(Op0);
    bool Incomplete1 = isTodoType(Op1);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
                        ? nullptr
                        : GR->findDeducedElementType(Op0);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Incomplete = Incomplete0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Incomplete = Incomplete1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  } else if (CallInst *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall())
      deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy, Incomplete);
    else if (HaveFunPtrs)
      deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
                                              IsPostprocessing);
  }

  // There is not enough info to deduce types, or everything is valid already.
  if (!KnownElemTy || Ops.size() == 0)
    return;

  // ...
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    // ...
    Type *AskTy = nullptr;
    CallInst *AskCI = nullptr;
    if (IsPostprocessing && AskOps) {
      AskTy = GR->findDeducedElementType(Op);
      AskCI = GR->findAssignPtrTypeInstr(Op);
      // ...
    }
    Type *Ty = AskTy ? AskTy : GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    // ...
    Type *OpTy = Op->getType();
    if (Op->hasUseList() &&
        (!Ty || AskTy || isUntypedPointerTy(Ty) || isTodoType(Op))) {
      // Update the deduced type and the assign-type intrinsic, then propagate
      // the change through the already-validated users.
      GR->addDeducedElementType(Op, normalizeType(KnownElemTy));
      if (!Incomplete)
        eraseTodoType(Op);
      else if (!IsPostprocessing)
        insertTodoType(Op);
      CallInst *AssignCI = AskCI ? AskCI : GR->findAssignPtrTypeInstr(Op);
      if (AssignCI == nullptr) {
        // ...
      } else {
        Type *PrevElemTy = GR->findDeducedElementType(Op);
        // ...
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Op)};
        propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
      }
    } else {
      eraseTodoType(Op);
      CallInst *PtrCastI =
          buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
      if (OpIt.second == std::numeric_limits<unsigned>::max())
        cast<CallInst>(I)->setCalledOperand(PtrCastI);
      else
        I->setOperand(OpIt.second, PtrCastI);
    }
  }
  TypeValidated.insert(I);
}
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      CallInst *AssignCI =
          B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()},
                            Args);
      GR->addAssignPtrTypeInstr(New, AssignCI);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  New->copyMetadata(*Old);
  Old->eraseFromParent();
}
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    bool BPrepared = false;
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      if (!BPrepared) {
        setInsertPointSkippingPhis(B, I);
        BPrepared = true;
      }
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool IsPhi = isa<PHINode>(I), BPrepared = false;
    // ...
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      if (auto *COp = dyn_cast<ConstantVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = COp->getType();
      }
      // ... (analogous cases for constant arrays, structs, and data
      //      sequentials; only data vectors keep their own result type)
      else if (auto *COp = dyn_cast<ConstantDataVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
      }
      if (AggrConst) {
        SmallVector<Value *> Args;
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &COp : AggrConst->operands())
            Args.push_back(COp);
        if (!BPrepared) {
          IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
                : B.SetInsertPoint(I);
          BPrepared = true;
        }
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        Worklist.push(CI);
        I->replaceUsesOfWith(Op, CI);
        KeepInst = true;
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}
static void createDecorationIntrinsic(Instruction *I, MDNode *Node,
                                      IRBuilder<> &B) {
  // ...
  B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                    /* ... the decoration node wrapped as metadata ... */);
}

static void createRoundingModeDecoration(Instruction *I,
                                         unsigned RoundingModeDeco,
                                         IRBuilder<> &B) {
  // Attach an FPRoundingMode decoration carrying the requested mode.
  // ...
  MDNode *RoundingModeNode = MDNode::get(
      Ctx,
      {ConstantAsMetadata::get(
           ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
       ConstantAsMetadata::get(ConstantInt::get(Int32Ty, RoundingModeDeco))});
  createDecorationIntrinsic(I, RoundingModeNode, B);
}

static void createSaturatedConversionDecoration(Instruction *I,
                                                IRBuilder<> &B) {
  // ...
  MDNode *SaturatedConversionNode =
      MDNode::get(Ctx, {ConstantAsMetadata::get(ConstantInt::get(
                           Int32Ty, SPIRV::Decoration::SaturatedConversion))});
  createDecorationIntrinsic(I, SaturatedConversionNode, B);
}

static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B) {
  // ...
  if (Fu->isIntrinsic()) {
    const unsigned IntrinsicId = Fu->getIntrinsicID();
    switch (IntrinsicId) {
    case Intrinsic::fptosi_sat:
    case Intrinsic::fptoui_sat:
      createSaturatedConversionDecoration(I, B);
      break;
    default:
      break;
    }
  }
}

// Inline assembly is carried to instruction selection via a dedicated
// intrinsic holding the constraint string and the call operands.
// ...
  MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
  // ...
  B.SetInsertPoint(&Call);
  B.CreateIntrinsic(Intrinsic::spv_inline_asm, {Args});
// ...
void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
                                          IRBuilder<> &B) {
  std::optional<RoundingMode> RM = FPI->getRoundingMode();
  if (!RM.has_value())
    return;
  unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
  switch (RM.value()) {
  default:
    break;
  case RoundingMode::NearestTiesToEven:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
    break;
  case RoundingMode::TowardNegative:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
    break;
  case RoundingMode::TowardPositive:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
    break;
  case RoundingMode::TowardZero:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
    break;
  case RoundingMode::Dynamic:
  case RoundingMode::NearestTiesToAway:
    // These two modes have no SPIR-V FPRoundingMode counterpart.
    break;
  }
  if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
    return;
  createRoundingModeDecoration(FPI, RoundingModeDeco, B);
}
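// Aside: the switch above, restated as a table (constrained-FP rounding mode
// -> SPIR-V FPRoundingMode decoration):
//
//   NearestTiesToEven -> RTE
//   TowardNegative    -> RTN
//   TowardPositive    -> RTP
//   TowardZero        -> RTZ
//   Dynamic, NearestTiesToAway -> no decoration emitted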
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  SmallVector<Value *, 4> Args;
  SmallVector<BasicBlock *> BBCases;
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    }
    // ...
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // Remove the switch to avoid its unwanted unwrap into branches, and insert
  // a temporary indirectbr to keep the CFG valid until IR translation.
  replaceAllUsesWith(&I, NewI);
  I.eraseFromParent();
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  if (I.getSourceElementType() == IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV()) {
    // i8 GEPs on logical targets are rebuilt as typed access chains.
    return buildLogicalAccessChainFromGEP(I);
  }

  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(I.isInBounds()));
  Args.push_back(I.getOperand(0));
  // ... (append the GEP indices)
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  Value *Source = I.getOperand(0);

  // Pointer-to-pointer bitcasts carry no element-type information here and
  // are handled later by insertPtrCastOrAssignTypeInstr; just drop them.
  if (isPointerTy(I.getType())) {
    // ...
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
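// Aside: schematically, visitBitCastInst() turns
//
//   %b = bitcast <4 x i16> %v to <2 x i32>
//
// into a call to the SPIR-V bitcast intrinsic of roughly this shape
// (mangled suffixes illustrative):
//
//   %b = call <2 x i32> @llvm.spv.bitcast.v2i32.v4i16(<4 x i16> %v)
//
// which instruction selection later lowers to OpBitcast.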
void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  Type *VTy = V->getType();
  // ...
  if (Type *ElemTy = getPointeeType(VTy))
    if (ElemTy != AssignedType)
      report_fatal_error("Unexpected pointer element type!");
  // ...
  if (CurrentType == AssignedType)
    return;
  // Builtin types cannot be redeclared or casted.
  if (CurrentType->isTargetExtTy())
    report_fatal_error(Twine("Type mismatch ") +
                           /* ... type names ... */ " for value " + V->getName(),
                       false);
  // ...
}
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  TypeValidated.insert(I);
  // ...
  Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
  if (PointerElemTy == ExpectedElementType ||
      isEquivalentTypes(PointerElemTy, ExpectedElementType))
    return;

  // ...
  MetadataAsValue *VMD = buildMD(ExpectedElementVal);
  // ...
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit a new spv_ptrcast if an equivalent one (or an equivalent
  // spv_assign_ptr_type) already targets this pointer.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // Some spv_ptrcast/spv_assign_ptr_type already targets this pointer.
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        /* ... address spaces differ ... */ false)
      continue;

    // The found intrinsic is an equivalent pointer cast; reuse it only if it
    // is a ptrcast in the same basic block.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      continue;
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  if (FirstPtrCastOrAssignPtrType) {
    // For the first cast, emit spv_assign_ptr_type instead of spv_ptrcast.
    GR->buildAssignPtr(B, ExpectedElementType, Pointer);
    return;
  } else if (isTodoType(Pointer)) {
    eraseTodoType(Pointer);
    // If existing type info is incomplete, update the spv_assign_ptr_type
    // arguments and propagate the change.
    // ...
    DenseSet<std::pair<Value *, Value *>> VisitedSubst{
        std::make_pair(I, Pointer)};
    // ...
    propagateElemType(Pointer, PrevElemTy, VisitedSubst);
    // ...
  }

  // Emit an spv_ptrcast and set up its pointee type.
  // ...
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
  GR->buildAssignPtr(B, ExpectedElementType, PtrCastI);
}
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions.
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    Value *Pointer = SI->getPointerOperand();
    // Stores to i8 kernel arguments keep an i8 expected element type.
    if (IsKernelArgInt8(CurrF, SI)) {
      replacePointerOperandWithPtrCast(
          I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
          0, B);
    }
    // ...
    Value *Op = SI->getValueOperand();
    Type *OpTy = Op->getType();
    // ...
    if (OpTy == Op->getType())
      OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
    return;
  }
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Pointer = LI->getPointerOperand();
    Type *OpTy = LI->getType();
    // ...
    Type *NewOpTy = OpTy;
    OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
    if (OpTy == NewOpTy)
      insertTodoType(Pointer);
    // ...
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    return;
  }
  if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    Value *Pointer = GEPI->getPointerOperand();
    Type *OpTy = nullptr;
    // The GEP source element type may have been rewritten to i8; try to
    // recover the original type from the root pointer in that case.
    bool IsRewrittenGEP =
        GEPI->getSourceElementType() == IntegerType::getInt8Ty(I->getContext());
    if (IsRewrittenGEP && TM->getSubtargetImpl()->isLogicalSPIRV()) {
      Value *Src = getPointerRoot(Pointer);
      OpTy = GR->findDeducedElementType(Src);
    }
    // In all other cases, fall back to the GEP's own source element type.
    if (!OpTy)
      OpTy = GEPI->getSourceElementType();
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    if (isNestedPointer(OpTy))
      insertTodoType(Pointer);
    return;
  }

  // Handle calls to builtins (non-intrinsics).
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
      !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
    return;

  // Collect information about the formal parameter types.
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    // ...
    Type *ElemTy = GR->findDeducedElementType(CalledArg);
    if (!ElemTy) {
      for (User *U : CalledArg->users()) {
        if (Instruction *Inst = dyn_cast<Instruction>(U)) {
          if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
            break;
        }
      }
    }
    HaveTypes |= ElemTy != nullptr;
    CalledArgTys.push_back(ElemTy);
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isPointerTy(ArgOperand->getType()))
      continue;
    // ...
    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType || ExpectedType->isVoidTy())
      continue;
    // ...
    replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperand()->getType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  Value *AggregateOp = I.getAggregateOperand();
  // ...
  Args.push_back(AggregateOp);
  Args.push_back(I.getInsertedValueOperand());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  if (I.getAggregateOperand()->getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  // ... (the aggregate operand, then the literal indices)
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  // ... (derive the memory-operand flags for this load)
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
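// Aside: for an aggregate load the rewrite above produces, roughly,
//
//   %r = load %struct.S, ptr %p, align 4
//   ==>
//   %r = call %struct.S @llvm.spv.load.p0(ptr %p, i16 <flags>, i8 4)
//
// with the alignment and the memory-operand flags folded into immediate
// operands (exact shapes illustrative).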
Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  // ... (derive the memory-operand flags for this store)
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    // ...
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize
          ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                              {PtrTy, ArraySize->getType()},
                              {ArraySize, B.getInt8(I.getAlign().value())})
          : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
                              {B.getInt8(I.getAlign().value())});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
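// Aside: scalar allocas become @llvm.spv.alloca, while array allocations need
// SPV_INTEL_variable_length_array and become @llvm.spv.alloca.array carrying
// the dynamic size, e.g. (shapes illustrative):
//
//   %p = alloca i32          ==>  %p = call ptr @llvm.spv.alloca(i8 4)
//   %q = alloca i32, i64 %n  ==>  %q = call ptr @llvm.spv.alloca.array(i64 %n, i8 4)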
Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
  return &I;
}
void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip special artificial variables.
  static const StringSet<> ArtificialGlobals{"llvm.global.annotations",
                                             "llvm.compiler.used"};
  if (ArtificialGlobals.contains(GV.getName()))
    return;
  // ...
  deduceElementTypeHelper(&GV, false);
  if (hasInitializer(&GV)) {
    Constant *Init = GV.getInitializer();
    // ...
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       /* ... types and operands ... */);
    InitInst->setArgOperand(1, Init);
  }
  if (!hasInitializer(&GV) && GV.getNumUses() == 0)
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B,
                                                   bool UnknownElemTypeI8) {
  // ...
  if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
    GR->buildAssignPtr(B, ElemTy, I);
    return false;
  }
  return true;
}

void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  // Functions whose result type is known in advance.
  static StringMap<unsigned> ResTypeWellKnown = {
      {"async_work_group_copy", WellKnownTypes::Event},
      {"async_work_group_strided_copy", WellKnownTypes::Event},
      {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};

  // ...
  bool IsKnown = false;
  if (auto *CI = dyn_cast<CallInst>(I)) {
    // ...
    std::string DemangledName =
        getOclOrSpirvBuiltinDemangledName(CalledF->getName());
    // ...
    if (DemangledName.length() > 0)
      DemangledName =
          SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
    auto ResIt = ResTypeWellKnown.find(DemangledName);
    if (ResIt != ResTypeWellKnown.end()) {
      IsKnown = true;
      // ...
      switch (ResIt->second) {
      case WellKnownTypes::Event:
        GR->buildAssignType(
            B, TargetExtType::get(I->getContext(), "spirv.Event"), I);
        break;
      }
    }
    // Check if floating-point rounding or saturation info is present.
    switch (DecorationId) {
    default:
      break;
    case FPDecorationId::SAT:
      createSaturatedConversionDecoration(CI, B);
      break;
    case FPDecorationId::RTE:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
      break;
    case FPDecorationId::RTZ:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
      break;
    case FPDecorationId::RTP:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
      break;
    case FPDecorationId::RTN:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
      break;
    }
  }

  Type *Ty = I->getType();
  if (!IsKnown && !Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
    // ...
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    // ...
    GR->buildAssignType(B, TypeToAssign, I);
  }
  for (const auto &Op : I->operands()) {
    // Constants used as operands also need an assigned type.
    // ...
    Type *OpTy = Op->getType();
    if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
      CallInst *AssignCI =
          buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                          UndefValue::get(B.getInt32Ty()), {}, B);
      GR->addAssignPtrTypeInstr(Op, AssignCI);
    } else if (!isa<Instruction>(Op)) {
      Type *OpTy = Op->getType();
      // ...
      CallInst *AssignCI =
          buildIntrWithMD(Intrinsic::spv_assign_type, {OpTy}, Op,
                          getNormalizedPoisonValue(OpTy), {}, B);
      GR->addAssignPtrTypeInstr(Op, AssignCI);
    }
  }
}
bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
    Instruction *Inst) {
  // ...
  if (!STI->canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
    return false;
  // Add aliasing decorations to the internal load/store intrinsics and to
  // atomic builtins that produce a result ID to attach the decoration to.
  // ...
  switch (Fun->getIntrinsicID()) {
  case Intrinsic::spv_load:
  case Intrinsic::spv_store:
    return true;
  default:
    return false;
  }
  // ...
  const std::string Prefix = "__spirv_Atomic";
  const bool IsAtomic = Name.find(Prefix) == 0;
  // ...
}
void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    // ...
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
  // Lower alias.scope/noalias metadata to INTEL aliasing decorations.
  auto processMemAliasingDecoration = [&](unsigned Kind) {
    if (MDNode *AliasListMD = I->getMetadata(Kind)) {
      if (shouldTryToAddMemAliasingDecoration(I)) {
        uint32_t Dec = Kind == LLVMContext::MD_alias_scope
                           ? SPIRV::Decoration::AliasScopeINTEL
                           : SPIRV::Decoration::NoAliasINTEL;
        SmallVector<Value *, 3> Args = {
            I, ConstantInt::get(B.getInt32Ty(), Dec),
            MetadataAsValue::get(I->getContext(), AliasListMD)};
        // ...
        B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
                          {I->getType()}, {Args});
      }
    }
  };
  processMemAliasingDecoration(LLVMContext::MD_alias_scope);
  processMemAliasingDecoration(LLVMContext::MD_noalias);
  // fpmath metadata needs SPV_INTEL_fp_max_error.
  if (MDNode *MD = I->getMetadata(LLVMContext::MD_fpmath)) {
    // ...
    bool AllowFPMaxError =
        STI->canUseExtension(SPIRV::Extension::SPV_INTEL_fp_max_error);
    if (!AllowFPMaxError)
      return;
    // ...
    B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
                      {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
}

static SPIRV::FPFastMathDefaultInfoVector &getOrCreateFPFastMathDefaultInfoVec(
    const Module &M,
    DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
        &FPFastMathDefaultInfoMap,
    Function *F) {
  auto it = FPFastMathDefaultInfoMap.find(F);
  if (it != FPFastMathDefaultInfoMap.end())
    return it->second;
  // Otherwise create the default-initialized vector for half/float/double.
  SPIRV::FPFastMathDefaultInfoVector FPFastMathDefaultInfoVec;
  FPFastMathDefaultInfoVec.emplace_back(Type::getHalfTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getFloatTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getDoubleTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  return FPFastMathDefaultInfoMap[F] = std::move(FPFastMathDefaultInfoVec);
}

static SPIRV::FPFastMathDefaultInfo &getFPFastMathDefaultInfo(
    SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec,
    const Type *Ty) {
  size_t BitWidth = Ty->getScalarSizeInBits();
  // ...
  assert(Index >= 0 && Index < 3 &&
         "Expected FPFastMathDefaultInfo for half, float, or double");
  assert(FPFastMathDefaultInfoVec.size() == 3 &&
         "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
  return FPFastMathDefaultInfoVec[Index];
}
void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(Module &M) {
  // ...
  if (!ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
    return;

  // The goal is to make the fast-math masks available as internal i32
  // constant globals that the execution-mode lowering can reference.
  auto Node = M.getNamedMetadata("spirv.ExecutionMode");
  if (!Node) {
    if (!M.getNamedMetadata("opencl.enable.FP_CONTRACT")) {
      // FP_CONTRACT is disabled: emit a zero mask.
      // ...
      Constant *InitValue =
          ConstantInt::get(Type::getInt32Ty(M.getContext()), 0);
      // ...
      [[maybe_unused]] GlobalVariable *GV =
          new GlobalVariable(M,
                             Type::getInt32Ty(M.getContext()),
                             /* ... constant, internal linkage, InitValue ... */);
      // ...
    }
    return;
  }

  // Group the FP-related execution modes by function.
  DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
      FPFastMathDefaultInfoMap;

  for (unsigned i = 0; i < Node->getNumOperands(); i++) {
    // ...
    if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
      // assert: "Expected 4 operands for FPFastMathDefault"
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      SPIRV::FPFastMathDefaultInfo &Info =
          getFPFastMathDefaultInfo(FPFastMathDefaultInfoVec, T);
      // ...
      Info.FPFastMathDefault = true;
    } else if (EM == SPIRV::ExecutionMode::ContractionOff) {
      // assert: "Expected no operands for ContractionOff"
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      for (SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
        Info.ContractionOff = true;
      }
    } else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
      // assert: "Expected 1 operand for SignedZeroInfNanPreserve"
      unsigned TargetWidth =
          /* ... the bit width named by the execution mode ... */;
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      // ...
      assert(Index >= 0 && Index < 3 &&
             "Expected FPFastMathDefaultInfo for half, float, or double");
      assert(FPFastMathDefaultInfoVec.size() == 3 &&
             "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
      FPFastMathDefaultInfoVec[Index].SignedZeroInfNanPreserve = true;
    }
  }

  // Emit one i32 constant global per distinct flags value, validating that
  // the requested modes do not contradict each other.
  std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
  for (auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
    if (FPFastMathDefaultInfoVec.empty())
      continue;
    for (const SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
      assert(Info.Ty && "Expected target type for FPFastMathDefaultInfo");
      // ...
      if (Flags == SPIRV::FPFastMathMode::None && !Info.ContractionOff &&
          !Info.SignedZeroInfNanPreserve && !Info.FPFastMathDefault)
        continue;
      if (Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
        report_fatal_error("Conflicting FPFastMathFlags: ContractionOff "
                           "and AllowContract");
      if (Info.SignedZeroInfNanPreserve &&
          (Flags &
           (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
            SPIRV::FPFastMathMode::NSZ))) {
        if (Info.FPFastMathDefault)
          report_fatal_error("Conflicting FPFastMathFlags: "
                             "SignedZeroInfNanPreserve but at least one of "
                             "NotNaN/NotInf/NSZ is enabled.");
      }
      if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
          !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
            (Flags & SPIRV::FPFastMathMode::AllowContract))) {
        report_fatal_error("Conflicting FPFastMathFlags: "
                           "AllowTransform requires AllowReassoc and "
                           "AllowContract to be set.");
      }
      // One global per unique flags value is enough.
      auto it = GlobalVars.find(Flags);
      GlobalVariable *GV = nullptr;
      if (it != GlobalVars.end()) {
        GV = it->second;
      } else {
        Constant *InitValue =
            ConstantInt::get(Type::getInt32Ty(M.getContext()), Flags);
        GV = new GlobalVariable(M,
                                Type::getInt32Ty(M.getContext()),
                                /* ... constant, internal linkage, InitValue ... */);
        GlobalVars[Flags] = GV;
      }
      // ...
    }
  }
}
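// Aside: the net effect of the loop above is one internal i32 constant global
// per distinct fast-math mask, referenced later when the FPFastMathDefault
// execution mode is emitted under SPV_KHR_float_controls2. Roughly (name and
// value illustrative):
//
//   @<anon> = internal constant i32 3   ; mask of SPIRV::FPFastMathMode bits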
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  bool IsConstComposite =
      II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
  if (IsConstComposite && TrackConstants) {
    setInsertPointAfterDef(B, I);
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    replaceAllUsesWith(I, NewOp, false);
    NewOp->setArgOperand(0, I);
  }
  bool IsPhi = isa<PHINode>(I), BPrepared = false;
  for (const auto &Op : I->operands()) {
    // Only constant operands need tracking here.
    // ...
    unsigned OpNo = Op.getOperandNo();
    // Skip the GEP base operand and immediate-argument intrinsic operands.
    if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
               (II->paramHasAttr(OpNo, Attribute::ImmArg))))
      continue;
    if (!BPrepared) {
      IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
            : B.SetInsertPoint(I);
      BPrepared = true;
    }
    Type *OpTy = Op->getType();
    // ...
    Value *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
    // ...
    if (!IsConstComposite && isPointerTy(OpTy) && OpElemTy != nullptr &&
        OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      SmallVector<Value *, 2> Args = {
          NewOp, buildMD(getNormalizedPoisonValue(OpElemTy)),
          B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCasted =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      // ...
      NewOp = PtrCasted;
    }
    I->setOperand(OpNo, NewOp);
  }
  if (Named.insert(I).second)
    emitAssignName(I, B);
}
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // Guard against cycles.
  if (!FVisited.insert(F).second)
    return nullptr;

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // Search in the function's call sites.
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;
    Value *OpArg = CI->getArgOperand(OpIdx);
    if (!isPointerTy(OpArg->getType()))
      continue;
    // Maybe the operand's element type is already known.
    if (Type *KnownTy = GR->findDeducedElementType(OpArg))
      return KnownTy;
    // Try to deduce the operand's element type.
    Visited.clear();
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
      return Ty;
    // Search in the actual operand's users.
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      Visited.clear();
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
        return Ty;
    }
    // If the operand is itself a formal parameter of the caller, remember it
    // for the recursive step.
    Function *OuterF = CI->getParent()->getParent();
    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // Search in the callers of the parent function.
  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}
void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  // ...
  for (User *U : F->users()) {
    // ...
  }
  // For function-pointer arguments, deduce the element type from direct calls
  // that use the argument as a callee.
  for (User *U : Arg->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (CI && !CI->isIndirectCall() &&
        CI->getParent()->getParent() == CurrF) {
      SmallVector<std::pair<Value *, unsigned>> Ops;
      deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
      // ...
    }
  }
  // ...
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    // ...
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Arg)) {
        DenseSet<std::pair<Value *, Value *>> VisitedSubst;
        GR->updateAssignType(AssignCI, Arg, getNormalizedPoisonValue(ElemTy));
        propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
                          VisitedSubst);
      } else {
        GR->buildAssignPtr(B, ElemTy, Arg);
      }
    }
  }
}

static FunctionType *getFunctionPointerElemType(Function *F,
                                                SPIRVGlobalRegistry *GR) {
  FunctionType *FTy = F->getFunctionType();
  bool IsNewFTy = false;
  // ...
}
bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
  SmallVector<Function *> Worklist;
  for (auto &F : M) {
    if (F.isIntrinsic())
      continue;
    if (F.isDeclaration()) {
      // A declaration used other than as a direct callee is taken by pointer.
      for (User *U : F.users()) {
        // ...
      }
    } else {
      // ...
      for (User *U : F.users()) {
        IntrinsicInst *II = dyn_cast<IntrinsicInst>(U);
        if (!II || II->arg_size() != 3 || II->getOperand(0) != &F)
          continue;
        if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
            II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
          // ...
          break;
        }
      }
    }
  }
  if (Worklist.empty())
    return false;

  // Emit an internal service function that "calls" every function taken by
  // pointer, so that they survive into instruction selection.
  std::string ServiceFunName = SPIRV_BACKEND_SERVICE_FUN_NAME;
  if (!getVacantFunctionName(M, ServiceFunName))
    report_fatal_error(
        "cannot allocate a name for the internal service function");
  LLVMContext &Ctx = M.getContext();
  // ...
  for (Function *F : Worklist) {
    SmallVector<Value *> Args;
    for (const auto &Arg : F->args())
      Args.push_back(getNormalizedPoisonValue(Arg.getType()));
    IRB.CreateCall(F, Args);
  }
  IRB.CreateRetVoid();
  return true;
}
void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  for (auto It : FDeclPtrTys) {
    Function *F = It.first;
    for (auto *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      // ...
      for (auto [Idx, ElemTy] : It.second) {
        Value *Param = CI->getArgOperand(Idx);
        // ...
        if (Argument *Arg = dyn_cast<Argument>(Param)) {
          B.SetInsertPointPastAllocas(Arg->getParent());
          // ...
          GR->buildAssignPtr(B, ElemTy, Arg);
        } else if (isa<Instruction>(Param)) {
          replaceUsesOfWithSpvPtrcast(Param, normalizeType(ElemTy), CI,
                                      Ptrcasts);
        } else {
          // Otherwise, assign the type at the caller's entry block.
          B.SetInsertPoint(CI->getParent()
                               ->getParent()
                               ->getEntryBlock()
                               .getFirstNonPHIOrDbgOrAlloca());
          // ...
        }
      }
    }
  }
}
GetElementPtrInst *
SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP) {
  // getelementptr [0 x T], ptr %p, i64 0, <indices> is equivalent to
  // getelementptr T, ptr %p, <indices>.
  // ...
  Type *SrcTy = GEP->getSourceElementType();
  SmallVector<Value *, 8> Indices(GEP->indices());
  auto *ArrTy = dyn_cast<ArrayType>(SrcTy);
  if (ArrTy && ArrTy->getNumElements() == 0 &&
      match(Indices[0], m_Zero())) {
    IRBuilder<> Builder(GEP);
    Indices.erase(Indices.begin());
    SrcTy = ArrTy->getElementType();
    Value *NewGEP = Builder.CreateGEP(SrcTy, GEP->getPointerOperand(), Indices,
                                      "", GEP->getNoWrapFlags());
    return cast<GetElementPtrInst>(NewGEP);
  }
  return nullptr;
}
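// Aside: an illustrative before/after for the simplification above:
//
//   %g = getelementptr [0 x i32], ptr %p, i64 0, i64 %i
//   ==>
//   %g = getelementptr i32, ptr %p, i64 %i
//
// Dropping the zero-length array level together with the leading zero index
// leaves the indexing behavior unchanged.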
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();
  // ...
  HaveFunPtrs =
      ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);

  CurrF = &Func;
  IRBuilder<> B(Func.getContext());
  AggrConsts.clear();
  AggrConstTypes.clear();
  AggrStores.clear();

  // Fix GEP result types ahead of inference, simplifying zero-length-array
  // GEPs along the way.
  SmallPtrSet<Instruction *, 4> DeadInsts;
  for (auto &I : instructions(Func)) {
    auto *Ref = dyn_cast<GetElementPtrInst>(&I);
    if (!Ref || GR->findDeducedElementType(Ref))
      continue;
    GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(Ref);
    if (NewGEP) {
      Ref->replaceAllUsesWith(NewGEP);
      DeadInsts.insert(Ref);
      Ref = NewGEP;
    }
    if (Type *GepTy = getGEPType(Ref))
      GR->addDeducedElementType(Ref, normalizeType(GepTy));
  }
  for (auto *I : DeadInsts) {
    assert(I->use_empty() && "Dead instruction should not have any uses left");
    I->eraseFromParent();
  }

  processParamTypesByFunHeader(CurrF, B);

  // A store's value-operand type can change during the transformations below,
  // so remember which stores are aggregate stores up front.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  SmallVector<Instruction *> Worklist(
      llvm::make_pointer_range(instructions(Func)));

  applyDemangledPtrArgTypes(B);

  // Forward pass: use operands to deduce each instruction's result type.
  for (auto &I : Worklist) {
    // Don't emit intrinsics for convergence intrinsics.
    if (isConvergenceIntrinsic(I))
      continue;

    bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
    // If Postpone is true, the pointee type can't be decided yet.
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
    insertSpirvDecorations(I, B);
    // If the instruction still needs a pointee type, force it to i8.
    if (Postpone && !GR->findAssignPtrTypeInstr(I))
      insertAssignPtrTypeIntrs(I, B, true);

    if (auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(I))
      useRoundingMode(FPI, B);
  }

  // Backward pass: use instruction results to refine or cast operands.
  SmallPtrSet<Instruction *, 4> IncompleteRets;
  for (auto &I : llvm::reverse(instructions(Func)))
    deduceOperandElementType(&I, &IncompleteRets);

  // Forward pass for PHIs only: their operands may not precede them.
  for (BasicBlock &BB : Func)
    for (PHINode &Phi : BB.phis())
      if (isPointerTy(Phi.getType()))
        deduceOperandElementType(&Phi, nullptr);

  for (auto *I : Worklist) {
    TrackConstants = true;
    // ...
    // Visitors return the instruction to keep processing, or nullptr.
    I = visit(*I);
    if (!I)
      continue;

    // Don't emit intrinsics for convergence operations.
    if (isConvergenceIntrinsic(I))
      continue;

    processInstrAfterVisit(I, B);
  }

  return true;
}
bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
  if (!GR || TodoTypeSz == 0)
    return false;

  unsigned SzTodo = TodoTypeSz;
  DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
  for (auto [Op, Enabled] : TodoType) {
    // ...
    CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy || !AssignCI)
      continue;
    // Try to improve the deduced type now that all functions are processed.
    if (auto *CI = dyn_cast<Instruction>(Op)) {
      CurrF = CI->getParent()->getParent();
      std::unordered_set<Value *> Visited;
      if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
        if (ElemTy != KnownTy) {
          DenseSet<std::pair<Value *, Value *>> VisitedSubst;
          propagateElemType(CI, ElemTy, VisitedSubst);
          eraseTodoType(Op);
          continue;
        }
      }
    }
    // Otherwise, schedule the op's users for another deduction attempt.
    if (Op->hasUseList()) {
      for (User *U : Op->users()) {
        Instruction *Inst = dyn_cast<Instruction>(U);
        if (Inst && !isa<IntrinsicInst>(Inst))
          ToProcess[Inst].insert(Op);
      }
    }
  }
  if (TodoTypeSz == 0)
    return true;

  for (auto &F : M) {
    CurrF = &F;
    SmallPtrSet<Instruction *, 4> IncompleteRets;
    for (auto &I : llvm::reverse(instructions(F))) {
      auto It = ToProcess.find(&I);
      if (It == ToProcess.end())
        continue;
      It->second.remove_if([this](Value *V) { return !isTodoType(V); });
      if (It->second.size() == 0)
        continue;
      deduceOperandElementType(&I, &IncompleteRets, &It->second, true);
      if (TodoTypeSz == 0)
        return true;
    }
  }

  return SzTodo > TodoTypeSz;
}
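// Aside: postprocessTypes() is a fixed-point refinement over the values still
// marked "todo". A sketch of the idiom (illustrative, not the pass's code):
//
//   unsigned Before = TodoTypeSz;
//   for each function, bottom-up over its instructions:
//     deduceOperandElementType(...);   // may erase todo entries
//     if (TodoTypeSz == 0)
//       break;                         // everything resolved, stop early
//   return Before > TodoTypeSz;       // report whether progress was made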
void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
  for (auto &F : M) {
    if (!F.isDeclaration() || F.isIntrinsic())
      continue;
    // Get the demangled name.
    std::string DemangledName = getOclOrSpirvBuiltinDemangledName(F.getName());
    if (DemangledName.empty())
      continue;
    // Allow only the OpGroupAsyncCopy use case at the moment.
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode != SPIRV::OpGroupAsyncCopy)
      continue;
    // Find the pointer arguments.
    SmallVector<unsigned> Idxs;
    // ...
    // Parse the builtin's type string.
    LLVMContext &Ctx = F.getContext();
    SmallVector<StringRef, 10> TypeStrs;
    SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
    if (!TypeStrs.size())
      continue;
    // Record the element type for each pointer argument.
    for (unsigned Idx : Idxs) {
      if (Idx >= TypeStrs.size())
        continue;
      if (Type *ElemTy =
              SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
        FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
    }
  }
}
bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  parseFunDeclarations(M);
  insertConstantsForFPFastMathDefault(M);
  // ...
  for (auto &F : M)
    Changed |= runOnFunction(F);

  // Specify function parameter types after all functions were processed.
  for (auto &F : M) {
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      CurrF = &F;
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  CanTodoType = false;
  Changed |= postprocessTypes(M);

  if (HaveFunPtrs)
    Changed |= processFunctionPointers(M);

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}