23#include "llvm/IR/IntrinsicsSPIRV.h"
29#include <unordered_set>
52#define GET_BuiltinGroup_DECL
53#include "SPIRVGenTables.inc"
58class SPIRVEmitIntrinsics
60 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
61 SPIRVTargetMachine *TM =
nullptr;
62 SPIRVGlobalRegistry *GR =
nullptr;
64 bool TrackConstants =
true;
65 bool HaveFunPtrs =
false;
66 DenseMap<Instruction *, Constant *> AggrConsts;
67 DenseMap<Instruction *, Type *> AggrConstTypes;
68 DenseSet<Instruction *> AggrStores;
69 std::unordered_set<Value *> Named;
72 DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;
75 bool CanTodoType =
true;
76 unsigned TodoTypeSz = 0;
77 DenseMap<Value *, bool> TodoType;
78 void insertTodoType(
Value *
Op) {
81 auto It = TodoType.try_emplace(
Op,
true);
87 auto It = TodoType.find(
Op);
88 if (It != TodoType.end() && It->second) {
96 auto It = TodoType.find(
Op);
97 return It != TodoType.end() && It->second;
101 std::unordered_set<Instruction *> TypeValidated;
104 enum WellKnownTypes { Event };
107 Type *deduceElementType(
Value *
I,
bool UnknownElemTypeI8);
108 Type *deduceElementTypeHelper(
Value *
I,
bool UnknownElemTypeI8);
109 Type *deduceElementTypeHelper(
Value *
I, std::unordered_set<Value *> &Visited,
110 bool UnknownElemTypeI8,
111 bool IgnoreKnownType =
false);
112 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
113 bool UnknownElemTypeI8);
114 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
115 std::unordered_set<Value *> &Visited,
116 bool UnknownElemTypeI8);
118 std::unordered_set<Value *> &Visited,
119 bool UnknownElemTypeI8);
121 bool UnknownElemTypeI8);
124 Type *deduceNestedTypeHelper(
User *
U,
bool UnknownElemTypeI8);
126 std::unordered_set<Value *> &Visited,
127 bool UnknownElemTypeI8);
131 SmallPtrSet<Instruction *, 4> *IncompleteRets,
132 const SmallPtrSet<Value *, 4> *AskOps =
nullptr,
133 bool IsPostprocessing =
false);
138 Type *reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
139 bool IsPostprocessing);
144 bool UnknownElemTypeI8);
146 void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType,
Value *
V,
149 Type *ExpectedElementType,
150 unsigned OperandToReplace,
153 bool shouldTryToAddMemAliasingDecoration(
Instruction *Inst);
155 void processGlobalValue(GlobalVariable &GV,
IRBuilder<> &
B);
160 std::unordered_set<Function *> &FVisited);
162 bool deduceOperandElementTypeCalledFunction(
164 Type *&KnownElemTy,
bool &Incomplete);
165 void deduceOperandElementTypeFunctionPointer(
167 Type *&KnownElemTy,
bool IsPostprocessing);
168 bool deduceOperandElementTypeFunctionRet(
169 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
170 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing,
175 DenseMap<Function *, CallInst *> Ptrcasts);
177 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
180 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
181 void propagateElemTypeRec(
Value *
Op,
Type *PtrElemTy,
Type *CastElemTy,
182 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
183 std::unordered_set<Value *> &Visited,
184 DenseMap<Function *, CallInst *> Ptrcasts);
192 GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *
GEP);
195 bool postprocessTypes(
Module &
M);
196 bool processFunctionPointers(
Module &
M);
197 void parseFunDeclarations(
Module &
M);
199 void useRoundingMode(ConstrainedFPIntrinsic *FPI,
IRBuilder<> &
B);
215 bool walkLogicalAccessChain(
216 GetElementPtrInst &
GEP,
217 const std::function<
void(
Type *PointedType, uint64_t
Index)>
226 Type *getGEPType(GetElementPtrInst *
GEP);
233 Type *getGEPTypeLogical(GetElementPtrInst *
GEP);
235 Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &
GEP);
239 SPIRVEmitIntrinsics(SPIRVTargetMachine *TM =
nullptr)
240 : ModulePass(ID), TM(TM) {}
243 Instruction *visitGetElementPtrInst(GetElementPtrInst &
I);
245 Instruction *visitInsertElementInst(InsertElementInst &
I);
246 Instruction *visitExtractElementInst(ExtractElementInst &
I);
248 Instruction *visitExtractValueInst(ExtractValueInst &
I);
252 Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &
I);
256 StringRef getPassName()
const override {
return "SPIRV emit intrinsics"; }
258 bool runOnModule(
Module &
M)
override;
260 void getAnalysisUsage(AnalysisUsage &AU)
const override {
261 ModulePass::getAnalysisUsage(AU);
270 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
271 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
272 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
275bool expectIgnoredInIRTranslation(
const Instruction *
I) {
279 switch (
II->getIntrinsicID()) {
280 case Intrinsic::invariant_start:
281 case Intrinsic::spv_resource_handlefrombinding:
282 case Intrinsic::spv_resource_getpointer:
292 if (
II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
293 Value *V =
II->getArgOperand(0);
294 return getPointerRoot(V);
302char SPIRVEmitIntrinsics::ID = 0;
325 B.SetInsertPoint(
I->getParent()->getFirstNonPHIOrDbgOrAlloca());
331 B.SetCurrentDebugLocation(
I->getDebugLoc());
332 if (
I->getType()->isVoidTy())
333 B.SetInsertPoint(
I->getNextNode());
335 B.SetInsertPoint(*
I->getInsertionPointAfterDef());
340 switch (Intr->getIntrinsicID()) {
341 case Intrinsic::invariant_start:
342 case Intrinsic::invariant_end:
350 if (
I->getType()->isTokenTy())
352 "does not support token type",
357 if (!
I->hasName() ||
I->getType()->isAggregateType() ||
358 expectIgnoredInIRTranslation(
I))
363 std::vector<Value *> Args = {
366 B.CreateIntrinsic(Intrinsic::spv_assign_name, {
I->getType()}, Args);
// NOTE(review): fragment of SPIRVEmitIntrinsics::replaceAllUsesWith(Src, Dest, ...).
// This extraction is damaged: the leading digits ("369", "373", "376") are
// original-source line numbers fused into the text, and the numbering gaps show
// that interior lines (orig 370-372, 374-375) are missing here. From what is
// visible: when Src carries the deferred-type marker (isTodoType), the marker is
// propagated to Dest via insertTodoType — presumably the missing lines perform
// the actual use replacement and marker cleanup; verify against the full file.
369void SPIRVEmitIntrinsics::replaceAllUsesWith(
Value *Src,
Value *Dest,
373 if (isTodoType(Src)) {
376 insertTodoType(Dest);
380void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(
IRBuilder<> &
B,
385 std::string
Name = Src->hasName() ? Src->getName().str() :
"";
386 Src->eraseFromParent();
389 if (Named.insert(Dest).second)
414Type *SPIRVEmitIntrinsics::reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
415 bool IsPostprocessing) {
430 if (UnknownElemTypeI8) {
431 if (!IsPostprocessing)
439CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *
F,
Value *
Op,
447 B.SetInsertPointPastAllocas(OpA->getParent());
450 B.SetInsertPoint(
F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
452 Type *OpTy =
Op->getType();
456 CallInst *PtrCasted =
457 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
462void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
464 DenseMap<Function *, CallInst *> Ptrcasts) {
466 CallInst *PtrCastedI =
nullptr;
467 auto It = Ptrcasts.
find(
F);
468 if (It == Ptrcasts.
end()) {
469 PtrCastedI = buildSpvPtrcast(
F,
Op, ElemTy);
470 Ptrcasts[
F] = PtrCastedI;
472 PtrCastedI = It->second;
474 I->replaceUsesOfWith(
Op, PtrCastedI);
// NOTE(review): fragment of SPIRVEmitIntrinsics::propagateElemType. Extraction
// artifacts: fused line numbers ("477", "479", ...) and missing interior lines
// (orig 478, 481, 483-484, 486-490 absent). Visible behavior: builds a local
// Function->CallInst ptrcast cache, walks a Users collection (declared on a
// missing line), skips (User, Op) pairs already recorded in VisitedSubst to
// avoid re-substitution cycles, and for users whose type has already been
// validated (found in TypeValidated) rewrites their use of Op through an
// spv_ptrcast via replaceUsesOfWithSpvPtrcast. Confirm the Users definition
// and the elided conditions against the complete source.
477void SPIRVEmitIntrinsics::propagateElemType(
479 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
480 DenseMap<Function *, CallInst *> Ptrcasts;
482 for (
auto *U :
Users) {
485 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
491 TypeValidated.find(UI) != TypeValidated.end())
492 replaceUsesOfWithSpvPtrcast(
Op, ElemTy, UI, Ptrcasts);
// NOTE(review): fragment of the public (3-type-argument) overload of
// propagateElemTypeRec — a convenience wrapper that seeds fresh Visited and
// Ptrcasts containers and forwards to the recursive overload. Fused line
// numbers ("496", "498", ...) are extraction artifacts; orig line 497 (part of
// the parameter list) is missing. NOTE(review): the recursive overload appears
// to take the DenseMap by value (see its declaration fragment), so the
// std::move here hands over the map but each recursive call would still copy
// it — looks like a candidate for pass-by-reference; confirm against the full
// callee signature before changing.
496void SPIRVEmitIntrinsics::propagateElemTypeRec(
498 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
499 std::unordered_set<Value *> Visited;
500 DenseMap<Function *, CallInst *> Ptrcasts;
501 propagateElemTypeRec(
Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
502 std::move(Ptrcasts));
505void SPIRVEmitIntrinsics::propagateElemTypeRec(
507 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
508 std::unordered_set<Value *> &Visited,
509 DenseMap<Function *, CallInst *> Ptrcasts) {
510 if (!Visited.insert(
Op).second)
513 for (
auto *U :
Users) {
516 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
522 TypeValidated.find(UI) != TypeValidated.end())
523 replaceUsesOfWithSpvPtrcast(
Op, CastElemTy, UI, Ptrcasts);
531SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
532 bool UnknownElemTypeI8) {
533 std::unordered_set<Value *> Visited;
534 return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
538Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
539 Type *ValueTy,
Value *Operand, std::unordered_set<Value *> &Visited,
540 bool UnknownElemTypeI8) {
545 deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
556Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
557 Value *
Op, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8) {
569 for (User *OpU :
Op->users()) {
571 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
584 if ((DemangledName.
starts_with(
"__spirv_ocl_printf(") ||
593Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
Value *
I,
594 bool UnknownElemTypeI8) {
595 std::unordered_set<Value *> Visited;
596 return deduceElementTypeHelper(
I, Visited, UnknownElemTypeI8);
599void SPIRVEmitIntrinsics::maybeAssignPtrType(
Type *&Ty,
Value *
Op,
Type *RefTy,
600 bool UnknownElemTypeI8) {
602 if (!UnknownElemTypeI8)
609bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
610 GetElementPtrInst &
GEP,
611 const std::function<
void(
Type *, uint64_t)> &OnLiteralIndexing,
612 const std::function<
void(
Type *,
Value *)> &OnDynamicIndexing) {
620 Value *Src = getPointerRoot(
GEP.getPointerOperand());
621 Type *CurType = deduceElementType(Src,
true);
630 OnDynamicIndexing(AT->getElementType(), Operand);
631 return AT ==
nullptr;
639 uint32_t EltTypeSize =
DL.getTypeSizeInBits(AT->getElementType()) / 8;
643 CurType = AT->getElementType();
644 OnLiteralIndexing(CurType, Index);
646 uint32_t StructSize =
DL.getTypeSizeInBits(ST) / 8;
649 const auto &STL =
DL.getStructLayout(ST);
650 unsigned Element = STL->getElementContainingOffset(
Offset);
651 Offset -= STL->getElementOffset(Element);
652 CurType =
ST->getElementType(Element);
653 OnLiteralIndexing(CurType, Element);
665SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &
GEP) {
668 B.SetInsertPoint(&
GEP);
670 std::vector<Value *> Indices;
671 Indices.push_back(ConstantInt::get(
672 IntegerType::getInt32Ty(CurrF->
getContext()), 0,
false));
673 walkLogicalAccessChain(
675 [&Indices, &
B](
Type *EltType, uint64_t Index) {
677 ConstantInt::get(
B.getInt64Ty(), Index,
false));
680 uint32_t EltTypeSize =
DL.getTypeSizeInBits(EltType) / 8;
682 Offset, ConstantInt::get(
Offset->getType(), EltTypeSize,
684 Indices.push_back(Index);
689 Args.push_back(
B.getInt1(
GEP.isInBounds()));
690 Args.push_back(
GEP.getOperand(0));
692 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
693 replaceAllUsesWithAndErase(
B, &
GEP, NewI);
697Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *
GEP) {
699 Type *CurType =
GEP->getResultElementType();
701 bool Interrupted = walkLogicalAccessChain(
702 *
GEP, [&CurType](
Type *EltType, uint64_t Index) { CurType = EltType; },
705 return Interrupted ?
GEP->getResultElementType() : CurType;
708Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *
Ref) {
709 if (
Ref->getSourceElementType() ==
710 IntegerType::getInt8Ty(CurrF->
getContext()) &&
712 return getGEPTypeLogical(
Ref);
719 Ty =
Ref->getSourceElementType();
723 Ty =
Ref->getResultElementType();
728Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
729 Value *
I, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8,
730 bool IgnoreKnownType) {
736 if (!IgnoreKnownType)
741 if (!Visited.insert(
I).second)
748 maybeAssignPtrType(Ty,
I,
Ref->getAllocatedType(), UnknownElemTypeI8);
750 Ty = getGEPType(
Ref);
755 KnownTy =
Op->getType();
757 maybeAssignPtrType(Ty,
I, ElemTy, UnknownElemTypeI8);
759 Ty = deduceElementTypeByValueDeep(
761 Ref->getNumOperands() > 0 ?
Ref->getOperand(0) :
nullptr, Visited,
764 Type *RefTy = deduceElementTypeHelper(
Ref->getPointerOperand(), Visited,
766 maybeAssignPtrType(Ty,
I, RefTy, UnknownElemTypeI8);
768 if (
Type *Src =
Ref->getSrcTy(), *Dest =
Ref->getDestTy();
770 Ty = deduceElementTypeHelper(
Ref->getOperand(0), Visited,
775 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
779 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
781 Type *BestTy =
nullptr;
783 DenseMap<Type *, unsigned> PhiTys;
784 for (
int i =
Ref->getNumIncomingValues() - 1; i >= 0; --i) {
785 Ty = deduceElementTypeByUsersDeep(
Ref->getIncomingValue(i), Visited,
792 if (It.first->second > MaxN) {
793 MaxN = It.first->second;
801 for (
Value *
Op : {
Ref->getTrueValue(),
Ref->getFalseValue()}) {
802 Ty = deduceElementTypeByUsersDeep(
Op, Visited, UnknownElemTypeI8);
807 static StringMap<unsigned> ResTypeByArg = {
811 {
"__spirv_GenericCastToPtr_ToGlobal", 0},
812 {
"__spirv_GenericCastToPtr_ToLocal", 0},
813 {
"__spirv_GenericCastToPtr_ToPrivate", 0},
814 {
"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
815 {
"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
816 {
"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
820 if (
II &&
II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
822 if (HandleType->getTargetExtName() ==
"spirv.Image" ||
823 HandleType->getTargetExtName() ==
"spirv.SignedImage") {
824 for (User *U :
II->users()) {
829 }
else if (HandleType->getTargetExtName() ==
"spirv.VulkanBuffer") {
831 Ty = HandleType->getTypeParameter(0);
845 }
else if (
II &&
II->getIntrinsicID() ==
846 Intrinsic::spv_generic_cast_to_ptr_explicit) {
847 Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
849 }
else if (Function *CalledF = CI->getCalledFunction()) {
850 std::string DemangledName =
852 if (DemangledName.length() > 0)
853 DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
854 auto AsArgIt = ResTypeByArg.
find(DemangledName);
855 if (AsArgIt != ResTypeByArg.
end())
856 Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
857 Visited, UnknownElemTypeI8);
864 if (Ty && !IgnoreKnownType) {
875Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
876 bool UnknownElemTypeI8) {
877 std::unordered_set<Value *> Visited;
878 return deduceNestedTypeHelper(U,
U->getType(), Visited, UnknownElemTypeI8);
881Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
882 User *U,
Type *OrigTy, std::unordered_set<Value *> &Visited,
883 bool UnknownElemTypeI8) {
892 if (!Visited.insert(U).second)
898 for (
unsigned i = 0; i <
U->getNumOperands(); ++i) {
900 assert(
Op &&
"Operands should not be null.");
901 Type *OpTy =
Op->getType();
905 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
912 Change |= Ty != OpTy;
920 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
921 Type *OpTy = ArrTy->getElementType();
925 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
932 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
938 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
939 Type *OpTy = VecTy->getElementType();
943 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
950 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
// NOTE(review): fragment of SPIRVEmitIntrinsics::deduceElementType(Value *I,
// bool UnknownElemTypeI8). Fused line numbers are extraction artifacts; orig
// lines 962 and 964-965 are missing (presumably the "return Ty;" for the
// successful deduction and the insertTodoType bookkeeping for the fallback —
// verify). Visible contract: try the helper-based deduction first; if it
// fails and UnknownElemTypeI8 is set, fall back to i8 as the assumed pointee
// element type for the untyped-pointer case.
960Type *SPIRVEmitIntrinsics::deduceElementType(
Value *
I,
bool UnknownElemTypeI8) {
961 if (
Type *Ty = deduceElementTypeHelper(
I, UnknownElemTypeI8))
963 if (!UnknownElemTypeI8)
966 return IntegerType::getInt8Ty(
I->getContext());
970 Value *PointerOperand) {
984bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
986 Type *&KnownElemTy,
bool &Incomplete) {
990 std::string DemangledName =
992 if (DemangledName.length() > 0 &&
994 const SPIRVSubtarget &
ST = TM->
getSubtarget<SPIRVSubtarget>(*CalledF);
995 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
996 DemangledName,
ST.getPreferredInstructionSet());
997 if (Opcode == SPIRV::OpGroupAsyncCopy) {
998 for (
unsigned i = 0, PtrCnt = 0; i < CI->
arg_size() && PtrCnt < 2; ++i) {
1004 KnownElemTy = ElemTy;
1005 Ops.push_back(std::make_pair(
Op, i));
1007 }
else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
1014 case SPIRV::OpAtomicFAddEXT:
1015 case SPIRV::OpAtomicFMinEXT:
1016 case SPIRV::OpAtomicFMaxEXT:
1017 case SPIRV::OpAtomicLoad:
1018 case SPIRV::OpAtomicCompareExchangeWeak:
1019 case SPIRV::OpAtomicCompareExchange:
1020 case SPIRV::OpAtomicExchange:
1021 case SPIRV::OpAtomicIAdd:
1022 case SPIRV::OpAtomicISub:
1023 case SPIRV::OpAtomicOr:
1024 case SPIRV::OpAtomicXor:
1025 case SPIRV::OpAtomicAnd:
1026 case SPIRV::OpAtomicUMin:
1027 case SPIRV::OpAtomicUMax:
1028 case SPIRV::OpAtomicSMin:
1029 case SPIRV::OpAtomicSMax: {
1034 Incomplete = isTodoType(
Op);
1035 Ops.push_back(std::make_pair(
Op, 0));
1037 case SPIRV::OpAtomicStore: {
1046 Incomplete = isTodoType(
Op);
1047 Ops.push_back(std::make_pair(
Op, 0));
1056void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
1058 Type *&KnownElemTy,
bool IsPostprocessing) {
1062 Ops.push_back(std::make_pair(
Op, std::numeric_limits<unsigned>::max()));
1064 bool IsNewFTy =
false, IsIncomplete =
false;
1067 Type *ArgTy = Arg->getType();
1072 if (isTodoType(Arg))
1073 IsIncomplete =
true;
1075 IsIncomplete =
true;
1080 Type *RetTy = FTy->getReturnType();
1087 IsIncomplete =
true;
1089 IsIncomplete =
true;
1092 if (!IsPostprocessing && IsIncomplete)
1095 IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
1098bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
1099 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1100 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing,
1112 DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(
I,
Op)};
1113 for (User *U :
F->users()) {
1121 propagateElemType(CI, PrevElemTy, VisitedSubst);
1131 for (Instruction *IncompleteRetI : *IncompleteRets)
1132 deduceOperandElementType(IncompleteRetI,
nullptr, AskOps,
1134 }
else if (IncompleteRets) {
1137 TypeValidated.insert(
I);
1145void SPIRVEmitIntrinsics::deduceOperandElementType(
1146 Instruction *
I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
1147 const SmallPtrSet<Value *, 4> *AskOps,
bool IsPostprocessing) {
1149 Type *KnownElemTy =
nullptr;
1150 bool Incomplete =
false;
1156 Incomplete = isTodoType(
I);
1157 for (
unsigned i = 0; i <
Ref->getNumIncomingValues(); i++) {
1160 Ops.push_back(std::make_pair(
Op, i));
1166 Incomplete = isTodoType(
I);
1167 Ops.push_back(std::make_pair(
Ref->getPointerOperand(), 0));
1174 Incomplete = isTodoType(
I);
1175 Ops.push_back(std::make_pair(
Ref->getOperand(0), 0));
1179 KnownElemTy =
Ref->getSourceElementType();
1180 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1183 KnownElemTy =
I->getType();
1189 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1193 reconstructType(
Ref->getValueOperand(),
false, IsPostprocessing)))
1198 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1206 Incomplete = isTodoType(
Ref->getPointerOperand());
1207 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1215 Incomplete = isTodoType(
Ref->getPointerOperand());
1216 Ops.push_back(std::make_pair(
Ref->getPointerOperand(),
1222 Incomplete = isTodoType(
I);
1223 for (
unsigned i = 0; i <
Ref->getNumOperands(); i++) {
1226 Ops.push_back(std::make_pair(
Op, i));
1234 if (deduceOperandElementTypeFunctionRet(
I, IncompleteRets, AskOps,
1235 IsPostprocessing, KnownElemTy,
Op,
1238 Incomplete = isTodoType(CurrF);
1239 Ops.push_back(std::make_pair(
Op, 0));
1245 bool Incomplete0 = isTodoType(Op0);
1246 bool Incomplete1 = isTodoType(Op1);
1248 Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
1250 : GR->findDeducedElementType(Op0);
1252 KnownElemTy = ElemTy0;
1253 Incomplete = Incomplete0;
1254 Ops.push_back(std::make_pair(Op1, 1));
1255 }
else if (ElemTy1) {
1256 KnownElemTy = ElemTy1;
1257 Incomplete = Incomplete1;
1258 Ops.push_back(std::make_pair(Op0, 0));
1262 deduceOperandElementTypeCalledFunction(CI,
Ops, KnownElemTy, Incomplete);
1263 else if (HaveFunPtrs)
1264 deduceOperandElementTypeFunctionPointer(CI,
Ops, KnownElemTy,
1269 if (!KnownElemTy ||
Ops.size() == 0)
1274 for (
auto &OpIt :
Ops) {
1278 Type *AskTy =
nullptr;
1279 CallInst *AskCI =
nullptr;
1280 if (IsPostprocessing && AskOps) {
1286 if (Ty == KnownElemTy)
1289 Type *OpTy =
Op->getType();
1290 if (
Op->hasUseList() &&
1297 else if (!IsPostprocessing)
1301 if (AssignCI ==
nullptr) {
1310 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1311 std::make_pair(
I,
Op)};
1312 propagateElemTypeRec(
Op, KnownElemTy, PrevElemTy, VisitedSubst);
1316 CallInst *PtrCastI =
1317 buildSpvPtrcast(
I->getParent()->getParent(),
Op, KnownElemTy);
1318 if (OpIt.second == std::numeric_limits<unsigned>::max())
1321 I->setOperand(OpIt.second, PtrCastI);
1324 TypeValidated.insert(
I);
1327void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
1332 if (isAssignTypeInstr(U)) {
1333 B.SetInsertPoint(U);
1334 SmallVector<Value *, 2>
Args = {
New,
U->getOperand(1)};
1335 CallInst *AssignCI =
1336 B.CreateIntrinsic(Intrinsic::spv_assign_type, {
New->getType()},
Args);
1338 U->eraseFromParent();
1341 U->replaceUsesOfWith(Old, New);
1346 New->copyMetadata(*Old);
1350void SPIRVEmitIntrinsics::preprocessUndefs(
IRBuilder<> &
B) {
1351 std::queue<Instruction *> Worklist;
1355 while (!Worklist.empty()) {
1357 bool BPrepared =
false;
1360 for (
auto &
Op :
I->operands()) {
1362 if (!AggrUndef || !
Op->getType()->isAggregateType())
1369 auto *IntrUndef =
B.CreateIntrinsic(Intrinsic::spv_undef, {});
1370 Worklist.push(IntrUndef);
1371 I->replaceUsesOfWith(
Op, IntrUndef);
1372 AggrConsts[IntrUndef] = AggrUndef;
1373 AggrConstTypes[IntrUndef] = AggrUndef->getType();
1378void SPIRVEmitIntrinsics::preprocessCompositeConstants(
IRBuilder<> &
B) {
1379 std::queue<Instruction *> Worklist;
1383 while (!Worklist.empty()) {
1384 auto *
I = Worklist.front();
1387 bool KeepInst =
false;
1388 for (
const auto &
Op :
I->operands()) {
1390 Type *ResTy =
nullptr;
1393 ResTy = COp->getType();
1405 ResTy =
Op->getType()->isVectorTy() ? COp->getType() :
B.getInt32Ty();
1410 for (
unsigned i = 0; i < COp->getNumElements(); ++i)
1411 Args.push_back(COp->getElementAsConstant(i));
1415 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
1416 :
B.SetInsertPoint(
I);
1420 B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {
Args});
1424 AggrConsts[CI] = AggrConst;
1425 AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst,
false);
1437 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
1442 unsigned RoundingModeDeco,
1449 ConstantInt::get(
Int32Ty, SPIRV::Decoration::FPRoundingMode)),
1458 MDNode *SaturatedConversionNode =
1460 Int32Ty, SPIRV::Decoration::SaturatedConversion))});
1467 if (Fu->isIntrinsic()) {
1468 unsigned const int IntrinsicId = Fu->getIntrinsicID();
1469 switch (IntrinsicId) {
1470 case Intrinsic::fptosi_sat:
1471 case Intrinsic::fptoui_sat:
1490 MDString *ConstraintString =
MDString::get(Ctx,
IA->getConstraintString());
1498 B.SetInsertPoint(&
Call);
1499 B.CreateIntrinsic(Intrinsic::spv_inline_asm, {
Args});
// NOTE(review): fragment of SPIRVEmitIntrinsics::useRoundingMode. Fused line
// numbers are extraction artifacts, and the numbering gaps (orig 1505-1506,
// 1508, 1511-1513, 1516, 1519, 1522, 1525, 1528-1530 missing) almost certainly
// hide the RM initialization from FPI->getRoundingMode(), the early returns,
// and the per-case "break;" statements — do NOT read the visible case labels
// as fallthrough. Visible behavior: map the constrained-FP intrinsic's LLVM
// RoundingMode onto the corresponding SPIR-V FPRoundingMode decoration value
// (RTE/RTN/RTP/RTZ); Dynamic and NearestTiesToAway have no SPIR-V equivalent,
// leaving the sentinel max() which the final check uses to bail out without
// emitting a decoration.
1504void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
1507 if (!
RM.has_value())
1509 unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
1510 switch (
RM.value()) {
1514 case RoundingMode::NearestTiesToEven:
1515 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
1517 case RoundingMode::TowardNegative:
1518 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
1520 case RoundingMode::TowardPositive:
1521 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
1523 case RoundingMode::TowardZero:
1524 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
1526 case RoundingMode::Dynamic:
1527 case RoundingMode::NearestTiesToAway:
1531 if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
1537Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &
I) {
1540 B.SetInsertPoint(&
I);
1543 for (
auto &
Op :
I.operands()) {
1544 if (
Op.get()->getType()->isSized()) {
1553 CallInst *NewI =
B.CreateIntrinsic(Intrinsic::spv_switch,
1554 {
I.getOperand(0)->getType()}, {
Args});
1558 I.eraseFromParent();
1561 B.SetInsertPoint(ParentBB);
1562 IndirectBrInst *BrI =
B.CreateIndirectBr(
1565 for (BasicBlock *BBCase : BBCases)
1570Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &
I) {
1571 if (
I.getSourceElementType() == IntegerType::getInt8Ty(CurrF->
getContext()) &&
1579 B.SetInsertPoint(&
I);
1582 Args.push_back(
B.getInt1(
I.isInBounds()));
1584 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
1585 replaceAllUsesWithAndErase(
B, &
I, NewI);
1589Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &
I) {
1591 B.SetInsertPoint(&
I);
1600 I.eraseFromParent();
1606 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_bitcast, {
Types}, {
Args});
1607 replaceAllUsesWithAndErase(
B, &
I, NewI);
1611void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
1613 Type *VTy =
V->getType();
1618 if (ElemTy != AssignedType)
1631 if (CurrentType == AssignedType)
1638 " for value " +
V->getName(),
1646void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
1647 Instruction *
I,
Value *Pointer,
Type *ExpectedElementType,
1649 TypeValidated.insert(
I);
1652 Type *PointerElemTy = deduceElementTypeHelper(Pointer,
false);
1653 if (PointerElemTy == ExpectedElementType ||
1659 MetadataAsValue *VMD =
buildMD(ExpectedElementVal);
1661 bool FirstPtrCastOrAssignPtrType =
true;
1667 for (
auto User :
Pointer->users()) {
1670 (
II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
1671 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
1672 II->getOperand(0) != Pointer)
1677 FirstPtrCastOrAssignPtrType =
false;
1678 if (
II->getOperand(1) != VMD ||
1685 if (
II->getIntrinsicID() != Intrinsic::spv_ptrcast)
1690 if (
II->getParent() !=
I->getParent())
1693 I->setOperand(OperandToReplace,
II);
1699 if (FirstPtrCastOrAssignPtrType) {
1704 }
else if (isTodoType(Pointer)) {
1705 eraseTodoType(Pointer);
1712 DenseSet<std::pair<Value *, Value *>> VisitedSubst{
1713 std::make_pair(
I, Pointer)};
1715 propagateElemType(Pointer, PrevElemTy, VisitedSubst);
1727 auto *PtrCastI =
B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
1733void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *
I,
1738 replacePointerOperandWithPtrCast(
1739 I,
SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->
getContext()),
1745 Type *OpTy =
Op->getType();
1748 if (OpTy ==
Op->getType())
1749 OpTy = deduceElementTypeByValueDeep(OpTy,
Op,
false);
1750 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 1,
B);
1755 Type *OpTy = LI->getType();
1760 Type *NewOpTy = OpTy;
1761 OpTy = deduceElementTypeByValueDeep(OpTy, LI,
false);
1762 if (OpTy == NewOpTy)
1763 insertTodoType(Pointer);
1766 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1771 Type *OpTy =
nullptr;
1778 bool IsRewrittenGEP =
1779 GEPI->getSourceElementType() == IntegerType::getInt8Ty(
I->getContext());
1781 Value *Src = getPointerRoot(Pointer);
1787 OpTy = GEPI->getSourceElementType();
1789 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1791 insertTodoType(Pointer);
1803 std::string DemangledName =
1807 bool HaveTypes =
false;
1825 for (User *U : CalledArg->
users()) {
1827 if ((ElemTy = deduceElementTypeHelper(Inst,
false)) !=
nullptr)
1833 HaveTypes |= ElemTy !=
nullptr;
1838 if (DemangledName.empty() && !HaveTypes)
1856 Type *ExpectedType =
1858 if (!ExpectedType && !DemangledName.empty())
1859 ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
1860 DemangledName,
OpIdx,
I->getContext());
1861 if (!ExpectedType || ExpectedType->
isVoidTy())
1869 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType,
OpIdx,
B);
1873Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &
I) {
1880 I.getOperand(1)->getType(),
1881 I.getOperand(2)->getType()};
1883 B.SetInsertPoint(&
I);
1885 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_insertelt, {
Types}, {
Args});
1886 replaceAllUsesWithAndErase(
B, &
I, NewI);
1891SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &
I) {
1898 B.SetInsertPoint(&
I);
1900 I.getIndexOperand()->getType()};
1901 SmallVector<Value *, 2>
Args = {
I.getVectorOperand(),
I.getIndexOperand()};
1902 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_extractelt, {
Types}, {
Args});
1903 replaceAllUsesWithAndErase(
B, &
I, NewI);
1907Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &
I) {
1909 B.SetInsertPoint(&
I);
1912 for (
auto &
Op :
I.operands())
1917 for (
auto &
Op :
I.indices())
1918 Args.push_back(
B.getInt32(
Op));
1920 B.CreateIntrinsic(Intrinsic::spv_insertv, {
Types}, {
Args});
1921 replaceMemInstrUses(&
I, NewI,
B);
1925Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &
I) {
1926 if (
I.getAggregateOperand()->getType()->isAggregateType())
1929 B.SetInsertPoint(&
I);
1931 for (
auto &
Op :
I.indices())
1932 Args.push_back(
B.getInt32(
Op));
1934 B.CreateIntrinsic(Intrinsic::spv_extractv, {
I.getType()}, {
Args});
1935 replaceAllUsesWithAndErase(
B, &
I, NewI);
1939Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &
I) {
1940 if (!
I.getType()->isAggregateType())
1943 B.SetInsertPoint(&
I);
1944 TrackConstants =
false;
1949 B.CreateIntrinsic(Intrinsic::spv_load, {
I.getOperand(0)->getType()},
1950 {
I.getPointerOperand(),
B.getInt16(Flags),
1951 B.getInt8(
I.getAlign().value())});
1952 replaceMemInstrUses(&
I, NewI,
B);
1956Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &
I) {
1957 if (!AggrStores.contains(&
I))
1960 B.SetInsertPoint(&
I);
1961 TrackConstants =
false;
1965 auto *PtrOp =
I.getPointerOperand();
1966 auto *NewI =
B.CreateIntrinsic(
1967 Intrinsic::spv_store, {
I.getValueOperand()->getType(), PtrOp->getType()},
1968 {
I.getValueOperand(), PtrOp,
B.getInt16(Flags),
1969 B.getInt8(
I.getAlign().value())});
1971 I.eraseFromParent();
1975Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &
I) {
1976 Value *ArraySize =
nullptr;
1977 if (
I.isArrayAllocation()) {
1980 SPIRV::Extension::SPV_INTEL_variable_length_array))
1982 "array allocation: this instruction requires the following "
1983 "SPIR-V extension: SPV_INTEL_variable_length_array",
1985 ArraySize =
I.getArraySize();
1988 B.SetInsertPoint(&
I);
1989 TrackConstants =
false;
1990 Type *PtrTy =
I.getType();
1993 ?
B.CreateIntrinsic(Intrinsic::spv_alloca_array,
1994 {PtrTy, ArraySize->
getType()},
1995 {ArraySize,
B.getInt8(
I.getAlign().value())})
1996 :
B.CreateIntrinsic(
Intrinsic::spv_alloca, {PtrTy},
1997 {
B.getInt8(
I.getAlign().value())});
1998 replaceAllUsesWithAndErase(
B, &
I, NewI);
2002Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &
I) {
2003 assert(
I.getType()->isAggregateType() &&
"Aggregate result is expected");
2005 B.SetInsertPoint(&
I);
2007 Args.push_back(
B.getInt32(
2008 static_cast<uint32_t
>(
getMemScope(
I.getContext(),
I.getSyncScopeID()))));
2009 Args.push_back(
B.getInt32(
2011 Args.push_back(
B.getInt32(
2013 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
2014 {
I.getPointerOperand()->getType()}, {
Args});
2015 replaceMemInstrUses(&
I, NewI,
B);
// NOTE(review): fragment of SPIRVEmitIntrinsics::visitUnreachableInst. Fused
// line numbers are extraction artifacts; orig line 2020 (presumably the
// IRBuilder construction for B) and 2023+ (the return) are missing. Visible
// behavior: anchor the builder at the unreachable instruction and lower it to
// the spv_unreachable intrinsic so the SPIR-V backend can emit OpUnreachable.
2019Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &
I) {
2021 B.SetInsertPoint(&
I);
2022 B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
2026void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
2029 if (GV.
getName() ==
"llvm.global.annotations")
2036 deduceElementTypeHelper(&GV,
false);
2040 auto *InitInst =
B.CreateIntrinsic(Intrinsic::spv_init_global,
2042 InitInst->setArgOperand(1, Init);
2045 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.
getType(), &GV);
2051bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *
I,
2053 bool UnknownElemTypeI8) {
2059 if (
Type *ElemTy = deduceElementType(
I, UnknownElemTypeI8)) {
2066void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *
I,
2069 static StringMap<unsigned> ResTypeWellKnown = {
2070 {
"async_work_group_copy", WellKnownTypes::Event},
2071 {
"async_work_group_strided_copy", WellKnownTypes::Event},
2072 {
"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
2076 bool IsKnown =
false;
2081 std::string DemangledName =
2084 if (DemangledName.length() > 0)
2086 SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
2087 auto ResIt = ResTypeWellKnown.
find(DemangledName);
2088 if (ResIt != ResTypeWellKnown.
end()) {
2091 switch (ResIt->second) {
2092 case WellKnownTypes::Event:
2099 switch (DecorationId) {
2102 case FPDecorationId::SAT:
2105 case FPDecorationId::RTE:
2107 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE,
B);
2109 case FPDecorationId::RTZ:
2111 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ,
B);
2113 case FPDecorationId::RTP:
2115 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP,
B);
2117 case FPDecorationId::RTN:
2119 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN,
B);
2125 Type *Ty =
I->getType();
2128 Type *TypeToAssign = Ty;
2130 if (
II->getIntrinsicID() == Intrinsic::spv_const_composite ||
2131 II->getIntrinsicID() == Intrinsic::spv_undef) {
2132 auto It = AggrConstTypes.find(
II);
2133 if (It == AggrConstTypes.end())
2135 TypeToAssign = It->second;
2141 for (
const auto &
Op :
I->operands()) {
2146 Type *OpTy =
Op->getType();
2148 CallInst *AssignCI =
2153 Type *OpTy =
Op->getType();
2168 CallInst *AssignCI =
2178bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
2179 Instruction *Inst) {
2181 if (!STI->
canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
2192 case Intrinsic::spv_load:
2193 case Intrinsic::spv_store:
2200 const std::string
Prefix =
"__spirv_Atomic";
2201 const bool IsAtomic =
Name.find(Prefix) == 0;
2209void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *
I,
2211 if (MDNode *MD =
I->getMetadata(
"spirv.Decorations")) {
2213 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
2218 auto processMemAliasingDecoration = [&](
unsigned Kind) {
2219 if (MDNode *AliasListMD =
I->getMetadata(Kind)) {
2220 if (shouldTryToAddMemAliasingDecoration(
I)) {
2221 uint32_t Dec =
Kind == LLVMContext::MD_alias_scope
2222 ? SPIRV::Decoration::AliasScopeINTEL
2223 : SPIRV::Decoration::NoAliasINTEL;
2225 I, ConstantInt::get(
B.getInt32Ty(), Dec),
2228 B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
2229 {
I->getType()}, {
Args});
2233 processMemAliasingDecoration(LLVMContext::MD_alias_scope);
2234 processMemAliasingDecoration(LLVMContext::MD_noalias);
2237 if (MDNode *MD =
I->getMetadata(LLVMContext::MD_fpmath)) {
2239 bool AllowFPMaxError =
2241 if (!AllowFPMaxError)
2245 B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
2251void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *
I,
2254 bool IsConstComposite =
2255 II &&
II->getIntrinsicID() == Intrinsic::spv_const_composite;
2256 if (IsConstComposite && TrackConstants) {
2258 auto t = AggrConsts.find(
I);
2259 assert(t != AggrConsts.end());
2262 {
II->getType(),
II->getType()}, t->second,
I, {},
B);
2264 NewOp->setArgOperand(0,
I);
2267 for (
const auto &
Op :
I->operands()) {
2271 unsigned OpNo =
Op.getOperandNo();
2272 if (
II && ((
II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
2273 (
II->paramHasAttr(OpNo, Attribute::ImmArg))))
2277 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
2278 :
B.SetInsertPoint(
I);
2281 Type *OpTy =
Op->getType();
2289 {OpTy, OpTyVal->
getType()},
Op, OpTyVal, {},
B);
2291 if (!IsConstComposite &&
isPointerTy(OpTy) && OpElemTy !=
nullptr &&
2292 OpElemTy != IntegerType::getInt8Ty(
I->getContext())) {
2294 SmallVector<Value *, 2>
Args = {
2297 CallInst *PtrCasted =
2298 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
2303 I->setOperand(OpNo, NewOp);
2305 if (Named.insert(
I).second)
2309Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *
F,
2311 std::unordered_set<Function *> FVisited;
2312 return deduceFunParamElementType(
F,
OpIdx, FVisited);
2315Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
2316 Function *
F,
unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
2318 if (!FVisited.insert(
F).second)
2321 std::unordered_set<Value *> Visited;
2324 for (User *U :
F->users()) {
2336 if (
Type *Ty = deduceElementTypeHelper(OpArg, Visited,
false))
2339 for (User *OpU : OpArg->
users()) {
2341 if (!Inst || Inst == CI)
2344 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited,
false))
2351 if (FVisited.find(OuterF) != FVisited.end())
2353 for (
unsigned i = 0; i < OuterF->
arg_size(); ++i) {
2354 if (OuterF->
getArg(i) == OpArg) {
2355 Lookup.push_back(std::make_pair(OuterF, i));
2362 for (
auto &Pair :
Lookup) {
2363 if (
Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
2370void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *
F,
2372 B.SetInsertPointPastAllocas(
F);
2386 for (User *U :
F->users()) {
2402 for (User *U : Arg->
users()) {
2406 CI->
getParent()->getParent() == CurrF) {
2408 deduceOperandElementTypeFunctionPointer(CI,
Ops, ElemTy,
false);
2419void SPIRVEmitIntrinsics::processParamTypes(Function *
F,
IRBuilder<> &
B) {
2420 B.SetInsertPointPastAllocas(
F);
2426 if (!ElemTy && (ElemTy = deduceFunParamElementType(
F,
OpIdx)) !=
nullptr) {
2428 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2430 propagateElemType(Arg, IntegerType::getInt8Ty(
F->getContext()),
2442 bool IsNewFTy =
false;
2458bool SPIRVEmitIntrinsics::processFunctionPointers(
Module &M) {
2461 if (
F.isIntrinsic())
2463 if (
F.isDeclaration()) {
2464 for (User *U :
F.users()) {
2477 for (User *U :
F.users()) {
2479 if (!
II ||
II->arg_size() != 3 ||
II->getOperand(0) != &
F)
2481 if (
II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
2482 II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
2489 if (Worklist.
empty())
2495 "cannot allocate a name for the internal service function");
2496 LLVMContext &Ctx =
M.getContext();
2504 for (Function *
F : Worklist) {
2506 for (
const auto &Arg :
F->args())
2508 IRB.CreateCall(
F, Args);
2510 IRB.CreateRetVoid();
2516void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(
IRBuilder<> &
B) {
2517 DenseMap<Function *, CallInst *> Ptrcasts;
2518 for (
auto It : FDeclPtrTys) {
2520 for (
auto *U :
F->users()) {
2525 for (
auto [Idx, ElemTy] : It.second) {
2533 B.SetInsertPointPastAllocas(Arg->
getParent());
2538 replaceUsesOfWithSpvPtrcast(Param,
normalizeType(ElemTy), CI,
2547 .getFirstNonPHIOrDbgOrAlloca());
2568SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *
GEP) {
2575 Type *SrcTy =
GEP->getSourceElementType();
2576 SmallVector<Value *, 8> Indices(
GEP->indices());
2578 if (ArrTy && ArrTy->getNumElements() == 0 &&
2581 Indices.erase(Indices.begin());
2582 SrcTy = ArrTy->getElementType();
2583 Value *NewGEP = Builder.CreateGEP(SrcTy,
GEP->getPointerOperand(), Indices,
2584 "",
GEP->getNoWrapFlags());
2591bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
2592 if (
Func.isDeclaration())
2596 GR =
ST.getSPIRVGlobalRegistry();
2600 ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
2605 AggrConstTypes.clear();
2610 SmallPtrSet<Instruction *, 4> DeadInsts;
2613 if (!
Ref || GR->findDeducedElementType(
Ref))
2616 GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(
Ref);
2618 Ref->replaceAllUsesWith(NewGEP);
2624 if (
Type *GepTy = getGEPType(
Ref))
2628 for (
auto *
I : DeadInsts) {
2629 assert(
I->use_empty() &&
"Dead instruction should not have any uses left");
2630 I->eraseFromParent();
2633 processParamTypesByFunHeader(CurrF,
B);
2641 Type *ElTy =
SI->getValueOperand()->getType();
2643 AggrStores.insert(&
I);
2646 B.SetInsertPoint(&
Func.getEntryBlock(),
Func.getEntryBlock().begin());
2647 for (
auto &GV :
Func.getParent()->globals())
2648 processGlobalValue(GV,
B);
2650 preprocessUndefs(
B);
2651 preprocessCompositeConstants(
B);
2655 applyDemangledPtrArgTypes(
B);
2658 for (
auto &
I : Worklist) {
2660 if (isConvergenceIntrinsic(
I))
2663 bool Postpone = insertAssignPtrTypeIntrs(
I,
B,
false);
2665 insertAssignTypeIntrs(
I,
B);
2666 insertPtrCastOrAssignTypeInstr(
I,
B);
2670 if (Postpone && !GR->findAssignPtrTypeInstr(
I))
2671 insertAssignPtrTypeIntrs(
I,
B,
true);
2674 useRoundingMode(FPI,
B);
2679 SmallPtrSet<Instruction *, 4> IncompleteRets;
2681 deduceOperandElementType(&
I, &IncompleteRets);
2685 for (BasicBlock &BB : Func)
2686 for (PHINode &Phi : BB.
phis())
2688 deduceOperandElementType(&Phi,
nullptr);
2690 for (
auto *
I : Worklist) {
2691 TrackConstants =
true;
2701 if (isConvergenceIntrinsic(
I))
2705 processInstrAfterVisit(
I,
B);
2712bool SPIRVEmitIntrinsics::postprocessTypes(
Module &M) {
2713 if (!GR || TodoTypeSz == 0)
2716 unsigned SzTodo = TodoTypeSz;
2717 DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
2722 CallInst *AssignCI = GR->findAssignPtrTypeInstr(
Op);
2723 Type *KnownTy = GR->findDeducedElementType(
Op);
2724 if (!KnownTy || !AssignCI)
2730 std::unordered_set<Value *> Visited;
2731 if (
Type *ElemTy = deduceElementTypeHelper(
Op, Visited,
false,
true)) {
2732 if (ElemTy != KnownTy) {
2733 DenseSet<std::pair<Value *, Value *>> VisitedSubst;
2734 propagateElemType(CI, ElemTy, VisitedSubst);
2741 if (
Op->hasUseList()) {
2742 for (User *U :
Op->users()) {
2749 if (TodoTypeSz == 0)
2754 SmallPtrSet<Instruction *, 4> IncompleteRets;
2756 auto It = ToProcess.
find(&
I);
2757 if (It == ToProcess.
end())
2759 It->second.remove_if([
this](
Value *V) {
return !isTodoType(V); });
2760 if (It->second.size() == 0)
2762 deduceOperandElementType(&
I, &IncompleteRets, &It->second,
true);
2763 if (TodoTypeSz == 0)
2768 return SzTodo > TodoTypeSz;
2772void SPIRVEmitIntrinsics::parseFunDeclarations(
Module &M) {
2774 if (!
F.isDeclaration() ||
F.isIntrinsic())
2778 if (DemangledName.empty())
2782 auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
2783 DemangledName,
ST.getPreferredInstructionSet());
2784 if (Opcode != SPIRV::OpGroupAsyncCopy)
2787 SmallVector<unsigned> Idxs;
2796 LLVMContext &Ctx =
F.getContext();
2798 SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
2799 if (!TypeStrs.
size())
2802 for (
unsigned Idx : Idxs) {
2803 if (Idx >= TypeStrs.
size())
2806 SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
2809 FDeclPtrTys[&
F].push_back(std::make_pair(Idx, ElemTy));
2814bool SPIRVEmitIntrinsics::runOnModule(
Module &M) {
2817 parseFunDeclarations(M);
2827 if (!
F.isDeclaration() && !
F.isIntrinsic()) {
2829 processParamTypes(&
F,
B);
2833 CanTodoType =
false;
2834 Changed |= postprocessTypes(M);
2837 Changed |= processFunctionPointers(M);
2843 return new SPIRVEmitIntrinsics(TM);
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static void replaceAllUsesWith(Value *Old, Value *New, SmallPtrSet< BasicBlock *, 32 > &FreshBBs, bool IsHuge)
Replace all old uses with new ones, and push the updated BBs into FreshBBs.
This file defines the DenseSet and SmallDenseSet classes.
static bool runOnFunction(Function &F, bool PostInlining)
iv Induction Variable Users
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
Machine Check Debug Module
MachineInstr unsigned OpIdx
uint64_t IntrinsicInst * II
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
static unsigned getNumElements(Type *Ty)
static bool isMemInstrToReplace(Instruction *I)
static bool isAggrConstForceInt32(const Value *V)
static Type * getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I, Value *PointerOperand)
static void reportFatalOnTokenType(const Instruction *I)
static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I)
static void emitAssignName(Instruction *I, IRBuilder<> &B)
static Type * getPointeeTypeByCallInst(StringRef DemangledName, Function *CalledF, unsigned OpIdx)
static void createRoundingModeDecoration(Instruction *I, unsigned RoundingModeDeco, IRBuilder<> &B)
static void createDecorationIntrinsic(Instruction *I, MDNode *Node, IRBuilder<> &B)
static bool IsKernelArgInt8(Function *F, StoreInst *SI)
static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B)
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
static FunctionType * getFunctionPointerElemType(Function *F, SPIRVGlobalRegistry *GR)
static void createSaturatedConversionDecoration(Instruction *I, IRBuilder<> &B)
static Type * restoreMutatedType(SPIRVGlobalRegistry *GR, Instruction *I, Type *Ty)
static bool requireAssignType(Instruction *I)
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
static void insertSpirvDecorations(MachineFunction &MF, SPIRVGlobalRegistry *GR, MachineIRBuilder MIB)
#define SPIRV_BACKEND_SERVICE_FUN_NAME
static SymbolRef::Type getType(const Symbol *Sym)
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
This class represents an incoming formal argument to a Function.
const Function * getParent() const
static unsigned getPointerOperandIndex()
static unsigned getPointerOperandIndex()
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
LLVM_ABI LLVMContext & getContext() const
Get the context in which this basic block lives.
static LLVM_ABI BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
bool isInlineAsm() const
Check if this call is an inline asm statement.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
FunctionType * getFunctionType() const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
uint64_t getZExtValue() const
Return the constant as a 64-bit unsigned integer value after it has been zero extended as appropriate...
static LLVM_ABI Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
LLVM_ABI std::optional< RoundingMode > getRoundingMode() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
static LLVM_ABI FunctionType * get(Type *Result, ArrayRef< Type * > Params, bool isVarArg)
This static method is the primary way of constructing a FunctionType.
void addFnAttr(Attribute::AttrKind Kind)
Add function attributes to this function.
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intri...
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Type * getReturnType() const
Returns the type of the ret val.
Argument * getArg(unsigned i) const
static LLVM_ABI Type * getTypeAtIndex(Type *Ty, Value *Idx)
Return the type of the element at the given index of an indexable type.
static unsigned getPointerOperandIndex()
PointerType * getType() const
Global values are always pointers.
@ PrivateLinkage
Like Internal, but omit from symbol table.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI void addDestination(BasicBlock *Dest)
Add a destination.
Base class for instruction visitors.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI void copyMetadata(const Instruction &SrcInst, ArrayRef< unsigned > WL=ArrayRef< unsigned >())
Copy metadata from SrcInst to this instruction.
This is an important class for using LLVM in a threaded context.
static unsigned getPointerOperandIndex()
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDString * get(LLVMContext &Context, StringRef Str)
Flags
Flags values. These may be or'd together.
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
void addAssignPtrTypeInstr(Value *Val, CallInst *AssignPtrTyCI)
void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg)
Type * findDeducedCompositeType(const Value *Val)
void replaceAllUsesWith(Value *Old, Value *New, bool DeleteOld=true)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
Type * findMutated(const Value *Val)
void addDeducedCompositeType(Value *Val, Type *Ty)
void buildAssignType(IRBuilder<> &B, Type *Ty, Value *Arg)
Type * findDeducedElementType(const Value *Val)
void updateAssignType(CallInst *AssignCI, Value *Arg, Value *OfType)
CallInst * findAssignPtrTypeInstr(const Value *Val)
const SPIRVTargetLowering * getTargetLowering() const override
bool isLogicalSPIRV() const
bool canUseExtension(SPIRV::Extension::Extension E) const
const SPIRVSubtarget * getSubtargetImpl() const
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
static unsigned getPointerOperandIndex()
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
static LLVM_ABI StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
static LLVM_ABI TargetExtType * get(LLVMContext &Context, StringRef Name, ArrayRef< Type * > Types={}, ArrayRef< unsigned > Ints={})
Return a target extension type having the specified name and optional type and integer parameters.
Type * getTypeParameter(unsigned i) const
const STC & getSubtarget(const Function &F) const
This method returns a pointer to the specified type of TargetSubtargetInfo.
The instances of the Type class are immutable: once they are created, they are never changed.
bool isVectorTy() const
True if this is an instance of VectorType.
bool isArrayTy() const
True if this is an instance of ArrayType.
static LLVM_ABI IntegerType * getInt32Ty(LLVMContext &C)
bool isPointerTy() const
True if this is an instance of PointerType.
Type * getArrayElementType() const
LLVM_ABI StringRef getTargetExtName() const
static LLVM_ABI IntegerType * getInt8Ty(LLVMContext &C)
bool isStructTy() const
True if this is an instance of StructType.
bool isTargetExtTy() const
Return true if this is a target extension type.
bool isAggregateType() const
Return true if the type is an aggregate type.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI bool isValidElementType(Type *ElemTy)
Return true if the specified type is valid as a element type.
static LLVM_ABI TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
void setOperand(unsigned i, Value *Val)
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
LLVM_ABI void setName(const Twine &Name)
Change the name of the value.
iterator_range< user_iterator > users()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
const ParentTy * getParent() const
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ SPIR_KERNEL
Used for SPIR kernel functions.
@ BasicBlock
Various leaf nodes.
bool match(Val *V, const Pattern &P)
is_zero m_Zero()
Match any null constant or a vector with all elements equal to 0.
ElementType
The element type of an SRV or UAV resource.
@ User
could "use" a pointer
NodeAddr< PhiNode * > Phi
NodeAddr< FuncNode * > Func
friend class Instruction
Iterator for Instructions in a `BasicBlock.
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
bool getVacantFunctionName(Module &M, std::string &Name)
FunctionAddr VTableAddr Value
bool isTypedPointerWrapper(const TargetExtType *ExtTy)
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
CallInst * buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef< Type * > Types, Value *Arg, Value *Arg2, ArrayRef< Constant * > Imms, IRBuilder<> &B)
void append_range(Container &C, Range &&R)
Wrapper function to append range R to container C.
bool isNestedPointer(const Type *Ty)
MetadataAsValue * buildMD(Value *Arg)
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
LLVM_ABI bool isInstructionTriviallyDead(Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction is not used, and the instruction will return.
auto reverse(ContainerTy &&C)
Type * getTypedPointerWrapper(Type *ElemTy, unsigned AS)
bool isPointerTy(const Type *T)
LLVM_ABI void report_fatal_error(Error Err, bool gen_crash_diag=true)
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
SPIRV::Scope::Scope getMemScope(LLVMContext &Ctx, SyncScope::ID Id)
@ Ref
The access may reference the value stored in memory.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
bool hasPointeeTypeAttr(Argument *Arg)
bool isEquivalentTypes(Type *Ty1, Type *Ty2)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
bool hasInitializer(const GlobalVariable *GV)
Type * normalizeType(Type *Ty)
bool isSpvIntrinsic(const MachineInstr &MI, Intrinsic::ID IntrinsicID)
Type * getPointeeType(const Type *Ty)
PoisonValue * getNormalizedPoisonValue(Type *Ty)
bool isUntypedPointerTy(const Type *T)
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)