//===-- SPIRVEmitIntrinsics.cpp - emit SPIRV intrinsics ---------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The pass emits SPIRV intrinsics keeping essential high-level information for
// the translation of LLVM IR to SPIR-V.
//
//===----------------------------------------------------------------------===//

#include "SPIRV.h"
#include "SPIRVBuiltins.h"
#include "SPIRVMetadata.h"
#include "SPIRVSubtarget.h"
#include "SPIRVTargetMachine.h"
#include "SPIRVUtils.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicsSPIRV.h"
#include "llvm/IR/TypedPointerType.h"

#include <queue>

// This pass performs the following transformations at the LLVM IR level that
// are required for the subsequent translation to SPIR-V:
// - replaces direct usages of aggregate constants with target-specific
//   intrinsics;
// - replaces aggregate-related instructions (extract/insert, ld/st, etc.)
//   with target-specific intrinsics;
// - emits intrinsics for the global variable initializers since IRTranslator
//   doesn't handle them and it's not very convenient to translate them
//   ourselves;
// - emits intrinsics to keep track of the string names assigned to the values;
// - emits intrinsics to keep track of constants (this is necessary to have an
//   LLVM IR constant after the IRTranslation is completed) for their further
//   deduplication;
// - emits intrinsics to keep track of original LLVM types of the values
//   to be able to emit proper SPIR-V types eventually.
//
// TODO: consider removing spv.track.constant in favor of spv.assign.type.
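//
// For example, a store of a struct-typed constant ends up as a pair of
// intrinsic calls: the constant operand is replaced by an spv_const_composite
// call (see preprocessCompositeConstants), and the aggregate store itself is
// rewritten into an spv_store call that carries the memory-operand flags and
// the alignment as immediate arguments (see visitStoreInst).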

using namespace llvm;

namespace llvm {
void initializeSPIRVEmitIntrinsicsPass(PassRegistry &);
} // namespace llvm

namespace {
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  Function *F = nullptr;
  bool TrackConstants = true;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;

  // a registry of created Intrinsic::spv_assign_ptr_type instructions
  DenseMap<Value *, CallInst *> AssignPtrTypeInstr;

  // deduce element type of untyped pointers
  Type *deduceElementType(Value *I);
  Type *deduceElementTypeHelper(Value *I);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited);

  // deduce nested types of composites
  Type *deduceNestedTypeHelper(User *U);
  Type *deduceNestedTypeHelper(User *U, Type *Ty,
                               std::unordered_set<Value *> &Visited);

  // deduce Types of operands of the Instruction if possible
  void deduceOperandElementType(Instruction *I);

  void preprocessCompositeConstants(IRBuilder<> &B);
  void preprocessUndefs(IRBuilder<> &B);

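  // Helper that emits a call of the form IntrID(Arg2, metadata(Arg), Imms...):
  // Arg is wrapped into constant metadata so that the referenced constant (and
  // thus the type it carries) survives the IRTranslation step.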
  CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
                            Value *Arg, Value *Arg2, ArrayRef<Constant *> Imms,
                            IRBuilder<> &B) {
    ConstantAsMetadata *CM = ValueAsMetadata::getConstant(Arg);
    MDTuple *TyMD = MDNode::get(F->getContext(), CM);
    MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
    SmallVector<Value *, 4> Args;
    Args.push_back(Arg2);
    Args.push_back(VMD);
    for (auto *Imm : Imms)
      Args.push_back(Imm);
    return B.CreateIntrinsic(IntrID, {Types}, Args);
  }

  void replaceMemInstrUses(Instruction *Old, Instruction *New, IRBuilder<> &B);
  void processInstrAfterVisit(Instruction *I, IRBuilder<> &B);
  void insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B);
  void insertAssignTypeIntrs(Instruction *I, IRBuilder<> &B);
  void insertAssignTypeInstrForTargetExtTypes(TargetExtType *AssignedType,
                                              Value *V, IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  void insertPtrCastOrAssignTypeInstr(Instruction *I, IRBuilder<> &B);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  void processParamTypes(Function *F, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);

public:
  static char ID;
  SPIRVEmitIntrinsics() : ModulePass(ID) {
    initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
  }
  SPIRVEmitIntrinsics(SPIRVTargetMachine *_TM) : ModulePass(ID), TM(_TM) {
    initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
  }
  Instruction *visitInstruction(Instruction &I) { return &I; }
  Instruction *visitSwitchInst(SwitchInst &I);
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitBitCastInst(BitCastInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitInsertValueInst(InsertValueInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitLoadInst(LoadInst &I);
  Instruction *visitStoreInst(StoreInst &I);
  Instruction *visitAllocaInst(AllocaInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  Instruction *visitUnreachableInst(UnreachableInst &I);

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;
  bool runOnFunction(Function &F);

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
} // namespace

char SPIRVEmitIntrinsics::ID = 0;

INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
                false, false)

static inline bool isAssignTypeInstr(const Instruction *I) {
  return isa<IntrinsicInst>(I) &&
         cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
}

static bool isMemInstrToReplace(Instruction *I) {
  return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
         isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
}

static bool isAggrToReplace(const Value *V) {
  return isa<ConstantAggregate>(V) || isa<ConstantDataArray>(V) ||
         (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
}

static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  if (isa<PHINode>(I))
    B.SetInsertPoint(I->getParent(), I->getParent()->getFirstInsertionPt());
  else
    B.SetInsertPoint(I);
}

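// Returns false for intrinsics whose results do not need an spv_assign_type
// annotation (currently the memory invariant markers), true otherwise.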
static bool requireAssignType(Instruction *I) {
  IntrinsicInst *Intr = dyn_cast<IntrinsicInst>(I);
  if (Intr) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static inline void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

// Set the element pointer type to the given ValueTy and try to refine this
// type further (recursively) via the Operand value, if needed.
Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited) {
  Type *Ty = ValueTy;
  if (Operand) {
    if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
      if (Type *NestedTy = deduceElementTypeHelper(Operand, Visited))
        Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
    } else {
      Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited);
    }
  }
  return Ty;
}

// Traverse User instructions to deduce an element pointer type of the operand.
Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited) {
  if (!Op || !isPointerTy(Op->getType()))
    return nullptr;

  if (auto PType = dyn_cast<TypedPointerType>(Op->getType()))
    return PType->getElementType();

  // maybe we already know operand's element type
  if (Type *KnownTy = GR->findDeducedElementType(Op))
    return KnownTy;

  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
        return Ty;
    }
  }
  return nullptr;
}

// Deduce and return a successfully deduced Type of the Instruction,
// or nullptr otherwise.
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited) {
  // allow to pass nullptr as an argument
  if (!I)
    return nullptr;

  // maybe already known
  if (Type *KnownTy = GR->findDeducedElementType(I))
    return KnownTy;

  // maybe a cycle
  if (Visited.find(I) != Visited.end())
    return nullptr;
  Visited.insert(I);

  // fallback value in case when we fail to deduce a type
  Type *Ty = nullptr;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    Ty = Ref->getAllocatedType();
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = Ref->getResultElementType();
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Ty = deduceElementTypeHelper(Ref->getPointerOperand(), Visited);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited);
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    Value *Op = Ref->getNewValOperand();
    Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    Value *Op = Ref->getValOperand();
    Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited);
      if (Ty)
        break;
    }
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited);
      if (Ty)
        break;
    }
  }

  // remember the found relationship
  if (Ty) {
    // specify nested types if needed, otherwise return unchanged
    GR->addDeducedElementType(I, Ty);
  }

  return Ty;
}

// Re-create a type of the value if it has untyped pointer fields, also nested.
// Return the original value type if no corrections of untyped pointer
// information are found or needed.
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited) {
  if (!U)
    return OrigTy;

  // maybe already known
  if (Type *KnownTy = GR->findDeducedCompositeType(U))
    return KnownTy;

  // maybe a cycle
  if (Visited.find(U) != Visited.end())
    return OrigTy;
  Visited.insert(U);

  if (dyn_cast<StructType>(OrigTy)) {
    SmallVector<Type *> Tys;
    bool Change = false;
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (Op) {
        if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
          if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
            Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
        } else {
          Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
        }
      }
      Tys.push_back(Ty);
      Change |= Ty != OpTy;
    }
    if (Change) {
      Type *NewTy = StructType::create(Tys);
      GR->addDeducedCompositeType(U, NewTy);
      return NewTy;
    }
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
      }
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  }

  return OrigTy;
}

Type *SPIRVEmitIntrinsics::deduceElementType(Value *I) {
  if (Type *Ty = deduceElementTypeHelper(I))
    return Ty;
  return IntegerType::getInt8Ty(I->getContext());
}

// If the Instruction has Pointer operands with unresolved types, this function
// tries to deduce them. If the Instruction has Pointer operands with known
// types which differ from expected, this function tries to insert a pointer
// cast (spv_ptrcast) to resolve the issue.
void SPIRVEmitIntrinsics::deduceOperandElementType(Instruction *I) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    Type *RetTy = F->getReturnType();
    if (!isPointerTy(RetTy))
      return;
    Value *Op = Ref->getReturnValue();
    if (!Op)
      return;
    if (!(KnownElemTy = GR->findDeducedElementType(F))) {
      if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
        GR->addDeducedElementType(F, OpElemTy);
        TypedPointerType *DerivedTy =
            TypedPointerType::get(OpElemTy, getPointerAddressSpace(RetTy));
        GR->addReturnType(F, DerivedTy);
      }
      return;
    }
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    if (!isPointerTy(Ref->getOperand(0)->getType()))
      return;
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    Type *ElemTy0 = GR->findDeducedElementType(Op0);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  }

  // There is not enough info to deduce types, or everything is valid already.
  if (!KnownElemTy || Ops.size() == 0)
    return;

  LLVMContext &Ctx = F->getContext();
  IRBuilder<> B(Ctx);
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    if (Op->use_empty())
      continue;
    Type *Ty = GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    if (Instruction *User = dyn_cast<Instruction>(Op->use_begin()->get()))
      setInsertPointSkippingPhis(B, User->getNextNode());
    else
      B.SetInsertPoint(I);
    Value *OpTyVal = Constant::getNullValue(KnownElemTy);
    Type *OpTy = Op->getType();
    if (!Ty) {
      GR->addDeducedElementType(Op, KnownElemTy);
      // check if there is existing Intrinsic::spv_assign_ptr_type instruction
      auto It = AssignPtrTypeInstr.find(Op);
      if (It == AssignPtrTypeInstr.end()) {
        CallInst *CI =
            buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,
                            {B.getInt32(getPointerAddressSpace(OpTy))}, B);
        AssignPtrTypeInstr[Op] = CI;
      } else {
        It->second->setArgOperand(
            1,
            MetadataAsValue::get(
                Ctx, MDNode::get(Ctx, ValueAsMetadata::getConstant(OpTyVal))));
      }
    } else {
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      MetadataAsValue *VMD = MetadataAsValue::get(
          Ctx, MDNode::get(Ctx, ValueAsMetadata::getConstant(OpTyVal)));
      SmallVector<Value *, 2> Args = {Op, VMD,
                                      B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCastI =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      I->setOperand(OpIt.second, PtrCastI);
    }
  }
}

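// Rewrite all users of the aggregate-producing instruction Old to use New
// instead: spv_assign_type users are re-emitted against New, memory
// instructions, returns and calls simply switch the operand, and any other
// user is considered illegal.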
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  Old->eraseFromParent();
}

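// Replace aggregate-typed undef operands with calls to spv_undef and record
// them in AggrConsts/AggrConstTypes so they later receive a proper type
// assignment.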
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      B.SetInsertPoint(I);
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}

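// Replace direct uses of aggregate constants (ConstantAggregate,
// ConstantDataArray and non-vector ConstantAggregateZero) with calls to
// spv_const_composite, remembering the original constant and its (possibly
// refined) type for the later type assignment and constant tracking.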
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    assert(I);
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      auto BuildCompositeIntrinsic =
          [](Constant *AggrC, ArrayRef<Value *> Args, Value *Op, Instruction *I,
             IRBuilder<> &B, std::queue<Instruction *> &Worklist,
             bool &KeepInst, SPIRVEmitIntrinsics &SEI) {
            B.SetInsertPoint(I);
            auto *CCI =
                B.CreateIntrinsic(Intrinsic::spv_const_composite, {}, {Args});
            Worklist.push(CCI);
            I->replaceUsesOfWith(Op, CCI);
            KeepInst = true;
            SEI.AggrConsts[CCI] = AggrC;
            SEI.AggrConstTypes[CCI] = SEI.deduceNestedTypeHelper(AggrC);
          };

      if (auto *AggrC = dyn_cast<ConstantAggregate>(Op)) {
        SmallVector<Value *> Args(AggrC->op_begin(), AggrC->op_end());
        BuildCompositeIntrinsic(AggrC, Args, Op, I, B, Worklist, KeepInst,
                                *this);
      } else if (auto *AggrC = dyn_cast<ConstantDataArray>(Op)) {
        SmallVector<Value *> Args;
        for (unsigned i = 0; i < AggrC->getNumElements(); ++i)
          Args.push_back(AggrC->getElementAsConstant(i));
        BuildCompositeIntrinsic(AggrC, Args, Op, I, B, Worklist, KeepInst,
                                *this);
      } else if (isa<ConstantAggregateZero>(Op) &&
                 !Op->getType()->isVectorTy()) {
        auto *AggrC = cast<ConstantAggregateZero>(Op);
        SmallVector<Value *> Args(AggrC->op_begin(), AggrC->op_end());
        BuildCompositeIntrinsic(AggrC, Args, Op, I, B, Worklist, KeepInst,
                                *this);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}

Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  SmallVector<Value *, 4> Args;
  SmallVector<BasicBlock *> BBCases;
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    } else {
      report_fatal_error("Unexpected switch operand");
    }
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // remove the switch to avoid its unneeded and undesirable unwrap into
  // branches and conditions
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  // insert an artificial and temporary instruction to preserve a valid CFG;
  // it will be removed after the IR translation pass
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}

Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(I.isInBounds()));
  for (auto &Op : I.operands())
    Args.push_back(Op);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  Value *Source = I.getOperand(0);

  // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
  // varying element types. In case of IR coming from older versions of LLVM
  // such bitcasts do not provide sufficient information, so they should just
  // be skipped here and handled in insertPtrCastOrAssignTypeInstr.
  if (isPointerTy(I.getType())) {
    I.replaceAllUsesWith(Source);
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

void SPIRVEmitIntrinsics::insertAssignTypeInstrForTargetExtTypes(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  // Do not emit spv_assign_type if V is of the AssignedType already.
  if (V->getType() == AssignedType)
    return;

  // Do not emit spv_assign_type if there is one already targeting V. If the
  // found spv_assign_type assigns a type different than AssignedType, report
  // an error. Builtin types cannot be redeclared or cast.
  for (auto User : V->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II || II->getIntrinsicID() != Intrinsic::spv_assign_type)
      continue;

    MetadataAsValue *VMD = cast<MetadataAsValue>(II->getOperand(1));
    Type *BuiltinType =
        dyn_cast<ConstantAsMetadata>(VMD->getMetadata())->getType();
    if (BuiltinType != AssignedType)
      report_fatal_error("Type mismatch " + BuiltinType->getTargetExtName() +
                             "/" + AssignedType->getTargetExtName() +
                             " for value " + V->getName(),
                         false);
    return;
  }

  Constant *Const = UndefValue::get(AssignedType);
  buildIntrWithMD(Intrinsic::spv_assign_type, {V->getType()}, Const, V, {}, B);
}

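// Make sure the pointer operand OperandToReplace of I is known to point to
// ExpectedElementType: reuse an equivalent spv_ptrcast/spv_assign_ptr_type if
// one already exists, emit spv_assign_ptr_type on the first encounter of an
// untyped pointer, and otherwise insert an spv_ptrcast and rewrite the operand
// to use it.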
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  // If Pointer is the result of nop BitCastInst (ptr -> ptr), use the source
  // pointer instead. The BitCastInst should be later removed when visited.
  while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
    Pointer = BC->getOperand(0);

  // Do not emit spv_ptrcast if Pointer's element type is ExpectedElementType
  Type *PointerElemTy = deduceElementTypeHelper(Pointer);
  if (PointerElemTy == ExpectedElementType)
    return;

  setInsertPointSkippingPhis(B, I);
  Constant *ExpectedElementTypeConst =
      Constant::getNullValue(ExpectedElementType);
  ConstantAsMetadata *CM =
      ValueAsMetadata::getConstant(ExpectedElementTypeConst);
  MDTuple *TyMD = MDNode::get(F->getContext(), CM);
  MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
  unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit new spv_ptrcast if equivalent one already exists or when
  // spv_assign_ptr_type already targets this pointer with the same element
  // type.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
    // pointer.
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
            AddressSpace)
      continue;

    // The spv_ptrcast/spv_assign_ptr_type targeting this pointer is of the
    // same element type and address space.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    // This must be a spv_ptrcast, do not emit new if this one has the same BB
    // as I. Otherwise, search for other spv_ptrcast/spv_assign_ptr_type.
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  // // Do not emit spv_ptrcast if it would cast to the default pointer element
  // // type (i8) of the same address space.
  // if (ExpectedElementType->isIntegerTy(8))
  //   return;

  // If this would be the first spv_ptrcast, do not emit spv_ptrcast and emit
  // spv_assign_ptr_type instead.
  if (FirstPtrCastOrAssignPtrType &&
      (isa<Instruction>(Pointer) || isa<Argument>(Pointer))) {
    CallInst *CI = buildIntrWithMD(
        Intrinsic::spv_assign_ptr_type, {Pointer->getType()},
        ExpectedElementTypeConst, Pointer, {B.getInt32(AddressSpace)}, B);
    GR->addDeducedElementType(CI, ExpectedElementType);
    GR->addDeducedElementType(Pointer, ExpectedElementType);
    AssignPtrTypeInstr[Pointer] = CI;
    return;
  }

  // Emit spv_ptrcast
  SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
  SmallVector<Value *, 2> Args = {Pointer, VMD, B.getInt32(AddressSpace)};
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
}

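// Insert pointer casts or pointer type assignments for the pointer operands of
// loads, stores, GEPs and calls to non-intrinsic builtins, deducing the
// expected pointee types from the instruction itself, the callee's formal
// parameters, or the demangled builtin name.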
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (SI && F->getCallingConv() == CallingConv::SPIR_KERNEL &&
      isPointerTy(SI->getValueOperand()->getType()) &&
      isa<Argument>(SI->getValueOperand())) {
    return replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(F->getContext()), 0,
        B);
  } else if (SI) {
    return replacePointerOperandWithPtrCast(
        I, SI->getPointerOperand(), SI->getValueOperand()->getType(), 1, B);
  } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    return replacePointerOperandWithPtrCast(I, LI->getPointerOperand(),
                                            LI->getType(), 0, B);
  } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    return replacePointerOperandWithPtrCast(I, GEPI->getPointerOperand(),
                                            GEPI->getSourceElementType(), 0, B);
  }

  // Handle calls to builtins (non-intrinsics):
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
      !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
    return;

  // collect information about formal parameter types
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    Type *ArgType = CalledArg->getType();
    if (!isPointerTy(ArgType)) {
      CalledArgTys.push_back(nullptr);
    } else if (isTypedPointerTy(ArgType)) {
      CalledArgTys.push_back(cast<TypedPointerType>(ArgType)->getElementType());
      HaveTypes = true;
    } else {
      Type *ElemTy = GR->findDeducedElementType(CalledArg);
      if (!ElemTy && hasPointeeTypeAttr(CalledArg))
        ElemTy = getPointeeTypeByAttr(CalledArg);
      if (!ElemTy) {
        for (User *U : CalledArg->users()) {
          if (Instruction *Inst = dyn_cast<Instruction>(U)) {
            if ((ElemTy = deduceElementTypeHelper(Inst)) != nullptr)
              break;
          }
        }
      }
      HaveTypes |= ElemTy != nullptr;
      CalledArgTys.push_back(ElemTy);
    }
  }

  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isa<PointerType>(ArgOperand->getType()) &&
        !isa<TypedPointerType>(ArgOperand->getType()))
      continue;

    // Constants (nulls/undefs) are handled in insertAssignPtrTypeIntrs()
    if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand))
      continue;

    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType)
      continue;

    if (ExpectedType->isTargetExtTy())
      insertAssignTypeInstrForTargetExtTypes(cast<TargetExtType>(ExpectedType),
                                             ArgOperand, B);
    else
      replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}

Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    if (isa<UndefValue>(Op))
      Args.push_back(UndefValue::get(B.getInt32Ty()));
    else
      Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, F->getParent()->getDataLayout());
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, F->getParent()->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                                    {PtrTy, ArraySize->getType()}, {ArraySize})
                : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  Args.push_back(B.getInt32(I.getSyncScopeID()));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
  return &I;
}

void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip the special artificial variable llvm.global.annotations.
  if (GV.getName() == "llvm.global.annotations")
    return;
  if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
    // Deduce element type and store results in Global Registry.
    // Result is ignored, because TypedPointerType is not supported
    // by llvm IR general logic.
    deduceElementTypeHelper(&GV);
    Constant *Init = GV.getInitializer();
    Type *Ty = isAggrToReplace(Init) ? B.getInt32Ty() : Init->getType();
    Constant *Const = isAggrToReplace(Init) ? B.getInt32(1) : Init;
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       {GV.getType(), Ty}, {&GV, Const});
    InitInst->setArgOperand(1, Init);
  }
  if ((!GV.hasInitializer() || isa<UndefValue>(GV.getInitializer())) &&
      GV.getNumUses() == 0)
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}

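// Emit spv_assign_ptr_type for a pointer-typed instruction result, using the
// deduced element type (or the i8 fallback), and remember the created
// intrinsic call for later updates.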
void SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B) {
  reportFatalOnTokenType(I);
  if (!isPointerTy(I->getType()) || !requireAssignType(I) ||
      isa<BitCastInst>(I))
    return;

  setInsertPointSkippingPhis(B, I->getNextNode());

  Type *ElemTy = deduceElementType(I);
  Constant *EltTyConst = UndefValue::get(ElemTy);
  unsigned AddressSpace = getPointerAddressSpace(I->getType());
  CallInst *CI = buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {I->getType()},
                                 EltTyConst, I, {B.getInt32(AddressSpace)}, B);
  GR->addDeducedElementType(CI, ElemTy);
  AssignPtrTypeInstr[I] = CI;
}

void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  reportFatalOnTokenType(I);
  Type *Ty = I->getType();
  if (!Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
    setInsertPointSkippingPhis(B, I->getNextNode());
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    Constant *Const = UndefValue::get(TypeToAssign);
    buildIntrWithMD(Intrinsic::spv_assign_type, {Ty}, Const, I, {}, B);
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        // Check GetElementPtrConstantExpr case.
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      setInsertPointSkippingPhis(B, I);
      if (isa<UndefValue>(Op) && Op->getType()->isAggregateType())
        buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                        UndefValue::get(B.getInt32Ty()), {}, B);
      else if (!isa<Instruction>(Op)) // TODO: This case could be removed
        buildIntrWithMD(Intrinsic::spv_assign_type, {Op->getType()}, Op, Op, {},
                        B);
    }
  }
}

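// Post-visit bookkeeping: wrap composite-constant results and constant
// operands into spv_track_constant calls so the original constants survive
// IRTranslation for later deduplication, and record value names via
// spv_assign_name.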
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
      TrackConstants) {
    B.SetInsertPoint(I->getNextNode());
    Type *Ty = B.getInt32Ty();
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant, {Ty, Ty},
                                  t->second, I, {}, B);
    I->replaceAllUsesWith(NewOp);
    NewOp->setArgOperand(0, I);
  }
  for (const auto &Op : I->operands()) {
    if ((isa<ConstantAggregateZero>(Op) && Op->getType()->isVectorTy()) ||
        isa<PHINode>(I) || isa<SwitchInst>(I))
      TrackConstants = false;
    if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
      unsigned OpNo = Op.getOperandNo();
      if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
                 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
        continue;
      B.SetInsertPoint(I);
      auto *NewOp =
          buildIntrWithMD(Intrinsic::spv_track_constant,
                          {Op->getType(), Op->getType()}, Op, Op, {}, B);
      I->setOperand(OpNo, NewOp);
    }
  }
  if (I->hasName()) {
    reportFatalOnTokenType(I);
    setInsertPointSkippingPhis(B, I->getNextNode());
    std::vector<Value *> Args = {I};
    addStringImm(I->getName(), B, Args);
    B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
  }
}

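// Try to deduce the element type of the pointer argument OpIdx of F by
// inspecting F's call sites: the actual argument itself, its other users, and,
// transitively, the formal parameters of the calling functions.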
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // maybe a cycle
  if (FVisited.find(F) != FVisited.end())
    return nullptr;
  FVisited.insert(F);

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // search in function's call sites
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;
    Value *OpArg = CI->getArgOperand(OpIdx);
    if (!isPointerTy(OpArg->getType()))
      continue;
    // maybe we already know operand's element type
    if (Type *KnownTy = GR->findDeducedElementType(OpArg))
      return KnownTy;
    // try to deduce from the operand itself
    Visited.clear();
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited))
      return Ty;
    // search in actual parameter's users
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      Visited.clear();
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
        return Ty;
    }
    // check if it's a formal parameter of the outer function
    if (!CI->getParent() || !CI->getParent()->getParent())
      continue;
    Function *OuterF = CI->getParent()->getParent();
    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // search in function parameters
  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;

    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy) {
      if (hasPointeeTypeAttr(Arg) &&
          (ElemTy = getPointeeTypeByAttr(Arg)) != nullptr) {
        GR->addDeducedElementType(Arg, ElemTy);
      } else if ((ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
        CallInst *AssignPtrTyCI = buildIntrWithMD(
            Intrinsic::spv_assign_ptr_type, {Arg->getType()},
            Constant::getNullValue(ElemTy), Arg,
            {B.getInt32(getPointerAddressSpace(Arg->getType()))}, B);
        GR->addDeducedElementType(AssignPtrTyCI, ElemTy);
        GR->addDeducedElementType(Arg, ElemTy);
        AssignPtrTypeInstr[Arg] = AssignPtrTyCI;
      }
    }
  }
}

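// Per-function driver: collect aggregate stores, emit intrinsics for globals,
// preprocess undefs and composite constants, insert type-assignment and
// pointer-cast intrinsics, deduce operand element types, and finally visit
// each instruction to rewrite it and track constants and names.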
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();

  F = &Func;
  IRBuilder<> B(Func.getContext());
  AggrConsts.clear();
  AggrConstTypes.clear();
  AggrStores.clear();

  // StoreInst's operand type can be changed during the next transformations,
  // so we need to store it in the set. Also store already transformed types.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  SmallVector<Instruction *> Worklist;
  for (auto &I : instructions(Func))
    Worklist.push_back(&I);

  for (auto &I : Worklist) {
    insertAssignPtrTypeIntrs(I, B);
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
  }

  for (auto &I : instructions(Func))
    deduceOperandElementType(&I);

  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      B.SetInsertPoint(I->getNextNode());
    // Visitors return either the original or a newly created instruction for
    // further processing, or nullptr otherwise.
    I = visit(*I);
    if (!I)
      continue;
    processInstrAfterVisit(I, B);
  }

  return true;
}

bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  for (auto &F : M) {
    Changed |= runOnFunction(F);
  }

  for (auto &F : M) {
    // check if function parameter types are set
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
      GR = ST.getSPIRVGlobalRegistry();
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}