//===-- NVPTXLowerArgs.cpp - Lower arguments ------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//
// Arguments to kernel and device functions are passed via param space,
// which imposes certain restrictions:
// http://docs.nvidia.com/cuda/parallel-thread-execution/#state-spaces
//
// Kernel parameters are read-only and accessible only via the ld.param
// instruction, directly or via a pointer.
//
// Device function parameters are directly accessible via
// ld.param/st.param, but taking the address of one returns a pointer
// to a copy created in local space which *can't* be used with
// ld.param/st.param.
//
// Copying a byval struct into local memory in IR allows us to enforce
// the param space restrictions, gives the rest of IR a pointer w/o
// param space restrictions, and gives us an opportunity to eliminate
// the copy.
//
// Pointer arguments to kernel functions need more work to be lowered:
//
// 1. Convert non-byval pointer arguments of CUDA kernels to pointers in the
//    global address space. This allows later optimizations to emit
//    ld.global.*/st.global.* for accessing these pointer arguments. For
//    example,
//
//    define void @foo(float* %input) {
//      %v = load float, float* %input, align 4
//      ...
//    }
//
//    becomes
//
//    define void @foo(float* %input) {
//      %input2 = addrspacecast float* %input to float addrspace(1)*
//      %input3 = addrspacecast float addrspace(1)* %input2 to float*
//      %v = load float, float* %input3, align 4
//      ...
//    }
//
//    Later, NVPTXInferAddressSpaces will optimize it to
//
//    define void @foo(float* %input) {
//      %input2 = addrspacecast float* %input to float addrspace(1)*
//      %v = load float, float addrspace(1)* %input2, align 4
//      ...
//    }
//
// 2. Convert byval kernel parameters to pointers in the param address space
//    (so that NVPTX emits ld/st.param).  Convert pointers *within* a byval
//    kernel parameter to pointers in the global address space. This allows
//    NVPTX to emit ld/st.global.
//
//    struct S {
//      int *x;
//      int *y;
//    };
//    __global__ void foo(S s) {
//      int *b = s.y;
//      // use b
//    }
//
//    "b" points to the global address space. At the IR level,
//
//    define void @foo(ptr byval %input) {
//      %b_ptr = getelementptr {ptr, ptr}, ptr %input, i64 0, i32 1
//      %b = load ptr, ptr %b_ptr
//      ; use %b
//    }
//
//    becomes
//
//    define void @foo(ptr byval %input) {
//      %b_param = addrspacecast ptr %input to ptr addrspace(101)
//      %b_ptr = getelementptr {ptr, ptr}, ptr addrspace(101) %b_param, i64 0, i32 1
//      %b = load ptr, ptr addrspace(101) %b_ptr
//      %b_global = addrspacecast ptr %b to ptr addrspace(1)
//      ; use %b_global
//    }
//
//    Create a local copy of kernel byval parameters that are used in a way
//    that *might* mutate the parameter, by storing them in an alloca.
//    Mutations to "grid_constant" parameters are undefined behaviour and
//    don't require local copies.
//
//    define void @foo(ptr byval(%struct.s) align 4 %input) {
//       store i32 42, ptr %input
//       ret void
//    }
//
//    becomes
//
//    define void @foo(ptr byval(%struct.s) align 4 %input) #1 {
//      %input1 = alloca %struct.s, align 4
//      %input2 = addrspacecast ptr %input to ptr addrspace(101)
//      %input3 = load %struct.s, ptr addrspace(101) %input2, align 4
//      store %struct.s %input3, ptr %input1, align 4
//      store i32 42, ptr %input1, align 4
//      ret void
//    }
//
//    If %input were passed to a device function, or written to memory,
//    conservatively assume that %input gets mutated, and create a local copy.
//
//    Convert param-space pointers to grid_constant byval kernel parameters
//    that are passed into calls (device functions, intrinsics, inline asm) or
//    otherwise "escape" (into stores/ptrtoints) to the generic address space,
//    using the `nvvm.ptr.param.to.gen` intrinsic, so that NVPTX emits
//    cvta.param (available on sm_70+).
//
//    define void @foo(ptr byval(%struct.s) %input) {
//      ; %input is a grid_constant
//      %call = call i32 @escape(ptr %input)
//      ret void
//    }
//
//    becomes
//
//    define void @foo(ptr byval(%struct.s) %input) {
//      %input1 = addrspacecast ptr %input to ptr addrspace(101)
//      ; the following intrinsic converts the pointer to generic. We don't use
//      ; an addrspacecast, to prevent the generic -> param -> generic chain
//      ; from getting cancelled out.
//      %input1.gen = call ptr @llvm.nvvm.ptr.param.to.gen.p0.p101(ptr addrspace(101) %input1)
//      %call = call i32 @escape(ptr %input1.gen)
//      ret void
//    }
//
// TODO: merge this pass with NVPTXInferAddressSpaces so that other passes don't
// cancel the addrspacecast pair this pass emits.
//===----------------------------------------------------------------------===//

#include "MCTargetDesc/NVPTXBaseInfo.h"
#include "NVPTX.h"
#include "NVPTXTargetMachine.h"
#include "NVPTXUtilities.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include <numeric>
#include <queue>

#define DEBUG_TYPE "nvptx-lower-args"

using namespace llvm;

namespace llvm {
void initializeNVPTXLowerArgsPass(PassRegistry &);
}

namespace {
class NVPTXLowerArgs : public FunctionPass {
  bool runOnFunction(Function &F) override;

  bool runOnKernelFunction(const NVPTXTargetMachine &TM, Function &F);
  bool runOnDeviceFunction(const NVPTXTargetMachine &TM, Function &F);

  // Handle byval parameters.
  void handleByValParam(const NVPTXTargetMachine &TM, Argument *Arg);
  // Knowing Ptr must point to the global address space, this function
  // addrspacecasts Ptr to global and then back to generic. This allows
  // NVPTXInferAddressSpaces to fold the global-to-generic cast into
  // loads/stores that appear later.
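  // For example (illustrative):
  //   %ptr.global = addrspacecast ptr %ptr to ptr addrspace(1)
  //   %ptr.gen    = addrspacecast ptr addrspace(1) %ptr.global to ptr
  // and all uses of Ptr (other than the first cast) are rewritten to use
  // %ptr.gen.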
  void markPointerAsGlobal(Value *Ptr);

public:
  static char ID; // Pass identification, replacement for typeid
  NVPTXLowerArgs() : FunctionPass(ID) {}
  StringRef getPassName() const override {
    return "Lower pointer arguments of CUDA kernels";
  }
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TargetPassConfig>();
  }
};
} // namespace

char NVPTXLowerArgs::ID = 1;

INITIALIZE_PASS_BEGIN(NVPTXLowerArgs, "nvptx-lower-args",
                      "Lower arguments (NVPTX)", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_END(NVPTXLowerArgs, "nvptx-lower-args",
                    "Lower arguments (NVPTX)", false, false)

// =============================================================================
// If the function has a byval struct ptr arg, say foo(%struct.x* byval %d),
// and we can't guarantee that the only accesses are loads,
// then add the following instructions to the first basic block:
//
// %temp = alloca %struct.x, align 8
// %tempd = addrspacecast %struct.x* %d to %struct.x addrspace(101)*
// %tv = load %struct.x addrspace(101)* %tempd
// store %struct.x %tv, %struct.x* %temp, align 8
//
// The above code allocates some space on the stack and copies the incoming
// struct from param space to local space.
// Then replace all occurrences of %d by %temp.
//
// If we know that all users are GEPs or loads, replace them with equivalent
// operations in the parameter AS, so the data can be accessed via ld.param.
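//
// For example (illustrative):
//
//   %p = getelementptr %struct.x, ptr %d, i64 0, i32 1
//   %v = load i32, ptr %p
//
// becomes
//
//   %d1 = addrspacecast ptr %d to ptr addrspace(101)
//   %p1 = getelementptr %struct.x, ptr addrspace(101) %d1, i64 0, i32 1
//   %v  = load i32, ptr addrspace(101) %p1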
// =============================================================================

// For loads, replaces the \p OldUse of the pointer with a use of the same
// pointer in the parameter AS.
// For "escapes" (to memory, a function call, or a ptrtoint), casts the old use
// to generic using cvta.param.
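//
// An illustrative sketch (the names %arg and @f are hypothetical):
//   %v = load i32, ptr %arg                  ; load via the generic pointer
// becomes
//   %v = load i32, ptr addrspace(101) %arg.param
// while an escaping use such as `call void @f(ptr %arg)` becomes
//   %gen = call ptr @llvm.nvvm.ptr.param.to.gen.p0.p101(ptr addrspace(101) %arg.param)
//   call void @f(ptr %gen)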
static void convertToParamAS(Use *OldUse, Value *Param, bool GridConstant) {
  Instruction *I = dyn_cast<Instruction>(OldUse->getUser());
  assert(I && "OldUse must be in an instruction");
  struct IP {
    Use *OldUse;
    Instruction *OldInstruction;
    Value *NewParam;
  };
  SmallVector<IP> ItemsToConvert = {{OldUse, I, Param}};
  SmallVector<Instruction *> InstructionsToDelete;

  auto CloneInstInParamAS = [GridConstant](const IP &I) -> Value * {
    if (auto *LI = dyn_cast<LoadInst>(I.OldInstruction)) {
      LI->setOperand(0, I.NewParam);
      return LI;
    }
    if (auto *GEP = dyn_cast<GetElementPtrInst>(I.OldInstruction)) {
      SmallVector<Value *, 4> Indices(GEP->indices());
      auto *NewGEP = GetElementPtrInst::Create(
          GEP->getSourceElementType(), I.NewParam, Indices, GEP->getName(),
          GEP->getIterator());
      NewGEP->setIsInBounds(GEP->isInBounds());
      return NewGEP;
    }
    if (auto *BC = dyn_cast<BitCastInst>(I.OldInstruction)) {
      auto *NewBCType = PointerType::get(BC->getContext(), ADDRESS_SPACE_PARAM);
      return BitCastInst::Create(BC->getOpcode(), I.NewParam, NewBCType,
                                 BC->getName(), BC->getIterator());
    }
    if (auto *ASC = dyn_cast<AddrSpaceCastInst>(I.OldInstruction)) {
      assert(ASC->getDestAddressSpace() == ADDRESS_SPACE_PARAM);
      (void)ASC;
      // Just pass through the argument; the old ASC is no longer needed.
      return I.NewParam;
    }

    if (GridConstant) {
      auto GetParamAddrCastToGeneric =
          [](Value *Addr, Instruction *OriginalUser) -> Value * {
        PointerType *ReturnTy =
            PointerType::get(OriginalUser->getContext(), ADDRESS_SPACE_GENERIC);
        Function *CvtToGen = Intrinsic::getDeclaration(
            OriginalUser->getModule(), Intrinsic::nvvm_ptr_param_to_gen,
            {ReturnTy, PointerType::get(OriginalUser->getContext(),
                                        ADDRESS_SPACE_PARAM)});

        // Cast param address to generic address space
        Value *CvtToGenCall =
            CallInst::Create(CvtToGen, Addr, Addr->getName() + ".gen",
                             OriginalUser->getIterator());
        return CvtToGenCall;
      };

      if (auto *CI = dyn_cast<CallInst>(I.OldInstruction)) {
        I.OldUse->set(GetParamAddrCastToGeneric(I.NewParam, CI));
        return CI;
      }
      if (auto *SI = dyn_cast<StoreInst>(I.OldInstruction)) {
        // The byval address is being stored; cast it to generic.
        if (SI->getValueOperand() == I.OldUse->get())
          SI->setOperand(0, GetParamAddrCastToGeneric(I.NewParam, SI));
        return SI;
      }
      if (auto *PI = dyn_cast<PtrToIntInst>(I.OldInstruction)) {
        if (PI->getPointerOperand() == I.OldUse->get())
          PI->setOperand(0, GetParamAddrCastToGeneric(I.NewParam, PI));
        return PI;
      }
      llvm_unreachable(
          "Instruction unsupported even for grid_constant argument");
    }

    llvm_unreachable("Unsupported instruction");
  };

  while (!ItemsToConvert.empty()) {
    IP I = ItemsToConvert.pop_back_val();
    Value *NewInst = CloneInstInParamAS(I);

    if (NewInst && NewInst != I.OldInstruction) {
      // We've created a new instruction. Queue users of the old instruction to
      // be converted and the instruction itself to be deleted. We can't delete
      // the old instruction yet, because it's still in use by a load somewhere.
      for (Use &U : I.OldInstruction->uses())
        ItemsToConvert.push_back({&U, cast<Instruction>(U.getUser()), NewInst});

      InstructionsToDelete.push_back(I.OldInstruction);
    }
  }

  // Now we know that all argument loads are using addresses in parameter space
  // and we can finally remove the old instructions in generic AS.  Instructions
  // scheduled for removal should be processed in reverse order so the ones
  // closest to the load are deleted first. Otherwise they may still be in use.
  // E.g. if we have Value = Load(BitCast(GEP(arg))), InstructionsToDelete will
  // have {GEP,BitCast}. GEP can't be deleted first, because it's still used by
  // the BitCast.
  for (Instruction *I : llvm::reverse(InstructionsToDelete))
    I->eraseFromParent();
}

// Adjust the alignment of arguments passed byval in the .param address space.
// We can increase the alignment of such arguments in a way that ensures we can
// effectively vectorize their loads. We also traverse all loads from the byval
// pointer and adjust their alignment if they use a known offset. Such
// alignment changes must be consistent with the parameter stores and loads in
// NVPTXTargetLowering::LowerCall.
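//
// For example (illustrative): if the optimized argument alignment is 16, a
// load at constant offset 8 within the argument can be annotated with
// align gcd(16, 8) = 8, which enables wider vectorized parameter loads.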
static void adjustByValArgAlignment(Argument *Arg, Value *ArgInParamAS,
                                    const NVPTXTargetLowering *TLI) {
  Function *Func = Arg->getParent();
  Type *StructType = Arg->getParamByValType();
  const DataLayout DL(Func->getParent());

  uint64_t NewArgAlign =
      TLI->getFunctionParamOptimizedAlign(Func, StructType, DL).value();
  uint64_t CurArgAlign =
      Arg->getAttribute(Attribute::Alignment).getValueAsInt();

  if (CurArgAlign >= NewArgAlign)
    return;

  LLVM_DEBUG(dbgs() << "Try to use alignment " << NewArgAlign << " instead of "
                    << CurArgAlign << " for " << *Arg << '\n');

  auto NewAlignAttr =
      Attribute::get(Func->getContext(), Attribute::Alignment, NewArgAlign);
  Arg->removeAttr(Attribute::Alignment);
  Arg->addAttr(NewAlignAttr);

  struct Load {
    LoadInst *Inst;
    uint64_t Offset;
  };

  struct LoadContext {
    Value *InitialVal;
    uint64_t Offset;
  };

  SmallVector<Load> Loads;
  std::queue<LoadContext> Worklist;
  Worklist.push({ArgInParamAS, 0});
  bool IsGridConstant = isParamGridConstant(*Arg);

  while (!Worklist.empty()) {
    LoadContext Ctx = Worklist.front();
    Worklist.pop();

    for (User *CurUser : Ctx.InitialVal->users()) {
      if (auto *I = dyn_cast<LoadInst>(CurUser)) {
        Loads.push_back({I, Ctx.Offset});
        continue;
      }

      if (auto *I = dyn_cast<BitCastInst>(CurUser)) {
        Worklist.push({I, Ctx.Offset});
        continue;
      }

      if (auto *I = dyn_cast<GetElementPtrInst>(CurUser)) {
        APInt OffsetAccumulated =
            APInt::getZero(DL.getIndexSizeInBits(ADDRESS_SPACE_PARAM));

        if (!I->accumulateConstantOffset(DL, OffsetAccumulated))
          continue;

        uint64_t OffsetLimit = -1;
        uint64_t Offset = OffsetAccumulated.getLimitedValue(OffsetLimit);
        assert(Offset != OffsetLimit && "Expect Offset less than UINT64_MAX");

        Worklist.push({I, Ctx.Offset + Offset});
        continue;
      }

      // Calls, stores and ptrtoints are supported for grid_constant arguments.
      if (IsGridConstant &&
          (isa<CallInst>(CurUser) || isa<StoreInst>(CurUser) ||
           isa<PtrToIntInst>(CurUser)))
        continue;

      llvm_unreachable("All users must be one of: load, "
                       "bitcast, getelementptr, call, store, ptrtoint");
    }
  }

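  // A load at constant byte offset Offset from an argument aligned to
  // NewArgAlign is guaranteed gcd(NewArgAlign, Offset)-byte alignment, e.g.
  // offset 4 from a 16-byte-aligned base is at least 4-byte aligned.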
  for (Load &CurLoad : Loads) {
    Align NewLoadAlign(std::gcd(NewArgAlign, CurLoad.Offset));
    Align CurLoadAlign(CurLoad.Inst->getAlign());
    CurLoad.Inst->setAlignment(std::max(NewLoadAlign, CurLoadAlign));
  }
}

void NVPTXLowerArgs::handleByValParam(const NVPTXTargetMachine &TM,
                                      Argument *Arg) {
  bool IsGridConstant = isParamGridConstant(*Arg);
  Function *Func = Arg->getParent();
  BasicBlock::iterator FirstInst = Func->getEntryBlock().begin();
  Type *StructType = Arg->getParamByValType();
  assert(StructType && "Missing byval type");

  auto AreSupportedUsers = [&](Value *Start) {
    SmallVector<Value *, 16> ValuesToCheck = {Start};
    auto IsSupportedUse = [IsGridConstant](Value *V) -> bool {
      if (isa<GetElementPtrInst>(V) || isa<BitCastInst>(V) || isa<LoadInst>(V))
        return true;
      // ASCs to param space are OK, too -- we'll just strip them.
      if (auto *ASC = dyn_cast<AddrSpaceCastInst>(V)) {
        if (ASC->getDestAddressSpace() == ADDRESS_SPACE_PARAM)
          return true;
      }
      // Simple calls and stores are supported for grid_constants;
      // writes to these pointers are undefined behaviour.
      if (IsGridConstant &&
          (isa<CallInst>(V) || isa<StoreInst>(V) || isa<PtrToIntInst>(V)))
        return true;
      return false;
    };

    while (!ValuesToCheck.empty()) {
      Value *V = ValuesToCheck.pop_back_val();
      if (!IsSupportedUse(V)) {
        LLVM_DEBUG(dbgs() << "Need a "
                          << (isParamGridConstant(*Arg) ? "cast " : "copy ")
                          << "of " << *Arg << " because of " << *V << "\n");
        (void)Arg;
        return false;
      }
      if (!isa<LoadInst>(V) && !isa<CallInst>(V) && !isa<StoreInst>(V) &&
          !isa<PtrToIntInst>(V))
        llvm::append_range(ValuesToCheck, V->users());
    }
    return true;
  };

  if (llvm::all_of(Arg->users(), AreSupportedUsers)) {
    // Convert all loads and intermediate operations to use parameter AS and
    // skip creation of a local copy of the argument.
    SmallVector<Use *, 16> UsesToUpdate;
    for (Use &U : Arg->uses())
      UsesToUpdate.push_back(&U);

    Value *ArgInParamAS = new AddrSpaceCastInst(
        Arg, PointerType::get(StructType, ADDRESS_SPACE_PARAM), Arg->getName(),
        FirstInst);
    for (Use *U : UsesToUpdate)
      convertToParamAS(U, ArgInParamAS, IsGridConstant);
    LLVM_DEBUG(dbgs() << "No need to copy or cast " << *Arg << "\n");

    const auto *TLI =
        cast<NVPTXTargetLowering>(TM.getSubtargetImpl()->getTargetLowering());

    adjustByValArgAlignment(Arg, ArgInParamAS, TLI);

    return;
  }

  const DataLayout &DL = Func->getDataLayout();
  unsigned AS = DL.getAllocaAddrSpace();
  if (isParamGridConstant(*Arg)) {
    // Writes to a grid constant are undefined behaviour. We do not need a
    // temporary copy. When a pointer might have escaped, conservatively replace
    // all of its uses (which might include a device function call) with a cast
    // to the generic address space.
    IRBuilder<> IRB(&Func->getEntryBlock().front());

    // Cast argument to param address space
    auto *CastToParam = cast<AddrSpaceCastInst>(IRB.CreateAddrSpaceCast(
        Arg, IRB.getPtrTy(ADDRESS_SPACE_PARAM), Arg->getName() + ".param"));

    // Cast param address to generic address space. We do not use an
    // addrspacecast to generic here because LLVM considers `Arg` to be in the
    // generic address space, and a `generic -> param` cast followed by a `param
    // -> generic` cast will be folded away. The `param -> generic` intrinsic
    // will be correctly lowered to `cvta.param`.
    Value *CvtToGenCall = IRB.CreateIntrinsic(
        IRB.getPtrTy(ADDRESS_SPACE_GENERIC), Intrinsic::nvvm_ptr_param_to_gen,
        CastToParam, nullptr, CastToParam->getName() + ".gen");

    Arg->replaceAllUsesWith(CvtToGenCall);

    // Do not replace Arg in the cast to param space
    CastToParam->setOperand(0, Arg);
  } else {
    // Otherwise we have to create a temporary copy.
    AllocaInst *AllocA =
        new AllocaInst(StructType, AS, Arg->getName(), FirstInst);
    // Set the alignment to the alignment of the byval parameter. This is
    // because later loads/stores assume that alignment, and we are going to
    // replace the use of the byval parameter with this alloca instruction.
    AllocA->setAlignment(Func->getParamAlign(Arg->getArgNo())
                             .value_or(DL.getPrefTypeAlign(StructType)));
    Arg->replaceAllUsesWith(AllocA);

    Value *ArgInParam = new AddrSpaceCastInst(
        Arg, PointerType::get(Arg->getContext(), ADDRESS_SPACE_PARAM),
        Arg->getName(), FirstInst);
    // Be sure to propagate alignment to this load; LLVM doesn't know that NVPTX
    // addrspacecast preserves alignment.  Since params are constant, this load
    // is definitely not volatile.
    LoadInst *LI =
        new LoadInst(StructType, ArgInParam, Arg->getName(),
                     /*isVolatile=*/false, AllocA->getAlign(), FirstInst);
    new StoreInst(LI, AllocA, FirstInst);
  }
}

void NVPTXLowerArgs::markPointerAsGlobal(Value *Ptr) {
  if (Ptr->getType()->getPointerAddressSpace() != ADDRESS_SPACE_GENERIC)
    return;

  // Deciding where to emit the addrspacecast pair.
  BasicBlock::iterator InsertPt;
  if (Argument *Arg = dyn_cast<Argument>(Ptr)) {
    // Insert at the function entry if Ptr is an argument.
    InsertPt = Arg->getParent()->getEntryBlock().begin();
  } else {
    // Insert right after Ptr if Ptr is an instruction.
    InsertPt = ++cast<Instruction>(Ptr)->getIterator();
    assert(InsertPt != InsertPt->getParent()->end() &&
           "We don't call this function with Ptr being a terminator.");
  }

  Instruction *PtrInGlobal = new AddrSpaceCastInst(
      Ptr, PointerType::get(Ptr->getContext(), ADDRESS_SPACE_GLOBAL),
      Ptr->getName(), InsertPt);
  Value *PtrInGeneric = new AddrSpaceCastInst(PtrInGlobal, Ptr->getType(),
                                              Ptr->getName(), InsertPt);
  // Replace with PtrInGeneric all uses of Ptr except PtrInGlobal.
  Ptr->replaceAllUsesWith(PtrInGeneric);
  PtrInGlobal->setOperand(0, Ptr);
}

// =============================================================================
// Main function for this pass.
// =============================================================================
bool NVPTXLowerArgs::runOnKernelFunction(const NVPTXTargetMachine &TM,
                                         Function &F) {
  // Copying of byval aggregates + SROA may result in pointers being loaded as
  // integers, followed by inttoptr. We may want to mark those as global, too,
  // but only if the loaded integer is used exclusively for conversion to a
  // pointer with inttoptr.
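  // For example (illustrative):
  //   %i = load i64, ptr %byval.slot   ; a pointer loaded as an integer
  //   %p = inttoptr i64 %i to ptr      ; %i is used only by inttoptr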
  auto HandleIntToPtr = [this](Value &V) {
    if (llvm::all_of(V.users(), [](User *U) { return isa<IntToPtrInst>(U); })) {
      SmallVector<User *, 16> UsersToUpdate(V.users());
      for (User *U : UsersToUpdate)
        markPointerAsGlobal(U);
    }
  };
  if (TM.getDrvInterface() == NVPTX::CUDA) {
    // Mark pointers in byval structs as global.
    for (auto &B : F) {
      for (auto &I : B) {
        if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
          if (LI->getType()->isPointerTy() || LI->getType()->isIntegerTy()) {
            Value *UO = getUnderlyingObject(LI->getPointerOperand());
            if (Argument *Arg = dyn_cast<Argument>(UO)) {
              if (Arg->hasByValAttr()) {
                // LI is a load from a pointer within a byval kernel parameter.
                if (LI->getType()->isPointerTy())
                  markPointerAsGlobal(LI);
                else
                  HandleIntToPtr(*LI);
              }
            }
          }
        }
      }
    }
  }

  LLVM_DEBUG(dbgs() << "Lowering kernel args of " << F.getName() << "\n");
  for (Argument &Arg : F.args()) {
    if (Arg.getType()->isPointerTy()) {
      if (Arg.hasByValAttr())
        handleByValParam(TM, &Arg);
      else if (TM.getDrvInterface() == NVPTX::CUDA)
        markPointerAsGlobal(&Arg);
    } else if (Arg.getType()->isIntegerTy() &&
               TM.getDrvInterface() == NVPTX::CUDA) {
      HandleIntToPtr(Arg);
    }
  }
  return true;
}

// Device functions only need to copy byval args into local memory.
bool NVPTXLowerArgs::runOnDeviceFunction(const NVPTXTargetMachine &TM,
                                         Function &F) {
  LLVM_DEBUG(dbgs() << "Lowering function args of " << F.getName() << "\n");
  for (Argument &Arg : F.args())
    if (Arg.getType()->isPointerTy() && Arg.hasByValAttr())
      handleByValParam(TM, &Arg);
  return true;
}

bool NVPTXLowerArgs::runOnFunction(Function &F) {
  auto &TM = getAnalysis<TargetPassConfig>().getTM<NVPTXTargetMachine>();

  return isKernelFunction(F) ? runOnKernelFunction(TM, F)
                             : runOnDeviceFunction(TM, F);
}

FunctionPass *llvm::createNVPTXLowerArgsPass() { return new NVPTXLowerArgs(); }