//===-- IPO/OpenMPOpt.cpp - Collection of OpenMP specific optimizations ---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// OpenMP specific optimizations:
//
// - Deduplication of runtime calls, e.g., omp_get_thread_num.
// - Replacing globalized device memory with stack memory.
// - Replacing globalized device memory with shared memory.
// - Parallel region merging.
// - Transforming generic-mode device kernels to SPMD mode.
// - Specializing the state machine for generic-mode device kernels.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/OpenMPOpt.h"

#include "llvm/ADT/EnumeratedArray.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/CallGraphSCCPass.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/Frontend/OpenMP/OMPConstants.h"
#include "llvm/Frontend/OpenMP/OMPIRBuilder.h"
#include "llvm/IR/Assumptions.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/IPO/Attributor.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/CallGraphUpdater.h"

#include <algorithm>
#include <optional>
#include <string>

using namespace llvm;
using namespace omp;

#define DEBUG_TYPE "openmp-opt"

static cl::opt<bool> DisableOpenMPOptimizations(
    "openmp-opt-disable", cl::desc("Disable OpenMP specific optimizations."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> EnableParallelRegionMerging(
    "openmp-opt-enable-merging",
    cl::desc("Enable the OpenMP region merging optimization."), cl::Hidden,
    cl::init(false));

static cl::opt<bool>
    DisableInternalization("openmp-opt-disable-internalization",
                           cl::desc("Disable function internalization."),
                           cl::Hidden, cl::init(false));

static cl::opt<bool> DeduceICVValues("openmp-deduce-icv-values",
                                     cl::init(false), cl::Hidden);
static cl::opt<bool> PrintICVValues("openmp-print-icv-values", cl::init(false),
                                    cl::Hidden);
static cl::opt<bool> PrintOpenMPKernels("openmp-print-gpu-kernels",
                                        cl::init(false), cl::Hidden);

static cl::opt<bool> HideMemoryTransferLatency(
    "openmp-hide-memory-transfer-latency",
    cl::desc("[WIP] Tries to hide the latency of host to device memory"
             " transfers"),
    cl::Hidden, cl::init(false));

static cl::opt<bool> DisableOpenMPOptDeglobalization(
    "openmp-opt-disable-deglobalization",
    cl::desc("Disable OpenMP optimizations involving deglobalization."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> DisableOpenMPOptSPMDization(
    "openmp-opt-disable-spmdization",
    cl::desc("Disable OpenMP optimizations involving SPMD-ization."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> DisableOpenMPOptFolding(
    "openmp-opt-disable-folding",
    cl::desc("Disable OpenMP optimizations involving folding."), cl::Hidden,
    cl::init(false));

static cl::opt<bool> DisableOpenMPOptStateMachineRewrite(
    "openmp-opt-disable-state-machine-rewrite",
    cl::desc("Disable OpenMP optimizations that replace the state machine."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> DisableOpenMPOptBarrierElimination(
    "openmp-opt-disable-barrier-elimination",
    cl::desc("Disable OpenMP optimizations that eliminate barriers."),
    cl::Hidden, cl::init(false));

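// Illustrative invocation (a sketch; it assumes the pass is registered as
// "openmp-opt" in the new pass manager): the flags above can be combined with
// a plain `opt` run, e.g.
//   opt -passes=openmp-opt -openmp-opt-disable-spmdization input.bc -o out.bc
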
static cl::opt<bool> PrintModuleAfterOptimizations(
    "openmp-opt-print-module-after",
    cl::desc("Print the current module after OpenMP optimizations."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> PrintModuleBeforeOptimizations(
    "openmp-opt-print-module-before",
    cl::desc("Print the current module before OpenMP optimizations."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> AlwaysInlineDeviceFunctions(
    "openmp-opt-inline-device",
    cl::desc("Inline all applicable functions on the device."), cl::Hidden,
    cl::init(false));

static cl::opt<bool>
    EnableVerboseRemarks("openmp-opt-verbose-remarks",
                         cl::desc("Enables more verbose remarks."), cl::Hidden,
                         cl::init(false));

static cl::opt<unsigned>
    SetFixpointIterations("openmp-opt-max-iterations", cl::Hidden,
                          cl::desc("Maximal number of attributor iterations."),
                          cl::init(256));

static cl::opt<unsigned>
    SharedMemoryLimit("openmp-opt-shared-limit", cl::Hidden,
                      cl::desc("Maximum amount of shared memory to use."),
                      cl::init(std::numeric_limits<unsigned>::max()));

STATISTIC(NumOpenMPRuntimeCallsDeduplicated,
          "Number of OpenMP runtime calls deduplicated");
STATISTIC(NumOpenMPParallelRegionsDeleted,
          "Number of OpenMP parallel regions deleted");
STATISTIC(NumOpenMPRuntimeFunctionsIdentified,
          "Number of OpenMP runtime functions identified");
STATISTIC(NumOpenMPRuntimeFunctionUsesIdentified,
          "Number of OpenMP runtime function uses identified");
STATISTIC(NumOpenMPTargetRegionKernels,
          "Number of OpenMP target region entry points (=kernels) identified");
STATISTIC(NumOpenMPTargetRegionKernelsSPMD,
          "Number of OpenMP target region entry points (=kernels) executed in "
          "SPMD-mode instead of generic-mode");
STATISTIC(NumOpenMPTargetRegionKernelsWithoutStateMachine,
          "Number of OpenMP target region entry points (=kernels) executed in "
          "generic-mode without a state machine");
STATISTIC(NumOpenMPTargetRegionKernelsCustomStateMachineWithFallback,
          "Number of OpenMP target region entry points (=kernels) executed in "
          "generic-mode with customized state machines with fallback");
STATISTIC(NumOpenMPTargetRegionKernelsCustomStateMachineWithoutFallback,
          "Number of OpenMP target region entry points (=kernels) executed in "
          "generic-mode with customized state machines without fallback");
STATISTIC(
    NumOpenMPParallelRegionsReplacedInGPUStateMachine,
    "Number of OpenMP parallel regions replaced with ID in GPU state machines");
STATISTIC(NumOpenMPParallelRegionsMerged,
          "Number of OpenMP parallel regions merged");
STATISTIC(NumBytesMovedToSharedMemory,
          "Amount of memory pushed to shared memory");
STATISTIC(NumBarriersEliminated, "Number of redundant barriers eliminated");

#if !defined(NDEBUG)
static constexpr auto TAG = "[" DEBUG_TYPE "]";
#endif

namespace {

struct AAHeapToShared;

struct AAICVTracker;

/// OpenMP specific information. For now, stores RFIs and ICVs also needed for
/// Attributor runs.
struct OMPInformationCache : public InformationCache {
  OMPInformationCache(Module &M, AnalysisGetter &AG,
                      BumpPtrAllocator &Allocator, SetVector<Function *> *CGSCC,
                      KernelSet &Kernels, bool OpenMPPostLink)
      : InformationCache(M, AG, Allocator, CGSCC), OMPBuilder(M),
        Kernels(Kernels), OpenMPPostLink(OpenMPPostLink) {

    OMPBuilder.initialize();
    initializeRuntimeFunctions(M);
    initializeInternalControlVars();
  }

  /// Generic information that describes an internal control variable.
  struct InternalControlVarInfo {
    /// The kind, as described by InternalControlVar enum.
    InternalControlVar Kind;

    /// The name of the ICV.
    StringRef Name;

    /// Environment variable associated with this ICV.
    StringRef EnvVarName;

    /// Initial value kind.
    ICVInitValue InitKind;

    /// Initial value.
    ConstantInt *InitValue;

    /// Setter RTL function associated with this ICV.
    RuntimeFunction Setter;

    /// Getter RTL function associated with this ICV.
    RuntimeFunction Getter;

    /// RTL Function corresponding to the override clause of this ICV
    RuntimeFunction Clause;
  };

  /// Generic information that describes a runtime function
  struct RuntimeFunctionInfo {

    /// The kind, as described by the RuntimeFunction enum.
    RuntimeFunction Kind;

    /// The name of the function.
    StringRef Name;

    /// Flag to indicate a variadic function.
    bool IsVarArg;

    /// The return type of the function.
    Type *ReturnType;

    /// The argument types of the function.
    SmallVector<Type *, 8> ArgumentTypes;

    /// The declaration if available.
    Function *Declaration = nullptr;

    /// Uses of this runtime function per function containing the use.
    using UseVector = SmallVector<Use *, 16>;

    /// Clear UsesMap for runtime function.
    void clearUsesMap() { UsesMap.clear(); }

    /// Boolean conversion that is true if the runtime function was found.
    operator bool() const { return Declaration; }

    /// Return the vector of uses in function \p F.
    UseVector &getOrCreateUseVector(Function *F) {
      std::shared_ptr<UseVector> &UV = UsesMap[F];
      if (!UV)
        UV = std::make_shared<UseVector>();
      return *UV;
    }

    /// Return the vector of uses in function \p F or `nullptr` if there are
    /// none.
    const UseVector *getUseVector(Function &F) const {
      auto I = UsesMap.find(&F);
      if (I != UsesMap.end())
        return I->second.get();
      return nullptr;
    }

    /// Return how many functions contain uses of this runtime function.
    size_t getNumFunctionsWithUses() const { return UsesMap.size(); }

    /// Return the number of arguments (or the minimal number for variadic
    /// functions).
    size_t getNumArgs() const { return ArgumentTypes.size(); }

    /// Run the callback \p CB on each use and forget the use if the result is
    /// true. The callback will be fed the function in which the use was
    /// encountered as second argument.
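    /// Illustrative caller (a sketch, not part of the original documentation):
    ///   RFI.foreachUse(SCC, [&](Use &U, Function &F) {
    ///     if (CallInst *CI = OpenMPOpt::getCallIfRegularCall(U, &RFI)) {
    ///       CI->eraseFromParent();
    ///       return true; // Returning true makes foreachUse forget this use.
    ///     }
    ///     return false;
    ///   });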
    void foreachUse(SmallVectorImpl<Function *> &SCC,
                    function_ref<bool(Use &, Function &)> CB) {
      for (Function *F : SCC)
        foreachUse(CB, F);
    }

    /// Run the callback \p CB on each use within the function \p F and forget
    /// the use if the result is true.
    void foreachUse(function_ref<bool(Use &, Function &)> CB, Function *F) {
      SmallVector<unsigned, 8> ToBeDeleted;
      ToBeDeleted.clear();

      unsigned Idx = 0;
      UseVector &UV = getOrCreateUseVector(F);

      for (Use *U : UV) {
        if (CB(*U, *F))
          ToBeDeleted.push_back(Idx);
        ++Idx;
      }

      // Remove the to-be-deleted indices in reverse order as prior
      // modifications will not modify the smaller indices.
      while (!ToBeDeleted.empty()) {
        unsigned Idx = ToBeDeleted.pop_back_val();
        UV[Idx] = UV.back();
        UV.pop_back();
      }
    }

  private:
    /// Map from functions to all uses of this runtime function contained in
    /// them.
    DenseMap<Function *, std::shared_ptr<UseVector>> UsesMap;

  public:
    /// Iterators for the uses of this runtime function.
    decltype(UsesMap)::iterator begin() { return UsesMap.begin(); }
    decltype(UsesMap)::iterator end() { return UsesMap.end(); }
  };

  /// An OpenMP-IR-Builder instance
  OpenMPIRBuilder OMPBuilder;

  /// Map from runtime function kind to the runtime function description.
  EnumeratedArray<RuntimeFunctionInfo, RuntimeFunction,
                  RuntimeFunction::OMPRTL___last>
      RFIs;

  /// Map from function declarations/definitions to their runtime enum type.
  DenseMap<Function *, RuntimeFunction> RuntimeFunctionIDMap;

  /// Map from ICV kind to the ICV description.
  EnumeratedArray<InternalControlVarInfo, InternalControlVar,
                  InternalControlVar::ICV___last>
      ICVs;

  /// Helper to initialize all internal control variable information for those
  /// defined in OMPKinds.def.
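  /// The entries are pulled in via the ICV_* macros below; an entry is
  /// expected to look roughly like (shape assumed for illustration, see
  /// OMPKinds.def for the authoritative definitions):
  ///   ICV_DATA_ENV(ICV_nthreads, "nthreads", "OMP_NUM_THREADS",
  ///                ICV_IMPLEMENTATION_DEFINED)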
  void initializeInternalControlVars() {
#define ICV_RT_SET(_Name, RTL)  \
  {                             \
    auto &ICV = ICVs[_Name];    \
    ICV.Setter = RTL;           \
  }
#define ICV_RT_GET(Name, RTL)   \
  {                             \
    auto &ICV = ICVs[Name];     \
    ICV.Getter = RTL;           \
  }
#define ICV_DATA_ENV(Enum, _Name, _EnvVarName, Init)                          \
  {                                                                           \
    auto &ICV = ICVs[Enum];                                                   \
    ICV.Name = _Name;                                                         \
    ICV.Kind = Enum;                                                          \
    ICV.InitKind = Init;                                                      \
    ICV.EnvVarName = _EnvVarName;                                             \
    switch (ICV.InitKind) {                                                   \
    case ICV_IMPLEMENTATION_DEFINED:                                          \
      ICV.InitValue = nullptr;                                                \
      break;                                                                  \
    case ICV_ZERO:                                                            \
      ICV.InitValue = ConstantInt::get(                                       \
          Type::getInt32Ty(OMPBuilder.Int32->getContext()), 0);               \
      break;                                                                  \
    case ICV_FALSE:                                                           \
      ICV.InitValue = ConstantInt::getFalse(OMPBuilder.Int1->getContext());   \
      break;                                                                  \
    case ICV_LAST:                                                            \
      break;                                                                  \
    }                                                                         \
  }
#include "llvm/Frontend/OpenMP/OMPKinds.def"
  }

  /// Returns true if the function declaration \p F matches the runtime
  /// function types, that is, return type \p RTFRetType, and argument types
  /// \p RTFArgTypes.
  static bool declMatchesRTFTypes(Function *F, Type *RTFRetType,
                                  SmallVector<Type *, 8> &RTFArgTypes) {
    // TODO: We should output information to the user (under debug output
    // and via remarks).

    if (!F)
      return false;
    if (F->getReturnType() != RTFRetType)
      return false;
    if (F->arg_size() != RTFArgTypes.size())
      return false;

    auto *RTFTyIt = RTFArgTypes.begin();
    for (Argument &Arg : F->args()) {
      if (Arg.getType() != *RTFTyIt)
        return false;

      ++RTFTyIt;
    }

    return true;
  }

  // Helper to collect all uses of the declaration in the UsesMap.
  unsigned collectUses(RuntimeFunctionInfo &RFI, bool CollectStats = true) {
    unsigned NumUses = 0;
    if (!RFI.Declaration)
      return NumUses;
    OMPBuilder.addAttributes(RFI.Kind, *RFI.Declaration);

    if (CollectStats) {
      NumOpenMPRuntimeFunctionsIdentified += 1;
      NumOpenMPRuntimeFunctionUsesIdentified += RFI.Declaration->getNumUses();
    }

    // TODO: We directly convert uses into proper calls and unknown uses.
    for (Use &U : RFI.Declaration->uses()) {
      if (Instruction *UserI = dyn_cast<Instruction>(U.getUser())) {
        if (ModuleSlice.empty() || ModuleSlice.count(UserI->getFunction())) {
          RFI.getOrCreateUseVector(UserI->getFunction()).push_back(&U);
          ++NumUses;
        }
      } else {
        RFI.getOrCreateUseVector(nullptr).push_back(&U);
        ++NumUses;
      }
    }
    return NumUses;
  }

  // Helper function to recollect uses of a runtime function.
  void recollectUsesForFunction(RuntimeFunction RTF) {
    auto &RFI = RFIs[RTF];
    RFI.clearUsesMap();
    collectUses(RFI, /*CollectStats*/ false);
  }

  // Helper function to recollect uses of all runtime functions.
  void recollectUses() {
    for (int Idx = 0; Idx < RFIs.size(); ++Idx)
      recollectUsesForFunction(static_cast<RuntimeFunction>(Idx));
  }

  // Helper function to inherit the calling convention of the function callee.
  void setCallingConvention(FunctionCallee Callee, CallInst *CI) {
    if (Function *Fn = dyn_cast<Function>(Callee.getCallee()))
      CI->setCallingConv(Fn->getCallingConv());
  }

  // Helper function to determine if it's legal to create a call to the runtime
  // functions.
  bool runtimeFnsAvailable(ArrayRef<RuntimeFunction> Fns) {
    // We can always emit calls if we haven't yet linked in the runtime.
    if (!OpenMPPostLink)
      return true;

    // Once the runtime has already been linked in we cannot emit calls to
    // any undefined functions.
    for (RuntimeFunction Fn : Fns) {
      RuntimeFunctionInfo &RFI = RFIs[Fn];

      if (RFI.Declaration && RFI.Declaration->isDeclaration())
        return false;
    }
    return true;
  }

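  // Illustrative guard (a sketch, not a call site taken from this excerpt): a
  // transformation that needs to introduce runtime calls would bail out via
  //   if (!runtimeFnsAvailable({OMPRTL___kmpc_alloc_shared,
  //                             OMPRTL___kmpc_free_shared}))
  //     return false;
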
  /// Helper to initialize all runtime function information for those defined
  /// in OpenMPKinds.def.
  void initializeRuntimeFunctions(Module &M) {

    // Helper macros for handling __VA_ARGS__ in OMP_RTL
#define OMP_TYPE(VarName, ...)          \
  Type *VarName = OMPBuilder.VarName;   \
  (void)VarName;

#define OMP_ARRAY_TYPE(VarName, ...)                            \
  ArrayType *VarName##Ty = OMPBuilder.VarName##Ty;              \
  (void)VarName##Ty;                                            \
  PointerType *VarName##PtrTy = OMPBuilder.VarName##PtrTy;      \
  (void)VarName##PtrTy;

#define OMP_FUNCTION_TYPE(VarName, ...)                         \
  FunctionType *VarName = OMPBuilder.VarName;                   \
  (void)VarName;                                                \
  PointerType *VarName##Ptr = OMPBuilder.VarName##Ptr;          \
  (void)VarName##Ptr;

#define OMP_STRUCT_TYPE(VarName, ...)                           \
  StructType *VarName = OMPBuilder.VarName;                     \
  (void)VarName;                                                \
  PointerType *VarName##Ptr = OMPBuilder.VarName##Ptr;          \
  (void)VarName##Ptr;

#define OMP_RTL(_Enum, _Name, _IsVarArg, _ReturnType, ...)                    \
  {                                                                           \
    SmallVector<Type *, 8> ArgsTypes({__VA_ARGS__});                          \
    Function *F = M.getFunction(_Name);                                       \
    RTLFunctions.insert(F);                                                   \
    if (declMatchesRTFTypes(F, OMPBuilder._ReturnType, ArgsTypes)) {          \
      RuntimeFunctionIDMap[F] = _Enum;                                        \
      auto &RFI = RFIs[_Enum];                                                \
      RFI.Kind = _Enum;                                                       \
      RFI.Name = _Name;                                                       \
      RFI.IsVarArg = _IsVarArg;                                               \
      RFI.ReturnType = OMPBuilder._ReturnType;                                \
      RFI.ArgumentTypes = std::move(ArgsTypes);                               \
      RFI.Declaration = F;                                                    \
      unsigned NumUses = collectUses(RFI);                                    \
      (void)NumUses;                                                          \
      LLVM_DEBUG({                                                            \
        dbgs() << TAG << RFI.Name << (RFI.Declaration ? "" : " not")          \
               << " found\n";                                                 \
        if (RFI.Declaration)                                                  \
          dbgs() << TAG << "-> got " << NumUses << " uses in "                \
                 << RFI.getNumFunctionsWithUses()                             \
                 << " different functions.\n";                                \
      });                                                                     \
    }                                                                         \
  }
#include "llvm/Frontend/OpenMP/OMPKinds.def"

    // Remove the `noinline` attribute from `__kmpc`, `ompx::` and `omp_`
    // functions, except if `optnone` is present.
    if (isOpenMPDevice(M)) {
      for (Function &F : M) {
        for (StringRef Prefix : {"__kmpc", "_ZN4ompx", "omp_"})
          if (F.hasFnAttribute(Attribute::NoInline) &&
              F.getName().startswith(Prefix) &&
              !F.hasFnAttribute(Attribute::OptimizeNone))
            F.removeFnAttr(Attribute::NoInline);
      }
    }

    // TODO: We should attach the attributes defined in OMPKinds.def.
  }

  /// Collection of known kernels (\see Kernel) in the module.
  KernelSet &Kernels;

  /// Collection of known OpenMP runtime functions.
  DenseSet<const Function *> RTLFunctions;

  /// Indicates if we have already linked in the OpenMP device library.
  bool OpenMPPostLink = false;
};

template <typename Ty, bool InsertInvalidates = true>
struct BooleanStateWithSetVector : public BooleanState {
  bool contains(const Ty &Elem) const { return Set.contains(Elem); }
  bool insert(const Ty &Elem) {
    if (InsertInvalidates)
      BooleanState::indicatePessimisticFixpoint();
    return Set.insert(Elem);
  }

  const Ty &operator[](int Idx) const { return Set[Idx]; }
  bool operator==(const BooleanStateWithSetVector &RHS) const {
    return BooleanState::operator==(RHS) && Set == RHS.Set;
  }
  bool operator!=(const BooleanStateWithSetVector &RHS) const {
    return !(*this == RHS);
  }

  bool empty() const { return Set.empty(); }
  size_t size() const { return Set.size(); }

  /// "Clamp" this state with \p RHS.
  BooleanStateWithSetVector &operator^=(const BooleanStateWithSetVector &RHS) {
    BooleanState::operator^=(RHS);
    Set.insert(RHS.Set.begin(), RHS.Set.end());
    return *this;
  }

private:
  /// A set to keep track of elements.
  SetVector<Ty> Set;

public:
  typename decltype(Set)::iterator begin() { return Set.begin(); }
  typename decltype(Set)::iterator end() { return Set.end(); }
  typename decltype(Set)::const_iterator begin() const { return Set.begin(); }
  typename decltype(Set)::const_iterator end() const { return Set.end(); }
};

template <typename Ty, bool InsertInvalidates = true>
using BooleanStateWithPtrSetVector =
    BooleanStateWithSetVector<Ty *, InsertInvalidates>;

struct KernelInfoState : AbstractState {
  /// Flag to track if we reached a fixpoint.
  bool IsAtFixpoint = false;

  /// The parallel regions (identified by the outlined parallel functions) that
  /// can be reached from the associated function.
  BooleanStateWithPtrSetVector<Function, /* InsertInvalidates */ false>
      ReachedKnownParallelRegions;

  /// State to track what parallel region we might reach.
  BooleanStateWithPtrSetVector<CallBase> ReachedUnknownParallelRegions;

  /// State to track if we are in SPMD-mode, assumed or known, and why we
  /// decided we cannot be. If it is assumed, then RequiresFullRuntime should
  /// also be false.
  BooleanStateWithPtrSetVector<Instruction, false> SPMDCompatibilityTracker;

  /// The __kmpc_target_init call in this kernel, if any. If we find more than
  /// one we abort as the kernel is malformed.
  CallBase *KernelInitCB = nullptr;

  /// The __kmpc_target_deinit call in this kernel, if any. If we find more than
  /// one we abort as the kernel is malformed.
  CallBase *KernelDeinitCB = nullptr;

  /// Flag to indicate if the associated function is a kernel entry.
  bool IsKernelEntry = false;

  /// State to track what kernel entries can reach the associated function.
  BooleanStateWithPtrSetVector<Function, false> ReachingKernelEntries;

  /// State to indicate if we can track the parallel level of the associated
  /// function. We will give up tracking if we encounter an unknown caller or
  /// the caller is __kmpc_parallel_51.
  BooleanStateWithSetVector<uint8_t> ParallelLevels;

  /// Flag that indicates if the kernel has nested parallelism.
  bool NestedParallelism = false;

  /// Abstract State interface
  ///{

  KernelInfoState() = default;
  KernelInfoState(bool BestState) {
    if (!BestState)
      indicatePessimisticFixpoint();
  }

  /// See AbstractState::isValidState(...)
  bool isValidState() const override { return true; }

  /// See AbstractState::isAtFixpoint(...)
  bool isAtFixpoint() const override { return IsAtFixpoint; }

  /// See AbstractState::indicatePessimisticFixpoint(...)
  ChangeStatus indicatePessimisticFixpoint() override {
    IsAtFixpoint = true;
    ParallelLevels.indicatePessimisticFixpoint();
    ReachingKernelEntries.indicatePessimisticFixpoint();
    SPMDCompatibilityTracker.indicatePessimisticFixpoint();
    ReachedKnownParallelRegions.indicatePessimisticFixpoint();
    ReachedUnknownParallelRegions.indicatePessimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  /// See AbstractState::indicateOptimisticFixpoint(...)
  ChangeStatus indicateOptimisticFixpoint() override {
    IsAtFixpoint = true;
    ParallelLevels.indicateOptimisticFixpoint();
    ReachingKernelEntries.indicateOptimisticFixpoint();
    SPMDCompatibilityTracker.indicateOptimisticFixpoint();
    ReachedKnownParallelRegions.indicateOptimisticFixpoint();
    ReachedUnknownParallelRegions.indicateOptimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  /// Return the assumed state
  KernelInfoState &getAssumed() { return *this; }
  const KernelInfoState &getAssumed() const { return *this; }

  bool operator==(const KernelInfoState &RHS) const {
    if (SPMDCompatibilityTracker != RHS.SPMDCompatibilityTracker)
      return false;
    if (ReachedKnownParallelRegions != RHS.ReachedKnownParallelRegions)
      return false;
    if (ReachedUnknownParallelRegions != RHS.ReachedUnknownParallelRegions)
      return false;
    if (ReachingKernelEntries != RHS.ReachingKernelEntries)
      return false;
    if (ParallelLevels != RHS.ParallelLevels)
      return false;
    return true;
  }

  /// Returns true if this kernel contains any OpenMP parallel regions.
  bool mayContainParallelRegion() {
    return !ReachedKnownParallelRegions.empty() ||
           !ReachedUnknownParallelRegions.empty();
  }

  /// Return empty set as the best state of potential values.
  static KernelInfoState getBestState() { return KernelInfoState(true); }

  static KernelInfoState getBestState(KernelInfoState &KIS) {
    return getBestState();
  }

  /// Return full set as the worst state of potential values.
  static KernelInfoState getWorstState() { return KernelInfoState(false); }

  /// "Clamp" this state with \p KIS.
  KernelInfoState operator^=(const KernelInfoState &KIS) {
    // Do not merge two different _init and _deinit call sites.
    if (KIS.KernelInitCB) {
      if (KernelInitCB && KernelInitCB != KIS.KernelInitCB)
        llvm_unreachable("Kernel that calls another kernel violates OpenMP-Opt "
                         "assumptions.");
      KernelInitCB = KIS.KernelInitCB;
    }
    if (KIS.KernelDeinitCB) {
      if (KernelDeinitCB && KernelDeinitCB != KIS.KernelDeinitCB)
        llvm_unreachable("Kernel that calls another kernel violates OpenMP-Opt "
                         "assumptions.");
      KernelDeinitCB = KIS.KernelDeinitCB;
    }
    SPMDCompatibilityTracker ^= KIS.SPMDCompatibilityTracker;
    ReachedKnownParallelRegions ^= KIS.ReachedKnownParallelRegions;
    ReachedUnknownParallelRegions ^= KIS.ReachedUnknownParallelRegions;
    NestedParallelism |= KIS.NestedParallelism;
    return *this;
  }

  KernelInfoState operator&=(const KernelInfoState &KIS) {
    return (*this ^= KIS);
  }

  ///}
};

/// Used to map the values physically (in the IR) stored in an offload
/// array, to a vector in memory.
struct OffloadArray {
  /// Physical array (in the IR).
  AllocaInst *Array = nullptr;
  /// Mapped values.
  SmallVector<Value *, 8> StoredValues;
  /// Last stores made in the offload array.
  SmallVector<StoreInst *, 8> LastAccesses;

  OffloadArray() = default;

  /// Initializes the OffloadArray with the values stored in \p Array before
  /// instruction \p Before is reached. Returns false if the initialization
  /// fails.
  /// This MUST be used immediately after the construction of the object.
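  /// Illustrative outcome (assumed IR shape, not taken from a test): for
  ///   %ptrs = alloca [2 x ptr]
  ///   store ptr %a, ptr %ptrs
  ///   %gep = getelementptr [2 x ptr], ptr %ptrs, i32 0, i32 1
  ///   store ptr %b, ptr %gep
  /// a successful initialize() leaves StoredValues as {%a, %b} and
  /// LastAccesses as the two stores.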
  bool initialize(AllocaInst &Array, Instruction &Before) {
    if (!Array.getAllocatedType()->isArrayTy())
      return false;

    if (!getValues(Array, Before))
      return false;

    this->Array = &Array;
    return true;
  }

  static const unsigned DeviceIDArgNum = 1;
  static const unsigned BasePtrsArgNum = 3;
  static const unsigned PtrsArgNum = 4;
  static const unsigned SizesArgNum = 5;

private:
  /// Traverses the BasicBlock where \p Array is, collecting the stores made to
  /// \p Array, leaving StoredValues with the values stored before the
  /// instruction \p Before is reached.
  bool getValues(AllocaInst &Array, Instruction &Before) {
    // Initialize container.
    const uint64_t NumValues = Array.getAllocatedType()->getArrayNumElements();
    StoredValues.assign(NumValues, nullptr);
    LastAccesses.assign(NumValues, nullptr);

    // TODO: This assumes the instruction \p Before is in the same
    //  BasicBlock as Array. Make it general, for any control flow graph.
    BasicBlock *BB = Array.getParent();
    if (BB != Before.getParent())
      return false;

    const DataLayout &DL = Array.getModule()->getDataLayout();
    const unsigned int PointerSize = DL.getPointerSize();

    for (Instruction &I : *BB) {
      if (&I == &Before)
        break;

      if (!isa<StoreInst>(&I))
        continue;

      auto *S = cast<StoreInst>(&I);
      int64_t Offset = -1;
      auto *Dst =
          GetPointerBaseWithConstantOffset(S->getPointerOperand(), Offset, DL);
      if (Dst == &Array) {
        int64_t Idx = Offset / PointerSize;
        StoredValues[Idx] = getUnderlyingObject(S->getValueOperand());
        LastAccesses[Idx] = S;
      }
    }

    return isFilled();
  }

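  // Note on the index computation above (illustrative): with 8-byte pointers,
  // a store at constant offset 16 from the array base lands in
  // StoredValues[2].
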
  /// Returns true if all values in StoredValues and
  /// LastAccesses are not nullptrs.
  bool isFilled() {
    const unsigned NumValues = StoredValues.size();
    for (unsigned I = 0; I < NumValues; ++I) {
      if (!StoredValues[I] || !LastAccesses[I])
        return false;
    }

    return true;
  }
};

struct OpenMPOpt {

  using OptimizationRemarkGetter =
      function_ref<OptimizationRemarkEmitter &(Function *)>;

  OpenMPOpt(SmallVectorImpl<Function *> &SCC, CallGraphUpdater &CGUpdater,
            OptimizationRemarkGetter OREGetter,
            OMPInformationCache &OMPInfoCache, Attributor &A)
      : M(*(*SCC.begin())->getParent()), SCC(SCC), CGUpdater(CGUpdater),
        OREGetter(OREGetter), OMPInfoCache(OMPInfoCache), A(A) {}

  /// Check if any remarks are enabled for openmp-opt
  bool remarksEnabled() {
    auto &Ctx = M.getContext();
    return Ctx.getDiagHandlerPtr()->isAnyRemarkEnabled(DEBUG_TYPE);
  }

  /// Run all OpenMP optimizations on the underlying SCC/ModuleSlice.
  bool run(bool IsModulePass) {
    if (SCC.empty())
      return false;

    bool Changed = false;

    LLVM_DEBUG(dbgs() << TAG << "Run on SCC with " << SCC.size()
                      << " functions in a slice with "
                      << OMPInfoCache.ModuleSlice.size() << " functions\n");

    if (IsModulePass) {
      Changed |= runAttributor(IsModulePass);

      // Recollect uses, in case Attributor deleted any.
      OMPInfoCache.recollectUses();

      // TODO: This should be folded into buildCustomStateMachine.
      Changed |= rewriteDeviceCodeStateMachine();

      if (remarksEnabled())
        analysisGlobalization();
    } else {
      if (PrintICVValues)
        printICVs();
      if (PrintOpenMPKernels)
        printKernels();

      Changed |= runAttributor(IsModulePass);

      // Recollect uses, in case Attributor deleted any.
      OMPInfoCache.recollectUses();

      Changed |= deleteParallelRegions();

      if (HideMemoryTransferLatency)
        Changed |= hideMemTransfersLatency();
      Changed |= deduplicateRuntimeCalls();
      if (EnableParallelRegionMerging) {
        if (mergeParallelRegions()) {
          deduplicateRuntimeCalls();
          Changed = true;
        }
      }
    }

    return Changed;
  }

  /// Print initial ICV values for testing.
  /// FIXME: This should be done from the Attributor once it is added.
  void printICVs() const {
    InternalControlVar ICVs[] = {ICV_nthreads, ICV_active_levels, ICV_cancel,
                                 ICV_proc_bind};

    for (Function *F : SCC) {
      for (auto ICV : ICVs) {
        auto ICVInfo = OMPInfoCache.ICVs[ICV];
        auto Remark = [&](OptimizationRemarkAnalysis ORA) {
          return ORA << "OpenMP ICV " << ore::NV("OpenMPICV", ICVInfo.Name)
                     << " Value: "
                     << (ICVInfo.InitValue
                             ? toString(ICVInfo.InitValue->getValue(), 10, true)
                             : "IMPLEMENTATION_DEFINED");
        };

        emitRemark<OptimizationRemarkAnalysis>(F, "OpenMPICVTracker", Remark);
      }
    }
  }

  /// Print OpenMP GPU kernels for testing.
  void printKernels() const {
    for (Function *F : SCC) {
      if (!OMPInfoCache.Kernels.count(F))
        continue;

      auto Remark = [&](OptimizationRemarkAnalysis ORA) {
        return ORA << "OpenMP GPU kernel "
                   << ore::NV("OpenMPGPUKernel", F->getName()) << "\n";
      };

      emitRemark<OptimizationRemarkAnalysis>(F, "OpenMPGPU", Remark);
    }
  }

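  // Note (assumed driver spelling): the analysis remarks above are emitted
  // under DEBUG_TYPE "openmp-opt", so they can typically be surfaced with
  // -Rpass-analysis=openmp-opt or recorded via -fsave-optimization-record.
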
  /// Return the call if \p U is a callee use in a regular call. If \p RFI is
  /// given it has to be the callee or a nullptr is returned.
  static CallInst *getCallIfRegularCall(
      Use &U, OMPInformationCache::RuntimeFunctionInfo *RFI = nullptr) {
    CallInst *CI = dyn_cast<CallInst>(U.getUser());
    if (CI && CI->isCallee(&U) && !CI->hasOperandBundles() &&
        (!RFI ||
         (RFI->Declaration && CI->getCalledFunction() == RFI->Declaration)))
      return CI;
    return nullptr;
  }

  /// Return the call if \p V is a regular call. If \p RFI is given it has to be
  /// the callee or a nullptr is returned.
  static CallInst *getCallIfRegularCall(
      Value &V, OMPInformationCache::RuntimeFunctionInfo *RFI = nullptr) {
    CallInst *CI = dyn_cast<CallInst>(&V);
    if (CI && !CI->hasOperandBundles() &&
        (!RFI ||
         (RFI->Declaration && CI->getCalledFunction() == RFI->Declaration)))
      return CI;
    return nullptr;
  }

private:
  /// Merge parallel regions when it is safe.
  bool mergeParallelRegions() {
    const unsigned CallbackCalleeOperand = 2;
    const unsigned CallbackFirstArgOperand = 3;
    using InsertPointTy = OpenMPIRBuilder::InsertPointTy;

    // Check if there are any __kmpc_fork_call calls to merge.
    OMPInformationCache::RuntimeFunctionInfo &RFI =
        OMPInfoCache.RFIs[OMPRTL___kmpc_fork_call];

    if (!RFI.Declaration)
      return false;

    // Unmergable calls that prevent merging a parallel region.
    OMPInformationCache::RuntimeFunctionInfo UnmergableCallsInfo[] = {
        OMPInfoCache.RFIs[OMPRTL___kmpc_push_proc_bind],
        OMPInfoCache.RFIs[OMPRTL___kmpc_push_num_threads],
    };

    bool Changed = false;
    LoopInfo *LI = nullptr;
    DominatorTree *DT = nullptr;

    SmallDenseMap<BasicBlock *, SmallPtrSet<Instruction *, 4>> BB2PRMap;

    BasicBlock *StartBB = nullptr, *EndBB = nullptr;
    auto BodyGenCB = [&](InsertPointTy AllocaIP, InsertPointTy CodeGenIP) {
      BasicBlock *CGStartBB = CodeGenIP.getBlock();
      BasicBlock *CGEndBB =
          SplitBlock(CGStartBB, &*CodeGenIP.getPoint(), DT, LI);
      assert(StartBB != nullptr && "StartBB should not be null");
      CGStartBB->getTerminator()->setSuccessor(0, StartBB);
      assert(EndBB != nullptr && "EndBB should not be null");
      EndBB->getTerminator()->setSuccessor(0, CGEndBB);
    };

    auto PrivCB = [&](InsertPointTy AllocaIP, InsertPointTy CodeGenIP, Value &,
                      Value &Inner, Value *&ReplacementValue) -> InsertPointTy {
      ReplacementValue = &Inner;
      return CodeGenIP;
    };

    auto FiniCB = [&](InsertPointTy CodeGenIP) {};

    /// Create a sequential execution region within a merged parallel region,
    /// encapsulated in a master construct with a barrier for synchronization.
    auto CreateSequentialRegion = [&](Function *OuterFn,
                                      BasicBlock *OuterPredBB,
                                      Instruction *SeqStartI,
                                      Instruction *SeqEndI) {
      // Isolate the instructions of the sequential region to a separate
      // block.
995e8d8bef9SDimitry Andric BasicBlock *ParentBB = SeqStartI->getParent(); 996e8d8bef9SDimitry Andric BasicBlock *SeqEndBB = 997e8d8bef9SDimitry Andric SplitBlock(ParentBB, SeqEndI->getNextNode(), DT, LI); 998e8d8bef9SDimitry Andric BasicBlock *SeqAfterBB = 999e8d8bef9SDimitry Andric SplitBlock(SeqEndBB, &*SeqEndBB->getFirstInsertionPt(), DT, LI); 1000e8d8bef9SDimitry Andric BasicBlock *SeqStartBB = 1001e8d8bef9SDimitry Andric SplitBlock(ParentBB, SeqStartI, DT, LI, nullptr, "seq.par.merged"); 1002e8d8bef9SDimitry Andric 1003e8d8bef9SDimitry Andric assert(ParentBB->getUniqueSuccessor() == SeqStartBB && 1004e8d8bef9SDimitry Andric "Expected a different CFG"); 1005e8d8bef9SDimitry Andric const DebugLoc DL = ParentBB->getTerminator()->getDebugLoc(); 1006e8d8bef9SDimitry Andric ParentBB->getTerminator()->eraseFromParent(); 1007e8d8bef9SDimitry Andric 100881ad6265SDimitry Andric auto BodyGenCB = [&](InsertPointTy AllocaIP, InsertPointTy CodeGenIP) { 1009e8d8bef9SDimitry Andric BasicBlock *CGStartBB = CodeGenIP.getBlock(); 1010e8d8bef9SDimitry Andric BasicBlock *CGEndBB = 1011e8d8bef9SDimitry Andric SplitBlock(CGStartBB, &*CodeGenIP.getPoint(), DT, LI); 1012e8d8bef9SDimitry Andric assert(SeqStartBB != nullptr && "SeqStartBB should not be null"); 1013e8d8bef9SDimitry Andric CGStartBB->getTerminator()->setSuccessor(0, SeqStartBB); 1014e8d8bef9SDimitry Andric assert(SeqEndBB != nullptr && "SeqEndBB should not be null"); 1015e8d8bef9SDimitry Andric SeqEndBB->getTerminator()->setSuccessor(0, CGEndBB); 1016e8d8bef9SDimitry Andric }; 1017e8d8bef9SDimitry Andric auto FiniCB = [&](InsertPointTy CodeGenIP) {}; 1018e8d8bef9SDimitry Andric 1019e8d8bef9SDimitry Andric // Find outputs from the sequential region to outside users and 1020e8d8bef9SDimitry Andric // broadcast their values to them. 1021e8d8bef9SDimitry Andric for (Instruction &I : *SeqStartBB) { 1022e8d8bef9SDimitry Andric SmallPtrSet<Instruction *, 4> OutsideUsers; 1023e8d8bef9SDimitry Andric for (User *Usr : I.users()) { 1024e8d8bef9SDimitry Andric Instruction &UsrI = *cast<Instruction>(Usr); 1025e8d8bef9SDimitry Andric // Ignore outputs to LT intrinsics, code extraction for the merged 1026e8d8bef9SDimitry Andric // parallel region will fix them. 1027e8d8bef9SDimitry Andric if (UsrI.isLifetimeStartOrEnd()) 1028e8d8bef9SDimitry Andric continue; 1029e8d8bef9SDimitry Andric 1030e8d8bef9SDimitry Andric if (UsrI.getParent() != SeqStartBB) 1031e8d8bef9SDimitry Andric OutsideUsers.insert(&UsrI); 1032e8d8bef9SDimitry Andric } 1033e8d8bef9SDimitry Andric 1034e8d8bef9SDimitry Andric if (OutsideUsers.empty()) 1035e8d8bef9SDimitry Andric continue; 1036e8d8bef9SDimitry Andric 1037e8d8bef9SDimitry Andric // Emit an alloca in the outer region to store the broadcasted 1038e8d8bef9SDimitry Andric // value. 1039e8d8bef9SDimitry Andric const DataLayout &DL = M.getDataLayout(); 1040e8d8bef9SDimitry Andric AllocaInst *AllocaI = new AllocaInst( 1041e8d8bef9SDimitry Andric I.getType(), DL.getAllocaAddrSpace(), nullptr, 1042e8d8bef9SDimitry Andric I.getName() + ".seq.output.alloc", &OuterFn->front().front()); 1043e8d8bef9SDimitry Andric 1044e8d8bef9SDimitry Andric // Emit a store instruction in the sequential BB to update the 1045e8d8bef9SDimitry Andric // value. 1046e8d8bef9SDimitry Andric new StoreInst(&I, AllocaI, SeqStartBB->getTerminator()); 1047e8d8bef9SDimitry Andric 1048e8d8bef9SDimitry Andric // Emit a load instruction and replace the use of the output value 1049e8d8bef9SDimitry Andric // with it. 
1050e8d8bef9SDimitry Andric for (Instruction *UsrI : OutsideUsers) { 1051fe6060f1SDimitry Andric LoadInst *LoadI = new LoadInst( 1052fe6060f1SDimitry Andric I.getType(), AllocaI, I.getName() + ".seq.output.load", UsrI); 1053e8d8bef9SDimitry Andric UsrI->replaceUsesOfWith(&I, LoadI); 1054e8d8bef9SDimitry Andric } 1055e8d8bef9SDimitry Andric } 1056e8d8bef9SDimitry Andric 1057e8d8bef9SDimitry Andric OpenMPIRBuilder::LocationDescription Loc( 1058e8d8bef9SDimitry Andric InsertPointTy(ParentBB, ParentBB->end()), DL); 1059e8d8bef9SDimitry Andric InsertPointTy SeqAfterIP = 1060e8d8bef9SDimitry Andric OMPInfoCache.OMPBuilder.createMaster(Loc, BodyGenCB, FiniCB); 1061e8d8bef9SDimitry Andric 1062e8d8bef9SDimitry Andric OMPInfoCache.OMPBuilder.createBarrier(SeqAfterIP, OMPD_parallel); 1063e8d8bef9SDimitry Andric 1064e8d8bef9SDimitry Andric BranchInst::Create(SeqAfterBB, SeqAfterIP.getBlock()); 1065e8d8bef9SDimitry Andric 1066e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "After sequential inlining " << *OuterFn 1067e8d8bef9SDimitry Andric << "\n"); 1068e8d8bef9SDimitry Andric }; 1069e8d8bef9SDimitry Andric 1070e8d8bef9SDimitry Andric // Helper to merge the __kmpc_fork_call calls in MergableCIs. They are all 1071e8d8bef9SDimitry Andric // contained in BB and only separated by instructions that can be 1072e8d8bef9SDimitry Andric // redundantly executed in parallel. The block BB is split before the first 1073e8d8bef9SDimitry Andric // call (in MergableCIs) and after the last so the entire region we merge 1074e8d8bef9SDimitry Andric // into a single parallel region is contained in a single basic block 1075e8d8bef9SDimitry Andric // without any other instructions. We use the OpenMPIRBuilder to outline 1076e8d8bef9SDimitry Andric // that block and call the resulting function via __kmpc_fork_call. 107704eeddc0SDimitry Andric auto Merge = [&](const SmallVectorImpl<CallInst *> &MergableCIs, 107804eeddc0SDimitry Andric BasicBlock *BB) { 1079e8d8bef9SDimitry Andric // TODO: Change the interface to allow single CIs expanded, e.g, to 1080e8d8bef9SDimitry Andric // include an outer loop. 1081e8d8bef9SDimitry Andric assert(MergableCIs.size() > 1 && "Assumed multiple mergable CIs"); 1082e8d8bef9SDimitry Andric 1083e8d8bef9SDimitry Andric auto Remark = [&](OptimizationRemark OR) { 1084fe6060f1SDimitry Andric OR << "Parallel region merged with parallel region" 1085fe6060f1SDimitry Andric << (MergableCIs.size() > 2 ? "s" : "") << " at "; 1086e8d8bef9SDimitry Andric for (auto *CI : llvm::drop_begin(MergableCIs)) { 1087e8d8bef9SDimitry Andric OR << ore::NV("OpenMPParallelMerge", CI->getDebugLoc()); 1088e8d8bef9SDimitry Andric if (CI != MergableCIs.back()) 1089e8d8bef9SDimitry Andric OR << ", "; 1090e8d8bef9SDimitry Andric } 1091fe6060f1SDimitry Andric return OR << "."; 1092e8d8bef9SDimitry Andric }; 1093e8d8bef9SDimitry Andric 1094fe6060f1SDimitry Andric emitRemark<OptimizationRemark>(MergableCIs.front(), "OMP150", Remark); 1095e8d8bef9SDimitry Andric 1096e8d8bef9SDimitry Andric Function *OriginalFn = BB->getParent(); 1097e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Merge " << MergableCIs.size() 1098e8d8bef9SDimitry Andric << " parallel regions in " << OriginalFn->getName() 1099e8d8bef9SDimitry Andric << "\n"); 1100e8d8bef9SDimitry Andric 1101e8d8bef9SDimitry Andric // Isolate the calls to merge in a separate block. 
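// (Sketch of the resulting layout, inferred from the splits below:
//    BB -> StartBB ("omp.par.merged", holding the fork calls and the code
//    in between) -> ... -> EndBB -> AfterBB,
//  so the region to outline is confined to StartBB..EndBB.)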
1102e8d8bef9SDimitry Andric EndBB = SplitBlock(BB, MergableCIs.back()->getNextNode(), DT, LI);
1103e8d8bef9SDimitry Andric BasicBlock *AfterBB =
1104e8d8bef9SDimitry Andric SplitBlock(EndBB, &*EndBB->getFirstInsertionPt(), DT, LI);
1105e8d8bef9SDimitry Andric StartBB = SplitBlock(BB, MergableCIs.front(), DT, LI, nullptr,
1106e8d8bef9SDimitry Andric "omp.par.merged");
1107e8d8bef9SDimitry Andric
1108e8d8bef9SDimitry Andric assert(BB->getUniqueSuccessor() == StartBB && "Expected a different CFG");
1109e8d8bef9SDimitry Andric const DebugLoc DL = BB->getTerminator()->getDebugLoc();
1110e8d8bef9SDimitry Andric BB->getTerminator()->eraseFromParent();
1111e8d8bef9SDimitry Andric
1112e8d8bef9SDimitry Andric // Create sequential regions for sequential instructions that are
1113e8d8bef9SDimitry Andric // in-between mergable parallel regions.
1114e8d8bef9SDimitry Andric for (auto *It = MergableCIs.begin(), *End = MergableCIs.end() - 1;
1115e8d8bef9SDimitry Andric It != End; ++It) {
1116e8d8bef9SDimitry Andric Instruction *ForkCI = *It;
1117e8d8bef9SDimitry Andric Instruction *NextForkCI = *(It + 1);
1118e8d8bef9SDimitry Andric
1119e8d8bef9SDimitry Andric // Continue if there are no in-between instructions.
1120e8d8bef9SDimitry Andric if (ForkCI->getNextNode() == NextForkCI)
1121e8d8bef9SDimitry Andric continue;
1122e8d8bef9SDimitry Andric
1123e8d8bef9SDimitry Andric CreateSequentialRegion(OriginalFn, BB, ForkCI->getNextNode(),
1124e8d8bef9SDimitry Andric NextForkCI->getPrevNode());
1125e8d8bef9SDimitry Andric }
1126e8d8bef9SDimitry Andric
1127e8d8bef9SDimitry Andric OpenMPIRBuilder::LocationDescription Loc(InsertPointTy(BB, BB->end()),
1128e8d8bef9SDimitry Andric DL);
1129e8d8bef9SDimitry Andric IRBuilder<>::InsertPoint AllocaIP(
1130e8d8bef9SDimitry Andric &OriginalFn->getEntryBlock(),
1131e8d8bef9SDimitry Andric OriginalFn->getEntryBlock().getFirstInsertionPt());
1132e8d8bef9SDimitry Andric // Create the merged parallel region with default proc binding, to
1133e8d8bef9SDimitry Andric // avoid overriding binding settings, and without explicit cancellation.
1134e8d8bef9SDimitry Andric InsertPointTy AfterIP = OMPInfoCache.OMPBuilder.createParallel(
1135e8d8bef9SDimitry Andric Loc, AllocaIP, BodyGenCB, PrivCB, FiniCB, nullptr, nullptr,
1136e8d8bef9SDimitry Andric OMP_PROC_BIND_default, /* IsCancellable */ false);
1137e8d8bef9SDimitry Andric BranchInst::Create(AfterBB, AfterIP.getBlock());
1138e8d8bef9SDimitry Andric
1139e8d8bef9SDimitry Andric // Perform the actual outlining.
114004eeddc0SDimitry Andric OMPInfoCache.OMPBuilder.finalize(OriginalFn);
1141e8d8bef9SDimitry Andric
1142e8d8bef9SDimitry Andric Function *OutlinedFn = MergableCIs.front()->getCaller();
1143e8d8bef9SDimitry Andric
1144e8d8bef9SDimitry Andric // Replace the __kmpc_fork_call calls with direct calls to the outlined
1145e8d8bef9SDimitry Andric // callbacks.
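// Roughly (a sketch; the exact arguments are assembled below):
//   __kmpc_fork_call(ident, nargs, callback, args...)
// becomes
//   callback(global_tid, bound_tid, args...)
// where the two thread-id arguments are reused from the outlined parallel
// function.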
1146e8d8bef9SDimitry Andric SmallVector<Value *, 8> Args;
1147e8d8bef9SDimitry Andric for (auto *CI : MergableCIs) {
114881ad6265SDimitry Andric Value *Callee = CI->getArgOperand(CallbackCalleeOperand);
114981ad6265SDimitry Andric FunctionType *FT = OMPInfoCache.OMPBuilder.ParallelTask;
1150e8d8bef9SDimitry Andric Args.clear();
1151e8d8bef9SDimitry Andric Args.push_back(OutlinedFn->getArg(0));
1152e8d8bef9SDimitry Andric Args.push_back(OutlinedFn->getArg(1));
1153349cc55cSDimitry Andric for (unsigned U = CallbackFirstArgOperand, E = CI->arg_size(); U < E;
1154349cc55cSDimitry Andric ++U)
1155e8d8bef9SDimitry Andric Args.push_back(CI->getArgOperand(U));
1156e8d8bef9SDimitry Andric
1157e8d8bef9SDimitry Andric CallInst *NewCI = CallInst::Create(FT, Callee, Args, "", CI);
1158e8d8bef9SDimitry Andric if (CI->getDebugLoc())
1159e8d8bef9SDimitry Andric NewCI->setDebugLoc(CI->getDebugLoc());
1160e8d8bef9SDimitry Andric
1161e8d8bef9SDimitry Andric // Forward parameter attributes from the callback to the callee.
1162349cc55cSDimitry Andric for (unsigned U = CallbackFirstArgOperand, E = CI->arg_size(); U < E;
1163349cc55cSDimitry Andric ++U)
1164349cc55cSDimitry Andric for (const Attribute &A : CI->getAttributes().getParamAttrs(U))
1165e8d8bef9SDimitry Andric NewCI->addParamAttr(
1166e8d8bef9SDimitry Andric U - (CallbackFirstArgOperand - CallbackCalleeOperand), A);
1167e8d8bef9SDimitry Andric
1168e8d8bef9SDimitry Andric // Emit an explicit barrier to replace the implicit fork-join barrier.
1169e8d8bef9SDimitry Andric if (CI != MergableCIs.back()) {
1170e8d8bef9SDimitry Andric // TODO: Remove barrier if the merged parallel region includes the
1171e8d8bef9SDimitry Andric // 'nowait' clause.
1172e8d8bef9SDimitry Andric OMPInfoCache.OMPBuilder.createBarrier(
1173e8d8bef9SDimitry Andric InsertPointTy(NewCI->getParent(),
1174e8d8bef9SDimitry Andric NewCI->getNextNode()->getIterator()),
1175e8d8bef9SDimitry Andric OMPD_parallel);
1176e8d8bef9SDimitry Andric }
1177e8d8bef9SDimitry Andric
1178e8d8bef9SDimitry Andric CI->eraseFromParent();
1179e8d8bef9SDimitry Andric }
1180e8d8bef9SDimitry Andric
1181e8d8bef9SDimitry Andric assert(OutlinedFn != OriginalFn && "Outlining failed");
1182e8d8bef9SDimitry Andric CGUpdater.registerOutlinedFunction(*OriginalFn, *OutlinedFn);
1183e8d8bef9SDimitry Andric CGUpdater.reanalyzeFunction(*OriginalFn);
1184e8d8bef9SDimitry Andric
1185e8d8bef9SDimitry Andric NumOpenMPParallelRegionsMerged += MergableCIs.size();
1186e8d8bef9SDimitry Andric
1187e8d8bef9SDimitry Andric return true;
1188e8d8bef9SDimitry Andric };
1189e8d8bef9SDimitry Andric
1190e8d8bef9SDimitry Andric // Helper function that identifies sequences of
1191e8d8bef9SDimitry Andric // __kmpc_fork_call uses in a basic block.
1192e8d8bef9SDimitry Andric auto DetectPRsCB = [&](Use &U, Function &F) {
1193e8d8bef9SDimitry Andric CallInst *CI = getCallIfRegularCall(U, &RFI);
1194e8d8bef9SDimitry Andric BB2PRMap[CI->getParent()].insert(CI);
1195e8d8bef9SDimitry Andric
1196e8d8bef9SDimitry Andric return false;
1197e8d8bef9SDimitry Andric };
1198e8d8bef9SDimitry Andric
1199e8d8bef9SDimitry Andric BB2PRMap.clear();
1200e8d8bef9SDimitry Andric RFI.foreachUse(SCC, DetectPRsCB);
1201e8d8bef9SDimitry Andric SmallVector<SmallVector<CallInst *, 4>, 4> MergableCIsVector;
1202e8d8bef9SDimitry Andric // Find mergable parallel regions within a basic block that are
1203e8d8bef9SDimitry Andric // safe to merge, that is, any in-between instructions can safely
1204e8d8bef9SDimitry Andric // execute in parallel after merging.
1205e8d8bef9SDimitry Andric // TODO: support merging across basic-blocks. 1206e8d8bef9SDimitry Andric for (auto &It : BB2PRMap) { 1207e8d8bef9SDimitry Andric auto &CIs = It.getSecond(); 1208e8d8bef9SDimitry Andric if (CIs.size() < 2) 1209e8d8bef9SDimitry Andric continue; 1210e8d8bef9SDimitry Andric 1211e8d8bef9SDimitry Andric BasicBlock *BB = It.getFirst(); 1212e8d8bef9SDimitry Andric SmallVector<CallInst *, 4> MergableCIs; 1213e8d8bef9SDimitry Andric 1214e8d8bef9SDimitry Andric /// Returns true if the instruction is mergable, false otherwise. 1215e8d8bef9SDimitry Andric /// A terminator instruction is unmergable by definition since merging 1216e8d8bef9SDimitry Andric /// works within a BB. Instructions before the mergable region are 1217e8d8bef9SDimitry Andric /// mergable if they are not calls to OpenMP runtime functions that may 1218e8d8bef9SDimitry Andric /// set different execution parameters for subsequent parallel regions. 1219e8d8bef9SDimitry Andric /// Instructions in-between parallel regions are mergable if they are not 1220e8d8bef9SDimitry Andric /// calls to any non-intrinsic function since that may call a non-mergable 1221e8d8bef9SDimitry Andric /// OpenMP runtime function. 1222e8d8bef9SDimitry Andric auto IsMergable = [&](Instruction &I, bool IsBeforeMergableRegion) { 1223e8d8bef9SDimitry Andric // We do not merge across BBs, hence return false (unmergable) if the 1224e8d8bef9SDimitry Andric // instruction is a terminator. 1225e8d8bef9SDimitry Andric if (I.isTerminator()) 1226e8d8bef9SDimitry Andric return false; 1227e8d8bef9SDimitry Andric 1228e8d8bef9SDimitry Andric if (!isa<CallInst>(&I)) 1229e8d8bef9SDimitry Andric return true; 1230e8d8bef9SDimitry Andric 1231e8d8bef9SDimitry Andric CallInst *CI = cast<CallInst>(&I); 1232e8d8bef9SDimitry Andric if (IsBeforeMergableRegion) { 1233e8d8bef9SDimitry Andric Function *CalledFunction = CI->getCalledFunction(); 1234e8d8bef9SDimitry Andric if (!CalledFunction) 1235e8d8bef9SDimitry Andric return false; 1236e8d8bef9SDimitry Andric // Return false (unmergable) if the call before the parallel 1237e8d8bef9SDimitry Andric // region calls an explicit affinity (proc_bind) or number of 1238e8d8bef9SDimitry Andric // threads (num_threads) compiler-generated function. Those settings 1239e8d8bef9SDimitry Andric // may be incompatible with following parallel regions. 1240e8d8bef9SDimitry Andric // TODO: ICV tracking to detect compatibility. 1241e8d8bef9SDimitry Andric for (const auto &RFI : UnmergableCallsInfo) { 1242e8d8bef9SDimitry Andric if (CalledFunction == RFI.Declaration) 1243e8d8bef9SDimitry Andric return false; 1244e8d8bef9SDimitry Andric } 1245e8d8bef9SDimitry Andric } else { 1246e8d8bef9SDimitry Andric // Return false (unmergable) if there is a call instruction 1247e8d8bef9SDimitry Andric // in-between parallel regions when it is not an intrinsic. It 1248e8d8bef9SDimitry Andric // may call an unmergable OpenMP runtime function in its callpath. 1249e8d8bef9SDimitry Andric // TODO: Keep track of possible OpenMP calls in the callpath. 1250e8d8bef9SDimitry Andric if (!isa<IntrinsicInst>(CI)) 1251e8d8bef9SDimitry Andric return false; 1252e8d8bef9SDimitry Andric } 1253e8d8bef9SDimitry Andric 1254e8d8bef9SDimitry Andric return true; 1255e8d8bef9SDimitry Andric }; 1256e8d8bef9SDimitry Andric // Find maximal number of parallel region CIs that are safe to merge. 
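// The scan below collects fork calls while the instructions in between remain
// mergable; an unmergable instruction closes the current group and causes the
// next parallel region to be skipped.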
1257e8d8bef9SDimitry Andric for (auto It = BB->begin(), End = BB->end(); It != End;) { 1258e8d8bef9SDimitry Andric Instruction &I = *It; 1259e8d8bef9SDimitry Andric ++It; 1260e8d8bef9SDimitry Andric 1261e8d8bef9SDimitry Andric if (CIs.count(&I)) { 1262e8d8bef9SDimitry Andric MergableCIs.push_back(cast<CallInst>(&I)); 1263e8d8bef9SDimitry Andric continue; 1264e8d8bef9SDimitry Andric } 1265e8d8bef9SDimitry Andric 1266e8d8bef9SDimitry Andric // Continue expanding if the instruction is mergable. 1267e8d8bef9SDimitry Andric if (IsMergable(I, MergableCIs.empty())) 1268e8d8bef9SDimitry Andric continue; 1269e8d8bef9SDimitry Andric 1270e8d8bef9SDimitry Andric // Forward the instruction iterator to skip the next parallel region 1271e8d8bef9SDimitry Andric // since there is an unmergable instruction which can affect it. 1272e8d8bef9SDimitry Andric for (; It != End; ++It) { 1273e8d8bef9SDimitry Andric Instruction &SkipI = *It; 1274e8d8bef9SDimitry Andric if (CIs.count(&SkipI)) { 1275e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Skip parallel region " << SkipI 1276e8d8bef9SDimitry Andric << " due to " << I << "\n"); 1277e8d8bef9SDimitry Andric ++It; 1278e8d8bef9SDimitry Andric break; 1279e8d8bef9SDimitry Andric } 1280e8d8bef9SDimitry Andric } 1281e8d8bef9SDimitry Andric 1282e8d8bef9SDimitry Andric // Store mergable regions found. 1283e8d8bef9SDimitry Andric if (MergableCIs.size() > 1) { 1284e8d8bef9SDimitry Andric MergableCIsVector.push_back(MergableCIs); 1285e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Found " << MergableCIs.size() 1286e8d8bef9SDimitry Andric << " parallel regions in block " << BB->getName() 1287e8d8bef9SDimitry Andric << " of function " << BB->getParent()->getName() 1288e8d8bef9SDimitry Andric << "\n";); 1289e8d8bef9SDimitry Andric } 1290e8d8bef9SDimitry Andric 1291e8d8bef9SDimitry Andric MergableCIs.clear(); 1292e8d8bef9SDimitry Andric } 1293e8d8bef9SDimitry Andric 1294e8d8bef9SDimitry Andric if (!MergableCIsVector.empty()) { 1295e8d8bef9SDimitry Andric Changed = true; 1296e8d8bef9SDimitry Andric 1297e8d8bef9SDimitry Andric for (auto &MergableCIs : MergableCIsVector) 1298e8d8bef9SDimitry Andric Merge(MergableCIs, BB); 1299fe6060f1SDimitry Andric MergableCIsVector.clear(); 1300e8d8bef9SDimitry Andric } 1301e8d8bef9SDimitry Andric } 1302e8d8bef9SDimitry Andric 1303e8d8bef9SDimitry Andric if (Changed) { 1304e8d8bef9SDimitry Andric /// Re-collect use for fork calls, emitted barrier calls, and 1305e8d8bef9SDimitry Andric /// any emitted master/end_master calls. 1306e8d8bef9SDimitry Andric OMPInfoCache.recollectUsesForFunction(OMPRTL___kmpc_fork_call); 1307e8d8bef9SDimitry Andric OMPInfoCache.recollectUsesForFunction(OMPRTL___kmpc_barrier); 1308e8d8bef9SDimitry Andric OMPInfoCache.recollectUsesForFunction(OMPRTL___kmpc_master); 1309e8d8bef9SDimitry Andric OMPInfoCache.recollectUsesForFunction(OMPRTL___kmpc_end_master); 1310e8d8bef9SDimitry Andric } 1311e8d8bef9SDimitry Andric 1312e8d8bef9SDimitry Andric return Changed; 1313e8d8bef9SDimitry Andric } 1314e8d8bef9SDimitry Andric 13155ffd83dbSDimitry Andric /// Try to delete parallel regions if possible. 
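/// A __kmpc_fork_call is removable when its outlined callback is known to be
/// free of side effects, i.e., it only reads memory and is guaranteed to
/// return (willreturn), as checked below.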
13165ffd83dbSDimitry Andric bool deleteParallelRegions() { 13175ffd83dbSDimitry Andric const unsigned CallbackCalleeOperand = 2; 13185ffd83dbSDimitry Andric 13195ffd83dbSDimitry Andric OMPInformationCache::RuntimeFunctionInfo &RFI = 13205ffd83dbSDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_fork_call]; 13215ffd83dbSDimitry Andric 13225ffd83dbSDimitry Andric if (!RFI.Declaration) 13235ffd83dbSDimitry Andric return false; 13245ffd83dbSDimitry Andric 13255ffd83dbSDimitry Andric bool Changed = false; 13265ffd83dbSDimitry Andric auto DeleteCallCB = [&](Use &U, Function &) { 13275ffd83dbSDimitry Andric CallInst *CI = getCallIfRegularCall(U); 13285ffd83dbSDimitry Andric if (!CI) 13295ffd83dbSDimitry Andric return false; 13305ffd83dbSDimitry Andric auto *Fn = dyn_cast<Function>( 13315ffd83dbSDimitry Andric CI->getArgOperand(CallbackCalleeOperand)->stripPointerCasts()); 13325ffd83dbSDimitry Andric if (!Fn) 13335ffd83dbSDimitry Andric return false; 13345ffd83dbSDimitry Andric if (!Fn->onlyReadsMemory()) 13355ffd83dbSDimitry Andric return false; 13365ffd83dbSDimitry Andric if (!Fn->hasFnAttribute(Attribute::WillReturn)) 13375ffd83dbSDimitry Andric return false; 13385ffd83dbSDimitry Andric 13395ffd83dbSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Delete read-only parallel region in " 13405ffd83dbSDimitry Andric << CI->getCaller()->getName() << "\n"); 13415ffd83dbSDimitry Andric 13425ffd83dbSDimitry Andric auto Remark = [&](OptimizationRemark OR) { 1343fe6060f1SDimitry Andric return OR << "Removing parallel region with no side-effects."; 13445ffd83dbSDimitry Andric }; 1345fe6060f1SDimitry Andric emitRemark<OptimizationRemark>(CI, "OMP160", Remark); 13465ffd83dbSDimitry Andric 13475ffd83dbSDimitry Andric CGUpdater.removeCallSite(*CI); 13485ffd83dbSDimitry Andric CI->eraseFromParent(); 13495ffd83dbSDimitry Andric Changed = true; 13505ffd83dbSDimitry Andric ++NumOpenMPParallelRegionsDeleted; 13515ffd83dbSDimitry Andric return true; 13525ffd83dbSDimitry Andric }; 13535ffd83dbSDimitry Andric 13545ffd83dbSDimitry Andric RFI.foreachUse(SCC, DeleteCallCB); 13555ffd83dbSDimitry Andric 13565ffd83dbSDimitry Andric return Changed; 13575ffd83dbSDimitry Andric } 13585ffd83dbSDimitry Andric 13595ffd83dbSDimitry Andric /// Try to eliminate runtime calls by reusing existing ones. 13605ffd83dbSDimitry Andric bool deduplicateRuntimeCalls() { 13615ffd83dbSDimitry Andric bool Changed = false; 13625ffd83dbSDimitry Andric 13635ffd83dbSDimitry Andric RuntimeFunction DeduplicableRuntimeCallIDs[] = { 13645ffd83dbSDimitry Andric OMPRTL_omp_get_num_threads, 13655ffd83dbSDimitry Andric OMPRTL_omp_in_parallel, 13665ffd83dbSDimitry Andric OMPRTL_omp_get_cancellation, 13675ffd83dbSDimitry Andric OMPRTL_omp_get_thread_limit, 13685ffd83dbSDimitry Andric OMPRTL_omp_get_supported_active_levels, 13695ffd83dbSDimitry Andric OMPRTL_omp_get_level, 13705ffd83dbSDimitry Andric OMPRTL_omp_get_ancestor_thread_num, 13715ffd83dbSDimitry Andric OMPRTL_omp_get_team_size, 13725ffd83dbSDimitry Andric OMPRTL_omp_get_active_level, 13735ffd83dbSDimitry Andric OMPRTL_omp_in_final, 13745ffd83dbSDimitry Andric OMPRTL_omp_get_proc_bind, 13755ffd83dbSDimitry Andric OMPRTL_omp_get_num_places, 13765ffd83dbSDimitry Andric OMPRTL_omp_get_num_procs, 13775ffd83dbSDimitry Andric OMPRTL_omp_get_place_num, 13785ffd83dbSDimitry Andric OMPRTL_omp_get_partition_num_places, 13795ffd83dbSDimitry Andric OMPRTL_omp_get_partition_place_nums}; 13805ffd83dbSDimitry Andric 13815ffd83dbSDimitry Andric // Global-tid is handled separately. 
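// We first collect arguments known to carry the global thread id so that
// __kmpc_global_thread_num calls can be replaced by such an argument where one
// is available (see the per-function loop below).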
13825ffd83dbSDimitry Andric SmallSetVector<Value *, 16> GTIdArgs;
13835ffd83dbSDimitry Andric collectGlobalThreadIdArguments(GTIdArgs);
13845ffd83dbSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Found " << GTIdArgs.size()
13855ffd83dbSDimitry Andric << " global thread ID arguments\n");
13865ffd83dbSDimitry Andric
13875ffd83dbSDimitry Andric for (Function *F : SCC) {
13885ffd83dbSDimitry Andric for (auto DeduplicableRuntimeCallID : DeduplicableRuntimeCallIDs)
1389e8d8bef9SDimitry Andric Changed |= deduplicateRuntimeCalls(
1390e8d8bef9SDimitry Andric *F, OMPInfoCache.RFIs[DeduplicableRuntimeCallID]);
13915ffd83dbSDimitry Andric
13925ffd83dbSDimitry Andric // __kmpc_global_thread_num is special as we can replace it with an
13935ffd83dbSDimitry Andric // argument in enough cases to make it worth trying.
13945ffd83dbSDimitry Andric Value *GTIdArg = nullptr;
13955ffd83dbSDimitry Andric for (Argument &Arg : F->args())
13965ffd83dbSDimitry Andric if (GTIdArgs.count(&Arg)) {
13975ffd83dbSDimitry Andric GTIdArg = &Arg;
13985ffd83dbSDimitry Andric break;
13995ffd83dbSDimitry Andric }
14005ffd83dbSDimitry Andric Changed |= deduplicateRuntimeCalls(
14015ffd83dbSDimitry Andric *F, OMPInfoCache.RFIs[OMPRTL___kmpc_global_thread_num], GTIdArg);
14025ffd83dbSDimitry Andric }
14035ffd83dbSDimitry Andric
14045ffd83dbSDimitry Andric return Changed;
14055ffd83dbSDimitry Andric }
14065ffd83dbSDimitry Andric
1407e8d8bef9SDimitry Andric /// Tries to hide the latency of runtime calls that involve host to
1408e8d8bef9SDimitry Andric /// device memory transfers by splitting them into their "issue" and "wait"
1409e8d8bef9SDimitry Andric /// versions. The "issue" is moved upwards as much as possible. The "wait" is
1410e8d8bef9SDimitry Andric /// moved downwards as much as possible. The "issue" issues the memory transfer
1411e8d8bef9SDimitry Andric /// asynchronously, returning a handle. The "wait" waits on the returned
1412e8d8bef9SDimitry Andric /// handle for the memory transfer to finish.
1413e8d8bef9SDimitry Andric bool hideMemTransfersLatency() {
1414e8d8bef9SDimitry Andric auto &RFI = OMPInfoCache.RFIs[OMPRTL___tgt_target_data_begin_mapper];
1415e8d8bef9SDimitry Andric bool Changed = false;
1416e8d8bef9SDimitry Andric auto SplitMemTransfers = [&](Use &U, Function &Decl) {
1417e8d8bef9SDimitry Andric auto *RTCall = getCallIfRegularCall(U, &RFI);
1418e8d8bef9SDimitry Andric if (!RTCall)
1419e8d8bef9SDimitry Andric return false;
1420e8d8bef9SDimitry Andric
1421e8d8bef9SDimitry Andric OffloadArray OffloadArrays[3];
1422e8d8bef9SDimitry Andric if (!getValuesInOffloadArrays(*RTCall, OffloadArrays))
1423e8d8bef9SDimitry Andric return false;
1424e8d8bef9SDimitry Andric
1425e8d8bef9SDimitry Andric LLVM_DEBUG(dumpValuesInOffloadArrays(OffloadArrays));
1426e8d8bef9SDimitry Andric
1427e8d8bef9SDimitry Andric // TODO: Check if can be moved upwards.
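// For now only the "wait" side is sunk: canBeMovedDownwards picks the movement
// point and splitTargetDataBeginRTC emits the issue/wait pair, with the
// "issue" replacing the original call in place.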
1428e8d8bef9SDimitry Andric bool WasSplit = false; 1429e8d8bef9SDimitry Andric Instruction *WaitMovementPoint = canBeMovedDownwards(*RTCall); 1430e8d8bef9SDimitry Andric if (WaitMovementPoint) 1431e8d8bef9SDimitry Andric WasSplit = splitTargetDataBeginRTC(*RTCall, *WaitMovementPoint); 1432e8d8bef9SDimitry Andric 1433e8d8bef9SDimitry Andric Changed |= WasSplit; 1434e8d8bef9SDimitry Andric return WasSplit; 1435e8d8bef9SDimitry Andric }; 1436*1ac55f4cSDimitry Andric if (OMPInfoCache.runtimeFnsAvailable( 1437*1ac55f4cSDimitry Andric {OMPRTL___tgt_target_data_begin_mapper_issue, 1438*1ac55f4cSDimitry Andric OMPRTL___tgt_target_data_begin_mapper_wait})) 1439e8d8bef9SDimitry Andric RFI.foreachUse(SCC, SplitMemTransfers); 1440e8d8bef9SDimitry Andric 1441e8d8bef9SDimitry Andric return Changed; 1442e8d8bef9SDimitry Andric } 1443e8d8bef9SDimitry Andric 1444e8d8bef9SDimitry Andric void analysisGlobalization() { 1445fe6060f1SDimitry Andric auto &RFI = OMPInfoCache.RFIs[OMPRTL___kmpc_alloc_shared]; 1446e8d8bef9SDimitry Andric 1447e8d8bef9SDimitry Andric auto CheckGlobalization = [&](Use &U, Function &Decl) { 1448e8d8bef9SDimitry Andric if (CallInst *CI = getCallIfRegularCall(U, &RFI)) { 1449fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemarkMissed ORM) { 1450fe6060f1SDimitry Andric return ORM 1451e8d8bef9SDimitry Andric << "Found thread data sharing on the GPU. " 1452e8d8bef9SDimitry Andric << "Expect degraded performance due to data globalization."; 1453e8d8bef9SDimitry Andric }; 1454fe6060f1SDimitry Andric emitRemark<OptimizationRemarkMissed>(CI, "OMP112", Remark); 1455e8d8bef9SDimitry Andric } 1456e8d8bef9SDimitry Andric 1457e8d8bef9SDimitry Andric return false; 1458e8d8bef9SDimitry Andric }; 1459e8d8bef9SDimitry Andric 1460e8d8bef9SDimitry Andric RFI.foreachUse(SCC, CheckGlobalization); 1461e8d8bef9SDimitry Andric } 1462e8d8bef9SDimitry Andric 1463e8d8bef9SDimitry Andric /// Maps the values stored in the offload arrays passed as arguments to 1464e8d8bef9SDimitry Andric /// \p RuntimeCall into the offload arrays in \p OAs. 1465e8d8bef9SDimitry Andric bool getValuesInOffloadArrays(CallInst &RuntimeCall, 1466e8d8bef9SDimitry Andric MutableArrayRef<OffloadArray> OAs) { 1467e8d8bef9SDimitry Andric assert(OAs.size() == 3 && "Need space for three offload arrays!"); 1468e8d8bef9SDimitry Andric 1469e8d8bef9SDimitry Andric // A runtime call that involves memory offloading looks something like: 1470e8d8bef9SDimitry Andric // call void @__tgt_target_data_begin_mapper(arg0, arg1, 1471e8d8bef9SDimitry Andric // i8** %offload_baseptrs, i8** %offload_ptrs, i64* %offload_sizes, 1472e8d8bef9SDimitry Andric // ...) 1473e8d8bef9SDimitry Andric // So, the idea is to access the allocas that allocate space for these 1474e8d8bef9SDimitry Andric // offload arrays, offload_baseptrs, offload_ptrs, offload_sizes. 1475e8d8bef9SDimitry Andric // Therefore: 1476e8d8bef9SDimitry Andric // i8** %offload_baseptrs. 1477e8d8bef9SDimitry Andric Value *BasePtrsArg = 1478e8d8bef9SDimitry Andric RuntimeCall.getArgOperand(OffloadArray::BasePtrsArgNum); 1479e8d8bef9SDimitry Andric // i8** %offload_ptrs. 1480e8d8bef9SDimitry Andric Value *PtrsArg = RuntimeCall.getArgOperand(OffloadArray::PtrsArgNum); 1481e8d8bef9SDimitry Andric // i8** %offload_sizes. 1482e8d8bef9SDimitry Andric Value *SizesArg = RuntimeCall.getArgOperand(OffloadArray::SizesArgNum); 1483e8d8bef9SDimitry Andric 1484e8d8bef9SDimitry Andric // Get values stored in **offload_baseptrs. 
1485e8d8bef9SDimitry Andric auto *V = getUnderlyingObject(BasePtrsArg);
1486e8d8bef9SDimitry Andric if (!isa<AllocaInst>(V))
1487e8d8bef9SDimitry Andric return false;
1488e8d8bef9SDimitry Andric auto *BasePtrsArray = cast<AllocaInst>(V);
1489e8d8bef9SDimitry Andric if (!OAs[0].initialize(*BasePtrsArray, RuntimeCall))
1490e8d8bef9SDimitry Andric return false;
1491e8d8bef9SDimitry Andric
1492e8d8bef9SDimitry Andric // Get values stored in **offload_ptrs.
1493e8d8bef9SDimitry Andric V = getUnderlyingObject(PtrsArg);
1494e8d8bef9SDimitry Andric if (!isa<AllocaInst>(V))
1495e8d8bef9SDimitry Andric return false;
1496e8d8bef9SDimitry Andric auto *PtrsArray = cast<AllocaInst>(V);
1497e8d8bef9SDimitry Andric if (!OAs[1].initialize(*PtrsArray, RuntimeCall))
1498e8d8bef9SDimitry Andric return false;
1499e8d8bef9SDimitry Andric
1500e8d8bef9SDimitry Andric // Get values stored in **offload_sizes.
1501e8d8bef9SDimitry Andric V = getUnderlyingObject(SizesArg);
1502e8d8bef9SDimitry Andric // If it's a [constant] global array don't analyze it.
1503e8d8bef9SDimitry Andric if (isa<GlobalValue>(V))
1504e8d8bef9SDimitry Andric return isa<Constant>(V);
1505e8d8bef9SDimitry Andric if (!isa<AllocaInst>(V))
1506e8d8bef9SDimitry Andric return false;
1507e8d8bef9SDimitry Andric
1508e8d8bef9SDimitry Andric auto *SizesArray = cast<AllocaInst>(V);
1509e8d8bef9SDimitry Andric if (!OAs[2].initialize(*SizesArray, RuntimeCall))
1510e8d8bef9SDimitry Andric return false;
1511e8d8bef9SDimitry Andric
1512e8d8bef9SDimitry Andric return true;
1513e8d8bef9SDimitry Andric }
1514e8d8bef9SDimitry Andric
1515e8d8bef9SDimitry Andric /// Prints the values in the OffloadArrays \p OAs using LLVM_DEBUG.
1516e8d8bef9SDimitry Andric /// For now this is a way to test that the function getValuesInOffloadArrays
1517e8d8bef9SDimitry Andric /// is working properly.
1518e8d8bef9SDimitry Andric /// TODO: Move this to a unittest when unittests are available for OpenMPOpt.
1519e8d8bef9SDimitry Andric void dumpValuesInOffloadArrays(ArrayRef<OffloadArray> OAs) { 1520e8d8bef9SDimitry Andric assert(OAs.size() == 3 && "There are three offload arrays to debug!"); 1521e8d8bef9SDimitry Andric 1522e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << TAG << " Successfully got offload values:\n"); 1523e8d8bef9SDimitry Andric std::string ValuesStr; 1524e8d8bef9SDimitry Andric raw_string_ostream Printer(ValuesStr); 1525e8d8bef9SDimitry Andric std::string Separator = " --- "; 1526e8d8bef9SDimitry Andric 1527e8d8bef9SDimitry Andric for (auto *BP : OAs[0].StoredValues) { 1528e8d8bef9SDimitry Andric BP->print(Printer); 1529e8d8bef9SDimitry Andric Printer << Separator; 1530e8d8bef9SDimitry Andric } 1531e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << "\t\toffload_baseptrs: " << Printer.str() << "\n"); 1532e8d8bef9SDimitry Andric ValuesStr.clear(); 1533e8d8bef9SDimitry Andric 1534e8d8bef9SDimitry Andric for (auto *P : OAs[1].StoredValues) { 1535e8d8bef9SDimitry Andric P->print(Printer); 1536e8d8bef9SDimitry Andric Printer << Separator; 1537e8d8bef9SDimitry Andric } 1538e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << "\t\toffload_ptrs: " << Printer.str() << "\n"); 1539e8d8bef9SDimitry Andric ValuesStr.clear(); 1540e8d8bef9SDimitry Andric 1541e8d8bef9SDimitry Andric for (auto *S : OAs[2].StoredValues) { 1542e8d8bef9SDimitry Andric S->print(Printer); 1543e8d8bef9SDimitry Andric Printer << Separator; 1544e8d8bef9SDimitry Andric } 1545e8d8bef9SDimitry Andric LLVM_DEBUG(dbgs() << "\t\toffload_sizes: " << Printer.str() << "\n"); 1546e8d8bef9SDimitry Andric } 1547e8d8bef9SDimitry Andric 1548e8d8bef9SDimitry Andric /// Returns the instruction where the "wait" counterpart \p RuntimeCall can be 1549e8d8bef9SDimitry Andric /// moved. Returns nullptr if the movement is not possible, or not worth it. 1550e8d8bef9SDimitry Andric Instruction *canBeMovedDownwards(CallInst &RuntimeCall) { 1551e8d8bef9SDimitry Andric // FIXME: This traverses only the BasicBlock where RuntimeCall is. 1552e8d8bef9SDimitry Andric // Make it traverse the CFG. 1553e8d8bef9SDimitry Andric 1554e8d8bef9SDimitry Andric Instruction *CurrentI = &RuntimeCall; 1555e8d8bef9SDimitry Andric bool IsWorthIt = false; 1556e8d8bef9SDimitry Andric while ((CurrentI = CurrentI->getNextNode())) { 1557e8d8bef9SDimitry Andric 1558e8d8bef9SDimitry Andric // TODO: Once we detect the regions to be offloaded we should use the 1559e8d8bef9SDimitry Andric // alias analysis manager to check if CurrentI may modify one of 1560e8d8bef9SDimitry Andric // the offloaded regions. 1561e8d8bef9SDimitry Andric if (CurrentI->mayHaveSideEffects() || CurrentI->mayReadFromMemory()) { 1562e8d8bef9SDimitry Andric if (IsWorthIt) 1563e8d8bef9SDimitry Andric return CurrentI; 1564e8d8bef9SDimitry Andric 1565e8d8bef9SDimitry Andric return nullptr; 1566e8d8bef9SDimitry Andric } 1567e8d8bef9SDimitry Andric 1568e8d8bef9SDimitry Andric // FIXME: For now if we move it over anything without side effect 1569e8d8bef9SDimitry Andric // is worth it. 1570e8d8bef9SDimitry Andric IsWorthIt = true; 1571e8d8bef9SDimitry Andric } 1572e8d8bef9SDimitry Andric 1573e8d8bef9SDimitry Andric // Return end of BasicBlock. 1574e8d8bef9SDimitry Andric return RuntimeCall.getParent()->getTerminator(); 1575e8d8bef9SDimitry Andric } 1576e8d8bef9SDimitry Andric 1577e8d8bef9SDimitry Andric /// Splits \p RuntimeCall into its "issue" and "wait" counterparts. 
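/// Schematically (a sketch; the exact arguments are assembled in the body
/// below):
///   call @__tgt_target_data_begin_mapper(%device_id, ...)
/// becomes
///   %handle = alloca %struct.__tgt_async_info
///   call @__tgt_target_data_begin_mapper_issue(%device_id, ..., %handle)
///   ...                        ; independent code the transfer can overlap
///   call @__tgt_target_data_begin_mapper_wait(%device_id, %handle)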
1578e8d8bef9SDimitry Andric bool splitTargetDataBeginRTC(CallInst &RuntimeCall, 1579e8d8bef9SDimitry Andric Instruction &WaitMovementPoint) { 1580e8d8bef9SDimitry Andric // Create stack allocated handle (__tgt_async_info) at the beginning of the 1581e8d8bef9SDimitry Andric // function. Used for storing information of the async transfer, allowing to 1582e8d8bef9SDimitry Andric // wait on it later. 1583e8d8bef9SDimitry Andric auto &IRBuilder = OMPInfoCache.OMPBuilder; 1584bdd1243dSDimitry Andric Function *F = RuntimeCall.getCaller(); 1585bdd1243dSDimitry Andric BasicBlock &Entry = F->getEntryBlock(); 1586bdd1243dSDimitry Andric IRBuilder.Builder.SetInsertPoint(&Entry, 1587bdd1243dSDimitry Andric Entry.getFirstNonPHIOrDbgOrAlloca()); 1588bdd1243dSDimitry Andric Value *Handle = IRBuilder.Builder.CreateAlloca( 1589bdd1243dSDimitry Andric IRBuilder.AsyncInfo, /*ArraySize=*/nullptr, "handle"); 1590bdd1243dSDimitry Andric Handle = 1591bdd1243dSDimitry Andric IRBuilder.Builder.CreateAddrSpaceCast(Handle, IRBuilder.AsyncInfoPtr); 1592e8d8bef9SDimitry Andric 1593e8d8bef9SDimitry Andric // Add "issue" runtime call declaration: 1594e8d8bef9SDimitry Andric // declare %struct.tgt_async_info @__tgt_target_data_begin_issue(i64, i32, 1595e8d8bef9SDimitry Andric // i8**, i8**, i64*, i64*) 1596e8d8bef9SDimitry Andric FunctionCallee IssueDecl = IRBuilder.getOrCreateRuntimeFunction( 1597e8d8bef9SDimitry Andric M, OMPRTL___tgt_target_data_begin_mapper_issue); 1598e8d8bef9SDimitry Andric 1599e8d8bef9SDimitry Andric // Change RuntimeCall call site for its asynchronous version. 1600e8d8bef9SDimitry Andric SmallVector<Value *, 16> Args; 1601e8d8bef9SDimitry Andric for (auto &Arg : RuntimeCall.args()) 1602e8d8bef9SDimitry Andric Args.push_back(Arg.get()); 1603e8d8bef9SDimitry Andric Args.push_back(Handle); 1604e8d8bef9SDimitry Andric 1605e8d8bef9SDimitry Andric CallInst *IssueCallsite = 1606e8d8bef9SDimitry Andric CallInst::Create(IssueDecl, Args, /*NameStr=*/"", &RuntimeCall); 160704eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(IssueDecl, IssueCallsite); 1608e8d8bef9SDimitry Andric RuntimeCall.eraseFromParent(); 1609e8d8bef9SDimitry Andric 1610e8d8bef9SDimitry Andric // Add "wait" runtime call declaration: 1611e8d8bef9SDimitry Andric // declare void @__tgt_target_data_begin_wait(i64, %struct.__tgt_async_info) 1612e8d8bef9SDimitry Andric FunctionCallee WaitDecl = IRBuilder.getOrCreateRuntimeFunction( 1613e8d8bef9SDimitry Andric M, OMPRTL___tgt_target_data_begin_mapper_wait); 1614e8d8bef9SDimitry Andric 1615e8d8bef9SDimitry Andric Value *WaitParams[2] = { 1616e8d8bef9SDimitry Andric IssueCallsite->getArgOperand( 1617e8d8bef9SDimitry Andric OffloadArray::DeviceIDArgNum), // device_id. 1618e8d8bef9SDimitry Andric Handle // handle to wait on. 
1619e8d8bef9SDimitry Andric };
162004eeddc0SDimitry Andric CallInst *WaitCallsite = CallInst::Create(
162104eeddc0SDimitry Andric WaitDecl, WaitParams, /*NameStr=*/"", &WaitMovementPoint);
162204eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(WaitDecl, WaitCallsite);
1623e8d8bef9SDimitry Andric
1624e8d8bef9SDimitry Andric return true;
1625e8d8bef9SDimitry Andric }
1626e8d8bef9SDimitry Andric
16275ffd83dbSDimitry Andric static Value *combinedIdentStruct(Value *CurrentIdent, Value *NextIdent,
16285ffd83dbSDimitry Andric bool GlobalOnly, bool &SingleChoice) {
16295ffd83dbSDimitry Andric if (CurrentIdent == NextIdent)
16305ffd83dbSDimitry Andric return CurrentIdent;
16315ffd83dbSDimitry Andric
16325ffd83dbSDimitry Andric // TODO: Figure out how to actually combine multiple debug locations. For
16335ffd83dbSDimitry Andric // now we just keep an existing one if there is a single choice.
16345ffd83dbSDimitry Andric if (!GlobalOnly || isa<GlobalValue>(NextIdent)) {
16355ffd83dbSDimitry Andric SingleChoice = !CurrentIdent;
16365ffd83dbSDimitry Andric return NextIdent;
16375ffd83dbSDimitry Andric }
16385ffd83dbSDimitry Andric return nullptr;
16395ffd83dbSDimitry Andric }
16405ffd83dbSDimitry Andric
16415ffd83dbSDimitry Andric /// Return a `struct ident_t*` value that represents the ones used in the
16425ffd83dbSDimitry Andric /// calls of \p RFI inside of \p F. If \p GlobalOnly is true, we will not
16435ffd83dbSDimitry Andric /// return a local `struct ident_t*`. For now, if we cannot find a suitable
16445ffd83dbSDimitry Andric /// return value we create one from scratch. We also do not yet combine
16455ffd83dbSDimitry Andric /// information, e.g., the source locations; see combinedIdentStruct.
16465ffd83dbSDimitry Andric Value *
16475ffd83dbSDimitry Andric getCombinedIdentFromCallUsesIn(OMPInformationCache::RuntimeFunctionInfo &RFI,
16485ffd83dbSDimitry Andric Function &F, bool GlobalOnly) {
16495ffd83dbSDimitry Andric bool SingleChoice = true;
16505ffd83dbSDimitry Andric Value *Ident = nullptr;
16515ffd83dbSDimitry Andric auto CombineIdentStruct = [&](Use &U, Function &Caller) {
16525ffd83dbSDimitry Andric CallInst *CI = getCallIfRegularCall(U, &RFI);
16535ffd83dbSDimitry Andric if (!CI || &F != &Caller)
16545ffd83dbSDimitry Andric return false;
16555ffd83dbSDimitry Andric Ident = combinedIdentStruct(Ident, CI->getArgOperand(0),
16565ffd83dbSDimitry Andric /* GlobalOnly */ true, SingleChoice);
16575ffd83dbSDimitry Andric return false;
16585ffd83dbSDimitry Andric };
16595ffd83dbSDimitry Andric RFI.foreachUse(SCC, CombineIdentStruct);
16605ffd83dbSDimitry Andric
16615ffd83dbSDimitry Andric if (!Ident || !SingleChoice) {
16625ffd83dbSDimitry Andric // The IRBuilder uses the insertion block to get to the module; this is
16635ffd83dbSDimitry Andric // unfortunate but we work around it for now.
16645ffd83dbSDimitry Andric if (!OMPInfoCache.OMPBuilder.getInsertionPoint().getBlock())
16655ffd83dbSDimitry Andric OMPInfoCache.OMPBuilder.updateToLocation(OpenMPIRBuilder::InsertPointTy(
16665ffd83dbSDimitry Andric &F.getEntryBlock(), F.getEntryBlock().begin()));
16675ffd83dbSDimitry Andric // Create a fallback location if none was found.
16685ffd83dbSDimitry Andric // TODO: Use the debug locations of the calls instead.
166904eeddc0SDimitry Andric uint32_t SrcLocStrSize; 167004eeddc0SDimitry Andric Constant *Loc = 167104eeddc0SDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateDefaultSrcLocStr(SrcLocStrSize); 167204eeddc0SDimitry Andric Ident = OMPInfoCache.OMPBuilder.getOrCreateIdent(Loc, SrcLocStrSize); 16735ffd83dbSDimitry Andric } 16745ffd83dbSDimitry Andric return Ident; 16755ffd83dbSDimitry Andric } 16765ffd83dbSDimitry Andric 16775ffd83dbSDimitry Andric /// Try to eliminate calls of \p RFI in \p F by reusing an existing one or 16785ffd83dbSDimitry Andric /// \p ReplVal if given. 16795ffd83dbSDimitry Andric bool deduplicateRuntimeCalls(Function &F, 16805ffd83dbSDimitry Andric OMPInformationCache::RuntimeFunctionInfo &RFI, 16815ffd83dbSDimitry Andric Value *ReplVal = nullptr) { 16825ffd83dbSDimitry Andric auto *UV = RFI.getUseVector(F); 16835ffd83dbSDimitry Andric if (!UV || UV->size() + (ReplVal != nullptr) < 2) 16845ffd83dbSDimitry Andric return false; 16855ffd83dbSDimitry Andric 16865ffd83dbSDimitry Andric LLVM_DEBUG( 16875ffd83dbSDimitry Andric dbgs() << TAG << "Deduplicate " << UV->size() << " uses of " << RFI.Name 16885ffd83dbSDimitry Andric << (ReplVal ? " with an existing value\n" : "\n") << "\n"); 16895ffd83dbSDimitry Andric 16905ffd83dbSDimitry Andric assert((!ReplVal || (isa<Argument>(ReplVal) && 16915ffd83dbSDimitry Andric cast<Argument>(ReplVal)->getParent() == &F)) && 16925ffd83dbSDimitry Andric "Unexpected replacement value!"); 16935ffd83dbSDimitry Andric 16945ffd83dbSDimitry Andric // TODO: Use dominance to find a good position instead. 16955ffd83dbSDimitry Andric auto CanBeMoved = [this](CallBase &CB) { 1696349cc55cSDimitry Andric unsigned NumArgs = CB.arg_size(); 16975ffd83dbSDimitry Andric if (NumArgs == 0) 16985ffd83dbSDimitry Andric return true; 16995ffd83dbSDimitry Andric if (CB.getArgOperand(0)->getType() != OMPInfoCache.OMPBuilder.IdentPtr) 17005ffd83dbSDimitry Andric return false; 1701349cc55cSDimitry Andric for (unsigned U = 1; U < NumArgs; ++U) 1702349cc55cSDimitry Andric if (isa<Instruction>(CB.getArgOperand(U))) 17035ffd83dbSDimitry Andric return false; 17045ffd83dbSDimitry Andric return true; 17055ffd83dbSDimitry Andric }; 17065ffd83dbSDimitry Andric 17075ffd83dbSDimitry Andric if (!ReplVal) { 17085ffd83dbSDimitry Andric for (Use *U : *UV) 17095ffd83dbSDimitry Andric if (CallInst *CI = getCallIfRegularCall(*U, &RFI)) { 17105ffd83dbSDimitry Andric if (!CanBeMoved(*CI)) 17115ffd83dbSDimitry Andric continue; 17125ffd83dbSDimitry Andric 1713fe6060f1SDimitry Andric // If the function is a kernel, dedup will move 1714fe6060f1SDimitry Andric // the runtime call right after the kernel init callsite. Otherwise, 1715fe6060f1SDimitry Andric // it will move it to the beginning of the caller function. 
1716fe6060f1SDimitry Andric if (isKernel(F)) { 1717fe6060f1SDimitry Andric auto &KernelInitRFI = OMPInfoCache.RFIs[OMPRTL___kmpc_target_init]; 1718fe6060f1SDimitry Andric auto *KernelInitUV = KernelInitRFI.getUseVector(F); 17195ffd83dbSDimitry Andric 1720fe6060f1SDimitry Andric if (KernelInitUV->empty()) 1721fe6060f1SDimitry Andric continue; 1722fe6060f1SDimitry Andric 1723fe6060f1SDimitry Andric assert(KernelInitUV->size() == 1 && 1724fe6060f1SDimitry Andric "Expected a single __kmpc_target_init in kernel\n"); 1725fe6060f1SDimitry Andric 1726fe6060f1SDimitry Andric CallInst *KernelInitCI = 1727fe6060f1SDimitry Andric getCallIfRegularCall(*KernelInitUV->front(), &KernelInitRFI); 1728fe6060f1SDimitry Andric assert(KernelInitCI && 1729fe6060f1SDimitry Andric "Expected a call to __kmpc_target_init in kernel\n"); 1730fe6060f1SDimitry Andric 1731fe6060f1SDimitry Andric CI->moveAfter(KernelInitCI); 1732fe6060f1SDimitry Andric } else 17335ffd83dbSDimitry Andric CI->moveBefore(&*F.getEntryBlock().getFirstInsertionPt()); 17345ffd83dbSDimitry Andric ReplVal = CI; 17355ffd83dbSDimitry Andric break; 17365ffd83dbSDimitry Andric } 17375ffd83dbSDimitry Andric if (!ReplVal) 17385ffd83dbSDimitry Andric return false; 17395ffd83dbSDimitry Andric } 17405ffd83dbSDimitry Andric 17415ffd83dbSDimitry Andric // If we use a call as a replacement value we need to make sure the ident is 17425ffd83dbSDimitry Andric // valid at the new location. For now we just pick a global one, either 17435ffd83dbSDimitry Andric // existing and used by one of the calls, or created from scratch. 17445ffd83dbSDimitry Andric if (CallBase *CI = dyn_cast<CallBase>(ReplVal)) { 1745349cc55cSDimitry Andric if (!CI->arg_empty() && 17465ffd83dbSDimitry Andric CI->getArgOperand(0)->getType() == OMPInfoCache.OMPBuilder.IdentPtr) { 17475ffd83dbSDimitry Andric Value *Ident = getCombinedIdentFromCallUsesIn(RFI, F, 17485ffd83dbSDimitry Andric /* GlobalOnly */ true); 17495ffd83dbSDimitry Andric CI->setArgOperand(0, Ident); 17505ffd83dbSDimitry Andric } 17515ffd83dbSDimitry Andric } 17525ffd83dbSDimitry Andric 17535ffd83dbSDimitry Andric bool Changed = false; 17545ffd83dbSDimitry Andric auto ReplaceAndDeleteCB = [&](Use &U, Function &Caller) { 17555ffd83dbSDimitry Andric CallInst *CI = getCallIfRegularCall(U, &RFI); 17565ffd83dbSDimitry Andric if (!CI || CI == ReplVal || &F != &Caller) 17575ffd83dbSDimitry Andric return false; 17585ffd83dbSDimitry Andric assert(CI->getCaller() == &F && "Unexpected call!"); 17595ffd83dbSDimitry Andric 17605ffd83dbSDimitry Andric auto Remark = [&](OptimizationRemark OR) { 17615ffd83dbSDimitry Andric return OR << "OpenMP runtime call " 1762fe6060f1SDimitry Andric << ore::NV("OpenMPOptRuntime", RFI.Name) << " deduplicated."; 17635ffd83dbSDimitry Andric }; 1764fe6060f1SDimitry Andric if (CI->getDebugLoc()) 1765fe6060f1SDimitry Andric emitRemark<OptimizationRemark>(CI, "OMP170", Remark); 1766fe6060f1SDimitry Andric else 1767fe6060f1SDimitry Andric emitRemark<OptimizationRemark>(&F, "OMP170", Remark); 17685ffd83dbSDimitry Andric 17695ffd83dbSDimitry Andric CGUpdater.removeCallSite(*CI); 17705ffd83dbSDimitry Andric CI->replaceAllUsesWith(ReplVal); 17715ffd83dbSDimitry Andric CI->eraseFromParent(); 17725ffd83dbSDimitry Andric ++NumOpenMPRuntimeCallsDeduplicated; 17735ffd83dbSDimitry Andric Changed = true; 17745ffd83dbSDimitry Andric return true; 17755ffd83dbSDimitry Andric }; 17765ffd83dbSDimitry Andric RFI.foreachUse(SCC, ReplaceAndDeleteCB); 17775ffd83dbSDimitry Andric 17785ffd83dbSDimitry Andric return Changed; 
17795ffd83dbSDimitry Andric }
17805ffd83dbSDimitry Andric
17815ffd83dbSDimitry Andric /// Collect arguments that represent the global thread id in \p GTIdArgs.
17825ffd83dbSDimitry Andric void collectGlobalThreadIdArguments(SmallSetVector<Value *, 16> &GTIdArgs) {
17835ffd83dbSDimitry Andric // TODO: Below we basically perform a fixpoint iteration with a pessimistic
17845ffd83dbSDimitry Andric // initialization. We could define an AbstractAttribute instead and
17855ffd83dbSDimitry Andric // run the Attributor here once it can be run as an SCC pass.
17865ffd83dbSDimitry Andric
17875ffd83dbSDimitry Andric // Helper to check the argument \p ArgNo at all call sites of \p F for
17885ffd83dbSDimitry Andric // a GTId.
17895ffd83dbSDimitry Andric auto CallArgOpIsGTId = [&](Function &F, unsigned ArgNo, CallInst &RefCI) {
17905ffd83dbSDimitry Andric if (!F.hasLocalLinkage())
17915ffd83dbSDimitry Andric return false;
17925ffd83dbSDimitry Andric for (Use &U : F.uses()) {
17935ffd83dbSDimitry Andric if (CallInst *CI = getCallIfRegularCall(U)) {
17945ffd83dbSDimitry Andric Value *ArgOp = CI->getArgOperand(ArgNo);
17955ffd83dbSDimitry Andric if (CI == &RefCI || GTIdArgs.count(ArgOp) ||
17965ffd83dbSDimitry Andric getCallIfRegularCall(
17975ffd83dbSDimitry Andric *ArgOp, &OMPInfoCache.RFIs[OMPRTL___kmpc_global_thread_num]))
17985ffd83dbSDimitry Andric continue;
17995ffd83dbSDimitry Andric }
18005ffd83dbSDimitry Andric return false;
18015ffd83dbSDimitry Andric }
18025ffd83dbSDimitry Andric return true;
18035ffd83dbSDimitry Andric };
18045ffd83dbSDimitry Andric
18055ffd83dbSDimitry Andric // Helper to identify uses of a GTId as GTId arguments.
18065ffd83dbSDimitry Andric auto AddUserArgs = [&](Value &GTId) {
18075ffd83dbSDimitry Andric for (Use &U : GTId.uses())
18085ffd83dbSDimitry Andric if (CallInst *CI = dyn_cast<CallInst>(U.getUser()))
18095ffd83dbSDimitry Andric if (CI->isArgOperand(&U))
18105ffd83dbSDimitry Andric if (Function *Callee = CI->getCalledFunction())
18115ffd83dbSDimitry Andric if (CallArgOpIsGTId(*Callee, U.getOperandNo(), *CI))
18125ffd83dbSDimitry Andric GTIdArgs.insert(Callee->getArg(U.getOperandNo()));
18135ffd83dbSDimitry Andric };
18145ffd83dbSDimitry Andric
18155ffd83dbSDimitry Andric // The argument users of __kmpc_global_thread_num calls are GTIds.
18165ffd83dbSDimitry Andric OMPInformationCache::RuntimeFunctionInfo &GlobThreadNumRFI =
18175ffd83dbSDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_global_thread_num];
18185ffd83dbSDimitry Andric
18195ffd83dbSDimitry Andric GlobThreadNumRFI.foreachUse(SCC, [&](Use &U, Function &F) {
18205ffd83dbSDimitry Andric if (CallInst *CI = getCallIfRegularCall(U, &GlobThreadNumRFI))
18215ffd83dbSDimitry Andric AddUserArgs(*CI);
18225ffd83dbSDimitry Andric return false;
18235ffd83dbSDimitry Andric });
18245ffd83dbSDimitry Andric
18255ffd83dbSDimitry Andric // Transitively search for more arguments by looking at the users of the
18265ffd83dbSDimitry Andric // ones we know already. During the search the GTIdArgs vector is extended
18275ffd83dbSDimitry Andric // so we cannot cache the size nor can we use a range based for.
1828349cc55cSDimitry Andric for (unsigned U = 0; U < GTIdArgs.size(); ++U)
1829349cc55cSDimitry Andric AddUserArgs(*GTIdArgs[U]);
18305ffd83dbSDimitry Andric }
18315ffd83dbSDimitry Andric
18325ffd83dbSDimitry Andric /// Kernel (=GPU) optimizations and utility functions
18335ffd83dbSDimitry Andric ///
18345ffd83dbSDimitry Andric ///{{
18355ffd83dbSDimitry Andric
18365ffd83dbSDimitry Andric /// Check if \p F is a kernel, hence entry point for target offloading.
18375ffd83dbSDimitry Andric bool isKernel(Function &F) { return OMPInfoCache.Kernels.count(&F); }
18385ffd83dbSDimitry Andric
18395ffd83dbSDimitry Andric /// Cache to remember the unique kernel for a function.
1840bdd1243dSDimitry Andric DenseMap<Function *, std::optional<Kernel>> UniqueKernelMap;
18415ffd83dbSDimitry Andric
18425ffd83dbSDimitry Andric /// Find the unique kernel that will execute \p F, if any.
18435ffd83dbSDimitry Andric Kernel getUniqueKernelFor(Function &F);
18445ffd83dbSDimitry Andric
18455ffd83dbSDimitry Andric /// Find the unique kernel that will execute \p I, if any.
18465ffd83dbSDimitry Andric Kernel getUniqueKernelFor(Instruction &I) {
18475ffd83dbSDimitry Andric return getUniqueKernelFor(*I.getFunction());
18485ffd83dbSDimitry Andric }
18495ffd83dbSDimitry Andric
18505ffd83dbSDimitry Andric /// Rewrite the device (=GPU) code state machine created in non-SPMD mode in
18515ffd83dbSDimitry Andric /// the cases where we can avoid taking the address of a function.
18525ffd83dbSDimitry Andric bool rewriteDeviceCodeStateMachine();
18535ffd83dbSDimitry Andric
18545ffd83dbSDimitry Andric ///
18555ffd83dbSDimitry Andric ///}}
18565ffd83dbSDimitry Andric
18575ffd83dbSDimitry Andric /// Emit a remark generically
18585ffd83dbSDimitry Andric ///
18595ffd83dbSDimitry Andric /// This template function can be used to generically emit a remark. The
18605ffd83dbSDimitry Andric /// RemarkKind should be one of the following:
18615ffd83dbSDimitry Andric /// - OptimizationRemark to indicate a successful optimization attempt
18625ffd83dbSDimitry Andric /// - OptimizationRemarkMissed to report a failed optimization attempt
18635ffd83dbSDimitry Andric /// - OptimizationRemarkAnalysis to provide additional information about an
18645ffd83dbSDimitry Andric /// optimization attempt
18655ffd83dbSDimitry Andric ///
18665ffd83dbSDimitry Andric /// The remark is built using a callback function provided by the caller that
18675ffd83dbSDimitry Andric /// takes a RemarkKind as input and returns a RemarkKind.
1868fe6060f1SDimitry Andric template <typename RemarkKind, typename RemarkCallBack>
1869fe6060f1SDimitry Andric void emitRemark(Instruction *I, StringRef RemarkName,
18705ffd83dbSDimitry Andric RemarkCallBack &&RemarkCB) const {
1871fe6060f1SDimitry Andric Function *F = I->getParent()->getParent();
18725ffd83dbSDimitry Andric auto &ORE = OREGetter(F);
18735ffd83dbSDimitry Andric
1874fe6060f1SDimitry Andric if (RemarkName.startswith("OMP"))
18755ffd83dbSDimitry Andric ORE.emit([&]() {
1876fe6060f1SDimitry Andric return RemarkCB(RemarkKind(DEBUG_TYPE, RemarkName, I))
1877fe6060f1SDimitry Andric << " [" << RemarkName << "]";
18785ffd83dbSDimitry Andric });
1879fe6060f1SDimitry Andric else
1880fe6060f1SDimitry Andric ORE.emit(
1881fe6060f1SDimitry Andric [&]() { return RemarkCB(RemarkKind(DEBUG_TYPE, RemarkName, I)); });
18825ffd83dbSDimitry Andric }
18835ffd83dbSDimitry Andric
1884fe6060f1SDimitry Andric /// Emit a remark on a function.
1885fe6060f1SDimitry Andric template <typename RemarkKind, typename RemarkCallBack> 1886fe6060f1SDimitry Andric void emitRemark(Function *F, StringRef RemarkName, 1887fe6060f1SDimitry Andric RemarkCallBack &&RemarkCB) const { 1888fe6060f1SDimitry Andric auto &ORE = OREGetter(F); 1889fe6060f1SDimitry Andric 1890fe6060f1SDimitry Andric if (RemarkName.startswith("OMP")) 1891fe6060f1SDimitry Andric ORE.emit([&]() { 1892fe6060f1SDimitry Andric return RemarkCB(RemarkKind(DEBUG_TYPE, RemarkName, F)) 1893fe6060f1SDimitry Andric << " [" << RemarkName << "]"; 1894fe6060f1SDimitry Andric }); 1895fe6060f1SDimitry Andric else 1896fe6060f1SDimitry Andric ORE.emit( 1897fe6060f1SDimitry Andric [&]() { return RemarkCB(RemarkKind(DEBUG_TYPE, RemarkName, F)); }); 1898fe6060f1SDimitry Andric } 1899fe6060f1SDimitry Andric 19005ffd83dbSDimitry Andric /// The underlying module. 19015ffd83dbSDimitry Andric Module &M; 19025ffd83dbSDimitry Andric 19035ffd83dbSDimitry Andric /// The SCC we are operating on. 19045ffd83dbSDimitry Andric SmallVectorImpl<Function *> &SCC; 19055ffd83dbSDimitry Andric 19065ffd83dbSDimitry Andric /// Callback to update the call graph, the first argument is a removed call, 19075ffd83dbSDimitry Andric /// the second an optional replacement call. 19085ffd83dbSDimitry Andric CallGraphUpdater &CGUpdater; 19095ffd83dbSDimitry Andric 19105ffd83dbSDimitry Andric /// Callback to get an OptimizationRemarkEmitter from a Function * 19115ffd83dbSDimitry Andric OptimizationRemarkGetter OREGetter; 19125ffd83dbSDimitry Andric 19135ffd83dbSDimitry Andric /// OpenMP-specific information cache. Also Used for Attributor runs. 19145ffd83dbSDimitry Andric OMPInformationCache &OMPInfoCache; 19155ffd83dbSDimitry Andric 19165ffd83dbSDimitry Andric /// Attributor instance. 19175ffd83dbSDimitry Andric Attributor &A; 19185ffd83dbSDimitry Andric 19195ffd83dbSDimitry Andric /// Helper function to run Attributor on SCC. 1920fe6060f1SDimitry Andric bool runAttributor(bool IsModulePass) { 19215ffd83dbSDimitry Andric if (SCC.empty()) 19225ffd83dbSDimitry Andric return false; 19235ffd83dbSDimitry Andric 1924fe6060f1SDimitry Andric registerAAs(IsModulePass); 19255ffd83dbSDimitry Andric 19265ffd83dbSDimitry Andric ChangeStatus Changed = A.run(); 19275ffd83dbSDimitry Andric 19285ffd83dbSDimitry Andric LLVM_DEBUG(dbgs() << "[Attributor] Done with " << SCC.size() 19295ffd83dbSDimitry Andric << " functions, result: " << Changed << ".\n"); 19305ffd83dbSDimitry Andric 19315ffd83dbSDimitry Andric return Changed == ChangeStatus::CHANGED; 19325ffd83dbSDimitry Andric } 19335ffd83dbSDimitry Andric 1934fe6060f1SDimitry Andric void registerFoldRuntimeCall(RuntimeFunction RF); 1935fe6060f1SDimitry Andric 19365ffd83dbSDimitry Andric /// Populate the Attributor with abstract attribute opportunities in the 1937bdd1243dSDimitry Andric /// functions. 1938fe6060f1SDimitry Andric void registerAAs(bool IsModulePass); 1939bdd1243dSDimitry Andric 1940bdd1243dSDimitry Andric public: 1941bdd1243dSDimitry Andric /// Callback to register AAs for live functions, including internal functions 1942bdd1243dSDimitry Andric /// marked live during the traversal. 
1943bdd1243dSDimitry Andric static void registerAAsForFunction(Attributor &A, const Function &F); 19445ffd83dbSDimitry Andric }; 19455ffd83dbSDimitry Andric 19465ffd83dbSDimitry Andric Kernel OpenMPOpt::getUniqueKernelFor(Function &F) { 1947bdd1243dSDimitry Andric if (!OMPInfoCache.ModuleSlice.empty() && !OMPInfoCache.ModuleSlice.count(&F)) 19485ffd83dbSDimitry Andric return nullptr; 19495ffd83dbSDimitry Andric 19505ffd83dbSDimitry Andric // Use a scope to keep the lifetime of the CachedKernel short. 19515ffd83dbSDimitry Andric { 1952bdd1243dSDimitry Andric std::optional<Kernel> &CachedKernel = UniqueKernelMap[&F]; 19535ffd83dbSDimitry Andric if (CachedKernel) 19545ffd83dbSDimitry Andric return *CachedKernel; 19555ffd83dbSDimitry Andric 19565ffd83dbSDimitry Andric // TODO: We should use an AA to create an (optimistic and callback 19575ffd83dbSDimitry Andric // call-aware) call graph. For now we stick to simple patterns that 19585ffd83dbSDimitry Andric // are less powerful, basically the worst fixpoint. 19595ffd83dbSDimitry Andric if (isKernel(F)) { 19605ffd83dbSDimitry Andric CachedKernel = Kernel(&F); 19615ffd83dbSDimitry Andric return *CachedKernel; 19625ffd83dbSDimitry Andric } 19635ffd83dbSDimitry Andric 19645ffd83dbSDimitry Andric CachedKernel = nullptr; 1965e8d8bef9SDimitry Andric if (!F.hasLocalLinkage()) { 1966e8d8bef9SDimitry Andric 1967e8d8bef9SDimitry Andric // See https://openmp.llvm.org/remarks/OptimizationRemarks.html 1968fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemarkAnalysis ORA) { 1969fe6060f1SDimitry Andric return ORA << "Potentially unknown OpenMP target region caller."; 1970e8d8bef9SDimitry Andric }; 1971fe6060f1SDimitry Andric emitRemark<OptimizationRemarkAnalysis>(&F, "OMP100", Remark); 1972e8d8bef9SDimitry Andric 19735ffd83dbSDimitry Andric return nullptr; 19745ffd83dbSDimitry Andric } 1975e8d8bef9SDimitry Andric } 19765ffd83dbSDimitry Andric 19775ffd83dbSDimitry Andric auto GetUniqueKernelForUse = [&](const Use &U) -> Kernel { 19785ffd83dbSDimitry Andric if (auto *Cmp = dyn_cast<ICmpInst>(U.getUser())) { 19795ffd83dbSDimitry Andric // Allow use in equality comparisons. 19805ffd83dbSDimitry Andric if (Cmp->isEquality()) 19815ffd83dbSDimitry Andric return getUniqueKernelFor(*Cmp); 19825ffd83dbSDimitry Andric return nullptr; 19835ffd83dbSDimitry Andric } 19845ffd83dbSDimitry Andric if (auto *CB = dyn_cast<CallBase>(U.getUser())) { 19855ffd83dbSDimitry Andric // Allow direct calls. 19865ffd83dbSDimitry Andric if (CB->isCallee(&U)) 19875ffd83dbSDimitry Andric return getUniqueKernelFor(*CB); 1988fe6060f1SDimitry Andric 1989fe6060f1SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &KernelParallelRFI = 1990fe6060f1SDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_parallel_51]; 1991fe6060f1SDimitry Andric // Allow the use in __kmpc_parallel_51 calls. 1992fe6060f1SDimitry Andric if (OpenMPOpt::getCallIfRegularCall(*U.getUser(), &KernelParallelRFI)) 19935ffd83dbSDimitry Andric return getUniqueKernelFor(*CB); 19945ffd83dbSDimitry Andric return nullptr; 19955ffd83dbSDimitry Andric } 19965ffd83dbSDimitry Andric // Disallow every other use. 19975ffd83dbSDimitry Andric return nullptr; 19985ffd83dbSDimitry Andric }; 19995ffd83dbSDimitry Andric 20005ffd83dbSDimitry Andric // TODO: In the future we want to track more than just a unique kernel. 
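// Note: GetUniqueKernelForUse returns nullptr for any use it cannot
// attribute to a single kernel, and that nullptr is inserted into the set
// below as well. A unique kernel is therefore only reported if every use
// (direct call, equality compare, or __kmpc_parallel_51 wrapper argument)
// resolves to one and the same kernel.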
20015ffd83dbSDimitry Andric SmallPtrSet<Kernel, 2> PotentialKernels; 2002e8d8bef9SDimitry Andric OMPInformationCache::foreachUse(F, [&](const Use &U) { 20035ffd83dbSDimitry Andric PotentialKernels.insert(GetUniqueKernelForUse(U)); 20045ffd83dbSDimitry Andric }); 20055ffd83dbSDimitry Andric 20065ffd83dbSDimitry Andric Kernel K = nullptr; 20075ffd83dbSDimitry Andric if (PotentialKernels.size() == 1) 20085ffd83dbSDimitry Andric K = *PotentialKernels.begin(); 20095ffd83dbSDimitry Andric 20105ffd83dbSDimitry Andric // Cache the result. 20115ffd83dbSDimitry Andric UniqueKernelMap[&F] = K; 20125ffd83dbSDimitry Andric 20135ffd83dbSDimitry Andric return K; 20145ffd83dbSDimitry Andric } 20155ffd83dbSDimitry Andric 20165ffd83dbSDimitry Andric bool OpenMPOpt::rewriteDeviceCodeStateMachine() { 2017fe6060f1SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &KernelParallelRFI = 2018fe6060f1SDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_parallel_51]; 20195ffd83dbSDimitry Andric 20205ffd83dbSDimitry Andric bool Changed = false; 2021fe6060f1SDimitry Andric if (!KernelParallelRFI) 20225ffd83dbSDimitry Andric return Changed; 20235ffd83dbSDimitry Andric 2024349cc55cSDimitry Andric // If we have disabled state machine changes, exit 2025349cc55cSDimitry Andric if (DisableOpenMPOptStateMachineRewrite) 2026349cc55cSDimitry Andric return Changed; 2027349cc55cSDimitry Andric 20285ffd83dbSDimitry Andric for (Function *F : SCC) { 20295ffd83dbSDimitry Andric 2030fe6060f1SDimitry Andric // Check if the function is a use in a __kmpc_parallel_51 call at 20315ffd83dbSDimitry Andric // all. 20325ffd83dbSDimitry Andric bool UnknownUse = false; 2033fe6060f1SDimitry Andric bool KernelParallelUse = false; 20345ffd83dbSDimitry Andric unsigned NumDirectCalls = 0; 20355ffd83dbSDimitry Andric 20365ffd83dbSDimitry Andric SmallVector<Use *, 2> ToBeReplacedStateMachineUses; 2037e8d8bef9SDimitry Andric OMPInformationCache::foreachUse(*F, [&](Use &U) { 20385ffd83dbSDimitry Andric if (auto *CB = dyn_cast<CallBase>(U.getUser())) 20395ffd83dbSDimitry Andric if (CB->isCallee(&U)) { 20405ffd83dbSDimitry Andric ++NumDirectCalls; 20415ffd83dbSDimitry Andric return; 20425ffd83dbSDimitry Andric } 20435ffd83dbSDimitry Andric 20445ffd83dbSDimitry Andric if (isa<ICmpInst>(U.getUser())) { 20455ffd83dbSDimitry Andric ToBeReplacedStateMachineUses.push_back(&U); 20465ffd83dbSDimitry Andric return; 20475ffd83dbSDimitry Andric } 2048fe6060f1SDimitry Andric 2049fe6060f1SDimitry Andric // Find wrapper functions that represent parallel kernels. 2050fe6060f1SDimitry Andric CallInst *CI = 2051fe6060f1SDimitry Andric OpenMPOpt::getCallIfRegularCall(*U.getUser(), &KernelParallelRFI); 2052fe6060f1SDimitry Andric const unsigned int WrapperFunctionArgNo = 6; 2053fe6060f1SDimitry Andric if (!KernelParallelUse && CI && 2054fe6060f1SDimitry Andric CI->getArgOperandNo(&U) == WrapperFunctionArgNo) { 2055fe6060f1SDimitry Andric KernelParallelUse = true; 20565ffd83dbSDimitry Andric ToBeReplacedStateMachineUses.push_back(&U); 20575ffd83dbSDimitry Andric return; 20585ffd83dbSDimitry Andric } 20595ffd83dbSDimitry Andric UnknownUse = true; 20605ffd83dbSDimitry Andric }); 20615ffd83dbSDimitry Andric 2062fe6060f1SDimitry Andric // Do not emit a remark if we haven't seen a __kmpc_parallel_51 20635ffd83dbSDimitry Andric // use. 2064fe6060f1SDimitry Andric if (!KernelParallelUse) 20655ffd83dbSDimitry Andric continue; 20665ffd83dbSDimitry Andric 20675ffd83dbSDimitry Andric // If this ever hits, we should investigate. 
20685ffd83dbSDimitry Andric // TODO: Checking the number of uses is not a necessary restriction and 20695ffd83dbSDimitry Andric // should be lifted. 20705ffd83dbSDimitry Andric if (UnknownUse || NumDirectCalls != 1 || 2071fe6060f1SDimitry Andric ToBeReplacedStateMachineUses.size() > 2) { 2072fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemarkAnalysis ORA) { 2073fe6060f1SDimitry Andric return ORA << "Parallel region is used in " 20745ffd83dbSDimitry Andric << (UnknownUse ? "unknown" : "unexpected") 2075fe6060f1SDimitry Andric << " ways. Will not attempt to rewrite the state machine."; 20765ffd83dbSDimitry Andric }; 2077fe6060f1SDimitry Andric emitRemark<OptimizationRemarkAnalysis>(F, "OMP101", Remark); 20785ffd83dbSDimitry Andric continue; 20795ffd83dbSDimitry Andric } 20805ffd83dbSDimitry Andric 2081fe6060f1SDimitry Andric // Even if we have __kmpc_parallel_51 calls, we (for now) give 20825ffd83dbSDimitry Andric // up if the function is not called from a unique kernel. 20835ffd83dbSDimitry Andric Kernel K = getUniqueKernelFor(*F); 20845ffd83dbSDimitry Andric if (!K) { 2085fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemarkAnalysis ORA) { 2086fe6060f1SDimitry Andric return ORA << "Parallel region is not called from a unique kernel. " 2087fe6060f1SDimitry Andric "Will not attempt to rewrite the state machine."; 20885ffd83dbSDimitry Andric }; 2089fe6060f1SDimitry Andric emitRemark<OptimizationRemarkAnalysis>(F, "OMP102", Remark); 20905ffd83dbSDimitry Andric continue; 20915ffd83dbSDimitry Andric } 20925ffd83dbSDimitry Andric 20935ffd83dbSDimitry Andric // We now know F is a parallel body function called only from the kernel K. 20945ffd83dbSDimitry Andric // We also identified the state machine uses in which we replace the 20955ffd83dbSDimitry Andric // function pointer by a new global symbol for identification purposes. This 20965ffd83dbSDimitry Andric // ensures only direct calls to the function are left. 20975ffd83dbSDimitry Andric 20985ffd83dbSDimitry Andric Module &M = *F->getParent(); 20995ffd83dbSDimitry Andric Type *Int8Ty = Type::getInt8Ty(M.getContext()); 21005ffd83dbSDimitry Andric 21015ffd83dbSDimitry Andric auto *ID = new GlobalVariable( 21025ffd83dbSDimitry Andric M, Int8Ty, /* isConstant */ true, GlobalValue::PrivateLinkage, 21035ffd83dbSDimitry Andric UndefValue::get(Int8Ty), F->getName() + ".ID"); 21045ffd83dbSDimitry Andric 21055ffd83dbSDimitry Andric for (Use *U : ToBeReplacedStateMachineUses) 21068c6f6c0cSDimitry Andric U->set(ConstantExpr::getPointerBitCastOrAddrSpaceCast( 21078c6f6c0cSDimitry Andric ID, U->get()->getType())); 21085ffd83dbSDimitry Andric 21095ffd83dbSDimitry Andric ++NumOpenMPParallelRegionsReplacedInGPUStateMachine; 21105ffd83dbSDimitry Andric 21115ffd83dbSDimitry Andric Changed = true; 21125ffd83dbSDimitry Andric } 21135ffd83dbSDimitry Andric 21145ffd83dbSDimitry Andric return Changed; 21155ffd83dbSDimitry Andric } 21165ffd83dbSDimitry Andric 21175ffd83dbSDimitry Andric /// Abstract Attribute for tracking ICV values. 
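/// As an illustrative example (assuming no intervening call might change the
/// ICV), tracking the nthreads ICV allows
/// \code
///   omp_set_num_threads(4);
///   int N = omp_get_max_threads();
/// \endcode
/// to be simplified so that the getter call is replaced by the value 4 that
/// was passed to the setter.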
21185ffd83dbSDimitry Andric struct AAICVTracker : public StateWrapper<BooleanState, AbstractAttribute> {
21195ffd83dbSDimitry Andric using Base = StateWrapper<BooleanState, AbstractAttribute>;
21205ffd83dbSDimitry Andric AAICVTracker(const IRPosition &IRP, Attributor &A) : Base(IRP) {}
21215ffd83dbSDimitry Andric
2122e8d8bef9SDimitry Andric void initialize(Attributor &A) override {
2123e8d8bef9SDimitry Andric Function *F = getAnchorScope();
2124e8d8bef9SDimitry Andric if (!F || !A.isFunctionIPOAmendable(*F))
2125e8d8bef9SDimitry Andric indicatePessimisticFixpoint();
2126e8d8bef9SDimitry Andric }
2127e8d8bef9SDimitry Andric
21285ffd83dbSDimitry Andric /// Returns true if value is assumed to be tracked.
21295ffd83dbSDimitry Andric bool isAssumedTracked() const { return getAssumed(); }
21305ffd83dbSDimitry Andric
21315ffd83dbSDimitry Andric /// Returns true if value is known to be tracked.
21325ffd83dbSDimitry Andric bool isKnownTracked() const { return getAssumed(); }
21335ffd83dbSDimitry Andric
21345ffd83dbSDimitry Andric /// Create an abstract attribute view for the position \p IRP.
21355ffd83dbSDimitry Andric static AAICVTracker &createForPosition(const IRPosition &IRP, Attributor &A);
21365ffd83dbSDimitry Andric
21375ffd83dbSDimitry Andric /// Return the value with which \p I can be replaced for specific \p ICV.
2138bdd1243dSDimitry Andric virtual std::optional<Value *> getReplacementValue(InternalControlVar ICV,
2139e8d8bef9SDimitry Andric const Instruction *I,
2140e8d8bef9SDimitry Andric Attributor &A) const {
2141bdd1243dSDimitry Andric return std::nullopt;
2142e8d8bef9SDimitry Andric }
2143e8d8bef9SDimitry Andric
2144e8d8bef9SDimitry Andric /// Return an assumed unique ICV value if a single candidate is found. If
2145bdd1243dSDimitry Andric /// there cannot be one, return nullptr. If it is not clear yet, return
2146bdd1243dSDimitry Andric /// std::nullopt.
2147bdd1243dSDimitry Andric virtual std::optional<Value *>
2148e8d8bef9SDimitry Andric getUniqueReplacementValue(InternalControlVar ICV) const = 0;
2149e8d8bef9SDimitry Andric
2150e8d8bef9SDimitry Andric // Currently only nthreads is being tracked.
2151e8d8bef9SDimitry Andric // This array will only grow with time.
2152e8d8bef9SDimitry Andric InternalControlVar TrackableICVs[1] = {ICV_nthreads};
21535ffd83dbSDimitry Andric
21545ffd83dbSDimitry Andric /// See AbstractAttribute::getName()
21555ffd83dbSDimitry Andric const std::string getName() const override { return "AAICVTracker"; }
21565ffd83dbSDimitry Andric
21575ffd83dbSDimitry Andric /// See AbstractAttribute::getIdAddr()
21585ffd83dbSDimitry Andric const char *getIdAddr() const override { return &ID; }
21595ffd83dbSDimitry Andric
21605ffd83dbSDimitry Andric /// This function should return true if the type of the \p AA is AAICVTracker
21615ffd83dbSDimitry Andric static bool classof(const AbstractAttribute *AA) {
21625ffd83dbSDimitry Andric return (AA->getIdAddr() == &ID);
21635ffd83dbSDimitry Andric }
21645ffd83dbSDimitry Andric
21655ffd83dbSDimitry Andric static const char ID;
21665ffd83dbSDimitry Andric };
21675ffd83dbSDimitry Andric
21685ffd83dbSDimitry Andric struct AAICVTrackerFunction : public AAICVTracker {
21695ffd83dbSDimitry Andric AAICVTrackerFunction(const IRPosition &IRP, Attributor &A)
21705ffd83dbSDimitry Andric : AAICVTracker(IRP, A) {}
21715ffd83dbSDimitry Andric
21725ffd83dbSDimitry Andric // FIXME: come up with better string.
2173e8d8bef9SDimitry Andric const std::string getAsStr() const override { return "ICVTrackerFunction"; }
21745ffd83dbSDimitry Andric
21755ffd83dbSDimitry Andric // FIXME: come up with some stats.
21765ffd83dbSDimitry Andric void trackStatistics() const override {}
21775ffd83dbSDimitry Andric
2178e8d8bef9SDimitry Andric /// We don't manifest anything for this AA.
21795ffd83dbSDimitry Andric ChangeStatus manifest(Attributor &A) override {
2180e8d8bef9SDimitry Andric return ChangeStatus::UNCHANGED;
21815ffd83dbSDimitry Andric }
21825ffd83dbSDimitry Andric
21835ffd83dbSDimitry Andric // Map of ICV to their values at specific program point.
2184e8d8bef9SDimitry Andric EnumeratedArray<DenseMap<Instruction *, Value *>, InternalControlVar,
21855ffd83dbSDimitry Andric InternalControlVar::ICV___last>
2186e8d8bef9SDimitry Andric ICVReplacementValuesMap;
21875ffd83dbSDimitry Andric
21885ffd83dbSDimitry Andric ChangeStatus updateImpl(Attributor &A) override {
21895ffd83dbSDimitry Andric ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
21905ffd83dbSDimitry Andric
21915ffd83dbSDimitry Andric Function *F = getAnchorScope();
21925ffd83dbSDimitry Andric
21935ffd83dbSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
21945ffd83dbSDimitry Andric
21955ffd83dbSDimitry Andric for (InternalControlVar ICV : TrackableICVs) {
21965ffd83dbSDimitry Andric auto &SetterRFI = OMPInfoCache.RFIs[OMPInfoCache.ICVs[ICV].Setter];
21975ffd83dbSDimitry Andric
2198e8d8bef9SDimitry Andric auto &ValuesMap = ICVReplacementValuesMap[ICV];
21995ffd83dbSDimitry Andric auto TrackValues = [&](Use &U, Function &) {
22005ffd83dbSDimitry Andric CallInst *CI = OpenMPOpt::getCallIfRegularCall(U);
22015ffd83dbSDimitry Andric if (!CI)
22025ffd83dbSDimitry Andric return false;
22035ffd83dbSDimitry Andric
22045ffd83dbSDimitry Andric // FIXME: handle setters with more than one argument.
22055ffd83dbSDimitry Andric /// Track new value.
2206e8d8bef9SDimitry Andric if (ValuesMap.insert(std::make_pair(CI, CI->getArgOperand(0))).second)
22075ffd83dbSDimitry Andric HasChanged = ChangeStatus::CHANGED;
22085ffd83dbSDimitry Andric
22095ffd83dbSDimitry Andric return false;
22105ffd83dbSDimitry Andric };
22115ffd83dbSDimitry Andric
2212e8d8bef9SDimitry Andric auto CallCheck = [&](Instruction &I) {
2213bdd1243dSDimitry Andric std::optional<Value *> ReplVal = getValueForCall(A, I, ICV);
221481ad6265SDimitry Andric if (ReplVal && ValuesMap.insert(std::make_pair(&I, *ReplVal)).second)
2215e8d8bef9SDimitry Andric HasChanged = ChangeStatus::CHANGED;
2216e8d8bef9SDimitry Andric
2217e8d8bef9SDimitry Andric return true;
2218e8d8bef9SDimitry Andric };
2219e8d8bef9SDimitry Andric
2220e8d8bef9SDimitry Andric // Track all changes of an ICV.
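// Setter calls record the value they were passed; every other call is
// classified via getValueForCall and recorded as either a known value or
// nullptr, i.e., "value unknown after this point".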
22215ffd83dbSDimitry Andric SetterRFI.foreachUse(TrackValues, F); 2222e8d8bef9SDimitry Andric 2223fe6060f1SDimitry Andric bool UsedAssumedInformation = false; 2224e8d8bef9SDimitry Andric A.checkForAllInstructions(CallCheck, *this, {Instruction::Call}, 2225fe6060f1SDimitry Andric UsedAssumedInformation, 2226e8d8bef9SDimitry Andric /* CheckBBLivenessOnly */ true); 2227e8d8bef9SDimitry Andric 2228e8d8bef9SDimitry Andric /// TODO: Figure out a way to avoid adding entry in 2229e8d8bef9SDimitry Andric /// ICVReplacementValuesMap 2230e8d8bef9SDimitry Andric Instruction *Entry = &F->getEntryBlock().front(); 2231e8d8bef9SDimitry Andric if (HasChanged == ChangeStatus::CHANGED && !ValuesMap.count(Entry)) 2232e8d8bef9SDimitry Andric ValuesMap.insert(std::make_pair(Entry, nullptr)); 22335ffd83dbSDimitry Andric } 22345ffd83dbSDimitry Andric 22355ffd83dbSDimitry Andric return HasChanged; 22365ffd83dbSDimitry Andric } 22375ffd83dbSDimitry Andric 223804eeddc0SDimitry Andric /// Helper to check if \p I is a call and get the value for it if it is 2239e8d8bef9SDimitry Andric /// unique. 2240bdd1243dSDimitry Andric std::optional<Value *> getValueForCall(Attributor &A, const Instruction &I, 2241e8d8bef9SDimitry Andric InternalControlVar &ICV) const { 22425ffd83dbSDimitry Andric 224304eeddc0SDimitry Andric const auto *CB = dyn_cast<CallBase>(&I); 2244e8d8bef9SDimitry Andric if (!CB || CB->hasFnAttr("no_openmp") || 2245e8d8bef9SDimitry Andric CB->hasFnAttr("no_openmp_routines")) 2246bdd1243dSDimitry Andric return std::nullopt; 2247e8d8bef9SDimitry Andric 22485ffd83dbSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 22495ffd83dbSDimitry Andric auto &GetterRFI = OMPInfoCache.RFIs[OMPInfoCache.ICVs[ICV].Getter]; 2250e8d8bef9SDimitry Andric auto &SetterRFI = OMPInfoCache.RFIs[OMPInfoCache.ICVs[ICV].Setter]; 2251e8d8bef9SDimitry Andric Function *CalledFunction = CB->getCalledFunction(); 22525ffd83dbSDimitry Andric 2253e8d8bef9SDimitry Andric // Indirect call, assume ICV changes. 2254e8d8bef9SDimitry Andric if (CalledFunction == nullptr) 2255e8d8bef9SDimitry Andric return nullptr; 2256e8d8bef9SDimitry Andric if (CalledFunction == GetterRFI.Declaration) 2257bdd1243dSDimitry Andric return std::nullopt; 2258e8d8bef9SDimitry Andric if (CalledFunction == SetterRFI.Declaration) { 225904eeddc0SDimitry Andric if (ICVReplacementValuesMap[ICV].count(&I)) 226004eeddc0SDimitry Andric return ICVReplacementValuesMap[ICV].lookup(&I); 2261e8d8bef9SDimitry Andric 2262e8d8bef9SDimitry Andric return nullptr; 2263e8d8bef9SDimitry Andric } 2264e8d8bef9SDimitry Andric 2265e8d8bef9SDimitry Andric // Since we don't know, assume it changes the ICV. 2266e8d8bef9SDimitry Andric if (CalledFunction->isDeclaration()) 2267e8d8bef9SDimitry Andric return nullptr; 2268e8d8bef9SDimitry Andric 2269fe6060f1SDimitry Andric const auto &ICVTrackingAA = A.getAAFor<AAICVTracker>( 2270fe6060f1SDimitry Andric *this, IRPosition::callsite_returned(*CB), DepClassTy::REQUIRED); 2271e8d8bef9SDimitry Andric 227204eeddc0SDimitry Andric if (ICVTrackingAA.isAssumedTracked()) { 2273bdd1243dSDimitry Andric std::optional<Value *> URV = ICVTrackingAA.getUniqueReplacementValue(ICV); 227481ad6265SDimitry Andric if (!URV || (*URV && AA::isValidAtPosition(AA::ValueAndContext(**URV, I), 227581ad6265SDimitry Andric OMPInfoCache))) 227604eeddc0SDimitry Andric return URV; 227704eeddc0SDimitry Andric } 2278e8d8bef9SDimitry Andric 2279e8d8bef9SDimitry Andric // If we don't know, assume it changes. 
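// (nullptr encodes "the ICV value is unknown/changed here", whereas
// std::nullopt encodes "this call does not affect the ICV".)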
2280e8d8bef9SDimitry Andric return nullptr; 2281e8d8bef9SDimitry Andric } 2282e8d8bef9SDimitry Andric 2283bdd1243dSDimitry Andric // We don't check unique value for a function, so return std::nullopt. 2284bdd1243dSDimitry Andric std::optional<Value *> 2285e8d8bef9SDimitry Andric getUniqueReplacementValue(InternalControlVar ICV) const override { 2286bdd1243dSDimitry Andric return std::nullopt; 2287e8d8bef9SDimitry Andric } 2288e8d8bef9SDimitry Andric 2289e8d8bef9SDimitry Andric /// Return the value with which \p I can be replaced for specific \p ICV. 2290bdd1243dSDimitry Andric std::optional<Value *> getReplacementValue(InternalControlVar ICV, 2291e8d8bef9SDimitry Andric const Instruction *I, 2292e8d8bef9SDimitry Andric Attributor &A) const override { 2293e8d8bef9SDimitry Andric const auto &ValuesMap = ICVReplacementValuesMap[ICV]; 2294e8d8bef9SDimitry Andric if (ValuesMap.count(I)) 2295e8d8bef9SDimitry Andric return ValuesMap.lookup(I); 2296e8d8bef9SDimitry Andric 2297e8d8bef9SDimitry Andric SmallVector<const Instruction *, 16> Worklist; 2298e8d8bef9SDimitry Andric SmallPtrSet<const Instruction *, 16> Visited; 2299e8d8bef9SDimitry Andric Worklist.push_back(I); 2300e8d8bef9SDimitry Andric 2301bdd1243dSDimitry Andric std::optional<Value *> ReplVal; 2302e8d8bef9SDimitry Andric 2303e8d8bef9SDimitry Andric while (!Worklist.empty()) { 2304e8d8bef9SDimitry Andric const Instruction *CurrInst = Worklist.pop_back_val(); 2305e8d8bef9SDimitry Andric if (!Visited.insert(CurrInst).second) 23065ffd83dbSDimitry Andric continue; 23075ffd83dbSDimitry Andric 2308e8d8bef9SDimitry Andric const BasicBlock *CurrBB = CurrInst->getParent(); 2309e8d8bef9SDimitry Andric 2310e8d8bef9SDimitry Andric // Go up and look for all potential setters/calls that might change the 2311e8d8bef9SDimitry Andric // ICV. 2312e8d8bef9SDimitry Andric while ((CurrInst = CurrInst->getPrevNode())) { 2313e8d8bef9SDimitry Andric if (ValuesMap.count(CurrInst)) { 2314bdd1243dSDimitry Andric std::optional<Value *> NewReplVal = ValuesMap.lookup(CurrInst); 2315e8d8bef9SDimitry Andric // Unknown value, track new. 231681ad6265SDimitry Andric if (!ReplVal) { 2317e8d8bef9SDimitry Andric ReplVal = NewReplVal; 2318e8d8bef9SDimitry Andric break; 2319e8d8bef9SDimitry Andric } 2320e8d8bef9SDimitry Andric 2321e8d8bef9SDimitry Andric // If we found a new value, we can't know the icv value anymore. 232281ad6265SDimitry Andric if (NewReplVal) 2323e8d8bef9SDimitry Andric if (ReplVal != NewReplVal) 23245ffd83dbSDimitry Andric return nullptr; 23255ffd83dbSDimitry Andric 2326e8d8bef9SDimitry Andric break; 23275ffd83dbSDimitry Andric } 23285ffd83dbSDimitry Andric 2329bdd1243dSDimitry Andric std::optional<Value *> NewReplVal = getValueForCall(A, *CurrInst, ICV); 233081ad6265SDimitry Andric if (!NewReplVal) 2331e8d8bef9SDimitry Andric continue; 2332e8d8bef9SDimitry Andric 2333e8d8bef9SDimitry Andric // Unknown value, track new. 233481ad6265SDimitry Andric if (!ReplVal) { 2335e8d8bef9SDimitry Andric ReplVal = NewReplVal; 2336e8d8bef9SDimitry Andric break; 23375ffd83dbSDimitry Andric } 23385ffd83dbSDimitry Andric 2339e8d8bef9SDimitry Andric // if (NewReplVal.hasValue()) 2340e8d8bef9SDimitry Andric // We found a new value, we can't know the icv value anymore. 2341e8d8bef9SDimitry Andric if (ReplVal != NewReplVal) 23425ffd83dbSDimitry Andric return nullptr; 23435ffd83dbSDimitry Andric } 2344e8d8bef9SDimitry Andric 2345e8d8bef9SDimitry Andric // If we are in the same BB and we have a value, we are done. 
234681ad6265SDimitry Andric if (CurrBB == I->getParent() && ReplVal) 2347e8d8bef9SDimitry Andric return ReplVal; 2348e8d8bef9SDimitry Andric 2349e8d8bef9SDimitry Andric // Go through all predecessors and add terminators for analysis. 2350e8d8bef9SDimitry Andric for (const BasicBlock *Pred : predecessors(CurrBB)) 2351e8d8bef9SDimitry Andric if (const Instruction *Terminator = Pred->getTerminator()) 2352e8d8bef9SDimitry Andric Worklist.push_back(Terminator); 2353e8d8bef9SDimitry Andric } 2354e8d8bef9SDimitry Andric 2355e8d8bef9SDimitry Andric return ReplVal; 2356e8d8bef9SDimitry Andric } 2357e8d8bef9SDimitry Andric }; 2358e8d8bef9SDimitry Andric 2359e8d8bef9SDimitry Andric struct AAICVTrackerFunctionReturned : AAICVTracker { 2360e8d8bef9SDimitry Andric AAICVTrackerFunctionReturned(const IRPosition &IRP, Attributor &A) 2361e8d8bef9SDimitry Andric : AAICVTracker(IRP, A) {} 2362e8d8bef9SDimitry Andric 2363e8d8bef9SDimitry Andric // FIXME: come up with better string. 2364e8d8bef9SDimitry Andric const std::string getAsStr() const override { 2365e8d8bef9SDimitry Andric return "ICVTrackerFunctionReturned"; 2366e8d8bef9SDimitry Andric } 2367e8d8bef9SDimitry Andric 2368e8d8bef9SDimitry Andric // FIXME: come up with some stats. 2369e8d8bef9SDimitry Andric void trackStatistics() const override {} 2370e8d8bef9SDimitry Andric 2371e8d8bef9SDimitry Andric /// We don't manifest anything for this AA. 2372e8d8bef9SDimitry Andric ChangeStatus manifest(Attributor &A) override { 2373e8d8bef9SDimitry Andric return ChangeStatus::UNCHANGED; 2374e8d8bef9SDimitry Andric } 2375e8d8bef9SDimitry Andric 2376e8d8bef9SDimitry Andric // Map of ICV to their values at specific program point. 2377bdd1243dSDimitry Andric EnumeratedArray<std::optional<Value *>, InternalControlVar, 2378e8d8bef9SDimitry Andric InternalControlVar::ICV___last> 2379e8d8bef9SDimitry Andric ICVReplacementValuesMap; 2380e8d8bef9SDimitry Andric 2381e8d8bef9SDimitry Andric /// Return the value with which \p I can be replaced for specific \p ICV. 2382bdd1243dSDimitry Andric std::optional<Value *> 2383e8d8bef9SDimitry Andric getUniqueReplacementValue(InternalControlVar ICV) const override { 2384e8d8bef9SDimitry Andric return ICVReplacementValuesMap[ICV]; 2385e8d8bef9SDimitry Andric } 2386e8d8bef9SDimitry Andric 2387e8d8bef9SDimitry Andric ChangeStatus updateImpl(Attributor &A) override { 2388e8d8bef9SDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED; 2389e8d8bef9SDimitry Andric const auto &ICVTrackingAA = A.getAAFor<AAICVTracker>( 2390fe6060f1SDimitry Andric *this, IRPosition::function(*getAnchorScope()), DepClassTy::REQUIRED); 2391e8d8bef9SDimitry Andric 2392e8d8bef9SDimitry Andric if (!ICVTrackingAA.isAssumedTracked()) 2393e8d8bef9SDimitry Andric return indicatePessimisticFixpoint(); 2394e8d8bef9SDimitry Andric 2395e8d8bef9SDimitry Andric for (InternalControlVar ICV : TrackableICVs) { 2396bdd1243dSDimitry Andric std::optional<Value *> &ReplVal = ICVReplacementValuesMap[ICV]; 2397bdd1243dSDimitry Andric std::optional<Value *> UniqueICVValue; 2398e8d8bef9SDimitry Andric 2399e8d8bef9SDimitry Andric auto CheckReturnInst = [&](Instruction &I) { 2400bdd1243dSDimitry Andric std::optional<Value *> NewReplVal = 2401e8d8bef9SDimitry Andric ICVTrackingAA.getReplacementValue(ICV, &I, A); 2402e8d8bef9SDimitry Andric 2403e8d8bef9SDimitry Andric // If we found a second ICV value there is no unique returned value. 
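// (A false return aborts the instruction scan; the code after the scan then
// records nullptr, i.e., no unique value.)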
240481ad6265SDimitry Andric if (UniqueICVValue && UniqueICVValue != NewReplVal) 2405e8d8bef9SDimitry Andric return false; 2406e8d8bef9SDimitry Andric 2407e8d8bef9SDimitry Andric UniqueICVValue = NewReplVal; 2408e8d8bef9SDimitry Andric 2409e8d8bef9SDimitry Andric return true; 2410e8d8bef9SDimitry Andric }; 2411e8d8bef9SDimitry Andric 2412fe6060f1SDimitry Andric bool UsedAssumedInformation = false; 2413e8d8bef9SDimitry Andric if (!A.checkForAllInstructions(CheckReturnInst, *this, {Instruction::Ret}, 2414fe6060f1SDimitry Andric UsedAssumedInformation, 2415e8d8bef9SDimitry Andric /* CheckBBLivenessOnly */ true)) 2416e8d8bef9SDimitry Andric UniqueICVValue = nullptr; 2417e8d8bef9SDimitry Andric 2418e8d8bef9SDimitry Andric if (UniqueICVValue == ReplVal) 2419e8d8bef9SDimitry Andric continue; 2420e8d8bef9SDimitry Andric 2421e8d8bef9SDimitry Andric ReplVal = UniqueICVValue; 2422e8d8bef9SDimitry Andric Changed = ChangeStatus::CHANGED; 2423e8d8bef9SDimitry Andric } 2424e8d8bef9SDimitry Andric 2425e8d8bef9SDimitry Andric return Changed; 2426e8d8bef9SDimitry Andric } 2427e8d8bef9SDimitry Andric }; 2428e8d8bef9SDimitry Andric 2429e8d8bef9SDimitry Andric struct AAICVTrackerCallSite : AAICVTracker { 2430e8d8bef9SDimitry Andric AAICVTrackerCallSite(const IRPosition &IRP, Attributor &A) 2431e8d8bef9SDimitry Andric : AAICVTracker(IRP, A) {} 2432e8d8bef9SDimitry Andric 2433e8d8bef9SDimitry Andric void initialize(Attributor &A) override { 2434e8d8bef9SDimitry Andric Function *F = getAnchorScope(); 2435e8d8bef9SDimitry Andric if (!F || !A.isFunctionIPOAmendable(*F)) 2436e8d8bef9SDimitry Andric indicatePessimisticFixpoint(); 2437e8d8bef9SDimitry Andric 2438e8d8bef9SDimitry Andric // We only initialize this AA for getters, so we need to know which ICV it 2439e8d8bef9SDimitry Andric // gets. 2440e8d8bef9SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 2441e8d8bef9SDimitry Andric for (InternalControlVar ICV : TrackableICVs) { 2442e8d8bef9SDimitry Andric auto ICVInfo = OMPInfoCache.ICVs[ICV]; 2443e8d8bef9SDimitry Andric auto &Getter = OMPInfoCache.RFIs[ICVInfo.Getter]; 2444e8d8bef9SDimitry Andric if (Getter.Declaration == getAssociatedFunction()) { 2445e8d8bef9SDimitry Andric AssociatedICV = ICVInfo.Kind; 2446e8d8bef9SDimitry Andric return; 2447e8d8bef9SDimitry Andric } 2448e8d8bef9SDimitry Andric } 2449e8d8bef9SDimitry Andric 2450e8d8bef9SDimitry Andric /// Unknown ICV. 2451e8d8bef9SDimitry Andric indicatePessimisticFixpoint(); 2452e8d8bef9SDimitry Andric } 2453e8d8bef9SDimitry Andric 2454e8d8bef9SDimitry Andric ChangeStatus manifest(Attributor &A) override { 245581ad6265SDimitry Andric if (!ReplVal || !*ReplVal) 2456e8d8bef9SDimitry Andric return ChangeStatus::UNCHANGED; 2457e8d8bef9SDimitry Andric 245881ad6265SDimitry Andric A.changeAfterManifest(IRPosition::inst(*getCtxI()), **ReplVal); 2459e8d8bef9SDimitry Andric A.deleteAfterManifest(*getCtxI()); 2460e8d8bef9SDimitry Andric 2461e8d8bef9SDimitry Andric return ChangeStatus::CHANGED; 2462e8d8bef9SDimitry Andric } 2463e8d8bef9SDimitry Andric 2464e8d8bef9SDimitry Andric // FIXME: come up with better string. 2465e8d8bef9SDimitry Andric const std::string getAsStr() const override { return "ICVTrackerCallSite"; } 2466e8d8bef9SDimitry Andric 2467e8d8bef9SDimitry Andric // FIXME: come up with some stats. 
2468e8d8bef9SDimitry Andric void trackStatistics() const override {} 2469e8d8bef9SDimitry Andric 2470e8d8bef9SDimitry Andric InternalControlVar AssociatedICV; 2471bdd1243dSDimitry Andric std::optional<Value *> ReplVal; 2472e8d8bef9SDimitry Andric 2473e8d8bef9SDimitry Andric ChangeStatus updateImpl(Attributor &A) override { 2474e8d8bef9SDimitry Andric const auto &ICVTrackingAA = A.getAAFor<AAICVTracker>( 2475fe6060f1SDimitry Andric *this, IRPosition::function(*getAnchorScope()), DepClassTy::REQUIRED); 2476e8d8bef9SDimitry Andric 2477e8d8bef9SDimitry Andric // We don't have any information, so we assume it changes the ICV. 2478e8d8bef9SDimitry Andric if (!ICVTrackingAA.isAssumedTracked()) 2479e8d8bef9SDimitry Andric return indicatePessimisticFixpoint(); 2480e8d8bef9SDimitry Andric 2481bdd1243dSDimitry Andric std::optional<Value *> NewReplVal = 2482e8d8bef9SDimitry Andric ICVTrackingAA.getReplacementValue(AssociatedICV, getCtxI(), A); 2483e8d8bef9SDimitry Andric 2484e8d8bef9SDimitry Andric if (ReplVal == NewReplVal) 2485e8d8bef9SDimitry Andric return ChangeStatus::UNCHANGED; 2486e8d8bef9SDimitry Andric 2487e8d8bef9SDimitry Andric ReplVal = NewReplVal; 2488e8d8bef9SDimitry Andric return ChangeStatus::CHANGED; 2489e8d8bef9SDimitry Andric } 2490e8d8bef9SDimitry Andric 2491e8d8bef9SDimitry Andric // Return the value with which associated value can be replaced for specific 2492e8d8bef9SDimitry Andric // \p ICV. 2493bdd1243dSDimitry Andric std::optional<Value *> 2494e8d8bef9SDimitry Andric getUniqueReplacementValue(InternalControlVar ICV) const override { 2495e8d8bef9SDimitry Andric return ReplVal; 2496e8d8bef9SDimitry Andric } 2497e8d8bef9SDimitry Andric }; 2498e8d8bef9SDimitry Andric 2499e8d8bef9SDimitry Andric struct AAICVTrackerCallSiteReturned : AAICVTracker { 2500e8d8bef9SDimitry Andric AAICVTrackerCallSiteReturned(const IRPosition &IRP, Attributor &A) 2501e8d8bef9SDimitry Andric : AAICVTracker(IRP, A) {} 2502e8d8bef9SDimitry Andric 2503e8d8bef9SDimitry Andric // FIXME: come up with better string. 2504e8d8bef9SDimitry Andric const std::string getAsStr() const override { 2505e8d8bef9SDimitry Andric return "ICVTrackerCallSiteReturned"; 2506e8d8bef9SDimitry Andric } 2507e8d8bef9SDimitry Andric 2508e8d8bef9SDimitry Andric // FIXME: come up with some stats. 2509e8d8bef9SDimitry Andric void trackStatistics() const override {} 2510e8d8bef9SDimitry Andric 2511e8d8bef9SDimitry Andric /// We don't manifest anything for this AA. 2512e8d8bef9SDimitry Andric ChangeStatus manifest(Attributor &A) override { 2513e8d8bef9SDimitry Andric return ChangeStatus::UNCHANGED; 2514e8d8bef9SDimitry Andric } 2515e8d8bef9SDimitry Andric 2516e8d8bef9SDimitry Andric // Map of ICV to their values at specific program point. 2517bdd1243dSDimitry Andric EnumeratedArray<std::optional<Value *>, InternalControlVar, 2518e8d8bef9SDimitry Andric InternalControlVar::ICV___last> 2519e8d8bef9SDimitry Andric ICVReplacementValuesMap; 2520e8d8bef9SDimitry Andric 2521e8d8bef9SDimitry Andric /// Return the value with which associated value can be replaced for specific 2522e8d8bef9SDimitry Andric /// \p ICV. 
2523bdd1243dSDimitry Andric std::optional<Value *> 2524e8d8bef9SDimitry Andric getUniqueReplacementValue(InternalControlVar ICV) const override { 2525e8d8bef9SDimitry Andric return ICVReplacementValuesMap[ICV]; 2526e8d8bef9SDimitry Andric } 2527e8d8bef9SDimitry Andric 2528e8d8bef9SDimitry Andric ChangeStatus updateImpl(Attributor &A) override { 2529e8d8bef9SDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED; 2530e8d8bef9SDimitry Andric const auto &ICVTrackingAA = A.getAAFor<AAICVTracker>( 2531fe6060f1SDimitry Andric *this, IRPosition::returned(*getAssociatedFunction()), 2532fe6060f1SDimitry Andric DepClassTy::REQUIRED); 2533e8d8bef9SDimitry Andric 2534e8d8bef9SDimitry Andric // We don't have any information, so we assume it changes the ICV. 2535e8d8bef9SDimitry Andric if (!ICVTrackingAA.isAssumedTracked()) 2536e8d8bef9SDimitry Andric return indicatePessimisticFixpoint(); 2537e8d8bef9SDimitry Andric 2538e8d8bef9SDimitry Andric for (InternalControlVar ICV : TrackableICVs) { 2539bdd1243dSDimitry Andric std::optional<Value *> &ReplVal = ICVReplacementValuesMap[ICV]; 2540bdd1243dSDimitry Andric std::optional<Value *> NewReplVal = 2541e8d8bef9SDimitry Andric ICVTrackingAA.getUniqueReplacementValue(ICV); 2542e8d8bef9SDimitry Andric 2543e8d8bef9SDimitry Andric if (ReplVal == NewReplVal) 2544e8d8bef9SDimitry Andric continue; 2545e8d8bef9SDimitry Andric 2546e8d8bef9SDimitry Andric ReplVal = NewReplVal; 2547e8d8bef9SDimitry Andric Changed = ChangeStatus::CHANGED; 2548e8d8bef9SDimitry Andric } 2549e8d8bef9SDimitry Andric return Changed; 2550e8d8bef9SDimitry Andric } 25515ffd83dbSDimitry Andric }; 2552fe6060f1SDimitry Andric 2553fe6060f1SDimitry Andric struct AAExecutionDomainFunction : public AAExecutionDomain { 2554fe6060f1SDimitry Andric AAExecutionDomainFunction(const IRPosition &IRP, Attributor &A) 2555fe6060f1SDimitry Andric : AAExecutionDomain(IRP, A) {} 2556fe6060f1SDimitry Andric 2557bdd1243dSDimitry Andric ~AAExecutionDomainFunction() { 2558bdd1243dSDimitry Andric delete RPOT; 2559bdd1243dSDimitry Andric } 2560bdd1243dSDimitry Andric 2561bdd1243dSDimitry Andric void initialize(Attributor &A) override { 2562bdd1243dSDimitry Andric if (getAnchorScope()->isDeclaration()) { 2563bdd1243dSDimitry Andric indicatePessimisticFixpoint(); 2564bdd1243dSDimitry Andric return; 2565bdd1243dSDimitry Andric } 2566bdd1243dSDimitry Andric RPOT = new ReversePostOrderTraversal<Function *>(getAnchorScope()); 2567bdd1243dSDimitry Andric } 2568bdd1243dSDimitry Andric 2569fe6060f1SDimitry Andric const std::string getAsStr() const override { 2570bdd1243dSDimitry Andric unsigned TotalBlocks = 0, InitialThreadBlocks = 0; 2571bdd1243dSDimitry Andric for (auto &It : BEDMap) { 2572bdd1243dSDimitry Andric TotalBlocks++; 2573bdd1243dSDimitry Andric InitialThreadBlocks += It.getSecond().IsExecutedByInitialThreadOnly; 2574bdd1243dSDimitry Andric } 2575bdd1243dSDimitry Andric return "[AAExecutionDomain] " + std::to_string(InitialThreadBlocks) + "/" + 2576bdd1243dSDimitry Andric std::to_string(TotalBlocks) + " executed by initial thread only"; 2577fe6060f1SDimitry Andric } 2578fe6060f1SDimitry Andric 2579fe6060f1SDimitry Andric /// See AbstractAttribute::trackStatistics(). 
2580fe6060f1SDimitry Andric void trackStatistics() const override {}
2581fe6060f1SDimitry Andric
2582fe6060f1SDimitry Andric ChangeStatus manifest(Attributor &A) override {
2583fe6060f1SDimitry Andric LLVM_DEBUG({
2584bdd1243dSDimitry Andric for (const BasicBlock &BB : *getAnchorScope()) {
2585bdd1243dSDimitry Andric if (!isExecutedByInitialThreadOnly(BB))
2586bdd1243dSDimitry Andric continue;
2587fe6060f1SDimitry Andric dbgs() << TAG << " Basic block @" << getAnchorScope()->getName() << " "
2588bdd1243dSDimitry Andric << BB.getName() << " is executed by a single thread.\n";
2589bdd1243dSDimitry Andric }
2590fe6060f1SDimitry Andric });
2591bdd1243dSDimitry Andric
2592bdd1243dSDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED;
2593bdd1243dSDimitry Andric
2594bdd1243dSDimitry Andric if (DisableOpenMPOptBarrierElimination)
2595bdd1243dSDimitry Andric return Changed;
2596bdd1243dSDimitry Andric
2597bdd1243dSDimitry Andric SmallPtrSet<CallBase *, 16> DeletedBarriers;
2598bdd1243dSDimitry Andric auto HandleAlignedBarrier = [&](CallBase *CB) {
2599bdd1243dSDimitry Andric const ExecutionDomainTy &ED = CEDMap[CB];
2600bdd1243dSDimitry Andric if (!ED.IsReachedFromAlignedBarrierOnly ||
2601bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect)
2602bdd1243dSDimitry Andric return;
2603bdd1243dSDimitry Andric
2604bdd1243dSDimitry Andric // We can remove this barrier, if it is one, or all aligned barriers
2605bdd1243dSDimitry Andric // reaching the kernel end. In the latter case we can transitively work
2606bdd1243dSDimitry Andric // our way back until we find a barrier that guards a side-effect if we
2607bdd1243dSDimitry Andric // are dealing with the kernel end here.
2608bdd1243dSDimitry Andric if (CB) {
2609bdd1243dSDimitry Andric DeletedBarriers.insert(CB);
2610bdd1243dSDimitry Andric A.deleteAfterManifest(*CB);
2611bdd1243dSDimitry Andric ++NumBarriersEliminated;
2612bdd1243dSDimitry Andric Changed = ChangeStatus::CHANGED;
2613bdd1243dSDimitry Andric } else if (!ED.AlignedBarriers.empty()) {
2614bdd1243dSDimitry Andric NumBarriersEliminated += ED.AlignedBarriers.size();
2615bdd1243dSDimitry Andric Changed = ChangeStatus::CHANGED;
2616bdd1243dSDimitry Andric SmallVector<CallBase *> Worklist(ED.AlignedBarriers.begin(),
2617bdd1243dSDimitry Andric ED.AlignedBarriers.end());
2618bdd1243dSDimitry Andric SmallSetVector<CallBase *, 16> Visited;
2619bdd1243dSDimitry Andric while (!Worklist.empty()) {
2620bdd1243dSDimitry Andric CallBase *LastCB = Worklist.pop_back_val();
2621bdd1243dSDimitry Andric if (!Visited.insert(LastCB))
2622bdd1243dSDimitry Andric continue;
2623bdd1243dSDimitry Andric if (!DeletedBarriers.count(LastCB)) {
2624bdd1243dSDimitry Andric A.deleteAfterManifest(*LastCB);
2625bdd1243dSDimitry Andric continue;
2626bdd1243dSDimitry Andric }
2627bdd1243dSDimitry Andric // The final aligned barrier (LastCB) reaching the kernel end was
2628bdd1243dSDimitry Andric // removed already. This means we can go one step further and remove
2629bdd1243dSDimitry Andric // the barriers encountered last before (LastCB).
2630bdd1243dSDimitry Andric const ExecutionDomainTy &LastED = CEDMap[LastCB];
2631bdd1243dSDimitry Andric Worklist.append(LastED.AlignedBarriers.begin(),
2632bdd1243dSDimitry Andric LastED.AlignedBarriers.end());
2633bdd1243dSDimitry Andric }
2634fe6060f1SDimitry Andric }
2635fe6060f1SDimitry Andric
2636bdd1243dSDimitry Andric // If we actually eliminated a barrier we need to eliminate the associated
2637bdd1243dSDimitry Andric // llvm.assumes as well to avoid creating UB.
2638bdd1243dSDimitry Andric if (!ED.EncounteredAssumes.empty() && (CB || !ED.AlignedBarriers.empty())) 2639bdd1243dSDimitry Andric for (auto *AssumeCB : ED.EncounteredAssumes) 2640bdd1243dSDimitry Andric A.deleteAfterManifest(*AssumeCB); 2641fe6060f1SDimitry Andric }; 2642fe6060f1SDimitry Andric 2643bdd1243dSDimitry Andric for (auto *CB : AlignedBarriers) 2644bdd1243dSDimitry Andric HandleAlignedBarrier(CB); 2645fe6060f1SDimitry Andric 2646fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 2647bdd1243dSDimitry Andric // Handle the "kernel end barrier" for kernels too. 2648bdd1243dSDimitry Andric if (OMPInfoCache.Kernels.count(getAnchorScope())) 2649bdd1243dSDimitry Andric HandleAlignedBarrier(nullptr); 2650bdd1243dSDimitry Andric 2651bdd1243dSDimitry Andric return Changed; 2652bdd1243dSDimitry Andric } 2653bdd1243dSDimitry Andric 2654bdd1243dSDimitry Andric /// Merge barrier and assumption information from \p PredED into the successor 2655bdd1243dSDimitry Andric /// \p ED. 2656bdd1243dSDimitry Andric void 2657bdd1243dSDimitry Andric mergeInPredecessorBarriersAndAssumptions(Attributor &A, ExecutionDomainTy &ED, 2658bdd1243dSDimitry Andric const ExecutionDomainTy &PredED); 2659bdd1243dSDimitry Andric 2660bdd1243dSDimitry Andric /// Merge all information from \p PredED into the successor \p ED. If 2661bdd1243dSDimitry Andric /// \p InitialEdgeOnly is set, only the initial edge will enter the block 2662bdd1243dSDimitry Andric /// represented by \p ED from this predecessor. 2663bdd1243dSDimitry Andric void mergeInPredecessor(Attributor &A, ExecutionDomainTy &ED, 2664bdd1243dSDimitry Andric const ExecutionDomainTy &PredED, 2665bdd1243dSDimitry Andric bool InitialEdgeOnly = false); 2666bdd1243dSDimitry Andric 2667bdd1243dSDimitry Andric /// Accumulate information for the entry block in \p EntryBBED. 2668bdd1243dSDimitry Andric void handleEntryBB(Attributor &A, ExecutionDomainTy &EntryBBED); 2669bdd1243dSDimitry Andric 2670bdd1243dSDimitry Andric /// See AbstractAttribute::updateImpl. 2671bdd1243dSDimitry Andric ChangeStatus updateImpl(Attributor &A) override; 2672bdd1243dSDimitry Andric 2673bdd1243dSDimitry Andric /// Query interface, see AAExecutionDomain 2674bdd1243dSDimitry Andric ///{ 2675bdd1243dSDimitry Andric bool isExecutedByInitialThreadOnly(const BasicBlock &BB) const override { 2676bdd1243dSDimitry Andric if (!isValidState()) 2677bdd1243dSDimitry Andric return false; 2678bdd1243dSDimitry Andric return BEDMap.lookup(&BB).IsExecutedByInitialThreadOnly; 2679bdd1243dSDimitry Andric } 2680bdd1243dSDimitry Andric 2681bdd1243dSDimitry Andric bool isExecutedInAlignedRegion(Attributor &A, 2682bdd1243dSDimitry Andric const Instruction &I) const override { 2683*1ac55f4cSDimitry Andric assert(I.getFunction() == getAnchorScope() && 2684*1ac55f4cSDimitry Andric "Instruction is out of scope!"); 2685*1ac55f4cSDimitry Andric if (!isValidState()) 2686bdd1243dSDimitry Andric return false; 2687bdd1243dSDimitry Andric 2688bdd1243dSDimitry Andric const Instruction *CurI; 2689bdd1243dSDimitry Andric 2690bdd1243dSDimitry Andric // Check forward until a call or the block end is reached. 
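// The forward scan below establishes that, starting at I, we only reach
// aligned barriers (or calls known to reach only aligned barriers) before
// the end of the block.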
2691bdd1243dSDimitry Andric CurI = &I;
2692bdd1243dSDimitry Andric do {
2693bdd1243dSDimitry Andric auto *CB = dyn_cast<CallBase>(CurI);
2694bdd1243dSDimitry Andric if (!CB)
2695bdd1243dSDimitry Andric continue;
2696*1ac55f4cSDimitry Andric if (CB != &I && AlignedBarriers.contains(const_cast<CallBase *>(CB))) {
2697*1ac55f4cSDimitry Andric break;
2698*1ac55f4cSDimitry Andric }
2699bdd1243dSDimitry Andric const auto &It = CEDMap.find(CB);
2700bdd1243dSDimitry Andric if (It == CEDMap.end())
2701bdd1243dSDimitry Andric continue;
2702*1ac55f4cSDimitry Andric if (!It->getSecond().IsReachingAlignedBarrierOnly)
2703bdd1243dSDimitry Andric return false;
2704*1ac55f4cSDimitry Andric break;
2705bdd1243dSDimitry Andric } while ((CurI = CurI->getNextNonDebugInstruction()));
2706bdd1243dSDimitry Andric
2707*1ac55f4cSDimitry Andric if (!CurI && !BEDMap.lookup(I.getParent()).IsReachingAlignedBarrierOnly)
2708bdd1243dSDimitry Andric return false;
2709bdd1243dSDimitry Andric
2710bdd1243dSDimitry Andric // Check backward until a call or the block beginning is reached.
2711bdd1243dSDimitry Andric CurI = &I;
2712bdd1243dSDimitry Andric do {
2713bdd1243dSDimitry Andric auto *CB = dyn_cast<CallBase>(CurI);
2714bdd1243dSDimitry Andric if (!CB)
2715bdd1243dSDimitry Andric continue;
2716*1ac55f4cSDimitry Andric if (CB != &I && AlignedBarriers.contains(const_cast<CallBase *>(CB))) {
2717*1ac55f4cSDimitry Andric break;
2718*1ac55f4cSDimitry Andric }
2719bdd1243dSDimitry Andric const auto &It = CEDMap.find(CB);
2720bdd1243dSDimitry Andric if (It == CEDMap.end())
2721bdd1243dSDimitry Andric continue;
2722bdd1243dSDimitry Andric if (!AA::isNoSyncInst(A, *CB, *this)) {
2723*1ac55f4cSDimitry Andric if (It->getSecond().IsReachedFromAlignedBarrierOnly) {
2724bdd1243dSDimitry Andric break;
2725*1ac55f4cSDimitry Andric }
2726bdd1243dSDimitry Andric return false;
2727bdd1243dSDimitry Andric }
2728bdd1243dSDimitry Andric
2729bdd1243dSDimitry Andric Function *Callee = CB->getCalledFunction();
2730bdd1243dSDimitry Andric if (!Callee || Callee->isDeclaration())
2731bdd1243dSDimitry Andric return false;
2732bdd1243dSDimitry Andric const auto &EDAA = A.getAAFor<AAExecutionDomain>(
2733bdd1243dSDimitry Andric *this, IRPosition::function(*Callee), DepClassTy::OPTIONAL);
2734bdd1243dSDimitry Andric if (!EDAA.getState().isValidState())
2735bdd1243dSDimitry Andric return false;
2736bdd1243dSDimitry Andric if (!EDAA.getFunctionExecutionDomain().IsReachedFromAlignedBarrierOnly)
2737bdd1243dSDimitry Andric return false;
2738bdd1243dSDimitry Andric break;
2739bdd1243dSDimitry Andric } while ((CurI = CurI->getPrevNonDebugInstruction()));
2740bdd1243dSDimitry Andric
2741bdd1243dSDimitry Andric if (!CurI &&
2742bdd1243dSDimitry Andric !llvm::all_of(
2743bdd1243dSDimitry Andric predecessors(I.getParent()), [&](const BasicBlock *PredBB) {
2744bdd1243dSDimitry Andric return BEDMap.lookup(PredBB).IsReachedFromAlignedBarrierOnly;
2745bdd1243dSDimitry Andric })) {
2746bdd1243dSDimitry Andric return false;
2747bdd1243dSDimitry Andric }
2748bdd1243dSDimitry Andric
2749bdd1243dSDimitry Andric // In neither traversal did we find anything but aligned barriers.
2750bdd1243dSDimitry Andric return true; 2751bdd1243dSDimitry Andric } 2752bdd1243dSDimitry Andric 2753bdd1243dSDimitry Andric ExecutionDomainTy getExecutionDomain(const BasicBlock &BB) const override { 2754bdd1243dSDimitry Andric assert(isValidState() && 2755bdd1243dSDimitry Andric "No request should be made against an invalid state!"); 2756bdd1243dSDimitry Andric return BEDMap.lookup(&BB); 2757bdd1243dSDimitry Andric } 2758bdd1243dSDimitry Andric ExecutionDomainTy getExecutionDomain(const CallBase &CB) const override { 2759bdd1243dSDimitry Andric assert(isValidState() && 2760bdd1243dSDimitry Andric "No request should be made against an invalid state!"); 2761bdd1243dSDimitry Andric return CEDMap.lookup(&CB); 2762bdd1243dSDimitry Andric } 2763bdd1243dSDimitry Andric ExecutionDomainTy getFunctionExecutionDomain() const override { 2764bdd1243dSDimitry Andric assert(isValidState() && 2765bdd1243dSDimitry Andric "No request should be made against an invalid state!"); 2766bdd1243dSDimitry Andric return BEDMap.lookup(nullptr); 2767bdd1243dSDimitry Andric } 2768bdd1243dSDimitry Andric ///} 2769fe6060f1SDimitry Andric 2770349cc55cSDimitry Andric // Check if the edge into the successor block contains a condition that only 2771349cc55cSDimitry Andric // lets the main thread execute it. 2772bdd1243dSDimitry Andric static bool isInitialThreadOnlyEdge(Attributor &A, BranchInst *Edge, 2773bdd1243dSDimitry Andric BasicBlock &SuccessorBB) { 2774fe6060f1SDimitry Andric if (!Edge || !Edge->isConditional()) 2775fe6060f1SDimitry Andric return false; 2776bdd1243dSDimitry Andric if (Edge->getSuccessor(0) != &SuccessorBB) 2777fe6060f1SDimitry Andric return false; 2778fe6060f1SDimitry Andric 2779fe6060f1SDimitry Andric auto *Cmp = dyn_cast<CmpInst>(Edge->getCondition()); 2780fe6060f1SDimitry Andric if (!Cmp || !Cmp->isTrueWhenEqual() || !Cmp->isEquality()) 2781fe6060f1SDimitry Andric return false; 2782fe6060f1SDimitry Andric 2783fe6060f1SDimitry Andric ConstantInt *C = dyn_cast<ConstantInt>(Cmp->getOperand(1)); 2784fe6060f1SDimitry Andric if (!C) 2785fe6060f1SDimitry Andric return false; 2786fe6060f1SDimitry Andric 2787fe6060f1SDimitry Andric // Match: -1 == __kmpc_target_init (for non-SPMD kernels only!) 2788fe6060f1SDimitry Andric if (C->isAllOnesValue()) { 2789fe6060f1SDimitry Andric auto *CB = dyn_cast<CallBase>(Cmp->getOperand(0)); 2790bdd1243dSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 2791bdd1243dSDimitry Andric auto &RFI = OMPInfoCache.RFIs[OMPRTL___kmpc_target_init]; 2792fe6060f1SDimitry Andric CB = CB ? 
OpenMPOpt::getCallIfRegularCall(*CB, &RFI) : nullptr; 2793fe6060f1SDimitry Andric if (!CB) 2794fe6060f1SDimitry Andric return false; 2795349cc55cSDimitry Andric const int InitModeArgNo = 1; 2796349cc55cSDimitry Andric auto *ModeCI = dyn_cast<ConstantInt>(CB->getOperand(InitModeArgNo)); 2797349cc55cSDimitry Andric return ModeCI && (ModeCI->getSExtValue() & OMP_TGT_EXEC_MODE_GENERIC); 2798349cc55cSDimitry Andric } 2799349cc55cSDimitry Andric 2800349cc55cSDimitry Andric if (C->isZero()) { 2801349cc55cSDimitry Andric // Match: 0 == llvm.nvvm.read.ptx.sreg.tid.x() 2802349cc55cSDimitry Andric if (auto *II = dyn_cast<IntrinsicInst>(Cmp->getOperand(0))) 2803349cc55cSDimitry Andric if (II->getIntrinsicID() == Intrinsic::nvvm_read_ptx_sreg_tid_x) 2804349cc55cSDimitry Andric return true; 2805349cc55cSDimitry Andric 2806349cc55cSDimitry Andric // Match: 0 == llvm.amdgcn.workitem.id.x() 2807349cc55cSDimitry Andric if (auto *II = dyn_cast<IntrinsicInst>(Cmp->getOperand(0))) 2808349cc55cSDimitry Andric if (II->getIntrinsicID() == Intrinsic::amdgcn_workitem_id_x) 2809349cc55cSDimitry Andric return true; 2810fe6060f1SDimitry Andric } 2811fe6060f1SDimitry Andric 2812fe6060f1SDimitry Andric return false; 2813fe6060f1SDimitry Andric }; 2814fe6060f1SDimitry Andric 2815bdd1243dSDimitry Andric /// Mapping containing information per block. 2816bdd1243dSDimitry Andric DenseMap<const BasicBlock *, ExecutionDomainTy> BEDMap; 2817bdd1243dSDimitry Andric DenseMap<const CallBase *, ExecutionDomainTy> CEDMap; 2818bdd1243dSDimitry Andric SmallSetVector<CallBase *, 16> AlignedBarriers; 2819fe6060f1SDimitry Andric 2820bdd1243dSDimitry Andric ReversePostOrderTraversal<Function *> *RPOT = nullptr; 2821fe6060f1SDimitry Andric }; 2822fe6060f1SDimitry Andric 2823bdd1243dSDimitry Andric void AAExecutionDomainFunction::mergeInPredecessorBarriersAndAssumptions( 2824bdd1243dSDimitry Andric Attributor &A, ExecutionDomainTy &ED, const ExecutionDomainTy &PredED) { 2825bdd1243dSDimitry Andric for (auto *EA : PredED.EncounteredAssumes) 2826bdd1243dSDimitry Andric ED.addAssumeInst(A, *EA); 2827bdd1243dSDimitry Andric 2828bdd1243dSDimitry Andric for (auto *AB : PredED.AlignedBarriers) 2829bdd1243dSDimitry Andric ED.addAlignedBarrier(A, *AB); 2830fe6060f1SDimitry Andric } 2831fe6060f1SDimitry Andric 2832bdd1243dSDimitry Andric void AAExecutionDomainFunction::mergeInPredecessor( 2833bdd1243dSDimitry Andric Attributor &A, ExecutionDomainTy &ED, const ExecutionDomainTy &PredED, 2834bdd1243dSDimitry Andric bool InitialEdgeOnly) { 2835bdd1243dSDimitry Andric ED.IsExecutedByInitialThreadOnly = 2836bdd1243dSDimitry Andric InitialEdgeOnly || (PredED.IsExecutedByInitialThreadOnly && 2837bdd1243dSDimitry Andric ED.IsExecutedByInitialThreadOnly); 2838bdd1243dSDimitry Andric 2839bdd1243dSDimitry Andric ED.IsReachedFromAlignedBarrierOnly = ED.IsReachedFromAlignedBarrierOnly && 2840bdd1243dSDimitry Andric PredED.IsReachedFromAlignedBarrierOnly; 2841bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect = 2842bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect | PredED.EncounteredNonLocalSideEffect; 2843bdd1243dSDimitry Andric if (ED.IsReachedFromAlignedBarrierOnly) 2844bdd1243dSDimitry Andric mergeInPredecessorBarriersAndAssumptions(A, ED, PredED); 2845bdd1243dSDimitry Andric else 2846bdd1243dSDimitry Andric ED.clearAssumeInstAndAlignedBarriers(); 2847bdd1243dSDimitry Andric } 2848bdd1243dSDimitry Andric 2849bdd1243dSDimitry Andric void AAExecutionDomainFunction::handleEntryBB(Attributor &A, 2850bdd1243dSDimitry Andric ExecutionDomainTy 
&EntryBBED) { 2851bdd1243dSDimitry Andric SmallVector<ExecutionDomainTy> PredExecDomains; 2852bdd1243dSDimitry Andric auto PredForCallSite = [&](AbstractCallSite ACS) { 2853bdd1243dSDimitry Andric const auto &EDAA = A.getAAFor<AAExecutionDomain>( 2854bdd1243dSDimitry Andric *this, IRPosition::function(*ACS.getInstruction()->getFunction()), 2855bdd1243dSDimitry Andric DepClassTy::OPTIONAL); 2856bdd1243dSDimitry Andric if (!EDAA.getState().isValidState()) 2857bdd1243dSDimitry Andric return false; 2858bdd1243dSDimitry Andric PredExecDomains.emplace_back( 2859bdd1243dSDimitry Andric EDAA.getExecutionDomain(*cast<CallBase>(ACS.getInstruction()))); 2860bdd1243dSDimitry Andric return true; 2861bdd1243dSDimitry Andric }; 2862bdd1243dSDimitry Andric 2863bdd1243dSDimitry Andric bool AllCallSitesKnown; 2864bdd1243dSDimitry Andric if (A.checkForAllCallSites(PredForCallSite, *this, 2865bdd1243dSDimitry Andric /* RequiresAllCallSites */ true, 2866bdd1243dSDimitry Andric AllCallSitesKnown)) { 2867bdd1243dSDimitry Andric for (const auto &PredED : PredExecDomains) 2868bdd1243dSDimitry Andric mergeInPredecessor(A, EntryBBED, PredED); 2869bdd1243dSDimitry Andric 2870bdd1243dSDimitry Andric } else { 2871bdd1243dSDimitry Andric // We could not find all predecessors, so this is either a kernel or a 2872bdd1243dSDimitry Andric // function with external linkage (or with some other weird uses). 2873bdd1243dSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 2874bdd1243dSDimitry Andric if (OMPInfoCache.Kernels.count(getAnchorScope())) { 2875bdd1243dSDimitry Andric EntryBBED.IsExecutedByInitialThreadOnly = false; 2876bdd1243dSDimitry Andric EntryBBED.IsReachedFromAlignedBarrierOnly = true; 2877bdd1243dSDimitry Andric EntryBBED.EncounteredNonLocalSideEffect = false; 2878bdd1243dSDimitry Andric } else { 2879bdd1243dSDimitry Andric EntryBBED.IsExecutedByInitialThreadOnly = false; 2880bdd1243dSDimitry Andric EntryBBED.IsReachedFromAlignedBarrierOnly = false; 2881bdd1243dSDimitry Andric EntryBBED.EncounteredNonLocalSideEffect = true; 2882bdd1243dSDimitry Andric } 2883bdd1243dSDimitry Andric } 2884bdd1243dSDimitry Andric 2885bdd1243dSDimitry Andric auto &FnED = BEDMap[nullptr]; 2886bdd1243dSDimitry Andric FnED.IsReachingAlignedBarrierOnly &= 2887bdd1243dSDimitry Andric EntryBBED.IsReachedFromAlignedBarrierOnly; 2888bdd1243dSDimitry Andric } 2889bdd1243dSDimitry Andric 2890bdd1243dSDimitry Andric ChangeStatus AAExecutionDomainFunction::updateImpl(Attributor &A) { 2891bdd1243dSDimitry Andric 2892bdd1243dSDimitry Andric bool Changed = false; 2893bdd1243dSDimitry Andric 2894bdd1243dSDimitry Andric // Helper to deal with an aligned barrier encountered during the forward 2895bdd1243dSDimitry Andric // traversal. \p CB is the aligned barrier, \p ED is the execution domain when 2896bdd1243dSDimitry Andric // it was encountered. 2897bdd1243dSDimitry Andric auto HandleAlignedBarrier = [&](CallBase *CB, ExecutionDomainTy &ED) { 2898bdd1243dSDimitry Andric if (CB) 2899bdd1243dSDimitry Andric Changed |= AlignedBarriers.insert(CB); 2900bdd1243dSDimitry Andric // First, update the barrier ED kept in the separate CEDMap. 2901bdd1243dSDimitry Andric auto &CallED = CEDMap[CB]; 2902bdd1243dSDimitry Andric mergeInPredecessor(A, CallED, ED); 2903bdd1243dSDimitry Andric // Next adjust the ED we use for the traversal. 
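// After an aligned barrier all threads of the team are synchronized, so
// side effects seen before it no longer matter for the code that follows.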
2904bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect = false; 2905bdd1243dSDimitry Andric ED.IsReachedFromAlignedBarrierOnly = true; 2906bdd1243dSDimitry Andric // Aligned barrier collection has to come last. 2907bdd1243dSDimitry Andric ED.clearAssumeInstAndAlignedBarriers(); 2908bdd1243dSDimitry Andric if (CB) 2909bdd1243dSDimitry Andric ED.addAlignedBarrier(A, *CB); 2910bdd1243dSDimitry Andric }; 2911bdd1243dSDimitry Andric 2912bdd1243dSDimitry Andric auto &LivenessAA = 2913bdd1243dSDimitry Andric A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL); 2914bdd1243dSDimitry Andric 2915bdd1243dSDimitry Andric // Set \p R to \V and report true if that changed \p R. 2916bdd1243dSDimitry Andric auto SetAndRecord = [&](bool &R, bool V) { 2917bdd1243dSDimitry Andric bool Eq = (R == V); 2918bdd1243dSDimitry Andric R = V; 2919bdd1243dSDimitry Andric return !Eq; 2920bdd1243dSDimitry Andric }; 2921bdd1243dSDimitry Andric 2922bdd1243dSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 2923bdd1243dSDimitry Andric 2924bdd1243dSDimitry Andric Function *F = getAnchorScope(); 2925bdd1243dSDimitry Andric BasicBlock &EntryBB = F->getEntryBlock(); 2926bdd1243dSDimitry Andric bool IsKernel = OMPInfoCache.Kernels.count(F); 2927bdd1243dSDimitry Andric 2928bdd1243dSDimitry Andric SmallVector<Instruction *> SyncInstWorklist; 2929bdd1243dSDimitry Andric for (auto &RIt : *RPOT) { 2930bdd1243dSDimitry Andric BasicBlock &BB = *RIt; 2931bdd1243dSDimitry Andric 2932bdd1243dSDimitry Andric bool IsEntryBB = &BB == &EntryBB; 2933bdd1243dSDimitry Andric // TODO: We use local reasoning since we don't have a divergence analysis 2934bdd1243dSDimitry Andric // running as well. We could basically allow uniform branches here. 2935bdd1243dSDimitry Andric bool AlignedBarrierLastInBlock = IsEntryBB && IsKernel; 2936bdd1243dSDimitry Andric ExecutionDomainTy ED; 2937bdd1243dSDimitry Andric // Propagate "incoming edges" into information about this block. 2938bdd1243dSDimitry Andric if (IsEntryBB) { 2939bdd1243dSDimitry Andric handleEntryBB(A, ED); 2940bdd1243dSDimitry Andric } else { 2941bdd1243dSDimitry Andric // For live non-entry blocks we only propagate 2942bdd1243dSDimitry Andric // information via live edges. 2943bdd1243dSDimitry Andric if (LivenessAA.isAssumedDead(&BB)) 2944bdd1243dSDimitry Andric continue; 2945bdd1243dSDimitry Andric 2946bdd1243dSDimitry Andric for (auto *PredBB : predecessors(&BB)) { 2947bdd1243dSDimitry Andric if (LivenessAA.isEdgeDead(PredBB, &BB)) 2948bdd1243dSDimitry Andric continue; 2949bdd1243dSDimitry Andric bool InitialEdgeOnly = isInitialThreadOnlyEdge( 2950bdd1243dSDimitry Andric A, dyn_cast<BranchInst>(PredBB->getTerminator()), BB); 2951bdd1243dSDimitry Andric mergeInPredecessor(A, ED, BEDMap[PredBB], InitialEdgeOnly); 2952bdd1243dSDimitry Andric } 2953bdd1243dSDimitry Andric } 2954bdd1243dSDimitry Andric 2955bdd1243dSDimitry Andric // Now we traverse the block, accumulate effects in ED and attach 2956bdd1243dSDimitry Andric // information to calls. 2957bdd1243dSDimitry Andric for (Instruction &I : BB) { 2958bdd1243dSDimitry Andric bool UsedAssumedInformation; 2959bdd1243dSDimitry Andric if (A.isAssumedDead(I, *this, &LivenessAA, UsedAssumedInformation, 2960bdd1243dSDimitry Andric /* CheckBBLivenessOnly */ false, DepClassTy::OPTIONAL, 2961bdd1243dSDimitry Andric /* CheckForDeadStore */ true)) 2962bdd1243dSDimitry Andric continue; 2963bdd1243dSDimitry Andric 2964bdd1243dSDimitry Andric // Asummes and "assume-like" (dbg, lifetime, ...) 
are handled first, the 2965bdd1243dSDimitry Andric // former is collected the latter is ignored. 2966bdd1243dSDimitry Andric if (auto *II = dyn_cast<IntrinsicInst>(&I)) { 2967bdd1243dSDimitry Andric if (auto *AI = dyn_cast_or_null<AssumeInst>(II)) { 2968bdd1243dSDimitry Andric ED.addAssumeInst(A, *AI); 2969bdd1243dSDimitry Andric continue; 2970bdd1243dSDimitry Andric } 2971bdd1243dSDimitry Andric // TODO: Should we also collect and delete lifetime markers? 2972bdd1243dSDimitry Andric if (II->isAssumeLikeIntrinsic()) 2973bdd1243dSDimitry Andric continue; 2974bdd1243dSDimitry Andric } 2975bdd1243dSDimitry Andric 2976bdd1243dSDimitry Andric auto *CB = dyn_cast<CallBase>(&I); 2977bdd1243dSDimitry Andric bool IsNoSync = AA::isNoSyncInst(A, I, *this); 2978bdd1243dSDimitry Andric bool IsAlignedBarrier = 2979bdd1243dSDimitry Andric !IsNoSync && CB && 2980bdd1243dSDimitry Andric AANoSync::isAlignedBarrier(*CB, AlignedBarrierLastInBlock); 2981bdd1243dSDimitry Andric 2982bdd1243dSDimitry Andric AlignedBarrierLastInBlock &= IsNoSync; 2983bdd1243dSDimitry Andric 2984bdd1243dSDimitry Andric // Next we check for calls. Aligned barriers are handled 2985bdd1243dSDimitry Andric // explicitly, everything else is kept for the backward traversal and will 2986bdd1243dSDimitry Andric // also affect our state. 2987bdd1243dSDimitry Andric if (CB) { 2988bdd1243dSDimitry Andric if (IsAlignedBarrier) { 2989bdd1243dSDimitry Andric HandleAlignedBarrier(CB, ED); 2990bdd1243dSDimitry Andric AlignedBarrierLastInBlock = true; 2991bdd1243dSDimitry Andric continue; 2992bdd1243dSDimitry Andric } 2993bdd1243dSDimitry Andric 2994bdd1243dSDimitry Andric // Check the pointer(s) of a memory intrinsic explicitly. 2995bdd1243dSDimitry Andric if (isa<MemIntrinsic>(&I)) { 2996bdd1243dSDimitry Andric if (!ED.EncounteredNonLocalSideEffect && 2997bdd1243dSDimitry Andric AA::isPotentiallyAffectedByBarrier(A, I, *this)) 2998bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect = true; 2999bdd1243dSDimitry Andric if (!IsNoSync) { 3000bdd1243dSDimitry Andric ED.IsReachedFromAlignedBarrierOnly = false; 3001bdd1243dSDimitry Andric SyncInstWorklist.push_back(&I); 3002bdd1243dSDimitry Andric } 3003bdd1243dSDimitry Andric continue; 3004bdd1243dSDimitry Andric } 3005bdd1243dSDimitry Andric 3006bdd1243dSDimitry Andric // Record how we entered the call, then accumulate the effect of the 3007bdd1243dSDimitry Andric // call in ED for potential use by the callee. 3008bdd1243dSDimitry Andric auto &CallED = CEDMap[CB]; 3009bdd1243dSDimitry Andric mergeInPredecessor(A, CallED, ED); 3010bdd1243dSDimitry Andric 3011bdd1243dSDimitry Andric // If we have a sync-definition we can check if it starts/ends in an 3012bdd1243dSDimitry Andric // aligned barrier. If we are unsure we assume any sync breaks 3013bdd1243dSDimitry Andric // alignment. 
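// Illustrative example (hypothetical callee, not from the original source):
// for a defined callee whose AAExecutionDomain is valid, e.g.
//   void callee() { /* thread-local work */ __kmpc_barrier_simple_spmd(...); }
// CalleeED tells us execution returns from an aligned barrier, so the aligned
// state can be kept across the call; for an unknown or declaration-only
// callee we conservatively treat the sync as breaking alignment below.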
3014bdd1243dSDimitry Andric Function *Callee = CB->getCalledFunction(); 3015bdd1243dSDimitry Andric if (!IsNoSync && Callee && !Callee->isDeclaration()) { 3016bdd1243dSDimitry Andric const auto &EDAA = A.getAAFor<AAExecutionDomain>( 3017bdd1243dSDimitry Andric *this, IRPosition::function(*Callee), DepClassTy::OPTIONAL); 3018bdd1243dSDimitry Andric if (EDAA.getState().isValidState()) { 3019bdd1243dSDimitry Andric const auto &CalleeED = EDAA.getFunctionExecutionDomain(); 3020bdd1243dSDimitry Andric ED.IsReachedFromAlignedBarrierOnly = 3021*1ac55f4cSDimitry Andric CallED.IsReachedFromAlignedBarrierOnly = 3022bdd1243dSDimitry Andric CalleeED.IsReachedFromAlignedBarrierOnly; 3023bdd1243dSDimitry Andric AlignedBarrierLastInBlock = ED.IsReachedFromAlignedBarrierOnly; 3024bdd1243dSDimitry Andric if (IsNoSync || !CalleeED.IsReachedFromAlignedBarrierOnly) 3025bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect |= 3026bdd1243dSDimitry Andric CalleeED.EncounteredNonLocalSideEffect; 3027bdd1243dSDimitry Andric else 3028bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect = 3029bdd1243dSDimitry Andric CalleeED.EncounteredNonLocalSideEffect; 3030bdd1243dSDimitry Andric if (!CalleeED.IsReachingAlignedBarrierOnly) 3031bdd1243dSDimitry Andric SyncInstWorklist.push_back(&I); 3032bdd1243dSDimitry Andric if (CalleeED.IsReachedFromAlignedBarrierOnly) 3033bdd1243dSDimitry Andric mergeInPredecessorBarriersAndAssumptions(A, ED, CalleeED); 3034bdd1243dSDimitry Andric continue; 3035bdd1243dSDimitry Andric } 3036bdd1243dSDimitry Andric } 3037*1ac55f4cSDimitry Andric if (!IsNoSync) 3038bdd1243dSDimitry Andric ED.IsReachedFromAlignedBarrierOnly = 3039*1ac55f4cSDimitry Andric CallED.IsReachedFromAlignedBarrierOnly = false; 3040bdd1243dSDimitry Andric AlignedBarrierLastInBlock &= ED.IsReachedFromAlignedBarrierOnly; 3041bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect |= !CB->doesNotAccessMemory(); 3042bdd1243dSDimitry Andric if (!IsNoSync) 3043bdd1243dSDimitry Andric SyncInstWorklist.push_back(&I); 3044bdd1243dSDimitry Andric } 3045bdd1243dSDimitry Andric 3046bdd1243dSDimitry Andric if (!I.mayHaveSideEffects() && !I.mayReadFromMemory()) 3047bdd1243dSDimitry Andric continue; 3048bdd1243dSDimitry Andric 3049bdd1243dSDimitry Andric // If we have a callee we try to use fine-grained information to 3050bdd1243dSDimitry Andric // determine local side-effects. 
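// Hypothetical illustration (not from the original source): a call that is
// known to access only caller-local memory, e.g.
//   %buf = alloca [4 x i32]
//   call void @init(ptr nocapture %buf)   ; only touches %buf
// cannot be affected by other threads passing a barrier and thus does not
// set EncounteredNonLocalSideEffect; a call writing through an escaped or
// global pointer would.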
3051bdd1243dSDimitry Andric if (CB) { 3052bdd1243dSDimitry Andric const auto &MemAA = A.getAAFor<AAMemoryLocation>( 3053bdd1243dSDimitry Andric *this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL); 3054bdd1243dSDimitry Andric 3055bdd1243dSDimitry Andric auto AccessPred = [&](const Instruction *I, const Value *Ptr, 3056bdd1243dSDimitry Andric AAMemoryLocation::AccessKind, 3057bdd1243dSDimitry Andric AAMemoryLocation::MemoryLocationsKind) { 3058bdd1243dSDimitry Andric return !AA::isPotentiallyAffectedByBarrier(A, {Ptr}, *this, I); 3059bdd1243dSDimitry Andric }; 3060bdd1243dSDimitry Andric if (MemAA.getState().isValidState() && 3061bdd1243dSDimitry Andric MemAA.checkForAllAccessesToMemoryKind( 3062bdd1243dSDimitry Andric AccessPred, AAMemoryLocation::ALL_LOCATIONS)) 3063bdd1243dSDimitry Andric continue; 3064bdd1243dSDimitry Andric } 3065bdd1243dSDimitry Andric 3066bdd1243dSDimitry Andric if (!I.mayHaveSideEffects() && OMPInfoCache.isOnlyUsedByAssume(I)) 3067bdd1243dSDimitry Andric continue; 3068bdd1243dSDimitry Andric 3069bdd1243dSDimitry Andric if (auto *LI = dyn_cast<LoadInst>(&I)) 3070bdd1243dSDimitry Andric if (LI->hasMetadata(LLVMContext::MD_invariant_load)) 3071bdd1243dSDimitry Andric continue; 3072bdd1243dSDimitry Andric 3073bdd1243dSDimitry Andric if (!ED.EncounteredNonLocalSideEffect && 3074bdd1243dSDimitry Andric AA::isPotentiallyAffectedByBarrier(A, I, *this)) 3075bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect = true; 3076bdd1243dSDimitry Andric } 3077bdd1243dSDimitry Andric 3078bdd1243dSDimitry Andric if (!isa<UnreachableInst>(BB.getTerminator()) && 3079bdd1243dSDimitry Andric !BB.getTerminator()->getNumSuccessors()) { 3080bdd1243dSDimitry Andric 3081bdd1243dSDimitry Andric auto &FnED = BEDMap[nullptr]; 3082bdd1243dSDimitry Andric mergeInPredecessor(A, FnED, ED); 3083bdd1243dSDimitry Andric 3084bdd1243dSDimitry Andric if (IsKernel) 3085bdd1243dSDimitry Andric HandleAlignedBarrier(nullptr, ED); 3086bdd1243dSDimitry Andric } 3087bdd1243dSDimitry Andric 3088bdd1243dSDimitry Andric ExecutionDomainTy &StoredED = BEDMap[&BB]; 3089bdd1243dSDimitry Andric ED.IsReachingAlignedBarrierOnly = StoredED.IsReachingAlignedBarrierOnly; 3090bdd1243dSDimitry Andric 3091bdd1243dSDimitry Andric // Check if we computed anything different as part of the forward 3092bdd1243dSDimitry Andric // traversal. We do not take assumptions and aligned barriers into account 3093bdd1243dSDimitry Andric // as they do not influence the state we iterate. Backward traversal values 3094bdd1243dSDimitry Andric // are handled later on. 3095bdd1243dSDimitry Andric if (ED.IsExecutedByInitialThreadOnly != 3096bdd1243dSDimitry Andric StoredED.IsExecutedByInitialThreadOnly || 3097bdd1243dSDimitry Andric ED.IsReachedFromAlignedBarrierOnly != 3098bdd1243dSDimitry Andric StoredED.IsReachedFromAlignedBarrierOnly || 3099bdd1243dSDimitry Andric ED.EncounteredNonLocalSideEffect != 3100bdd1243dSDimitry Andric StoredED.EncounteredNonLocalSideEffect) 3101bdd1243dSDimitry Andric Changed = true; 3102bdd1243dSDimitry Andric 3103bdd1243dSDimitry Andric // Update the state with the new value. 3104bdd1243dSDimitry Andric StoredED = std::move(ED); 3105bdd1243dSDimitry Andric } 3106bdd1243dSDimitry Andric 3107bdd1243dSDimitry Andric // Propagate (non-aligned) sync instruction effects backwards until the 3108bdd1243dSDimitry Andric // entry is hit or an aligned barrier. 
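// Illustrative sketch (assumed CFG, not from the original source): given
//   entry -> A -> B, where B contains a non-aligned sync (e.g. a fence),
// the backward walk starts at that sync, clears IsReachingAlignedBarrierOnly
// for every call preceding it in B, and, if no aligned barrier precedes it
// in B, also for the block states of A and entry; once the entry block is
// reached the function-level state (BEDMap[nullptr]) is cleared as well.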
3109bdd1243dSDimitry Andric SmallSetVector<BasicBlock *, 16> Visited; 3110bdd1243dSDimitry Andric while (!SyncInstWorklist.empty()) { 3111bdd1243dSDimitry Andric Instruction *SyncInst = SyncInstWorklist.pop_back_val(); 3112bdd1243dSDimitry Andric Instruction *CurInst = SyncInst; 3113bdd1243dSDimitry Andric bool HitAlignedBarrier = false; 3114bdd1243dSDimitry Andric while ((CurInst = CurInst->getPrevNode())) { 3115bdd1243dSDimitry Andric auto *CB = dyn_cast<CallBase>(CurInst); 3116bdd1243dSDimitry Andric if (!CB) 3117bdd1243dSDimitry Andric continue; 3118bdd1243dSDimitry Andric auto &CallED = CEDMap[CB]; 3119bdd1243dSDimitry Andric if (SetAndRecord(CallED.IsReachingAlignedBarrierOnly, false)) 3120bdd1243dSDimitry Andric Changed = true; 3121bdd1243dSDimitry Andric HitAlignedBarrier = AlignedBarriers.count(CB); 3122bdd1243dSDimitry Andric if (HitAlignedBarrier) 3123bdd1243dSDimitry Andric break; 3124bdd1243dSDimitry Andric } 3125bdd1243dSDimitry Andric if (HitAlignedBarrier) 3126bdd1243dSDimitry Andric continue; 3127bdd1243dSDimitry Andric BasicBlock *SyncBB = SyncInst->getParent(); 3128bdd1243dSDimitry Andric for (auto *PredBB : predecessors(SyncBB)) { 3129bdd1243dSDimitry Andric if (LivenessAA.isEdgeDead(PredBB, SyncBB)) 3130bdd1243dSDimitry Andric continue; 3131bdd1243dSDimitry Andric if (!Visited.insert(PredBB)) 3132bdd1243dSDimitry Andric continue; 3133bdd1243dSDimitry Andric SyncInstWorklist.push_back(PredBB->getTerminator()); 3134bdd1243dSDimitry Andric auto &PredED = BEDMap[PredBB]; 3135bdd1243dSDimitry Andric if (SetAndRecord(PredED.IsReachingAlignedBarrierOnly, false)) 3136bdd1243dSDimitry Andric Changed = true; 3137bdd1243dSDimitry Andric } 3138bdd1243dSDimitry Andric if (SyncBB != &EntryBB) 3139bdd1243dSDimitry Andric continue; 3140bdd1243dSDimitry Andric auto &FnED = BEDMap[nullptr]; 3141bdd1243dSDimitry Andric if (SetAndRecord(FnED.IsReachingAlignedBarrierOnly, false)) 3142bdd1243dSDimitry Andric Changed = true; 3143bdd1243dSDimitry Andric } 3144bdd1243dSDimitry Andric 3145bdd1243dSDimitry Andric return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED; 3146fe6060f1SDimitry Andric } 3147fe6060f1SDimitry Andric 3148fe6060f1SDimitry Andric /// Try to replace memory allocation calls called by a single thread with a 3149fe6060f1SDimitry Andric /// static buffer of shared memory. 3150fe6060f1SDimitry Andric struct AAHeapToShared : public StateWrapper<BooleanState, AbstractAttribute> { 3151fe6060f1SDimitry Andric using Base = StateWrapper<BooleanState, AbstractAttribute>; 3152fe6060f1SDimitry Andric AAHeapToShared(const IRPosition &IRP, Attributor &A) : Base(IRP) {} 3153fe6060f1SDimitry Andric 3154fe6060f1SDimitry Andric /// Create an abstract attribute view for the position \p IRP. 3155fe6060f1SDimitry Andric static AAHeapToShared &createForPosition(const IRPosition &IRP, 3156fe6060f1SDimitry Andric Attributor &A); 3157fe6060f1SDimitry Andric 3158fe6060f1SDimitry Andric /// Returns true if HeapToShared conversion is assumed to be possible. 3159fe6060f1SDimitry Andric virtual bool isAssumedHeapToShared(CallBase &CB) const = 0; 3160fe6060f1SDimitry Andric 3161fe6060f1SDimitry Andric /// Returns true if HeapToShared conversion is assumed and the CB is a 3162fe6060f1SDimitry Andric /// callsite to a free operation to be removed. 3163fe6060f1SDimitry Andric virtual bool isAssumedHeapToSharedRemovedFree(CallBase &CB) const = 0; 3164fe6060f1SDimitry Andric 3165fe6060f1SDimitry Andric /// See AbstractAttribute::getName(). 
3166fe6060f1SDimitry Andric const std::string getName() const override { return "AAHeapToShared"; }
3167fe6060f1SDimitry Andric
3168fe6060f1SDimitry Andric /// See AbstractAttribute::getIdAddr().
3169fe6060f1SDimitry Andric const char *getIdAddr() const override { return &ID; }
3170fe6060f1SDimitry Andric
3171fe6060f1SDimitry Andric /// This function should return true if the type of the \p AA is
3172fe6060f1SDimitry Andric /// AAHeapToShared.
3173fe6060f1SDimitry Andric static bool classof(const AbstractAttribute *AA) {
3174fe6060f1SDimitry Andric return (AA->getIdAddr() == &ID);
3175fe6060f1SDimitry Andric }
3176fe6060f1SDimitry Andric
3177fe6060f1SDimitry Andric /// Unique ID (due to the unique address)
3178fe6060f1SDimitry Andric static const char ID;
3179fe6060f1SDimitry Andric };
3180fe6060f1SDimitry Andric
3181fe6060f1SDimitry Andric struct AAHeapToSharedFunction : public AAHeapToShared {
3182fe6060f1SDimitry Andric AAHeapToSharedFunction(const IRPosition &IRP, Attributor &A)
3183fe6060f1SDimitry Andric : AAHeapToShared(IRP, A) {}
3184fe6060f1SDimitry Andric
3185fe6060f1SDimitry Andric const std::string getAsStr() const override {
3186fe6060f1SDimitry Andric return "[AAHeapToShared] " + std::to_string(MallocCalls.size()) +
3187fe6060f1SDimitry Andric " malloc calls eligible.";
3188fe6060f1SDimitry Andric }
3189fe6060f1SDimitry Andric
3190fe6060f1SDimitry Andric /// See AbstractAttribute::trackStatistics().
3191fe6060f1SDimitry Andric void trackStatistics() const override {}
3192fe6060f1SDimitry Andric
3193fe6060f1SDimitry Andric /// This function finds free calls that will be removed by the
3194fe6060f1SDimitry Andric /// HeapToShared transformation.
3195fe6060f1SDimitry Andric void findPotentialRemovedFreeCalls(Attributor &A) {
3196fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
3197fe6060f1SDimitry Andric auto &FreeRFI = OMPInfoCache.RFIs[OMPRTL___kmpc_free_shared];
3198fe6060f1SDimitry Andric
3199fe6060f1SDimitry Andric PotentialRemovedFreeCalls.clear();
3200fe6060f1SDimitry Andric // Update free call users of found malloc calls.
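// Illustrative IR pattern this is looking for (device-code sketch, not from
// the original source):
//   %p = call ptr @__kmpc_alloc_shared(i64 16)
//   ...                                   ; uses of %p
//   call void @__kmpc_free_shared(ptr %p, i64 16)
// An allocation with exactly one such free call makes that free call
// removable once the allocation is replaced by a static shared-memory buffer.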
3201fe6060f1SDimitry Andric for (CallBase *CB : MallocCalls) { 3202fe6060f1SDimitry Andric SmallVector<CallBase *, 4> FreeCalls; 3203fe6060f1SDimitry Andric for (auto *U : CB->users()) { 3204fe6060f1SDimitry Andric CallBase *C = dyn_cast<CallBase>(U); 3205fe6060f1SDimitry Andric if (C && C->getCalledFunction() == FreeRFI.Declaration) 3206fe6060f1SDimitry Andric FreeCalls.push_back(C); 3207fe6060f1SDimitry Andric } 3208fe6060f1SDimitry Andric 3209fe6060f1SDimitry Andric if (FreeCalls.size() != 1) 3210fe6060f1SDimitry Andric continue; 3211fe6060f1SDimitry Andric 3212fe6060f1SDimitry Andric PotentialRemovedFreeCalls.insert(FreeCalls.front()); 3213fe6060f1SDimitry Andric } 3214fe6060f1SDimitry Andric } 3215fe6060f1SDimitry Andric 3216fe6060f1SDimitry Andric void initialize(Attributor &A) override { 321781ad6265SDimitry Andric if (DisableOpenMPOptDeglobalization) { 321881ad6265SDimitry Andric indicatePessimisticFixpoint(); 321981ad6265SDimitry Andric return; 322081ad6265SDimitry Andric } 322181ad6265SDimitry Andric 3222fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 3223fe6060f1SDimitry Andric auto &RFI = OMPInfoCache.RFIs[OMPRTL___kmpc_alloc_shared]; 3224bdd1243dSDimitry Andric if (!RFI.Declaration) 3225bdd1243dSDimitry Andric return; 3226fe6060f1SDimitry Andric 322781ad6265SDimitry Andric Attributor::SimplifictionCallbackTy SCB = 322881ad6265SDimitry Andric [](const IRPosition &, const AbstractAttribute *, 3229bdd1243dSDimitry Andric bool &) -> std::optional<Value *> { return nullptr; }; 3230bdd1243dSDimitry Andric 3231bdd1243dSDimitry Andric Function *F = getAnchorScope(); 3232fe6060f1SDimitry Andric for (User *U : RFI.Declaration->users()) 323381ad6265SDimitry Andric if (CallBase *CB = dyn_cast<CallBase>(U)) { 3234bdd1243dSDimitry Andric if (CB->getFunction() != F) 3235bdd1243dSDimitry Andric continue; 3236fe6060f1SDimitry Andric MallocCalls.insert(CB); 323781ad6265SDimitry Andric A.registerSimplificationCallback(IRPosition::callsite_returned(*CB), 323881ad6265SDimitry Andric SCB); 323981ad6265SDimitry Andric } 3240fe6060f1SDimitry Andric 3241fe6060f1SDimitry Andric findPotentialRemovedFreeCalls(A); 3242fe6060f1SDimitry Andric } 3243fe6060f1SDimitry Andric 3244fe6060f1SDimitry Andric bool isAssumedHeapToShared(CallBase &CB) const override { 3245fe6060f1SDimitry Andric return isValidState() && MallocCalls.count(&CB); 3246fe6060f1SDimitry Andric } 3247fe6060f1SDimitry Andric 3248fe6060f1SDimitry Andric bool isAssumedHeapToSharedRemovedFree(CallBase &CB) const override { 3249fe6060f1SDimitry Andric return isValidState() && PotentialRemovedFreeCalls.count(&CB); 3250fe6060f1SDimitry Andric } 3251fe6060f1SDimitry Andric 3252fe6060f1SDimitry Andric ChangeStatus manifest(Attributor &A) override { 3253fe6060f1SDimitry Andric if (MallocCalls.empty()) 3254fe6060f1SDimitry Andric return ChangeStatus::UNCHANGED; 3255fe6060f1SDimitry Andric 3256fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 3257fe6060f1SDimitry Andric auto &FreeCall = OMPInfoCache.RFIs[OMPRTL___kmpc_free_shared]; 3258fe6060f1SDimitry Andric 3259fe6060f1SDimitry Andric Function *F = getAnchorScope(); 3260fe6060f1SDimitry Andric auto *HS = A.lookupAAFor<AAHeapToStack>(IRPosition::function(*F), this, 3261fe6060f1SDimitry Andric DepClassTy::OPTIONAL); 3262fe6060f1SDimitry Andric 3263fe6060f1SDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED; 3264fe6060f1SDimitry Andric for (CallBase *CB : MallocCalls) { 3265fe6060f1SDimitry 
Andric // Skip replacing this if HeapToStack has already claimed it. 3266fe6060f1SDimitry Andric if (HS && HS->isAssumedHeapToStack(*CB)) 3267fe6060f1SDimitry Andric continue; 3268fe6060f1SDimitry Andric 3269fe6060f1SDimitry Andric // Find the unique free call to remove it. 3270fe6060f1SDimitry Andric SmallVector<CallBase *, 4> FreeCalls; 3271fe6060f1SDimitry Andric for (auto *U : CB->users()) { 3272fe6060f1SDimitry Andric CallBase *C = dyn_cast<CallBase>(U); 3273fe6060f1SDimitry Andric if (C && C->getCalledFunction() == FreeCall.Declaration) 3274fe6060f1SDimitry Andric FreeCalls.push_back(C); 3275fe6060f1SDimitry Andric } 3276fe6060f1SDimitry Andric if (FreeCalls.size() != 1) 3277fe6060f1SDimitry Andric continue; 3278fe6060f1SDimitry Andric 327904eeddc0SDimitry Andric auto *AllocSize = cast<ConstantInt>(CB->getArgOperand(0)); 3280fe6060f1SDimitry Andric 328181ad6265SDimitry Andric if (AllocSize->getZExtValue() + SharedMemoryUsed > SharedMemoryLimit) { 328281ad6265SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Cannot replace call " << *CB 328381ad6265SDimitry Andric << " with shared memory." 328481ad6265SDimitry Andric << " Shared memory usage is limited to " 328581ad6265SDimitry Andric << SharedMemoryLimit << " bytes\n"); 328681ad6265SDimitry Andric continue; 328781ad6265SDimitry Andric } 328881ad6265SDimitry Andric 3289349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Replace globalization call " << *CB 3290349cc55cSDimitry Andric << " with " << AllocSize->getZExtValue() 3291fe6060f1SDimitry Andric << " bytes of shared memory\n"); 3292fe6060f1SDimitry Andric 3293fe6060f1SDimitry Andric // Create a new shared memory buffer of the same size as the allocation 3294fe6060f1SDimitry Andric // and replace all the uses of the original allocation with it. 3295fe6060f1SDimitry Andric Module *M = CB->getModule(); 3296fe6060f1SDimitry Andric Type *Int8Ty = Type::getInt8Ty(M->getContext()); 3297fe6060f1SDimitry Andric Type *Int8ArrTy = ArrayType::get(Int8Ty, AllocSize->getZExtValue()); 3298fe6060f1SDimitry Andric auto *SharedMem = new GlobalVariable( 3299fe6060f1SDimitry Andric *M, Int8ArrTy, /* IsConstant */ false, GlobalValue::InternalLinkage, 330004eeddc0SDimitry Andric UndefValue::get(Int8ArrTy), CB->getName() + "_shared", nullptr, 3301fe6060f1SDimitry Andric GlobalValue::NotThreadLocal, 3302fe6060f1SDimitry Andric static_cast<unsigned>(AddressSpace::Shared)); 3303fe6060f1SDimitry Andric auto *NewBuffer = 3304fe6060f1SDimitry Andric ConstantExpr::getPointerCast(SharedMem, Int8Ty->getPointerTo()); 3305fe6060f1SDimitry Andric 3306fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemark OR) { 3307fe6060f1SDimitry Andric return OR << "Replaced globalized variable with " 3308fe6060f1SDimitry Andric << ore::NV("SharedMemory", AllocSize->getZExtValue()) 3309fe6060f1SDimitry Andric << ((AllocSize->getZExtValue() != 1) ? 
" bytes " : " byte ") 3310fe6060f1SDimitry Andric << "of shared memory."; 3311fe6060f1SDimitry Andric }; 3312fe6060f1SDimitry Andric A.emitRemark<OptimizationRemark>(CB, "OMP111", Remark); 3313fe6060f1SDimitry Andric 331404eeddc0SDimitry Andric MaybeAlign Alignment = CB->getRetAlign(); 331504eeddc0SDimitry Andric assert(Alignment && 331604eeddc0SDimitry Andric "HeapToShared on allocation without alignment attribute"); 331704eeddc0SDimitry Andric SharedMem->setAlignment(MaybeAlign(Alignment)); 3318fe6060f1SDimitry Andric 331981ad6265SDimitry Andric A.changeAfterManifest(IRPosition::callsite_returned(*CB), *NewBuffer); 3320fe6060f1SDimitry Andric A.deleteAfterManifest(*CB); 3321fe6060f1SDimitry Andric A.deleteAfterManifest(*FreeCalls.front()); 3322fe6060f1SDimitry Andric 332381ad6265SDimitry Andric SharedMemoryUsed += AllocSize->getZExtValue(); 332481ad6265SDimitry Andric NumBytesMovedToSharedMemory = SharedMemoryUsed; 3325fe6060f1SDimitry Andric Changed = ChangeStatus::CHANGED; 3326fe6060f1SDimitry Andric } 3327fe6060f1SDimitry Andric 3328fe6060f1SDimitry Andric return Changed; 3329fe6060f1SDimitry Andric } 3330fe6060f1SDimitry Andric 3331fe6060f1SDimitry Andric ChangeStatus updateImpl(Attributor &A) override { 3332bdd1243dSDimitry Andric if (MallocCalls.empty()) 3333bdd1243dSDimitry Andric return indicatePessimisticFixpoint(); 3334fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 3335fe6060f1SDimitry Andric auto &RFI = OMPInfoCache.RFIs[OMPRTL___kmpc_alloc_shared]; 3336bdd1243dSDimitry Andric if (!RFI.Declaration) 3337bdd1243dSDimitry Andric return ChangeStatus::UNCHANGED; 3338bdd1243dSDimitry Andric 3339fe6060f1SDimitry Andric Function *F = getAnchorScope(); 3340fe6060f1SDimitry Andric 3341fe6060f1SDimitry Andric auto NumMallocCalls = MallocCalls.size(); 3342fe6060f1SDimitry Andric 3343fe6060f1SDimitry Andric // Only consider malloc calls executed by a single thread with a constant. 3344fe6060f1SDimitry Andric for (User *U : RFI.Declaration->users()) { 3345bdd1243dSDimitry Andric if (CallBase *CB = dyn_cast<CallBase>(U)) { 3346bdd1243dSDimitry Andric if (CB->getCaller() != F) 3347bdd1243dSDimitry Andric continue; 3348bdd1243dSDimitry Andric if (!MallocCalls.count(CB)) 3349bdd1243dSDimitry Andric continue; 3350bdd1243dSDimitry Andric if (!isa<ConstantInt>(CB->getArgOperand(0))) { 3351bdd1243dSDimitry Andric MallocCalls.remove(CB); 3352bdd1243dSDimitry Andric continue; 3353bdd1243dSDimitry Andric } 3354fe6060f1SDimitry Andric const auto &ED = A.getAAFor<AAExecutionDomain>( 3355fe6060f1SDimitry Andric *this, IRPosition::function(*F), DepClassTy::REQUIRED); 3356bdd1243dSDimitry Andric if (!ED.isExecutedByInitialThreadOnly(*CB)) 335704eeddc0SDimitry Andric MallocCalls.remove(CB); 3358fe6060f1SDimitry Andric } 3359bdd1243dSDimitry Andric } 3360fe6060f1SDimitry Andric 3361fe6060f1SDimitry Andric findPotentialRemovedFreeCalls(A); 3362fe6060f1SDimitry Andric 3363fe6060f1SDimitry Andric if (NumMallocCalls != MallocCalls.size()) 3364fe6060f1SDimitry Andric return ChangeStatus::CHANGED; 3365fe6060f1SDimitry Andric 3366fe6060f1SDimitry Andric return ChangeStatus::UNCHANGED; 3367fe6060f1SDimitry Andric } 3368fe6060f1SDimitry Andric 3369fe6060f1SDimitry Andric /// Collection of all malloc calls in a function. 337004eeddc0SDimitry Andric SmallSetVector<CallBase *, 4> MallocCalls; 3371fe6060f1SDimitry Andric /// Collection of potentially removed free calls in a function. 
3372fe6060f1SDimitry Andric SmallPtrSet<CallBase *, 4> PotentialRemovedFreeCalls;
337381ad6265SDimitry Andric /// The total amount of shared memory that has been used for HeapToShared.
337481ad6265SDimitry Andric unsigned SharedMemoryUsed = 0;
3375fe6060f1SDimitry Andric };
3376fe6060f1SDimitry Andric
3377fe6060f1SDimitry Andric struct AAKernelInfo : public StateWrapper<KernelInfoState, AbstractAttribute> {
3378fe6060f1SDimitry Andric using Base = StateWrapper<KernelInfoState, AbstractAttribute>;
3379fe6060f1SDimitry Andric AAKernelInfo(const IRPosition &IRP, Attributor &A) : Base(IRP) {}
3380fe6060f1SDimitry Andric
3381fe6060f1SDimitry Andric /// Statistics are tracked as part of manifest for now.
3382fe6060f1SDimitry Andric void trackStatistics() const override {}
3383fe6060f1SDimitry Andric
3384fe6060f1SDimitry Andric /// See AbstractAttribute::getAsStr()
3385fe6060f1SDimitry Andric const std::string getAsStr() const override {
3386fe6060f1SDimitry Andric if (!isValidState())
3387fe6060f1SDimitry Andric return "<invalid>";
3388fe6060f1SDimitry Andric return std::string(SPMDCompatibilityTracker.isAssumed() ? "SPMD"
3389fe6060f1SDimitry Andric : "generic") +
3390fe6060f1SDimitry Andric std::string(SPMDCompatibilityTracker.isAtFixpoint() ? " [FIX]"
3391fe6060f1SDimitry Andric : "") +
3392fe6060f1SDimitry Andric std::string(" #PRs: ") +
3393349cc55cSDimitry Andric (ReachedKnownParallelRegions.isValidState()
3394349cc55cSDimitry Andric ? std::to_string(ReachedKnownParallelRegions.size())
3395349cc55cSDimitry Andric : "<invalid>") +
3396fe6060f1SDimitry Andric ", #Unknown PRs: " +
3397349cc55cSDimitry Andric (ReachedUnknownParallelRegions.isValidState()
3398349cc55cSDimitry Andric ? std::to_string(ReachedUnknownParallelRegions.size())
3399349cc55cSDimitry Andric : "<invalid>") +
3400349cc55cSDimitry Andric ", #Reaching Kernels: " +
3401349cc55cSDimitry Andric (ReachingKernelEntries.isValidState()
3402349cc55cSDimitry Andric ? std::to_string(ReachingKernelEntries.size())
3403bdd1243dSDimitry Andric : "<invalid>") +
3404bdd1243dSDimitry Andric ", #ParLevels: " +
3405bdd1243dSDimitry Andric (ParallelLevels.isValidState()
3406bdd1243dSDimitry Andric ? std::to_string(ParallelLevels.size())
3407349cc55cSDimitry Andric : "<invalid>");
3408fe6060f1SDimitry Andric }
3409fe6060f1SDimitry Andric
3410fe6060f1SDimitry Andric /// Create an abstract attribute view for the position \p IRP.
3411fe6060f1SDimitry Andric static AAKernelInfo &createForPosition(const IRPosition &IRP, Attributor &A);
3412fe6060f1SDimitry Andric
3413fe6060f1SDimitry Andric /// See AbstractAttribute::getName()
3414fe6060f1SDimitry Andric const std::string getName() const override { return "AAKernelInfo"; }
3415fe6060f1SDimitry Andric
3416fe6060f1SDimitry Andric /// See AbstractAttribute::getIdAddr()
3417fe6060f1SDimitry Andric const char *getIdAddr() const override { return &ID; }
3418fe6060f1SDimitry Andric
3419fe6060f1SDimitry Andric /// This function should return true if the type of the \p AA is AAKernelInfo
3420fe6060f1SDimitry Andric static bool classof(const AbstractAttribute *AA) {
3421fe6060f1SDimitry Andric return (AA->getIdAddr() == &ID);
3422fe6060f1SDimitry Andric }
3423fe6060f1SDimitry Andric
3424fe6060f1SDimitry Andric static const char ID;
3425fe6060f1SDimitry Andric };
3426fe6060f1SDimitry Andric
3427fe6060f1SDimitry Andric /// The function kernel info abstract attribute, basically, what can we say
3428fe6060f1SDimitry Andric /// about a function with regards to the KernelInfoState.
3429fe6060f1SDimitry Andric struct AAKernelInfoFunction : AAKernelInfo {
3430fe6060f1SDimitry Andric AAKernelInfoFunction(const IRPosition &IRP, Attributor &A)
3431fe6060f1SDimitry Andric : AAKernelInfo(IRP, A) {}
3432fe6060f1SDimitry Andric
3433349cc55cSDimitry Andric SmallPtrSet<Instruction *, 4> GuardedInstructions;
3434349cc55cSDimitry Andric
3435349cc55cSDimitry Andric SmallPtrSetImpl<Instruction *> &getGuardedInstructions() {
3436349cc55cSDimitry Andric return GuardedInstructions;
3437349cc55cSDimitry Andric }
3438349cc55cSDimitry Andric
3439fe6060f1SDimitry Andric /// See AbstractAttribute::initialize(...).
3440fe6060f1SDimitry Andric void initialize(Attributor &A) override {
3441fe6060f1SDimitry Andric // This is a high-level transform that might change the constant arguments
3442fe6060f1SDimitry Andric // of the init and deinit calls. We need to tell the Attributor about this
3443fe6060f1SDimitry Andric // to avoid other parts using the current constant value for simplification.
3444fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
3445fe6060f1SDimitry Andric
3446fe6060f1SDimitry Andric Function *Fn = getAnchorScope();
3447fe6060f1SDimitry Andric
3448fe6060f1SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &InitRFI =
3449fe6060f1SDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_target_init];
3450fe6060f1SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &DeinitRFI =
3451fe6060f1SDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_target_deinit];
3452fe6060f1SDimitry Andric
3453fe6060f1SDimitry Andric // For kernels we perform more initialization work, first we find the init
3454fe6060f1SDimitry Andric // and deinit calls.
3455fe6060f1SDimitry Andric auto StoreCallBase = [](Use &U,
3456fe6060f1SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &RFI,
3457fe6060f1SDimitry Andric CallBase *&Storage) {
3458fe6060f1SDimitry Andric CallBase *CB = OpenMPOpt::getCallIfRegularCall(U, &RFI);
3459fe6060f1SDimitry Andric assert(CB &&
3460fe6060f1SDimitry Andric "Unexpected use of __kmpc_target_init or __kmpc_target_deinit!");
3461fe6060f1SDimitry Andric assert(!Storage &&
3462fe6060f1SDimitry Andric "Multiple uses of __kmpc_target_init or __kmpc_target_deinit!");
3463fe6060f1SDimitry Andric Storage = CB;
3464fe6060f1SDimitry Andric return false;
3465fe6060f1SDimitry Andric };
3466fe6060f1SDimitry Andric InitRFI.foreachUse(
3467fe6060f1SDimitry Andric [&](Use &U, Function &) {
3468fe6060f1SDimitry Andric StoreCallBase(U, InitRFI, KernelInitCB);
3469fe6060f1SDimitry Andric return false;
3470fe6060f1SDimitry Andric },
3471fe6060f1SDimitry Andric Fn);
3472fe6060f1SDimitry Andric DeinitRFI.foreachUse(
3473fe6060f1SDimitry Andric [&](Use &U, Function &) {
3474fe6060f1SDimitry Andric StoreCallBase(U, DeinitRFI, KernelDeinitCB);
3475fe6060f1SDimitry Andric return false;
3476fe6060f1SDimitry Andric },
3477fe6060f1SDimitry Andric Fn);
3478fe6060f1SDimitry Andric
3479349cc55cSDimitry Andric // Ignore kernels without initializers such as global constructors.
348081ad6265SDimitry Andric if (!KernelInitCB || !KernelDeinitCB)
3481349cc55cSDimitry Andric return;
348281ad6265SDimitry Andric
348381ad6265SDimitry Andric // Add itself to the reaching kernel and set IsKernelEntry.
348481ad6265SDimitry Andric ReachingKernelEntries.insert(Fn); 348581ad6265SDimitry Andric IsKernelEntry = true; 3486fe6060f1SDimitry Andric 3487fe6060f1SDimitry Andric // For kernels we might need to initialize/finalize the IsSPMD state and 3488fe6060f1SDimitry Andric // we need to register a simplification callback so that the Attributor 3489fe6060f1SDimitry Andric // knows the constant arguments to __kmpc_target_init and 3490fe6060f1SDimitry Andric // __kmpc_target_deinit might actually change. 3491fe6060f1SDimitry Andric 3492fe6060f1SDimitry Andric Attributor::SimplifictionCallbackTy StateMachineSimplifyCB = 3493fe6060f1SDimitry Andric [&](const IRPosition &IRP, const AbstractAttribute *AA, 3494bdd1243dSDimitry Andric bool &UsedAssumedInformation) -> std::optional<Value *> { 3495fe6060f1SDimitry Andric // IRP represents the "use generic state machine" argument of an 3496fe6060f1SDimitry Andric // __kmpc_target_init call. We will answer this one with the internal 3497fe6060f1SDimitry Andric // state. As long as we are not in an invalid state, we will create a 3498fe6060f1SDimitry Andric // custom state machine so the value should be a `i1 false`. If we are 3499fe6060f1SDimitry Andric // in an invalid state, we won't change the value that is in the IR. 3500349cc55cSDimitry Andric if (!ReachedKnownParallelRegions.isValidState()) 3501349cc55cSDimitry Andric return nullptr; 3502349cc55cSDimitry Andric // If we have disabled state machine rewrites, don't make a custom one. 3503349cc55cSDimitry Andric if (DisableOpenMPOptStateMachineRewrite) 3504fe6060f1SDimitry Andric return nullptr; 3505fe6060f1SDimitry Andric if (AA) 3506fe6060f1SDimitry Andric A.recordDependence(*this, *AA, DepClassTy::OPTIONAL); 3507fe6060f1SDimitry Andric UsedAssumedInformation = !isAtFixpoint(); 3508fe6060f1SDimitry Andric auto *FalseVal = 350904eeddc0SDimitry Andric ConstantInt::getBool(IRP.getAnchorValue().getContext(), false); 3510fe6060f1SDimitry Andric return FalseVal; 3511fe6060f1SDimitry Andric }; 3512fe6060f1SDimitry Andric 3513349cc55cSDimitry Andric Attributor::SimplifictionCallbackTy ModeSimplifyCB = 3514fe6060f1SDimitry Andric [&](const IRPosition &IRP, const AbstractAttribute *AA, 3515bdd1243dSDimitry Andric bool &UsedAssumedInformation) -> std::optional<Value *> { 3516fe6060f1SDimitry Andric // IRP represents the "SPMDCompatibilityTracker" argument of an 3517fe6060f1SDimitry Andric // __kmpc_target_init or 3518fe6060f1SDimitry Andric // __kmpc_target_deinit call. We will answer this one with the internal 3519fe6060f1SDimitry Andric // state. 3520fe6060f1SDimitry Andric if (!SPMDCompatibilityTracker.isValidState()) 3521fe6060f1SDimitry Andric return nullptr; 3522fe6060f1SDimitry Andric if (!SPMDCompatibilityTracker.isAtFixpoint()) { 3523fe6060f1SDimitry Andric if (AA) 3524fe6060f1SDimitry Andric A.recordDependence(*this, *AA, DepClassTy::OPTIONAL); 3525fe6060f1SDimitry Andric UsedAssumedInformation = true; 3526fe6060f1SDimitry Andric } else { 3527fe6060f1SDimitry Andric UsedAssumedInformation = false; 3528fe6060f1SDimitry Andric } 3529349cc55cSDimitry Andric auto *Val = ConstantInt::getSigned( 3530349cc55cSDimitry Andric IntegerType::getInt8Ty(IRP.getAnchorValue().getContext()), 3531349cc55cSDimitry Andric SPMDCompatibilityTracker.isAssumed() ? 
OMP_TGT_EXEC_MODE_SPMD 3532349cc55cSDimitry Andric : OMP_TGT_EXEC_MODE_GENERIC); 3533fe6060f1SDimitry Andric return Val; 3534fe6060f1SDimitry Andric }; 3535fe6060f1SDimitry Andric 3536349cc55cSDimitry Andric constexpr const int InitModeArgNo = 1; 3537349cc55cSDimitry Andric constexpr const int DeinitModeArgNo = 1; 3538fe6060f1SDimitry Andric constexpr const int InitUseStateMachineArgNo = 2; 3539fe6060f1SDimitry Andric A.registerSimplificationCallback( 3540fe6060f1SDimitry Andric IRPosition::callsite_argument(*KernelInitCB, InitUseStateMachineArgNo), 3541fe6060f1SDimitry Andric StateMachineSimplifyCB); 3542fe6060f1SDimitry Andric A.registerSimplificationCallback( 3543349cc55cSDimitry Andric IRPosition::callsite_argument(*KernelInitCB, InitModeArgNo), 3544349cc55cSDimitry Andric ModeSimplifyCB); 3545fe6060f1SDimitry Andric A.registerSimplificationCallback( 3546349cc55cSDimitry Andric IRPosition::callsite_argument(*KernelDeinitCB, DeinitModeArgNo), 3547349cc55cSDimitry Andric ModeSimplifyCB); 3548fe6060f1SDimitry Andric 3549fe6060f1SDimitry Andric // Check if we know we are in SPMD-mode already. 3550349cc55cSDimitry Andric ConstantInt *ModeArg = 3551349cc55cSDimitry Andric dyn_cast<ConstantInt>(KernelInitCB->getArgOperand(InitModeArgNo)); 3552349cc55cSDimitry Andric if (ModeArg && (ModeArg->getSExtValue() & OMP_TGT_EXEC_MODE_SPMD)) 3553fe6060f1SDimitry Andric SPMDCompatibilityTracker.indicateOptimisticFixpoint(); 3554349cc55cSDimitry Andric // This is a generic region but SPMDization is disabled so stop tracking. 3555349cc55cSDimitry Andric else if (DisableOpenMPOptSPMDization) 3556349cc55cSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint(); 3557bdd1243dSDimitry Andric 3558bdd1243dSDimitry Andric // Register virtual uses of functions we might need to preserve. 3559bdd1243dSDimitry Andric auto RegisterVirtualUse = [&](RuntimeFunction RFKind, 3560bdd1243dSDimitry Andric Attributor::VirtualUseCallbackTy &CB) { 3561bdd1243dSDimitry Andric if (!OMPInfoCache.RFIs[RFKind].Declaration) 3562bdd1243dSDimitry Andric return; 3563bdd1243dSDimitry Andric A.registerVirtualUseCallback(*OMPInfoCache.RFIs[RFKind].Declaration, CB); 3564bdd1243dSDimitry Andric }; 3565bdd1243dSDimitry Andric 3566bdd1243dSDimitry Andric // Add a dependence to ensure updates if the state changes. 3567bdd1243dSDimitry Andric auto AddDependence = [](Attributor &A, const AAKernelInfo *KI, 3568bdd1243dSDimitry Andric const AbstractAttribute *QueryingAA) { 3569bdd1243dSDimitry Andric if (QueryingAA) { 3570bdd1243dSDimitry Andric A.recordDependence(*KI, *QueryingAA, DepClassTy::OPTIONAL); 3571bdd1243dSDimitry Andric } 3572bdd1243dSDimitry Andric return true; 3573bdd1243dSDimitry Andric }; 3574bdd1243dSDimitry Andric 3575bdd1243dSDimitry Andric Attributor::VirtualUseCallbackTy CustomStateMachineUseCB = 3576bdd1243dSDimitry Andric [&](Attributor &A, const AbstractAttribute *QueryingAA) { 3577bdd1243dSDimitry Andric // Whenever we create a custom state machine we will insert calls to 3578bdd1243dSDimitry Andric // __kmpc_get_hardware_num_threads_in_block, 3579bdd1243dSDimitry Andric // __kmpc_get_warp_size, 3580bdd1243dSDimitry Andric // __kmpc_barrier_simple_generic, 3581bdd1243dSDimitry Andric // __kmpc_kernel_parallel, and 3582bdd1243dSDimitry Andric // __kmpc_kernel_end_parallel. 3583bdd1243dSDimitry Andric // Not needed if we are on track for SPMDzation. 
3584bdd1243dSDimitry Andric if (SPMDCompatibilityTracker.isValidState()) 3585bdd1243dSDimitry Andric return AddDependence(A, this, QueryingAA); 3586bdd1243dSDimitry Andric // Not needed if we can't rewrite due to an invalid state. 3587bdd1243dSDimitry Andric if (!ReachedKnownParallelRegions.isValidState()) 3588bdd1243dSDimitry Andric return AddDependence(A, this, QueryingAA); 3589bdd1243dSDimitry Andric return false; 3590bdd1243dSDimitry Andric }; 3591bdd1243dSDimitry Andric 3592bdd1243dSDimitry Andric // Not needed if we are pre-runtime merge. 3593bdd1243dSDimitry Andric if (!KernelInitCB->getCalledFunction()->isDeclaration()) { 3594bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_get_hardware_num_threads_in_block, 3595bdd1243dSDimitry Andric CustomStateMachineUseCB); 3596bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_get_warp_size, CustomStateMachineUseCB); 3597bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_barrier_simple_generic, 3598bdd1243dSDimitry Andric CustomStateMachineUseCB); 3599bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_kernel_parallel, 3600bdd1243dSDimitry Andric CustomStateMachineUseCB); 3601bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_kernel_end_parallel, 3602bdd1243dSDimitry Andric CustomStateMachineUseCB); 3603bdd1243dSDimitry Andric } 3604bdd1243dSDimitry Andric 3605bdd1243dSDimitry Andric // If we do not perform SPMDzation we do not need the virtual uses below. 3606bdd1243dSDimitry Andric if (SPMDCompatibilityTracker.isAtFixpoint()) 3607bdd1243dSDimitry Andric return; 3608bdd1243dSDimitry Andric 3609bdd1243dSDimitry Andric Attributor::VirtualUseCallbackTy HWThreadIdUseCB = 3610bdd1243dSDimitry Andric [&](Attributor &A, const AbstractAttribute *QueryingAA) { 3611bdd1243dSDimitry Andric // Whenever we perform SPMDzation we will insert 3612bdd1243dSDimitry Andric // __kmpc_get_hardware_thread_id_in_block calls. 3613bdd1243dSDimitry Andric if (!SPMDCompatibilityTracker.isValidState()) 3614bdd1243dSDimitry Andric return AddDependence(A, this, QueryingAA); 3615bdd1243dSDimitry Andric return false; 3616bdd1243dSDimitry Andric }; 3617bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_get_hardware_thread_id_in_block, 3618bdd1243dSDimitry Andric HWThreadIdUseCB); 3619bdd1243dSDimitry Andric 3620bdd1243dSDimitry Andric Attributor::VirtualUseCallbackTy SPMDBarrierUseCB = 3621bdd1243dSDimitry Andric [&](Attributor &A, const AbstractAttribute *QueryingAA) { 3622bdd1243dSDimitry Andric // Whenever we perform SPMDzation with guarding we will insert 3623bdd1243dSDimitry Andric // __kmpc_simple_barrier_spmd calls. If SPMDzation failed, there is 3624bdd1243dSDimitry Andric // nothing to guard, or there are no parallel regions, we don't need 3625bdd1243dSDimitry Andric // the calls. 3626bdd1243dSDimitry Andric if (!SPMDCompatibilityTracker.isValidState()) 3627bdd1243dSDimitry Andric return AddDependence(A, this, QueryingAA); 3628bdd1243dSDimitry Andric if (SPMDCompatibilityTracker.empty()) 3629bdd1243dSDimitry Andric return AddDependence(A, this, QueryingAA); 3630bdd1243dSDimitry Andric if (!mayContainParallelRegion()) 3631bdd1243dSDimitry Andric return AddDependence(A, this, QueryingAA); 3632bdd1243dSDimitry Andric return false; 3633bdd1243dSDimitry Andric }; 3634bdd1243dSDimitry Andric RegisterVirtualUse(OMPRTL___kmpc_barrier_simple_spmd, SPMDBarrierUseCB); 3635349cc55cSDimitry Andric } 3636349cc55cSDimitry Andric 3637349cc55cSDimitry Andric /// Sanitize the string \p S such that it is a suitable global symbol name. 
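/// For example (illustrative): "x<int>.guarded" becomes "x.int..guarded",
/// since every character outside [a-zA-Z0-9_] is replaced by '.'.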
3638349cc55cSDimitry Andric static std::string sanitizeForGlobalName(std::string S) { 3639349cc55cSDimitry Andric std::replace_if( 3640349cc55cSDimitry Andric S.begin(), S.end(), 3641349cc55cSDimitry Andric [](const char C) { 3642349cc55cSDimitry Andric return !((C >= 'a' && C <= 'z') || (C >= 'A' && C <= 'Z') || 3643349cc55cSDimitry Andric (C >= '0' && C <= '9') || C == '_'); 3644349cc55cSDimitry Andric }, 3645349cc55cSDimitry Andric '.'); 3646349cc55cSDimitry Andric return S; 3647fe6060f1SDimitry Andric } 3648fe6060f1SDimitry Andric 3649fe6060f1SDimitry Andric /// Modify the IR based on the KernelInfoState as the fixpoint iteration is 3650fe6060f1SDimitry Andric /// finished now. 3651fe6060f1SDimitry Andric ChangeStatus manifest(Attributor &A) override { 3652fe6060f1SDimitry Andric // If we are not looking at a kernel with __kmpc_target_init and 3653fe6060f1SDimitry Andric // __kmpc_target_deinit call we cannot actually manifest the information. 3654fe6060f1SDimitry Andric if (!KernelInitCB || !KernelDeinitCB) 3655fe6060f1SDimitry Andric return ChangeStatus::UNCHANGED; 3656fe6060f1SDimitry Andric 3657bdd1243dSDimitry Andric /// Insert nested Parallelism global variable 3658bdd1243dSDimitry Andric Function *Kernel = getAnchorScope(); 3659bdd1243dSDimitry Andric Module &M = *Kernel->getParent(); 3660bdd1243dSDimitry Andric Type *Int8Ty = Type::getInt8Ty(M.getContext()); 3661bdd1243dSDimitry Andric new GlobalVariable(M, Int8Ty, /* isConstant */ true, 3662bdd1243dSDimitry Andric GlobalValue::WeakAnyLinkage, 3663bdd1243dSDimitry Andric ConstantInt::get(Int8Ty, NestedParallelism ? 1 : 0), 3664bdd1243dSDimitry Andric Kernel->getName() + "_nested_parallelism"); 3665bdd1243dSDimitry Andric 3666fe6060f1SDimitry Andric // If we can we change the execution mode to SPMD-mode otherwise we build a 3667fe6060f1SDimitry Andric // custom state machine. 3668349cc55cSDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED; 3669bdd1243dSDimitry Andric if (!changeToSPMDMode(A, Changed)) { 3670bdd1243dSDimitry Andric if (!KernelInitCB->getCalledFunction()->isDeclaration()) 3671349cc55cSDimitry Andric return buildCustomStateMachine(A); 3672bdd1243dSDimitry Andric } 3673fe6060f1SDimitry Andric 3674349cc55cSDimitry Andric return Changed; 3675fe6060f1SDimitry Andric } 3676fe6060f1SDimitry Andric 3677bdd1243dSDimitry Andric void insertInstructionGuardsHelper(Attributor &A) { 3678fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 3679fe6060f1SDimitry Andric 3680349cc55cSDimitry Andric auto CreateGuardedRegion = [&](Instruction *RegionStartI, 3681349cc55cSDimitry Andric Instruction *RegionEndI) { 3682349cc55cSDimitry Andric LoopInfo *LI = nullptr; 3683349cc55cSDimitry Andric DominatorTree *DT = nullptr; 3684349cc55cSDimitry Andric MemorySSAUpdater *MSU = nullptr; 3685349cc55cSDimitry Andric using InsertPointTy = OpenMPIRBuilder::InsertPointTy; 3686349cc55cSDimitry Andric 3687349cc55cSDimitry Andric BasicBlock *ParentBB = RegionStartI->getParent(); 3688349cc55cSDimitry Andric Function *Fn = ParentBB->getParent(); 3689349cc55cSDimitry Andric Module &M = *Fn->getParent(); 3690349cc55cSDimitry Andric 3691349cc55cSDimitry Andric // Create all the blocks and logic. 
3692349cc55cSDimitry Andric // ParentBB: 3693349cc55cSDimitry Andric // goto RegionCheckTidBB 3694349cc55cSDimitry Andric // RegionCheckTidBB: 3695349cc55cSDimitry Andric // Tid = __kmpc_hardware_thread_id() 3696349cc55cSDimitry Andric // if (Tid != 0) 3697349cc55cSDimitry Andric // goto RegionBarrierBB 3698349cc55cSDimitry Andric // RegionStartBB: 3699349cc55cSDimitry Andric // <execute instructions guarded> 3700349cc55cSDimitry Andric // goto RegionEndBB 3701349cc55cSDimitry Andric // RegionEndBB: 3702349cc55cSDimitry Andric // <store escaping values to shared mem> 3703349cc55cSDimitry Andric // goto RegionBarrierBB 3704349cc55cSDimitry Andric // RegionBarrierBB: 3705349cc55cSDimitry Andric // __kmpc_simple_barrier_spmd() 3706349cc55cSDimitry Andric // // second barrier is omitted if lacking escaping values. 3707349cc55cSDimitry Andric // <load escaping values from shared mem> 3708349cc55cSDimitry Andric // __kmpc_simple_barrier_spmd() 3709349cc55cSDimitry Andric // goto RegionExitBB 3710349cc55cSDimitry Andric // RegionExitBB: 3711349cc55cSDimitry Andric // <execute rest of instructions> 3712349cc55cSDimitry Andric 3713349cc55cSDimitry Andric BasicBlock *RegionEndBB = SplitBlock(ParentBB, RegionEndI->getNextNode(), 3714349cc55cSDimitry Andric DT, LI, MSU, "region.guarded.end"); 3715349cc55cSDimitry Andric BasicBlock *RegionBarrierBB = 3716349cc55cSDimitry Andric SplitBlock(RegionEndBB, &*RegionEndBB->getFirstInsertionPt(), DT, LI, 3717349cc55cSDimitry Andric MSU, "region.barrier"); 3718349cc55cSDimitry Andric BasicBlock *RegionExitBB = 3719349cc55cSDimitry Andric SplitBlock(RegionBarrierBB, &*RegionBarrierBB->getFirstInsertionPt(), 3720349cc55cSDimitry Andric DT, LI, MSU, "region.exit"); 3721349cc55cSDimitry Andric BasicBlock *RegionStartBB = 3722349cc55cSDimitry Andric SplitBlock(ParentBB, RegionStartI, DT, LI, MSU, "region.guarded"); 3723349cc55cSDimitry Andric 3724349cc55cSDimitry Andric assert(ParentBB->getUniqueSuccessor() == RegionStartBB && 3725349cc55cSDimitry Andric "Expected a different CFG"); 3726349cc55cSDimitry Andric 3727349cc55cSDimitry Andric BasicBlock *RegionCheckTidBB = SplitBlock( 3728349cc55cSDimitry Andric ParentBB, ParentBB->getTerminator(), DT, LI, MSU, "region.check.tid"); 3729349cc55cSDimitry Andric 3730349cc55cSDimitry Andric // Register basic blocks with the Attributor. 3731349cc55cSDimitry Andric A.registerManifestAddedBasicBlock(*RegionEndBB); 3732349cc55cSDimitry Andric A.registerManifestAddedBasicBlock(*RegionBarrierBB); 3733349cc55cSDimitry Andric A.registerManifestAddedBasicBlock(*RegionExitBB); 3734349cc55cSDimitry Andric A.registerManifestAddedBasicBlock(*RegionStartBB); 3735349cc55cSDimitry Andric A.registerManifestAddedBasicBlock(*RegionCheckTidBB); 3736349cc55cSDimitry Andric 3737349cc55cSDimitry Andric bool HasBroadcastValues = false; 3738349cc55cSDimitry Andric // Find escaping outputs from the guarded region to outside users and 3739349cc55cSDimitry Andric // broadcast their values to them. 
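// Illustrative sketch (hypothetical value %v, not from the original source):
//   region.guarded:  %v = add i32 %a, %b
//   region.exit:     ... use of %v ...
// The main thread stores %v into a shared-memory global named
// "v.guarded.output.alloc" at the end of the guarded region; after the
// barrier all threads reload it, so the outside use sees the broadcast value.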
3740349cc55cSDimitry Andric for (Instruction &I : *RegionStartBB) { 3741349cc55cSDimitry Andric SmallPtrSet<Instruction *, 4> OutsideUsers; 3742349cc55cSDimitry Andric for (User *Usr : I.users()) { 3743349cc55cSDimitry Andric Instruction &UsrI = *cast<Instruction>(Usr); 3744349cc55cSDimitry Andric if (UsrI.getParent() != RegionStartBB) 3745349cc55cSDimitry Andric OutsideUsers.insert(&UsrI); 3746349cc55cSDimitry Andric } 3747349cc55cSDimitry Andric 3748349cc55cSDimitry Andric if (OutsideUsers.empty()) 3749349cc55cSDimitry Andric continue; 3750349cc55cSDimitry Andric 3751349cc55cSDimitry Andric HasBroadcastValues = true; 3752349cc55cSDimitry Andric 3753349cc55cSDimitry Andric // Emit a global variable in shared memory to store the broadcasted 3754349cc55cSDimitry Andric // value. 3755349cc55cSDimitry Andric auto *SharedMem = new GlobalVariable( 3756349cc55cSDimitry Andric M, I.getType(), /* IsConstant */ false, 3757349cc55cSDimitry Andric GlobalValue::InternalLinkage, UndefValue::get(I.getType()), 3758349cc55cSDimitry Andric sanitizeForGlobalName( 3759349cc55cSDimitry Andric (I.getName() + ".guarded.output.alloc").str()), 3760349cc55cSDimitry Andric nullptr, GlobalValue::NotThreadLocal, 3761349cc55cSDimitry Andric static_cast<unsigned>(AddressSpace::Shared)); 3762349cc55cSDimitry Andric 3763349cc55cSDimitry Andric // Emit a store instruction to update the value. 3764349cc55cSDimitry Andric new StoreInst(&I, SharedMem, RegionEndBB->getTerminator()); 3765349cc55cSDimitry Andric 3766349cc55cSDimitry Andric LoadInst *LoadI = new LoadInst(I.getType(), SharedMem, 3767349cc55cSDimitry Andric I.getName() + ".guarded.output.load", 3768349cc55cSDimitry Andric RegionBarrierBB->getTerminator()); 3769349cc55cSDimitry Andric 3770349cc55cSDimitry Andric // Emit a load instruction and replace uses of the output value. 3771349cc55cSDimitry Andric for (Instruction *UsrI : OutsideUsers) 3772349cc55cSDimitry Andric UsrI->replaceUsesOfWith(&I, LoadI); 3773349cc55cSDimitry Andric } 3774349cc55cSDimitry Andric 3775349cc55cSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 3776349cc55cSDimitry Andric 3777349cc55cSDimitry Andric // Go to tid check BB in ParentBB. 
3778349cc55cSDimitry Andric const DebugLoc DL = ParentBB->getTerminator()->getDebugLoc(); 3779349cc55cSDimitry Andric ParentBB->getTerminator()->eraseFromParent(); 3780349cc55cSDimitry Andric OpenMPIRBuilder::LocationDescription Loc( 3781349cc55cSDimitry Andric InsertPointTy(ParentBB, ParentBB->end()), DL); 3782349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.updateToLocation(Loc); 378304eeddc0SDimitry Andric uint32_t SrcLocStrSize; 378404eeddc0SDimitry Andric auto *SrcLocStr = 378504eeddc0SDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateSrcLocStr(Loc, SrcLocStrSize); 378604eeddc0SDimitry Andric Value *Ident = 378704eeddc0SDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateIdent(SrcLocStr, SrcLocStrSize); 3788349cc55cSDimitry Andric BranchInst::Create(RegionCheckTidBB, ParentBB)->setDebugLoc(DL); 3789349cc55cSDimitry Andric 3790349cc55cSDimitry Andric // Add check for Tid in RegionCheckTidBB 3791349cc55cSDimitry Andric RegionCheckTidBB->getTerminator()->eraseFromParent(); 3792349cc55cSDimitry Andric OpenMPIRBuilder::LocationDescription LocRegionCheckTid( 3793349cc55cSDimitry Andric InsertPointTy(RegionCheckTidBB, RegionCheckTidBB->end()), DL); 3794349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.updateToLocation(LocRegionCheckTid); 3795349cc55cSDimitry Andric FunctionCallee HardwareTidFn = 3796349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 3797349cc55cSDimitry Andric M, OMPRTL___kmpc_get_hardware_thread_id_in_block); 379804eeddc0SDimitry Andric CallInst *Tid = 3799349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.Builder.CreateCall(HardwareTidFn, {}); 380004eeddc0SDimitry Andric Tid->setDebugLoc(DL); 380104eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(HardwareTidFn, Tid); 3802349cc55cSDimitry Andric Value *TidCheck = OMPInfoCache.OMPBuilder.Builder.CreateIsNull(Tid); 3803349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.Builder 3804349cc55cSDimitry Andric .CreateCondBr(TidCheck, RegionStartBB, RegionBarrierBB) 3805349cc55cSDimitry Andric ->setDebugLoc(DL); 3806349cc55cSDimitry Andric 3807349cc55cSDimitry Andric // First barrier for synchronization, ensures main thread has updated 3808349cc55cSDimitry Andric // values. 3809349cc55cSDimitry Andric FunctionCallee BarrierFn = 3810349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 3811349cc55cSDimitry Andric M, OMPRTL___kmpc_barrier_simple_spmd); 3812349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.updateToLocation(InsertPointTy( 3813349cc55cSDimitry Andric RegionBarrierBB, RegionBarrierBB->getFirstInsertionPt())); 381404eeddc0SDimitry Andric CallInst *Barrier = 381504eeddc0SDimitry Andric OMPInfoCache.OMPBuilder.Builder.CreateCall(BarrierFn, {Ident, Tid}); 381604eeddc0SDimitry Andric Barrier->setDebugLoc(DL); 381704eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(BarrierFn, Barrier); 3818349cc55cSDimitry Andric 3819349cc55cSDimitry Andric // Second barrier ensures workers have read broadcast values. 
382004eeddc0SDimitry Andric if (HasBroadcastValues) { 382104eeddc0SDimitry Andric CallInst *Barrier = CallInst::Create(BarrierFn, {Ident, Tid}, "", 382204eeddc0SDimitry Andric RegionBarrierBB->getTerminator()); 382304eeddc0SDimitry Andric Barrier->setDebugLoc(DL); 382404eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(BarrierFn, Barrier); 382504eeddc0SDimitry Andric } 3826349cc55cSDimitry Andric }; 3827349cc55cSDimitry Andric 3828349cc55cSDimitry Andric auto &AllocSharedRFI = OMPInfoCache.RFIs[OMPRTL___kmpc_alloc_shared]; 3829349cc55cSDimitry Andric SmallPtrSet<BasicBlock *, 8> Visited; 3830349cc55cSDimitry Andric for (Instruction *GuardedI : SPMDCompatibilityTracker) { 3831349cc55cSDimitry Andric BasicBlock *BB = GuardedI->getParent(); 3832349cc55cSDimitry Andric if (!Visited.insert(BB).second) 3833349cc55cSDimitry Andric continue; 3834349cc55cSDimitry Andric 3835349cc55cSDimitry Andric SmallVector<std::pair<Instruction *, Instruction *>> Reorders; 3836349cc55cSDimitry Andric Instruction *LastEffect = nullptr; 3837349cc55cSDimitry Andric BasicBlock::reverse_iterator IP = BB->rbegin(), IPEnd = BB->rend(); 3838349cc55cSDimitry Andric while (++IP != IPEnd) { 3839349cc55cSDimitry Andric if (!IP->mayHaveSideEffects() && !IP->mayReadFromMemory()) 3840349cc55cSDimitry Andric continue; 3841349cc55cSDimitry Andric Instruction *I = &*IP; 3842349cc55cSDimitry Andric if (OpenMPOpt::getCallIfRegularCall(*I, &AllocSharedRFI)) 3843349cc55cSDimitry Andric continue; 3844349cc55cSDimitry Andric if (!I->user_empty() || !SPMDCompatibilityTracker.contains(I)) { 3845349cc55cSDimitry Andric LastEffect = nullptr; 3846349cc55cSDimitry Andric continue; 3847349cc55cSDimitry Andric } 3848349cc55cSDimitry Andric if (LastEffect) 3849349cc55cSDimitry Andric Reorders.push_back({I, LastEffect}); 3850349cc55cSDimitry Andric LastEffect = &*IP; 3851349cc55cSDimitry Andric } 3852349cc55cSDimitry Andric for (auto &Reorder : Reorders) 3853349cc55cSDimitry Andric Reorder.first->moveBefore(Reorder.second); 3854349cc55cSDimitry Andric } 3855349cc55cSDimitry Andric 3856349cc55cSDimitry Andric SmallVector<std::pair<Instruction *, Instruction *>, 4> GuardedRegions; 3857349cc55cSDimitry Andric 3858349cc55cSDimitry Andric for (Instruction *GuardedI : SPMDCompatibilityTracker) { 3859349cc55cSDimitry Andric BasicBlock *BB = GuardedI->getParent(); 3860349cc55cSDimitry Andric auto *CalleeAA = A.lookupAAFor<AAKernelInfo>( 3861349cc55cSDimitry Andric IRPosition::function(*GuardedI->getFunction()), nullptr, 3862349cc55cSDimitry Andric DepClassTy::NONE); 3863349cc55cSDimitry Andric assert(CalleeAA != nullptr && "Expected Callee AAKernelInfo"); 3864349cc55cSDimitry Andric auto &CalleeAAFunction = *cast<AAKernelInfoFunction>(CalleeAA); 3865349cc55cSDimitry Andric // Continue if instruction is already guarded. 3866349cc55cSDimitry Andric if (CalleeAAFunction.getGuardedInstructions().contains(GuardedI)) 3867349cc55cSDimitry Andric continue; 3868349cc55cSDimitry Andric 3869349cc55cSDimitry Andric Instruction *GuardedRegionStart = nullptr, *GuardedRegionEnd = nullptr; 3870349cc55cSDimitry Andric for (Instruction &I : *BB) { 3871349cc55cSDimitry Andric // If instruction I needs to be guarded update the guarded region 3872349cc55cSDimitry Andric // bounds. 
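// Illustrative example (assumed block contents): for instructions
// [ i0, g1, g2, i1, g3, br ] where only g1, g2 and g3 require guarding, this
// scan yields the regions (g1, g2) and (g3, g3); i0, i1 and the terminator
// remain outside any guarded region.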
3873349cc55cSDimitry Andric if (SPMDCompatibilityTracker.contains(&I)) { 3874349cc55cSDimitry Andric CalleeAAFunction.getGuardedInstructions().insert(&I); 3875349cc55cSDimitry Andric if (GuardedRegionStart) 3876349cc55cSDimitry Andric GuardedRegionEnd = &I; 3877349cc55cSDimitry Andric else 3878349cc55cSDimitry Andric GuardedRegionStart = GuardedRegionEnd = &I; 3879349cc55cSDimitry Andric 3880349cc55cSDimitry Andric continue; 3881349cc55cSDimitry Andric } 3882349cc55cSDimitry Andric 3883349cc55cSDimitry Andric // Instruction I does not need guarding, store 3884349cc55cSDimitry Andric // any region found and reset bounds. 3885349cc55cSDimitry Andric if (GuardedRegionStart) { 3886349cc55cSDimitry Andric GuardedRegions.push_back( 3887349cc55cSDimitry Andric std::make_pair(GuardedRegionStart, GuardedRegionEnd)); 3888349cc55cSDimitry Andric GuardedRegionStart = nullptr; 3889349cc55cSDimitry Andric GuardedRegionEnd = nullptr; 3890349cc55cSDimitry Andric } 3891349cc55cSDimitry Andric } 3892349cc55cSDimitry Andric } 3893349cc55cSDimitry Andric 3894349cc55cSDimitry Andric for (auto &GR : GuardedRegions) 3895349cc55cSDimitry Andric CreateGuardedRegion(GR.first, GR.second); 3896bdd1243dSDimitry Andric } 3897bdd1243dSDimitry Andric 3898bdd1243dSDimitry Andric void forceSingleThreadPerWorkgroupHelper(Attributor &A) { 3899bdd1243dSDimitry Andric // Only allow 1 thread per workgroup to continue executing the user code. 3900bdd1243dSDimitry Andric // 3901bdd1243dSDimitry Andric // InitCB = __kmpc_target_init(...) 3902bdd1243dSDimitry Andric // ThreadIdInBlock = __kmpc_get_hardware_thread_id_in_block(); 3903bdd1243dSDimitry Andric // if (ThreadIdInBlock != 0) return; 3904bdd1243dSDimitry Andric // UserCode: 3905bdd1243dSDimitry Andric // // user code 3906bdd1243dSDimitry Andric // 3907bdd1243dSDimitry Andric auto &Ctx = getAnchorValue().getContext(); 3908bdd1243dSDimitry Andric Function *Kernel = getAssociatedFunction(); 3909bdd1243dSDimitry Andric assert(Kernel && "Expected an associated function!"); 3910bdd1243dSDimitry Andric 3911bdd1243dSDimitry Andric // Create block for user code to branch to from initial block. 3912bdd1243dSDimitry Andric BasicBlock *InitBB = KernelInitCB->getParent(); 3913bdd1243dSDimitry Andric BasicBlock *UserCodeBB = InitBB->splitBasicBlock( 3914bdd1243dSDimitry Andric KernelInitCB->getNextNode(), "main.thread.user_code"); 3915bdd1243dSDimitry Andric BasicBlock *ReturnBB = 3916bdd1243dSDimitry Andric BasicBlock::Create(Ctx, "exit.threads", Kernel, UserCodeBB); 3917bdd1243dSDimitry Andric 3918bdd1243dSDimitry Andric // Register blocks with attributor: 3919bdd1243dSDimitry Andric A.registerManifestAddedBasicBlock(*InitBB); 3920bdd1243dSDimitry Andric A.registerManifestAddedBasicBlock(*UserCodeBB); 3921bdd1243dSDimitry Andric A.registerManifestAddedBasicBlock(*ReturnBB); 3922bdd1243dSDimitry Andric 3923bdd1243dSDimitry Andric // Debug location: 3924bdd1243dSDimitry Andric const DebugLoc &DLoc = KernelInitCB->getDebugLoc(); 3925bdd1243dSDimitry Andric ReturnInst::Create(Ctx, ReturnBB)->setDebugLoc(DLoc); 3926bdd1243dSDimitry Andric InitBB->getTerminator()->eraseFromParent(); 3927bdd1243dSDimitry Andric 3928bdd1243dSDimitry Andric // Prepare call to OMPRTL___kmpc_get_hardware_thread_id_in_block. 
3929bdd1243dSDimitry Andric Module &M = *Kernel->getParent();
3930bdd1243dSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
3931bdd1243dSDimitry Andric FunctionCallee ThreadIdInBlockFn =
3932bdd1243dSDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction(
3933bdd1243dSDimitry Andric M, OMPRTL___kmpc_get_hardware_thread_id_in_block);
3934bdd1243dSDimitry Andric
3935bdd1243dSDimitry Andric // Get thread ID in block.
3936bdd1243dSDimitry Andric CallInst *ThreadIdInBlock =
3937bdd1243dSDimitry Andric CallInst::Create(ThreadIdInBlockFn, "thread_id.in.block", InitBB);
3938bdd1243dSDimitry Andric OMPInfoCache.setCallingConvention(ThreadIdInBlockFn, ThreadIdInBlock);
3939bdd1243dSDimitry Andric ThreadIdInBlock->setDebugLoc(DLoc);
3940bdd1243dSDimitry Andric
3941bdd1243dSDimitry Andric // Eliminate all threads in the block with ID not equal to 0:
3942bdd1243dSDimitry Andric Instruction *IsMainThread =
3943bdd1243dSDimitry Andric ICmpInst::Create(ICmpInst::ICmp, CmpInst::ICMP_NE, ThreadIdInBlock,
3944bdd1243dSDimitry Andric ConstantInt::get(ThreadIdInBlock->getType(), 0),
3945bdd1243dSDimitry Andric "thread.is_main", InitBB);
3946bdd1243dSDimitry Andric IsMainThread->setDebugLoc(DLoc);
3947bdd1243dSDimitry Andric BranchInst::Create(ReturnBB, UserCodeBB, IsMainThread, InitBB);
3948bdd1243dSDimitry Andric }
3949bdd1243dSDimitry Andric
3950bdd1243dSDimitry Andric bool changeToSPMDMode(Attributor &A, ChangeStatus &Changed) {
3951bdd1243dSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
3952bdd1243dSDimitry Andric
3953*1ac55f4cSDimitry Andric // We cannot change to SPMD mode if the runtime functions aren't available.
3954*1ac55f4cSDimitry Andric if (!OMPInfoCache.runtimeFnsAvailable(
3955*1ac55f4cSDimitry Andric {OMPRTL___kmpc_get_hardware_thread_id_in_block,
3956*1ac55f4cSDimitry Andric OMPRTL___kmpc_barrier_simple_spmd}))
3957*1ac55f4cSDimitry Andric return false;
3958*1ac55f4cSDimitry Andric
3959bdd1243dSDimitry Andric if (!SPMDCompatibilityTracker.isAssumed()) {
3960bdd1243dSDimitry Andric for (Instruction *NonCompatibleI : SPMDCompatibilityTracker) {
3961bdd1243dSDimitry Andric if (!NonCompatibleI)
3962bdd1243dSDimitry Andric continue;
3963bdd1243dSDimitry Andric
3964bdd1243dSDimitry Andric // Skip diagnostics on calls to known OpenMP runtime functions for now.
3965bdd1243dSDimitry Andric if (auto *CB = dyn_cast<CallBase>(NonCompatibleI))
3966bdd1243dSDimitry Andric if (OMPInfoCache.RTLFunctions.contains(CB->getCalledFunction()))
3967bdd1243dSDimitry Andric continue;
3968bdd1243dSDimitry Andric
3969bdd1243dSDimitry Andric auto Remark = [&](OptimizationRemarkAnalysis ORA) {
3970bdd1243dSDimitry Andric ORA << "Value has potential side effects preventing SPMD-mode "
3971bdd1243dSDimitry Andric "execution";
3972bdd1243dSDimitry Andric if (isa<CallBase>(NonCompatibleI)) {
3973bdd1243dSDimitry Andric ORA << ". 
Add `__attribute__((assume(\"ompx_spmd_amenable\")))` to " 3974bdd1243dSDimitry Andric "the called function to override"; 3975bdd1243dSDimitry Andric } 3976bdd1243dSDimitry Andric return ORA << "."; 3977bdd1243dSDimitry Andric }; 3978bdd1243dSDimitry Andric A.emitRemark<OptimizationRemarkAnalysis>(NonCompatibleI, "OMP121", 3979bdd1243dSDimitry Andric Remark); 3980bdd1243dSDimitry Andric 3981bdd1243dSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "SPMD-incompatible side-effect: " 3982bdd1243dSDimitry Andric << *NonCompatibleI << "\n"); 3983bdd1243dSDimitry Andric } 3984bdd1243dSDimitry Andric 3985bdd1243dSDimitry Andric return false; 3986bdd1243dSDimitry Andric } 3987bdd1243dSDimitry Andric 3988bdd1243dSDimitry Andric // Get the actual kernel, could be the caller of the anchor scope if we have 3989bdd1243dSDimitry Andric // a debug wrapper. 3990bdd1243dSDimitry Andric Function *Kernel = getAnchorScope(); 3991bdd1243dSDimitry Andric if (Kernel->hasLocalLinkage()) { 3992bdd1243dSDimitry Andric assert(Kernel->hasOneUse() && "Unexpected use of debug kernel wrapper."); 3993bdd1243dSDimitry Andric auto *CB = cast<CallBase>(Kernel->user_back()); 3994bdd1243dSDimitry Andric Kernel = CB->getCaller(); 3995bdd1243dSDimitry Andric } 3996bdd1243dSDimitry Andric assert(OMPInfoCache.Kernels.count(Kernel) && "Expected kernel function!"); 3997bdd1243dSDimitry Andric 3998bdd1243dSDimitry Andric // Check if the kernel is already in SPMD mode, if so, return success. 3999bdd1243dSDimitry Andric GlobalVariable *ExecMode = Kernel->getParent()->getGlobalVariable( 4000bdd1243dSDimitry Andric (Kernel->getName() + "_exec_mode").str()); 4001bdd1243dSDimitry Andric assert(ExecMode && "Kernel without exec mode?"); 4002bdd1243dSDimitry Andric assert(ExecMode->getInitializer() && "ExecMode doesn't have initializer!"); 4003bdd1243dSDimitry Andric 4004bdd1243dSDimitry Andric // Set the global exec mode flag to indicate SPMD-Generic mode. 4005bdd1243dSDimitry Andric assert(isa<ConstantInt>(ExecMode->getInitializer()) && 4006bdd1243dSDimitry Andric "ExecMode is not an integer!"); 4007bdd1243dSDimitry Andric const int8_t ExecModeVal = 4008bdd1243dSDimitry Andric cast<ConstantInt>(ExecMode->getInitializer())->getSExtValue(); 4009bdd1243dSDimitry Andric if (ExecModeVal != OMP_TGT_EXEC_MODE_GENERIC) 4010bdd1243dSDimitry Andric return true; 4011bdd1243dSDimitry Andric 4012bdd1243dSDimitry Andric // We will now unconditionally modify the IR, indicate a change. 4013bdd1243dSDimitry Andric Changed = ChangeStatus::CHANGED; 4014bdd1243dSDimitry Andric 4015bdd1243dSDimitry Andric // Do not use instruction guards when no parallel is present inside 4016bdd1243dSDimitry Andric // the target region. 4017bdd1243dSDimitry Andric if (mayContainParallelRegion()) 4018bdd1243dSDimitry Andric insertInstructionGuardsHelper(A); 4019bdd1243dSDimitry Andric else 4020bdd1243dSDimitry Andric forceSingleThreadPerWorkgroupHelper(A); 4021349cc55cSDimitry Andric 4022349cc55cSDimitry Andric // Adjust the global exec mode flag that tells the runtime what mode this 4023349cc55cSDimitry Andric // kernel is executed in. 
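// Note that the new initializer keeps the generic bit and only ORs in
// OMP_TGT_EXEC_MODE_GENERIC_SPMD, so the resulting mode stays
// distinguishable from a kernel that was emitted in SPMD mode to begin with.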
4024349cc55cSDimitry Andric assert(ExecModeVal == OMP_TGT_EXEC_MODE_GENERIC && 4025349cc55cSDimitry Andric "Initially non-SPMD kernel has SPMD exec mode!"); 4026fe6060f1SDimitry Andric ExecMode->setInitializer( 4027349cc55cSDimitry Andric ConstantInt::get(ExecMode->getInitializer()->getType(), 4028349cc55cSDimitry Andric ExecModeVal | OMP_TGT_EXEC_MODE_GENERIC_SPMD)); 4029fe6060f1SDimitry Andric 4030fe6060f1SDimitry Andric // Next rewrite the init and deinit calls to indicate we use SPMD-mode now. 4031349cc55cSDimitry Andric const int InitModeArgNo = 1; 4032349cc55cSDimitry Andric const int DeinitModeArgNo = 1; 4033fe6060f1SDimitry Andric const int InitUseStateMachineArgNo = 2; 4034fe6060f1SDimitry Andric 4035fe6060f1SDimitry Andric auto &Ctx = getAnchorValue().getContext(); 4036349cc55cSDimitry Andric A.changeUseAfterManifest( 4037349cc55cSDimitry Andric KernelInitCB->getArgOperandUse(InitModeArgNo), 4038349cc55cSDimitry Andric *ConstantInt::getSigned(IntegerType::getInt8Ty(Ctx), 4039349cc55cSDimitry Andric OMP_TGT_EXEC_MODE_SPMD)); 4040fe6060f1SDimitry Andric A.changeUseAfterManifest( 4041fe6060f1SDimitry Andric KernelInitCB->getArgOperandUse(InitUseStateMachineArgNo), 404204eeddc0SDimitry Andric *ConstantInt::getBool(Ctx, false)); 4043fe6060f1SDimitry Andric A.changeUseAfterManifest( 4044349cc55cSDimitry Andric KernelDeinitCB->getArgOperandUse(DeinitModeArgNo), 4045349cc55cSDimitry Andric *ConstantInt::getSigned(IntegerType::getInt8Ty(Ctx), 4046349cc55cSDimitry Andric OMP_TGT_EXEC_MODE_SPMD)); 4047fe6060f1SDimitry Andric 4048fe6060f1SDimitry Andric ++NumOpenMPTargetRegionKernelsSPMD; 4049fe6060f1SDimitry Andric 4050fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemark OR) { 4051fe6060f1SDimitry Andric return OR << "Transformed generic-mode kernel to SPMD-mode."; 4052fe6060f1SDimitry Andric }; 4053fe6060f1SDimitry Andric A.emitRemark<OptimizationRemark>(KernelInitCB, "OMP120", Remark); 4054fe6060f1SDimitry Andric return true; 4055fe6060f1SDimitry Andric }; 4056fe6060f1SDimitry Andric 4057fe6060f1SDimitry Andric ChangeStatus buildCustomStateMachine(Attributor &A) { 4058349cc55cSDimitry Andric // If we have disabled state machine rewrites, don't make a custom one 4059349cc55cSDimitry Andric if (DisableOpenMPOptStateMachineRewrite) 4060349cc55cSDimitry Andric return ChangeStatus::UNCHANGED; 4061fe6060f1SDimitry Andric 4062349cc55cSDimitry Andric // Don't rewrite the state machine if we are not in a valid state. 4063349cc55cSDimitry Andric if (!ReachedKnownParallelRegions.isValidState()) 4064349cc55cSDimitry Andric return ChangeStatus::UNCHANGED; 4065349cc55cSDimitry Andric 4066*1ac55f4cSDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 4067*1ac55f4cSDimitry Andric if (!OMPInfoCache.runtimeFnsAvailable( 4068*1ac55f4cSDimitry Andric {OMPRTL___kmpc_get_hardware_num_threads_in_block, 4069*1ac55f4cSDimitry Andric OMPRTL___kmpc_get_warp_size, OMPRTL___kmpc_barrier_simple_generic, 4070*1ac55f4cSDimitry Andric OMPRTL___kmpc_kernel_parallel, OMPRTL___kmpc_kernel_end_parallel})) 4071*1ac55f4cSDimitry Andric return ChangeStatus::UNCHANGED; 4072*1ac55f4cSDimitry Andric 4073349cc55cSDimitry Andric const int InitModeArgNo = 1; 4074fe6060f1SDimitry Andric const int InitUseStateMachineArgNo = 2; 4075fe6060f1SDimitry Andric 4076fe6060f1SDimitry Andric // Check if the current configuration is non-SPMD and generic state machine. 
4077fe6060f1SDimitry Andric // If we already have SPMD mode or a custom state machine we do not need to 4078fe6060f1SDimitry Andric // go any further. If it is anything but a constant something is weird and 4079fe6060f1SDimitry Andric // we give up. 4080fe6060f1SDimitry Andric ConstantInt *UseStateMachine = dyn_cast<ConstantInt>( 4081fe6060f1SDimitry Andric KernelInitCB->getArgOperand(InitUseStateMachineArgNo)); 4082349cc55cSDimitry Andric ConstantInt *Mode = 4083349cc55cSDimitry Andric dyn_cast<ConstantInt>(KernelInitCB->getArgOperand(InitModeArgNo)); 4084fe6060f1SDimitry Andric 4085fe6060f1SDimitry Andric // If we are stuck with generic mode, try to create a custom device (=GPU) 4086fe6060f1SDimitry Andric // state machine which is specialized for the parallel regions that are 4087fe6060f1SDimitry Andric // reachable by the kernel. 4088349cc55cSDimitry Andric if (!UseStateMachine || UseStateMachine->isZero() || !Mode || 4089349cc55cSDimitry Andric (Mode->getSExtValue() & OMP_TGT_EXEC_MODE_SPMD)) 4090fe6060f1SDimitry Andric return ChangeStatus::UNCHANGED; 4091fe6060f1SDimitry Andric 4092fe6060f1SDimitry Andric // If not SPMD mode, indicate we use a custom state machine now. 4093fe6060f1SDimitry Andric auto &Ctx = getAnchorValue().getContext(); 409404eeddc0SDimitry Andric auto *FalseVal = ConstantInt::getBool(Ctx, false); 4095fe6060f1SDimitry Andric A.changeUseAfterManifest( 4096fe6060f1SDimitry Andric KernelInitCB->getArgOperandUse(InitUseStateMachineArgNo), *FalseVal); 4097fe6060f1SDimitry Andric 4098fe6060f1SDimitry Andric // If we don't actually need a state machine we are done here. This can 4099fe6060f1SDimitry Andric // happen if there simply are no parallel regions. In the resulting kernel 4100fe6060f1SDimitry Andric // all worker threads will simply exit right away, leaving the main thread 4101fe6060f1SDimitry Andric // to do the work alone. 4102349cc55cSDimitry Andric if (!mayContainParallelRegion()) { 4103fe6060f1SDimitry Andric ++NumOpenMPTargetRegionKernelsWithoutStateMachine; 4104fe6060f1SDimitry Andric 4105fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemark OR) { 4106fe6060f1SDimitry Andric return OR << "Removing unused state machine from generic-mode kernel."; 4107fe6060f1SDimitry Andric }; 4108fe6060f1SDimitry Andric A.emitRemark<OptimizationRemark>(KernelInitCB, "OMP130", Remark); 4109fe6060f1SDimitry Andric 4110fe6060f1SDimitry Andric return ChangeStatus::CHANGED; 4111fe6060f1SDimitry Andric } 4112fe6060f1SDimitry Andric 4113fe6060f1SDimitry Andric // Keep track in the statistics of our new shiny custom state machine. 
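// A fallback, that is an indirect call through the work function pointer,
// is only needed if the kernel might reach parallel regions we could not
// identify statically.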
4114fe6060f1SDimitry Andric if (ReachedUnknownParallelRegions.empty()) { 4115fe6060f1SDimitry Andric ++NumOpenMPTargetRegionKernelsCustomStateMachineWithoutFallback; 4116fe6060f1SDimitry Andric 4117fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemark OR) { 4118fe6060f1SDimitry Andric return OR << "Rewriting generic-mode kernel with a customized state " 4119fe6060f1SDimitry Andric "machine."; 4120fe6060f1SDimitry Andric }; 4121fe6060f1SDimitry Andric A.emitRemark<OptimizationRemark>(KernelInitCB, "OMP131", Remark); 4122fe6060f1SDimitry Andric } else { 4123fe6060f1SDimitry Andric ++NumOpenMPTargetRegionKernelsCustomStateMachineWithFallback; 4124fe6060f1SDimitry Andric 4125fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemarkAnalysis OR) { 4126fe6060f1SDimitry Andric return OR << "Generic-mode kernel is executed with a customized state " 4127fe6060f1SDimitry Andric "machine that requires a fallback."; 4128fe6060f1SDimitry Andric }; 4129fe6060f1SDimitry Andric A.emitRemark<OptimizationRemarkAnalysis>(KernelInitCB, "OMP132", Remark); 4130fe6060f1SDimitry Andric 4131fe6060f1SDimitry Andric // Tell the user why we ended up with a fallback. 4132fe6060f1SDimitry Andric for (CallBase *UnknownParallelRegionCB : ReachedUnknownParallelRegions) { 4133fe6060f1SDimitry Andric if (!UnknownParallelRegionCB) 4134fe6060f1SDimitry Andric continue; 4135fe6060f1SDimitry Andric auto Remark = [&](OptimizationRemarkAnalysis ORA) { 4136fe6060f1SDimitry Andric return ORA << "Call may contain unknown parallel regions. Use " 4137fe6060f1SDimitry Andric << "`__attribute__((assume(\"omp_no_parallelism\")))` to " 4138fe6060f1SDimitry Andric "override."; 4139fe6060f1SDimitry Andric }; 4140fe6060f1SDimitry Andric A.emitRemark<OptimizationRemarkAnalysis>(UnknownParallelRegionCB, 4141fe6060f1SDimitry Andric "OMP133", Remark); 4142fe6060f1SDimitry Andric } 4143fe6060f1SDimitry Andric } 4144fe6060f1SDimitry Andric 4145fe6060f1SDimitry Andric // Create all the blocks: 4146fe6060f1SDimitry Andric // 4147fe6060f1SDimitry Andric // InitCB = __kmpc_target_init(...) 4148349cc55cSDimitry Andric // BlockHwSize = 4149349cc55cSDimitry Andric // __kmpc_get_hardware_num_threads_in_block(); 4150349cc55cSDimitry Andric // WarpSize = __kmpc_get_warp_size(); 4151349cc55cSDimitry Andric // BlockSize = BlockHwSize - WarpSize; 4152fb03ea46SDimitry Andric // IsWorkerCheckBB: bool IsWorker = InitCB != -1; 4153fe6060f1SDimitry Andric // if (IsWorker) { 4154fb03ea46SDimitry Andric // if (InitCB >= BlockSize) return; 4155349cc55cSDimitry Andric // SMBeginBB: __kmpc_barrier_simple_generic(...); 4156fe6060f1SDimitry Andric // void *WorkFn; 4157fe6060f1SDimitry Andric // bool Active = __kmpc_kernel_parallel(&WorkFn); 4158fe6060f1SDimitry Andric // if (!WorkFn) return; 4159fe6060f1SDimitry Andric // SMIsActiveCheckBB: if (Active) { 4160fe6060f1SDimitry Andric // SMIfCascadeCurrentBB: if (WorkFn == <ParFn0>) 4161fe6060f1SDimitry Andric // ParFn0(...); 4162fe6060f1SDimitry Andric // SMIfCascadeCurrentBB: else if (WorkFn == <ParFn1>) 4163fe6060f1SDimitry Andric // ParFn1(...); 4164fe6060f1SDimitry Andric // ... 
4165fe6060f1SDimitry Andric // SMIfCascadeCurrentBB: else 4166fe6060f1SDimitry Andric // ((WorkFnTy*)WorkFn)(...); 4167fe6060f1SDimitry Andric // SMEndParallelBB: __kmpc_kernel_end_parallel(...); 4168fe6060f1SDimitry Andric // } 4169349cc55cSDimitry Andric // SMDoneBB: __kmpc_barrier_simple_generic(...); 4170fe6060f1SDimitry Andric // goto SMBeginBB; 4171fe6060f1SDimitry Andric // } 4172fe6060f1SDimitry Andric // UserCodeEntryBB: // user code 4173fe6060f1SDimitry Andric // __kmpc_target_deinit(...) 4174fe6060f1SDimitry Andric // 4175fe6060f1SDimitry Andric Function *Kernel = getAssociatedFunction(); 4176fe6060f1SDimitry Andric assert(Kernel && "Expected an associated function!"); 4177fe6060f1SDimitry Andric 4178fe6060f1SDimitry Andric BasicBlock *InitBB = KernelInitCB->getParent(); 4179fe6060f1SDimitry Andric BasicBlock *UserCodeEntryBB = InitBB->splitBasicBlock( 4180fe6060f1SDimitry Andric KernelInitCB->getNextNode(), "thread.user_code.check"); 4181349cc55cSDimitry Andric BasicBlock *IsWorkerCheckBB = 4182349cc55cSDimitry Andric BasicBlock::Create(Ctx, "is_worker_check", Kernel, UserCodeEntryBB); 4183fe6060f1SDimitry Andric BasicBlock *StateMachineBeginBB = BasicBlock::Create( 4184fe6060f1SDimitry Andric Ctx, "worker_state_machine.begin", Kernel, UserCodeEntryBB); 4185fe6060f1SDimitry Andric BasicBlock *StateMachineFinishedBB = BasicBlock::Create( 4186fe6060f1SDimitry Andric Ctx, "worker_state_machine.finished", Kernel, UserCodeEntryBB); 4187fe6060f1SDimitry Andric BasicBlock *StateMachineIsActiveCheckBB = BasicBlock::Create( 4188fe6060f1SDimitry Andric Ctx, "worker_state_machine.is_active.check", Kernel, UserCodeEntryBB); 4189fe6060f1SDimitry Andric BasicBlock *StateMachineIfCascadeCurrentBB = 4190fe6060f1SDimitry Andric BasicBlock::Create(Ctx, "worker_state_machine.parallel_region.check", 4191fe6060f1SDimitry Andric Kernel, UserCodeEntryBB); 4192fe6060f1SDimitry Andric BasicBlock *StateMachineEndParallelBB = 4193fe6060f1SDimitry Andric BasicBlock::Create(Ctx, "worker_state_machine.parallel_region.end", 4194fe6060f1SDimitry Andric Kernel, UserCodeEntryBB); 4195fe6060f1SDimitry Andric BasicBlock *StateMachineDoneBarrierBB = BasicBlock::Create( 4196fe6060f1SDimitry Andric Ctx, "worker_state_machine.done.barrier", Kernel, UserCodeEntryBB); 4197fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*InitBB); 4198fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*UserCodeEntryBB); 4199349cc55cSDimitry Andric A.registerManifestAddedBasicBlock(*IsWorkerCheckBB); 4200fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*StateMachineBeginBB); 4201fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*StateMachineFinishedBB); 4202fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*StateMachineIsActiveCheckBB); 4203fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*StateMachineIfCascadeCurrentBB); 4204fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*StateMachineEndParallelBB); 4205fe6060f1SDimitry Andric A.registerManifestAddedBasicBlock(*StateMachineDoneBarrierBB); 4206fe6060f1SDimitry Andric 4207fe6060f1SDimitry Andric const DebugLoc &DLoc = KernelInitCB->getDebugLoc(); 4208fe6060f1SDimitry Andric ReturnInst::Create(Ctx, StateMachineFinishedBB)->setDebugLoc(DLoc); 4209fe6060f1SDimitry Andric InitBB->getTerminator()->eraseFromParent(); 4210349cc55cSDimitry Andric 4211fb03ea46SDimitry Andric Instruction *IsWorker = 4212fb03ea46SDimitry Andric ICmpInst::Create(ICmpInst::ICmp, llvm::CmpInst::ICMP_NE, KernelInitCB, 4213fb03ea46SDimitry Andric 
ConstantInt::get(KernelInitCB->getType(), -1), 4214fb03ea46SDimitry Andric "thread.is_worker", InitBB); 4215fb03ea46SDimitry Andric IsWorker->setDebugLoc(DLoc); 4216fb03ea46SDimitry Andric BranchInst::Create(IsWorkerCheckBB, UserCodeEntryBB, IsWorker, InitBB); 4217fb03ea46SDimitry Andric 4218349cc55cSDimitry Andric Module &M = *Kernel->getParent(); 4219349cc55cSDimitry Andric FunctionCallee BlockHwSizeFn = 4220349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 4221349cc55cSDimitry Andric M, OMPRTL___kmpc_get_hardware_num_threads_in_block); 4222349cc55cSDimitry Andric FunctionCallee WarpSizeFn = 4223349cc55cSDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 4224349cc55cSDimitry Andric M, OMPRTL___kmpc_get_warp_size); 422504eeddc0SDimitry Andric CallInst *BlockHwSize = 4226fb03ea46SDimitry Andric CallInst::Create(BlockHwSizeFn, "block.hw_size", IsWorkerCheckBB); 422704eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(BlockHwSizeFn, BlockHwSize); 4228349cc55cSDimitry Andric BlockHwSize->setDebugLoc(DLoc); 4229fb03ea46SDimitry Andric CallInst *WarpSize = 4230fb03ea46SDimitry Andric CallInst::Create(WarpSizeFn, "warp.size", IsWorkerCheckBB); 423104eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(WarpSizeFn, WarpSize); 4232349cc55cSDimitry Andric WarpSize->setDebugLoc(DLoc); 4233fb03ea46SDimitry Andric Instruction *BlockSize = BinaryOperator::CreateSub( 4234fb03ea46SDimitry Andric BlockHwSize, WarpSize, "block.size", IsWorkerCheckBB); 4235349cc55cSDimitry Andric BlockSize->setDebugLoc(DLoc); 4236fb03ea46SDimitry Andric Instruction *IsMainOrWorker = ICmpInst::Create( 4237fb03ea46SDimitry Andric ICmpInst::ICmp, llvm::CmpInst::ICMP_SLT, KernelInitCB, BlockSize, 4238fb03ea46SDimitry Andric "thread.is_main_or_worker", IsWorkerCheckBB); 4239349cc55cSDimitry Andric IsMainOrWorker->setDebugLoc(DLoc); 4240fb03ea46SDimitry Andric BranchInst::Create(StateMachineBeginBB, StateMachineFinishedBB, 4241fb03ea46SDimitry Andric IsMainOrWorker, IsWorkerCheckBB); 42428c6f6c0cSDimitry Andric 4243fe6060f1SDimitry Andric // Create local storage for the work function pointer. 
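// The alloca is placed in the kernel entry block (making it a static
// alloca) and uses the target's alloca address space; it is cast to the
// generic address space further down if the two differ.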
42448c6f6c0cSDimitry Andric const DataLayout &DL = M.getDataLayout(); 4245fe6060f1SDimitry Andric Type *VoidPtrTy = Type::getInt8PtrTy(Ctx); 42468c6f6c0cSDimitry Andric Instruction *WorkFnAI = 42478c6f6c0cSDimitry Andric new AllocaInst(VoidPtrTy, DL.getAllocaAddrSpace(), nullptr, 42488c6f6c0cSDimitry Andric "worker.work_fn.addr", &Kernel->getEntryBlock().front()); 4249fe6060f1SDimitry Andric WorkFnAI->setDebugLoc(DLoc); 4250fe6060f1SDimitry Andric 4251fe6060f1SDimitry Andric OMPInfoCache.OMPBuilder.updateToLocation( 4252fe6060f1SDimitry Andric OpenMPIRBuilder::LocationDescription( 4253fe6060f1SDimitry Andric IRBuilder<>::InsertPoint(StateMachineBeginBB, 4254fe6060f1SDimitry Andric StateMachineBeginBB->end()), 4255fe6060f1SDimitry Andric DLoc)); 4256fe6060f1SDimitry Andric 4257fe6060f1SDimitry Andric Value *Ident = KernelInitCB->getArgOperand(0); 4258fe6060f1SDimitry Andric Value *GTid = KernelInitCB; 4259fe6060f1SDimitry Andric 4260fe6060f1SDimitry Andric FunctionCallee BarrierFn = 4261fe6060f1SDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 4262349cc55cSDimitry Andric M, OMPRTL___kmpc_barrier_simple_generic); 426304eeddc0SDimitry Andric CallInst *Barrier = 426404eeddc0SDimitry Andric CallInst::Create(BarrierFn, {Ident, GTid}, "", StateMachineBeginBB); 426504eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(BarrierFn, Barrier); 426604eeddc0SDimitry Andric Barrier->setDebugLoc(DLoc); 4267fe6060f1SDimitry Andric 42688c6f6c0cSDimitry Andric if (WorkFnAI->getType()->getPointerAddressSpace() != 42698c6f6c0cSDimitry Andric (unsigned int)AddressSpace::Generic) { 42708c6f6c0cSDimitry Andric WorkFnAI = new AddrSpaceCastInst( 42718c6f6c0cSDimitry Andric WorkFnAI, 42728c6f6c0cSDimitry Andric PointerType::getWithSamePointeeType( 42738c6f6c0cSDimitry Andric cast<PointerType>(WorkFnAI->getType()), 42748c6f6c0cSDimitry Andric (unsigned int)AddressSpace::Generic), 42758c6f6c0cSDimitry Andric WorkFnAI->getName() + ".generic", StateMachineBeginBB); 42768c6f6c0cSDimitry Andric WorkFnAI->setDebugLoc(DLoc); 42778c6f6c0cSDimitry Andric } 42788c6f6c0cSDimitry Andric 4279fe6060f1SDimitry Andric FunctionCallee KernelParallelFn = 4280fe6060f1SDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 4281fe6060f1SDimitry Andric M, OMPRTL___kmpc_kernel_parallel); 428204eeddc0SDimitry Andric CallInst *IsActiveWorker = CallInst::Create( 4283fe6060f1SDimitry Andric KernelParallelFn, {WorkFnAI}, "worker.is_active", StateMachineBeginBB); 428404eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(KernelParallelFn, IsActiveWorker); 4285fe6060f1SDimitry Andric IsActiveWorker->setDebugLoc(DLoc); 4286fe6060f1SDimitry Andric Instruction *WorkFn = new LoadInst(VoidPtrTy, WorkFnAI, "worker.work_fn", 4287fe6060f1SDimitry Andric StateMachineBeginBB); 4288fe6060f1SDimitry Andric WorkFn->setDebugLoc(DLoc); 4289fe6060f1SDimitry Andric 4290fe6060f1SDimitry Andric FunctionType *ParallelRegionFnTy = FunctionType::get( 4291fe6060f1SDimitry Andric Type::getVoidTy(Ctx), {Type::getInt16Ty(Ctx), Type::getInt32Ty(Ctx)}, 4292fe6060f1SDimitry Andric false); 4293fe6060f1SDimitry Andric Value *WorkFnCast = BitCastInst::CreatePointerBitCastOrAddrSpaceCast( 4294fe6060f1SDimitry Andric WorkFn, ParallelRegionFnTy->getPointerTo(), "worker.work_fn.addr_cast", 4295fe6060f1SDimitry Andric StateMachineBeginBB); 4296fe6060f1SDimitry Andric 4297fe6060f1SDimitry Andric Instruction *IsDone = 4298fe6060f1SDimitry Andric ICmpInst::Create(ICmpInst::ICmp, llvm::CmpInst::ICMP_EQ, WorkFn, 4299fe6060f1SDimitry Andric 
Constant::getNullValue(VoidPtrTy), "worker.is_done", 4300fe6060f1SDimitry Andric StateMachineBeginBB); 4301fe6060f1SDimitry Andric IsDone->setDebugLoc(DLoc); 4302fe6060f1SDimitry Andric BranchInst::Create(StateMachineFinishedBB, StateMachineIsActiveCheckBB, 4303fe6060f1SDimitry Andric IsDone, StateMachineBeginBB) 4304fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4305fe6060f1SDimitry Andric 4306fe6060f1SDimitry Andric BranchInst::Create(StateMachineIfCascadeCurrentBB, 4307fe6060f1SDimitry Andric StateMachineDoneBarrierBB, IsActiveWorker, 4308fe6060f1SDimitry Andric StateMachineIsActiveCheckBB) 4309fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4310fe6060f1SDimitry Andric 4311fe6060f1SDimitry Andric Value *ZeroArg = 4312fe6060f1SDimitry Andric Constant::getNullValue(ParallelRegionFnTy->getParamType(0)); 4313fe6060f1SDimitry Andric 4314fe6060f1SDimitry Andric // Now that we have most of the CFG skeleton it is time for the if-cascade 4315fe6060f1SDimitry Andric // that checks the function pointer we got from the runtime against the 4316fe6060f1SDimitry Andric // parallel regions we expect, if there are any. 4317349cc55cSDimitry Andric for (int I = 0, E = ReachedKnownParallelRegions.size(); I < E; ++I) { 4318349cc55cSDimitry Andric auto *ParallelRegion = ReachedKnownParallelRegions[I]; 4319fe6060f1SDimitry Andric BasicBlock *PRExecuteBB = BasicBlock::Create( 4320fe6060f1SDimitry Andric Ctx, "worker_state_machine.parallel_region.execute", Kernel, 4321fe6060f1SDimitry Andric StateMachineEndParallelBB); 4322fe6060f1SDimitry Andric CallInst::Create(ParallelRegion, {ZeroArg, GTid}, "", PRExecuteBB) 4323fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4324fe6060f1SDimitry Andric BranchInst::Create(StateMachineEndParallelBB, PRExecuteBB) 4325fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4326fe6060f1SDimitry Andric 4327fe6060f1SDimitry Andric BasicBlock *PRNextBB = 4328fe6060f1SDimitry Andric BasicBlock::Create(Ctx, "worker_state_machine.parallel_region.check", 4329fe6060f1SDimitry Andric Kernel, StateMachineEndParallelBB); 4330fe6060f1SDimitry Andric 4331fe6060f1SDimitry Andric // Check if we need to compare the pointer at all or if we can just 4332fe6060f1SDimitry Andric // call the parallel region function. 4333fe6060f1SDimitry Andric Value *IsPR; 4334349cc55cSDimitry Andric if (I + 1 < E || !ReachedUnknownParallelRegions.empty()) { 4335fe6060f1SDimitry Andric Instruction *CmpI = ICmpInst::Create( 4336fe6060f1SDimitry Andric ICmpInst::ICmp, llvm::CmpInst::ICMP_EQ, WorkFnCast, ParallelRegion, 4337fe6060f1SDimitry Andric "worker.check_parallel_region", StateMachineIfCascadeCurrentBB); 4338fe6060f1SDimitry Andric CmpI->setDebugLoc(DLoc); 4339fe6060f1SDimitry Andric IsPR = CmpI; 4340fe6060f1SDimitry Andric } else { 4341fe6060f1SDimitry Andric IsPR = ConstantInt::getTrue(Ctx); 4342fe6060f1SDimitry Andric } 4343fe6060f1SDimitry Andric 4344fe6060f1SDimitry Andric BranchInst::Create(PRExecuteBB, PRNextBB, IsPR, 4345fe6060f1SDimitry Andric StateMachineIfCascadeCurrentBB) 4346fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4347fe6060f1SDimitry Andric StateMachineIfCascadeCurrentBB = PRNextBB; 4348fe6060f1SDimitry Andric } 4349fe6060f1SDimitry Andric 4350fe6060f1SDimitry Andric // At the end of the if-cascade we place the indirect function pointer call 4351fe6060f1SDimitry Andric // in case we might need it, that is if there can be parallel regions we 4352fe6060f1SDimitry Andric // have not handled in the if-cascade above. 
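// The fallback call is made through ParallelRegionFnTy, i.e., with the same
// (i16, i32) parameters used for the known outlined parallel regions above.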
4353fe6060f1SDimitry Andric if (!ReachedUnknownParallelRegions.empty()) { 4354fe6060f1SDimitry Andric StateMachineIfCascadeCurrentBB->setName( 4355fe6060f1SDimitry Andric "worker_state_machine.parallel_region.fallback.execute"); 4356fe6060f1SDimitry Andric CallInst::Create(ParallelRegionFnTy, WorkFnCast, {ZeroArg, GTid}, "", 4357fe6060f1SDimitry Andric StateMachineIfCascadeCurrentBB) 4358fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4359fe6060f1SDimitry Andric } 4360fe6060f1SDimitry Andric BranchInst::Create(StateMachineEndParallelBB, 4361fe6060f1SDimitry Andric StateMachineIfCascadeCurrentBB) 4362fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4363fe6060f1SDimitry Andric 436404eeddc0SDimitry Andric FunctionCallee EndParallelFn = 436504eeddc0SDimitry Andric OMPInfoCache.OMPBuilder.getOrCreateRuntimeFunction( 436604eeddc0SDimitry Andric M, OMPRTL___kmpc_kernel_end_parallel); 436704eeddc0SDimitry Andric CallInst *EndParallel = 436804eeddc0SDimitry Andric CallInst::Create(EndParallelFn, {}, "", StateMachineEndParallelBB); 436904eeddc0SDimitry Andric OMPInfoCache.setCallingConvention(EndParallelFn, EndParallel); 437004eeddc0SDimitry Andric EndParallel->setDebugLoc(DLoc); 4371fe6060f1SDimitry Andric BranchInst::Create(StateMachineDoneBarrierBB, StateMachineEndParallelBB) 4372fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4373fe6060f1SDimitry Andric 4374fe6060f1SDimitry Andric CallInst::Create(BarrierFn, {Ident, GTid}, "", StateMachineDoneBarrierBB) 4375fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4376fe6060f1SDimitry Andric BranchInst::Create(StateMachineBeginBB, StateMachineDoneBarrierBB) 4377fe6060f1SDimitry Andric ->setDebugLoc(DLoc); 4378fe6060f1SDimitry Andric 4379fe6060f1SDimitry Andric return ChangeStatus::CHANGED; 4380fe6060f1SDimitry Andric } 4381fe6060f1SDimitry Andric 4382fe6060f1SDimitry Andric /// Fixpoint iteration update function. Will be called every time a dependence 4383fe6060f1SDimitry Andric /// changed its state (and in the beginning). 4384fe6060f1SDimitry Andric ChangeStatus updateImpl(Attributor &A) override { 4385fe6060f1SDimitry Andric KernelInfoState StateBefore = getState(); 4386fe6060f1SDimitry Andric 4387fe6060f1SDimitry Andric // Callback to check a read/write instruction. 4388fe6060f1SDimitry Andric auto CheckRWInst = [&](Instruction &I) { 4389fe6060f1SDimitry Andric // We handle calls later. 4390fe6060f1SDimitry Andric if (isa<CallBase>(I)) 4391fe6060f1SDimitry Andric return true; 4392fe6060f1SDimitry Andric // We only care about write effects. 4393fe6060f1SDimitry Andric if (!I.mayWriteToMemory()) 4394fe6060f1SDimitry Andric return true; 4395fe6060f1SDimitry Andric if (auto *SI = dyn_cast<StoreInst>(&I)) { 4396bdd1243dSDimitry Andric const auto &UnderlyingObjsAA = A.getAAFor<AAUnderlyingObjects>( 4397bdd1243dSDimitry Andric *this, IRPosition::value(*SI->getPointerOperand()), 4398bdd1243dSDimitry Andric DepClassTy::OPTIONAL); 4399349cc55cSDimitry Andric auto &HS = A.getAAFor<AAHeapToStack>( 4400349cc55cSDimitry Andric *this, IRPosition::function(*I.getFunction()), 4401349cc55cSDimitry Andric DepClassTy::OPTIONAL); 4402bdd1243dSDimitry Andric if (UnderlyingObjsAA.forallUnderlyingObjects([&](Value &Obj) { 4403bdd1243dSDimitry Andric if (AA::isAssumedThreadLocalObject(A, Obj, *this)) 4404349cc55cSDimitry Andric return true; 4405bdd1243dSDimitry Andric // Check for AAHeapToStack moved objects which must not be 4406bdd1243dSDimitry Andric // guarded. 
4407bdd1243dSDimitry Andric auto *CB = dyn_cast<CallBase>(&Obj);
4408bdd1243dSDimitry Andric return CB && HS.isAssumedHeapToStack(*CB);
4409bdd1243dSDimitry Andric }))
4410bdd1243dSDimitry Andric return true;
4411349cc55cSDimitry Andric }
4412349cc55cSDimitry Andric
4413349cc55cSDimitry Andric // Insert instruction that needs guarding.
4414fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&I);
4415fe6060f1SDimitry Andric return true;
4416fe6060f1SDimitry Andric };
4417fe6060f1SDimitry Andric
4418fe6060f1SDimitry Andric bool UsedAssumedInformationInCheckRWInst = false;
4419fe6060f1SDimitry Andric if (!SPMDCompatibilityTracker.isAtFixpoint())
4420fe6060f1SDimitry Andric if (!A.checkForAllReadWriteInstructions(
4421fe6060f1SDimitry Andric CheckRWInst, *this, UsedAssumedInformationInCheckRWInst))
4422fe6060f1SDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint();
4423fe6060f1SDimitry Andric
44244824e7fdSDimitry Andric bool UsedAssumedInformationFromReachingKernels = false;
4425fe6060f1SDimitry Andric if (!IsKernelEntry) {
4426fe6060f1SDimitry Andric updateParallelLevels(A);
4427349cc55cSDimitry Andric
44284824e7fdSDimitry Andric bool AllReachingKernelsKnown = true;
44294824e7fdSDimitry Andric updateReachingKernelEntries(A, AllReachingKernelsKnown);
44304824e7fdSDimitry Andric UsedAssumedInformationFromReachingKernels = !AllReachingKernelsKnown;
44314824e7fdSDimitry Andric
4432bdd1243dSDimitry Andric if (!SPMDCompatibilityTracker.empty()) {
4433349cc55cSDimitry Andric if (!ParallelLevels.isValidState())
4434349cc55cSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint();
44354824e7fdSDimitry Andric else if (!ReachingKernelEntries.isValidState())
44364824e7fdSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint();
4437bdd1243dSDimitry Andric else {
44384824e7fdSDimitry Andric // Check if all reaching kernels agree on the mode as we can otherwise
44394824e7fdSDimitry Andric // not guard instructions. We might not be sure about the mode so we
44404824e7fdSDimitry Andric // cannot fix the internal spmd-zation state either.
44414824e7fdSDimitry Andric int SPMD = 0, Generic = 0;
44424824e7fdSDimitry Andric for (auto *Kernel : ReachingKernelEntries) {
44434824e7fdSDimitry Andric auto &CBAA = A.getAAFor<AAKernelInfo>(
44444824e7fdSDimitry Andric *this, IRPosition::function(*Kernel), DepClassTy::OPTIONAL);
44454824e7fdSDimitry Andric if (CBAA.SPMDCompatibilityTracker.isValidState() &&
44464824e7fdSDimitry Andric CBAA.SPMDCompatibilityTracker.isAssumed())
44474824e7fdSDimitry Andric ++SPMD;
44484824e7fdSDimitry Andric else
44494824e7fdSDimitry Andric ++Generic;
44504824e7fdSDimitry Andric if (!CBAA.SPMDCompatibilityTracker.isAtFixpoint())
44514824e7fdSDimitry Andric UsedAssumedInformationFromReachingKernels = true;
44524824e7fdSDimitry Andric }
44534824e7fdSDimitry Andric if (SPMD != 0 && Generic != 0)
44544824e7fdSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint();
44554824e7fdSDimitry Andric }
4456fe6060f1SDimitry Andric }
4457bdd1243dSDimitry Andric }
4458fe6060f1SDimitry Andric
4459fe6060f1SDimitry Andric // Callback to check a call instruction.
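// The callback below merges the callee's AAKernelInfo state into this one
// and records whether all callee states were already at a fixpoint, so the
// corresponding local states can be fixed after the traversal if no assumed
// information was used.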
4460349cc55cSDimitry Andric bool AllParallelRegionStatesWereFixed = true; 4461fe6060f1SDimitry Andric bool AllSPMDStatesWereFixed = true; 4462fe6060f1SDimitry Andric auto CheckCallInst = [&](Instruction &I) { 4463fe6060f1SDimitry Andric auto &CB = cast<CallBase>(I); 4464fe6060f1SDimitry Andric auto &CBAA = A.getAAFor<AAKernelInfo>( 4465fe6060f1SDimitry Andric *this, IRPosition::callsite_function(CB), DepClassTy::OPTIONAL); 4466fe6060f1SDimitry Andric getState() ^= CBAA.getState(); 4467fe6060f1SDimitry Andric AllSPMDStatesWereFixed &= CBAA.SPMDCompatibilityTracker.isAtFixpoint(); 4468349cc55cSDimitry Andric AllParallelRegionStatesWereFixed &= 4469349cc55cSDimitry Andric CBAA.ReachedKnownParallelRegions.isAtFixpoint(); 4470349cc55cSDimitry Andric AllParallelRegionStatesWereFixed &= 4471349cc55cSDimitry Andric CBAA.ReachedUnknownParallelRegions.isAtFixpoint(); 4472fe6060f1SDimitry Andric return true; 4473fe6060f1SDimitry Andric }; 4474fe6060f1SDimitry Andric 4475fe6060f1SDimitry Andric bool UsedAssumedInformationInCheckCallInst = false; 4476fe6060f1SDimitry Andric if (!A.checkForAllCallLikeInstructions( 4477349cc55cSDimitry Andric CheckCallInst, *this, UsedAssumedInformationInCheckCallInst)) { 4478349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << TAG 4479349cc55cSDimitry Andric << "Failed to visit all call-like instructions!\n";); 4480fe6060f1SDimitry Andric return indicatePessimisticFixpoint(); 4481349cc55cSDimitry Andric } 4482349cc55cSDimitry Andric 4483349cc55cSDimitry Andric // If we haven't used any assumed information for the reached parallel 4484349cc55cSDimitry Andric // region states we can fix it. 4485349cc55cSDimitry Andric if (!UsedAssumedInformationInCheckCallInst && 4486349cc55cSDimitry Andric AllParallelRegionStatesWereFixed) { 4487349cc55cSDimitry Andric ReachedKnownParallelRegions.indicateOptimisticFixpoint(); 4488349cc55cSDimitry Andric ReachedUnknownParallelRegions.indicateOptimisticFixpoint(); 4489349cc55cSDimitry Andric } 4490349cc55cSDimitry Andric 4491fe6060f1SDimitry Andric // If we haven't used any assumed information for the SPMD state we can fix 4492fe6060f1SDimitry Andric // it. 4493fe6060f1SDimitry Andric if (!UsedAssumedInformationInCheckRWInst && 44944824e7fdSDimitry Andric !UsedAssumedInformationInCheckCallInst && 44954824e7fdSDimitry Andric !UsedAssumedInformationFromReachingKernels && AllSPMDStatesWereFixed) 4496fe6060f1SDimitry Andric SPMDCompatibilityTracker.indicateOptimisticFixpoint(); 4497fe6060f1SDimitry Andric 4498fe6060f1SDimitry Andric return StateBefore == getState() ? ChangeStatus::UNCHANGED 4499fe6060f1SDimitry Andric : ChangeStatus::CHANGED; 4500fe6060f1SDimitry Andric } 4501fe6060f1SDimitry Andric 4502fe6060f1SDimitry Andric private: 4503fe6060f1SDimitry Andric /// Update info regarding reaching kernels. 
45044824e7fdSDimitry Andric void updateReachingKernelEntries(Attributor &A,
45054824e7fdSDimitry Andric bool &AllReachingKernelsKnown) {
4506fe6060f1SDimitry Andric auto PredCallSite = [&](AbstractCallSite ACS) {
4507fe6060f1SDimitry Andric Function *Caller = ACS.getInstruction()->getFunction();
4508fe6060f1SDimitry Andric
4509fe6060f1SDimitry Andric assert(Caller && "Caller is nullptr");
4510fe6060f1SDimitry Andric
4511fe6060f1SDimitry Andric auto &CAA = A.getOrCreateAAFor<AAKernelInfo>(
4512fe6060f1SDimitry Andric IRPosition::function(*Caller), this, DepClassTy::REQUIRED);
4513fe6060f1SDimitry Andric if (CAA.ReachingKernelEntries.isValidState()) {
4514fe6060f1SDimitry Andric ReachingKernelEntries ^= CAA.ReachingKernelEntries;
4515fe6060f1SDimitry Andric return true;
4516fe6060f1SDimitry Andric }
4517fe6060f1SDimitry Andric
4518fe6060f1SDimitry Andric // We lost track of the caller of the associated function, any kernel
4519fe6060f1SDimitry Andric // could reach now.
4520fe6060f1SDimitry Andric ReachingKernelEntries.indicatePessimisticFixpoint();
4521fe6060f1SDimitry Andric
4522fe6060f1SDimitry Andric return true;
4523fe6060f1SDimitry Andric };
4524fe6060f1SDimitry Andric
4525fe6060f1SDimitry Andric if (!A.checkForAllCallSites(PredCallSite, *this,
4526fe6060f1SDimitry Andric true /* RequireAllCallSites */,
45274824e7fdSDimitry Andric AllReachingKernelsKnown))
4528fe6060f1SDimitry Andric ReachingKernelEntries.indicatePessimisticFixpoint();
4529fe6060f1SDimitry Andric }
4530fe6060f1SDimitry Andric
4531fe6060f1SDimitry Andric /// Update info regarding parallel levels.
4532fe6060f1SDimitry Andric void updateParallelLevels(Attributor &A) {
4533fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
4534fe6060f1SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &Parallel51RFI =
4535fe6060f1SDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_parallel_51];
4536fe6060f1SDimitry Andric
4537fe6060f1SDimitry Andric auto PredCallSite = [&](AbstractCallSite ACS) {
4538fe6060f1SDimitry Andric Function *Caller = ACS.getInstruction()->getFunction();
4539fe6060f1SDimitry Andric
4540fe6060f1SDimitry Andric assert(Caller && "Caller is nullptr");
4541fe6060f1SDimitry Andric
4542fe6060f1SDimitry Andric auto &CAA =
4543fe6060f1SDimitry Andric A.getOrCreateAAFor<AAKernelInfo>(IRPosition::function(*Caller));
4544fe6060f1SDimitry Andric if (CAA.ParallelLevels.isValidState()) {
4545fe6060f1SDimitry Andric // Any function that is called by `__kmpc_parallel_51` will not be
4546fe6060f1SDimitry Andric // folded as the parallel level in the function is updated. Getting this
4547fe6060f1SDimitry Andric // right would make the analysis depend on the runtime implementation,
4548fe6060f1SDimitry Andric // and any future change to that implementation could silently break the
4549fe6060f1SDimitry Andric // analysis. As a consequence, we are just conservative here.
4550fe6060f1SDimitry Andric if (Caller == Parallel51RFI.Declaration) {
4551fe6060f1SDimitry Andric ParallelLevels.indicatePessimisticFixpoint();
4552fe6060f1SDimitry Andric return true;
4553fe6060f1SDimitry Andric }
4554fe6060f1SDimitry Andric
4555fe6060f1SDimitry Andric ParallelLevels ^= CAA.ParallelLevels;
4556fe6060f1SDimitry Andric
4557fe6060f1SDimitry Andric return true;
4558fe6060f1SDimitry Andric }
4559fe6060f1SDimitry Andric
4560fe6060f1SDimitry Andric // We lost track of the caller of the associated function, any kernel
4561fe6060f1SDimitry Andric // could reach now.
4562fe6060f1SDimitry Andric ParallelLevels.indicatePessimisticFixpoint(); 4563fe6060f1SDimitry Andric 4564fe6060f1SDimitry Andric return true; 4565fe6060f1SDimitry Andric }; 4566fe6060f1SDimitry Andric 4567fe6060f1SDimitry Andric bool AllCallSitesKnown = true; 4568fe6060f1SDimitry Andric if (!A.checkForAllCallSites(PredCallSite, *this, 4569fe6060f1SDimitry Andric true /* RequireAllCallSites */, 4570fe6060f1SDimitry Andric AllCallSitesKnown)) 4571fe6060f1SDimitry Andric ParallelLevels.indicatePessimisticFixpoint(); 4572fe6060f1SDimitry Andric } 4573fe6060f1SDimitry Andric }; 4574fe6060f1SDimitry Andric 4575fe6060f1SDimitry Andric /// The call site kernel info abstract attribute, basically, what can we say 4576fe6060f1SDimitry Andric /// about a call site with regards to the KernelInfoState. For now this simply 4577fe6060f1SDimitry Andric /// forwards the information from the callee. 4578fe6060f1SDimitry Andric struct AAKernelInfoCallSite : AAKernelInfo { 4579fe6060f1SDimitry Andric AAKernelInfoCallSite(const IRPosition &IRP, Attributor &A) 4580fe6060f1SDimitry Andric : AAKernelInfo(IRP, A) {} 4581fe6060f1SDimitry Andric 4582fe6060f1SDimitry Andric /// See AbstractAttribute::initialize(...). 4583fe6060f1SDimitry Andric void initialize(Attributor &A) override { 4584fe6060f1SDimitry Andric AAKernelInfo::initialize(A); 4585fe6060f1SDimitry Andric 4586fe6060f1SDimitry Andric CallBase &CB = cast<CallBase>(getAssociatedValue()); 4587fe6060f1SDimitry Andric Function *Callee = getAssociatedFunction(); 4588fe6060f1SDimitry Andric 4589349cc55cSDimitry Andric auto &AssumptionAA = A.getAAFor<AAAssumptionInfo>( 4590349cc55cSDimitry Andric *this, IRPosition::callsite_function(CB), DepClassTy::OPTIONAL); 4591fe6060f1SDimitry Andric 4592fe6060f1SDimitry Andric // Check for SPMD-mode assumptions. 4593349cc55cSDimitry Andric if (AssumptionAA.hasAssumption("ompx_spmd_amenable")) { 4594fe6060f1SDimitry Andric SPMDCompatibilityTracker.indicateOptimisticFixpoint(); 4595349cc55cSDimitry Andric indicateOptimisticFixpoint(); 4596349cc55cSDimitry Andric } 4597fe6060f1SDimitry Andric 4598fe6060f1SDimitry Andric // First weed out calls we do not care about, that is readonly/readnone 4599fe6060f1SDimitry Andric // calls, intrinsics, and "no_openmp" calls. Neither of these can reach a 4600fe6060f1SDimitry Andric // parallel region or anything else we are looking for. 4601fe6060f1SDimitry Andric if (!CB.mayWriteToMemory() || isa<IntrinsicInst>(CB)) { 4602fe6060f1SDimitry Andric indicateOptimisticFixpoint(); 4603fe6060f1SDimitry Andric return; 4604fe6060f1SDimitry Andric } 4605fe6060f1SDimitry Andric 4606fe6060f1SDimitry Andric // Next we check if we know the callee. If it is a known OpenMP function 4607fe6060f1SDimitry Andric // we will handle them explicitly in the switch below. If it is not, we 4608fe6060f1SDimitry Andric // will use an AAKernelInfo object on the callee to gather information and 4609fe6060f1SDimitry Andric // merge that into the current state. The latter happens in the updateImpl. 4610fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 4611fe6060f1SDimitry Andric const auto &It = OMPInfoCache.RuntimeFunctionIDMap.find(Callee); 4612fe6060f1SDimitry Andric if (It == OMPInfoCache.RuntimeFunctionIDMap.end()) { 4613fe6060f1SDimitry Andric // Unknown caller or declarations are not analyzable, we give up. 
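// "Giving up" means conservatively assuming the callee may contain
// parallel regions (unless an assumption attribute says otherwise) and
// treating the call site as SPMD-incompatible.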
4614fe6060f1SDimitry Andric if (!Callee || !A.isFunctionIPOAmendable(*Callee)) { 4615fe6060f1SDimitry Andric 4616fe6060f1SDimitry Andric // Unknown callees might contain parallel regions, except if they have 4617fe6060f1SDimitry Andric // an appropriate assumption attached. 4618349cc55cSDimitry Andric if (!(AssumptionAA.hasAssumption("omp_no_openmp") || 4619349cc55cSDimitry Andric AssumptionAA.hasAssumption("omp_no_parallelism"))) 4620fe6060f1SDimitry Andric ReachedUnknownParallelRegions.insert(&CB); 4621fe6060f1SDimitry Andric 4622fe6060f1SDimitry Andric // If SPMDCompatibilityTracker is not fixed, we need to give up on the 4623fe6060f1SDimitry Andric // idea we can run something unknown in SPMD-mode. 4624349cc55cSDimitry Andric if (!SPMDCompatibilityTracker.isAtFixpoint()) { 4625349cc55cSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint(); 4626fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB); 4627349cc55cSDimitry Andric } 4628fe6060f1SDimitry Andric 4629fe6060f1SDimitry Andric // We have updated the state for this unknown call properly, there won't 4630fe6060f1SDimitry Andric // be any change so we indicate a fixpoint. 4631fe6060f1SDimitry Andric indicateOptimisticFixpoint(); 4632fe6060f1SDimitry Andric } 4633fe6060f1SDimitry Andric // If the callee is known and can be used in IPO, we will update the state 4634fe6060f1SDimitry Andric // based on the callee state in updateImpl. 4635fe6060f1SDimitry Andric return; 4636fe6060f1SDimitry Andric } 4637fe6060f1SDimitry Andric 4638fe6060f1SDimitry Andric const unsigned int WrapperFunctionArgNo = 6; 4639fe6060f1SDimitry Andric RuntimeFunction RF = It->getSecond(); 4640fe6060f1SDimitry Andric switch (RF) { 4641fe6060f1SDimitry Andric // All the functions we know are compatible with SPMD mode. 4642fe6060f1SDimitry Andric case OMPRTL___kmpc_is_spmd_exec_mode: 4643349cc55cSDimitry Andric case OMPRTL___kmpc_distribute_static_fini: 4644fe6060f1SDimitry Andric case OMPRTL___kmpc_for_static_fini: 4645fe6060f1SDimitry Andric case OMPRTL___kmpc_global_thread_num: 4646fe6060f1SDimitry Andric case OMPRTL___kmpc_get_hardware_num_threads_in_block: 4647fe6060f1SDimitry Andric case OMPRTL___kmpc_get_hardware_num_blocks: 4648fe6060f1SDimitry Andric case OMPRTL___kmpc_single: 4649fe6060f1SDimitry Andric case OMPRTL___kmpc_end_single: 4650fe6060f1SDimitry Andric case OMPRTL___kmpc_master: 4651fe6060f1SDimitry Andric case OMPRTL___kmpc_end_master: 4652fe6060f1SDimitry Andric case OMPRTL___kmpc_barrier: 46530eae32dcSDimitry Andric case OMPRTL___kmpc_nvptx_parallel_reduce_nowait_v2: 46540eae32dcSDimitry Andric case OMPRTL___kmpc_nvptx_teams_reduce_nowait_v2: 46550eae32dcSDimitry Andric case OMPRTL___kmpc_nvptx_end_reduce_nowait: 4656fe6060f1SDimitry Andric break; 4657349cc55cSDimitry Andric case OMPRTL___kmpc_distribute_static_init_4: 4658349cc55cSDimitry Andric case OMPRTL___kmpc_distribute_static_init_4u: 4659349cc55cSDimitry Andric case OMPRTL___kmpc_distribute_static_init_8: 4660349cc55cSDimitry Andric case OMPRTL___kmpc_distribute_static_init_8u: 4661fe6060f1SDimitry Andric case OMPRTL___kmpc_for_static_init_4: 4662fe6060f1SDimitry Andric case OMPRTL___kmpc_for_static_init_4u: 4663fe6060f1SDimitry Andric case OMPRTL___kmpc_for_static_init_8: 4664fe6060f1SDimitry Andric case OMPRTL___kmpc_for_static_init_8u: { 4665fe6060f1SDimitry Andric // Check the schedule and allow static schedule in SPMD mode. 
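// Only the statically known unordered-static and distribute schedule kinds
// are accepted below; anything else, including a schedule operand that is
// not a compile-time constant, is treated as SPMD-incompatible.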
4666fe6060f1SDimitry Andric unsigned ScheduleArgOpNo = 2; 4667fe6060f1SDimitry Andric auto *ScheduleTypeCI = 4668fe6060f1SDimitry Andric dyn_cast<ConstantInt>(CB.getArgOperand(ScheduleArgOpNo)); 4669fe6060f1SDimitry Andric unsigned ScheduleTypeVal = 4670fe6060f1SDimitry Andric ScheduleTypeCI ? ScheduleTypeCI->getZExtValue() : 0; 4671fe6060f1SDimitry Andric switch (OMPScheduleType(ScheduleTypeVal)) { 467281ad6265SDimitry Andric case OMPScheduleType::UnorderedStatic: 467381ad6265SDimitry Andric case OMPScheduleType::UnorderedStaticChunked: 467481ad6265SDimitry Andric case OMPScheduleType::OrderedDistribute: 467581ad6265SDimitry Andric case OMPScheduleType::OrderedDistributeChunked: 4676fe6060f1SDimitry Andric break; 4677fe6060f1SDimitry Andric default: 4678349cc55cSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint(); 4679fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB); 4680fe6060f1SDimitry Andric break; 4681fe6060f1SDimitry Andric }; 4682fe6060f1SDimitry Andric } break; 4683fe6060f1SDimitry Andric case OMPRTL___kmpc_target_init: 4684fe6060f1SDimitry Andric KernelInitCB = &CB; 4685fe6060f1SDimitry Andric break; 4686fe6060f1SDimitry Andric case OMPRTL___kmpc_target_deinit: 4687fe6060f1SDimitry Andric KernelDeinitCB = &CB; 4688fe6060f1SDimitry Andric break; 4689fe6060f1SDimitry Andric case OMPRTL___kmpc_parallel_51: 4690fe6060f1SDimitry Andric if (auto *ParallelRegion = dyn_cast<Function>( 4691fe6060f1SDimitry Andric CB.getArgOperand(WrapperFunctionArgNo)->stripPointerCasts())) { 4692fe6060f1SDimitry Andric ReachedKnownParallelRegions.insert(ParallelRegion); 4693bdd1243dSDimitry Andric /// Check nested parallelism 4694bdd1243dSDimitry Andric auto &FnAA = A.getAAFor<AAKernelInfo>( 4695bdd1243dSDimitry Andric *this, IRPosition::function(*ParallelRegion), DepClassTy::OPTIONAL); 4696bdd1243dSDimitry Andric NestedParallelism |= !FnAA.getState().isValidState() || 4697bdd1243dSDimitry Andric !FnAA.ReachedKnownParallelRegions.empty() || 4698bdd1243dSDimitry Andric !FnAA.ReachedUnknownParallelRegions.empty(); 4699fe6060f1SDimitry Andric break; 4700fe6060f1SDimitry Andric } 4701fe6060f1SDimitry Andric // The condition above should usually get the parallel region function 4702fe6060f1SDimitry Andric // pointer and record it. In the off chance it doesn't we assume the 4703fe6060f1SDimitry Andric // worst. 4704fe6060f1SDimitry Andric ReachedUnknownParallelRegions.insert(&CB); 4705fe6060f1SDimitry Andric break; 4706fe6060f1SDimitry Andric case OMPRTL___kmpc_omp_task: 4707fe6060f1SDimitry Andric // We do not look into tasks right now, just give up. 47080eae32dcSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint(); 4709fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB); 4710fe6060f1SDimitry Andric ReachedUnknownParallelRegions.insert(&CB); 4711fe6060f1SDimitry Andric break; 4712fe6060f1SDimitry Andric case OMPRTL___kmpc_alloc_shared: 4713fe6060f1SDimitry Andric case OMPRTL___kmpc_free_shared: 4714fe6060f1SDimitry Andric // Return without setting a fixpoint, to be resolved in updateImpl. 4715fe6060f1SDimitry Andric return; 4716fe6060f1SDimitry Andric default: 4717fe6060f1SDimitry Andric // Unknown OpenMP runtime calls cannot be executed in SPMD-mode, 4718349cc55cSDimitry Andric // generally. However, they do not hide parallel regions. 
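// Hence the call is only recorded as SPMD-incompatible here; it is not
// added to ReachedUnknownParallelRegions.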
47190eae32dcSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint(); 4720fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB); 4721fe6060f1SDimitry Andric break; 4722fe6060f1SDimitry Andric } 4723fe6060f1SDimitry Andric // All other OpenMP runtime calls will not reach parallel regions so they 4724fe6060f1SDimitry Andric // can be safely ignored for now. Since it is a known OpenMP runtime call we 4725fe6060f1SDimitry Andric // have now modeled all effects and there is no need for any update. 4726fe6060f1SDimitry Andric indicateOptimisticFixpoint(); 4727fe6060f1SDimitry Andric } 4728fe6060f1SDimitry Andric 4729fe6060f1SDimitry Andric ChangeStatus updateImpl(Attributor &A) override { 4730fe6060f1SDimitry Andric // TODO: Once we have call site specific value information we can provide 4731fe6060f1SDimitry Andric // call site specific liveness information and then it makes 4732fe6060f1SDimitry Andric // sense to specialize attributes for call sites arguments instead of 4733fe6060f1SDimitry Andric // redirecting requests to the callee argument. 4734fe6060f1SDimitry Andric Function *F = getAssociatedFunction(); 4735fe6060f1SDimitry Andric 4736fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache()); 4737fe6060f1SDimitry Andric const auto &It = OMPInfoCache.RuntimeFunctionIDMap.find(F); 4738fe6060f1SDimitry Andric 4739fe6060f1SDimitry Andric // If F is not a runtime function, propagate the AAKernelInfo of the callee. 4740fe6060f1SDimitry Andric if (It == OMPInfoCache.RuntimeFunctionIDMap.end()) { 4741fe6060f1SDimitry Andric const IRPosition &FnPos = IRPosition::function(*F); 4742fe6060f1SDimitry Andric auto &FnAA = A.getAAFor<AAKernelInfo>(*this, FnPos, DepClassTy::REQUIRED); 4743fe6060f1SDimitry Andric if (getState() == FnAA.getState()) 4744fe6060f1SDimitry Andric return ChangeStatus::UNCHANGED; 4745fe6060f1SDimitry Andric getState() = FnAA.getState(); 4746fe6060f1SDimitry Andric return ChangeStatus::CHANGED; 4747fe6060f1SDimitry Andric } 4748fe6060f1SDimitry Andric 4749fe6060f1SDimitry Andric // F is a runtime function that allocates or frees memory, check 4750fe6060f1SDimitry Andric // AAHeapToStack and AAHeapToShared. 4751fe6060f1SDimitry Andric KernelInfoState StateBefore = getState(); 4752fe6060f1SDimitry Andric assert((It->getSecond() == OMPRTL___kmpc_alloc_shared || 4753fe6060f1SDimitry Andric It->getSecond() == OMPRTL___kmpc_free_shared) && 4754fe6060f1SDimitry Andric "Expected a __kmpc_alloc_shared or __kmpc_free_shared runtime call"); 4755fe6060f1SDimitry Andric 4756fe6060f1SDimitry Andric CallBase &CB = cast<CallBase>(getAssociatedValue()); 4757fe6060f1SDimitry Andric 4758fe6060f1SDimitry Andric auto &HeapToStackAA = A.getAAFor<AAHeapToStack>( 4759fe6060f1SDimitry Andric *this, IRPosition::function(*CB.getCaller()), DepClassTy::OPTIONAL); 4760fe6060f1SDimitry Andric auto &HeapToSharedAA = A.getAAFor<AAHeapToShared>( 4761fe6060f1SDimitry Andric *this, IRPosition::function(*CB.getCaller()), DepClassTy::OPTIONAL); 4762fe6060f1SDimitry Andric 4763fe6060f1SDimitry Andric RuntimeFunction RF = It->getSecond(); 4764fe6060f1SDimitry Andric 4765fe6060f1SDimitry Andric switch (RF) { 4766fe6060f1SDimitry Andric // If neither HeapToStack nor HeapToShared assume the call is removed, 4767fe6060f1SDimitry Andric // assume SPMD incompatibility. 
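// An allocation or free that is expected to disappear (moved to the stack
// or to team-shared memory) does not block SPMD-ization; otherwise the call
// is recorded in the SPMD compatibility tracker.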
4768fe6060f1SDimitry Andric case OMPRTL___kmpc_alloc_shared:
4769fe6060f1SDimitry Andric if (!HeapToStackAA.isAssumedHeapToStack(CB) &&
4770fe6060f1SDimitry Andric !HeapToSharedAA.isAssumedHeapToShared(CB))
4771fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB);
4772fe6060f1SDimitry Andric break;
4773fe6060f1SDimitry Andric case OMPRTL___kmpc_free_shared:
4774fe6060f1SDimitry Andric if (!HeapToStackAA.isAssumedHeapToStackRemovedFree(CB) &&
4775fe6060f1SDimitry Andric !HeapToSharedAA.isAssumedHeapToSharedRemovedFree(CB))
4776fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB);
4777fe6060f1SDimitry Andric break;
4778fe6060f1SDimitry Andric default:
47790eae32dcSDimitry Andric SPMDCompatibilityTracker.indicatePessimisticFixpoint();
4780fe6060f1SDimitry Andric SPMDCompatibilityTracker.insert(&CB);
4781fe6060f1SDimitry Andric }
4782fe6060f1SDimitry Andric
4783fe6060f1SDimitry Andric return StateBefore == getState() ? ChangeStatus::UNCHANGED
4784fe6060f1SDimitry Andric : ChangeStatus::CHANGED;
4785fe6060f1SDimitry Andric }
4786fe6060f1SDimitry Andric };
4787fe6060f1SDimitry Andric
4788fe6060f1SDimitry Andric struct AAFoldRuntimeCall
4789fe6060f1SDimitry Andric : public StateWrapper<BooleanState, AbstractAttribute> {
4790fe6060f1SDimitry Andric using Base = StateWrapper<BooleanState, AbstractAttribute>;
4791fe6060f1SDimitry Andric
4792fe6060f1SDimitry Andric AAFoldRuntimeCall(const IRPosition &IRP, Attributor &A) : Base(IRP) {}
4793fe6060f1SDimitry Andric
4794fe6060f1SDimitry Andric /// Statistics are tracked as part of manifest for now.
4795fe6060f1SDimitry Andric void trackStatistics() const override {}
4796fe6060f1SDimitry Andric
4797fe6060f1SDimitry Andric /// Create an abstract attribute view for the position \p IRP.
4798fe6060f1SDimitry Andric static AAFoldRuntimeCall &createForPosition(const IRPosition &IRP,
4799fe6060f1SDimitry Andric Attributor &A);
4800fe6060f1SDimitry Andric
4801fe6060f1SDimitry Andric /// See AbstractAttribute::getName()
4802fe6060f1SDimitry Andric const std::string getName() const override { return "AAFoldRuntimeCall"; }
4803fe6060f1SDimitry Andric
4804fe6060f1SDimitry Andric /// See AbstractAttribute::getIdAddr()
4805fe6060f1SDimitry Andric const char *getIdAddr() const override { return &ID; }
4806fe6060f1SDimitry Andric
4807fe6060f1SDimitry Andric /// This function should return true if the type of the \p AA is
4808fe6060f1SDimitry Andric /// AAFoldRuntimeCall
4809fe6060f1SDimitry Andric static bool classof(const AbstractAttribute *AA) {
4810fe6060f1SDimitry Andric return (AA->getIdAddr() == &ID);
4811fe6060f1SDimitry Andric }
4812fe6060f1SDimitry Andric
4813fe6060f1SDimitry Andric static const char ID;
4814fe6060f1SDimitry Andric };
4815fe6060f1SDimitry Andric
4816fe6060f1SDimitry Andric struct AAFoldRuntimeCallCallSiteReturned : AAFoldRuntimeCall {
4817fe6060f1SDimitry Andric AAFoldRuntimeCallCallSiteReturned(const IRPosition &IRP, Attributor &A)
4818fe6060f1SDimitry Andric : AAFoldRuntimeCall(IRP, A) {}
4819fe6060f1SDimitry Andric
4820fe6060f1SDimitry Andric /// See AbstractAttribute::getAsStr()
4821fe6060f1SDimitry Andric const std::string getAsStr() const override {
4822fe6060f1SDimitry Andric if (!isValidState())
4823fe6060f1SDimitry Andric return "<invalid>";
4824fe6060f1SDimitry Andric
4825fe6060f1SDimitry Andric std::string Str("simplified value: ");
4826fe6060f1SDimitry Andric
482781ad6265SDimitry Andric if (!SimplifiedValue)
4828fe6060f1SDimitry Andric return Str + std::string("none");
4829fe6060f1SDimitry Andric
4830bdd1243dSDimitry
4830bdd1243dSDimitry Andric if (!*SimplifiedValue)
4831fe6060f1SDimitry Andric return Str + std::string("nullptr");
4832fe6060f1SDimitry Andric
4833bdd1243dSDimitry Andric if (ConstantInt *CI = dyn_cast<ConstantInt>(*SimplifiedValue))
4834fe6060f1SDimitry Andric return Str + std::to_string(CI->getSExtValue());
4835fe6060f1SDimitry Andric
4836fe6060f1SDimitry Andric return Str + std::string("unknown");
4837fe6060f1SDimitry Andric }
4838fe6060f1SDimitry Andric
4839fe6060f1SDimitry Andric void initialize(Attributor &A) override {
4840349cc55cSDimitry Andric if (DisableOpenMPOptFolding)
4841349cc55cSDimitry Andric indicatePessimisticFixpoint();
4842349cc55cSDimitry Andric
4843fe6060f1SDimitry Andric Function *Callee = getAssociatedFunction();
4844fe6060f1SDimitry Andric
4845fe6060f1SDimitry Andric auto &OMPInfoCache = static_cast<OMPInformationCache &>(A.getInfoCache());
4846fe6060f1SDimitry Andric const auto &It = OMPInfoCache.RuntimeFunctionIDMap.find(Callee);
4847fe6060f1SDimitry Andric assert(It != OMPInfoCache.RuntimeFunctionIDMap.end() &&
4848fe6060f1SDimitry Andric "Expected a known OpenMP runtime function");
4849fe6060f1SDimitry Andric
4850fe6060f1SDimitry Andric RFKind = It->getSecond();
4851fe6060f1SDimitry Andric
4852fe6060f1SDimitry Andric CallBase &CB = cast<CallBase>(getAssociatedValue());
4853fe6060f1SDimitry Andric A.registerSimplificationCallback(
4854fe6060f1SDimitry Andric IRPosition::callsite_returned(CB),
4855fe6060f1SDimitry Andric [&](const IRPosition &IRP, const AbstractAttribute *AA,
4856bdd1243dSDimitry Andric bool &UsedAssumedInformation) -> std::optional<Value *> {
485781ad6265SDimitry Andric assert((isValidState() ||
4858bdd1243dSDimitry Andric (SimplifiedValue && *SimplifiedValue == nullptr)) &&
4859fe6060f1SDimitry Andric "Unexpected invalid state!");
4860fe6060f1SDimitry Andric
4861fe6060f1SDimitry Andric if (!isAtFixpoint()) {
4862fe6060f1SDimitry Andric UsedAssumedInformation = true;
4863fe6060f1SDimitry Andric if (AA)
4864fe6060f1SDimitry Andric A.recordDependence(*this, *AA, DepClassTy::OPTIONAL);
4865fe6060f1SDimitry Andric }
4866fe6060f1SDimitry Andric return SimplifiedValue;
4867fe6060f1SDimitry Andric });
4868fe6060f1SDimitry Andric }
4869fe6060f1SDimitry Andric
4870fe6060f1SDimitry Andric ChangeStatus updateImpl(Attributor &A) override {
4871fe6060f1SDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED;
4872fe6060f1SDimitry Andric switch (RFKind) {
4873fe6060f1SDimitry Andric case OMPRTL___kmpc_is_spmd_exec_mode:
4874fe6060f1SDimitry Andric Changed |= foldIsSPMDExecMode(A);
4875fe6060f1SDimitry Andric break;
4876fe6060f1SDimitry Andric case OMPRTL___kmpc_parallel_level:
4877fe6060f1SDimitry Andric Changed |= foldParallelLevel(A);
4878fe6060f1SDimitry Andric break;
4879fe6060f1SDimitry Andric case OMPRTL___kmpc_get_hardware_num_threads_in_block:
4880fe6060f1SDimitry Andric Changed = Changed | foldKernelFnAttribute(A, "omp_target_thread_limit");
4881fe6060f1SDimitry Andric break;
4882fe6060f1SDimitry Andric case OMPRTL___kmpc_get_hardware_num_blocks:
4883fe6060f1SDimitry Andric Changed = Changed | foldKernelFnAttribute(A, "omp_target_num_teams");
4884fe6060f1SDimitry Andric break;
4885fe6060f1SDimitry Andric default:
4886fe6060f1SDimitry Andric llvm_unreachable("Unhandled OpenMP runtime function!");
4887fe6060f1SDimitry Andric }
4888fe6060f1SDimitry Andric
4889fe6060f1SDimitry Andric return Changed;
4890fe6060f1SDimitry Andric }
4891fe6060f1SDimitry Andric
4892fe6060f1SDimitry Andric ChangeStatus manifest(Attributor &A) override {
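// Editor's note (descriptive, not upstream text): manifest() replaces the
// associated runtime call with the constant settled on during the fixpoint
// iteration and, with verbose remarks enabled, emits a message built by the
// Remark lambda below, e.g. roughly "Replacing OpenMP runtime call
// __kmpc_is_spmd_exec_mode with 1." (illustrative wording).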
4893fe6060f1SDimitry Andric ChangeStatus Changed = ChangeStatus::UNCHANGED;
4894fe6060f1SDimitry Andric
489581ad6265SDimitry Andric if (SimplifiedValue && *SimplifiedValue) {
4896349cc55cSDimitry Andric Instruction &I = *getCtxI();
489781ad6265SDimitry Andric A.changeAfterManifest(IRPosition::inst(I), **SimplifiedValue);
4898349cc55cSDimitry Andric A.deleteAfterManifest(I);
4899fe6060f1SDimitry Andric
4900349cc55cSDimitry Andric CallBase *CB = dyn_cast<CallBase>(&I);
4901349cc55cSDimitry Andric auto Remark = [&](OptimizationRemark OR) {
4902349cc55cSDimitry Andric if (auto *C = dyn_cast<ConstantInt>(*SimplifiedValue))
4903349cc55cSDimitry Andric return OR << "Replacing OpenMP runtime call "
4904349cc55cSDimitry Andric << CB->getCalledFunction()->getName() << " with "
4905349cc55cSDimitry Andric << ore::NV("FoldedValue", C->getZExtValue()) << ".";
4906349cc55cSDimitry Andric return OR << "Replacing OpenMP runtime call "
4907349cc55cSDimitry Andric << CB->getCalledFunction()->getName() << ".";
4908349cc55cSDimitry Andric };
4909349cc55cSDimitry Andric
4910349cc55cSDimitry Andric if (CB && EnableVerboseRemarks)
4911349cc55cSDimitry Andric A.emitRemark<OptimizationRemark>(CB, "OMP180", Remark);
4912349cc55cSDimitry Andric
4913349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Replacing runtime call: " << I << " with "
4914fe6060f1SDimitry Andric << **SimplifiedValue << "\n");
4915fe6060f1SDimitry Andric
4916fe6060f1SDimitry Andric Changed = ChangeStatus::CHANGED;
4917fe6060f1SDimitry Andric }
4918fe6060f1SDimitry Andric
4919fe6060f1SDimitry Andric return Changed;
4920fe6060f1SDimitry Andric }
4921fe6060f1SDimitry Andric
4922fe6060f1SDimitry Andric ChangeStatus indicatePessimisticFixpoint() override {
4923fe6060f1SDimitry Andric SimplifiedValue = nullptr;
4924fe6060f1SDimitry Andric return AAFoldRuntimeCall::indicatePessimisticFixpoint();
4925fe6060f1SDimitry Andric }
4926fe6060f1SDimitry Andric
4927fe6060f1SDimitry Andric private:
4928fe6060f1SDimitry Andric /// Fold __kmpc_is_spmd_exec_mode into a constant if possible.
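/// As an illustrative sketch (editor's example, not taken from a real test
/// case), a guarded region such as
///
///   %mode = call i8 @__kmpc_is_spmd_exec_mode()
///   %is.generic = icmp eq i8 %mode, 0
///
/// becomes trivially foldable once every reaching kernel is known (or
/// assumed) to execute in SPMD mode, because %mode is then replaced by the
/// i8 constant 1 computed below.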
4929fe6060f1SDimitry Andric ChangeStatus foldIsSPMDExecMode(Attributor &A) {
4930bdd1243dSDimitry Andric std::optional<Value *> SimplifiedValueBefore = SimplifiedValue;
4931fe6060f1SDimitry Andric
4932fe6060f1SDimitry Andric unsigned AssumedSPMDCount = 0, KnownSPMDCount = 0;
4933fe6060f1SDimitry Andric unsigned AssumedNonSPMDCount = 0, KnownNonSPMDCount = 0;
4934fe6060f1SDimitry Andric auto &CallerKernelInfoAA = A.getAAFor<AAKernelInfo>(
4935fe6060f1SDimitry Andric *this, IRPosition::function(*getAnchorScope()), DepClassTy::REQUIRED);
4936fe6060f1SDimitry Andric
4937fe6060f1SDimitry Andric if (!CallerKernelInfoAA.ReachingKernelEntries.isValidState())
4938fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
4939fe6060f1SDimitry Andric
4940fe6060f1SDimitry Andric for (Kernel K : CallerKernelInfoAA.ReachingKernelEntries) {
4941fe6060f1SDimitry Andric auto &AA = A.getAAFor<AAKernelInfo>(*this, IRPosition::function(*K),
4942fe6060f1SDimitry Andric DepClassTy::REQUIRED);
4943fe6060f1SDimitry Andric
4944fe6060f1SDimitry Andric if (!AA.isValidState()) {
4945fe6060f1SDimitry Andric SimplifiedValue = nullptr;
4946fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
4947fe6060f1SDimitry Andric }
4948fe6060f1SDimitry Andric
4949fe6060f1SDimitry Andric if (AA.SPMDCompatibilityTracker.isAssumed()) {
4950fe6060f1SDimitry Andric if (AA.SPMDCompatibilityTracker.isAtFixpoint())
4951fe6060f1SDimitry Andric ++KnownSPMDCount;
4952fe6060f1SDimitry Andric else
4953fe6060f1SDimitry Andric ++AssumedSPMDCount;
4954fe6060f1SDimitry Andric } else {
4955fe6060f1SDimitry Andric if (AA.SPMDCompatibilityTracker.isAtFixpoint())
4956fe6060f1SDimitry Andric ++KnownNonSPMDCount;
4957fe6060f1SDimitry Andric else
4958fe6060f1SDimitry Andric ++AssumedNonSPMDCount;
4959fe6060f1SDimitry Andric }
4960fe6060f1SDimitry Andric }
4961fe6060f1SDimitry Andric
4962fe6060f1SDimitry Andric if ((AssumedSPMDCount + KnownSPMDCount) &&
4963fe6060f1SDimitry Andric (AssumedNonSPMDCount + KnownNonSPMDCount))
4964fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
4965fe6060f1SDimitry Andric
4966fe6060f1SDimitry Andric auto &Ctx = getAnchorValue().getContext();
4967fe6060f1SDimitry Andric if (KnownSPMDCount || AssumedSPMDCount) {
4968fe6060f1SDimitry Andric assert(KnownNonSPMDCount == 0 && AssumedNonSPMDCount == 0 &&
4969fe6060f1SDimitry Andric "Expected only SPMD kernels!");
4970fe6060f1SDimitry Andric // All reaching kernels are in SPMD mode. Update all function calls to
4971fe6060f1SDimitry Andric // __kmpc_is_spmd_exec_mode to 1.
4972fe6060f1SDimitry Andric SimplifiedValue = ConstantInt::get(Type::getInt8Ty(Ctx), true);
4973fe6060f1SDimitry Andric } else if (KnownNonSPMDCount || AssumedNonSPMDCount) {
4974fe6060f1SDimitry Andric assert(KnownSPMDCount == 0 && AssumedSPMDCount == 0 &&
4975fe6060f1SDimitry Andric "Expected only non-SPMD kernels!");
4976fe6060f1SDimitry Andric // All reaching kernels are in non-SPMD mode. Update all function
4977fe6060f1SDimitry Andric // calls to __kmpc_is_spmd_exec_mode to 0.
4978fe6060f1SDimitry Andric SimplifiedValue = ConstantInt::get(Type::getInt8Ty(Ctx), false);
4979fe6060f1SDimitry Andric } else {
4980fe6060f1SDimitry Andric // We have empty reaching kernels, therefore we cannot tell if the
4981fe6060f1SDimitry Andric // associated call site can be folded. At this moment, SimplifiedValue
4982fe6060f1SDimitry Andric // must be none.
498381ad6265SDimitry Andric assert(!SimplifiedValue && "SimplifiedValue should be none");
4984fe6060f1SDimitry Andric }
4985fe6060f1SDimitry Andric
4986fe6060f1SDimitry Andric return SimplifiedValue == SimplifiedValueBefore ? ChangeStatus::UNCHANGED
4987fe6060f1SDimitry Andric : ChangeStatus::CHANGED;
4988fe6060f1SDimitry Andric }
4989fe6060f1SDimitry Andric
4990fe6060f1SDimitry Andric /// Fold __kmpc_parallel_level into a constant if possible.
4991fe6060f1SDimitry Andric ChangeStatus foldParallelLevel(Attributor &A) {
4992bdd1243dSDimitry Andric std::optional<Value *> SimplifiedValueBefore = SimplifiedValue;
4993fe6060f1SDimitry Andric
4994fe6060f1SDimitry Andric auto &CallerKernelInfoAA = A.getAAFor<AAKernelInfo>(
4995fe6060f1SDimitry Andric *this, IRPosition::function(*getAnchorScope()), DepClassTy::REQUIRED);
4996fe6060f1SDimitry Andric
4997fe6060f1SDimitry Andric if (!CallerKernelInfoAA.ParallelLevels.isValidState())
4998fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
4999fe6060f1SDimitry Andric
5000fe6060f1SDimitry Andric if (!CallerKernelInfoAA.ReachingKernelEntries.isValidState())
5001fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
5002fe6060f1SDimitry Andric
5003fe6060f1SDimitry Andric if (CallerKernelInfoAA.ReachingKernelEntries.empty()) {
500481ad6265SDimitry Andric assert(!SimplifiedValue &&
5005fe6060f1SDimitry Andric "SimplifiedValue should keep none at this point");
5006fe6060f1SDimitry Andric return ChangeStatus::UNCHANGED;
5007fe6060f1SDimitry Andric }
5008fe6060f1SDimitry Andric
5009fe6060f1SDimitry Andric unsigned AssumedSPMDCount = 0, KnownSPMDCount = 0;
5010fe6060f1SDimitry Andric unsigned AssumedNonSPMDCount = 0, KnownNonSPMDCount = 0;
5011fe6060f1SDimitry Andric for (Kernel K : CallerKernelInfoAA.ReachingKernelEntries) {
5012fe6060f1SDimitry Andric auto &AA = A.getAAFor<AAKernelInfo>(*this, IRPosition::function(*K),
5013fe6060f1SDimitry Andric DepClassTy::REQUIRED);
5014fe6060f1SDimitry Andric if (!AA.SPMDCompatibilityTracker.isValidState())
5015fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
5016fe6060f1SDimitry Andric
5017fe6060f1SDimitry Andric if (AA.SPMDCompatibilityTracker.isAssumed()) {
5018fe6060f1SDimitry Andric if (AA.SPMDCompatibilityTracker.isAtFixpoint())
5019fe6060f1SDimitry Andric ++KnownSPMDCount;
5020fe6060f1SDimitry Andric else
5021fe6060f1SDimitry Andric ++AssumedSPMDCount;
5022fe6060f1SDimitry Andric } else {
5023fe6060f1SDimitry Andric if (AA.SPMDCompatibilityTracker.isAtFixpoint())
5024fe6060f1SDimitry Andric ++KnownNonSPMDCount;
5025fe6060f1SDimitry Andric else
5026fe6060f1SDimitry Andric ++AssumedNonSPMDCount;
5027fe6060f1SDimitry Andric }
5028fe6060f1SDimitry Andric }
5029fe6060f1SDimitry Andric
5030fe6060f1SDimitry Andric if ((AssumedSPMDCount + KnownSPMDCount) &&
5031fe6060f1SDimitry Andric (AssumedNonSPMDCount + KnownNonSPMDCount))
5032fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
5033fe6060f1SDimitry Andric
5034fe6060f1SDimitry Andric auto &Ctx = getAnchorValue().getContext();
5035fe6060f1SDimitry Andric // If the caller can only be reached by SPMD kernel entries, the parallel
5036fe6060f1SDimitry Andric // level is 1. Similarly, if the caller can only be reached by non-SPMD
5037fe6060f1SDimitry Andric // kernel entries, it is 0.
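// Illustrative reasoning (editor's sketch): code reached only from SPMD
// kernels already executes inside the single implicit parallel region of the
// kernel, so a __kmpc_parallel_level call there folds to the i8 constant 1,
// while the same call reached only from generic-mode kernels folds to 0.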
5038fe6060f1SDimitry Andric if (AssumedSPMDCount || KnownSPMDCount) {
5039fe6060f1SDimitry Andric assert(KnownNonSPMDCount == 0 && AssumedNonSPMDCount == 0 &&
5040fe6060f1SDimitry Andric "Expected only SPMD kernels!");
5041fe6060f1SDimitry Andric SimplifiedValue = ConstantInt::get(Type::getInt8Ty(Ctx), 1);
5042fe6060f1SDimitry Andric } else {
5043fe6060f1SDimitry Andric assert(KnownSPMDCount == 0 && AssumedSPMDCount == 0 &&
5044fe6060f1SDimitry Andric "Expected only non-SPMD kernels!");
5045fe6060f1SDimitry Andric SimplifiedValue = ConstantInt::get(Type::getInt8Ty(Ctx), 0);
5046fe6060f1SDimitry Andric }
5047fe6060f1SDimitry Andric return SimplifiedValue == SimplifiedValueBefore ? ChangeStatus::UNCHANGED
5048fe6060f1SDimitry Andric : ChangeStatus::CHANGED;
5049fe6060f1SDimitry Andric }
5050fe6060f1SDimitry Andric
5051fe6060f1SDimitry Andric ChangeStatus foldKernelFnAttribute(Attributor &A, llvm::StringRef Attr) {
5052fe6060f1SDimitry Andric // Specialize only if all the calls agree with the attribute constant value
5053fe6060f1SDimitry Andric int32_t CurrentAttrValue = -1;
5054bdd1243dSDimitry Andric std::optional<Value *> SimplifiedValueBefore = SimplifiedValue;
5055fe6060f1SDimitry Andric
5056fe6060f1SDimitry Andric auto &CallerKernelInfoAA = A.getAAFor<AAKernelInfo>(
5057fe6060f1SDimitry Andric *this, IRPosition::function(*getAnchorScope()), DepClassTy::REQUIRED);
5058fe6060f1SDimitry Andric
5059fe6060f1SDimitry Andric if (!CallerKernelInfoAA.ReachingKernelEntries.isValidState())
5060fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
5061fe6060f1SDimitry Andric
5062fe6060f1SDimitry Andric // Iterate over the kernels that reach this function
5063fe6060f1SDimitry Andric for (Kernel K : CallerKernelInfoAA.ReachingKernelEntries) {
5064bdd1243dSDimitry Andric int32_t NextAttrVal = K->getFnAttributeAsParsedInteger(Attr, -1);
5065fe6060f1SDimitry Andric
5066fe6060f1SDimitry Andric if (NextAttrVal == -1 ||
5067fe6060f1SDimitry Andric (CurrentAttrValue != -1 && CurrentAttrValue != NextAttrVal))
5068fe6060f1SDimitry Andric return indicatePessimisticFixpoint();
5069fe6060f1SDimitry Andric CurrentAttrValue = NextAttrVal;
5070fe6060f1SDimitry Andric }
5071fe6060f1SDimitry Andric
5072fe6060f1SDimitry Andric if (CurrentAttrValue != -1) {
5073fe6060f1SDimitry Andric auto &Ctx = getAnchorValue().getContext();
5074fe6060f1SDimitry Andric SimplifiedValue =
5075fe6060f1SDimitry Andric ConstantInt::get(Type::getInt32Ty(Ctx), CurrentAttrValue);
5076fe6060f1SDimitry Andric }
5077fe6060f1SDimitry Andric return SimplifiedValue == SimplifiedValueBefore ? ChangeStatus::UNCHANGED
5078fe6060f1SDimitry Andric : ChangeStatus::CHANGED;
5079fe6060f1SDimitry Andric }
5080fe6060f1SDimitry Andric
5081fe6060f1SDimitry Andric /// An optional value the associated value is assumed to fold to. That is, we
5082fe6060f1SDimitry Andric /// assume the associated value (which is a call) can be replaced by this
5083fe6060f1SDimitry Andric /// simplified value.
5084bdd1243dSDimitry Andric std::optional<Value *> SimplifiedValue;
5085fe6060f1SDimitry Andric
5086fe6060f1SDimitry Andric /// The runtime function kind of the callee of the associated call site.
5087fe6060f1SDimitry Andric RuntimeFunction RFKind;
5088fe6060f1SDimitry Andric };
5089fe6060f1SDimitry Andric
50905ffd83dbSDimitry Andric } // namespace
50915ffd83dbSDimitry Andric
5092fe6060f1SDimitry Andric /// Register folding callsite
5093fe6060f1SDimitry Andric void OpenMPOpt::registerFoldRuntimeCall(RuntimeFunction RF) {
5094fe6060f1SDimitry Andric auto &RFI = OMPInfoCache.RFIs[RF];
5095fe6060f1SDimitry Andric RFI.foreachUse(SCC, [&](Use &U, Function &F) {
5096fe6060f1SDimitry Andric CallInst *CI = OpenMPOpt::getCallIfRegularCall(U, &RFI);
5097fe6060f1SDimitry Andric if (!CI)
5098fe6060f1SDimitry Andric return false;
5099fe6060f1SDimitry Andric A.getOrCreateAAFor<AAFoldRuntimeCall>(
5100fe6060f1SDimitry Andric IRPosition::callsite_returned(*CI), /* QueryingAA */ nullptr,
5101fe6060f1SDimitry Andric DepClassTy::NONE, /* ForceUpdate */ false,
5102fe6060f1SDimitry Andric /* UpdateAfterInit */ false);
5103fe6060f1SDimitry Andric return false;
5104fe6060f1SDimitry Andric });
5105fe6060f1SDimitry Andric }
5106fe6060f1SDimitry Andric
5107fe6060f1SDimitry Andric void OpenMPOpt::registerAAs(bool IsModulePass) {
5108fe6060f1SDimitry Andric if (SCC.empty())
5109fe6060f1SDimitry Andric return;
511081ad6265SDimitry Andric
5111fe6060f1SDimitry Andric if (IsModulePass) {
5112fe6060f1SDimitry Andric // Ensure we create the AAKernelInfo AAs first and without triggering an
5113fe6060f1SDimitry Andric // update. This will make sure we register all value simplification
5114fe6060f1SDimitry Andric // callbacks before any other AA has the chance to create an AAValueSimplify
5115fe6060f1SDimitry Andric // or similar.
511681ad6265SDimitry Andric auto CreateKernelInfoCB = [&](Use &, Function &Kernel) {
5117fe6060f1SDimitry Andric A.getOrCreateAAFor<AAKernelInfo>(
511881ad6265SDimitry Andric IRPosition::function(Kernel), /* QueryingAA */ nullptr,
5119fe6060f1SDimitry Andric DepClassTy::NONE, /* ForceUpdate */ false,
5120fe6060f1SDimitry Andric /* UpdateAfterInit */ false);
512181ad6265SDimitry Andric return false;
512281ad6265SDimitry Andric };
512381ad6265SDimitry Andric OMPInformationCache::RuntimeFunctionInfo &InitRFI =
512481ad6265SDimitry Andric OMPInfoCache.RFIs[OMPRTL___kmpc_target_init];
512581ad6265SDimitry Andric InitRFI.foreachUse(SCC, CreateKernelInfoCB);
5126fe6060f1SDimitry Andric
5127fe6060f1SDimitry Andric registerFoldRuntimeCall(OMPRTL___kmpc_is_spmd_exec_mode);
5128fe6060f1SDimitry Andric registerFoldRuntimeCall(OMPRTL___kmpc_parallel_level);
5129fe6060f1SDimitry Andric registerFoldRuntimeCall(OMPRTL___kmpc_get_hardware_num_threads_in_block);
5130fe6060f1SDimitry Andric registerFoldRuntimeCall(OMPRTL___kmpc_get_hardware_num_blocks);
5131fe6060f1SDimitry Andric }
5132fe6060f1SDimitry Andric
5133fe6060f1SDimitry Andric // Create CallSite AA for all Getters.
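// For example (illustrative, editor's note): the getter associated with the
// "nthreads" ICV is omp_get_max_threads; registering an AAICVTracker at each
// such getter call site lets the Attributor later try to replace the call
// with a tracked ICV value.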
5134bdd1243dSDimitry Andric if (DeduceICVValues) {
5135fe6060f1SDimitry Andric for (int Idx = 0; Idx < OMPInfoCache.ICVs.size() - 1; ++Idx) {
5136fe6060f1SDimitry Andric auto ICVInfo = OMPInfoCache.ICVs[static_cast<InternalControlVar>(Idx)];
5137fe6060f1SDimitry Andric
5138fe6060f1SDimitry Andric auto &GetterRFI = OMPInfoCache.RFIs[ICVInfo.Getter];
5139fe6060f1SDimitry Andric
5140fe6060f1SDimitry Andric auto CreateAA = [&](Use &U, Function &Caller) {
5141fe6060f1SDimitry Andric CallInst *CI = OpenMPOpt::getCallIfRegularCall(U, &GetterRFI);
5142fe6060f1SDimitry Andric if (!CI)
5143fe6060f1SDimitry Andric return false;
5144fe6060f1SDimitry Andric
5145fe6060f1SDimitry Andric auto &CB = cast<CallBase>(*CI);
5146fe6060f1SDimitry Andric
5147fe6060f1SDimitry Andric IRPosition CBPos = IRPosition::callsite_function(CB);
5148fe6060f1SDimitry Andric A.getOrCreateAAFor<AAICVTracker>(CBPos);
5149fe6060f1SDimitry Andric return false;
5150fe6060f1SDimitry Andric };
5151fe6060f1SDimitry Andric
5152fe6060f1SDimitry Andric GetterRFI.foreachUse(SCC, CreateAA);
5153fe6060f1SDimitry Andric }
5154bdd1243dSDimitry Andric }
5155fe6060f1SDimitry Andric
5156fe6060f1SDimitry Andric // Create an ExecutionDomain AA for every function and a HeapToStack AA for
5157fe6060f1SDimitry Andric // every function if there is a device kernel.
5158fe6060f1SDimitry Andric if (!isOpenMPDevice(M))
5159fe6060f1SDimitry Andric return;
5160fe6060f1SDimitry Andric
5161fe6060f1SDimitry Andric for (auto *F : SCC) {
5162fe6060f1SDimitry Andric if (F->isDeclaration())
5163fe6060f1SDimitry Andric continue;
5164fe6060f1SDimitry Andric
5165bdd1243dSDimitry Andric // We look at internal functions only on-demand but if any use is not a
5166bdd1243dSDimitry Andric // direct call or outside the current set of analyzed functions, we have
5167bdd1243dSDimitry Andric // to do it eagerly.
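// E.g. (illustrative): an internal function whose address is stored into a
// table or passed around as a function pointer has a use that is not the
// callee operand of a direct call, so it fails the all_of check below and its
// AAs are registered eagerly.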
5168bdd1243dSDimitry Andric if (F->hasLocalLinkage()) {
5169bdd1243dSDimitry Andric if (llvm::all_of(F->uses(), [this](const Use &U) {
5170bdd1243dSDimitry Andric const auto *CB = dyn_cast<CallBase>(U.getUser());
5171bdd1243dSDimitry Andric return CB && CB->isCallee(&U) &&
5172bdd1243dSDimitry Andric A.isRunOn(const_cast<Function *>(CB->getCaller()));
5173bdd1243dSDimitry Andric }))
5174bdd1243dSDimitry Andric continue;
5175bdd1243dSDimitry Andric }
5176bdd1243dSDimitry Andric registerAAsForFunction(A, *F);
5177bdd1243dSDimitry Andric }
5178bdd1243dSDimitry Andric }
5179fe6060f1SDimitry Andric
5180bdd1243dSDimitry Andric void OpenMPOpt::registerAAsForFunction(Attributor &A, const Function &F) {
5181bdd1243dSDimitry Andric if (!DisableOpenMPOptDeglobalization)
5182bdd1243dSDimitry Andric A.getOrCreateAAFor<AAHeapToShared>(IRPosition::function(F));
5183bdd1243dSDimitry Andric A.getOrCreateAAFor<AAExecutionDomain>(IRPosition::function(F));
5184bdd1243dSDimitry Andric if (!DisableOpenMPOptDeglobalization)
5185bdd1243dSDimitry Andric A.getOrCreateAAFor<AAHeapToStack>(IRPosition::function(F));
5186bdd1243dSDimitry Andric
5187bdd1243dSDimitry Andric for (auto &I : instructions(F)) {
5188fe6060f1SDimitry Andric if (auto *LI = dyn_cast<LoadInst>(&I)) {
5189fe6060f1SDimitry Andric bool UsedAssumedInformation = false;
5190fe6060f1SDimitry Andric A.getAssumedSimplified(IRPosition::value(*LI), /* AA */ nullptr,
5191fcaf7f86SDimitry Andric UsedAssumedInformation, AA::Interprocedural);
5192bdd1243dSDimitry Andric continue;
5193bdd1243dSDimitry Andric }
5194bdd1243dSDimitry Andric if (auto *SI = dyn_cast<StoreInst>(&I)) {
519504eeddc0SDimitry Andric A.getOrCreateAAFor<AAIsDead>(IRPosition::value(*SI));
5196bdd1243dSDimitry Andric continue;
5197bdd1243dSDimitry Andric }
5198bdd1243dSDimitry Andric if (auto *II = dyn_cast<IntrinsicInst>(&I)) {
5199bdd1243dSDimitry Andric if (II->getIntrinsicID() == Intrinsic::assume) {
5200bdd1243dSDimitry Andric A.getOrCreateAAFor<AAPotentialValues>(
5201bdd1243dSDimitry Andric IRPosition::value(*II->getArgOperand(0)));
5202bdd1243dSDimitry Andric continue;
5203fe6060f1SDimitry Andric }
5204fe6060f1SDimitry Andric }
5205fe6060f1SDimitry Andric }
5206fe6060f1SDimitry Andric }
5207fe6060f1SDimitry Andric
52085ffd83dbSDimitry Andric const char AAICVTracker::ID = 0;
5209fe6060f1SDimitry Andric const char AAKernelInfo::ID = 0;
5210fe6060f1SDimitry Andric const char AAExecutionDomain::ID = 0;
5211fe6060f1SDimitry Andric const char AAHeapToShared::ID = 0;
5212fe6060f1SDimitry Andric const char AAFoldRuntimeCall::ID = 0;
52135ffd83dbSDimitry Andric
52145ffd83dbSDimitry Andric AAICVTracker &AAICVTracker::createForPosition(const IRPosition &IRP,
52155ffd83dbSDimitry Andric Attributor &A) {
52165ffd83dbSDimitry Andric AAICVTracker *AA = nullptr;
52175ffd83dbSDimitry Andric switch (IRP.getPositionKind()) {
52185ffd83dbSDimitry Andric case IRPosition::IRP_INVALID:
52195ffd83dbSDimitry Andric case IRPosition::IRP_FLOAT:
52205ffd83dbSDimitry Andric case IRPosition::IRP_ARGUMENT:
52215ffd83dbSDimitry Andric case IRPosition::IRP_CALL_SITE_ARGUMENT:
52225ffd83dbSDimitry Andric llvm_unreachable("ICVTracker can only be created for function position!");
5223e8d8bef9SDimitry Andric case IRPosition::IRP_RETURNED:
5224e8d8bef9SDimitry Andric AA = new (A.Allocator) AAICVTrackerFunctionReturned(IRP, A);
5225e8d8bef9SDimitry Andric break;
5226e8d8bef9SDimitry Andric case IRPosition::IRP_CALL_SITE_RETURNED:
5227e8d8bef9SDimitry Andric AA = new (A.Allocator) AAICVTrackerCallSiteReturned(IRP, A);
5228e8d8bef9SDimitry Andric break;
5229e8d8bef9SDimitry Andric case IRPosition::IRP_CALL_SITE:
5230e8d8bef9SDimitry Andric AA = new (A.Allocator) AAICVTrackerCallSite(IRP, A);
5231e8d8bef9SDimitry Andric break;
52325ffd83dbSDimitry Andric case IRPosition::IRP_FUNCTION:
52335ffd83dbSDimitry Andric AA = new (A.Allocator) AAICVTrackerFunction(IRP, A);
52345ffd83dbSDimitry Andric break;
52355ffd83dbSDimitry Andric }
52365ffd83dbSDimitry Andric
52375ffd83dbSDimitry Andric return *AA;
52385ffd83dbSDimitry Andric }
52395ffd83dbSDimitry Andric
5240fe6060f1SDimitry Andric AAExecutionDomain &AAExecutionDomain::createForPosition(const IRPosition &IRP,
5241fe6060f1SDimitry Andric Attributor &A) {
5242fe6060f1SDimitry Andric AAExecutionDomainFunction *AA = nullptr;
5243fe6060f1SDimitry Andric switch (IRP.getPositionKind()) {
5244fe6060f1SDimitry Andric case IRPosition::IRP_INVALID:
5245fe6060f1SDimitry Andric case IRPosition::IRP_FLOAT:
5246fe6060f1SDimitry Andric case IRPosition::IRP_ARGUMENT:
5247fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_ARGUMENT:
5248fe6060f1SDimitry Andric case IRPosition::IRP_RETURNED:
5249fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_RETURNED:
5250fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE:
5251fe6060f1SDimitry Andric llvm_unreachable(
5252fe6060f1SDimitry Andric "AAExecutionDomain can only be created for function position!");
5253fe6060f1SDimitry Andric case IRPosition::IRP_FUNCTION:
5254fe6060f1SDimitry Andric AA = new (A.Allocator) AAExecutionDomainFunction(IRP, A);
5255fe6060f1SDimitry Andric break;
5256fe6060f1SDimitry Andric }
5257fe6060f1SDimitry Andric
5258fe6060f1SDimitry Andric return *AA;
5259fe6060f1SDimitry Andric }
5260fe6060f1SDimitry Andric
5261fe6060f1SDimitry Andric AAHeapToShared &AAHeapToShared::createForPosition(const IRPosition &IRP,
5262fe6060f1SDimitry Andric Attributor &A) {
5263fe6060f1SDimitry Andric AAHeapToSharedFunction *AA = nullptr;
5264fe6060f1SDimitry Andric switch (IRP.getPositionKind()) {
5265fe6060f1SDimitry Andric case IRPosition::IRP_INVALID:
5266fe6060f1SDimitry Andric case IRPosition::IRP_FLOAT:
5267fe6060f1SDimitry Andric case IRPosition::IRP_ARGUMENT:
5268fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_ARGUMENT:
5269fe6060f1SDimitry Andric case IRPosition::IRP_RETURNED:
5270fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_RETURNED:
5271fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE:
5272fe6060f1SDimitry Andric llvm_unreachable(
5273fe6060f1SDimitry Andric "AAHeapToShared can only be created for function position!");
5274fe6060f1SDimitry Andric case IRPosition::IRP_FUNCTION:
5275fe6060f1SDimitry Andric AA = new (A.Allocator) AAHeapToSharedFunction(IRP, A);
5276fe6060f1SDimitry Andric break;
5277fe6060f1SDimitry Andric }
5278fe6060f1SDimitry Andric
5279fe6060f1SDimitry Andric return *AA;
5280fe6060f1SDimitry Andric }
5281fe6060f1SDimitry Andric
5282fe6060f1SDimitry Andric AAKernelInfo &AAKernelInfo::createForPosition(const IRPosition &IRP,
5283fe6060f1SDimitry Andric Attributor &A) {
5284fe6060f1SDimitry Andric AAKernelInfo *AA = nullptr;
5285fe6060f1SDimitry Andric switch (IRP.getPositionKind()) {
5286fe6060f1SDimitry Andric case IRPosition::IRP_INVALID:
5287fe6060f1SDimitry Andric case IRPosition::IRP_FLOAT:
5288fe6060f1SDimitry Andric case IRPosition::IRP_ARGUMENT:
5289fe6060f1SDimitry Andric case IRPosition::IRP_RETURNED:
5290fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_RETURNED:
5291fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_ARGUMENT:
5292fe6060f1SDimitry Andric llvm_unreachable("KernelInfo can only be created for function position!");
5293fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE:
5294fe6060f1SDimitry Andric AA = new (A.Allocator) AAKernelInfoCallSite(IRP, A);
5295fe6060f1SDimitry Andric break;
5296fe6060f1SDimitry Andric case IRPosition::IRP_FUNCTION:
5297fe6060f1SDimitry Andric AA = new (A.Allocator) AAKernelInfoFunction(IRP, A);
5298fe6060f1SDimitry Andric break;
5299fe6060f1SDimitry Andric }
5300fe6060f1SDimitry Andric
5301fe6060f1SDimitry Andric return *AA;
5302fe6060f1SDimitry Andric }
5303fe6060f1SDimitry Andric
5304fe6060f1SDimitry Andric AAFoldRuntimeCall &AAFoldRuntimeCall::createForPosition(const IRPosition &IRP,
5305fe6060f1SDimitry Andric Attributor &A) {
5306fe6060f1SDimitry Andric AAFoldRuntimeCall *AA = nullptr;
5307fe6060f1SDimitry Andric switch (IRP.getPositionKind()) {
5308fe6060f1SDimitry Andric case IRPosition::IRP_INVALID:
5309fe6060f1SDimitry Andric case IRPosition::IRP_FLOAT:
5310fe6060f1SDimitry Andric case IRPosition::IRP_ARGUMENT:
5311fe6060f1SDimitry Andric case IRPosition::IRP_RETURNED:
5312fe6060f1SDimitry Andric case IRPosition::IRP_FUNCTION:
5313fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE:
5314fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_ARGUMENT:
5315fe6060f1SDimitry Andric llvm_unreachable("AAFoldRuntimeCall can only be created for call site position!");
5316fe6060f1SDimitry Andric case IRPosition::IRP_CALL_SITE_RETURNED:
5317fe6060f1SDimitry Andric AA = new (A.Allocator) AAFoldRuntimeCallCallSiteReturned(IRP, A);
5318fe6060f1SDimitry Andric break;
5319fe6060f1SDimitry Andric }
5320fe6060f1SDimitry Andric
5321fe6060f1SDimitry Andric return *AA;
5322fe6060f1SDimitry Andric }
5323fe6060f1SDimitry Andric
5324fe6060f1SDimitry Andric PreservedAnalyses OpenMPOptPass::run(Module &M, ModuleAnalysisManager &AM) {
5325fe6060f1SDimitry Andric if (!containsOpenMP(M))
5326fe6060f1SDimitry Andric return PreservedAnalyses::all();
5327fe6060f1SDimitry Andric if (DisableOpenMPOptimizations)
53285ffd83dbSDimitry Andric return PreservedAnalyses::all();
53295ffd83dbSDimitry Andric
5330fe6060f1SDimitry Andric FunctionAnalysisManager &FAM =
5331fe6060f1SDimitry Andric AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
5332fe6060f1SDimitry Andric KernelSet Kernels = getDeviceKernels(M);
5333fe6060f1SDimitry Andric
533481ad6265SDimitry Andric if (PrintModuleBeforeOptimizations)
533581ad6265SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Module before OpenMPOpt Module Pass:\n" << M);
533681ad6265SDimitry Andric
5337fe6060f1SDimitry Andric auto IsCalled = [&](Function &F) {
5338fe6060f1SDimitry Andric if (Kernels.contains(&F))
5339fe6060f1SDimitry Andric return true;
5340fe6060f1SDimitry Andric for (const User *U : F.users())
5341fe6060f1SDimitry Andric if (!isa<BlockAddress>(U))
5342fe6060f1SDimitry Andric return true;
5343fe6060f1SDimitry Andric return false;
5344fe6060f1SDimitry Andric };
5345fe6060f1SDimitry Andric
5346fe6060f1SDimitry Andric auto EmitRemark = [&](Function &F) {
5347fe6060f1SDimitry Andric auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);
5348fe6060f1SDimitry Andric ORE.emit([&]() {
5349fe6060f1SDimitry Andric OptimizationRemarkAnalysis ORA(DEBUG_TYPE, "OMP140", &F);
5350fe6060f1SDimitry Andric return ORA << "Could not internalize function. "
53516e75b2fbSDimitry Andric << "Some optimizations may not be possible. [OMP140]";
[OMP140]"; 5352fe6060f1SDimitry Andric }); 5353fe6060f1SDimitry Andric }; 5354fe6060f1SDimitry Andric 5355fe6060f1SDimitry Andric // Create internal copies of each function if this is a kernel Module. This 5356fe6060f1SDimitry Andric // allows iterprocedural passes to see every call edge. 53576e75b2fbSDimitry Andric DenseMap<Function *, Function *> InternalizedMap; 53586e75b2fbSDimitry Andric if (isOpenMPDevice(M)) { 53596e75b2fbSDimitry Andric SmallPtrSet<Function *, 16> InternalizeFns; 5360fe6060f1SDimitry Andric for (Function &F : M) 5361fe6060f1SDimitry Andric if (!F.isDeclaration() && !Kernels.contains(&F) && IsCalled(F) && 5362fe6060f1SDimitry Andric !DisableInternalization) { 53636e75b2fbSDimitry Andric if (Attributor::isInternalizable(F)) { 53646e75b2fbSDimitry Andric InternalizeFns.insert(&F); 5365fe6060f1SDimitry Andric } else if (!F.hasLocalLinkage() && !F.hasFnAttribute(Attribute::Cold)) { 5366fe6060f1SDimitry Andric EmitRemark(F); 5367fe6060f1SDimitry Andric } 5368fe6060f1SDimitry Andric } 5369fe6060f1SDimitry Andric 53706e75b2fbSDimitry Andric Attributor::internalizeFunctions(InternalizeFns, InternalizedMap); 53716e75b2fbSDimitry Andric } 53726e75b2fbSDimitry Andric 5373fe6060f1SDimitry Andric // Look at every function in the Module unless it was internalized. 5374bdd1243dSDimitry Andric SetVector<Function *> Functions; 5375fe6060f1SDimitry Andric SmallVector<Function *, 16> SCC; 5376fe6060f1SDimitry Andric for (Function &F : M) 5377bdd1243dSDimitry Andric if (!F.isDeclaration() && !InternalizedMap.lookup(&F)) { 5378fe6060f1SDimitry Andric SCC.push_back(&F); 5379bdd1243dSDimitry Andric Functions.insert(&F); 5380bdd1243dSDimitry Andric } 5381fe6060f1SDimitry Andric 5382fe6060f1SDimitry Andric if (SCC.empty()) 5383fe6060f1SDimitry Andric return PreservedAnalyses::all(); 5384fe6060f1SDimitry Andric 5385fe6060f1SDimitry Andric AnalysisGetter AG(FAM); 5386fe6060f1SDimitry Andric 5387fe6060f1SDimitry Andric auto OREGetter = [&FAM](Function *F) -> OptimizationRemarkEmitter & { 5388fe6060f1SDimitry Andric return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F); 5389fe6060f1SDimitry Andric }; 5390fe6060f1SDimitry Andric 5391fe6060f1SDimitry Andric BumpPtrAllocator Allocator; 5392fe6060f1SDimitry Andric CallGraphUpdater CGUpdater; 5393fe6060f1SDimitry Andric 5394*1ac55f4cSDimitry Andric bool PostLink = LTOPhase == ThinOrFullLTOPhase::FullLTOPostLink || 5395*1ac55f4cSDimitry Andric LTOPhase == ThinOrFullLTOPhase::ThinLTOPreLink; 5396*1ac55f4cSDimitry Andric OMPInformationCache InfoCache(M, AG, Allocator, /*CGSCC*/ nullptr, Kernels, 5397*1ac55f4cSDimitry Andric PostLink); 5398fe6060f1SDimitry Andric 5399349cc55cSDimitry Andric unsigned MaxFixpointIterations = 5400349cc55cSDimitry Andric (isOpenMPDevice(M)) ? 
5400349cc55cSDimitry Andric (isOpenMPDevice(M)) ? SetFixpointIterations : 32;
540181ad6265SDimitry Andric
540281ad6265SDimitry Andric AttributorConfig AC(CGUpdater);
540381ad6265SDimitry Andric AC.DefaultInitializeLiveInternals = false;
5404bdd1243dSDimitry Andric AC.IsModulePass = true;
540581ad6265SDimitry Andric AC.RewriteSignatures = false;
540681ad6265SDimitry Andric AC.MaxFixpointIterations = MaxFixpointIterations;
540781ad6265SDimitry Andric AC.OREGetter = OREGetter;
540881ad6265SDimitry Andric AC.PassName = DEBUG_TYPE;
5409bdd1243dSDimitry Andric AC.InitializationCallback = OpenMPOpt::registerAAsForFunction;
541081ad6265SDimitry Andric
541181ad6265SDimitry Andric Attributor A(Functions, InfoCache, AC);
5412fe6060f1SDimitry Andric
5413fe6060f1SDimitry Andric OpenMPOpt OMPOpt(SCC, CGUpdater, OREGetter, InfoCache, A);
5414fe6060f1SDimitry Andric bool Changed = OMPOpt.run(true);
5415349cc55cSDimitry Andric
5416349cc55cSDimitry Andric // Optionally inline device functions for potentially better performance.
5417349cc55cSDimitry Andric if (AlwaysInlineDeviceFunctions && isOpenMPDevice(M))
5418349cc55cSDimitry Andric for (Function &F : M)
5419349cc55cSDimitry Andric if (!F.isDeclaration() && !Kernels.contains(&F) &&
5420349cc55cSDimitry Andric !F.hasFnAttribute(Attribute::NoInline))
5421349cc55cSDimitry Andric F.addFnAttr(Attribute::AlwaysInline);
5422349cc55cSDimitry Andric
5423349cc55cSDimitry Andric if (PrintModuleAfterOptimizations)
5424349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Module after OpenMPOpt Module Pass:\n" << M);
5425349cc55cSDimitry Andric
5426fe6060f1SDimitry Andric if (Changed)
5427fe6060f1SDimitry Andric return PreservedAnalyses::none();
5428fe6060f1SDimitry Andric
5429fe6060f1SDimitry Andric return PreservedAnalyses::all();
5430fe6060f1SDimitry Andric }
5431fe6060f1SDimitry Andric
5432fe6060f1SDimitry Andric PreservedAnalyses OpenMPOptCGSCCPass::run(LazyCallGraph::SCC &C,
5433fe6060f1SDimitry Andric CGSCCAnalysisManager &AM,
5434fe6060f1SDimitry Andric LazyCallGraph &CG,
5435fe6060f1SDimitry Andric CGSCCUpdateResult &UR) {
5436fe6060f1SDimitry Andric if (!containsOpenMP(*C.begin()->getFunction().getParent()))
5437fe6060f1SDimitry Andric return PreservedAnalyses::all();
54385ffd83dbSDimitry Andric if (DisableOpenMPOptimizations)
54395ffd83dbSDimitry Andric return PreservedAnalyses::all();
54405ffd83dbSDimitry Andric
54415ffd83dbSDimitry Andric SmallVector<Function *, 16> SCC;
5442e8d8bef9SDimitry Andric // If there are kernels in the module, we have to run on all SCC's.
5443e8d8bef9SDimitry Andric for (LazyCallGraph::Node &N : C) {
5444e8d8bef9SDimitry Andric Function *Fn = &N.getFunction();
5445e8d8bef9SDimitry Andric SCC.push_back(Fn);
5446e8d8bef9SDimitry Andric }
5447e8d8bef9SDimitry Andric
5448fe6060f1SDimitry Andric if (SCC.empty())
54495ffd83dbSDimitry Andric return PreservedAnalyses::all();
54505ffd83dbSDimitry Andric
5451fe6060f1SDimitry Andric Module &M = *C.begin()->getFunction().getParent();
5452fe6060f1SDimitry Andric
545381ad6265SDimitry Andric if (PrintModuleBeforeOptimizations)
545481ad6265SDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Module before OpenMPOpt CGSCC Pass:\n" << M);
545581ad6265SDimitry Andric
5456fe6060f1SDimitry Andric KernelSet Kernels = getDeviceKernels(M);
5457fe6060f1SDimitry Andric
54585ffd83dbSDimitry Andric FunctionAnalysisManager &FAM =
54595ffd83dbSDimitry Andric AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();
54605ffd83dbSDimitry Andric
54615ffd83dbSDimitry Andric AnalysisGetter AG(FAM);
54625ffd83dbSDimitry Andric
54635ffd83dbSDimitry Andric auto OREGetter = [&FAM](Function *F) -> OptimizationRemarkEmitter & {
54645ffd83dbSDimitry Andric return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F);
54655ffd83dbSDimitry Andric };
54665ffd83dbSDimitry Andric
5467fe6060f1SDimitry Andric BumpPtrAllocator Allocator;
54685ffd83dbSDimitry Andric CallGraphUpdater CGUpdater;
54695ffd83dbSDimitry Andric CGUpdater.initialize(CG, C, AM, UR);
54705ffd83dbSDimitry Andric
5471*1ac55f4cSDimitry Andric bool PostLink = LTOPhase == ThinOrFullLTOPhase::FullLTOPostLink ||
5472*1ac55f4cSDimitry Andric LTOPhase == ThinOrFullLTOPhase::ThinLTOPreLink;
54735ffd83dbSDimitry Andric SetVector<Function *> Functions(SCC.begin(), SCC.end());
54745ffd83dbSDimitry Andric OMPInformationCache InfoCache(*(Functions.back()->getParent()), AG, Allocator,
5475*1ac55f4cSDimitry Andric /*CGSCC*/ &Functions, Kernels, PostLink);
54765ffd83dbSDimitry Andric
5477349cc55cSDimitry Andric unsigned MaxFixpointIterations =
5478349cc55cSDimitry Andric (isOpenMPDevice(M)) ? SetFixpointIterations : 32;
547981ad6265SDimitry Andric
548081ad6265SDimitry Andric AttributorConfig AC(CGUpdater);
548181ad6265SDimitry Andric AC.DefaultInitializeLiveInternals = false;
548281ad6265SDimitry Andric AC.IsModulePass = false;
548381ad6265SDimitry Andric AC.RewriteSignatures = false;
548481ad6265SDimitry Andric AC.MaxFixpointIterations = MaxFixpointIterations;
548581ad6265SDimitry Andric AC.OREGetter = OREGetter;
548681ad6265SDimitry Andric AC.PassName = DEBUG_TYPE;
5487bdd1243dSDimitry Andric AC.InitializationCallback = OpenMPOpt::registerAAsForFunction;
548881ad6265SDimitry Andric
548981ad6265SDimitry Andric Attributor A(Functions, InfoCache, AC);
54905ffd83dbSDimitry Andric
54915ffd83dbSDimitry Andric OpenMPOpt OMPOpt(SCC, CGUpdater, OREGetter, InfoCache, A);
5492fe6060f1SDimitry Andric bool Changed = OMPOpt.run(false);
5493349cc55cSDimitry Andric
5494349cc55cSDimitry Andric if (PrintModuleAfterOptimizations)
5495349cc55cSDimitry Andric LLVM_DEBUG(dbgs() << TAG << "Module after OpenMPOpt CGSCC Pass:\n" << M);
5496349cc55cSDimitry Andric
54975ffd83dbSDimitry Andric if (Changed)
54985ffd83dbSDimitry Andric return PreservedAnalyses::none();
54995ffd83dbSDimitry Andric
55005ffd83dbSDimitry Andric return PreservedAnalyses::all();
55015ffd83dbSDimitry Andric }
55025ffd83dbSDimitry Andric
5503fe6060f1SDimitry Andric KernelSet llvm::omp::getDeviceKernels(Module &M) {
5504fe6060f1SDimitry Andric // TODO: Create a more cross-platform way of determining device kernels.
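// Kernels are currently discovered through "nvvm.annotations" named metadata.
// An entry looks roughly like this (illustrative sketch, the function name is
// made up):
//
//   !nvvm.annotations = !{!0}
//   !0 = !{ptr @__omp_offloading_example_kernel, !"kernel", i32 1}
//
// i.e., operand 0 is the kernel function and operand 1 is the string
// "kernel", which is exactly what the loop below checks.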
5505bdd1243dSDimitry Andric NamedMDNode *MD = M.getNamedMetadata("nvvm.annotations");
5506fe6060f1SDimitry Andric KernelSet Kernels;
5507fe6060f1SDimitry Andric
55085ffd83dbSDimitry Andric if (!MD)
5509fe6060f1SDimitry Andric return Kernels;
55105ffd83dbSDimitry Andric
55115ffd83dbSDimitry Andric for (auto *Op : MD->operands()) {
55125ffd83dbSDimitry Andric if (Op->getNumOperands() < 2)
55135ffd83dbSDimitry Andric continue;
55145ffd83dbSDimitry Andric MDString *KindID = dyn_cast<MDString>(Op->getOperand(1));
55155ffd83dbSDimitry Andric if (!KindID || KindID->getString() != "kernel")
55165ffd83dbSDimitry Andric continue;
55175ffd83dbSDimitry Andric
55185ffd83dbSDimitry Andric Function *KernelFn =
55195ffd83dbSDimitry Andric mdconst::dyn_extract_or_null<Function>(Op->getOperand(0));
55205ffd83dbSDimitry Andric if (!KernelFn)
55215ffd83dbSDimitry Andric continue;
55225ffd83dbSDimitry Andric
55235ffd83dbSDimitry Andric ++NumOpenMPTargetRegionKernels;
55245ffd83dbSDimitry Andric
55255ffd83dbSDimitry Andric Kernels.insert(KernelFn);
55265ffd83dbSDimitry Andric }
5527fe6060f1SDimitry Andric
5528fe6060f1SDimitry Andric return Kernels;
55295ffd83dbSDimitry Andric }
55305ffd83dbSDimitry Andric
5531fe6060f1SDimitry Andric bool llvm::omp::containsOpenMP(Module &M) {
5532fe6060f1SDimitry Andric Metadata *MD = M.getModuleFlag("openmp");
5533fe6060f1SDimitry Andric if (!MD)
5534fe6060f1SDimitry Andric return false;
55355ffd83dbSDimitry Andric
55365ffd83dbSDimitry Andric return true;
55375ffd83dbSDimitry Andric }
55385ffd83dbSDimitry Andric
5539fe6060f1SDimitry Andric bool llvm::omp::isOpenMPDevice(Module &M) {
5540fe6060f1SDimitry Andric Metadata *MD = M.getModuleFlag("openmp-device");
5541fe6060f1SDimitry Andric if (!MD)
5542fe6060f1SDimitry Andric return false;
5543fe6060f1SDimitry Andric
5544fe6060f1SDimitry Andric return true;
55455ffd83dbSDimitry Andric }
5546
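// Editor's note (illustrative, the version value depends on -fopenmp-version):
// the two queries above key off module flags that OpenMP-aware frontends
// attach, e.g.
//
//   !llvm.module.flags = !{!0, !1}
//   !0 = !{i32 7, !"openmp", i32 50}
//   !1 = !{i32 7, !"openmp-device", i32 50}
//
// A host-only OpenMP module carries just the "openmp" flag, while device code
// also carries "openmp-device".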