xref: /freebsd-src/contrib/llvm-project/llvm/lib/Target/AMDGPU/AMDGPUInstCombineIntrinsic.cpp (revision cb14a3fe5122c879eae1fb480ed7ce82a699ddb6)
//===- AMDGPUInstCombineIntrinsic.cpp - AMDGPU specific InstCombine pass --===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// \file
// This file implements the AMDGPU-specific intrinsic combines used by the
// InstCombine pass. It is invoked through the target's TargetTransformInfo
// hooks and uses detailed target knowledge to simplify AMDGPU intrinsic
// calls, while leaving generic combines to the target-independent code.
//
//===----------------------------------------------------------------------===//

#include "AMDGPUInstrInfo.h"
#include "AMDGPUTargetTransformInfo.h"
#include "GCNSubtarget.h"
#include "llvm/ADT/FloatingPointMode.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/Transforms/InstCombine/InstCombiner.h"
#include <optional>

using namespace llvm;
using namespace llvm::PatternMatch;

#define DEBUG_TYPE "AMDGPUtti"

namespace {

struct AMDGPUImageDMaskIntrinsic {
  unsigned Intr;
};

#define GET_AMDGPUImageDMaskIntrinsicTable_IMPL
#include "InstCombineTables.inc"

} // end anonymous namespace

// Constant fold llvm.amdgcn.fmed3 intrinsics for standard inputs.
//
// A single NaN input is folded to minnum, so we rely on that folding for
// handling NaNs.
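//
// For example, in a hypothetical constant-folding query with Src0 = 0.0,
// Src1 = 2.0 and Src2 = 1.0, Max3 is 2.0 and compares equal to Src1, so the
// result is maxnum(0.0, 1.0) = 1.0, i.e. the median of the three inputs.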
static APFloat fmed3AMDGCN(const APFloat &Src0, const APFloat &Src1,
                           const APFloat &Src2) {
  APFloat Max3 = maxnum(maxnum(Src0, Src1), Src2);

  APFloat::cmpResult Cmp0 = Max3.compare(Src0);
  assert(Cmp0 != APFloat::cmpUnordered && "nans handled separately");
  if (Cmp0 == APFloat::cmpEqual)
    return maxnum(Src1, Src2);

  APFloat::cmpResult Cmp1 = Max3.compare(Src1);
  assert(Cmp1 != APFloat::cmpUnordered && "nans handled separately");
  if (Cmp1 == APFloat::cmpEqual)
    return maxnum(Src0, Src2);

  return maxnum(Src0, Src1);
}

// Check if a value can be converted to a 16-bit value without losing
// precision.
// The value is expected to be either a float (IsFloat = true) or an unsigned
// integer (IsFloat = false).
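//
// For example, a float constant 0.5 converts to half exactly and a
// (zext i16 %x to i32) operand is trivially narrowable, so both are accepted;
// a float constant such as 1.0e10 overflows half and is rejected.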
static bool canSafelyConvertTo16Bit(Value &V, bool IsFloat) {
  Type *VTy = V.getType();
  if (VTy->isHalfTy() || VTy->isIntegerTy(16)) {
    // The value is already 16-bit, so we don't want to convert to 16-bit again!
    return false;
  }
  if (IsFloat) {
    if (ConstantFP *ConstFloat = dyn_cast<ConstantFP>(&V)) {
      // We need to check that if we cast the index down to a half, we do not
      // lose precision.
      APFloat FloatValue(ConstFloat->getValueAPF());
      bool LosesInfo = true;
      FloatValue.convert(APFloat::IEEEhalf(), APFloat::rmTowardZero,
                         &LosesInfo);
      return !LosesInfo;
    }
  } else {
    if (ConstantInt *ConstInt = dyn_cast<ConstantInt>(&V)) {
      // We need to check that if we cast the index down to an i16, we do not
      // lose precision.
      APInt IntValue(ConstInt->getValue());
      return IntValue.getActiveBits() <= 16;
    }
  }

  Value *CastSrc;
  bool IsExt = IsFloat ? match(&V, m_FPExt(PatternMatch::m_Value(CastSrc)))
                       : match(&V, m_ZExt(PatternMatch::m_Value(CastSrc)));
  if (IsExt) {
    Type *CastSrcTy = CastSrc->getType();
    if (CastSrcTy->isHalfTy() || CastSrcTy->isIntegerTy(16))
      return true;
  }

  return false;
}

// Convert a value to 16-bit.
static Value *convertTo16Bit(Value &V, InstCombiner::BuilderTy &Builder) {
  Type *VTy = V.getType();
  if (isa<FPExtInst>(&V) || isa<SExtInst>(&V) || isa<ZExtInst>(&V))
    return cast<Instruction>(&V)->getOperand(0);
  if (VTy->isIntegerTy())
    return Builder.CreateIntCast(&V, Type::getInt16Ty(V.getContext()), false);
  if (VTy->isFloatingPointTy())
    return Builder.CreateFPCast(&V, Type::getHalfTy(V.getContext()));

  llvm_unreachable("Should never be called!");
}

/// Applies Func(OldIntr.Args, OldIntr.ArgTys), creates intrinsic call with
/// modified arguments (based on OldIntr) and replaces InstToReplace with
/// this newly created intrinsic call.
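///
/// For example, passing a lambda that erases the lod argument turns an
/// image.sample.l call into the corresponding image.sample.lz call while
/// preserving the name, metadata and fast-math flags of the original call.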
static std::optional<Instruction *> modifyIntrinsicCall(
    IntrinsicInst &OldIntr, Instruction &InstToReplace, unsigned NewIntr,
    InstCombiner &IC,
    std::function<void(SmallVectorImpl<Value *> &, SmallVectorImpl<Type *> &)>
        Func) {
  SmallVector<Type *, 4> ArgTys;
  if (!Intrinsic::getIntrinsicSignature(OldIntr.getCalledFunction(), ArgTys))
    return std::nullopt;

  SmallVector<Value *, 8> Args(OldIntr.args());

  // Modify arguments and types
  Func(Args, ArgTys);

  Function *I = Intrinsic::getDeclaration(OldIntr.getModule(), NewIntr, ArgTys);

  CallInst *NewCall = IC.Builder.CreateCall(I, Args);
  NewCall->takeName(&OldIntr);
  NewCall->copyMetadata(OldIntr);
  if (isa<FPMathOperator>(NewCall))
    NewCall->copyFastMathFlags(&OldIntr);

  // Erase and replace uses
  if (!InstToReplace.getType()->isVoidTy())
    IC.replaceInstUsesWith(InstToReplace, NewCall);

  bool RemoveOldIntr = &OldIntr != &InstToReplace;

  auto RetValue = IC.eraseInstFromFunction(InstToReplace);
  if (RemoveOldIntr)
    IC.eraseInstFromFunction(OldIntr);

  return RetValue;
}

static std::optional<Instruction *>
simplifyAMDGCNImageIntrinsic(const GCNSubtarget *ST,
                             const AMDGPU::ImageDimIntrinsicInfo *ImageDimIntr,
                             IntrinsicInst &II, InstCombiner &IC) {
  // Optimize _L to _LZ when _L is zero
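  //
  // e.g. an image.sample.l call whose lod operand is a constant that is zero
  // or negative is rewritten to the matching image.sample.lz intrinsic with
  // the lod operand dropped.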
  if (const auto *LZMappingInfo =
          AMDGPU::getMIMGLZMappingInfo(ImageDimIntr->BaseOpcode)) {
    if (auto *ConstantLod =
            dyn_cast<ConstantFP>(II.getOperand(ImageDimIntr->LodIndex))) {
      if (ConstantLod->isZero() || ConstantLod->isNegative()) {
        const AMDGPU::ImageDimIntrinsicInfo *NewImageDimIntr =
            AMDGPU::getImageDimIntrinsicByBaseOpcode(LZMappingInfo->LZ,
                                                     ImageDimIntr->Dim);
        return modifyIntrinsicCall(
            II, II, NewImageDimIntr->Intr, IC, [&](auto &Args, auto &ArgTys) {
              Args.erase(Args.begin() + ImageDimIntr->LodIndex);
            });
      }
    }
  }

  // Optimize _mip away, when 'lod' is zero
  if (const auto *MIPMappingInfo =
          AMDGPU::getMIMGMIPMappingInfo(ImageDimIntr->BaseOpcode)) {
    if (auto *ConstantMip =
            dyn_cast<ConstantInt>(II.getOperand(ImageDimIntr->MipIndex))) {
      if (ConstantMip->isZero()) {
        const AMDGPU::ImageDimIntrinsicInfo *NewImageDimIntr =
            AMDGPU::getImageDimIntrinsicByBaseOpcode(MIPMappingInfo->NONMIP,
                                                     ImageDimIntr->Dim);
        return modifyIntrinsicCall(
            II, II, NewImageDimIntr->Intr, IC, [&](auto &Args, auto &ArgTys) {
              Args.erase(Args.begin() + ImageDimIntr->MipIndex);
            });
      }
    }
  }

  // Optimize _bias away when 'bias' is zero
  if (const auto *BiasMappingInfo =
          AMDGPU::getMIMGBiasMappingInfo(ImageDimIntr->BaseOpcode)) {
    if (auto *ConstantBias =
            dyn_cast<ConstantFP>(II.getOperand(ImageDimIntr->BiasIndex))) {
      if (ConstantBias->isZero()) {
        const AMDGPU::ImageDimIntrinsicInfo *NewImageDimIntr =
            AMDGPU::getImageDimIntrinsicByBaseOpcode(BiasMappingInfo->NoBias,
                                                     ImageDimIntr->Dim);
        return modifyIntrinsicCall(
            II, II, NewImageDimIntr->Intr, IC, [&](auto &Args, auto &ArgTys) {
              Args.erase(Args.begin() + ImageDimIntr->BiasIndex);
              ArgTys.erase(ArgTys.begin() + ImageDimIntr->BiasTyArg);
            });
      }
    }
  }

  // Optimize _offset away when 'offset' is zero
  if (const auto *OffsetMappingInfo =
          AMDGPU::getMIMGOffsetMappingInfo(ImageDimIntr->BaseOpcode)) {
    if (auto *ConstantOffset =
            dyn_cast<ConstantInt>(II.getOperand(ImageDimIntr->OffsetIndex))) {
      if (ConstantOffset->isZero()) {
        const AMDGPU::ImageDimIntrinsicInfo *NewImageDimIntr =
            AMDGPU::getImageDimIntrinsicByBaseOpcode(
                OffsetMappingInfo->NoOffset, ImageDimIntr->Dim);
        return modifyIntrinsicCall(
            II, II, NewImageDimIntr->Intr, IC, [&](auto &Args, auto &ArgTys) {
              Args.erase(Args.begin() + ImageDimIntr->OffsetIndex);
            });
      }
    }
  }

  // Try to use D16
  if (ST->hasD16Images()) {

    const AMDGPU::MIMGBaseOpcodeInfo *BaseOpcode =
        AMDGPU::getMIMGBaseOpcodeInfo(ImageDimIntr->BaseOpcode);

    if (BaseOpcode->HasD16) {

      // If the only use of image intrinsic is a fptrunc (with conversion to
      // half) then both fptrunc and image intrinsic will be replaced with image
      // intrinsic with D16 flag.
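      //
      // A rough IR sketch of the pattern being matched:
      //   %v = call <4 x float> @llvm.amdgcn.image.sample...(...)
      //   %h = fptrunc <4 x float> %v to <4 x half>
      // collapses into a single image sample that returns <4 x half>.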
      if (II.hasOneUse()) {
        Instruction *User = II.user_back();

        if (User->getOpcode() == Instruction::FPTrunc &&
            User->getType()->getScalarType()->isHalfTy()) {

          return modifyIntrinsicCall(II, *User, ImageDimIntr->Intr, IC,
                                     [&](auto &Args, auto &ArgTys) {
                                       // Change return type of image intrinsic.
                                       // Set it to return type of fptrunc.
                                       ArgTys[0] = User->getType();
                                     });
        }
      }
    }
  }

  // Try to use A16 or G16
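  // (A16 allows 16-bit coordinates/addresses; G16 only covers the gradient
  // operands, which is why the code below may fall back to converting just
  // the derivatives.)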
  if (!ST->hasA16() && !ST->hasG16())
    return std::nullopt;

  // Address is interpreted as float if the instruction has a sampler or as
  // unsigned int if there is no sampler.
  bool HasSampler =
      AMDGPU::getMIMGBaseOpcodeInfo(ImageDimIntr->BaseOpcode)->Sampler;
  bool FloatCoord = false;
  // true means derivatives can be converted to 16 bit, coordinates not
  bool OnlyDerivatives = false;

  for (unsigned OperandIndex = ImageDimIntr->GradientStart;
       OperandIndex < ImageDimIntr->VAddrEnd; OperandIndex++) {
    Value *Coord = II.getOperand(OperandIndex);
    // If the values are not derived from 16-bit values, we cannot optimize.
    if (!canSafelyConvertTo16Bit(*Coord, HasSampler)) {
      if (OperandIndex < ImageDimIntr->CoordStart ||
          ImageDimIntr->GradientStart == ImageDimIntr->CoordStart) {
        return std::nullopt;
      }
      // All gradients can be converted, so convert only them
      OnlyDerivatives = true;
      break;
    }

    assert(OperandIndex == ImageDimIntr->GradientStart ||
           FloatCoord == Coord->getType()->isFloatingPointTy());
    FloatCoord = Coord->getType()->isFloatingPointTy();
  }

  if (!OnlyDerivatives && !ST->hasA16())
    OnlyDerivatives = true; // Only supports G16

  // Check if there is a bias parameter and if it can be converted to f16
  if (!OnlyDerivatives && ImageDimIntr->NumBiasArgs != 0) {
    Value *Bias = II.getOperand(ImageDimIntr->BiasIndex);
    assert(HasSampler &&
           "Only image instructions with a sampler can have a bias");
    if (!canSafelyConvertTo16Bit(*Bias, HasSampler))
      OnlyDerivatives = true;
  }

  if (OnlyDerivatives && (!ST->hasG16() || ImageDimIntr->GradientStart ==
                                               ImageDimIntr->CoordStart))
    return std::nullopt;

  Type *CoordType = FloatCoord ? Type::getHalfTy(II.getContext())
                               : Type::getInt16Ty(II.getContext());

  return modifyIntrinsicCall(
      II, II, II.getIntrinsicID(), IC, [&](auto &Args, auto &ArgTys) {
        ArgTys[ImageDimIntr->GradientTyArg] = CoordType;
        if (!OnlyDerivatives) {
          ArgTys[ImageDimIntr->CoordTyArg] = CoordType;

          // Change the bias type
          if (ImageDimIntr->NumBiasArgs != 0)
            ArgTys[ImageDimIntr->BiasTyArg] = Type::getHalfTy(II.getContext());
        }

        unsigned EndIndex =
            OnlyDerivatives ? ImageDimIntr->CoordStart : ImageDimIntr->VAddrEnd;
        for (unsigned OperandIndex = ImageDimIntr->GradientStart;
             OperandIndex < EndIndex; OperandIndex++) {
          Args[OperandIndex] =
              convertTo16Bit(*II.getOperand(OperandIndex), IC.Builder);
        }

        // Convert the bias
        if (!OnlyDerivatives && ImageDimIntr->NumBiasArgs != 0) {
          Value *Bias = II.getOperand(ImageDimIntr->BiasIndex);
          Args[ImageDimIntr->BiasIndex] = convertTo16Bit(*Bias, IC.Builder);
        }
      });
}

bool GCNTTIImpl::canSimplifyLegacyMulToMul(const Instruction &I,
                                           const Value *Op0, const Value *Op1,
                                           InstCombiner &IC) const {
  // The legacy behaviour is that multiplying +/-0.0 by anything, even NaN or
  // infinity, gives +0.0. If we can prove we don't have one of the special
  // cases then we can use a normal multiply instead.
  // TODO: Create and use isKnownFiniteNonZero instead of just matching
  // constants here.
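  //
  // For example, (fmul_legacy x, 2.0) can safely become (fmul x, 2.0): the
  // constant 2.0 is finite and non-zero, so the +/-0.0 special case cannot
  // apply.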
  if (match(Op0, PatternMatch::m_FiniteNonZero()) ||
      match(Op1, PatternMatch::m_FiniteNonZero())) {
    // One operand is not zero or infinity or NaN.
    return true;
  }

  auto *TLI = &IC.getTargetLibraryInfo();
  if (isKnownNeverInfOrNaN(Op0, IC.getDataLayout(), TLI, 0,
                           &IC.getAssumptionCache(), &I,
                           &IC.getDominatorTree()) &&
      isKnownNeverInfOrNaN(Op1, IC.getDataLayout(), TLI, 0,
                           &IC.getAssumptionCache(), &I,
                           &IC.getDominatorTree())) {
    // Neither operand is infinity or NaN.
    return true;
  }
  return false;
}

/// Match an fpext from half to float, or a constant we can convert.
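///
/// For example, a single-use (fpext half %x to float) yields %x, and a float
/// constant 2.0 is rewritten as the equivalent half constant; a constant like
/// 0.1 is rejected because it is not exactly representable in half.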
static bool matchFPExtFromF16(Value *Arg, Value *&FPExtSrc) {
  if (match(Arg, m_OneUse(m_FPExt(m_Value(FPExtSrc)))))
    return FPExtSrc->getType()->isHalfTy();

  ConstantFP *CFP;
  if (match(Arg, m_ConstantFP(CFP))) {
    bool LosesInfo;
    APFloat Val(CFP->getValueAPF());
    Val.convert(APFloat::IEEEhalf(), APFloat::rmNearestTiesToEven, &LosesInfo);
    if (LosesInfo)
      return false;

    FPExtSrc = ConstantFP::get(Type::getHalfTy(Arg->getContext()), Val);
    return true;
  }

  return false;
}

// Trim all zero components from the end of the vector \p UseV and return
// an appropriate bitset with known elements.
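//
// For example, for a <4 x i32> value <x, y, 0, 0> the returned demanded-elts
// mask is 0b0011; element 0 is never cleared, even when it is known zero.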
static APInt trimTrailingZerosInVector(InstCombiner &IC, Value *UseV,
                                       Instruction *I) {
  auto *VTy = cast<FixedVectorType>(UseV->getType());
  unsigned VWidth = VTy->getNumElements();
  APInt DemandedElts = APInt::getAllOnes(VWidth);

  for (int i = VWidth - 1; i > 0; --i) {
    auto *Elt = findScalarElement(UseV, i);
    if (!Elt)
      break;

    if (auto *ConstElt = dyn_cast<Constant>(Elt)) {
      if (!ConstElt->isNullValue() && !isa<UndefValue>(Elt))
        break;
    } else {
      break;
    }

    DemandedElts.clearBit(i);
  }

  return DemandedElts;
}

static Value *simplifyAMDGCNMemoryIntrinsicDemanded(InstCombiner &IC,
                                                    IntrinsicInst &II,
                                                    APInt DemandedElts,
                                                    int DMaskIdx = -1,
                                                    bool IsLoad = true);

/// Return true if it's legal to contract llvm.amdgcn.rcp(llvm.sqrt)
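///
/// For f32 this requires either the approximate-function flag or an !fpmath
/// accuracy of at least 1.0 ulp on the sqrt; f16 sqrt can always be
/// contracted.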
static bool canContractSqrtToRsq(const FPMathOperator *SqrtOp) {
  return (SqrtOp->getType()->isFloatTy() &&
          (SqrtOp->hasApproxFunc() || SqrtOp->getFPAccuracy() >= 1.0f)) ||
         SqrtOp->getType()->isHalfTy();
}

std::optional<Instruction *>
GCNTTIImpl::instCombineIntrinsic(InstCombiner &IC, IntrinsicInst &II) const {
  Intrinsic::ID IID = II.getIntrinsicID();
  switch (IID) {
  case Intrinsic::amdgcn_rcp: {
    Value *Src = II.getArgOperand(0);

    // TODO: Move to ConstantFolding/InstSimplify?
    if (isa<UndefValue>(Src)) {
      Type *Ty = II.getType();
      auto *QNaN = ConstantFP::get(Ty, APFloat::getQNaN(Ty->getFltSemantics()));
      return IC.replaceInstUsesWith(II, QNaN);
    }

    if (II.isStrictFP())
      break;

    if (const ConstantFP *C = dyn_cast<ConstantFP>(Src)) {
      const APFloat &ArgVal = C->getValueAPF();
      APFloat Val(ArgVal.getSemantics(), 1);
      Val.divide(ArgVal, APFloat::rmNearestTiesToEven);

      // This is more precise than the instruction may give.
      //
      // TODO: The instruction always flushes denormal results (except for f16),
      // should this also?
      return IC.replaceInstUsesWith(II, ConstantFP::get(II.getContext(), Val));
    }

    FastMathFlags FMF = cast<FPMathOperator>(II).getFastMathFlags();
    if (!FMF.allowContract())
      break;
    auto *SrcCI = dyn_cast<IntrinsicInst>(Src);
    if (!SrcCI)
      break;

    auto IID = SrcCI->getIntrinsicID();
    // llvm.amdgcn.rcp(llvm.amdgcn.sqrt(x)) -> llvm.amdgcn.rsq(x) if contractable
    //
    // llvm.amdgcn.rcp(llvm.sqrt(x)) -> llvm.amdgcn.rsq(x) if contractable and
    // relaxed.
    if (IID == Intrinsic::amdgcn_sqrt || IID == Intrinsic::sqrt) {
      const FPMathOperator *SqrtOp = cast<FPMathOperator>(SrcCI);
      FastMathFlags InnerFMF = SqrtOp->getFastMathFlags();
      if (!InnerFMF.allowContract() || !SrcCI->hasOneUse())
        break;

      if (IID == Intrinsic::sqrt && !canContractSqrtToRsq(SqrtOp))
        break;

      Function *NewDecl = Intrinsic::getDeclaration(
          SrcCI->getModule(), Intrinsic::amdgcn_rsq, {SrcCI->getType()});

      InnerFMF |= FMF;
      II.setFastMathFlags(InnerFMF);

      II.setCalledFunction(NewDecl);
      return IC.replaceOperand(II, 0, SrcCI->getArgOperand(0));
    }

    break;
  }
  case Intrinsic::amdgcn_sqrt:
  case Intrinsic::amdgcn_rsq: {
    Value *Src = II.getArgOperand(0);

    // TODO: Move to ConstantFolding/InstSimplify?
    if (isa<UndefValue>(Src)) {
      Type *Ty = II.getType();
      auto *QNaN = ConstantFP::get(Ty, APFloat::getQNaN(Ty->getFltSemantics()));
      return IC.replaceInstUsesWith(II, QNaN);
    }

    // f16 amdgcn.sqrt is identical to regular sqrt.
    if (IID == Intrinsic::amdgcn_sqrt && Src->getType()->isHalfTy()) {
      Function *NewDecl = Intrinsic::getDeclaration(
          II.getModule(), Intrinsic::sqrt, {II.getType()});
      II.setCalledFunction(NewDecl);
      return &II;
    }

    break;
  }
  case Intrinsic::amdgcn_log:
  case Intrinsic::amdgcn_exp2: {
    const bool IsLog = IID == Intrinsic::amdgcn_log;
    const bool IsExp = IID == Intrinsic::amdgcn_exp2;
    Value *Src = II.getArgOperand(0);
    Type *Ty = II.getType();

    if (isa<PoisonValue>(Src))
      return IC.replaceInstUsesWith(II, Src);

    if (IC.getSimplifyQuery().isUndefValue(Src))
      return IC.replaceInstUsesWith(II, ConstantFP::getNaN(Ty));

    if (ConstantFP *C = dyn_cast<ConstantFP>(Src)) {
      if (C->isInfinity()) {
        // exp2(+inf) -> +inf
        // log2(+inf) -> +inf
        if (!C->isNegative())
          return IC.replaceInstUsesWith(II, C);

        // exp2(-inf) -> 0
        if (IsExp && C->isNegative())
          return IC.replaceInstUsesWith(II, ConstantFP::getZero(Ty));
      }

      if (II.isStrictFP())
        break;

      if (C->isNaN()) {
        Constant *Quieted = ConstantFP::get(Ty, C->getValue().makeQuiet());
        return IC.replaceInstUsesWith(II, Quieted);
      }

      // f32 instruction doesn't handle denormals, f16 does.
      if (C->isZero() || (C->getValue().isDenormal() && Ty->isFloatTy())) {
        Constant *FoldedValue = IsLog ? ConstantFP::getInfinity(Ty, true)
                                      : ConstantFP::get(Ty, 1.0);
        return IC.replaceInstUsesWith(II, FoldedValue);
      }

      if (IsLog && C->isNegative())
        return IC.replaceInstUsesWith(II, ConstantFP::getNaN(Ty));

      // TODO: Full constant folding matching hardware behavior.
    }

    break;
  }
  case Intrinsic::amdgcn_frexp_mant:
  case Intrinsic::amdgcn_frexp_exp: {
    Value *Src = II.getArgOperand(0);
    if (const ConstantFP *C = dyn_cast<ConstantFP>(Src)) {
      int Exp;
      APFloat Significand =
          frexp(C->getValueAPF(), Exp, APFloat::rmNearestTiesToEven);

      if (IID == Intrinsic::amdgcn_frexp_mant) {
        return IC.replaceInstUsesWith(
            II, ConstantFP::get(II.getContext(), Significand));
      }

      // Match instruction special case behavior.
      if (Exp == APFloat::IEK_NaN || Exp == APFloat::IEK_Inf)
        Exp = 0;

      return IC.replaceInstUsesWith(II, ConstantInt::get(II.getType(), Exp));
    }

    if (isa<UndefValue>(Src)) {
      return IC.replaceInstUsesWith(II, UndefValue::get(II.getType()));
    }

    break;
  }
  case Intrinsic::amdgcn_class: {
    Value *Src0 = II.getArgOperand(0);
    Value *Src1 = II.getArgOperand(1);
    const ConstantInt *CMask = dyn_cast<ConstantInt>(Src1);
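    // With a constant mask we can lower to the generic llvm.is_fpclass
    // intrinsic, which generic InstCombine understands; e.g. a mask selecting
    // only the two NaN bits becomes a plain is-NaN class test.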
    if (CMask) {
      II.setCalledOperand(Intrinsic::getDeclaration(
          II.getModule(), Intrinsic::is_fpclass, Src0->getType()));

      // Clamp any excess bits, as they're illegal for the generic intrinsic.
      II.setArgOperand(1, ConstantInt::get(Src1->getType(),
                                           CMask->getZExtValue() & fcAllFlags));
      return &II;
    }

    // Propagate poison.
    if (isa<PoisonValue>(Src0) || isa<PoisonValue>(Src1))
      return IC.replaceInstUsesWith(II, PoisonValue::get(II.getType()));

    // llvm.amdgcn.class(_, undef) -> false
    if (IC.getSimplifyQuery().isUndefValue(Src1))
      return IC.replaceInstUsesWith(II, ConstantInt::get(II.getType(), false));

    // llvm.amdgcn.class(undef, mask) -> mask != 0
    if (IC.getSimplifyQuery().isUndefValue(Src0)) {
      Value *CmpMask = IC.Builder.CreateICmpNE(
          Src1, ConstantInt::getNullValue(Src1->getType()));
      return IC.replaceInstUsesWith(II, CmpMask);
    }
    break;
  }
  case Intrinsic::amdgcn_cvt_pkrtz: {
    Value *Src0 = II.getArgOperand(0);
    Value *Src1 = II.getArgOperand(1);
    if (const ConstantFP *C0 = dyn_cast<ConstantFP>(Src0)) {
      if (const ConstantFP *C1 = dyn_cast<ConstantFP>(Src1)) {
        const fltSemantics &HalfSem =
            II.getType()->getScalarType()->getFltSemantics();
        bool LosesInfo;
        APFloat Val0 = C0->getValueAPF();
        APFloat Val1 = C1->getValueAPF();
        Val0.convert(HalfSem, APFloat::rmTowardZero, &LosesInfo);
        Val1.convert(HalfSem, APFloat::rmTowardZero, &LosesInfo);

        Constant *Folded =
            ConstantVector::get({ConstantFP::get(II.getContext(), Val0),
                                 ConstantFP::get(II.getContext(), Val1)});
        return IC.replaceInstUsesWith(II, Folded);
      }
    }

    if (isa<UndefValue>(Src0) && isa<UndefValue>(Src1)) {
      return IC.replaceInstUsesWith(II, UndefValue::get(II.getType()));
    }

    break;
  }
  case Intrinsic::amdgcn_cvt_pknorm_i16:
  case Intrinsic::amdgcn_cvt_pknorm_u16:
  case Intrinsic::amdgcn_cvt_pk_i16:
  case Intrinsic::amdgcn_cvt_pk_u16: {
    Value *Src0 = II.getArgOperand(0);
    Value *Src1 = II.getArgOperand(1);

    if (isa<UndefValue>(Src0) && isa<UndefValue>(Src1)) {
      return IC.replaceInstUsesWith(II, UndefValue::get(II.getType()));
    }

    break;
  }
  case Intrinsic::amdgcn_ubfe:
  case Intrinsic::amdgcn_sbfe: {
    // Decompose simple cases into standard shifts.
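    // e.g. on i32, ubfe(x, 8, 8) becomes (lshr (shl x, 16), 24), which
    // extracts bits [15:8].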
    Value *Src = II.getArgOperand(0);
    if (isa<UndefValue>(Src)) {
      return IC.replaceInstUsesWith(II, Src);
    }

    unsigned Width;
    Type *Ty = II.getType();
    unsigned IntSize = Ty->getIntegerBitWidth();

    ConstantInt *CWidth = dyn_cast<ConstantInt>(II.getArgOperand(2));
    if (CWidth) {
      Width = CWidth->getZExtValue();
      if ((Width & (IntSize - 1)) == 0) {
        return IC.replaceInstUsesWith(II, ConstantInt::getNullValue(Ty));
      }

      // Hardware ignores high bits, so remove those.
      if (Width >= IntSize) {
        return IC.replaceOperand(
            II, 2, ConstantInt::get(CWidth->getType(), Width & (IntSize - 1)));
      }
    }

    unsigned Offset;
    ConstantInt *COffset = dyn_cast<ConstantInt>(II.getArgOperand(1));
    if (COffset) {
      Offset = COffset->getZExtValue();
      if (Offset >= IntSize) {
        return IC.replaceOperand(
            II, 1,
            ConstantInt::get(COffset->getType(), Offset & (IntSize - 1)));
      }
    }

    bool Signed = IID == Intrinsic::amdgcn_sbfe;

    if (!CWidth || !COffset)
      break;

    // The case of Width == 0 is handled above, which makes this transformation
    // safe.  If Width == 0, then the ashr and lshr instructions become poison
    // value since the shift amount would be equal to the bit size.
    assert(Width != 0);

    // TODO: This allows folding to undef when the hardware has specific
    // behavior?
    if (Offset + Width < IntSize) {
      Value *Shl = IC.Builder.CreateShl(Src, IntSize - Offset - Width);
      Value *RightShift = Signed ? IC.Builder.CreateAShr(Shl, IntSize - Width)
                                 : IC.Builder.CreateLShr(Shl, IntSize - Width);
      RightShift->takeName(&II);
      return IC.replaceInstUsesWith(II, RightShift);
    }

    Value *RightShift = Signed ? IC.Builder.CreateAShr(Src, Offset)
                               : IC.Builder.CreateLShr(Src, Offset);

    RightShift->takeName(&II);
    return IC.replaceInstUsesWith(II, RightShift);
  }
  case Intrinsic::amdgcn_exp:
  case Intrinsic::amdgcn_exp_row:
  case Intrinsic::amdgcn_exp_compr: {
    ConstantInt *En = cast<ConstantInt>(II.getArgOperand(1));
    unsigned EnBits = En->getZExtValue();
    if (EnBits == 0xf)
      break; // All inputs enabled.

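    // Replace the sources of disabled export channels with undef. The enable
    // mask has one bit per source (two bits per source for the compressed
    // form), so anything not covered by the mask is never read.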
    bool IsCompr = IID == Intrinsic::amdgcn_exp_compr;
    bool Changed = false;
    for (int I = 0; I < (IsCompr ? 2 : 4); ++I) {
      if ((!IsCompr && (EnBits & (1 << I)) == 0) ||
          (IsCompr && ((EnBits & (0x3 << (2 * I))) == 0))) {
        Value *Src = II.getArgOperand(I + 2);
        if (!isa<UndefValue>(Src)) {
          IC.replaceOperand(II, I + 2, UndefValue::get(Src->getType()));
          Changed = true;
        }
      }
    }

    if (Changed) {
      return &II;
    }

    break;
  }
  case Intrinsic::amdgcn_fmed3: {
    // Note this does not preserve proper sNaN behavior if IEEE-mode is enabled
    // for the shader.

    Value *Src0 = II.getArgOperand(0);
    Value *Src1 = II.getArgOperand(1);
    Value *Src2 = II.getArgOperand(2);

    // Checking for NaN before canonicalization provides better fidelity when
    // mapping other operations onto fmed3 since the order of operands is
    // unchanged.
    CallInst *NewCall = nullptr;
    if (match(Src0, PatternMatch::m_NaN()) || isa<UndefValue>(Src0)) {
      NewCall = IC.Builder.CreateMinNum(Src1, Src2);
    } else if (match(Src1, PatternMatch::m_NaN()) || isa<UndefValue>(Src1)) {
      NewCall = IC.Builder.CreateMinNum(Src0, Src2);
    } else if (match(Src2, PatternMatch::m_NaN()) || isa<UndefValue>(Src2)) {
      NewCall = IC.Builder.CreateMaxNum(Src0, Src1);
    }

    if (NewCall) {
      NewCall->copyFastMathFlags(&II);
      NewCall->takeName(&II);
      return IC.replaceInstUsesWith(II, NewCall);
    }

    bool Swap = false;
    // Canonicalize constants to RHS operands.
    //
    // fmed3(c0, x, c1) -> fmed3(x, c0, c1)
    if (isa<Constant>(Src0) && !isa<Constant>(Src1)) {
      std::swap(Src0, Src1);
      Swap = true;
    }

    if (isa<Constant>(Src1) && !isa<Constant>(Src2)) {
      std::swap(Src1, Src2);
      Swap = true;
    }

    if (isa<Constant>(Src0) && !isa<Constant>(Src1)) {
      std::swap(Src0, Src1);
      Swap = true;
    }

    if (Swap) {
      II.setArgOperand(0, Src0);
      II.setArgOperand(1, Src1);
      II.setArgOperand(2, Src2);
      return &II;
    }

    if (const ConstantFP *C0 = dyn_cast<ConstantFP>(Src0)) {
      if (const ConstantFP *C1 = dyn_cast<ConstantFP>(Src1)) {
        if (const ConstantFP *C2 = dyn_cast<ConstantFP>(Src2)) {
          APFloat Result = fmed3AMDGCN(C0->getValueAPF(), C1->getValueAPF(),
                                       C2->getValueAPF());
          return IC.replaceInstUsesWith(
              II, ConstantFP::get(IC.Builder.getContext(), Result));
        }
      }
    }

    if (!ST->hasMed3_16())
      break;

    Value *X, *Y, *Z;

    // Repeat floating-point width reduction done for minnum/maxnum.
    // fmed3((fpext X), (fpext Y), (fpext Z)) -> fpext (fmed3(X, Y, Z))
    if (matchFPExtFromF16(Src0, X) && matchFPExtFromF16(Src1, Y) &&
        matchFPExtFromF16(Src2, Z)) {
      Value *NewCall = IC.Builder.CreateIntrinsic(IID, {X->getType()},
                                                  {X, Y, Z}, &II, II.getName());
      return new FPExtInst(NewCall, II.getType());
    }

    break;
  }
  case Intrinsic::amdgcn_icmp:
  case Intrinsic::amdgcn_fcmp: {
    const ConstantInt *CC = cast<ConstantInt>(II.getArgOperand(2));
    // Guard against invalid arguments.
    int64_t CCVal = CC->getZExtValue();
    bool IsInteger = IID == Intrinsic::amdgcn_icmp;
    if ((IsInteger && (CCVal < CmpInst::FIRST_ICMP_PREDICATE ||
                       CCVal > CmpInst::LAST_ICMP_PREDICATE)) ||
        (!IsInteger && (CCVal < CmpInst::FIRST_FCMP_PREDICATE ||
                        CCVal > CmpInst::LAST_FCMP_PREDICATE)))
      break;

    Value *Src0 = II.getArgOperand(0);
    Value *Src1 = II.getArgOperand(1);

    if (auto *CSrc0 = dyn_cast<Constant>(Src0)) {
      if (auto *CSrc1 = dyn_cast<Constant>(Src1)) {
        Constant *CCmp = ConstantExpr::getCompare(CCVal, CSrc0, CSrc1);
        if (CCmp->isNullValue()) {
          return IC.replaceInstUsesWith(
              II, IC.Builder.CreateSExt(CCmp, II.getType()));
        }

        // The result of V_ICMP/V_FCMP assembly instructions (which this
        // intrinsic exposes) is one bit per thread, masked with the EXEC
        // register (which contains the bitmask of live threads). So a
        // comparison that always returns true is the same as a read of the
        // EXEC register.
        Function *NewF = Intrinsic::getDeclaration(
            II.getModule(), Intrinsic::read_register, II.getType());
        Metadata *MDArgs[] = {MDString::get(II.getContext(), "exec")};
        MDNode *MD = MDNode::get(II.getContext(), MDArgs);
        Value *Args[] = {MetadataAsValue::get(II.getContext(), MD)};
        CallInst *NewCall = IC.Builder.CreateCall(NewF, Args);
        NewCall->addFnAttr(Attribute::Convergent);
        NewCall->takeName(&II);
        return IC.replaceInstUsesWith(II, NewCall);
      }

      // Canonicalize constants to RHS.
      CmpInst::Predicate SwapPred =
          CmpInst::getSwappedPredicate(static_cast<CmpInst::Predicate>(CCVal));
      II.setArgOperand(0, Src1);
      II.setArgOperand(1, Src0);
      II.setArgOperand(
          2, ConstantInt::get(CC->getType(), static_cast<int>(SwapPred)));
      return &II;
    }

    if (CCVal != CmpInst::ICMP_EQ && CCVal != CmpInst::ICMP_NE)
      break;

    // Canonicalize compare eq with true value to compare != 0
    // llvm.amdgcn.icmp(zext (i1 x), 1, eq)
    //   -> llvm.amdgcn.icmp(zext (i1 x), 0, ne)
    // llvm.amdgcn.icmp(sext (i1 x), -1, eq)
    //   -> llvm.amdgcn.icmp(sext (i1 x), 0, ne)
    Value *ExtSrc;
    if (CCVal == CmpInst::ICMP_EQ &&
        ((match(Src1, PatternMatch::m_One()) &&
          match(Src0, m_ZExt(PatternMatch::m_Value(ExtSrc)))) ||
         (match(Src1, PatternMatch::m_AllOnes()) &&
          match(Src0, m_SExt(PatternMatch::m_Value(ExtSrc))))) &&
        ExtSrc->getType()->isIntegerTy(1)) {
      IC.replaceOperand(II, 1, ConstantInt::getNullValue(Src1->getType()));
      IC.replaceOperand(II, 2,
                        ConstantInt::get(CC->getType(), CmpInst::ICMP_NE));
      return &II;
    }

    CmpInst::Predicate SrcPred;
    Value *SrcLHS;
    Value *SrcRHS;

    // Fold compare eq/ne with 0 from a compare result as the predicate to the
    // intrinsic. The typical use is a wave vote function in the library, which
    // will be fed from a user code condition compared with 0. Fold in the
    // redundant compare.

    // llvm.amdgcn.icmp([sz]ext ([if]cmp pred a, b), 0, ne)
    //   -> llvm.amdgcn.[if]cmp(a, b, pred)
    //
    // llvm.amdgcn.icmp([sz]ext ([if]cmp pred a, b), 0, eq)
    //   -> llvm.amdgcn.[if]cmp(a, b, inv pred)
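    //
    // e.g. llvm.amdgcn.icmp(zext (icmp eq i32 %a, %b), 0, ne)
    //   -> llvm.amdgcn.icmp(i32 %a, i32 %b, eq)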
897e8d8bef9SDimitry Andric     if (match(Src1, PatternMatch::m_Zero()) &&
898e8d8bef9SDimitry Andric         match(Src0, PatternMatch::m_ZExtOrSExt(
899e8d8bef9SDimitry Andric                         m_Cmp(SrcPred, PatternMatch::m_Value(SrcLHS),
900e8d8bef9SDimitry Andric                               PatternMatch::m_Value(SrcRHS))))) {
901e8d8bef9SDimitry Andric       if (CCVal == CmpInst::ICMP_EQ)
902e8d8bef9SDimitry Andric         SrcPred = CmpInst::getInversePredicate(SrcPred);
903e8d8bef9SDimitry Andric 
904e8d8bef9SDimitry Andric       Intrinsic::ID NewIID = CmpInst::isFPPredicate(SrcPred)
905e8d8bef9SDimitry Andric                                  ? Intrinsic::amdgcn_fcmp
906e8d8bef9SDimitry Andric                                  : Intrinsic::amdgcn_icmp;
907e8d8bef9SDimitry Andric 
908e8d8bef9SDimitry Andric       Type *Ty = SrcLHS->getType();
909e8d8bef9SDimitry Andric       if (auto *CmpType = dyn_cast<IntegerType>(Ty)) {
910e8d8bef9SDimitry Andric         // Promote to next legal integer type.
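        // E.g., an i8 compare is widened to i16, sign-extending the operands
        // for signed predicates and zero-extending them otherwise.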
911e8d8bef9SDimitry Andric         unsigned Width = CmpType->getBitWidth();
912e8d8bef9SDimitry Andric         unsigned NewWidth = Width;
913e8d8bef9SDimitry Andric 
914e8d8bef9SDimitry Andric         // Don't do anything for i1 comparisons.
915e8d8bef9SDimitry Andric         if (Width == 1)
916e8d8bef9SDimitry Andric           break;
917e8d8bef9SDimitry Andric 
918e8d8bef9SDimitry Andric         if (Width <= 16)
919e8d8bef9SDimitry Andric           NewWidth = 16;
920e8d8bef9SDimitry Andric         else if (Width <= 32)
921e8d8bef9SDimitry Andric           NewWidth = 32;
922e8d8bef9SDimitry Andric         else if (Width <= 64)
923e8d8bef9SDimitry Andric           NewWidth = 64;
924e8d8bef9SDimitry Andric         else if (Width > 64)
925e8d8bef9SDimitry Andric           break; // Can't handle this.
926e8d8bef9SDimitry Andric 
927e8d8bef9SDimitry Andric         if (Width != NewWidth) {
928e8d8bef9SDimitry Andric           IntegerType *CmpTy = IC.Builder.getIntNTy(NewWidth);
929e8d8bef9SDimitry Andric           if (CmpInst::isSigned(SrcPred)) {
930e8d8bef9SDimitry Andric             SrcLHS = IC.Builder.CreateSExt(SrcLHS, CmpTy);
931e8d8bef9SDimitry Andric             SrcRHS = IC.Builder.CreateSExt(SrcRHS, CmpTy);
932e8d8bef9SDimitry Andric           } else {
933e8d8bef9SDimitry Andric             SrcLHS = IC.Builder.CreateZExt(SrcLHS, CmpTy);
934e8d8bef9SDimitry Andric             SrcRHS = IC.Builder.CreateZExt(SrcRHS, CmpTy);
935e8d8bef9SDimitry Andric           }
936e8d8bef9SDimitry Andric         }
937e8d8bef9SDimitry Andric       } else if (!Ty->isFloatTy() && !Ty->isDoubleTy() && !Ty->isHalfTy())
938e8d8bef9SDimitry Andric         break;
939e8d8bef9SDimitry Andric 
940e8d8bef9SDimitry Andric       Function *NewF = Intrinsic::getDeclaration(
941e8d8bef9SDimitry Andric           II.getModule(), NewIID, {II.getType(), SrcLHS->getType()});
942e8d8bef9SDimitry Andric       Value *Args[] = {SrcLHS, SrcRHS,
943e8d8bef9SDimitry Andric                        ConstantInt::get(CC->getType(), SrcPred)};
944e8d8bef9SDimitry Andric       CallInst *NewCall = IC.Builder.CreateCall(NewF, Args);
945e8d8bef9SDimitry Andric       NewCall->takeName(&II);
946e8d8bef9SDimitry Andric       return IC.replaceInstUsesWith(II, NewCall);
947e8d8bef9SDimitry Andric     }
948e8d8bef9SDimitry Andric 
949e8d8bef9SDimitry Andric     break;
950e8d8bef9SDimitry Andric   }
95106c3fb27SDimitry Andric   case Intrinsic::amdgcn_mbcnt_hi: {
95206c3fb27SDimitry Andric     // exec_hi is all 0, so this is just a copy.
95306c3fb27SDimitry Andric     if (ST->isWave32())
95406c3fb27SDimitry Andric       return IC.replaceInstUsesWith(II, II.getArgOperand(1));
95506c3fb27SDimitry Andric     break;
95606c3fb27SDimitry Andric   }
957e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_ballot: {
958e8d8bef9SDimitry Andric     if (auto *Src = dyn_cast<ConstantInt>(II.getArgOperand(0))) {
959e8d8bef9SDimitry Andric       if (Src->isZero()) {
960e8d8bef9SDimitry Andric         // amdgcn.ballot(i1 0) is zero.
961e8d8bef9SDimitry Andric         return IC.replaceInstUsesWith(II, Constant::getNullValue(II.getType()));
962e8d8bef9SDimitry Andric       }
963e8d8bef9SDimitry Andric     }
964e8d8bef9SDimitry Andric     break;
965e8d8bef9SDimitry Andric   }
966e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_wqm_vote: {
967e8d8bef9SDimitry Andric     // wqm_vote is identity when the argument is constant.
968e8d8bef9SDimitry Andric     if (!isa<Constant>(II.getArgOperand(0)))
969e8d8bef9SDimitry Andric       break;
970e8d8bef9SDimitry Andric 
971e8d8bef9SDimitry Andric     return IC.replaceInstUsesWith(II, II.getArgOperand(0));
972e8d8bef9SDimitry Andric   }
973e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_kill: {
974e8d8bef9SDimitry Andric     const ConstantInt *C = dyn_cast<ConstantInt>(II.getArgOperand(0));
975e8d8bef9SDimitry Andric     if (!C || !C->getZExtValue())
976e8d8bef9SDimitry Andric       break;
977e8d8bef9SDimitry Andric 
978e8d8bef9SDimitry Andric     // amdgcn.kill(i1 1) is a no-op
979e8d8bef9SDimitry Andric     return IC.eraseInstFromFunction(II);
980e8d8bef9SDimitry Andric   }
981e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_update_dpp: {
982e8d8bef9SDimitry Andric     Value *Old = II.getArgOperand(0);
983e8d8bef9SDimitry Andric 
984e8d8bef9SDimitry Andric     auto *BC = cast<ConstantInt>(II.getArgOperand(5));
985e8d8bef9SDimitry Andric     auto *RM = cast<ConstantInt>(II.getArgOperand(3));
986e8d8bef9SDimitry Andric     auto *BM = cast<ConstantInt>(II.getArgOperand(4));
987e8d8bef9SDimitry Andric     if (BC->isZeroValue() || RM->getZExtValue() != 0xF ||
988e8d8bef9SDimitry Andric         BM->getZExtValue() != 0xF || isa<UndefValue>(Old))
989e8d8bef9SDimitry Andric       break;
990e8d8bef9SDimitry Andric 
991e8d8bef9SDimitry Andric     // If bound_ctrl = 1 and row mask = bank mask = 0xf, we can omit the old value.
992e8d8bef9SDimitry Andric     return IC.replaceOperand(II, 0, UndefValue::get(Old->getType()));
993e8d8bef9SDimitry Andric   }
994e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_permlane16:
9955f757f3fSDimitry Andric   case Intrinsic::amdgcn_permlane16_var:
9965f757f3fSDimitry Andric   case Intrinsic::amdgcn_permlanex16:
9975f757f3fSDimitry Andric   case Intrinsic::amdgcn_permlanex16_var: {
998e8d8bef9SDimitry Andric     // Discard vdst_in if it's not going to be read.
999e8d8bef9SDimitry Andric     Value *VDstIn = II.getArgOperand(0);
1000e8d8bef9SDimitry Andric     if (isa<UndefValue>(VDstIn))
1001e8d8bef9SDimitry Andric       break;
1002e8d8bef9SDimitry Andric 
10035f757f3fSDimitry Andric     // FetchInvalid operand idx.
10045f757f3fSDimitry Andric     unsigned int FiIdx = (IID == Intrinsic::amdgcn_permlane16 ||
10055f757f3fSDimitry Andric                           IID == Intrinsic::amdgcn_permlanex16)
10065f757f3fSDimitry Andric                              ? 4  /* for permlane16 and permlanex16 */
10075f757f3fSDimitry Andric                              : 3; /* for permlane16_var and permlanex16_var */
10085f757f3fSDimitry Andric 
10095f757f3fSDimitry Andric     // BoundCtrl operand idx:
10105f757f3fSDimitry Andric     // 5 for permlane16 and permlanex16,
10115f757f3fSDimitry Andric     // 4 for permlane16_var and permlanex16_var.
10125f757f3fSDimitry Andric     unsigned int BcIdx = FiIdx + 1;
10135f757f3fSDimitry Andric 
10145f757f3fSDimitry Andric     ConstantInt *FetchInvalid = cast<ConstantInt>(II.getArgOperand(FiIdx));
10155f757f3fSDimitry Andric     ConstantInt *BoundCtrl = cast<ConstantInt>(II.getArgOperand(BcIdx));
1016e8d8bef9SDimitry Andric     if (!FetchInvalid->getZExtValue() && !BoundCtrl->getZExtValue())
1017e8d8bef9SDimitry Andric       break;
1018e8d8bef9SDimitry Andric 
1019e8d8bef9SDimitry Andric     return IC.replaceOperand(II, 0, UndefValue::get(VDstIn->getType()));
1020e8d8bef9SDimitry Andric   }
102181ad6265SDimitry Andric   case Intrinsic::amdgcn_permlane64:
102281ad6265SDimitry Andric     // A constant value is trivially uniform.
102381ad6265SDimitry Andric     if (Constant *C = dyn_cast<Constant>(II.getArgOperand(0))) {
102481ad6265SDimitry Andric       return IC.replaceInstUsesWith(II, C);
102581ad6265SDimitry Andric     }
102681ad6265SDimitry Andric     break;
1027e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_readfirstlane:
1028e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_readlane: {
1029e8d8bef9SDimitry Andric     // A constant value is trivially uniform.
1030e8d8bef9SDimitry Andric     if (Constant *C = dyn_cast<Constant>(II.getArgOperand(0))) {
1031e8d8bef9SDimitry Andric       return IC.replaceInstUsesWith(II, C);
1032e8d8bef9SDimitry Andric     }
1033e8d8bef9SDimitry Andric 
1034e8d8bef9SDimitry Andric     // The remaining folds may not be safe if the exec mask can differ between
1035e8d8bef9SDimitry Andric     // the def and the use.
1036e8d8bef9SDimitry Andric     Value *Src = II.getArgOperand(0);
1037e8d8bef9SDimitry Andric     Instruction *SrcInst = dyn_cast<Instruction>(Src);
1038e8d8bef9SDimitry Andric     if (SrcInst && SrcInst->getParent() != II.getParent())
1039e8d8bef9SDimitry Andric       break;
1040e8d8bef9SDimitry Andric 
1041e8d8bef9SDimitry Andric     // readfirstlane (readfirstlane x) -> readfirstlane x
1042e8d8bef9SDimitry Andric     // readlane (readfirstlane x), y -> readfirstlane x
1043e8d8bef9SDimitry Andric     if (match(Src,
1044e8d8bef9SDimitry Andric               PatternMatch::m_Intrinsic<Intrinsic::amdgcn_readfirstlane>())) {
1045e8d8bef9SDimitry Andric       return IC.replaceInstUsesWith(II, Src);
1046e8d8bef9SDimitry Andric     }
1047e8d8bef9SDimitry Andric 
1048e8d8bef9SDimitry Andric     if (IID == Intrinsic::amdgcn_readfirstlane) {
1049e8d8bef9SDimitry Andric       // readfirstlane (readlane x, y) -> readlane x, y
1050e8d8bef9SDimitry Andric       if (match(Src, PatternMatch::m_Intrinsic<Intrinsic::amdgcn_readlane>())) {
1051e8d8bef9SDimitry Andric         return IC.replaceInstUsesWith(II, Src);
1052e8d8bef9SDimitry Andric       }
1053e8d8bef9SDimitry Andric     } else {
1054e8d8bef9SDimitry Andric       // readlane (readlane x, y), y -> readlane x, y
1055e8d8bef9SDimitry Andric       if (match(Src, PatternMatch::m_Intrinsic<Intrinsic::amdgcn_readlane>(
1056e8d8bef9SDimitry Andric                          PatternMatch::m_Value(),
1057e8d8bef9SDimitry Andric                          PatternMatch::m_Specific(II.getArgOperand(1))))) {
1058e8d8bef9SDimitry Andric         return IC.replaceInstUsesWith(II, Src);
1059e8d8bef9SDimitry Andric       }
1060e8d8bef9SDimitry Andric     }
1061e8d8bef9SDimitry Andric 
1062e8d8bef9SDimitry Andric     break;
1063e8d8bef9SDimitry Andric   }
1064e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_fmul_legacy: {
1065e8d8bef9SDimitry Andric     Value *Op0 = II.getArgOperand(0);
1066e8d8bef9SDimitry Andric     Value *Op1 = II.getArgOperand(1);
1067e8d8bef9SDimitry Andric 
1068e8d8bef9SDimitry Andric     // The legacy behaviour is that multiplying +/-0.0 by anything, even NaN or
1069e8d8bef9SDimitry Andric     // infinity, gives +0.0.
1070e8d8bef9SDimitry Andric     // TODO: Move to InstSimplify?
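    // E.g., fmul_legacy(+inf, +0.0) folds to +0.0 here, whereas a plain fmul
    // would produce NaN.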
1071e8d8bef9SDimitry Andric     if (match(Op0, PatternMatch::m_AnyZeroFP()) ||
1072e8d8bef9SDimitry Andric         match(Op1, PatternMatch::m_AnyZeroFP()))
107306c3fb27SDimitry Andric       return IC.replaceInstUsesWith(II, ConstantFP::getZero(II.getType()));
1074e8d8bef9SDimitry Andric 
1075e8d8bef9SDimitry Andric     // If we can prove we don't have one of the special cases then we can use a
1076e8d8bef9SDimitry Andric     // normal fmul instruction instead.
107706c3fb27SDimitry Andric     if (canSimplifyLegacyMulToMul(II, Op0, Op1, IC)) {
1078e8d8bef9SDimitry Andric       auto *FMul = IC.Builder.CreateFMulFMF(Op0, Op1, &II);
1079e8d8bef9SDimitry Andric       FMul->takeName(&II);
1080e8d8bef9SDimitry Andric       return IC.replaceInstUsesWith(II, FMul);
1081e8d8bef9SDimitry Andric     }
1082e8d8bef9SDimitry Andric     break;
1083e8d8bef9SDimitry Andric   }
1084e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_fma_legacy: {
1085e8d8bef9SDimitry Andric     Value *Op0 = II.getArgOperand(0);
1086e8d8bef9SDimitry Andric     Value *Op1 = II.getArgOperand(1);
1087e8d8bef9SDimitry Andric     Value *Op2 = II.getArgOperand(2);
1088e8d8bef9SDimitry Andric 
1089e8d8bef9SDimitry Andric     // The legacy behaviour is that multiplying +/-0.0 by anything, even NaN or
1090e8d8bef9SDimitry Andric     // infinity, gives +0.0.
1091e8d8bef9SDimitry Andric     // TODO: Move to InstSimplify?
1092e8d8bef9SDimitry Andric     if (match(Op0, PatternMatch::m_AnyZeroFP()) ||
1093e8d8bef9SDimitry Andric         match(Op1, PatternMatch::m_AnyZeroFP())) {
1094e8d8bef9SDimitry Andric       // It's tempting to just return Op2 here, but that would give the wrong
1095e8d8bef9SDimitry Andric       // result if Op2 was -0.0.
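      // E.g., fma_legacy(+0.0, %x, -0.0) must yield +0.0 (since -0.0 + 0.0 is
      // +0.0 under the default rounding mode), not the -0.0 that returning
      // Op2 would give.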
109606c3fb27SDimitry Andric       auto *Zero = ConstantFP::getZero(II.getType());
1097e8d8bef9SDimitry Andric       auto *FAdd = IC.Builder.CreateFAddFMF(Zero, Op2, &II);
1098e8d8bef9SDimitry Andric       FAdd->takeName(&II);
1099e8d8bef9SDimitry Andric       return IC.replaceInstUsesWith(II, FAdd);
1100e8d8bef9SDimitry Andric     }
1101e8d8bef9SDimitry Andric 
1102e8d8bef9SDimitry Andric     // If we can prove we don't have one of the special cases then we can use a
1103e8d8bef9SDimitry Andric     // normal fma instead.
110406c3fb27SDimitry Andric     if (canSimplifyLegacyMulToMul(II, Op0, Op1, IC)) {
1105e8d8bef9SDimitry Andric       II.setCalledOperand(Intrinsic::getDeclaration(
1106e8d8bef9SDimitry Andric           II.getModule(), Intrinsic::fma, II.getType()));
1107e8d8bef9SDimitry Andric       return &II;
1108e8d8bef9SDimitry Andric     }
1109e8d8bef9SDimitry Andric     break;
1110e8d8bef9SDimitry Andric   }
11110eae32dcSDimitry Andric   case Intrinsic::amdgcn_is_shared:
11120eae32dcSDimitry Andric   case Intrinsic::amdgcn_is_private: {
11130eae32dcSDimitry Andric     if (isa<UndefValue>(II.getArgOperand(0)))
11140eae32dcSDimitry Andric       return IC.replaceInstUsesWith(II, UndefValue::get(II.getType()));
11150eae32dcSDimitry Andric 
11160eae32dcSDimitry Andric     if (isa<ConstantPointerNull>(II.getArgOperand(0)))
11170eae32dcSDimitry Andric       return IC.replaceInstUsesWith(II, ConstantInt::getFalse(II.getType()));
11180eae32dcSDimitry Andric     break;
11190eae32dcSDimitry Andric   }
112006c3fb27SDimitry Andric   case Intrinsic::amdgcn_buffer_store_format:
112106c3fb27SDimitry Andric   case Intrinsic::amdgcn_raw_buffer_store_format:
112206c3fb27SDimitry Andric   case Intrinsic::amdgcn_struct_buffer_store_format:
112306c3fb27SDimitry Andric   case Intrinsic::amdgcn_raw_tbuffer_store:
112406c3fb27SDimitry Andric   case Intrinsic::amdgcn_struct_tbuffer_store:
112506c3fb27SDimitry Andric   case Intrinsic::amdgcn_tbuffer_store:
112606c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_1d:
112706c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_1darray:
112806c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_2d:
112906c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_2darray:
113006c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_2darraymsaa:
113106c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_2dmsaa:
113206c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_3d:
113306c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_cube:
113406c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_mip_1d:
113506c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_mip_1darray:
113606c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_mip_2d:
113706c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_mip_2darray:
113806c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_mip_3d:
113906c3fb27SDimitry Andric   case Intrinsic::amdgcn_image_store_mip_cube: {
114006c3fb27SDimitry Andric     if (!isa<FixedVectorType>(II.getArgOperand(0)->getType()))
114106c3fb27SDimitry Andric       break;
114206c3fb27SDimitry Andric 
114306c3fb27SDimitry Andric     APInt DemandedElts =
114406c3fb27SDimitry Andric         trimTrailingZerosInVector(IC, II.getArgOperand(0), &II);
114506c3fb27SDimitry Andric 
114606c3fb27SDimitry Andric     int DMaskIdx = getAMDGPUImageDMaskIntrinsic(II.getIntrinsicID()) ? 1 : -1;
114706c3fb27SDimitry Andric     if (simplifyAMDGCNMemoryIntrinsicDemanded(IC, II, DemandedElts, DMaskIdx,
114806c3fb27SDimitry Andric                                               false)) {
114906c3fb27SDimitry Andric       return IC.eraseInstFromFunction(II);
115006c3fb27SDimitry Andric     }
115106c3fb27SDimitry Andric 
115206c3fb27SDimitry Andric     break;
115306c3fb27SDimitry Andric   }
115406c3fb27SDimitry Andric   }
1155e8d8bef9SDimitry Andric   if (const AMDGPU::ImageDimIntrinsicInfo *ImageDimIntr =
1156e8d8bef9SDimitry Andric             AMDGPU::getImageDimIntrinsicInfo(II.getIntrinsicID())) {
1157e8d8bef9SDimitry Andric     return simplifyAMDGCNImageIntrinsic(ST, ImageDimIntr, II, IC);
1158e8d8bef9SDimitry Andric   }
1159bdd1243dSDimitry Andric   return std::nullopt;
1160e8d8bef9SDimitry Andric }
1161e8d8bef9SDimitry Andric 
1162e8d8bef9SDimitry Andric /// Implement SimplifyDemandedVectorElts for amdgcn buffer and image intrinsics.
1163e8d8bef9SDimitry Andric ///
116406c3fb27SDimitry Andric /// For amdgcn image and buffer store intrinsics, simplification updates the
116506c3fb27SDimitry Andric /// definition of the intrinsic's vector argument rather than the uses of the
116606c3fb27SDimitry Andric /// result, as is done for image and buffer loads.
1167e8d8bef9SDimitry Andric /// Note: This only supports non-TFE/LWE image intrinsic calls; those have
1168e8d8bef9SDimitry Andric ///       struct returns.
1169e8d8bef9SDimitry Andric static Value *simplifyAMDGCNMemoryIntrinsicDemanded(InstCombiner &IC,
1170e8d8bef9SDimitry Andric                                                     IntrinsicInst &II,
1171e8d8bef9SDimitry Andric                                                     APInt DemandedElts,
117206c3fb27SDimitry Andric                                                     int DMaskIdx, bool IsLoad) {
1173e8d8bef9SDimitry Andric 
117406c3fb27SDimitry Andric   auto *IIVTy = cast<FixedVectorType>(IsLoad ? II.getType()
117506c3fb27SDimitry Andric                                              : II.getOperand(0)->getType());
1176e8d8bef9SDimitry Andric   unsigned VWidth = IIVTy->getNumElements();
1177e8d8bef9SDimitry Andric   if (VWidth == 1)
1178e8d8bef9SDimitry Andric     return nullptr;
1179bdd1243dSDimitry Andric   Type *EltTy = IIVTy->getElementType();
1180e8d8bef9SDimitry Andric 
1181e8d8bef9SDimitry Andric   IRBuilderBase::InsertPointGuard Guard(IC.Builder);
1182e8d8bef9SDimitry Andric   IC.Builder.SetInsertPoint(&II);
1183e8d8bef9SDimitry Andric 
1184e8d8bef9SDimitry Andric   // Assume the arguments are unchanged and later override them, if needed.
1185e8d8bef9SDimitry Andric   SmallVector<Value *, 16> Args(II.args());
1186e8d8bef9SDimitry Andric 
1187e8d8bef9SDimitry Andric   if (DMaskIdx < 0) {
1188e8d8bef9SDimitry Andric     // Buffer case.
1189e8d8bef9SDimitry Andric 
1190e8d8bef9SDimitry Andric     const unsigned ActiveBits = DemandedElts.getActiveBits();
119106c3fb27SDimitry Andric     const unsigned UnusedComponentsAtFront = DemandedElts.countr_zero();
1192e8d8bef9SDimitry Andric 
1193e8d8bef9SDimitry Andric     // Start by assuming the prefix of elements up to the highest demanded one
1194e8d8bef9SDimitry Andric     // is needed, then clear the low bits for unused components at the front
1195e8d8bef9SDimitry Andric     // (trailing zeros of the mask) and update the offset accordingly.
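    // E.g., a raw.buffer.load returning <4 x float> where only the last two
    // elements are used ends up as a <2 x float> load with the byte offset
    // increased by 8.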
1196e8d8bef9SDimitry Andric     DemandedElts = (1 << ActiveBits) - 1;
1197e8d8bef9SDimitry Andric 
1198e8d8bef9SDimitry Andric     if (UnusedComponentsAtFront > 0) {
1199e8d8bef9SDimitry Andric       static const unsigned InvalidOffsetIdx = 0xf;
1200e8d8bef9SDimitry Andric 
1201e8d8bef9SDimitry Andric       unsigned OffsetIdx;
1202e8d8bef9SDimitry Andric       switch (II.getIntrinsicID()) {
1203e8d8bef9SDimitry Andric       case Intrinsic::amdgcn_raw_buffer_load:
120406c3fb27SDimitry Andric       case Intrinsic::amdgcn_raw_ptr_buffer_load:
1205e8d8bef9SDimitry Andric         OffsetIdx = 1;
1206e8d8bef9SDimitry Andric         break;
1207e8d8bef9SDimitry Andric       case Intrinsic::amdgcn_s_buffer_load:
1208e8d8bef9SDimitry Andric         // If the resulting type would be vec3, there is no point in trimming
1209e8d8bef9SDimitry Andric         // the load with an updated offset, as the vec3 would most likely be
1210e8d8bef9SDimitry Andric         // widened to vec4 anyway during lowering.
1211e8d8bef9SDimitry Andric         if (ActiveBits == 4 && UnusedComponentsAtFront == 1)
1212e8d8bef9SDimitry Andric           OffsetIdx = InvalidOffsetIdx;
1213e8d8bef9SDimitry Andric         else
1214e8d8bef9SDimitry Andric           OffsetIdx = 1;
1215e8d8bef9SDimitry Andric         break;
1216e8d8bef9SDimitry Andric       case Intrinsic::amdgcn_struct_buffer_load:
121706c3fb27SDimitry Andric       case Intrinsic::amdgcn_struct_ptr_buffer_load:
1218e8d8bef9SDimitry Andric         OffsetIdx = 2;
1219e8d8bef9SDimitry Andric         break;
1220e8d8bef9SDimitry Andric       default:
1221e8d8bef9SDimitry Andric         // TODO: handle tbuffer* intrinsics.
1222e8d8bef9SDimitry Andric         OffsetIdx = InvalidOffsetIdx;
1223e8d8bef9SDimitry Andric         break;
1224e8d8bef9SDimitry Andric       }
1225e8d8bef9SDimitry Andric 
1226e8d8bef9SDimitry Andric       if (OffsetIdx != InvalidOffsetIdx) {
1227e8d8bef9SDimitry Andric         // Clear demanded bits and update the offset.
1228e8d8bef9SDimitry Andric         DemandedElts &= ~((1 << UnusedComponentsAtFront) - 1);
1229bdd1243dSDimitry Andric         auto *Offset = Args[OffsetIdx];
1230e8d8bef9SDimitry Andric         unsigned SingleComponentSizeInBits =
1231bdd1243dSDimitry Andric             IC.getDataLayout().getTypeSizeInBits(EltTy);
1232e8d8bef9SDimitry Andric         unsigned OffsetAdd =
1233e8d8bef9SDimitry Andric             UnusedComponentsAtFront * SingleComponentSizeInBits / 8;
1234e8d8bef9SDimitry Andric         auto *OffsetAddVal = ConstantInt::get(Offset->getType(), OffsetAdd);
1235e8d8bef9SDimitry Andric         Args[OffsetIdx] = IC.Builder.CreateAdd(Offset, OffsetAddVal);
1236e8d8bef9SDimitry Andric       }
1237e8d8bef9SDimitry Andric     }
1238e8d8bef9SDimitry Andric   } else {
1239e8d8bef9SDimitry Andric     // Image case.
1240e8d8bef9SDimitry Andric 
1241bdd1243dSDimitry Andric     ConstantInt *DMask = cast<ConstantInt>(Args[DMaskIdx]);
1242e8d8bef9SDimitry Andric     unsigned DMaskVal = DMask->getZExtValue() & 0xf;
1243e8d8bef9SDimitry Andric 
1244*cb14a3feSDimitry Andric     // dmask 0 has special semantics; do not simplify.
1245*cb14a3feSDimitry Andric     if (DMaskVal == 0)
1246*cb14a3feSDimitry Andric       return nullptr;
1247*cb14a3feSDimitry Andric 
1248e8d8bef9SDimitry Andric     // Mask off values that are undefined because the dmask doesn't cover them
1249bdd1243dSDimitry Andric     DemandedElts &= (1 << llvm::popcount(DMaskVal)) - 1;
1250e8d8bef9SDimitry Andric 
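    // Recompute the dmask so it only covers components that are actually
    // demanded. E.g., with dmask = 0b1101 (components 0, 2 and 3) and only the
    // second element of the vector demanded, the new dmask is 0b0100.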
1251e8d8bef9SDimitry Andric     unsigned NewDMaskVal = 0;
125206c3fb27SDimitry Andric     unsigned OrigLdStIdx = 0;
1253e8d8bef9SDimitry Andric     for (unsigned SrcIdx = 0; SrcIdx < 4; ++SrcIdx) {
1254e8d8bef9SDimitry Andric       const unsigned Bit = 1 << SrcIdx;
1255e8d8bef9SDimitry Andric       if (!!(DMaskVal & Bit)) {
125606c3fb27SDimitry Andric         if (!!DemandedElts[OrigLdStIdx])
1257e8d8bef9SDimitry Andric           NewDMaskVal |= Bit;
125806c3fb27SDimitry Andric         OrigLdStIdx++;
1259e8d8bef9SDimitry Andric       }
1260e8d8bef9SDimitry Andric     }
1261e8d8bef9SDimitry Andric 
1262e8d8bef9SDimitry Andric     if (DMaskVal != NewDMaskVal)
1263e8d8bef9SDimitry Andric       Args[DMaskIdx] = ConstantInt::get(DMask->getType(), NewDMaskVal);
1264e8d8bef9SDimitry Andric   }
1265e8d8bef9SDimitry Andric 
126606c3fb27SDimitry Andric   unsigned NewNumElts = DemandedElts.popcount();
1267e8d8bef9SDimitry Andric   if (!NewNumElts)
1268*cb14a3feSDimitry Andric     return PoisonValue::get(IIVTy);
1269e8d8bef9SDimitry Andric 
1270e8d8bef9SDimitry Andric   if (NewNumElts >= VWidth && DemandedElts.isMask()) {
1271e8d8bef9SDimitry Andric     if (DMaskIdx >= 0)
1272e8d8bef9SDimitry Andric       II.setArgOperand(DMaskIdx, Args[DMaskIdx]);
1273e8d8bef9SDimitry Andric     return nullptr;
1274e8d8bef9SDimitry Andric   }
1275e8d8bef9SDimitry Andric 
1276e8d8bef9SDimitry Andric   // Validate function argument and return types, extracting overloaded types
1277e8d8bef9SDimitry Andric   // along the way.
1278e8d8bef9SDimitry Andric   SmallVector<Type *, 6> OverloadTys;
1279e8d8bef9SDimitry Andric   if (!Intrinsic::getIntrinsicSignature(II.getCalledFunction(), OverloadTys))
1280e8d8bef9SDimitry Andric     return nullptr;
1281e8d8bef9SDimitry Andric 
1282e8d8bef9SDimitry Andric   Type *NewTy =
1283e8d8bef9SDimitry Andric       (NewNumElts == 1) ? EltTy : FixedVectorType::get(EltTy, NewNumElts);
1284e8d8bef9SDimitry Andric   OverloadTys[0] = NewTy;
1285e8d8bef9SDimitry Andric 
128606c3fb27SDimitry Andric   if (!IsLoad) {
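    // For stores, shrink the stored value itself to the demanded elements.
    // E.g., storing <4 x float> with only lanes 0 and 1 demanded passes a
    // <2 x float> (or a scalar, if a single lane remains) to the new call.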
128706c3fb27SDimitry Andric     SmallVector<int, 8> EltMask;
128806c3fb27SDimitry Andric     for (unsigned OrigStoreIdx = 0; OrigStoreIdx < VWidth; ++OrigStoreIdx)
128906c3fb27SDimitry Andric       if (DemandedElts[OrigStoreIdx])
129006c3fb27SDimitry Andric         EltMask.push_back(OrigStoreIdx);
129106c3fb27SDimitry Andric 
129206c3fb27SDimitry Andric     if (NewNumElts == 1)
129306c3fb27SDimitry Andric       Args[0] = IC.Builder.CreateExtractElement(II.getOperand(0), EltMask[0]);
129406c3fb27SDimitry Andric     else
129506c3fb27SDimitry Andric       Args[0] = IC.Builder.CreateShuffleVector(II.getOperand(0), EltMask);
129606c3fb27SDimitry Andric   }
129706c3fb27SDimitry Andric 
1298bdd1243dSDimitry Andric   Function *NewIntrin = Intrinsic::getDeclaration(
1299bdd1243dSDimitry Andric       II.getModule(), II.getIntrinsicID(), OverloadTys);
1300e8d8bef9SDimitry Andric   CallInst *NewCall = IC.Builder.CreateCall(NewIntrin, Args);
1301e8d8bef9SDimitry Andric   NewCall->takeName(&II);
1302e8d8bef9SDimitry Andric   NewCall->copyMetadata(II);
1303e8d8bef9SDimitry Andric 
130406c3fb27SDimitry Andric   if (IsLoad) {
1305e8d8bef9SDimitry Andric     if (NewNumElts == 1) {
1306*cb14a3feSDimitry Andric       return IC.Builder.CreateInsertElement(PoisonValue::get(IIVTy), NewCall,
130706c3fb27SDimitry Andric                                             DemandedElts.countr_zero());
1308e8d8bef9SDimitry Andric     }
1309e8d8bef9SDimitry Andric 
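    // Scatter the narrow load's elements back into their original lanes; lanes
    // that were not demanded select index NewNumElts, which lies past the end
    // of the narrow result and therefore yields poison. E.g., demanded lanes
    // {0, 2} of a <4 x i32> result give the mask <0, 2, 1, 2>.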
1310e8d8bef9SDimitry Andric     SmallVector<int, 8> EltMask;
1311e8d8bef9SDimitry Andric     unsigned NewLoadIdx = 0;
1312e8d8bef9SDimitry Andric     for (unsigned OrigLoadIdx = 0; OrigLoadIdx < VWidth; ++OrigLoadIdx) {
1313e8d8bef9SDimitry Andric       if (!!DemandedElts[OrigLoadIdx])
1314e8d8bef9SDimitry Andric         EltMask.push_back(NewLoadIdx++);
1315e8d8bef9SDimitry Andric       else
1316e8d8bef9SDimitry Andric         EltMask.push_back(NewNumElts);
1317e8d8bef9SDimitry Andric     }
1318e8d8bef9SDimitry Andric 
131906c3fb27SDimitry Andric     auto *Shuffle = IC.Builder.CreateShuffleVector(NewCall, EltMask);
1320e8d8bef9SDimitry Andric 
1321e8d8bef9SDimitry Andric     return Shuffle;
1322e8d8bef9SDimitry Andric   }
1323e8d8bef9SDimitry Andric 
132406c3fb27SDimitry Andric   return NewCall;
132506c3fb27SDimitry Andric }
132606c3fb27SDimitry Andric 
1327bdd1243dSDimitry Andric std::optional<Value *> GCNTTIImpl::simplifyDemandedVectorEltsIntrinsic(
1328e8d8bef9SDimitry Andric     InstCombiner &IC, IntrinsicInst &II, APInt DemandedElts, APInt &UndefElts,
1329e8d8bef9SDimitry Andric     APInt &UndefElts2, APInt &UndefElts3,
1330e8d8bef9SDimitry Andric     std::function<void(Instruction *, unsigned, APInt, APInt &)>
1331e8d8bef9SDimitry Andric         SimplifyAndSetOp) const {
1332e8d8bef9SDimitry Andric   switch (II.getIntrinsicID()) {
1333e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_buffer_load:
1334e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_buffer_load_format:
1335e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_raw_buffer_load:
133606c3fb27SDimitry Andric   case Intrinsic::amdgcn_raw_ptr_buffer_load:
1337e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_raw_buffer_load_format:
133806c3fb27SDimitry Andric   case Intrinsic::amdgcn_raw_ptr_buffer_load_format:
1339e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_raw_tbuffer_load:
134006c3fb27SDimitry Andric   case Intrinsic::amdgcn_raw_ptr_tbuffer_load:
1341e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_s_buffer_load:
1342e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_struct_buffer_load:
134306c3fb27SDimitry Andric   case Intrinsic::amdgcn_struct_ptr_buffer_load:
1344e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_struct_buffer_load_format:
134506c3fb27SDimitry Andric   case Intrinsic::amdgcn_struct_ptr_buffer_load_format:
1346e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_struct_tbuffer_load:
134706c3fb27SDimitry Andric   case Intrinsic::amdgcn_struct_ptr_tbuffer_load:
1348e8d8bef9SDimitry Andric   case Intrinsic::amdgcn_tbuffer_load:
1349e8d8bef9SDimitry Andric     return simplifyAMDGCNMemoryIntrinsicDemanded(IC, II, DemandedElts);
1350e8d8bef9SDimitry Andric   default: {
1351e8d8bef9SDimitry Andric     if (getAMDGPUImageDMaskIntrinsic(II.getIntrinsicID())) {
1352e8d8bef9SDimitry Andric       return simplifyAMDGCNMemoryIntrinsicDemanded(IC, II, DemandedElts, 0);
1353e8d8bef9SDimitry Andric     }
1354e8d8bef9SDimitry Andric     break;
1355e8d8bef9SDimitry Andric   }
1356e8d8bef9SDimitry Andric   }
1357bdd1243dSDimitry Andric   return std::nullopt;
1358e8d8bef9SDimitry Andric }
1359