//===-- Intrinsics.cpp - Intrinsic Function Handling ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements functions required for supporting intrinsic functions.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Intrinsics.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/IntrinsicsBPF.h"
#include "llvm/IR/IntrinsicsHexagon.h"
#include "llvm/IR/IntrinsicsLoongArch.h"
#include "llvm/IR/IntrinsicsMips.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/IntrinsicsPowerPC.h"
#include "llvm/IR/IntrinsicsR600.h"
#include "llvm/IR/IntrinsicsRISCV.h"
#include "llvm/IR/IntrinsicsS390.h"
#include "llvm/IR/IntrinsicsVE.h"
#include "llvm/IR/IntrinsicsX86.h"
#include "llvm/IR/IntrinsicsXCore.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"

using namespace llvm;

/// Table of string intrinsic names indexed by enum value.
static constexpr const char *const IntrinsicNameTable[] = {
    "not_intrinsic",
#define GET_INTRINSIC_NAME_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_NAME_TABLE
};

StringRef Intrinsic::getBaseName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  return IntrinsicNameTable[id];
}

StringRef Intrinsic::getName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert(!Intrinsic::isOverloaded(id) &&
         "This version of getName does not support overloading");
  return getBaseName(id);
}

/// Returns a stable mangling for the type specified for use in the name
/// mangling scheme used by 'any' types in intrinsic signatures. The mangling
/// of named types is simply their name. Manglings for unnamed types consist
/// of a prefix ('p' for pointers, 'a' for arrays, 'f_' for functions)
/// combined with the mangling of their component types. A vararg function
/// type will have a suffix of 'vararg'. Since function types can contain
/// other function types, we close a function type mangling with suffix 'f'
/// which can't be confused with its prefix. This ensures we don't have
/// collisions between two unrelated function types. Otherwise, you might
/// parse ffXX as f(fXX) or f(fX)X. (X is a placeholder for any other type.)
/// The HasUnnamedType boolean is set if an unnamed type was encountered,
/// indicating that extra care must be taken to ensure a unique name.
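///
/// A few illustrative manglings, derived from the rules implemented below
/// (not an exhaustive specification): an i32 mangles to "i32", a float to
/// "f32", a pointer in address space 1 to "p1", a fixed vector <4 x i32> to
/// "v4i32", and a scalable vector <vscale x 4 x i32> to "nxv4i32".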
static std::string getMangledTypeStr(Type *Ty, bool &HasUnnamedType) {
  std::string Result;
  if (PointerType *PTyp = dyn_cast<PointerType>(Ty)) {
    Result += "p" + utostr(PTyp->getAddressSpace());
  } else if (ArrayType *ATyp = dyn_cast<ArrayType>(Ty)) {
    Result += "a" + utostr(ATyp->getNumElements()) +
              getMangledTypeStr(ATyp->getElementType(), HasUnnamedType);
  } else if (StructType *STyp = dyn_cast<StructType>(Ty)) {
    if (!STyp->isLiteral()) {
      Result += "s_";
      if (STyp->hasName())
        Result += STyp->getName();
      else
        HasUnnamedType = true;
    } else {
      Result += "sl_";
      for (auto *Elem : STyp->elements())
        Result += getMangledTypeStr(Elem, HasUnnamedType);
    }
    // Ensure nested structs are distinguishable.
    Result += "s";
  } else if (FunctionType *FT = dyn_cast<FunctionType>(Ty)) {
    Result += "f_" + getMangledTypeStr(FT->getReturnType(), HasUnnamedType);
    for (size_t i = 0; i < FT->getNumParams(); i++)
      Result += getMangledTypeStr(FT->getParamType(i), HasUnnamedType);
    if (FT->isVarArg())
      Result += "vararg";
    // Ensure nested function types are distinguishable.
    Result += "f";
  } else if (VectorType *VTy = dyn_cast<VectorType>(Ty)) {
    ElementCount EC = VTy->getElementCount();
    if (EC.isScalable())
      Result += "nx";
    Result += "v" + utostr(EC.getKnownMinValue()) +
              getMangledTypeStr(VTy->getElementType(), HasUnnamedType);
  } else if (TargetExtType *TETy = dyn_cast<TargetExtType>(Ty)) {
    Result += "t";
    Result += TETy->getName();
    for (Type *ParamTy : TETy->type_params())
      Result += "_" + getMangledTypeStr(ParamTy, HasUnnamedType);
    for (unsigned IntParam : TETy->int_params())
      Result += "_" + utostr(IntParam);
    // Ensure nested target extension types are distinguishable.
    Result += "t";
  } else if (Ty) {
    switch (Ty->getTypeID()) {
    default:
      llvm_unreachable("Unhandled type");
    case Type::VoidTyID:
      Result += "isVoid";
      break;
    case Type::MetadataTyID:
      Result += "Metadata";
      break;
    case Type::HalfTyID:
      Result += "f16";
      break;
    case Type::BFloatTyID:
      Result += "bf16";
      break;
    case Type::FloatTyID:
      Result += "f32";
      break;
    case Type::DoubleTyID:
      Result += "f64";
      break;
    case Type::X86_FP80TyID:
      Result += "f80";
      break;
    case Type::FP128TyID:
      Result += "f128";
      break;
    case Type::PPC_FP128TyID:
      Result += "ppcf128";
      break;
    case Type::X86_AMXTyID:
      Result += "x86amx";
      break;
    case Type::IntegerTyID:
      Result += "i" + utostr(cast<IntegerType>(Ty)->getBitWidth());
      break;
    }
  }
  return Result;
}

static std::string getIntrinsicNameImpl(Intrinsic::ID Id, ArrayRef<Type *> Tys,
                                        Module *M, FunctionType *FT,
                                        bool EarlyModuleCheck) {

  assert(Id < Intrinsic::num_intrinsics && "Invalid intrinsic ID!");
  assert((Tys.empty() || Intrinsic::isOverloaded(Id)) &&
         "This version of getName is for overloaded intrinsics only");
  (void)EarlyModuleCheck;
  assert((!EarlyModuleCheck || M ||
          !any_of(Tys, [](Type *T) { return isa<PointerType>(T); })) &&
         "Intrinsic overloading on pointer types needs to provide a Module");
  bool HasUnnamedType = false;
  std::string Result(Intrinsic::getBaseName(Id));
  for (Type *Ty : Tys)
    Result += "." + getMangledTypeStr(Ty, HasUnnamedType);
  if (HasUnnamedType) {
    assert(M && "unnamed types need a module");
    if (!FT)
      FT = Intrinsic::getType(M->getContext(), Id, Tys);
    else
      assert((FT == Intrinsic::getType(M->getContext(), Id, Tys)) &&
             "Provided FunctionType must match arguments");
    return M->getUniqueIntrinsicName(Result, Id, FT);
  }
  return Result;
}

std::string Intrinsic::getName(ID Id, ArrayRef<Type *> Tys, Module *M,
                               FunctionType *FT) {
  assert(M && "We need to have a Module");
  return getIntrinsicNameImpl(Id, Tys, M, FT, true);
}

std::string Intrinsic::getNameNoUnnamedTypes(ID Id, ArrayRef<Type *> Tys) {
  return getIntrinsicNameImpl(Id, Tys, nullptr, nullptr, false);
}
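
// Illustrative usage, not part of this file's logic: given a Module M and its
// LLVMContext Ctx, the overload of llvm.fabs on <4 x float> is named by
// appending the mangled overload type to the base name, e.g.
//   Type *V4F32 = FixedVectorType::get(Type::getFloatTy(Ctx), 4);
//   std::string N = Intrinsic::getName(Intrinsic::fabs, {V4F32}, &M);
//   // N == "llvm.fabs.v4f32"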

/// IIT_Info - These are enumerators that describe the entries returned by the
/// getIntrinsicInfoTableEntries function.
///
/// Defined in Intrinsics.td.
enum IIT_Info {
#define GET_INTRINSIC_IITINFO
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_IITINFO
};

static void
DecodeIITType(unsigned &NextElt, ArrayRef<unsigned char> Infos,
              IIT_Info LastInfo,
              SmallVectorImpl<Intrinsic::IITDescriptor> &OutputTable) {
  using namespace Intrinsic;

  bool IsScalableVector = (LastInfo == IIT_SCALABLE_VEC);

  IIT_Info Info = IIT_Info(Infos[NextElt++]);
  unsigned StructElts = 2;

  switch (Info) {
  case IIT_Done:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Void, 0));
    return;
  case IIT_VARARG:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VarArg, 0));
    return;
  case IIT_MMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::MMX, 0));
    return;
  case IIT_AMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AMX, 0));
    return;
  case IIT_TOKEN:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Token, 0));
    return;
  case IIT_METADATA:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Metadata, 0));
    return;
  case IIT_F16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Half, 0));
    return;
  case IIT_BF16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::BFloat, 0));
    return;
  case IIT_F32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Float, 0));
    return;
  case IIT_F64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Double, 0));
    return;
  case IIT_F128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Quad, 0));
    return;
  case IIT_PPCF128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PPCQuad, 0));
    return;
  case IIT_I1:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 1));
    return;
  case IIT_I2:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 2));
    return;
  case IIT_I4:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 4));
    return;
  case IIT_AARCH64_SVCOUNT:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AArch64Svcount, 0));
    return;
  case IIT_I8:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8));
    return;
  case IIT_I16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 16));
    return;
  case IIT_I32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 32));
    return;
  case IIT_I64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 64));
    return;
  case IIT_I128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 128));
    return;
  case IIT_V1:
    OutputTable.push_back(IITDescriptor::getVector(1, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V2:
    OutputTable.push_back(IITDescriptor::getVector(2, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V3:
    OutputTable.push_back(IITDescriptor::getVector(3, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V4:
    OutputTable.push_back(IITDescriptor::getVector(4, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V6:
    OutputTable.push_back(IITDescriptor::getVector(6, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V8:
    OutputTable.push_back(IITDescriptor::getVector(8, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V10:
    OutputTable.push_back(IITDescriptor::getVector(10, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V16:
    OutputTable.push_back(IITDescriptor::getVector(16, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V32:
    OutputTable.push_back(IITDescriptor::getVector(32, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V64:
    OutputTable.push_back(IITDescriptor::getVector(64, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V128:
    OutputTable.push_back(IITDescriptor::getVector(128, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V256:
    OutputTable.push_back(IITDescriptor::getVector(256, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V512:
    OutputTable.push_back(IITDescriptor::getVector(512, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V1024:
    OutputTable.push_back(IITDescriptor::getVector(1024, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_EXTERNREF:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 10));
    return;
  case IIT_FUNCREF:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 20));
    return;
  case IIT_PTR:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 0));
    return;
  case IIT_ANYPTR: // [ANYPTR addrspace]
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::Pointer, Infos[NextElt++]));
    return;
  case IIT_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Argument, ArgInfo));
    return;
  }
  case IIT_EXTEND_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::ExtendArgument, ArgInfo));
    return;
  }
  case IIT_TRUNC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::TruncArgument, ArgInfo));
    return;
  }
  case IIT_HALF_VEC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::HalfVecArgument, ArgInfo));
    return;
  }
  case IIT_SAME_VEC_WIDTH_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::SameVecWidthArgument, ArgInfo));
    return;
  }
  case IIT_VEC_OF_ANYPTRS_TO_ELT: {
    unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecOfAnyPtrsToElt, ArgNo, RefNo));
    return;
  }
  case IIT_EMPTYSTRUCT:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0));
    return;
  case IIT_STRUCT9:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT8:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT7:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT6:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT5:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT4:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT3:
    ++StructElts;
    [[fallthrough]];
  case IIT_STRUCT2: {
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::Struct, StructElts));

    for (unsigned i = 0; i != StructElts; ++i)
      DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_SUBDIVIDE2_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::Subdivide2Argument, ArgInfo));
    return;
  }
  case IIT_SUBDIVIDE4_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::Subdivide4Argument, ArgInfo));
    return;
  }
  case IIT_VEC_ELEMENT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecElementArgument, ArgInfo));
    return;
  }
  case IIT_SCALABLE_VEC: {
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_VEC_OF_BITCASTS_TO_INT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecOfBitcastsToInt, ArgInfo));
    return;
  }
  }
  llvm_unreachable("unhandled");
}

#define GET_INTRINSIC_GENERATOR_GLOBAL
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_GENERATOR_GLOBAL

void Intrinsic::getIntrinsicInfoTableEntries(
    ID id, SmallVectorImpl<IITDescriptor> &T) {
  static_assert(sizeof(IIT_Table[0]) == 2,
                "Expect 16-bit entries in IIT_Table");
  // Check to see if the intrinsic's type was expressible by the table.
  uint16_t TableVal = IIT_Table[id - 1];

  // Decode the TableVal into an array of IITValues.
  SmallVector<unsigned char> IITValues;
  ArrayRef<unsigned char> IITEntries;
  unsigned NextElt = 0;
  if (TableVal >> 15) {
    // This is an offset into the IIT_LongEncodingTable.
    IITEntries = IIT_LongEncodingTable;

    // Strip sentinel bit.
    NextElt = TableVal & 0x7fff;
  } else {
    // If the entry was encoded into a single word in the table itself, decode
    // it from an array of nibbles to an array of bytes.
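    // Illustrative only: a table value of 0x0321 would unpack to the byte
    // sequence {0x1, 0x2, 0x3}, which is then consumed from index 0 onwards.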
    do {
      IITValues.push_back(TableVal & 0xF);
      TableVal >>= 4;
    } while (TableVal);

    IITEntries = IITValues;
    NextElt = 0;
  }

  // Okay, decode the table into the output vector of IITDescriptors.
  DecodeIITType(NextElt, IITEntries, IIT_Done, T);
  while (NextElt != IITEntries.size() && IITEntries[NextElt] != 0)
    DecodeIITType(NextElt, IITEntries, IIT_Done, T);
}

static Type *DecodeFixedType(ArrayRef<Intrinsic::IITDescriptor> &Infos,
                             ArrayRef<Type *> Tys, LLVMContext &Context) {
  using namespace Intrinsic;

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void:
    return Type::getVoidTy(Context);
  case IITDescriptor::VarArg:
    return Type::getVoidTy(Context);
  case IITDescriptor::MMX:
    return llvm::FixedVectorType::get(llvm::IntegerType::get(Context, 64), 1);
  case IITDescriptor::AMX:
    return Type::getX86_AMXTy(Context);
  case IITDescriptor::Token:
    return Type::getTokenTy(Context);
  case IITDescriptor::Metadata:
    return Type::getMetadataTy(Context);
  case IITDescriptor::Half:
    return Type::getHalfTy(Context);
  case IITDescriptor::BFloat:
    return Type::getBFloatTy(Context);
  case IITDescriptor::Float:
    return Type::getFloatTy(Context);
  case IITDescriptor::Double:
    return Type::getDoubleTy(Context);
  case IITDescriptor::Quad:
    return Type::getFP128Ty(Context);
  case IITDescriptor::PPCQuad:
    return Type::getPPC_FP128Ty(Context);
  case IITDescriptor::AArch64Svcount:
    return TargetExtType::get(Context, "aarch64.svcount");

  case IITDescriptor::Integer:
    return IntegerType::get(Context, D.Integer_Width);
  case IITDescriptor::Vector:
    return VectorType::get(DecodeFixedType(Infos, Tys, Context),
                           D.Vector_Width);
  case IITDescriptor::Pointer:
    return PointerType::get(Context, D.Pointer_AddressSpace);
  case IITDescriptor::Struct: {
    SmallVector<Type *, 8> Elts;
    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      Elts.push_back(DecodeFixedType(Infos, Tys, Context));
    return StructType::get(Context, Elts);
  }
  case IITDescriptor::Argument:
    return Tys[D.getArgumentNumber()];
  case IITDescriptor::ExtendArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getExtendedElementVectorType(VTy);

    return IntegerType::get(Context, 2 * cast<IntegerType>(Ty)->getBitWidth());
  }
  case IITDescriptor::TruncArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getTruncatedElementVectorType(VTy);

    IntegerType *ITy = cast<IntegerType>(Ty);
    assert(ITy->getBitWidth() % 2 == 0);
    return IntegerType::get(Context, ITy->getBitWidth() / 2);
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
    return VectorType::getSubdividedVectorType(VTy, SubDivs);
  }
  case IITDescriptor::HalfVecArgument:
    return VectorType::getHalfElementsVectorType(
        cast<VectorType>(Tys[D.getArgumentNumber()]));
  case IITDescriptor::SameVecWidthArgument: {
    Type *EltTy = DecodeFixedType(Infos, Tys, Context);
    Type *Ty = Tys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::get(EltTy, VTy->getElementCount());
    return EltTy;
  }
  case IITDescriptor::VecElementArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VTy->getElementType();
    llvm_unreachable("Expected an argument of Vector Type");
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    return VectorType::getInteger(VTy);
  }
  case IITDescriptor::VecOfAnyPtrsToElt:
    // Return the overloaded type (which determines the pointer's address
    // space).
    return Tys[D.getOverloadArgNumber()];
  }
  llvm_unreachable("unhandled");
}

FunctionType *Intrinsic::getType(LLVMContext &Context, ID id,
                                 ArrayRef<Type *> Tys) {
  SmallVector<IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(id, Table);

  ArrayRef<IITDescriptor> TableRef = Table;
  Type *ResultTy = DecodeFixedType(TableRef, Tys, Context);

  SmallVector<Type *, 8> ArgTys;
  while (!TableRef.empty())
    ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context));

  // DecodeFixedType returns Void for IITDescriptor::Void and
  // IITDescriptor::VarArg. If we see a void type as the type of the last
  // argument, it is a vararg intrinsic.
  if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) {
    ArgTys.pop_back();
    return FunctionType::get(ResultTy, ArgTys, true);
  }
  return FunctionType::get(ResultTy, ArgTys, false);
}
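
// Illustrative usage (assumes a context Ctx): for an intrinsic overloaded on a
// single floating-point type, such as llvm.fabs, the overload type fills in
// the matched return and parameter types, e.g.
//   FunctionType *FTy =
//       Intrinsic::getType(Ctx, Intrinsic::fabs, {Type::getFloatTy(Ctx)});
//   // FTy is "float (float)"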

bool Intrinsic::isOverloaded(ID id) {
#define GET_INTRINSIC_OVERLOAD_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_OVERLOAD_TABLE
}

/// Table of per-target intrinsic name tables.
#define GET_INTRINSIC_TARGET_DATA
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_TARGET_DATA

bool Intrinsic::isTargetIntrinsic(Intrinsic::ID IID) {
  return IID > TargetInfos[0].Count;
}

int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name,
                                               StringRef Target) {
  assert(Name.starts_with("llvm.") && "Unexpected intrinsic prefix");
  assert(Name.drop_front(5).starts_with(Target) && "Unexpected target");

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  if (!Target.empty())
    CmpEnd += 1 + Target.size(); // Skip the ".target" component.

  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.starts_with(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}

/// Find the segment of \c IntrinsicNameTable for intrinsics with the same
/// target as \c Name, or the generic table if \c Name is not target specific.
///
/// Returns the relevant slice of \c IntrinsicNameTable and the target name.
static std::pair<ArrayRef<const char *>, StringRef>
findTargetSubtable(StringRef Name) {
  assert(Name.starts_with("llvm."));

  ArrayRef<IntrinsicTargetInfo> Targets(TargetInfos);
  // Drop "llvm." and take the first dotted component. That will be the target
  // if this is target specific.
  StringRef Target = Name.drop_front(5).split('.').first;
  auto It = partition_point(
      Targets, [=](const IntrinsicTargetInfo &TI) { return TI.Name < Target; });
  // Either we found the target, or we fall back to the generic set, which is
  // always first.
  const auto &TI = It != Targets.end() && It->Name == Target ? *It : Targets[0];
  return {ArrayRef(&IntrinsicNameTable[1] + TI.Offset, TI.Count), TI.Name};
}

/// This does the actual lookup of an intrinsic ID which matches the given
/// function name.
Intrinsic::ID Intrinsic::lookupIntrinsicID(StringRef Name) {
  auto [NameTable, Target] = findTargetSubtable(Name);
  int Idx = Intrinsic::lookupLLVMIntrinsicByName(NameTable, Name, Target);
  if (Idx == -1)
    return Intrinsic::not_intrinsic;

  // Intrinsic IDs correspond to the location in IntrinsicNameTable, but we
  // have an index into a sub-table.
  int Adjust = NameTable.data() - IntrinsicNameTable;
  Intrinsic::ID ID = static_cast<Intrinsic::ID>(Idx + Adjust);

  // If the intrinsic is not overloaded, require an exact match. If it is
  // overloaded, require either exact or prefix match.
  const auto MatchSize = strlen(NameTable[Idx]);
  assert(Name.size() >= MatchSize && "Expected either exact or prefix match");
  bool IsExactMatch = Name.size() == MatchSize;
  return IsExactMatch || Intrinsic::isOverloaded(ID) ? ID
                                                     : Intrinsic::not_intrinsic;
}
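
// Illustrative usage: lookup is by name; overloaded intrinsics match on their
// mangled prefix, non-overloaded ones require an exact match, e.g.
//   Intrinsic::lookupIntrinsicID("llvm.memcpy.p0.p0.i64") == Intrinsic::memcpy
//   Intrinsic::lookupIntrinsicID("llvm.trap") == Intrinsic::trap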

/// This defines the "Intrinsic::getAttributes(ID id)" method.
#define GET_INTRINSIC_ATTRIBUTES
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_ATTRIBUTES

Function *Intrinsic::getOrInsertDeclaration(Module *M, ID id,
                                            ArrayRef<Type *> Tys) {
  // There can never be multiple globals with the same name of different types,
  // because intrinsics must be a specific type.
  auto *FT = getType(M->getContext(), id, Tys);
  return cast<Function>(
      M->getOrInsertFunction(
           Tys.empty() ? getName(id) : getName(id, Tys, M, FT), FT)
          .getCallee());
}

Function *Intrinsic::getDeclarationIfExists(const Module *M, ID id) {
  return M->getFunction(getName(id));
}

Function *Intrinsic::getDeclarationIfExists(Module *M, ID id,
                                            ArrayRef<Type *> Tys,
                                            FunctionType *FT) {
  return M->getFunction(getName(id, Tys, M, FT));
}
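
// Illustrative usage (assumes a Module M and context Ctx): materialize a
// declaration of the i32 overload of llvm.umax, e.g.
//   Function *UMax = Intrinsic::getOrInsertDeclaration(
//       &M, Intrinsic::umax, {Type::getInt32Ty(Ctx)});
//   // UMax is "declare i32 @llvm.umax.i32(i32, i32)"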

// This defines the "Intrinsic::getIntrinsicForClangBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_CLANG_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_CLANG_BUILTIN

// This defines the "Intrinsic::getIntrinsicForMSBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_MS_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_MS_BUILTIN

bool Intrinsic::isConstrainedFPIntrinsic(ID QID) {
  switch (QID) {
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
#undef INSTRUCTION
    return true;
  default:
    return false;
  }
}

bool Intrinsic::hasConstrainedFPRoundingModeOperand(Intrinsic::ID QID) {
  switch (QID) {
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return ROUND_MODE == 1;
#include "llvm/IR/ConstrainedOps.def"
#undef INSTRUCTION
  default:
    return false;
  }
}

using DeferredIntrinsicMatchPair =
    std::pair<Type *, ArrayRef<Intrinsic::IITDescriptor>>;

static bool
matchIntrinsicType(Type *Ty, ArrayRef<Intrinsic::IITDescriptor> &Infos,
                   SmallVectorImpl<Type *> &ArgTys,
                   SmallVectorImpl<DeferredIntrinsicMatchPair> &DeferredChecks,
                   bool IsDeferredCheck) {
  using namespace Intrinsic;

  // If we ran out of descriptors, there are too many arguments.
  if (Infos.empty())
    return true;

  // Do this before slicing off the 'front' part.
  auto InfosRef = Infos;
  auto DeferCheck = [&DeferredChecks, &InfosRef](Type *T) {
    DeferredChecks.emplace_back(T, InfosRef);
    return false;
  };

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void:
    return !Ty->isVoidTy();
  case IITDescriptor::VarArg:
    return true;
  case IITDescriptor::MMX: {
    FixedVectorType *VT = dyn_cast<FixedVectorType>(Ty);
    return !VT || VT->getNumElements() != 1 ||
           !VT->getElementType()->isIntegerTy(64);
  }
  case IITDescriptor::AMX:
    return !Ty->isX86_AMXTy();
  case IITDescriptor::Token:
    return !Ty->isTokenTy();
  case IITDescriptor::Metadata:
    return !Ty->isMetadataTy();
  case IITDescriptor::Half:
    return !Ty->isHalfTy();
  case IITDescriptor::BFloat:
    return !Ty->isBFloatTy();
  case IITDescriptor::Float:
    return !Ty->isFloatTy();
  case IITDescriptor::Double:
    return !Ty->isDoubleTy();
  case IITDescriptor::Quad:
    return !Ty->isFP128Ty();
  case IITDescriptor::PPCQuad:
    return !Ty->isPPC_FP128Ty();
  case IITDescriptor::Integer:
    return !Ty->isIntegerTy(D.Integer_Width);
  case IITDescriptor::AArch64Svcount:
    return !isa<TargetExtType>(Ty) ||
           cast<TargetExtType>(Ty)->getName() != "aarch64.svcount";
  case IITDescriptor::Vector: {
    VectorType *VT = dyn_cast<VectorType>(Ty);
    return !VT || VT->getElementCount() != D.Vector_Width ||
           matchIntrinsicType(VT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }
  case IITDescriptor::Pointer: {
    PointerType *PT = dyn_cast<PointerType>(Ty);
    return !PT || PT->getAddressSpace() != D.Pointer_AddressSpace;
  }

  case IITDescriptor::Struct: {
    StructType *ST = dyn_cast<StructType>(Ty);
    if (!ST || !ST->isLiteral() || ST->isPacked() ||
        ST->getNumElements() != D.Struct_NumElements)
      return true;

    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      if (matchIntrinsicType(ST->getElementType(i), Infos, ArgTys,
                             DeferredChecks, IsDeferredCheck))
        return true;
    return false;
  }

  case IITDescriptor::Argument:
    // If this is the second occurrence of an argument,
    // verify that the later instance matches the previous instance.
    if (D.getArgumentNumber() < ArgTys.size())
      return Ty != ArgTys[D.getArgumentNumber()];

    if (D.getArgumentNumber() > ArgTys.size() ||
        D.getArgumentKind() == IITDescriptor::AK_MatchType)
      return IsDeferredCheck || DeferCheck(Ty);

    assert(D.getArgumentNumber() == ArgTys.size() && !IsDeferredCheck &&
           "Table consistency error");
    ArgTys.push_back(Ty);

    switch (D.getArgumentKind()) {
    case IITDescriptor::AK_Any:
      return false; // Success
    case IITDescriptor::AK_AnyInteger:
      return !Ty->isIntOrIntVectorTy();
    case IITDescriptor::AK_AnyFloat:
      return !Ty->isFPOrFPVectorTy();
    case IITDescriptor::AK_AnyVector:
      return !isa<VectorType>(Ty);
    case IITDescriptor::AK_AnyPointer:
      return !isa<PointerType>(Ty);
    default:
      break;
    }
    llvm_unreachable("all argument kinds not covered");

  case IITDescriptor::ExtendArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getExtendedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), 2 * ITy->getBitWidth());
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::TruncArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getTruncatedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), ITy->getBitWidth() / 2);
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::HalfVecArgument:
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    return !isa<VectorType>(ArgTys[D.getArgumentNumber()]) ||
           VectorType::getHalfElementsVectorType(
               cast<VectorType>(ArgTys[D.getArgumentNumber()])) != Ty;
  case IITDescriptor::SameVecWidthArgument: {
    if (D.getArgumentNumber() >= ArgTys.size()) {
      // Defer check and subsequent check for the vector element type.
      Infos = Infos.slice(1);
      return IsDeferredCheck || DeferCheck(Ty);
    }
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgType = dyn_cast<VectorType>(Ty);
    // Both must be vectors of the same number of elements or neither.
    if ((ReferenceType != nullptr) != (ThisArgType != nullptr))
      return true;
    Type *EltTy = Ty;
    if (ThisArgType) {
      if (ReferenceType->getElementCount() != ThisArgType->getElementCount())
        return true;
      EltTy = ThisArgType->getElementType();
    }
    return matchIntrinsicType(EltTy, Infos, ArgTys, DeferredChecks,
                              IsDeferredCheck);
  }
  case IITDescriptor::VecOfAnyPtrsToElt: {
    unsigned RefArgNumber = D.getRefArgNumber();
    if (RefArgNumber >= ArgTys.size()) {
      if (IsDeferredCheck)
        return true;
      // If forward referencing, already add the pointer-vector type and
      // defer the checks for later.
      ArgTys.push_back(Ty);
      return DeferCheck(Ty);
    }

    if (!IsDeferredCheck) {
      assert(D.getOverloadArgNumber() == ArgTys.size() &&
             "Table consistency error");
      ArgTys.push_back(Ty);
    }

    // Verify the overloaded type "matches" the Ref type.
    // i.e. Ty is a vector with the same width as Ref.
    // Composed of pointers to the same element type as Ref.
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType ||
        (ReferenceType->getElementCount() != ThisArgVecTy->getElementCount()))
      return true;
    return !ThisArgVecTy->getElementType()->isPointerTy();
  }
  case IITDescriptor::VecElementArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck ? true : DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    return !ReferenceType || Ty != ReferenceType->getElementType();
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(NewTy)) {
      int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
      NewTy = VectorType::getSubdividedVectorType(VTy, SubDivs);
      return Ty != NewTy;
    }
    return true;
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType)
      return true;
    return ThisArgVecTy != VectorType::getInteger(ReferenceType);
  }
  }
  llvm_unreachable("unhandled");
}

Intrinsic::MatchIntrinsicTypesResult
Intrinsic::matchIntrinsicSignature(FunctionType *FTy,
                                   ArrayRef<Intrinsic::IITDescriptor> &Infos,
                                   SmallVectorImpl<Type *> &ArgTys) {
  SmallVector<DeferredIntrinsicMatchPair, 2> DeferredChecks;
  if (matchIntrinsicType(FTy->getReturnType(), Infos, ArgTys, DeferredChecks,
                         false))
    return MatchIntrinsicTypes_NoMatchRet;

  unsigned NumDeferredReturnChecks = DeferredChecks.size();

  for (auto *Ty : FTy->params())
    if (matchIntrinsicType(Ty, Infos, ArgTys, DeferredChecks, false))
      return MatchIntrinsicTypes_NoMatchArg;

  for (unsigned I = 0, E = DeferredChecks.size(); I != E; ++I) {
    DeferredIntrinsicMatchPair &Check = DeferredChecks[I];
    if (matchIntrinsicType(Check.first, Check.second, ArgTys, DeferredChecks,
                           true))
      return I < NumDeferredReturnChecks ? MatchIntrinsicTypes_NoMatchRet
                                         : MatchIntrinsicTypes_NoMatchArg;
  }

  return MatchIntrinsicTypes_Match;
}
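
// Illustrative usage (assumes a Function *F whose intrinsic ID is ID); this is
// essentially what getIntrinsicSignature below does to validate a declaration
// against its IIT table, e.g.
//   SmallVector<Intrinsic::IITDescriptor, 8> Table;
//   Intrinsic::getIntrinsicInfoTableEntries(ID, Table);
//   ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;
//   SmallVector<Type *, 4> ArgTys;
//   bool Mismatch = Intrinsic::matchIntrinsicSignature(F->getFunctionType(),
//                                                      TableRef, ArgTys) !=
//                   Intrinsic::MatchIntrinsicTypes_Match;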

bool Intrinsic::matchIntrinsicVarArg(
    bool isVarArg, ArrayRef<Intrinsic::IITDescriptor> &Infos) {
  // If there are no descriptors left, then it can't be a vararg.
  if (Infos.empty())
    return isVarArg;

  // There should be only one descriptor remaining at this point.
  if (Infos.size() != 1)
    return true;

  // Check and verify the descriptor.
  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);
  if (D.Kind == IITDescriptor::VarArg)
    return !isVarArg;

  return true;
}

bool Intrinsic::getIntrinsicSignature(Intrinsic::ID ID, FunctionType *FT,
                                      SmallVectorImpl<Type *> &ArgTys) {
  if (!ID)
    return false;

  SmallVector<Intrinsic::IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(ID, Table);
  ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;

  if (Intrinsic::matchIntrinsicSignature(FT, TableRef, ArgTys) !=
      Intrinsic::MatchIntrinsicTypesResult::MatchIntrinsicTypes_Match) {
    return false;
  }
  if (Intrinsic::matchIntrinsicVarArg(FT->isVarArg(), TableRef))
    return false;
  return true;
}

bool Intrinsic::getIntrinsicSignature(Function *F,
                                      SmallVectorImpl<Type *> &ArgTys) {
  return getIntrinsicSignature(F->getIntrinsicID(), F->getFunctionType(),
                               ArgTys);
}

std::optional<Function *> Intrinsic::remangleIntrinsicFunction(Function *F) {
  SmallVector<Type *, 4> ArgTys;
  if (!getIntrinsicSignature(F, ArgTys))
    return std::nullopt;

  Intrinsic::ID ID = F->getIntrinsicID();
  StringRef Name = F->getName();
  std::string WantedName =
      Intrinsic::getName(ID, ArgTys, F->getParent(), F->getFunctionType());
  if (Name == WantedName)
    return std::nullopt;

  Function *NewDecl = [&] {
    if (auto *ExistingGV = F->getParent()->getNamedValue(WantedName)) {
      if (auto *ExistingF = dyn_cast<Function>(ExistingGV))
        if (ExistingF->getFunctionType() == F->getFunctionType())
          return ExistingF;

      // The name already exists, but is not a function or has the wrong
      // prototype. Make room for the new one by renaming the old version.
      // Either this old version will be removed later on or the module is
      // invalid and we'll get an error.
      ExistingGV->setName(WantedName + ".renamed");
    }
    return Intrinsic::getOrInsertDeclaration(F->getParent(), ID, ArgTys);
  }();

  NewDecl->setCallingConv(F->getCallingConv());
  assert(NewDecl->getFunctionType() == F->getFunctionType() &&
         "Shouldn't change the signature");
  return NewDecl;
}