//===- Function.cpp - Implement the Global object classes ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Function class for the IR library.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Function.h"
#include "SymbolTableListTraitsImpl.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/AbstractCallSite.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/IntrinsicsBPF.h"
#include "llvm/IR/IntrinsicsDirectX.h"
#include "llvm/IR/IntrinsicsHexagon.h"
#include "llvm/IR/IntrinsicsMips.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/IntrinsicsPowerPC.h"
#include "llvm/IR/IntrinsicsR600.h"
#include "llvm/IR/IntrinsicsRISCV.h"
#include "llvm/IR/IntrinsicsS390.h"
#include "llvm/IR/IntrinsicsVE.h"
#include "llvm/IR/IntrinsicsWebAssembly.h"
#include "llvm/IR/IntrinsicsX86.h"
#include "llvm/IR/IntrinsicsXCore.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/SymbolTableListTraits.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/ModRef.h"
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

using namespace llvm;
using ProfileCount = Function::ProfileCount;

// Explicit instantiations of SymbolTableListTraits since some of the methods
// are not in the public header file...
template class llvm::SymbolTableListTraits<BasicBlock>;

static cl::opt<unsigned> NonGlobalValueMaxNameSize(
    "non-global-value-max-name-size", cl::Hidden, cl::init(1024),
    cl::desc("Maximum size for the name of non-global values."));

//===----------------------------------------------------------------------===//
// Argument Implementation
//===----------------------------------------------------------------------===//

Argument::Argument(Type *Ty, const Twine &Name, Function *Par, unsigned ArgNo)
    : Value(Ty, Value::ArgumentVal), Parent(Par), ArgNo(ArgNo) {
  setName(Name);
}

void Argument::setParent(Function *parent) {
  Parent = parent;
}

bool Argument::hasNonNullAttr(bool AllowUndefOrPoison) const {
  if (!getType()->isPointerTy()) return false;
  if (getParent()->hasParamAttribute(getArgNo(), Attribute::NonNull) &&
      (AllowUndefOrPoison ||
       getParent()->hasParamAttribute(getArgNo(), Attribute::NoUndef)))
    return true;
  else if (getDereferenceableBytes() > 0 &&
           !NullPointerIsDefined(getParent(),
                                 getType()->getPointerAddressSpace()))
    return true;
  return false;
}

bool Argument::hasByValAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::ByVal);
}

bool Argument::hasByRefAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::ByRef);
}

bool Argument::hasSwiftSelfAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftSelf);
}

bool Argument::hasSwiftErrorAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftError);
}

bool Argument::hasInAllocaAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::InAlloca);
}

bool Argument::hasPreallocatedAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::Preallocated);
}

bool Argument::hasPassPointeeByValueCopyAttr() const {
  if (!getType()->isPointerTy()) return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttr(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::Preallocated);
}

bool Argument::hasPointeeInMemoryValueAttr() const {
  if (!getType()->isPointerTy())
    return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttr(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::StructRet) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::Preallocated) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::ByRef);
}

/// For a byval, sret, inalloca, or preallocated parameter, get the in-memory
/// parameter type.
static Type *getMemoryParamAllocType(AttributeSet ParamAttrs) {
  // FIXME: All the type carrying attributes are mutually exclusive, so there
  // should be a single query to get the stored type that handles any of them.
  if (Type *ByValTy = ParamAttrs.getByValType())
    return ByValTy;
  if (Type *ByRefTy = ParamAttrs.getByRefType())
    return ByRefTy;
  if (Type *PreAllocTy = ParamAttrs.getPreallocatedType())
    return PreAllocTy;
  if (Type *InAllocaTy = ParamAttrs.getInAllocaType())
    return InAllocaTy;
  if (Type *SRetTy = ParamAttrs.getStructRetType())
    return SRetTy;

  return nullptr;
}

uint64_t Argument::getPassPointeeByValueCopySize(const DataLayout &DL) const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttrs(getArgNo());
  if (Type *MemTy = getMemoryParamAllocType(ParamAttrs))
    return DL.getTypeAllocSize(MemTy);
  return 0;
}

Type *Argument::getPointeeInMemoryValueType() const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttrs(getArgNo());
  return getMemoryParamAllocType(ParamAttrs);
}

uint64_t Argument::getParamAlignment() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlignment(getArgNo());
}

MaybeAlign Argument::getParamAlign() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlign(getArgNo());
}

MaybeAlign Argument::getParamStackAlign() const {
  return getParent()->getParamStackAlign(getArgNo());
}

Type *Argument::getParamByValType() const {
  assert(getType()->isPointerTy() && "Only pointers have byval types");
  return getParent()->getParamByValType(getArgNo());
}

Type *Argument::getParamStructRetType() const {
  assert(getType()->isPointerTy() && "Only pointers have sret types");
  return getParent()->getParamStructRetType(getArgNo());
}

Type *Argument::getParamByRefType() const {
  assert(getType()->isPointerTy() && "Only pointers have byref types");
  return getParent()->getParamByRefType(getArgNo());
}

Type *Argument::getParamInAllocaType() const {
  assert(getType()->isPointerTy() && "Only pointers have inalloca types");
  return getParent()->getParamInAllocaType(getArgNo());
}

uint64_t Argument::getDereferenceableBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableBytes(getArgNo());
}

uint64_t Argument::getDereferenceableOrNullBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableOrNullBytes(getArgNo());
}

bool Argument::hasNestAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::Nest);
}

bool Argument::hasNoAliasAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoAlias);
}

bool Argument::hasNoCaptureAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoCapture);
}

bool Argument::hasNoFreeAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoFree);
}

bool Argument::hasStructRetAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::StructRet);
}

bool Argument::hasInRegAttr() const {
  return hasAttribute(Attribute::InReg);
}

bool Argument::hasReturnedAttr() const {
  return hasAttribute(Attribute::Returned);
}

bool Argument::hasZExtAttr() const {
  return hasAttribute(Attribute::ZExt);
}

bool Argument::hasSExtAttr() const {
  return hasAttribute(Attribute::SExt);
}

bool Argument::onlyReadsMemory() const {
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttr(getArgNo(), Attribute::ReadOnly) ||
         Attrs.hasParamAttr(getArgNo(), Attribute::ReadNone);
}

void Argument::addAttrs(AttrBuilder &B) {
  AttributeList AL = getParent()->getAttributes();
  AL = AL.addParamAttributes(Parent->getContext(), getArgNo(), B);
  getParent()->setAttributes(AL);
}

void Argument::addAttr(Attribute::AttrKind Kind) {
  getParent()->addParamAttr(getArgNo(), Kind);
}

void Argument::addAttr(Attribute Attr) {
  getParent()->addParamAttr(getArgNo(), Attr);
}

void Argument::removeAttr(Attribute::AttrKind Kind) {
  getParent()->removeParamAttr(getArgNo(), Kind);
}

void Argument::removeAttrs(const AttributeMask &AM) {
  AttributeList AL = getParent()->getAttributes();
  AL = AL.removeParamAttributes(Parent->getContext(), getArgNo(), AM);
  getParent()->setAttributes(AL);
}

bool Argument::hasAttribute(Attribute::AttrKind Kind) const {
  return getParent()->hasParamAttribute(getArgNo(), Kind);
}

Attribute Argument::getAttribute(Attribute::AttrKind Kind) const {
  return getParent()->getParamAttribute(getArgNo(), Kind);
}

//===----------------------------------------------------------------------===//
// Helper Methods in Function
//===----------------------------------------------------------------------===//

LLVMContext &Function::getContext() const {
  return getType()->getContext();
}

unsigned Function::getInstructionCount() const {
  unsigned NumInstrs = 0;
  for (const BasicBlock &BB : BasicBlocks)
    NumInstrs += std::distance(BB.instructionsWithoutDebug().begin(),
                               BB.instructionsWithoutDebug().end());
  return NumInstrs;
}

Function *Function::Create(FunctionType *Ty, LinkageTypes Linkage,
                           const Twine &N, Module &M) {
  return Create(Ty, Linkage, M.getDataLayout().getProgramAddressSpace(), N, &M);
}

Function *Function::createWithDefaultAttr(FunctionType *Ty,
                                          LinkageTypes Linkage,
                                          unsigned AddrSpace, const Twine &N,
                                          Module *M) {
  auto *F = new Function(Ty, Linkage, AddrSpace, N, M);
  AttrBuilder B(F->getContext());
  UWTableKind UWTable = M->getUwtable();
  if (UWTable != UWTableKind::None)
    B.addUWTableAttr(UWTable);
  switch (M->getFramePointer()) {
  case FramePointerKind::None:
    // 0 ("none") is the default.
    break;
  case FramePointerKind::NonLeaf:
    B.addAttribute("frame-pointer", "non-leaf");
    break;
  case FramePointerKind::All:
    B.addAttribute("frame-pointer", "all");
    break;
  }
  if (M->getModuleFlag("function_return_thunk_extern"))
    B.addAttribute(Attribute::FnRetThunkExtern);
  F->addFnAttrs(B);
  return F;
}

void Function::removeFromParent() {
  getParent()->getFunctionList().remove(getIterator());
}

void Function::eraseFromParent() {
  getParent()->getFunctionList().erase(getIterator());
}

void Function::splice(Function::iterator ToIt, Function *FromF,
                      Function::iterator FromBeginIt,
                      Function::iterator FromEndIt) {
#ifdef EXPENSIVE_CHECKS
  // Check that FromBeginIt is before FromEndIt.
  auto FromFEnd = FromF->end();
  for (auto It = FromBeginIt; It != FromEndIt; ++It)
    assert(It != FromFEnd && "FromBeginIt not before FromEndIt!");
#endif // EXPENSIVE_CHECKS
  BasicBlocks.splice(ToIt, FromF->BasicBlocks, FromBeginIt, FromEndIt);
}

Function::iterator Function::erase(Function::iterator FromIt,
                                   Function::iterator ToIt) {
  return BasicBlocks.erase(FromIt, ToIt);
}

//===----------------------------------------------------------------------===//
// Function Implementation
//===----------------------------------------------------------------------===//

static unsigned computeAddrSpace(unsigned AddrSpace, Module *M) {
  // If AS == -1 and we are passed a valid module pointer we place the function
  // in the program address space. Otherwise we default to AS0.
  if (AddrSpace == static_cast<unsigned>(-1))
    return M ? M->getDataLayout().getProgramAddressSpace() : 0;
  return AddrSpace;
}

Function::Function(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace,
                   const Twine &name, Module *ParentModule)
    : GlobalObject(Ty, Value::FunctionVal,
                   OperandTraits<Function>::op_begin(this), 0, Linkage, name,
                   computeAddrSpace(AddrSpace, ParentModule)),
      NumArgs(Ty->getNumParams()) {
  assert(FunctionType::isValidReturnType(getReturnType()) &&
         "invalid return type");
  setGlobalObjectSubClassData(0);

  // We only need a symbol table for a function if the context keeps value names
  if (!getContext().shouldDiscardValueNames())
    SymTab = std::make_unique<ValueSymbolTable>(NonGlobalValueMaxNameSize);

  // If the function has arguments, mark them as lazily built.
  if (Ty->getNumParams())
    setValueSubclassData(1); // Set the "has lazy arguments" bit.

  if (ParentModule)
    ParentModule->getFunctionList().push_back(this);

  HasLLVMReservedName = getName().startswith("llvm.");
  // Ensure intrinsics have the right parameter attributes.
  // Note, the IntID field will have been set in Value::setName if this function
  // name is a valid intrinsic ID.
  if (IntID)
    setAttributes(Intrinsic::getAttributes(getContext(), IntID));
}

Function::~Function() {
  dropAllReferences(); // After this it is safe to delete instructions.

  // Delete all of the method arguments and unlink from symbol table...
  if (Arguments)
    clearArguments();

  // Remove the function from the on-the-side GC table.
  clearGC();
}

void Function::BuildLazyArguments() const {
  // Create the arguments vector, all arguments start out unnamed.
  auto *FT = getFunctionType();
  if (NumArgs > 0) {
    Arguments = std::allocator<Argument>().allocate(NumArgs);
    for (unsigned i = 0, e = NumArgs; i != e; ++i) {
      Type *ArgTy = FT->getParamType(i);
      assert(!ArgTy->isVoidTy() && "Cannot have void typed arguments!");
      new (Arguments + i) Argument(ArgTy, "", const_cast<Function *>(this), i);
    }
  }

  // Clear the lazy arguments bit.
  unsigned SDC = getSubclassDataFromValue();
  SDC &= ~(1 << 0);
  const_cast<Function*>(this)->setValueSubclassData(SDC);
  assert(!hasLazyArguments());
}

static MutableArrayRef<Argument> makeArgArray(Argument *Args, size_t Count) {
  return MutableArrayRef<Argument>(Args, Count);
}

bool Function::isConstrainedFPIntrinsic() const {
  switch (getIntrinsicID()) {
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
#undef INSTRUCTION
  default:
    return false;
  }
}

void Function::clearArguments() {
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    A.setName("");
    A.~Argument();
  }
  std::allocator<Argument>().deallocate(Arguments, NumArgs);
  Arguments = nullptr;
}

void Function::stealArgumentListFrom(Function &Src) {
  assert(isDeclaration() && "Expected no references to current arguments");

  // Drop the current arguments, if any, and set the lazy argument bit.
  if (!hasLazyArguments()) {
    assert(llvm::all_of(makeArgArray(Arguments, NumArgs),
                        [](const Argument &A) { return A.use_empty(); }) &&
           "Expected arguments to be unused in declaration");
    clearArguments();
    setValueSubclassData(getSubclassDataFromValue() | (1 << 0));
  }

  // Nothing to steal if Src has lazy arguments.
  if (Src.hasLazyArguments())
    return;

  // Steal arguments from Src, and fix the lazy argument bits.
  assert(arg_size() == Src.arg_size());
  Arguments = Src.Arguments;
  Src.Arguments = nullptr;
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    // FIXME: This does the work of transferNodesFromList inefficiently.
    SmallString<128> Name;
    if (A.hasName())
      Name = A.getName();
    if (!Name.empty())
      A.setName("");
    A.setParent(this);
    if (!Name.empty())
      A.setName(Name);
  }

  setValueSubclassData(getSubclassDataFromValue() & ~(1 << 0));
  assert(!hasLazyArguments());
  Src.setValueSubclassData(Src.getSubclassDataFromValue() | (1 << 0));
}

// dropAllReferences() - This function causes all the subinstructions to "let
// go" of all references that they are maintaining.  This allows one to
// 'delete' a whole class at a time, even though there may be circular
// references... first all references are dropped, and all use counts go to
// zero.  Then everything is deleted for real.  Note that no operations are
// valid on an object that has "dropped all references", except operator
// delete.
//
void Function::dropAllReferences() {
  setIsMaterializable(false);

  for (BasicBlock &BB : *this)
    BB.dropAllReferences();

  // Delete all basic blocks. They are now unused, except possibly by
  // blockaddresses, but BasicBlock's destructor takes care of those.
  while (!BasicBlocks.empty())
    BasicBlocks.begin()->eraseFromParent();

  // Drop uses of any optional data (real or placeholder).
  if (getNumOperands()) {
    User::dropAllReferences();
    setNumHungOffUseOperands(0);
    setValueSubclassData(getSubclassDataFromValue() & ~0xe);
  }

  // Metadata is stored in a side-table.
  clearMetadata();
}

void Function::addAttributeAtIndex(unsigned i, Attribute Attr) {
  AttributeSets = AttributeSets.addAttributeAtIndex(getContext(), i, Attr);
}

void Function::addFnAttr(Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.addFnAttribute(getContext(), Kind);
}

void Function::addFnAttr(StringRef Kind, StringRef Val) {
  AttributeSets = AttributeSets.addFnAttribute(getContext(), Kind, Val);
}

void Function::addFnAttr(Attribute Attr) {
  AttributeSets = AttributeSets.addFnAttribute(getContext(), Attr);
}

void Function::addFnAttrs(const AttrBuilder &Attrs) {
  AttributeSets = AttributeSets.addFnAttributes(getContext(), Attrs);
}

void Function::addRetAttr(Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.addRetAttribute(getContext(), Kind);
}

void Function::addRetAttr(Attribute Attr) {
  AttributeSets = AttributeSets.addRetAttribute(getContext(), Attr);
}

void Function::addRetAttrs(const AttrBuilder &Attrs) {
  AttributeSets = AttributeSets.addRetAttributes(getContext(), Attrs);
}

void Function::addParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.addParamAttribute(getContext(), ArgNo, Kind);
}

void Function::addParamAttr(unsigned ArgNo, Attribute Attr) {
  AttributeSets = AttributeSets.addParamAttribute(getContext(), ArgNo, Attr);
}

void Function::addParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeSets = AttributeSets.addParamAttributes(getContext(), ArgNo, Attrs);
}

void Function::removeAttributeAtIndex(unsigned i, Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.removeAttributeAtIndex(getContext(), i, Kind);
}

void Function::removeAttributeAtIndex(unsigned i, StringRef Kind) {
  AttributeSets = AttributeSets.removeAttributeAtIndex(getContext(), i, Kind);
}

void Function::removeFnAttr(Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.removeFnAttribute(getContext(), Kind);
}

void Function::removeFnAttr(StringRef Kind) {
  AttributeSets = AttributeSets.removeFnAttribute(getContext(), Kind);
}

void Function::removeFnAttrs(const AttributeMask &AM) {
  AttributeSets = AttributeSets.removeFnAttributes(getContext(), AM);
}

void Function::removeRetAttr(Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.removeRetAttribute(getContext(), Kind);
}

void Function::removeRetAttr(StringRef Kind) {
  AttributeSets = AttributeSets.removeRetAttribute(getContext(), Kind);
}

void Function::removeRetAttrs(const AttributeMask &Attrs) {
  AttributeSets = AttributeSets.removeRetAttributes(getContext(), Attrs);
}

void Function::removeParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeSets = AttributeSets.removeParamAttribute(getContext(), ArgNo, Kind);
}

void Function::removeParamAttr(unsigned ArgNo, StringRef Kind) {
  AttributeSets = AttributeSets.removeParamAttribute(getContext(), ArgNo, Kind);
}

void Function::removeParamAttrs(unsigned ArgNo, const AttributeMask &Attrs) {
  AttributeSets =
      AttributeSets.removeParamAttributes(getContext(), ArgNo, Attrs);
}

void Function::addDereferenceableParamAttr(unsigned ArgNo, uint64_t Bytes) {
  AttributeSets =
      AttributeSets.addDereferenceableParamAttr(getContext(), ArgNo, Bytes);
}

bool Function::hasFnAttribute(Attribute::AttrKind Kind) const {
  return AttributeSets.hasFnAttr(Kind);
}

bool Function::hasFnAttribute(StringRef Kind) const {
  return AttributeSets.hasFnAttr(Kind);
}

bool Function::hasRetAttribute(Attribute::AttrKind Kind) const {
  return AttributeSets.hasRetAttr(Kind);
}

bool Function::hasParamAttribute(unsigned ArgNo,
                                 Attribute::AttrKind Kind) const {
  return AttributeSets.hasParamAttr(ArgNo, Kind);
}

Attribute Function::getAttributeAtIndex(unsigned i,
                                        Attribute::AttrKind Kind) const {
  return AttributeSets.getAttributeAtIndex(i, Kind);
}

Attribute Function::getAttributeAtIndex(unsigned i, StringRef Kind) const {
  return AttributeSets.getAttributeAtIndex(i, Kind);
}

Attribute Function::getFnAttribute(Attribute::AttrKind Kind) const {
  return AttributeSets.getFnAttr(Kind);
}

Attribute Function::getFnAttribute(StringRef Kind) const {
  return AttributeSets.getFnAttr(Kind);
}

uint64_t Function::getFnAttributeAsParsedInteger(StringRef Name,
                                                 uint64_t Default) const {
  Attribute A = getFnAttribute(Name);
  uint64_t Result = Default;
  if (A.isStringAttribute()) {
    StringRef Str = A.getValueAsString();
    if (Str.getAsInteger(0, Result))
      getContext().emitError("cannot parse integer attribute " + Name);
  }

  return Result;
}

/// gets the specified attribute from the list of attributes.
Attribute Function::getParamAttribute(unsigned ArgNo,
                                      Attribute::AttrKind Kind) const {
  return AttributeSets.getParamAttr(ArgNo, Kind);
}

void Function::addDereferenceableOrNullParamAttr(unsigned ArgNo,
                                                 uint64_t Bytes) {
  AttributeSets = AttributeSets.addDereferenceableOrNullParamAttr(getContext(),
                                                                  ArgNo, Bytes);
}
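
// Note (illustrative): the "denormal-fp-math" and "denormal-fp-math-f32"
// string attributes hold a denormal mode such as "ieee" or
// "preserve-sign,preserve-sign" (output mode, input mode). getDenormalMode()
// prefers the f32-specific attribute and falls back to the generic one when
// it is absent.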
DenormalMode Function::getDenormalMode(const fltSemantics &FPType) const {
  if (&FPType == &APFloat::IEEEsingle()) {
    Attribute Attr = getFnAttribute("denormal-fp-math-f32");
    StringRef Val = Attr.getValueAsString();
    if (!Val.empty())
      return parseDenormalFPAttribute(Val);

    // If the f32 variant of the attribute isn't specified, try to use the
    // generic one.
  }

  Attribute Attr = getFnAttribute("denormal-fp-math");
  return parseDenormalFPAttribute(Attr.getValueAsString());
}

const std::string &Function::getGC() const {
  assert(hasGC() && "Function has no collector");
  return getContext().getGC(*this);
}

void Function::setGC(std::string Str) {
  setValueSubclassDataBit(14, !Str.empty());
  getContext().setGC(*this, std::move(Str));
}

void Function::clearGC() {
  if (!hasGC())
    return;
  getContext().deleteGC(*this);
  setValueSubclassDataBit(14, false);
}

bool Function::hasStackProtectorFnAttr() const {
  return hasFnAttribute(Attribute::StackProtect) ||
         hasFnAttribute(Attribute::StackProtectStrong) ||
         hasFnAttribute(Attribute::StackProtectReq);
}

/// Copy all additional attributes (those not needed to create a Function) from
/// the Function Src to this one.
void Function::copyAttributesFrom(const Function *Src) {
  GlobalObject::copyAttributesFrom(Src);
  setCallingConv(Src->getCallingConv());
  setAttributes(Src->getAttributes());
  if (Src->hasGC())
    setGC(Src->getGC());
  else
    clearGC();
  if (Src->hasPersonalityFn())
    setPersonalityFn(Src->getPersonalityFn());
  if (Src->hasPrefixData())
    setPrefixData(Src->getPrefixData());
  if (Src->hasPrologueData())
    setPrologueData(Src->getPrologueData());
}

MemoryEffects Function::getMemoryEffects() const {
  return getAttributes().getMemoryEffects();
}
void Function::setMemoryEffects(MemoryEffects ME) {
  addFnAttr(Attribute::getWithMemoryEffects(getContext(), ME));
}

/// Determine if the function does not access memory.
bool Function::doesNotAccessMemory() const {
  return getMemoryEffects().doesNotAccessMemory();
}
void Function::setDoesNotAccessMemory() {
  setMemoryEffects(MemoryEffects::none());
}

/// Determine if the function does not access or only reads memory.
bool Function::onlyReadsMemory() const {
  return getMemoryEffects().onlyReadsMemory();
}
void Function::setOnlyReadsMemory() {
  setMemoryEffects(getMemoryEffects() & MemoryEffects::readOnly());
}

/// Determine if the function does not access or only writes memory.
bool Function::onlyWritesMemory() const {
  return getMemoryEffects().onlyWritesMemory();
}
void Function::setOnlyWritesMemory() {
  setMemoryEffects(getMemoryEffects() & MemoryEffects::writeOnly());
}

/// Determine if the call can access memory only using pointers based
/// on its arguments.
bool Function::onlyAccessesArgMemory() const {
  return getMemoryEffects().onlyAccessesArgPointees();
}
void Function::setOnlyAccessesArgMemory() {
  setMemoryEffects(getMemoryEffects() & MemoryEffects::argMemOnly());
}

/// Determine if the function may only access memory that is
/// inaccessible from the IR.
bool Function::onlyAccessesInaccessibleMemory() const {
  return getMemoryEffects().onlyAccessesInaccessibleMem();
}
void Function::setOnlyAccessesInaccessibleMemory() {
  setMemoryEffects(getMemoryEffects() & MemoryEffects::inaccessibleMemOnly());
}

/// Determine if the function may only access memory that is
/// either inaccessible from the IR or pointed to by its arguments.
bool Function::onlyAccessesInaccessibleMemOrArgMem() const {
  return getMemoryEffects().onlyAccessesInaccessibleOrArgMem();
}
void Function::setOnlyAccessesInaccessibleMemOrArgMem() {
  setMemoryEffects(getMemoryEffects() &
                   MemoryEffects::inaccessibleOrArgMemOnly());
}

/// Table of string intrinsic names indexed by enum value.
static const char * const IntrinsicNameTable[] = {
  "not_intrinsic",
#define GET_INTRINSIC_NAME_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_NAME_TABLE
};

/// Table of per-target intrinsic name tables.
#define GET_INTRINSIC_TARGET_DATA
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_TARGET_DATA

bool Function::isTargetIntrinsic(Intrinsic::ID IID) {
  return IID > TargetInfos[0].Count;
}

bool Function::isTargetIntrinsic() const {
  return isTargetIntrinsic(IntID);
}

/// Find the segment of \c IntrinsicNameTable for intrinsics with the same
/// target as \c Name, or the generic table if \c Name is not target specific.
///
/// Returns the relevant slice of \c IntrinsicNameTable
static ArrayRef<const char *> findTargetSubtable(StringRef Name) {
  assert(Name.startswith("llvm."));

  ArrayRef<IntrinsicTargetInfo> Targets(TargetInfos);
  // Drop "llvm." and take the first dotted component. That will be the target
  // if this is target specific.
  StringRef Target = Name.drop_front(5).split('.').first;
  auto It = partition_point(
      Targets, [=](const IntrinsicTargetInfo &TI) { return TI.Name < Target; });
  // We've either found the target or just fall back to the generic set, which
  // is always first.
  const auto &TI = It != Targets.end() && It->Name == Target ? *It : Targets[0];
  return makeArrayRef(&IntrinsicNameTable[1] + TI.Offset, TI.Count);
}

/// This does the actual lookup of an intrinsic ID which
/// matches the given function name.
Intrinsic::ID Function::lookupIntrinsicID(StringRef Name) {
  ArrayRef<const char *> NameTable = findTargetSubtable(Name);
  int Idx = Intrinsic::lookupLLVMIntrinsicByName(NameTable, Name);
  if (Idx == -1)
    return Intrinsic::not_intrinsic;

  // Intrinsic IDs correspond to the location in IntrinsicNameTable, but we have
  // an index into a sub-table.
  int Adjust = NameTable.data() - IntrinsicNameTable;
  Intrinsic::ID ID = static_cast<Intrinsic::ID>(Idx + Adjust);

  // If the intrinsic is not overloaded, require an exact match. If it is
  // overloaded, require either exact or prefix match.
  const auto MatchSize = strlen(NameTable[Idx]);
  assert(Name.size() >= MatchSize && "Expected either exact or prefix match");
  bool IsExactMatch = Name.size() == MatchSize;
  return IsExactMatch || Intrinsic::isOverloaded(ID) ? ID
                                                     : Intrinsic::not_intrinsic;
}

void Function::recalculateIntrinsicID() {
  StringRef Name = getName();
  if (!Name.startswith("llvm.")) {
    HasLLVMReservedName = false;
    IntID = Intrinsic::not_intrinsic;
    return;
  }
  HasLLVMReservedName = true;
  IntID = lookupIntrinsicID(Name);
}

/// Returns a stable mangling for the type specified for use in the name
/// mangling scheme used by 'any' types in intrinsic signatures.  The mangling
/// of named types is simply their name.  Manglings for unnamed types consist
/// of a prefix ('p' for pointers, 'a' for arrays, 'f_' for functions)
/// combined with the mangling of their component types.  A vararg function
/// type will have a suffix of 'vararg'.  Since function types can contain
/// other function types, we close a function type mangling with suffix 'f'
/// which can't be confused with its prefix.  This ensures we don't have
/// collisions between two unrelated function types.  Otherwise, you might
/// parse ffXX as f(fXX) or f(fX)X.  (X is a placeholder for any other type.)
/// The HasUnnamedType boolean is set if an unnamed type was encountered,
/// indicating that extra care must be taken to ensure a unique name.
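///
/// For example (illustrative): an i32 mangles to "i32", a <4 x i32> vector to
/// "v4i32", and an opaque pointer in address space 0 to "p0", so an intrinsic
/// overloaded on <4 x i32> gets a name suffix like ".v4i32".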
static std::string getMangledTypeStr(Type *Ty, bool &HasUnnamedType) {
  std::string Result;
  if (PointerType *PTyp = dyn_cast<PointerType>(Ty)) {
    Result += "p" + utostr(PTyp->getAddressSpace());
    // An opaque pointer doesn't have pointee type information, so we only
    // mangle the address space for it.
    if (!PTyp->isOpaque())
      Result += getMangledTypeStr(PTyp->getNonOpaquePointerElementType(),
                                  HasUnnamedType);
  } else if (ArrayType *ATyp = dyn_cast<ArrayType>(Ty)) {
    Result += "a" + utostr(ATyp->getNumElements()) +
              getMangledTypeStr(ATyp->getElementType(), HasUnnamedType);
  } else if (StructType *STyp = dyn_cast<StructType>(Ty)) {
    if (!STyp->isLiteral()) {
      Result += "s_";
      if (STyp->hasName())
        Result += STyp->getName();
      else
        HasUnnamedType = true;
    } else {
      Result += "sl_";
      for (auto *Elem : STyp->elements())
        Result += getMangledTypeStr(Elem, HasUnnamedType);
    }
    // Ensure nested structs are distinguishable.
    Result += "s";
  } else if (FunctionType *FT = dyn_cast<FunctionType>(Ty)) {
    Result += "f_" + getMangledTypeStr(FT->getReturnType(), HasUnnamedType);
    for (size_t i = 0; i < FT->getNumParams(); i++)
      Result += getMangledTypeStr(FT->getParamType(i), HasUnnamedType);
    if (FT->isVarArg())
      Result += "vararg";
    // Ensure nested function types are distinguishable.
    Result += "f";
  } else if (VectorType *VTy = dyn_cast<VectorType>(Ty)) {
    ElementCount EC = VTy->getElementCount();
    if (EC.isScalable())
      Result += "nx";
    Result += "v" + utostr(EC.getKnownMinValue()) +
              getMangledTypeStr(VTy->getElementType(), HasUnnamedType);
  } else if (TargetExtType *TETy = dyn_cast<TargetExtType>(Ty)) {
    Result += "t";
    Result += TETy->getName();
    for (Type *ParamTy : TETy->type_params())
      Result += "_" + getMangledTypeStr(ParamTy, HasUnnamedType);
    for (unsigned IntParam : TETy->int_params())
      Result += "_" + utostr(IntParam);
    // Ensure nested target extension types are distinguishable.
    Result += "t";
  } else if (Ty) {
    switch (Ty->getTypeID()) {
    default: llvm_unreachable("Unhandled type");
    case Type::VoidTyID:      Result += "isVoid";   break;
    case Type::MetadataTyID:  Result += "Metadata"; break;
    case Type::HalfTyID:      Result += "f16";      break;
    case Type::BFloatTyID:    Result += "bf16";     break;
    case Type::FloatTyID:     Result += "f32";      break;
    case Type::DoubleTyID:    Result += "f64";      break;
    case Type::X86_FP80TyID:  Result += "f80";      break;
    case Type::FP128TyID:     Result += "f128";     break;
    case Type::PPC_FP128TyID: Result += "ppcf128";  break;
    case Type::X86_MMXTyID:   Result += "x86mmx";   break;
    case Type::X86_AMXTyID:   Result += "x86amx";   break;
    case Type::IntegerTyID:
      Result += "i" + utostr(cast<IntegerType>(Ty)->getBitWidth());
      break;
    }
  }
  return Result;
}

StringRef Intrinsic::getBaseName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  return IntrinsicNameTable[id];
}

StringRef Intrinsic::getName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert(!Intrinsic::isOverloaded(id) &&
         "This version of getName does not support overloading");
  return getBaseName(id);
}

static std::string getIntrinsicNameImpl(Intrinsic::ID Id, ArrayRef<Type *> Tys,
                                        Module *M, FunctionType *FT,
                                        bool EarlyModuleCheck) {

  assert(Id < Intrinsic::num_intrinsics && "Invalid intrinsic ID!");
  assert((Tys.empty() || Intrinsic::isOverloaded(Id)) &&
         "This version of getName is for overloaded intrinsics only");
  (void)EarlyModuleCheck;
  assert((!EarlyModuleCheck || M ||
          !any_of(Tys, [](Type *T) { return isa<PointerType>(T); })) &&
         "Intrinsic overloading on pointer types needs to provide a Module");
  bool HasUnnamedType = false;
  std::string Result(Intrinsic::getBaseName(Id));
  for (Type *Ty : Tys)
    Result += "." + getMangledTypeStr(Ty, HasUnnamedType);
  if (HasUnnamedType) {
    assert(M && "unnamed types need a module");
    if (!FT)
      FT = Intrinsic::getType(M->getContext(), Id, Tys);
    else
      assert((FT == Intrinsic::getType(M->getContext(), Id, Tys)) &&
             "Provided FunctionType must match arguments");
    return M->getUniqueIntrinsicName(Result, Id, FT);
  }
  return Result;
}
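
// Usage note (illustrative): for an overloaded intrinsic such as llvm.umax
// instantiated at i32, Intrinsic::getName(Intrinsic::umax, {I32Ty}, M) (where
// I32Ty is the i32 Type in the module's context) yields "llvm.umax.i32".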
std::string Intrinsic::getName(ID Id, ArrayRef<Type *> Tys, Module *M,
                               FunctionType *FT) {
  assert(M && "We need to have a Module");
  return getIntrinsicNameImpl(Id, Tys, M, FT, true);
}

std::string Intrinsic::getNameNoUnnamedTypes(ID Id, ArrayRef<Type *> Tys) {
  return getIntrinsicNameImpl(Id, Tys, nullptr, nullptr, false);
}

/// IIT_Info - These are enumerators that describe the entries returned by the
/// getIntrinsicInfoTableEntries function.
///
/// NOTE: This must be kept in sync with the copy in TblGen/IntrinsicEmitter!
enum IIT_Info {
  // Common values should be encoded with 0-15.
  IIT_Done = 0,
  IIT_I1 = 1,
  IIT_I8 = 2,
  IIT_I16 = 3,
  IIT_I32 = 4,
  IIT_I64 = 5,
  IIT_F16 = 6,
  IIT_F32 = 7,
  IIT_F64 = 8,
  IIT_V2 = 9,
  IIT_V4 = 10,
  IIT_V8 = 11,
  IIT_V16 = 12,
  IIT_V32 = 13,
  IIT_PTR = 14,
  IIT_ARG = 15,

  // Values from 16+ are only encodable with the inefficient encoding.
  IIT_V64 = 16,
  IIT_MMX = 17,
  IIT_TOKEN = 18,
  IIT_METADATA = 19,
  IIT_EMPTYSTRUCT = 20,
  IIT_STRUCT2 = 21,
  IIT_STRUCT3 = 22,
  IIT_STRUCT4 = 23,
  IIT_STRUCT5 = 24,
  IIT_EXTEND_ARG = 25,
  IIT_TRUNC_ARG = 26,
  IIT_ANYPTR = 27,
  IIT_V1 = 28,
  IIT_VARARG = 29,
  IIT_HALF_VEC_ARG = 30,
  IIT_SAME_VEC_WIDTH_ARG = 31,
  IIT_PTR_TO_ARG = 32,
  IIT_PTR_TO_ELT = 33,
  IIT_VEC_OF_ANYPTRS_TO_ELT = 34,
  IIT_I128 = 35,
  IIT_V512 = 36,
  IIT_V1024 = 37,
  IIT_STRUCT6 = 38,
  IIT_STRUCT7 = 39,
  IIT_STRUCT8 = 40,
  IIT_F128 = 41,
  IIT_VEC_ELEMENT = 42,
  IIT_SCALABLE_VEC = 43,
  IIT_SUBDIVIDE2_ARG = 44,
  IIT_SUBDIVIDE4_ARG = 45,
  IIT_VEC_OF_BITCASTS_TO_INT = 46,
  IIT_V128 = 47,
  IIT_BF16 = 48,
  IIT_STRUCT9 = 49,
  IIT_V256 = 50,
  IIT_AMX = 51,
  IIT_PPCF128 = 52,
  IIT_V3 = 53,
  IIT_EXTERNREF = 54,
  IIT_FUNCREF = 55,
  IIT_ANYPTR_TO_ELT = 56,
  IIT_I2 = 57,
  IIT_I4 = 58,
};

static void DecodeIITType(unsigned &NextElt, ArrayRef<unsigned char> Infos,
                          IIT_Info LastInfo,
                          SmallVectorImpl<Intrinsic::IITDescriptor> &OutputTable) {
  using namespace Intrinsic;

  bool IsScalableVector = (LastInfo == IIT_SCALABLE_VEC);

  IIT_Info Info = IIT_Info(Infos[NextElt++]);
  unsigned StructElts = 2;

  switch (Info) {
  case IIT_Done:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Void, 0));
    return;
  case IIT_VARARG:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VarArg, 0));
    return;
  case IIT_MMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::MMX, 0));
    return;
  case IIT_AMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AMX, 0));
    return;
  case IIT_TOKEN:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Token, 0));
    return;
  case IIT_METADATA:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Metadata, 0));
    return;
  case IIT_F16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Half, 0));
    return;
  case IIT_BF16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::BFloat, 0));
    return;
  case IIT_F32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Float, 0));
    return;
  case IIT_F64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Double, 0));
    return;
  case IIT_F128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Quad, 0));
    return;
  case IIT_PPCF128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PPCQuad, 0));
    return;
  case IIT_I1:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 1));
    return;
  case IIT_I2:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 2));
    return;
  case IIT_I4:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 4));
    return;
  case IIT_I8:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8));
    return;
  case IIT_I16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 16));
    return;
  case IIT_I32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 32));
    return;
  case IIT_I64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 64));
    return;
  case IIT_I128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 128));
    return;
  case IIT_V1:
    OutputTable.push_back(IITDescriptor::getVector(1, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V2:
    OutputTable.push_back(IITDescriptor::getVector(2, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V3:
    OutputTable.push_back(IITDescriptor::getVector(3, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V4:
    OutputTable.push_back(IITDescriptor::getVector(4, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V8:
    OutputTable.push_back(IITDescriptor::getVector(8, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V16:
    OutputTable.push_back(IITDescriptor::getVector(16, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V32:
    OutputTable.push_back(IITDescriptor::getVector(32, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V64:
    OutputTable.push_back(IITDescriptor::getVector(64, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V128:
    OutputTable.push_back(IITDescriptor::getVector(128, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V256:
    OutputTable.push_back(IITDescriptor::getVector(256, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V512:
    OutputTable.push_back(IITDescriptor::getVector(512, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V1024:
    OutputTable.push_back(IITDescriptor::getVector(1024, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_EXTERNREF:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 10));
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0));
    return;
  case IIT_FUNCREF:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 20));
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8));
    return;
  case IIT_PTR:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 0));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_ANYPTR: { // [ANYPTR addrspace, subtype]
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer,
                                             Infos[NextElt++]));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Argument, ArgInfo));
    return;
  }
  case IIT_EXTEND_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::ExtendArgument,
                                             ArgInfo));
    return;
  }
  case IIT_TRUNC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::TruncArgument,
                                             ArgInfo));
    return;
  }
  case IIT_HALF_VEC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::HalfVecArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SAME_VEC_WIDTH_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::SameVecWidthArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ELT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToElt, ArgInfo));
    return;
  }
  case IIT_ANYPTR_TO_ELT: {
    unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::AnyPtrToElt, ArgNo, RefNo));
    return;
  }
  case IIT_VEC_OF_ANYPTRS_TO_ELT: {
    unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecOfAnyPtrsToElt, ArgNo, RefNo));
    return;
  }
  case IIT_EMPTYSTRUCT:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0));
    return;
  case IIT_STRUCT9: ++StructElts; [[fallthrough]];
  case IIT_STRUCT8: ++StructElts; [[fallthrough]];
  case IIT_STRUCT7: ++StructElts; [[fallthrough]];
  case IIT_STRUCT6: ++StructElts; [[fallthrough]];
  case IIT_STRUCT5: ++StructElts; [[fallthrough]];
  case IIT_STRUCT4: ++StructElts; [[fallthrough]];
  case IIT_STRUCT3: ++StructElts; [[fallthrough]];
  case IIT_STRUCT2: {
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, StructElts));

    for (unsigned i = 0; i != StructElts; ++i)
      DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_SUBDIVIDE2_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide2Argument,
                                             ArgInfo));
    return;
  }
  case IIT_SUBDIVIDE4_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide4Argument,
                                             ArgInfo));
    return;
  }
  case IIT_VEC_ELEMENT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecElementArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SCALABLE_VEC: {
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_VEC_OF_BITCASTS_TO_INT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecOfBitcastsToInt,
                                             ArgInfo));
    return;
  }
  }
  llvm_unreachable("unhandled");
}

#define GET_INTRINSIC_GENERATOR_GLOBAL
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_GENERATOR_GLOBAL
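
// Illustrative note: each IIT_Info value at or below 15 fits in a single 4-bit
// nibble of the 32-bit IIT_Table entry (e.g. IIT_I32 == 4), so short,
// fixed-type signatures are decoded directly from that word below; longer or
// high-valued encodings are instead stored in IIT_LongEncodingTable.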
void Intrinsic::getIntrinsicInfoTableEntries(ID id,
                                             SmallVectorImpl<IITDescriptor> &T){
  // Check to see if the intrinsic's type was expressible by the table.
  unsigned TableVal = IIT_Table[id-1];

  // Decode the TableVal into an array of IITValues.
  SmallVector<unsigned char, 8> IITValues;
  ArrayRef<unsigned char> IITEntries;
  unsigned NextElt = 0;
  if ((TableVal >> 31) != 0) {
    // This is an offset into the IIT_LongEncodingTable.
    IITEntries = IIT_LongEncodingTable;

    // Strip sentinel bit.
    NextElt = (TableVal << 1) >> 1;
  } else {
    // Decode the TableVal into an array of IITValues.  If the entry was encoded
    // into a single word in the table itself, decode it now.
    do {
      IITValues.push_back(TableVal & 0xF);
      TableVal >>= 4;
    } while (TableVal);

    IITEntries = IITValues;
    NextElt = 0;
  }

  // Okay, decode the table into the output vector of IITDescriptors.
  DecodeIITType(NextElt, IITEntries, IIT_Done, T);
  while (NextElt != IITEntries.size() && IITEntries[NextElt] != 0)
    DecodeIITType(NextElt, IITEntries, IIT_Done, T);
}

static Type *DecodeFixedType(ArrayRef<Intrinsic::IITDescriptor> &Infos,
                             ArrayRef<Type*> Tys, LLVMContext &Context) {
  using namespace Intrinsic;

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return Type::getVoidTy(Context);
  case IITDescriptor::VarArg: return Type::getVoidTy(Context);
  case IITDescriptor::MMX: return Type::getX86_MMXTy(Context);
  case IITDescriptor::AMX: return Type::getX86_AMXTy(Context);
  case IITDescriptor::Token: return Type::getTokenTy(Context);
  case IITDescriptor::Metadata: return Type::getMetadataTy(Context);
  case IITDescriptor::Half: return Type::getHalfTy(Context);
  case IITDescriptor::BFloat: return Type::getBFloatTy(Context);
  case IITDescriptor::Float: return Type::getFloatTy(Context);
  case IITDescriptor::Double: return Type::getDoubleTy(Context);
  case IITDescriptor::Quad: return Type::getFP128Ty(Context);
  case IITDescriptor::PPCQuad: return Type::getPPC_FP128Ty(Context);

  case IITDescriptor::Integer:
    return IntegerType::get(Context, D.Integer_Width);
  case IITDescriptor::Vector:
    return VectorType::get(DecodeFixedType(Infos, Tys, Context),
                           D.Vector_Width);
  case IITDescriptor::Pointer:
    return PointerType::get(DecodeFixedType(Infos, Tys, Context),
                            D.Pointer_AddressSpace);
  case IITDescriptor::Struct: {
    SmallVector<Type *, 8> Elts;
    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      Elts.push_back(DecodeFixedType(Infos, Tys, Context));
    return StructType::get(Context, Elts);
  }
  case IITDescriptor::Argument:
    return Tys[D.getArgumentNumber()];
  case IITDescriptor::ExtendArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getExtendedElementVectorType(VTy);

    return IntegerType::get(Context, 2 * cast<IntegerType>(Ty)->getBitWidth());
  }
  case IITDescriptor::TruncArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getTruncatedElementVectorType(VTy);

    IntegerType *ITy = cast<IntegerType>(Ty);
    assert(ITy->getBitWidth() % 2 == 0);
    return IntegerType::get(Context, ITy->getBitWidth() / 2);
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
    return VectorType::getSubdividedVectorType(VTy, SubDivs);
  }
  case IITDescriptor::HalfVecArgument:
    return VectorType::getHalfElementsVectorType(cast<VectorType>(
                                                  Tys[D.getArgumentNumber()]));
  case IITDescriptor::SameVecWidthArgument: {
    Type *EltTy = DecodeFixedType(Infos, Tys, Context);
    Type *Ty = Tys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::get(EltTy, VTy->getElementCount());
    return EltTy;
  }
  case IITDescriptor::PtrToArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    return PointerType::getUnqual(Ty);
  }
  case IITDescriptor::PtrToElt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    if (!VTy)
      llvm_unreachable("Expected an argument of Vector Type");
    Type *EltTy = VTy->getElementType();
    return PointerType::getUnqual(EltTy);
  }
  case IITDescriptor::VecElementArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VTy->getElementType();
    llvm_unreachable("Expected an argument of Vector Type");
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    return VectorType::getInteger(VTy);
  }
  case IITDescriptor::VecOfAnyPtrsToElt:
    // Return the overloaded type (which determines the pointer's address
    // space).
    return Tys[D.getOverloadArgNumber()];
  case IITDescriptor::AnyPtrToElt:
    // Return the overloaded type (which determines the pointer's address
    // space).
    return Tys[D.getOverloadArgNumber()];
  }
  llvm_unreachable("unhandled");
}

FunctionType *Intrinsic::getType(LLVMContext &Context,
                                 ID id, ArrayRef<Type*> Tys) {
  SmallVector<IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(id, Table);

  ArrayRef<IITDescriptor> TableRef = Table;
  Type *ResultTy = DecodeFixedType(TableRef, Tys, Context);

  SmallVector<Type*, 8> ArgTys;
  while (!TableRef.empty())
    ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context));

  // DecodeFixedType returns Void for IITDescriptor::Void and
  // IITDescriptor::VarArg. If we see a void type as the type of the last
  // argument, this is a vararg intrinsic.
  if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) {
    ArgTys.pop_back();
    return FunctionType::get(ResultTy, ArgTys, true);
  }
  return FunctionType::get(ResultTy, ArgTys, false);
}

bool Intrinsic::isOverloaded(ID id) {
#define GET_INTRINSIC_OVERLOAD_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_OVERLOAD_TABLE
}

bool Intrinsic::isLeaf(ID id) {
  switch (id) {
  default:
    return true;

  case Intrinsic::experimental_gc_statepoint:
  case Intrinsic::experimental_patchpoint_void:
  case Intrinsic::experimental_patchpoint_i64:
    return false;
  }
}

/// This defines the "Intrinsic::getAttributes(ID id)" method.
#define GET_INTRINSIC_ATTRIBUTES
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_ATTRIBUTES
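
// Usage note (illustrative): callers typically obtain an intrinsic declaration
// with, e.g., Intrinsic::getDeclaration(M, Intrinsic::umax, {I32Ty}) for an
// overloaded intrinsic (where I32Ty is the i32 Type), or with no type list for
// a non-overloaded one; the declaration is created in the module on first use.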
1 : 2; 1429 return VectorType::getSubdividedVectorType(VTy, SubDivs); 1430 } 1431 case IITDescriptor::HalfVecArgument: 1432 return VectorType::getHalfElementsVectorType(cast<VectorType>( 1433 Tys[D.getArgumentNumber()])); 1434 case IITDescriptor::SameVecWidthArgument: { 1435 Type *EltTy = DecodeFixedType(Infos, Tys, Context); 1436 Type *Ty = Tys[D.getArgumentNumber()]; 1437 if (auto *VTy = dyn_cast<VectorType>(Ty)) 1438 return VectorType::get(EltTy, VTy->getElementCount()); 1439 return EltTy; 1440 } 1441 case IITDescriptor::PtrToArgument: { 1442 Type *Ty = Tys[D.getArgumentNumber()]; 1443 return PointerType::getUnqual(Ty); 1444 } 1445 case IITDescriptor::PtrToElt: { 1446 Type *Ty = Tys[D.getArgumentNumber()]; 1447 VectorType *VTy = dyn_cast<VectorType>(Ty); 1448 if (!VTy) 1449 llvm_unreachable("Expected an argument of Vector Type"); 1450 Type *EltTy = VTy->getElementType(); 1451 return PointerType::getUnqual(EltTy); 1452 } 1453 case IITDescriptor::VecElementArgument: { 1454 Type *Ty = Tys[D.getArgumentNumber()]; 1455 if (VectorType *VTy = dyn_cast<VectorType>(Ty)) 1456 return VTy->getElementType(); 1457 llvm_unreachable("Expected an argument of Vector Type"); 1458 } 1459 case IITDescriptor::VecOfBitcastsToInt: { 1460 Type *Ty = Tys[D.getArgumentNumber()]; 1461 VectorType *VTy = dyn_cast<VectorType>(Ty); 1462 assert(VTy && "Expected an argument of Vector Type"); 1463 return VectorType::getInteger(VTy); 1464 } 1465 case IITDescriptor::VecOfAnyPtrsToElt: 1466 // Return the overloaded type (which determines the pointers address space) 1467 return Tys[D.getOverloadArgNumber()]; 1468 case IITDescriptor::AnyPtrToElt: 1469 // Return the overloaded type (which determines the pointers address space) 1470 return Tys[D.getOverloadArgNumber()]; 1471 } 1472 llvm_unreachable("unhandled"); 1473 } 1474 1475 FunctionType *Intrinsic::getType(LLVMContext &Context, 1476 ID id, ArrayRef<Type*> Tys) { 1477 SmallVector<IITDescriptor, 8> Table; 1478 getIntrinsicInfoTableEntries(id, Table); 1479 1480 ArrayRef<IITDescriptor> TableRef = Table; 1481 Type *ResultTy = DecodeFixedType(TableRef, Tys, Context); 1482 1483 SmallVector<Type*, 8> ArgTys; 1484 while (!TableRef.empty()) 1485 ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context)); 1486 1487 // DecodeFixedType returns Void for IITDescriptor::Void and IITDescriptor::VarArg 1488 // If we see void type as the type of the last argument, it is vararg intrinsic 1489 if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) { 1490 ArgTys.pop_back(); 1491 return FunctionType::get(ResultTy, ArgTys, true); 1492 } 1493 return FunctionType::get(ResultTy, ArgTys, false); 1494 } 1495 1496 bool Intrinsic::isOverloaded(ID id) { 1497 #define GET_INTRINSIC_OVERLOAD_TABLE 1498 #include "llvm/IR/IntrinsicImpl.inc" 1499 #undef GET_INTRINSIC_OVERLOAD_TABLE 1500 } 1501 1502 bool Intrinsic::isLeaf(ID id) { 1503 switch (id) { 1504 default: 1505 return true; 1506 1507 case Intrinsic::experimental_gc_statepoint: 1508 case Intrinsic::experimental_patchpoint_void: 1509 case Intrinsic::experimental_patchpoint_i64: 1510 return false; 1511 } 1512 } 1513 1514 /// This defines the "Intrinsic::getAttributes(ID id)" method. 1515 #define GET_INTRINSIC_ATTRIBUTES 1516 #include "llvm/IR/IntrinsicImpl.inc" 1517 #undef GET_INTRINSIC_ATTRIBUTES 1518 1519 Function *Intrinsic::getDeclaration(Module *M, ID id, ArrayRef<Type*> Tys) { 1520 // There can never be multiple globals with the same name of different types, 1521 // because intrinsics must be a specific type. 
1522 auto *FT = getType(M->getContext(), id, Tys); 1523 return cast<Function>( 1524 M->getOrInsertFunction( 1525 Tys.empty() ? getName(id) : getName(id, Tys, M, FT), FT) 1526 .getCallee()); 1527 } 1528 1529 // This defines the "Intrinsic::getIntrinsicForClangBuiltin()" method. 1530 #define GET_LLVM_INTRINSIC_FOR_CLANG_BUILTIN 1531 #include "llvm/IR/IntrinsicImpl.inc" 1532 #undef GET_LLVM_INTRINSIC_FOR_CLANG_BUILTIN 1533 1534 // This defines the "Intrinsic::getIntrinsicForMSBuiltin()" method. 1535 #define GET_LLVM_INTRINSIC_FOR_MS_BUILTIN 1536 #include "llvm/IR/IntrinsicImpl.inc" 1537 #undef GET_LLVM_INTRINSIC_FOR_MS_BUILTIN 1538 1539 using DeferredIntrinsicMatchPair = 1540 std::pair<Type *, ArrayRef<Intrinsic::IITDescriptor>>; 1541 1542 static bool matchIntrinsicType( 1543 Type *Ty, ArrayRef<Intrinsic::IITDescriptor> &Infos, 1544 SmallVectorImpl<Type *> &ArgTys, 1545 SmallVectorImpl<DeferredIntrinsicMatchPair> &DeferredChecks, 1546 bool IsDeferredCheck) { 1547 using namespace Intrinsic; 1548 1549 // If we ran out of descriptors, there are too many arguments. 1550 if (Infos.empty()) return true; 1551 1552 // Do this before slicing off the 'front' part 1553 auto InfosRef = Infos; 1554 auto DeferCheck = [&DeferredChecks, &InfosRef](Type *T) { 1555 DeferredChecks.emplace_back(T, InfosRef); 1556 return false; 1557 }; 1558 1559 IITDescriptor D = Infos.front(); 1560 Infos = Infos.slice(1); 1561 1562 switch (D.Kind) { 1563 case IITDescriptor::Void: return !Ty->isVoidTy(); 1564 case IITDescriptor::VarArg: return true; 1565 case IITDescriptor::MMX: return !Ty->isX86_MMXTy(); 1566 case IITDescriptor::AMX: return !Ty->isX86_AMXTy(); 1567 case IITDescriptor::Token: return !Ty->isTokenTy(); 1568 case IITDescriptor::Metadata: return !Ty->isMetadataTy(); 1569 case IITDescriptor::Half: return !Ty->isHalfTy(); 1570 case IITDescriptor::BFloat: return !Ty->isBFloatTy(); 1571 case IITDescriptor::Float: return !Ty->isFloatTy(); 1572 case IITDescriptor::Double: return !Ty->isDoubleTy(); 1573 case IITDescriptor::Quad: return !Ty->isFP128Ty(); 1574 case IITDescriptor::PPCQuad: return !Ty->isPPC_FP128Ty(); 1575 case IITDescriptor::Integer: return !Ty->isIntegerTy(D.Integer_Width); 1576 case IITDescriptor::Vector: { 1577 VectorType *VT = dyn_cast<VectorType>(Ty); 1578 return !VT || VT->getElementCount() != D.Vector_Width || 1579 matchIntrinsicType(VT->getElementType(), Infos, ArgTys, 1580 DeferredChecks, IsDeferredCheck); 1581 } 1582 case IITDescriptor::Pointer: { 1583 PointerType *PT = dyn_cast<PointerType>(Ty); 1584 if (!PT || PT->getAddressSpace() != D.Pointer_AddressSpace) 1585 return true; 1586 if (!PT->isOpaque()) { 1587 /* Manually consume a pointer to empty struct descriptor, which is 1588 * used for externref. We don't want to enforce that the struct is 1589 * anonymous in this case. (This renders externref intrinsics 1590 * non-unique, but this will go away with opaque pointers anyway.) */ 1591 if (Infos.front().Kind == IITDescriptor::Struct && 1592 Infos.front().Struct_NumElements == 0) { 1593 Infos = Infos.slice(1); 1594 return false; 1595 } 1596 return matchIntrinsicType(PT->getNonOpaquePointerElementType(), Infos, 1597 ArgTys, DeferredChecks, IsDeferredCheck); 1598 } 1599 // Consume IIT descriptors relating to the pointer element type. 1600 // FIXME: Intrinsic type matching of nested single value types or even 1601 // aggregates doesn't work properly with opaque pointers but hopefully 1602 // doesn't happen in practice. 
    while (Infos.front().Kind == IITDescriptor::Pointer ||
           Infos.front().Kind == IITDescriptor::Vector)
      Infos = Infos.slice(1);
    assert((Infos.front().Kind != IITDescriptor::Argument ||
            Infos.front().getArgumentKind() == IITDescriptor::AK_MatchType) &&
           "Unsupported polymorphic pointer type with opaque pointer");
    Infos = Infos.slice(1);
    return false;
  }

  case IITDescriptor::Struct: {
    StructType *ST = dyn_cast<StructType>(Ty);
    if (!ST || !ST->isLiteral() || ST->isPacked() ||
        ST->getNumElements() != D.Struct_NumElements)
      return true;

    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      if (matchIntrinsicType(ST->getElementType(i), Infos, ArgTys,
                             DeferredChecks, IsDeferredCheck))
        return true;
    return false;
  }

  case IITDescriptor::Argument:
    // If this is the second occurrence of an argument,
    // verify that the later instance matches the previous instance.
    if (D.getArgumentNumber() < ArgTys.size())
      return Ty != ArgTys[D.getArgumentNumber()];

    if (D.getArgumentNumber() > ArgTys.size() ||
        D.getArgumentKind() == IITDescriptor::AK_MatchType)
      return IsDeferredCheck || DeferCheck(Ty);

    assert(D.getArgumentNumber() == ArgTys.size() && !IsDeferredCheck &&
           "Table consistency error");
    ArgTys.push_back(Ty);

    switch (D.getArgumentKind()) {
    case IITDescriptor::AK_Any: return false; // Success
    case IITDescriptor::AK_AnyInteger: return !Ty->isIntOrIntVectorTy();
    case IITDescriptor::AK_AnyFloat: return !Ty->isFPOrFPVectorTy();
    case IITDescriptor::AK_AnyVector: return !isa<VectorType>(Ty);
    case IITDescriptor::AK_AnyPointer: return !isa<PointerType>(Ty);
    default: break;
    }
    llvm_unreachable("all argument kinds not covered");

  case IITDescriptor::ExtendArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getExtendedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), 2 * ITy->getBitWidth());
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::TruncArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getTruncatedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), ITy->getBitWidth() / 2);
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::HalfVecArgument:
    // If this is a forward reference, defer the check for later.
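    // (Otherwise the operand must be a vector with half as many elements as
    // the referenced argument, e.g. a <4 x i16> operand against an <8 x i16>
    // reference; the element types here are purely illustrative.)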
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    return !isa<VectorType>(ArgTys[D.getArgumentNumber()]) ||
           VectorType::getHalfElementsVectorType(
               cast<VectorType>(ArgTys[D.getArgumentNumber()])) != Ty;
  case IITDescriptor::SameVecWidthArgument: {
    if (D.getArgumentNumber() >= ArgTys.size()) {
      // Defer check and subsequent check for the vector element type.
      Infos = Infos.slice(1);
      return IsDeferredCheck || DeferCheck(Ty);
    }
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgType = dyn_cast<VectorType>(Ty);
    // Both must be vectors of the same number of elements or neither.
    if ((ReferenceType != nullptr) != (ThisArgType != nullptr))
      return true;
    Type *EltTy = Ty;
    if (ThisArgType) {
      if (ReferenceType->getElementCount() != ThisArgType->getElementCount())
        return true;
      EltTy = ThisArgType->getElementType();
    }
    return matchIntrinsicType(EltTy, Infos, ArgTys, DeferredChecks,
                              IsDeferredCheck);
  }
  case IITDescriptor::PtrToArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    Type *ReferenceType = ArgTys[D.getArgumentNumber()];
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);
    return (!ThisArgType ||
            !ThisArgType->isOpaqueOrPointeeTypeMatches(ReferenceType));
  }
  case IITDescriptor::PtrToElt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    VectorType *ReferenceType =
        dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);

    if (!ThisArgType || !ReferenceType)
      return true;
    return !ThisArgType->isOpaqueOrPointeeTypeMatches(
        ReferenceType->getElementType());
  }
  case IITDescriptor::AnyPtrToElt: {
    unsigned RefArgNumber = D.getRefArgNumber();
    if (RefArgNumber >= ArgTys.size()) {
      if (IsDeferredCheck)
        return true;
      // If forward referencing, add the pointer type now and defer the
      // checks for later.
      ArgTys.push_back(Ty);
      return DeferCheck(Ty);
    }

    if (!IsDeferredCheck) {
      assert(D.getOverloadArgNumber() == ArgTys.size() &&
             "Table consistency error");
      ArgTys.push_back(Ty);
    }

    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]);
    auto *ThisArgType = dyn_cast<PointerType>(Ty);
    if (!ThisArgType || !ReferenceType)
      return true;
    return !ThisArgType->isOpaqueOrPointeeTypeMatches(
        ReferenceType->getElementType());
  }
  case IITDescriptor::VecOfAnyPtrsToElt: {
    unsigned RefArgNumber = D.getRefArgNumber();
    if (RefArgNumber >= ArgTys.size()) {
      if (IsDeferredCheck)
        return true;
      // If forward referencing, add the pointer-vector type now and defer
      // the checks for later.
      ArgTys.push_back(Ty);
      return DeferCheck(Ty);
    }

    if (!IsDeferredCheck) {
      assert(D.getOverloadArgNumber() == ArgTys.size() &&
             "Table consistency error");
      ArgTys.push_back(Ty);
    }

    // Verify that the overloaded type "matches" the Ref type: Ty must be a
    // vector with the same element count as Ref, composed of pointers to the
    // same element type as Ref.
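    // (For instance, a gather-style intrinsic might pair a <4 x i32> Ref with
    // a <4 x ptr> or <4 x i32*> pointer-vector operand here; these types are
    // illustrative.)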
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType ||
        (ReferenceType->getElementCount() != ThisArgVecTy->getElementCount()))
      return true;
    PointerType *ThisArgEltTy =
        dyn_cast<PointerType>(ThisArgVecTy->getElementType());
    if (!ThisArgEltTy)
      return true;
    return !ThisArgEltTy->isOpaqueOrPointeeTypeMatches(
        ReferenceType->getElementType());
  }
  case IITDescriptor::VecElementArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck ? true : DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    return !ReferenceType || Ty != ReferenceType->getElementType();
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(NewTy)) {
      int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
      NewTy = VectorType::getSubdividedVectorType(VTy, SubDivs);
      return Ty != NewTy;
    }
    return true;
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType)
      return true;
    return ThisArgVecTy != VectorType::getInteger(ReferenceType);
  }
  }
  llvm_unreachable("unhandled");
}

Intrinsic::MatchIntrinsicTypesResult
Intrinsic::matchIntrinsicSignature(FunctionType *FTy,
                                   ArrayRef<Intrinsic::IITDescriptor> &Infos,
                                   SmallVectorImpl<Type *> &ArgTys) {
  SmallVector<DeferredIntrinsicMatchPair, 2> DeferredChecks;
  if (matchIntrinsicType(FTy->getReturnType(), Infos, ArgTys, DeferredChecks,
                         false))
    return MatchIntrinsicTypes_NoMatchRet;

  unsigned NumDeferredReturnChecks = DeferredChecks.size();

  for (auto *Ty : FTy->params())
    if (matchIntrinsicType(Ty, Infos, ArgTys, DeferredChecks, false))
      return MatchIntrinsicTypes_NoMatchArg;

  for (unsigned I = 0, E = DeferredChecks.size(); I != E; ++I) {
    DeferredIntrinsicMatchPair &Check = DeferredChecks[I];
    if (matchIntrinsicType(Check.first, Check.second, ArgTys, DeferredChecks,
                           true))
      return I < NumDeferredReturnChecks ? MatchIntrinsicTypes_NoMatchRet
                                         : MatchIntrinsicTypes_NoMatchArg;
  }

  return MatchIntrinsicTypes_Match;
}

bool
Intrinsic::matchIntrinsicVarArg(bool isVarArg,
                                ArrayRef<Intrinsic::IITDescriptor> &Infos) {
  // If there are no descriptors left, then it can't be a vararg.
  if (Infos.empty())
    return isVarArg;

  // There should be only one descriptor remaining at this point.
  if (Infos.size() != 1)
    return true;

  // Check and verify the descriptor.
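  // (A well-formed vararg intrinsic's table ends in exactly one VarArg
  // descriptor; seeing anything else here, or a VarArg descriptor paired with
  // a non-vararg function type, is a mismatch.)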
  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);
  if (D.Kind == IITDescriptor::VarArg)
    return !isVarArg;

  return true;
}

bool Intrinsic::getIntrinsicSignature(Function *F,
                                      SmallVectorImpl<Type *> &ArgTys) {
  Intrinsic::ID ID = F->getIntrinsicID();
  if (!ID)
    return false;

  SmallVector<Intrinsic::IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(ID, Table);
  ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;

  if (Intrinsic::matchIntrinsicSignature(F->getFunctionType(), TableRef,
                                         ArgTys) !=
      Intrinsic::MatchIntrinsicTypesResult::MatchIntrinsicTypes_Match) {
    return false;
  }
  if (Intrinsic::matchIntrinsicVarArg(F->getFunctionType()->isVarArg(),
                                      TableRef))
    return false;
  return true;
}

std::optional<Function *> Intrinsic::remangleIntrinsicFunction(Function *F) {
  SmallVector<Type *, 4> ArgTys;
  if (!getIntrinsicSignature(F, ArgTys))
    return std::nullopt;

  Intrinsic::ID ID = F->getIntrinsicID();
  StringRef Name = F->getName();
  std::string WantedName =
      Intrinsic::getName(ID, ArgTys, F->getParent(), F->getFunctionType());
  if (Name == WantedName)
    return std::nullopt;

  Function *NewDecl = [&] {
    if (auto *ExistingGV = F->getParent()->getNamedValue(WantedName)) {
      if (auto *ExistingF = dyn_cast<Function>(ExistingGV))
        if (ExistingF->getFunctionType() == F->getFunctionType())
          return ExistingF;

      // The name already exists, but is not a function or has the wrong
      // prototype. Make room for the new one by renaming the old version.
      // Either this old version will be removed later on or the module is
      // invalid and we'll get an error.
      ExistingGV->setName(WantedName + ".renamed");
    }
    return Intrinsic::getDeclaration(F->getParent(), ID, ArgTys);
  }();

  NewDecl->setCallingConv(F->getCallingConv());
  assert(NewDecl->getFunctionType() == F->getFunctionType() &&
         "Shouldn't change the signature");
  return NewDecl;
}

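// Illustrative usage sketch (assumes a possibly mis-mangled intrinsic
// declaration Function *F; real callers, e.g. the bitcode reader, also take
// care of rewriting call sites as needed):
//
//   if (std::optional<Function *> Remangled =
//           Intrinsic::remangleIntrinsicFunction(F))
//     F->replaceAllUsesWith(*Remangled);
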
/// hasAddressTaken - returns true if there are any uses of this function
/// other than direct calls or invokes to it. Optionally ignores callback
/// uses, assume-like pointer annotation calls, and references in llvm.used
/// and llvm.compiler.used variables.
bool Function::hasAddressTaken(const User **PutOffender,
                               bool IgnoreCallbackUses,
                               bool IgnoreAssumeLikeCalls, bool IgnoreLLVMUsed,
                               bool IgnoreARCAttachedCall) const {
  for (const Use &U : uses()) {
    const User *FU = U.getUser();
    if (isa<BlockAddress>(FU))
      continue;

    if (IgnoreCallbackUses) {
      AbstractCallSite ACS(&U);
      if (ACS && ACS.isCallbackCall())
        continue;
    }

    const auto *Call = dyn_cast<CallBase>(FU);
    if (!Call) {
      if (IgnoreAssumeLikeCalls &&
          isa<BitCastOperator, AddrSpaceCastOperator>(FU) &&
          all_of(FU->users(), [](const User *U) {
            if (const auto *I = dyn_cast<IntrinsicInst>(U))
              return I->isAssumeLikeIntrinsic();
            return false;
          })) {
        continue;
      }

      if (IgnoreLLVMUsed && !FU->user_empty()) {
        const User *FUU = FU;
        if (isa<BitCastOperator, AddrSpaceCastOperator>(FU) &&
            FU->hasOneUse() && !FU->user_begin()->user_empty())
          FUU = *FU->user_begin();
        if (llvm::all_of(FUU->users(), [](const User *U) {
              if (const auto *GV = dyn_cast<GlobalVariable>(U))
                return GV->hasName() &&
                       (GV->getName().equals("llvm.compiler.used") ||
                        GV->getName().equals("llvm.used"));
              return false;
            }))
          continue;
      }
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }

    if (IgnoreAssumeLikeCalls) {
      if (const auto *I = dyn_cast<IntrinsicInst>(Call))
        if (I->isAssumeLikeIntrinsic())
          continue;
    }

    if (!Call->isCallee(&U) || Call->getFunctionType() != getFunctionType()) {
      if (IgnoreARCAttachedCall &&
          Call->isOperandBundleOfType(LLVMContext::OB_clang_arc_attachedcall,
                                      U.getOperandNo()))
        continue;

      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
  }
  return false;
}

bool Function::isDefTriviallyDead() const {
  // Check the linkage.
  if (!hasLinkOnceLinkage() && !hasLocalLinkage() &&
      !hasAvailableExternallyLinkage())
    return false;

  // Check if the function is used by anything other than a blockaddress.
  for (const User *U : users())
    if (!isa<BlockAddress>(U))
      return false;

  return true;
}

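// Illustrative usage sketch for hasAddressTaken above (assumes a Function *F):
// a pass that only wants functions whose address never escapes might do
//
//   const User *Offender = nullptr;
//   if (!F->hasAddressTaken(&Offender))
//     ...; // aside from blockaddress uses, F only appears as the callee of
//          // direct calls/invokes of the matching function type
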
/// callsFunctionThatReturnsTwice - Return true if the function has a call to
/// setjmp or another function that gcc recognizes as "returning twice".
bool Function::callsFunctionThatReturnsTwice() const {
  for (const Instruction &I : instructions(this))
    if (const auto *Call = dyn_cast<CallBase>(&I))
      if (Call->hasFnAttr(Attribute::ReturnsTwice))
        return true;

  return false;
}

Constant *Function::getPersonalityFn() const {
  assert(hasPersonalityFn() && getNumOperands());
  return cast<Constant>(Op<0>());
}

void Function::setPersonalityFn(Constant *Fn) {
  setHungoffOperand<0>(Fn);
  setValueSubclassDataBit(3, Fn != nullptr);
}

Constant *Function::getPrefixData() const {
  assert(hasPrefixData() && getNumOperands());
  return cast<Constant>(Op<1>());
}

void Function::setPrefixData(Constant *PrefixData) {
  setHungoffOperand<1>(PrefixData);
  setValueSubclassDataBit(1, PrefixData != nullptr);
}

Constant *Function::getPrologueData() const {
  assert(hasPrologueData() && getNumOperands());
  return cast<Constant>(Op<2>());
}

void Function::setPrologueData(Constant *PrologueData) {
  setHungoffOperand<2>(PrologueData);
  setValueSubclassDataBit(2, PrologueData != nullptr);
}

void Function::allocHungoffUselist() {
  // If we've already allocated a uselist, stop here.
  if (getNumOperands())
    return;

  allocHungoffUses(3, /*IsPhi=*/ false);
  setNumHungOffUseOperands(3);

  // Initialize the uselist with placeholder operands to allow traversal.
  auto *CPN = ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0));
  Op<0>().set(CPN);
  Op<1>().set(CPN);
  Op<2>().set(CPN);
}

template <int Idx>
void Function::setHungoffOperand(Constant *C) {
  if (C) {
    allocHungoffUselist();
    Op<Idx>().set(C);
  } else if (getNumOperands()) {
    Op<Idx>().set(
        ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0)));
  }
}

void Function::setValueSubclassDataBit(unsigned Bit, bool On) {
  assert(Bit < 16 && "SubclassData contains only 16 bits");
  if (On)
    setValueSubclassData(getSubclassDataFromValue() | (1 << Bit));
  else
    setValueSubclassData(getSubclassDataFromValue() & ~(1 << Bit));
}

void Function::setEntryCount(ProfileCount Count,
                             const DenseSet<GlobalValue::GUID> *S) {
#if !defined(NDEBUG)
  auto PrevCount = getEntryCount();
  assert(!PrevCount || PrevCount->getType() == Count.getType());
#endif

  auto ImportGUIDs = getImportGUIDs();
  if (S == nullptr && ImportGUIDs.size())
    S = &ImportGUIDs;

  MDBuilder MDB(getContext());
  setMetadata(
      LLVMContext::MD_prof,
      MDB.createFunctionEntryCount(Count.getCount(), Count.isSynthetic(), S));
}

void Function::setEntryCount(uint64_t Count, Function::ProfileCountType Type,
                             const DenseSet<GlobalValue::GUID> *Imports) {
  setEntryCount(ProfileCount(Count, Type), Imports);
}

std::optional<ProfileCount> Function::getEntryCount(bool AllowSynthetic) const {
  MDNode *MD = getMetadata(LLVMContext::MD_prof);
  if (MD && MD->getOperand(0))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0))) {
      if (MDS->getString().equals("function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        // A value of -1 is used for SamplePGO when there were no samples.
        // Treat this the same as unknown.
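        // (In textual IR, this branch reads a !prof payload of the form
        //    !{!"function_entry_count", i64 <count>[, i64 <guid>...]}.)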
        if (Count == (uint64_t)-1)
          return std::nullopt;
        return ProfileCount(Count, PCT_Real);
      } else if (AllowSynthetic &&
                 MDS->getString().equals("synthetic_function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        return ProfileCount(Count, PCT_Synthetic);
      }
    }
  return std::nullopt;
}

DenseSet<GlobalValue::GUID> Function::getImportGUIDs() const {
  DenseSet<GlobalValue::GUID> R;
  if (MDNode *MD = getMetadata(LLVMContext::MD_prof))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0)))
      if (MDS->getString().equals("function_entry_count"))
        for (unsigned i = 2; i < MD->getNumOperands(); i++)
          R.insert(mdconst::extract<ConstantInt>(MD->getOperand(i))
                       ->getValue()
                       .getZExtValue());
  return R;
}

void Function::setSectionPrefix(StringRef Prefix) {
  MDBuilder MDB(getContext());
  setMetadata(LLVMContext::MD_section_prefix,
              MDB.createFunctionSectionPrefix(Prefix));
}

std::optional<StringRef> Function::getSectionPrefix() const {
  if (MDNode *MD = getMetadata(LLVMContext::MD_section_prefix)) {
    assert(cast<MDString>(MD->getOperand(0))
               ->getString()
               .equals("function_section_prefix") &&
           "Metadata does not match");
    return cast<MDString>(MD->getOperand(1))->getString();
  }
  return std::nullopt;
}

bool Function::nullPointerIsDefined() const {
  return hasFnAttribute(Attribute::NullPointerIsValid);
}

bool llvm::NullPointerIsDefined(const Function *F, unsigned AS) {
  if (F && F->nullPointerIsDefined())
    return true;

  if (AS != 0)
    return true;

  return false;
}
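
// Illustrative behaviour note for the helper above: for a function carrying
// the Attribute::NullPointerIsValid attribute, NullPointerIsDefined(F, 0)
// returns true; it also returns true for any non-zero address space, where a
// null pointer may be a perfectly valid address.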