//===- Function.cpp - Implement the Global object classes ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Function class for the IR library.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Function.h"
#include "SymbolTableListTraitsImpl.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/AbstractCallSite.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/IntrinsicsBPF.h"
#include "llvm/IR/IntrinsicsHexagon.h"
#include "llvm/IR/IntrinsicsMips.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/IntrinsicsPowerPC.h"
#include "llvm/IR/IntrinsicsR600.h"
#include "llvm/IR/IntrinsicsRISCV.h"
#include "llvm/IR/IntrinsicsS390.h"
#include "llvm/IR/IntrinsicsVE.h"
#include "llvm/IR/IntrinsicsWebAssembly.h"
#include "llvm/IR/IntrinsicsX86.h"
#include "llvm/IR/IntrinsicsXCore.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/SymbolTableListTraits.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

using namespace llvm;
using ProfileCount = Function::ProfileCount;

// Explicit instantiations of SymbolTableListTraits since some of the methods
// are not in the public header file...
template class llvm::SymbolTableListTraits<BasicBlock>;

//===----------------------------------------------------------------------===//
// Argument Implementation
//===----------------------------------------------------------------------===//

Argument::Argument(Type *Ty, const Twine &Name, Function *Par, unsigned ArgNo)
    : Value(Ty, Value::ArgumentVal), Parent(Par), ArgNo(ArgNo) {
  setName(Name);
}

void Argument::setParent(Function *parent) {
  Parent = parent;
}

bool Argument::hasNonNullAttr(bool AllowUndefOrPoison) const {
  if (!getType()->isPointerTy()) return false;
  if (getParent()->hasParamAttribute(getArgNo(), Attribute::NonNull) &&
      (AllowUndefOrPoison ||
       getParent()->hasParamAttribute(getArgNo(), Attribute::NoUndef)))
    return true;
  else if (getDereferenceableBytes() > 0 &&
           !NullPointerIsDefined(getParent(),
                                 getType()->getPointerAddressSpace()))
    return true;
  return false;
}

bool Argument::hasByValAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::ByVal);
}

bool Argument::hasByRefAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::ByRef);
}

bool Argument::hasSwiftSelfAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftSelf);
}

bool Argument::hasSwiftErrorAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftError);
}

bool Argument::hasInAllocaAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::InAlloca);
}

bool Argument::hasPreallocatedAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::Preallocated);
}

bool Argument::hasPassPointeeByValueCopyAttr() const {
  if (!getType()->isPointerTy()) return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated);
}

bool Argument::hasPointeeInMemoryValueAttr() const {
  if (!getType()->isPointerTy())
    return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::StructRet) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ByRef);
}

/// For a byval, sret, inalloca, or preallocated parameter, get the in-memory
/// parameter type.
static Type *getMemoryParamAllocType(AttributeSet ParamAttrs, Type *ArgTy) {
  // FIXME: All the type carrying attributes are mutually exclusive, so there
  // should be a single query to get the stored type that handles any of them.
  if (Type *ByValTy = ParamAttrs.getByValType())
    return ByValTy;
  if (Type *ByRefTy = ParamAttrs.getByRefType())
    return ByRefTy;
  if (Type *PreAllocTy = ParamAttrs.getPreallocatedType())
    return PreAllocTy;

  // FIXME: sret and inalloca always depend on the pointee element type. It's
  // also possible for byval to miss it.
  if (ParamAttrs.hasAttribute(Attribute::InAlloca) ||
      ParamAttrs.hasAttribute(Attribute::ByVal) ||
      ParamAttrs.hasAttribute(Attribute::StructRet) ||
      ParamAttrs.hasAttribute(Attribute::Preallocated))
    return cast<PointerType>(ArgTy)->getElementType();

  return nullptr;
}

uint64_t Argument::getPassPointeeByValueCopySize(const DataLayout &DL) const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttributes(getArgNo());
  if (Type *MemTy = getMemoryParamAllocType(ParamAttrs, getType()))
    return DL.getTypeAllocSize(MemTy);
  return 0;
}

Type *Argument::getPointeeInMemoryValueType() const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttributes(getArgNo());
  return getMemoryParamAllocType(ParamAttrs, getType());
}

unsigned Argument::getParamAlignment() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlignment(getArgNo());
}

MaybeAlign Argument::getParamAlign() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlign(getArgNo());
}

Type *Argument::getParamByValType() const {
  assert(getType()->isPointerTy() && "Only pointers have byval types");
  return getParent()->getParamByValType(getArgNo());
}

Type *Argument::getParamStructRetType() const {
  assert(getType()->isPointerTy() && "Only pointers have sret types");
  return getParent()->getParamStructRetType(getArgNo());
}

Type *Argument::getParamByRefType() const {
  assert(getType()->isPointerTy() && "Only pointers have byref types");
  return getParent()->getParamByRefType(getArgNo());
}

uint64_t Argument::getDereferenceableBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableBytes(getArgNo());
}

uint64_t Argument::getDereferenceableOrNullBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableOrNullBytes(getArgNo());
}

bool Argument::hasNestAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::Nest);
}

bool Argument::hasNoAliasAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoAlias);
}

bool Argument::hasNoCaptureAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoCapture);
}

bool Argument::hasNoFreeAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoFree);
}

bool Argument::hasStructRetAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::StructRet);
}

bool Argument::hasInRegAttr() const {
  return hasAttribute(Attribute::InReg);
}

bool Argument::hasReturnedAttr() const {
  return hasAttribute(Attribute::Returned);
}

bool Argument::hasZExtAttr() const {
  return hasAttribute(Attribute::ZExt);
}

bool Argument::hasSExtAttr() const {
  return hasAttribute(Attribute::SExt);
}

bool Argument::onlyReadsMemory() const {
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ReadOnly) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ReadNone);
}

void Argument::addAttrs(AttrBuilder &B) {
  AttributeList AL = getParent()->getAttributes();
  AL = AL.addParamAttributes(Parent->getContext(), getArgNo(), B);
  getParent()->setAttributes(AL);
}

void Argument::addAttr(Attribute::AttrKind Kind) {
  getParent()->addParamAttr(getArgNo(), Kind);
}

void Argument::addAttr(Attribute Attr) {
  getParent()->addParamAttr(getArgNo(), Attr);
}

void Argument::removeAttr(Attribute::AttrKind Kind) {
  getParent()->removeParamAttr(getArgNo(), Kind);
}

bool Argument::hasAttribute(Attribute::AttrKind Kind) const {
  return getParent()->hasParamAttribute(getArgNo(), Kind);
}

Attribute Argument::getAttribute(Attribute::AttrKind Kind) const {
  return getParent()->getParamAttribute(getArgNo(), Kind);
}

//===----------------------------------------------------------------------===//
// Helper Methods in Function
//===----------------------------------------------------------------------===//

LLVMContext &Function::getContext() const {
  return getType()->getContext();
}

unsigned Function::getInstructionCount() const {
  unsigned NumInstrs = 0;
  for (const BasicBlock &BB : BasicBlocks)
    NumInstrs += std::distance(BB.instructionsWithoutDebug().begin(),
                               BB.instructionsWithoutDebug().end());
  return NumInstrs;
}

Function *Function::Create(FunctionType *Ty, LinkageTypes Linkage,
                           const Twine &N, Module &M) {
  return Create(Ty, Linkage, M.getDataLayout().getProgramAddressSpace(), N, &M);
}

void Function::removeFromParent() {
  getParent()->getFunctionList().remove(getIterator());
}

void Function::eraseFromParent() {
  getParent()->getFunctionList().erase(getIterator());
}

//===----------------------------------------------------------------------===//
// Function Implementation
//===----------------------------------------------------------------------===//

static unsigned computeAddrSpace(unsigned AddrSpace, Module *M) {
  // If AS == -1 and we are passed a valid module pointer we place the function
  // in the program address space. Otherwise we default to AS0.
  if (AddrSpace == static_cast<unsigned>(-1))
    return M ? M->getDataLayout().getProgramAddressSpace() : 0;
  return AddrSpace;
}

Function::Function(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace,
                   const Twine &name, Module *ParentModule)
    : GlobalObject(Ty, Value::FunctionVal,
                   OperandTraits<Function>::op_begin(this), 0, Linkage, name,
                   computeAddrSpace(AddrSpace, ParentModule)),
      NumArgs(Ty->getNumParams()) {
  assert(FunctionType::isValidReturnType(getReturnType()) &&
         "invalid return type");
  setGlobalObjectSubClassData(0);

  // We only need a symbol table for a function if the context keeps value names
  if (!getContext().shouldDiscardValueNames())
    SymTab = std::make_unique<ValueSymbolTable>();

  // If the function has arguments, mark them as lazily built.
  if (Ty->getNumParams())
    setValueSubclassData(1);   // Set the "has lazy arguments" bit.

  if (ParentModule)
    ParentModule->getFunctionList().push_back(this);

  HasLLVMReservedName = getName().startswith("llvm.");
  // Ensure intrinsics have the right parameter attributes.
  // Note, the IntID field will have been set in Value::setName if this function
  // name is a valid intrinsic ID.
  if (IntID)
    setAttributes(Intrinsic::getAttributes(getContext(), IntID));
}

Function::~Function() {
  dropAllReferences();    // After this it is safe to delete instructions.

  // Delete all of the method arguments and unlink from symbol table...
  if (Arguments)
    clearArguments();

  // Remove the function from the on-the-side GC table.
  clearGC();
}

void Function::BuildLazyArguments() const {
  // Create the arguments vector, all arguments start out unnamed.
  auto *FT = getFunctionType();
  if (NumArgs > 0) {
    Arguments = std::allocator<Argument>().allocate(NumArgs);
    for (unsigned i = 0, e = NumArgs; i != e; ++i) {
      Type *ArgTy = FT->getParamType(i);
      assert(!ArgTy->isVoidTy() && "Cannot have void typed arguments!");
      new (Arguments + i) Argument(ArgTy, "", const_cast<Function *>(this), i);
    }
  }

  // Clear the lazy arguments bit.
  unsigned SDC = getSubclassDataFromValue();
  SDC &= ~(1 << 0);
  const_cast<Function*>(this)->setValueSubclassData(SDC);
  assert(!hasLazyArguments());
}

static MutableArrayRef<Argument> makeArgArray(Argument *Args, size_t Count) {
  return MutableArrayRef<Argument>(Args, Count);
}

bool Function::isConstrainedFPIntrinsic() const {
  switch (getIntrinsicID()) {
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC) \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
#undef INSTRUCTION
  default:
    return false;
  }
}

void Function::clearArguments() {
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    A.setName("");
    A.~Argument();
  }
  std::allocator<Argument>().deallocate(Arguments, NumArgs);
  Arguments = nullptr;
}

void Function::stealArgumentListFrom(Function &Src) {
  assert(isDeclaration() && "Expected no references to current arguments");

  // Drop the current arguments, if any, and set the lazy argument bit.
  if (!hasLazyArguments()) {
    assert(llvm::all_of(makeArgArray(Arguments, NumArgs),
                        [](const Argument &A) { return A.use_empty(); }) &&
           "Expected arguments to be unused in declaration");
    clearArguments();
    setValueSubclassData(getSubclassDataFromValue() | (1 << 0));
  }

  // Nothing to steal if Src has lazy arguments.
  if (Src.hasLazyArguments())
    return;

  // Steal arguments from Src, and fix the lazy argument bits.
  assert(arg_size() == Src.arg_size());
  Arguments = Src.Arguments;
  Src.Arguments = nullptr;
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    // FIXME: This does the work of transferNodesFromList inefficiently.
    SmallString<128> Name;
    if (A.hasName())
      Name = A.getName();
    if (!Name.empty())
      A.setName("");
    A.setParent(this);
    if (!Name.empty())
      A.setName(Name);
  }

  setValueSubclassData(getSubclassDataFromValue() & ~(1 << 0));
  assert(!hasLazyArguments());
  Src.setValueSubclassData(Src.getSubclassDataFromValue() | (1 << 0));
}

// dropAllReferences() - This function causes all the subinstructions to "let
// go" of all references that they are maintaining. This allows one to
// 'delete' a whole class at a time, even though there may be circular
// references... first all references are dropped, and all use counts go to
// zero. Then everything is deleted for real. Note that no operations are
// valid on an object that has "dropped all references", except operator
// delete.
//
void Function::dropAllReferences() {
  setIsMaterializable(false);

  for (BasicBlock &BB : *this)
    BB.dropAllReferences();

  // Delete all basic blocks. They are now unused, except possibly by
  // blockaddresses, but BasicBlock's destructor takes care of those.
  while (!BasicBlocks.empty())
    BasicBlocks.begin()->eraseFromParent();

  // Drop uses of any optional data (real or placeholder).
  if (getNumOperands()) {
    User::dropAllReferences();
    setNumHungOffUseOperands(0);
    setValueSubclassData(getSubclassDataFromValue() & ~0xe);
  }

  // Metadata is stored in a side-table.
  clearMetadata();
}

void Function::addAttribute(unsigned i, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::addAttribute(unsigned i, Attribute Attr) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttribute(getContext(), i, Attr);
  setAttributes(PAL);
}

void Function::addAttributes(unsigned i, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttributes(getContext(), i, Attrs);
  setAttributes(PAL);
}

void Function::addParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::addParamAttr(unsigned ArgNo, Attribute Attr) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttribute(getContext(), ArgNo, Attr);
  setAttributes(PAL);
}

void Function::addParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttributes(getContext(), ArgNo, Attrs);
  setAttributes(PAL);
}

void Function::removeAttribute(unsigned i, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::removeAttribute(unsigned i, StringRef Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::removeAttributes(unsigned i, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttributes(getContext(), i, Attrs);
  setAttributes(PAL);
}

void Function::removeParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::removeParamAttr(unsigned ArgNo, StringRef Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::removeParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttributes(getContext(), ArgNo, Attrs);
  setAttributes(PAL);
}

void Function::addDereferenceableAttr(unsigned i, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableAttr(getContext(), i, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableParamAttr(unsigned ArgNo, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableParamAttr(getContext(), ArgNo, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableOrNullAttr(unsigned i, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableOrNullAttr(getContext(), i, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableOrNullParamAttr(unsigned ArgNo,
                                                 uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableOrNullParamAttr(getContext(), ArgNo, Bytes);
  setAttributes(PAL);
}

DenormalMode Function::getDenormalMode(const fltSemantics &FPType) const {
  if (&FPType == &APFloat::IEEEsingle()) {
    Attribute Attr = getFnAttribute("denormal-fp-math-f32");
    StringRef Val = Attr.getValueAsString();
    if (!Val.empty())
      return parseDenormalFPAttribute(Val);

    // If the f32 variant of the attribute isn't specified, try to use the
    // generic one.
  }

  Attribute Attr = getFnAttribute("denormal-fp-math");
  return parseDenormalFPAttribute(Attr.getValueAsString());
}

const std::string &Function::getGC() const {
  assert(hasGC() && "Function has no collector");
  return getContext().getGC(*this);
}

void Function::setGC(std::string Str) {
  setValueSubclassDataBit(14, !Str.empty());
  getContext().setGC(*this, std::move(Str));
}

void Function::clearGC() {
  if (!hasGC())
    return;
  getContext().deleteGC(*this);
  setValueSubclassDataBit(14, false);
}

bool Function::hasStackProtectorFnAttr() const {
  return hasFnAttribute(Attribute::StackProtect) ||
         hasFnAttribute(Attribute::StackProtectStrong) ||
         hasFnAttribute(Attribute::StackProtectReq);
}

/// Copy all additional attributes (those not needed to create a Function) from
/// the Function Src to this one.
void Function::copyAttributesFrom(const Function *Src) {
  GlobalObject::copyAttributesFrom(Src);
  setCallingConv(Src->getCallingConv());
  setAttributes(Src->getAttributes());
  if (Src->hasGC())
    setGC(Src->getGC());
  else
    clearGC();
  if (Src->hasPersonalityFn())
    setPersonalityFn(Src->getPersonalityFn());
  if (Src->hasPrefixData())
    setPrefixData(Src->getPrefixData());
  if (Src->hasPrologueData())
    setPrologueData(Src->getPrologueData());
}

/// Table of string intrinsic names indexed by enum value.
static const char * const IntrinsicNameTable[] = {
  "not_intrinsic",
#define GET_INTRINSIC_NAME_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_NAME_TABLE
};

/// Table of per-target intrinsic name tables.
#define GET_INTRINSIC_TARGET_DATA
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_TARGET_DATA

bool Function::isTargetIntrinsic(Intrinsic::ID IID) {
  return IID > TargetInfos[0].Count;
}

bool Function::isTargetIntrinsic() const {
  return isTargetIntrinsic(IntID);
}

/// Find the segment of \c IntrinsicNameTable for intrinsics with the same
/// target as \c Name, or the generic table if \c Name is not target specific.
///
/// Returns the relevant slice of \c IntrinsicNameTable
static ArrayRef<const char *> findTargetSubtable(StringRef Name) {
  assert(Name.startswith("llvm."));

  ArrayRef<IntrinsicTargetInfo> Targets(TargetInfos);
  // Drop "llvm." and take the first dotted component. That will be the target
  // if this is target specific.
  StringRef Target = Name.drop_front(5).split('.').first;
  auto It = partition_point(
      Targets, [=](const IntrinsicTargetInfo &TI) { return TI.Name < Target; });
  // We've either found the target or just fall back to the generic set, which
  // is always first.
  const auto &TI = It != Targets.end() && It->Name == Target ? *It : Targets[0];
  return makeArrayRef(&IntrinsicNameTable[1] + TI.Offset, TI.Count);
}

/// This does the actual lookup of an intrinsic ID which
/// matches the given function name.
Intrinsic::ID Function::lookupIntrinsicID(StringRef Name) {
  ArrayRef<const char *> NameTable = findTargetSubtable(Name);
  int Idx = Intrinsic::lookupLLVMIntrinsicByName(NameTable, Name);
  if (Idx == -1)
    return Intrinsic::not_intrinsic;

  // Intrinsic IDs correspond to the location in IntrinsicNameTable, but we have
  // an index into a sub-table.
  int Adjust = NameTable.data() - IntrinsicNameTable;
  Intrinsic::ID ID = static_cast<Intrinsic::ID>(Idx + Adjust);

  // If the intrinsic is not overloaded, require an exact match. If it is
  // overloaded, require either exact or prefix match.
  const auto MatchSize = strlen(NameTable[Idx]);
  assert(Name.size() >= MatchSize && "Expected either exact or prefix match");
  bool IsExactMatch = Name.size() == MatchSize;
  return IsExactMatch || Intrinsic::isOverloaded(ID) ? ID
                                                     : Intrinsic::not_intrinsic;
}

void Function::recalculateIntrinsicID() {
  StringRef Name = getName();
  if (!Name.startswith("llvm.")) {
    HasLLVMReservedName = false;
    IntID = Intrinsic::not_intrinsic;
    return;
  }
  HasLLVMReservedName = true;
  IntID = lookupIntrinsicID(Name);
}

/// Returns a stable mangling for the type specified for use in the name
/// mangling scheme used by 'any' types in intrinsic signatures. The mangling
/// of named types is simply their name. Manglings for unnamed types consist
/// of a prefix ('p' for pointers, 'a' for arrays, 'f_' for functions)
/// combined with the mangling of their component types. A vararg function
/// type will have a suffix of 'vararg'. Since function types can contain
/// other function types, we close a function type mangling with suffix 'f'
/// which can't be confused with its prefix. This ensures we don't have
/// collisions between two unrelated function types. Otherwise, you might
/// parse ffXX as f(fXX) or f(fX)X. (X is a placeholder for any other type.)
/// The HasUnnamedType boolean is set if an unnamed type was encountered,
/// indicating that extra care must be taken to ensure a unique name.
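/// For example, a fixed <4 x float> vector mangles to "v4f32", and a pointer
/// to it in address space 0 mangles to "p0v4f32"; these suffixes are how
/// overloaded names such as llvm.masked.load.v4f32.p0v4f32 are formed.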
static std::string getMangledTypeStr(Type *Ty, bool &HasUnnamedType) {
  std::string Result;
  if (PointerType* PTyp = dyn_cast<PointerType>(Ty)) {
    Result += "p" + utostr(PTyp->getAddressSpace()) +
              getMangledTypeStr(PTyp->getElementType(), HasUnnamedType);
  } else if (ArrayType* ATyp = dyn_cast<ArrayType>(Ty)) {
    Result += "a" + utostr(ATyp->getNumElements()) +
              getMangledTypeStr(ATyp->getElementType(), HasUnnamedType);
  } else if (StructType *STyp = dyn_cast<StructType>(Ty)) {
    if (!STyp->isLiteral()) {
      Result += "s_";
      if (STyp->hasName())
        Result += STyp->getName();
      else
        HasUnnamedType = true;
    } else {
      Result += "sl_";
      for (auto Elem : STyp->elements())
        Result += getMangledTypeStr(Elem, HasUnnamedType);
    }
    // Ensure nested structs are distinguishable.
    Result += "s";
  } else if (FunctionType *FT = dyn_cast<FunctionType>(Ty)) {
    Result += "f_" + getMangledTypeStr(FT->getReturnType(), HasUnnamedType);
    for (size_t i = 0; i < FT->getNumParams(); i++)
      Result += getMangledTypeStr(FT->getParamType(i), HasUnnamedType);
    if (FT->isVarArg())
      Result += "vararg";
    // Ensure nested function types are distinguishable.
    Result += "f";
  } else if (VectorType* VTy = dyn_cast<VectorType>(Ty)) {
    ElementCount EC = VTy->getElementCount();
    if (EC.isScalable())
      Result += "nx";
    Result += "v" + utostr(EC.getKnownMinValue()) +
              getMangledTypeStr(VTy->getElementType(), HasUnnamedType);
  } else if (Ty) {
    switch (Ty->getTypeID()) {
    default: llvm_unreachable("Unhandled type");
    case Type::VoidTyID: Result += "isVoid"; break;
    case Type::MetadataTyID: Result += "Metadata"; break;
    case Type::HalfTyID: Result += "f16"; break;
    case Type::BFloatTyID: Result += "bf16"; break;
    case Type::FloatTyID: Result += "f32"; break;
    case Type::DoubleTyID: Result += "f64"; break;
    case Type::X86_FP80TyID: Result += "f80"; break;
    case Type::FP128TyID: Result += "f128"; break;
    case Type::PPC_FP128TyID: Result += "ppcf128"; break;
    case Type::X86_MMXTyID: Result += "x86mmx"; break;
    case Type::X86_AMXTyID: Result += "x86amx"; break;
    case Type::IntegerTyID:
      Result += "i" + utostr(cast<IntegerType>(Ty)->getBitWidth());
      break;
    }
  }
  return Result;
}

StringRef Intrinsic::getName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert(!Intrinsic::isOverloaded(id) &&
         "This version of getName does not support overloading");
  return IntrinsicNameTable[id];
}

std::string Intrinsic::getName(ID Id, ArrayRef<Type *> Tys, Module *M,
                               FunctionType *FT) {
  assert(Id < num_intrinsics && "Invalid intrinsic ID!");
  assert((Tys.empty() || Intrinsic::isOverloaded(Id)) &&
         "This version of getName is for overloaded intrinsics only");
  bool HasUnnamedType = false;
  std::string Result(IntrinsicNameTable[Id]);
  for (Type *Ty : Tys) {
    Result += "." + getMangledTypeStr(Ty, HasUnnamedType);
  }
  assert((M || !HasUnnamedType) && "unnamed types need a module");
  if (M && HasUnnamedType) {
    if (!FT)
      FT = getType(M->getContext(), Id, Tys);
    else
      assert((FT == getType(M->getContext(), Id, Tys)) &&
             "Provided FunctionType must match arguments");
    return M->getUniqueIntrinsicName(Result, Id, FT);
  }
  return Result;
}

std::string Intrinsic::getName(ID Id, ArrayRef<Type *> Tys) {
  return getName(Id, Tys, nullptr, nullptr);
}

/// IIT_Info - These are enumerators that describe the entries returned by the
/// getIntrinsicInfoTableEntries function.
///
/// NOTE: This must be kept in synch with the copy in TblGen/IntrinsicEmitter!
enum IIT_Info {
  // Common values should be encoded with 0-15.
  IIT_Done = 0,
  IIT_I1 = 1,
  IIT_I8 = 2,
  IIT_I16 = 3,
  IIT_I32 = 4,
  IIT_I64 = 5,
  IIT_F16 = 6,
  IIT_F32 = 7,
  IIT_F64 = 8,
  IIT_V2 = 9,
  IIT_V4 = 10,
  IIT_V8 = 11,
  IIT_V16 = 12,
  IIT_V32 = 13,
  IIT_PTR = 14,
  IIT_ARG = 15,

  // Values from 16+ are only encodable with the inefficient encoding.
  IIT_V64 = 16,
  IIT_MMX = 17,
  IIT_TOKEN = 18,
  IIT_METADATA = 19,
  IIT_EMPTYSTRUCT = 20,
  IIT_STRUCT2 = 21,
  IIT_STRUCT3 = 22,
  IIT_STRUCT4 = 23,
  IIT_STRUCT5 = 24,
  IIT_EXTEND_ARG = 25,
  IIT_TRUNC_ARG = 26,
  IIT_ANYPTR = 27,
  IIT_V1 = 28,
  IIT_VARARG = 29,
  IIT_HALF_VEC_ARG = 30,
  IIT_SAME_VEC_WIDTH_ARG = 31,
  IIT_PTR_TO_ARG = 32,
  IIT_PTR_TO_ELT = 33,
  IIT_VEC_OF_ANYPTRS_TO_ELT = 34,
  IIT_I128 = 35,
  IIT_V512 = 36,
  IIT_V1024 = 37,
  IIT_STRUCT6 = 38,
  IIT_STRUCT7 = 39,
  IIT_STRUCT8 = 40,
  IIT_F128 = 41,
  IIT_VEC_ELEMENT = 42,
  IIT_SCALABLE_VEC = 43,
  IIT_SUBDIVIDE2_ARG = 44,
  IIT_SUBDIVIDE4_ARG = 45,
  IIT_VEC_OF_BITCASTS_TO_INT = 46,
  IIT_V128 = 47,
  IIT_BF16 = 48,
  IIT_STRUCT9 = 49,
  IIT_V256 = 50,
  IIT_AMX = 51
};

static void DecodeIITType(unsigned &NextElt, ArrayRef<unsigned char> Infos,
                          IIT_Info LastInfo,
                          SmallVectorImpl<Intrinsic::IITDescriptor> &OutputTable) {
  using namespace Intrinsic;

  bool IsScalableVector = (LastInfo == IIT_SCALABLE_VEC);

  IIT_Info Info = IIT_Info(Infos[NextElt++]);
  unsigned StructElts = 2;

  switch (Info) {
  case IIT_Done:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Void, 0));
    return;
  case IIT_VARARG:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VarArg, 0));
    return;
  case IIT_MMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::MMX, 0));
    return;
  case IIT_AMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AMX, 0));
    return;
  case IIT_TOKEN:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Token, 0));
    return;
  case IIT_METADATA:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Metadata, 0));
    return;
  case IIT_F16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Half, 0));
    return;
  case IIT_BF16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::BFloat, 0));
    return;
  case IIT_F32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Float, 0));
    return;
  case IIT_F64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Double, 0));
    return;
  case IIT_F128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Quad, 0));
    return;
  case IIT_I1:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 1));
    return;
  case IIT_I8:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8));
    return;
  case IIT_I16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 16));
    return;
  case IIT_I32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 32));
    return;
  case IIT_I64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 64));
    return;
  case IIT_I128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 128));
    return;
  case IIT_V1:
    OutputTable.push_back(IITDescriptor::getVector(1, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V2:
    OutputTable.push_back(IITDescriptor::getVector(2, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V4:
    OutputTable.push_back(IITDescriptor::getVector(4, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V8:
    OutputTable.push_back(IITDescriptor::getVector(8, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V16:
    OutputTable.push_back(IITDescriptor::getVector(16, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V32:
    OutputTable.push_back(IITDescriptor::getVector(32, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V64:
    OutputTable.push_back(IITDescriptor::getVector(64, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V128:
    OutputTable.push_back(IITDescriptor::getVector(128, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V256:
    OutputTable.push_back(IITDescriptor::getVector(256, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V512:
    OutputTable.push_back(IITDescriptor::getVector(512, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V1024:
    OutputTable.push_back(IITDescriptor::getVector(1024, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_PTR:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 0));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_ANYPTR: { // [ANYPTR addrspace, subtype]
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer,
                                             Infos[NextElt++]));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Argument, ArgInfo));
    return;
  }
  case IIT_EXTEND_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::ExtendArgument,
                                             ArgInfo));
    return;
  }
  case IIT_TRUNC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::TruncArgument,
                                             ArgInfo));
    return;
  }
  case IIT_HALF_VEC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::HalfVecArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SAME_VEC_WIDTH_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::SameVecWidthArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ELT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToElt, ArgInfo));
    return;
  }
  case IIT_VEC_OF_ANYPTRS_TO_ELT: {
    unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecOfAnyPtrsToElt, ArgNo, RefNo));
    return;
  }
  case IIT_EMPTYSTRUCT:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0));
    return;
  case IIT_STRUCT9: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT8: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT7: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT6: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT5: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT4: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT3: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT2: {
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, StructElts));

    for (unsigned i = 0; i != StructElts; ++i)
      DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_SUBDIVIDE2_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide2Argument,
                                             ArgInfo));
    return;
  }
  case IIT_SUBDIVIDE4_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide4Argument,
                                             ArgInfo));
    return;
  }
  case IIT_VEC_ELEMENT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecElementArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SCALABLE_VEC: {
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_VEC_OF_BITCASTS_TO_INT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecOfBitcastsToInt,
                                             ArgInfo));
    return;
  }
  }
  llvm_unreachable("unhandled");
}

#define GET_INTRINSIC_GENERATOR_GLOBAL
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_GENERATOR_GLOBAL

void Intrinsic::getIntrinsicInfoTableEntries(ID id,
                                             SmallVectorImpl<IITDescriptor> &T){
  // Check to see if the intrinsic's type was expressible by the table.
  unsigned TableVal = IIT_Table[id-1];

  // Decode the TableVal into an array of IITValues.
  SmallVector<unsigned char, 8> IITValues;
  ArrayRef<unsigned char> IITEntries;
  unsigned NextElt = 0;
  if ((TableVal >> 31) != 0) {
    // This is an offset into the IIT_LongEncodingTable.
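    // The top bit of TableVal is a sentinel selecting the long encoding; the
    // remaining bits give the starting index into that table.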
    IITEntries = IIT_LongEncodingTable;

    // Strip sentinel bit.
    NextElt = (TableVal << 1) >> 1;
  } else {
    // Decode the TableVal into an array of IITValues. If the entry was encoded
    // into a single word in the table itself, decode it now.
    do {
      IITValues.push_back(TableVal & 0xF);
      TableVal >>= 4;
    } while (TableVal);

    IITEntries = IITValues;
    NextElt = 0;
  }

  // Okay, decode the table into the output vector of IITDescriptors.
  DecodeIITType(NextElt, IITEntries, IIT_Done, T);
  while (NextElt != IITEntries.size() && IITEntries[NextElt] != 0)
    DecodeIITType(NextElt, IITEntries, IIT_Done, T);
}

static Type *DecodeFixedType(ArrayRef<Intrinsic::IITDescriptor> &Infos,
                             ArrayRef<Type*> Tys, LLVMContext &Context) {
  using namespace Intrinsic;

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return Type::getVoidTy(Context);
  case IITDescriptor::VarArg: return Type::getVoidTy(Context);
  case IITDescriptor::MMX: return Type::getX86_MMXTy(Context);
  case IITDescriptor::AMX: return Type::getX86_AMXTy(Context);
  case IITDescriptor::Token: return Type::getTokenTy(Context);
  case IITDescriptor::Metadata: return Type::getMetadataTy(Context);
  case IITDescriptor::Half: return Type::getHalfTy(Context);
  case IITDescriptor::BFloat: return Type::getBFloatTy(Context);
  case IITDescriptor::Float: return Type::getFloatTy(Context);
  case IITDescriptor::Double: return Type::getDoubleTy(Context);
  case IITDescriptor::Quad: return Type::getFP128Ty(Context);

  case IITDescriptor::Integer:
    return IntegerType::get(Context, D.Integer_Width);
  case IITDescriptor::Vector:
    return VectorType::get(DecodeFixedType(Infos, Tys, Context),
                           D.Vector_Width);
  case IITDescriptor::Pointer:
    return PointerType::get(DecodeFixedType(Infos, Tys, Context),
                            D.Pointer_AddressSpace);
  case IITDescriptor::Struct: {
    SmallVector<Type *, 8> Elts;
    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      Elts.push_back(DecodeFixedType(Infos, Tys, Context));
    return StructType::get(Context, Elts);
  }
  case IITDescriptor::Argument:
    return Tys[D.getArgumentNumber()];
  case IITDescriptor::ExtendArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getExtendedElementVectorType(VTy);

    return IntegerType::get(Context, 2 * cast<IntegerType>(Ty)->getBitWidth());
  }
  case IITDescriptor::TruncArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getTruncatedElementVectorType(VTy);

    IntegerType *ITy = cast<IntegerType>(Ty);
    assert(ITy->getBitWidth() % 2 == 0);
    return IntegerType::get(Context, ITy->getBitWidth() / 2);
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
    return VectorType::getSubdividedVectorType(VTy, SubDivs);
  }
  case IITDescriptor::HalfVecArgument:
    return VectorType::getHalfElementsVectorType(cast<VectorType>(
                                                  Tys[D.getArgumentNumber()]));
  case IITDescriptor::SameVecWidthArgument: {
    Type *EltTy = DecodeFixedType(Infos, Tys, Context);
    Type *Ty = Tys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::get(EltTy, VTy->getElementCount());
    return EltTy;
  }
  case IITDescriptor::PtrToArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    return PointerType::getUnqual(Ty);
  }
  case IITDescriptor::PtrToElt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    if (!VTy)
      llvm_unreachable("Expected an argument of Vector Type");
    Type *EltTy = VTy->getElementType();
    return PointerType::getUnqual(EltTy);
  }
  case IITDescriptor::VecElementArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VTy->getElementType();
    llvm_unreachable("Expected an argument of Vector Type");
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    return VectorType::getInteger(VTy);
  }
  case IITDescriptor::VecOfAnyPtrsToElt:
    // Return the overloaded type (which determines the pointers address space)
    return Tys[D.getOverloadArgNumber()];
  }
  llvm_unreachable("unhandled");
}

FunctionType *Intrinsic::getType(LLVMContext &Context,
                                 ID id, ArrayRef<Type*> Tys) {
  SmallVector<IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(id, Table);

  ArrayRef<IITDescriptor> TableRef = Table;
  Type *ResultTy = DecodeFixedType(TableRef, Tys, Context);

  SmallVector<Type*, 8> ArgTys;
  while (!TableRef.empty())
    ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context));

  // DecodeFixedType returns Void for IITDescriptor::Void and
  // IITDescriptor::VarArg. If we see a void type as the type of the last
  // argument, it is a vararg intrinsic.
  if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) {
    ArgTys.pop_back();
    return FunctionType::get(ResultTy, ArgTys, true);
  }
  return FunctionType::get(ResultTy, ArgTys, false);
}

bool Intrinsic::isOverloaded(ID id) {
#define GET_INTRINSIC_OVERLOAD_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_OVERLOAD_TABLE
}

bool Intrinsic::isLeaf(ID id) {
  switch (id) {
  default:
    return true;

  case Intrinsic::experimental_gc_statepoint:
  case Intrinsic::experimental_patchpoint_void:
  case Intrinsic::experimental_patchpoint_i64:
    return false;
  }
}

/// This defines the "Intrinsic::getAttributes(ID id)" method.
#define GET_INTRINSIC_ATTRIBUTES
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_ATTRIBUTES

Function *Intrinsic::getDeclaration(Module *M, ID id, ArrayRef<Type*> Tys) {
  // There can never be multiple globals with the same name of different types,
  // because intrinsics must be a specific type.
  auto *FT = getType(M->getContext(), id, Tys);
  return cast<Function>(
      M->getOrInsertFunction(Tys.empty() ? getName(id)
                                         : getName(id, Tys, M, FT),
                             getType(M->getContext(), id, Tys))
          .getCallee());
}

// This defines the "Intrinsic::getIntrinsicForGCCBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN

// This defines the "Intrinsic::getIntrinsicForMSBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_MS_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_MS_BUILTIN

using DeferredIntrinsicMatchPair =
    std::pair<Type *, ArrayRef<Intrinsic::IITDescriptor>>;

static bool matchIntrinsicType(
    Type *Ty, ArrayRef<Intrinsic::IITDescriptor> &Infos,
    SmallVectorImpl<Type *> &ArgTys,
    SmallVectorImpl<DeferredIntrinsicMatchPair> &DeferredChecks,
    bool IsDeferredCheck) {
  using namespace Intrinsic;

  // If we ran out of descriptors, there are too many arguments.
  if (Infos.empty()) return true;

  // Do this before slicing off the 'front' part
  auto InfosRef = Infos;
  auto DeferCheck = [&DeferredChecks, &InfosRef](Type *T) {
    DeferredChecks.emplace_back(T, InfosRef);
    return false;
  };

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return !Ty->isVoidTy();
  case IITDescriptor::VarArg: return true;
  case IITDescriptor::MMX: return !Ty->isX86_MMXTy();
  case IITDescriptor::AMX: return !Ty->isX86_AMXTy();
  case IITDescriptor::Token: return !Ty->isTokenTy();
  case IITDescriptor::Metadata: return !Ty->isMetadataTy();
  case IITDescriptor::Half: return !Ty->isHalfTy();
  case IITDescriptor::BFloat: return !Ty->isBFloatTy();
  case IITDescriptor::Float: return !Ty->isFloatTy();
  case IITDescriptor::Double: return !Ty->isDoubleTy();
  case IITDescriptor::Quad: return !Ty->isFP128Ty();
  case IITDescriptor::Integer: return !Ty->isIntegerTy(D.Integer_Width);
  case IITDescriptor::Vector: {
    VectorType *VT = dyn_cast<VectorType>(Ty);
    return !VT || VT->getElementCount() != D.Vector_Width ||
           matchIntrinsicType(VT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }
  case IITDescriptor::Pointer: {
    PointerType *PT = dyn_cast<PointerType>(Ty);
    return !PT || PT->getAddressSpace() != D.Pointer_AddressSpace ||
           matchIntrinsicType(PT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }

  case IITDescriptor::Struct: {
    StructType *ST = dyn_cast<StructType>(Ty);
    if (!ST || ST->getNumElements() != D.Struct_NumElements)
      return true;

    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      if (matchIntrinsicType(ST->getElementType(i), Infos, ArgTys,
                             DeferredChecks, IsDeferredCheck))
        return true;
    return false;
  }

  case IITDescriptor::Argument:
    // If this is the second occurrence of an argument,
    // verify that the later instance matches the previous instance.
    if (D.getArgumentNumber() < ArgTys.size())
      return Ty != ArgTys[D.getArgumentNumber()];

    if (D.getArgumentNumber() > ArgTys.size() ||
        D.getArgumentKind() == IITDescriptor::AK_MatchType)
      return IsDeferredCheck || DeferCheck(Ty);

    assert(D.getArgumentNumber() == ArgTys.size() && !IsDeferredCheck &&
           "Table consistency error");
    ArgTys.push_back(Ty);

    switch (D.getArgumentKind()) {
    case IITDescriptor::AK_Any: return false; // Success
    case IITDescriptor::AK_AnyInteger: return !Ty->isIntOrIntVectorTy();
    case IITDescriptor::AK_AnyFloat: return !Ty->isFPOrFPVectorTy();
    case IITDescriptor::AK_AnyVector: return !isa<VectorType>(Ty);
    case IITDescriptor::AK_AnyPointer: return !isa<PointerType>(Ty);
    default: break;
    }
    llvm_unreachable("all argument kinds not covered");

  case IITDescriptor::ExtendArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getExtendedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), 2 * ITy->getBitWidth());
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::TruncArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getTruncatedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), ITy->getBitWidth() / 2);
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::HalfVecArgument:
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    return !isa<VectorType>(ArgTys[D.getArgumentNumber()]) ||
           VectorType::getHalfElementsVectorType(
               cast<VectorType>(ArgTys[D.getArgumentNumber()])) != Ty;
  case IITDescriptor::SameVecWidthArgument: {
    if (D.getArgumentNumber() >= ArgTys.size()) {
      // Defer check and subsequent check for the vector element type.
      Infos = Infos.slice(1);
      return IsDeferredCheck || DeferCheck(Ty);
    }
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgType = dyn_cast<VectorType>(Ty);
    // Both must be vectors of the same number of elements or neither.
    if ((ReferenceType != nullptr) != (ThisArgType != nullptr))
      return true;
    Type *EltTy = Ty;
    if (ThisArgType) {
      if (ReferenceType->getElementCount() !=
          ThisArgType->getElementCount())
        return true;
      EltTy = ThisArgType->getElementType();
    }
    return matchIntrinsicType(EltTy, Infos, ArgTys, DeferredChecks,
                              IsDeferredCheck);
  }
  case IITDescriptor::PtrToArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    Type * ReferenceType = ArgTys[D.getArgumentNumber()];
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);
    return (!ThisArgType || ThisArgType->getElementType() != ReferenceType);
  }
  case IITDescriptor::PtrToElt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    VectorType * ReferenceType =
        dyn_cast<VectorType> (ArgTys[D.getArgumentNumber()]);
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);

    return (!ThisArgType || !ReferenceType ||
            ThisArgType->getElementType() != ReferenceType->getElementType());
  }
  case IITDescriptor::VecOfAnyPtrsToElt: {
    unsigned RefArgNumber = D.getRefArgNumber();
    if (RefArgNumber >= ArgTys.size()) {
      if (IsDeferredCheck)
        return true;
      // If forward referencing, already add the pointer-vector type and
      // defer the checks for later.
      ArgTys.push_back(Ty);
      return DeferCheck(Ty);
    }

    if (!IsDeferredCheck){
      assert(D.getOverloadArgNumber() == ArgTys.size() &&
             "Table consistency error");
      ArgTys.push_back(Ty);
    }

    // Verify the overloaded type "matches" the Ref type.
    // i.e. Ty is a vector with the same width as Ref.
    // Composed of pointers to the same element type as Ref.
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType ||
        (ReferenceType->getElementCount() != ThisArgVecTy->getElementCount()))
      return true;
    PointerType *ThisArgEltTy =
        dyn_cast<PointerType>(ThisArgVecTy->getElementType());
    if (!ThisArgEltTy)
      return true;
    return ThisArgEltTy->getElementType() != ReferenceType->getElementType();
  }
  case IITDescriptor::VecElementArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck ? true : DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    return !ReferenceType || Ty != ReferenceType->getElementType();
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(NewTy)) {
      int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
      NewTy = VectorType::getSubdividedVectorType(VTy, SubDivs);
      return Ty != NewTy;
    }
    return true;
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType)
      return true;
    return ThisArgVecTy != VectorType::getInteger(ReferenceType);
  }
  }
  llvm_unreachable("unhandled");
}

Intrinsic::MatchIntrinsicTypesResult
Intrinsic::matchIntrinsicSignature(FunctionType *FTy,
                                   ArrayRef<Intrinsic::IITDescriptor> &Infos,
                                   SmallVectorImpl<Type *> &ArgTys) {
  SmallVector<DeferredIntrinsicMatchPair, 2> DeferredChecks;
  if (matchIntrinsicType(FTy->getReturnType(), Infos, ArgTys, DeferredChecks,
                         false))
    return MatchIntrinsicTypes_NoMatchRet;

  unsigned NumDeferredReturnChecks = DeferredChecks.size();

  for (auto Ty : FTy->params())
    if (matchIntrinsicType(Ty, Infos, ArgTys, DeferredChecks, false))
      return MatchIntrinsicTypes_NoMatchArg;

  for (unsigned I = 0, E = DeferredChecks.size(); I != E; ++I) {
    DeferredIntrinsicMatchPair &Check = DeferredChecks[I];
    if (matchIntrinsicType(Check.first, Check.second, ArgTys, DeferredChecks,
                           true))
      return I < NumDeferredReturnChecks ? MatchIntrinsicTypes_NoMatchRet
                                         : MatchIntrinsicTypes_NoMatchArg;
  }

  return MatchIntrinsicTypes_Match;
}

bool
Intrinsic::matchIntrinsicVarArg(bool isVarArg,
                                ArrayRef<Intrinsic::IITDescriptor> &Infos) {
  // If there are no descriptors left, then it can't be a vararg.
  if (Infos.empty())
    return isVarArg;

  // There should be only one descriptor remaining at this point.
  if (Infos.size() != 1)
    return true;

  // Check and verify the descriptor.
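  // A lone VarArg descriptor matches only a vararg function type; anything
  // else is a mismatch.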
  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);
  if (D.Kind == IITDescriptor::VarArg)
    return !isVarArg;

  return true;
}

bool Intrinsic::getIntrinsicSignature(Function *F,
                                      SmallVectorImpl<Type *> &ArgTys) {
  Intrinsic::ID ID = F->getIntrinsicID();
  if (!ID)
    return false;

  SmallVector<Intrinsic::IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(ID, Table);
  ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;

  if (Intrinsic::matchIntrinsicSignature(F->getFunctionType(), TableRef,
                                         ArgTys) !=
      Intrinsic::MatchIntrinsicTypesResult::MatchIntrinsicTypes_Match) {
    return false;
  }
  if (Intrinsic::matchIntrinsicVarArg(F->getFunctionType()->isVarArg(),
                                      TableRef))
    return false;
  return true;
}

Optional<Function *> Intrinsic::remangleIntrinsicFunction(Function *F) {
  SmallVector<Type *, 4> ArgTys;
  if (!getIntrinsicSignature(F, ArgTys))
    return None;

  Intrinsic::ID ID = F->getIntrinsicID();
  StringRef Name = F->getName();
  if (Name ==
      Intrinsic::getName(ID, ArgTys, F->getParent(), F->getFunctionType()))
    return None;

  auto NewDecl = Intrinsic::getDeclaration(F->getParent(), ID, ArgTys);
  NewDecl->setCallingConv(F->getCallingConv());
  assert(NewDecl->getFunctionType() == F->getFunctionType() &&
         "Shouldn't change the signature");
  return NewDecl;
}

/// hasAddressTaken - returns true if there are any uses of this function
/// other than direct calls or invokes to it. Optionally ignores callback
/// uses, assume-like pointer annotation calls, and references in llvm.used
/// and llvm.compiler.used variables.
bool Function::hasAddressTaken(const User **PutOffender,
                               bool IgnoreCallbackUses,
                               bool IgnoreAssumeLikeCalls,
                               bool IgnoreLLVMUsed) const {
  for (const Use &U : uses()) {
    const User *FU = U.getUser();
    if (isa<BlockAddress>(FU))
      continue;

    if (IgnoreCallbackUses) {
      AbstractCallSite ACS(&U);
      if (ACS && ACS.isCallbackCall())
        continue;
    }

    const auto *Call = dyn_cast<CallBase>(FU);
    if (!Call) {
      if (IgnoreAssumeLikeCalls) {
        if (const auto *FI = dyn_cast<Instruction>(FU)) {
          if (FI->isCast() && !FI->user_empty() &&
              llvm::all_of(FU->users(), [](const User *U) {
                if (const auto *I = dyn_cast<IntrinsicInst>(U))
                  return I->isAssumeLikeIntrinsic();
                return false;
              }))
            continue;
        }
      }
      if (IgnoreLLVMUsed && !FU->user_empty()) {
        const User *FUU = FU;
        if (isa<BitCastOperator>(FU) && FU->hasOneUse() &&
            !FU->user_begin()->user_empty())
          FUU = *FU->user_begin();
        if (llvm::all_of(FUU->users(), [](const User *U) {
              if (const auto *GV = dyn_cast<GlobalVariable>(U))
                return GV->hasName() &&
                       (GV->getName().equals("llvm.compiler.used") ||
                        GV->getName().equals("llvm.used"));
              return false;
            }))
          continue;
      }
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
    if (!Call->isCallee(&U)) {
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
  }
  return false;
}

bool Function::isDefTriviallyDead() const {
  // Check the linkage.
  if (!hasLinkOnceLinkage() && !hasLocalLinkage() &&
      !hasAvailableExternallyLinkage())
    return false;

  // Check if the function is used by anything other than a blockaddress.
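  // A blockaddress that refers into this function does not by itself keep the
  // definition alive.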
  for (const User *U : users())
    if (!isa<BlockAddress>(U))
      return false;

  return true;
}

/// callsFunctionThatReturnsTwice - Return true if the function has a call to
/// setjmp or another function that gcc recognizes as "returning twice".
bool Function::callsFunctionThatReturnsTwice() const {
  for (const Instruction &I : instructions(this))
    if (const auto *Call = dyn_cast<CallBase>(&I))
      if (Call->hasFnAttr(Attribute::ReturnsTwice))
        return true;

  return false;
}

Constant *Function::getPersonalityFn() const {
  assert(hasPersonalityFn() && getNumOperands());
  return cast<Constant>(Op<0>());
}

void Function::setPersonalityFn(Constant *Fn) {
  setHungoffOperand<0>(Fn);
  setValueSubclassDataBit(3, Fn != nullptr);
}

Constant *Function::getPrefixData() const {
  assert(hasPrefixData() && getNumOperands());
  return cast<Constant>(Op<1>());
}

void Function::setPrefixData(Constant *PrefixData) {
  setHungoffOperand<1>(PrefixData);
  setValueSubclassDataBit(1, PrefixData != nullptr);
}

Constant *Function::getPrologueData() const {
  assert(hasPrologueData() && getNumOperands());
  return cast<Constant>(Op<2>());
}

void Function::setPrologueData(Constant *PrologueData) {
  setHungoffOperand<2>(PrologueData);
  setValueSubclassDataBit(2, PrologueData != nullptr);
}

void Function::allocHungoffUselist() {
  // If we've already allocated a uselist, stop here.
  if (getNumOperands())
    return;

  allocHungoffUses(3, /*IsPhi=*/false);
  setNumHungOffUseOperands(3);

  // Initialize the uselist with placeholder operands to allow traversal.
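  // The placeholder is a null i1* constant; setHungoffOperand() later installs
  // the real personality function, prefix data, or prologue data into slots
  // 0, 1, and 2 respectively.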
  auto *CPN = ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0));
  Op<0>().set(CPN);
  Op<1>().set(CPN);
  Op<2>().set(CPN);
}

template <int Idx>
void Function::setHungoffOperand(Constant *C) {
  if (C) {
    allocHungoffUselist();
    Op<Idx>().set(C);
  } else if (getNumOperands()) {
    Op<Idx>().set(
        ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0)));
  }
}

void Function::setValueSubclassDataBit(unsigned Bit, bool On) {
  assert(Bit < 16 && "SubclassData contains only 16 bits");
  if (On)
    setValueSubclassData(getSubclassDataFromValue() | (1 << Bit));
  else
    setValueSubclassData(getSubclassDataFromValue() & ~(1 << Bit));
}

void Function::setEntryCount(ProfileCount Count,
                             const DenseSet<GlobalValue::GUID> *S) {
  assert(Count.hasValue());
#if !defined(NDEBUG)
  auto PrevCount = getEntryCount();
  assert(!PrevCount.hasValue() || PrevCount.getType() == Count.getType());
#endif

  auto ImportGUIDs = getImportGUIDs();
  if (S == nullptr && ImportGUIDs.size())
    S = &ImportGUIDs;

  MDBuilder MDB(getContext());
  setMetadata(
      LLVMContext::MD_prof,
      MDB.createFunctionEntryCount(Count.getCount(), Count.isSynthetic(), S));
}

void Function::setEntryCount(uint64_t Count, Function::ProfileCountType Type,
                             const DenseSet<GlobalValue::GUID> *Imports) {
  setEntryCount(ProfileCount(Count, Type), Imports);
}

ProfileCount Function::getEntryCount(bool AllowSynthetic) const {
  MDNode *MD = getMetadata(LLVMContext::MD_prof);
  if (MD && MD->getOperand(0))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0))) {
      if (MDS->getString().equals("function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        // A value of -1 is used for SamplePGO when there were no samples.
        // Treat this the same as unknown.
        if (Count == (uint64_t)-1)
          return ProfileCount::getInvalid();
        return ProfileCount(Count, PCT_Real);
      } else if (AllowSynthetic &&
                 MDS->getString().equals("synthetic_function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        return ProfileCount(Count, PCT_Synthetic);
      }
    }
  return ProfileCount::getInvalid();
}

DenseSet<GlobalValue::GUID> Function::getImportGUIDs() const {
  DenseSet<GlobalValue::GUID> R;
  if (MDNode *MD = getMetadata(LLVMContext::MD_prof))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0)))
      if (MDS->getString().equals("function_entry_count"))
        for (unsigned i = 2; i < MD->getNumOperands(); i++)
          R.insert(mdconst::extract<ConstantInt>(MD->getOperand(i))
                       ->getValue()
                       .getZExtValue());
  return R;
}

void Function::setSectionPrefix(StringRef Prefix) {
  MDBuilder MDB(getContext());
  setMetadata(LLVMContext::MD_section_prefix,
              MDB.createFunctionSectionPrefix(Prefix));
}

Optional<StringRef> Function::getSectionPrefix() const {
  if (MDNode *MD = getMetadata(LLVMContext::MD_section_prefix)) {
    assert(cast<MDString>(MD->getOperand(0))
               ->getString()
               .equals("function_section_prefix") &&
           "Metadata does not match");
    return cast<MDString>(MD->getOperand(1))->getString();
  }
  return None;
}

bool Function::nullPointerIsDefined() const {
  return hasFnAttribute(Attribute::NullPointerIsValid);
}

bool llvm::NullPointerIsDefined(const Function *F, unsigned AS) {
  if (F && F->nullPointerIsDefined())
    return true;

  if (AS != 0)
    return true;

  return false;
}