//===- Function.cpp - Implement the Global object classes ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Function class for the IR library.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Function.h"
#include "SymbolTableListTraitsImpl.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/AbstractCallSite.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/IntrinsicsBPF.h"
#include "llvm/IR/IntrinsicsHexagon.h"
#include "llvm/IR/IntrinsicsMips.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/IntrinsicsPowerPC.h"
#include "llvm/IR/IntrinsicsR600.h"
#include "llvm/IR/IntrinsicsRISCV.h"
#include "llvm/IR/IntrinsicsS390.h"
#include "llvm/IR/IntrinsicsVE.h"
#include "llvm/IR/IntrinsicsWebAssembly.h"
#include "llvm/IR/IntrinsicsX86.h"
#include "llvm/IR/IntrinsicsXCore.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/SymbolTableListTraits.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

using namespace llvm;
using ProfileCount = Function::ProfileCount;

// Explicit instantiations of SymbolTableListTraits since some of the methods
// are not in the public header file...
template class llvm::SymbolTableListTraits<BasicBlock>;

//===----------------------------------------------------------------------===//
// Argument Implementation
//===----------------------------------------------------------------------===//

Argument::Argument(Type *Ty, const Twine &Name, Function *Par, unsigned ArgNo)
    : Value(Ty, Value::ArgumentVal), Parent(Par), ArgNo(ArgNo) {
  setName(Name);
}

void Argument::setParent(Function *parent) {
  Parent = parent;
}

bool Argument::hasNonNullAttr(bool AllowUndefOrPoison) const {
  if (!getType()->isPointerTy()) return false;
  if (getParent()->hasParamAttribute(getArgNo(), Attribute::NonNull) &&
      (AllowUndefOrPoison ||
       getParent()->hasParamAttribute(getArgNo(), Attribute::NoUndef)))
    return true;
  else if (getDereferenceableBytes() > 0 &&
           !NullPointerIsDefined(getParent(),
                                 getType()->getPointerAddressSpace()))
    return true;
  return false;
}

bool Argument::hasByValAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::ByVal);
}

bool Argument::hasByRefAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::ByRef);
}

bool Argument::hasSwiftSelfAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftSelf);
}

bool Argument::hasSwiftErrorAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftError);
}

bool Argument::hasInAllocaAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::InAlloca);
}

bool Argument::hasPreallocatedAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::Preallocated);
}

bool Argument::hasPassPointeeByValueCopyAttr() const {
  if (!getType()->isPointerTy()) return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated);
}

bool Argument::hasPointeeInMemoryValueAttr() const {
  if (!getType()->isPointerTy())
    return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::StructRet) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ByRef);
}

/// For a byval, sret, inalloca, or preallocated parameter, get the in-memory
/// parameter type.
static Type *getMemoryParamAllocType(AttributeSet ParamAttrs, Type *ArgTy) {
  // FIXME: All the type carrying attributes are mutually exclusive, so there
  // should be a single query to get the stored type that handles any of them.
  if (Type *ByValTy = ParamAttrs.getByValType())
    return ByValTy;
  if (Type *ByRefTy = ParamAttrs.getByRefType())
    return ByRefTy;
  if (Type *PreAllocTy = ParamAttrs.getPreallocatedType())
    return PreAllocTy;

  // FIXME: sret and inalloca always depend on the pointee element type. It's
  // also possible for byval to miss it.
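  // These type-carrying attributes only appear on pointer arguments, which is
  // why the cast<PointerType> below is expected to be safe.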
  if (ParamAttrs.hasAttribute(Attribute::InAlloca) ||
      ParamAttrs.hasAttribute(Attribute::ByVal) ||
      ParamAttrs.hasAttribute(Attribute::StructRet) ||
      ParamAttrs.hasAttribute(Attribute::Preallocated))
    return cast<PointerType>(ArgTy)->getElementType();

  return nullptr;
}

uint64_t Argument::getPassPointeeByValueCopySize(const DataLayout &DL) const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttributes(getArgNo());
  if (Type *MemTy = getMemoryParamAllocType(ParamAttrs, getType()))
    return DL.getTypeAllocSize(MemTy);
  return 0;
}

Type *Argument::getPointeeInMemoryValueType() const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttributes(getArgNo());
  return getMemoryParamAllocType(ParamAttrs, getType());
}

unsigned Argument::getParamAlignment() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlignment(getArgNo());
}

MaybeAlign Argument::getParamAlign() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlign(getArgNo());
}

Type *Argument::getParamByValType() const {
  assert(getType()->isPointerTy() && "Only pointers have byval types");
  return getParent()->getParamByValType(getArgNo());
}

Type *Argument::getParamStructRetType() const {
  assert(getType()->isPointerTy() && "Only pointers have sret types");
  return getParent()->getParamStructRetType(getArgNo());
}

Type *Argument::getParamByRefType() const {
  assert(getType()->isPointerTy() && "Only pointers have byref types");
  return getParent()->getParamByRefType(getArgNo());
}

uint64_t Argument::getDereferenceableBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableBytes(getArgNo());
}

uint64_t Argument::getDereferenceableOrNullBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableOrNullBytes(getArgNo());
}

bool Argument::hasNestAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::Nest);
}

bool Argument::hasNoAliasAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoAlias);
}

bool Argument::hasNoCaptureAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoCapture);
}

bool Argument::hasStructRetAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::StructRet);
}

bool Argument::hasInRegAttr() const {
  return hasAttribute(Attribute::InReg);
}

bool Argument::hasReturnedAttr() const {
  return hasAttribute(Attribute::Returned);
}

bool Argument::hasZExtAttr() const {
  return hasAttribute(Attribute::ZExt);
}

bool Argument::hasSExtAttr() const {
  return hasAttribute(Attribute::SExt);
}

bool Argument::onlyReadsMemory() const {
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ReadOnly) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ReadNone);
}

void Argument::addAttrs(AttrBuilder &B) {
  AttributeList AL = getParent()->getAttributes();
  AL = AL.addParamAttributes(Parent->getContext(), getArgNo(), B);
  getParent()->setAttributes(AL);
}

void Argument::addAttr(Attribute::AttrKind Kind) {
  getParent()->addParamAttr(getArgNo(), Kind);
}

void Argument::addAttr(Attribute Attr) {
  getParent()->addParamAttr(getArgNo(), Attr);
}

void Argument::removeAttr(Attribute::AttrKind Kind) {
  getParent()->removeParamAttr(getArgNo(), Kind);
}

bool Argument::hasAttribute(Attribute::AttrKind Kind) const {
  return getParent()->hasParamAttribute(getArgNo(), Kind);
}

Attribute Argument::getAttribute(Attribute::AttrKind Kind) const {
  return getParent()->getParamAttribute(getArgNo(), Kind);
}

//===----------------------------------------------------------------------===//
// Helper Methods in Function
//===----------------------------------------------------------------------===//

LLVMContext &Function::getContext() const {
  return getType()->getContext();
}

unsigned Function::getInstructionCount() const {
  unsigned NumInstrs = 0;
  for (const BasicBlock &BB : BasicBlocks)
    NumInstrs += std::distance(BB.instructionsWithoutDebug().begin(),
                               BB.instructionsWithoutDebug().end());
  return NumInstrs;
}

Function *Function::Create(FunctionType *Ty, LinkageTypes Linkage,
                           const Twine &N, Module &M) {
  return Create(Ty, Linkage, M.getDataLayout().getProgramAddressSpace(), N, &M);
}

void Function::removeFromParent() {
  getParent()->getFunctionList().remove(getIterator());
}

void Function::eraseFromParent() {
  getParent()->getFunctionList().erase(getIterator());
}

//===----------------------------------------------------------------------===//
// Function Implementation
//===----------------------------------------------------------------------===//

static unsigned computeAddrSpace(unsigned AddrSpace, Module *M) {
  // If AS == -1 and we are passed a valid module pointer we place the function
  // in the program address space. Otherwise we default to AS0.
  if (AddrSpace == static_cast<unsigned>(-1))
    return M ? M->getDataLayout().getProgramAddressSpace() : 0;
  return AddrSpace;
}

Function::Function(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace,
                   const Twine &name, Module *ParentModule)
    : GlobalObject(Ty, Value::FunctionVal,
                   OperandTraits<Function>::op_begin(this), 0, Linkage, name,
                   computeAddrSpace(AddrSpace, ParentModule)),
      NumArgs(Ty->getNumParams()) {
  assert(FunctionType::isValidReturnType(getReturnType()) &&
         "invalid return type");
  setGlobalObjectSubClassData(0);

  // We only need a symbol table for a function if the context keeps value
  // names.
  if (!getContext().shouldDiscardValueNames())
    SymTab = std::make_unique<ValueSymbolTable>();

  // If the function has arguments, mark them as lazily built.
  if (Ty->getNumParams())
    setValueSubclassData(1);   // Set the "has lazy arguments" bit.

  if (ParentModule)
    ParentModule->getFunctionList().push_back(this);

  HasLLVMReservedName = getName().startswith("llvm.");
  // Ensure intrinsics have the right parameter attributes.
  // Note, the IntID field will have been set in Value::setName if this
  // function name is a valid intrinsic ID.
  if (IntID)
    setAttributes(Intrinsic::getAttributes(getContext(), IntID));
}

Function::~Function() {
  dropAllReferences();    // After this it is safe to delete instructions.

  // Delete all of the method arguments and unlink from symbol table...
  if (Arguments)
    clearArguments();

  // Remove the function from the on-the-side GC table.
  clearGC();
}

void Function::BuildLazyArguments() const {
  // Create the arguments vector, all arguments start out unnamed.
  auto *FT = getFunctionType();
  if (NumArgs > 0) {
    Arguments = std::allocator<Argument>().allocate(NumArgs);
    for (unsigned i = 0, e = NumArgs; i != e; ++i) {
      Type *ArgTy = FT->getParamType(i);
      assert(!ArgTy->isVoidTy() && "Cannot have void typed arguments!");
      new (Arguments + i) Argument(ArgTy, "", const_cast<Function *>(this), i);
    }
  }

  // Clear the lazy arguments bit.
  unsigned SDC = getSubclassDataFromValue();
  SDC &= ~(1 << 0);
  const_cast<Function*>(this)->setValueSubclassData(SDC);
  assert(!hasLazyArguments());
}

static MutableArrayRef<Argument> makeArgArray(Argument *Args, size_t Count) {
  return MutableArrayRef<Argument>(Args, Count);
}

bool Function::isConstrainedFPIntrinsic() const {
  switch (getIntrinsicID()) {
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
#undef INSTRUCTION
  default:
    return false;
  }
}

void Function::clearArguments() {
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    A.setName("");
    A.~Argument();
  }
  std::allocator<Argument>().deallocate(Arguments, NumArgs);
  Arguments = nullptr;
}

void Function::stealArgumentListFrom(Function &Src) {
  assert(isDeclaration() && "Expected no references to current arguments");

  // Drop the current arguments, if any, and set the lazy argument bit.
  if (!hasLazyArguments()) {
    assert(llvm::all_of(makeArgArray(Arguments, NumArgs),
                        [](const Argument &A) { return A.use_empty(); }) &&
           "Expected arguments to be unused in declaration");
    clearArguments();
    setValueSubclassData(getSubclassDataFromValue() | (1 << 0));
  }

  // Nothing to steal if Src has lazy arguments.
  if (Src.hasLazyArguments())
    return;

  // Steal arguments from Src, and fix the lazy argument bits.
  assert(arg_size() == Src.arg_size());
  Arguments = Src.Arguments;
  Src.Arguments = nullptr;
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    // FIXME: This does the work of transferNodesFromList inefficiently.
    SmallString<128> Name;
    if (A.hasName())
      Name = A.getName();
    if (!Name.empty())
      A.setName("");
    A.setParent(this);
    if (!Name.empty())
      A.setName(Name);
  }

  setValueSubclassData(getSubclassDataFromValue() & ~(1 << 0));
  assert(!hasLazyArguments());
  Src.setValueSubclassData(Src.getSubclassDataFromValue() | (1 << 0));
}

// dropAllReferences() - This function causes all the subinstructions to "let
// go" of all references that they are maintaining.  This allows one to
// 'delete' a whole class at a time, even though there may be circular
// references...  first all references are dropped, and all use counts go to
// zero.  Then everything is deleted for real.  Note that no operations are
// valid on an object that has "dropped all references", except operator
// delete.
//
void Function::dropAllReferences() {
  setIsMaterializable(false);

  for (BasicBlock &BB : *this)
    BB.dropAllReferences();

  // Delete all basic blocks. They are now unused, except possibly by
  // blockaddresses, but BasicBlock's destructor takes care of those.
  while (!BasicBlocks.empty())
    BasicBlocks.begin()->eraseFromParent();

  // Drop uses of any optional data (real or placeholder).
  if (getNumOperands()) {
    User::dropAllReferences();
    setNumHungOffUseOperands(0);
    setValueSubclassData(getSubclassDataFromValue() & ~0xe);
  }

  // Metadata is stored in a side-table.
  clearMetadata();
}

void Function::addAttribute(unsigned i, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::addAttribute(unsigned i, Attribute Attr) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttribute(getContext(), i, Attr);
  setAttributes(PAL);
}

void Function::addAttributes(unsigned i, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttributes(getContext(), i, Attrs);
  setAttributes(PAL);
}

void Function::addParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::addParamAttr(unsigned ArgNo, Attribute Attr) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttribute(getContext(), ArgNo, Attr);
  setAttributes(PAL);
}

void Function::addParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttributes(getContext(), ArgNo, Attrs);
  setAttributes(PAL);
}

void Function::removeAttribute(unsigned i, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::removeAttribute(unsigned i, StringRef Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::removeAttributes(unsigned i, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttributes(getContext(), i, Attrs);
  setAttributes(PAL);
}

void Function::removeParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::removeParamAttr(unsigned ArgNo, StringRef Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::removeParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttributes(getContext(), ArgNo, Attrs);
  setAttributes(PAL);
}

void Function::addDereferenceableAttr(unsigned i, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableAttr(getContext(), i, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableParamAttr(unsigned ArgNo, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableParamAttr(getContext(), ArgNo, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableOrNullAttr(unsigned i, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableOrNullAttr(getContext(), i, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableOrNullParamAttr(unsigned ArgNo,
                                                 uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableOrNullParamAttr(getContext(), ArgNo, Bytes);
  setAttributes(PAL);
}

DenormalMode Function::getDenormalMode(const fltSemantics &FPType) const {
  if (&FPType == &APFloat::IEEEsingle()) {
    Attribute Attr = getFnAttribute("denormal-fp-math-f32");
    StringRef Val = Attr.getValueAsString();
    if (!Val.empty())
      return parseDenormalFPAttribute(Val);

    // If the f32 variant of the attribute isn't specified, try to use the
    // generic one.
  }

  Attribute Attr = getFnAttribute("denormal-fp-math");
  return parseDenormalFPAttribute(Attr.getValueAsString());
}

const std::string &Function::getGC() const {
  assert(hasGC() && "Function has no collector");
  return getContext().getGC(*this);
}

void Function::setGC(std::string Str) {
  setValueSubclassDataBit(14, !Str.empty());
  getContext().setGC(*this, std::move(Str));
}

void Function::clearGC() {
  if (!hasGC())
    return;
  getContext().deleteGC(*this);
  setValueSubclassDataBit(14, false);
}

bool Function::hasStackProtectorFnAttr() const {
  return hasFnAttribute(Attribute::StackProtect) ||
         hasFnAttribute(Attribute::StackProtectStrong) ||
         hasFnAttribute(Attribute::StackProtectReq);
}

/// Copy all additional attributes (those not needed to create a Function) from
/// the Function Src to this one.
void Function::copyAttributesFrom(const Function *Src) {
  GlobalObject::copyAttributesFrom(Src);
  setCallingConv(Src->getCallingConv());
  setAttributes(Src->getAttributes());
  if (Src->hasGC())
    setGC(Src->getGC());
  else
    clearGC();
  if (Src->hasPersonalityFn())
    setPersonalityFn(Src->getPersonalityFn());
  if (Src->hasPrefixData())
    setPrefixData(Src->getPrefixData());
  if (Src->hasPrologueData())
    setPrologueData(Src->getPrologueData());
}

/// Table of string intrinsic names indexed by enum value.
static const char * const IntrinsicNameTable[] = {
  "not_intrinsic",
#define GET_INTRINSIC_NAME_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_NAME_TABLE
};

/// Table of per-target intrinsic name tables.
#define GET_INTRINSIC_TARGET_DATA
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_TARGET_DATA

bool Function::isTargetIntrinsic(Intrinsic::ID IID) {
  return IID > TargetInfos[0].Count;
}

bool Function::isTargetIntrinsic() const {
  return isTargetIntrinsic(IntID);
}

/// Find the segment of \c IntrinsicNameTable for intrinsics with the same
/// target as \c Name, or the generic table if \c Name is not target specific.
///
/// Returns the relevant slice of \c IntrinsicNameTable
static ArrayRef<const char *> findTargetSubtable(StringRef Name) {
  assert(Name.startswith("llvm."));

  ArrayRef<IntrinsicTargetInfo> Targets(TargetInfos);
  // Drop "llvm." and take the first dotted component. That will be the target
  // if this is target specific.
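  // For example, a name beginning with "llvm.aarch64." selects the AArch64
  // sub-table, while a generic name such as "llvm.memcpy" matches no target
  // entry and falls back to the generic (first) table below.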
  StringRef Target = Name.drop_front(5).split('.').first;
  auto It = partition_point(
      Targets, [=](const IntrinsicTargetInfo &TI) { return TI.Name < Target; });
  // We've either found the target or just fall back to the generic set, which
  // is always first.
  const auto &TI = It != Targets.end() && It->Name == Target ? *It : Targets[0];
  return makeArrayRef(&IntrinsicNameTable[1] + TI.Offset, TI.Count);
}

/// This does the actual lookup of an intrinsic ID which
/// matches the given function name.
Intrinsic::ID Function::lookupIntrinsicID(StringRef Name) {
  ArrayRef<const char *> NameTable = findTargetSubtable(Name);
  int Idx = Intrinsic::lookupLLVMIntrinsicByName(NameTable, Name);
  if (Idx == -1)
    return Intrinsic::not_intrinsic;

  // Intrinsic IDs correspond to the location in IntrinsicNameTable, but we have
  // an index into a sub-table.
  int Adjust = NameTable.data() - IntrinsicNameTable;
  Intrinsic::ID ID = static_cast<Intrinsic::ID>(Idx + Adjust);

  // If the intrinsic is not overloaded, require an exact match. If it is
  // overloaded, require either exact or prefix match.
  const auto MatchSize = strlen(NameTable[Idx]);
  assert(Name.size() >= MatchSize && "Expected either exact or prefix match");
  bool IsExactMatch = Name.size() == MatchSize;
  return IsExactMatch || Intrinsic::isOverloaded(ID) ? ID
                                                     : Intrinsic::not_intrinsic;
}

void Function::recalculateIntrinsicID() {
  StringRef Name = getName();
  if (!Name.startswith("llvm.")) {
    HasLLVMReservedName = false;
    IntID = Intrinsic::not_intrinsic;
    return;
  }
  HasLLVMReservedName = true;
  IntID = lookupIntrinsicID(Name);
}

/// Returns a stable mangling for the type specified for use in the name
/// mangling scheme used by 'any' types in intrinsic signatures.  The mangling
/// of named types is simply their name.  Manglings for unnamed types consist
/// of a prefix ('p' for pointers, 'a' for arrays, 'f_' for functions)
/// combined with the mangling of their component types.  A vararg function
/// type will have a suffix of 'vararg'.  Since function types can contain
/// other function types, we close a function type mangling with suffix 'f'
/// which can't be confused with its prefix.  This ensures we don't have
/// collisions between two unrelated function types.  Otherwise, you might
/// parse ffXX as f(fXX) or f(fX)X.  (X is a placeholder for any other type.)
///
static std::string getMangledTypeStr(Type* Ty) {
  std::string Result;
  if (PointerType* PTyp = dyn_cast<PointerType>(Ty)) {
    Result += "p" + utostr(PTyp->getAddressSpace()) +
              getMangledTypeStr(PTyp->getElementType());
  } else if (ArrayType* ATyp = dyn_cast<ArrayType>(Ty)) {
    Result += "a" + utostr(ATyp->getNumElements()) +
              getMangledTypeStr(ATyp->getElementType());
  } else if (StructType *STyp = dyn_cast<StructType>(Ty)) {
    if (!STyp->isLiteral()) {
      Result += "s_";
      Result += STyp->getName();
    } else {
      Result += "sl_";
      for (auto Elem : STyp->elements())
        Result += getMangledTypeStr(Elem);
    }
    // Ensure nested structs are distinguishable.
    Result += "s";
  } else if (FunctionType *FT = dyn_cast<FunctionType>(Ty)) {
    Result += "f_" + getMangledTypeStr(FT->getReturnType());
    for (size_t i = 0; i < FT->getNumParams(); i++)
      Result += getMangledTypeStr(FT->getParamType(i));
    if (FT->isVarArg())
      Result += "vararg";
    // Ensure nested function types are distinguishable.
    Result += "f";
  } else if (VectorType* VTy = dyn_cast<VectorType>(Ty)) {
    ElementCount EC = VTy->getElementCount();
    if (EC.isScalable())
      Result += "nx";
    Result += "v" + utostr(EC.getKnownMinValue()) +
              getMangledTypeStr(VTy->getElementType());
  } else if (Ty) {
    switch (Ty->getTypeID()) {
    default: llvm_unreachable("Unhandled type");
    case Type::VoidTyID:      Result += "isVoid";   break;
    case Type::MetadataTyID:  Result += "Metadata"; break;
    case Type::HalfTyID:      Result += "f16";      break;
    case Type::BFloatTyID:    Result += "bf16";     break;
    case Type::FloatTyID:     Result += "f32";      break;
    case Type::DoubleTyID:    Result += "f64";      break;
    case Type::X86_FP80TyID:  Result += "f80";      break;
    case Type::FP128TyID:     Result += "f128";     break;
    case Type::PPC_FP128TyID: Result += "ppcf128";  break;
    case Type::X86_MMXTyID:   Result += "x86mmx";   break;
    case Type::X86_AMXTyID:   Result += "x86amx";   break;
    case Type::IntegerTyID:
      Result += "i" + utostr(cast<IntegerType>(Ty)->getBitWidth());
      break;
    }
  }
  return Result;
}

StringRef Intrinsic::getName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert(!Intrinsic::isOverloaded(id) &&
         "This version of getName does not support overloading");
  return IntrinsicNameTable[id];
}

std::string Intrinsic::getName(ID id, ArrayRef<Type*> Tys) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert((Tys.empty() || Intrinsic::isOverloaded(id)) &&
         "This version of getName is for overloaded intrinsics only");
  std::string Result(IntrinsicNameTable[id]);
  for (Type *Ty : Tys) {
    Result += "." + getMangledTypeStr(Ty);
  }
  return Result;
}

/// IIT_Info - These are enumerators that describe the entries returned by the
/// getIntrinsicInfoTableEntries function.
///
/// NOTE: This must be kept in synch with the copy in TblGen/IntrinsicEmitter!
enum IIT_Info {
  // Common values should be encoded with 0-15.
  IIT_Done = 0,
  IIT_I1   = 1,
  IIT_I8   = 2,
  IIT_I16  = 3,
  IIT_I32  = 4,
  IIT_I64  = 5,
  IIT_F16  = 6,
  IIT_F32  = 7,
  IIT_F64  = 8,
  IIT_V2   = 9,
  IIT_V4   = 10,
  IIT_V8   = 11,
  IIT_V16  = 12,
  IIT_V32  = 13,
  IIT_PTR  = 14,
  IIT_ARG  = 15,

  // Values from 16+ are only encodable with the inefficient encoding.
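  // These do not fit in the 4-bit nibbles packed into IIT_Table entries, so
  // intrinsics that use them are stored in the long encoding table instead
  // (see getIntrinsicInfoTableEntries below).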
  IIT_V64  = 16,
  IIT_MMX  = 17,
  IIT_TOKEN = 18,
  IIT_METADATA = 19,
  IIT_EMPTYSTRUCT = 20,
  IIT_STRUCT2 = 21,
  IIT_STRUCT3 = 22,
  IIT_STRUCT4 = 23,
  IIT_STRUCT5 = 24,
  IIT_EXTEND_ARG = 25,
  IIT_TRUNC_ARG = 26,
  IIT_ANYPTR = 27,
  IIT_V1   = 28,
  IIT_VARARG = 29,
  IIT_HALF_VEC_ARG = 30,
  IIT_SAME_VEC_WIDTH_ARG = 31,
  IIT_PTR_TO_ARG = 32,
  IIT_PTR_TO_ELT = 33,
  IIT_VEC_OF_ANYPTRS_TO_ELT = 34,
  IIT_I128 = 35,
  IIT_V512 = 36,
  IIT_V1024 = 37,
  IIT_STRUCT6 = 38,
  IIT_STRUCT7 = 39,
  IIT_STRUCT8 = 40,
  IIT_F128 = 41,
  IIT_VEC_ELEMENT = 42,
  IIT_SCALABLE_VEC = 43,
  IIT_SUBDIVIDE2_ARG = 44,
  IIT_SUBDIVIDE4_ARG = 45,
  IIT_VEC_OF_BITCASTS_TO_INT = 46,
  IIT_V128 = 47,
  IIT_BF16 = 48,
  IIT_STRUCT9 = 49,
  IIT_V256 = 50,
  IIT_AMX = 51
};

static void DecodeIITType(unsigned &NextElt, ArrayRef<unsigned char> Infos,
                          IIT_Info LastInfo,
                          SmallVectorImpl<Intrinsic::IITDescriptor> &OutputTable) {
  using namespace Intrinsic;

  bool IsScalableVector = (LastInfo == IIT_SCALABLE_VEC);

  IIT_Info Info = IIT_Info(Infos[NextElt++]);
  unsigned StructElts = 2;

  switch (Info) {
  case IIT_Done:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Void, 0));
    return;
  case IIT_VARARG:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VarArg, 0));
    return;
  case IIT_MMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::MMX, 0));
    return;
  case IIT_AMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AMX, 0));
    return;
  case IIT_TOKEN:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Token, 0));
    return;
  case IIT_METADATA:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Metadata, 0));
    return;
  case IIT_F16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Half, 0));
    return;
  case IIT_BF16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::BFloat, 0));
    return;
  case IIT_F32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Float, 0));
    return;
  case IIT_F64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Double, 0));
    return;
  case IIT_F128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Quad, 0));
    return;
  case IIT_I1:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 1));
    return;
  case IIT_I8:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8));
    return;
  case IIT_I16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 16));
    return;
  case IIT_I32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 32));
    return;
  case IIT_I64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 64));
    return;
  case IIT_I128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 128));
    return;
  case IIT_V1:
    OutputTable.push_back(IITDescriptor::getVector(1, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V2:
    OutputTable.push_back(IITDescriptor::getVector(2, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V4:
    OutputTable.push_back(IITDescriptor::getVector(4, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V8:
    OutputTable.push_back(IITDescriptor::getVector(8, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V16:
    OutputTable.push_back(IITDescriptor::getVector(16, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V32:
    OutputTable.push_back(IITDescriptor::getVector(32, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V64:
    OutputTable.push_back(IITDescriptor::getVector(64, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V128:
    OutputTable.push_back(IITDescriptor::getVector(128, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V256:
    OutputTable.push_back(IITDescriptor::getVector(256, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V512:
    OutputTable.push_back(IITDescriptor::getVector(512, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V1024:
    OutputTable.push_back(IITDescriptor::getVector(1024, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_PTR:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 0));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_ANYPTR: {  // [ANYPTR addrspace, subtype]
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer,
                                             Infos[NextElt++]));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Argument, ArgInfo));
    return;
  }
  case IIT_EXTEND_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::ExtendArgument,
                                             ArgInfo));
    return;
  }
  case IIT_TRUNC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::TruncArgument,
                                             ArgInfo));
    return;
  }
  case IIT_HALF_VEC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::HalfVecArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SAME_VEC_WIDTH_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::SameVecWidthArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ELT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToElt, ArgInfo));
    return;
  }
  case IIT_VEC_OF_ANYPTRS_TO_ELT: {
    unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecOfAnyPtrsToElt, ArgNo, RefNo));
    return;
  }
  case IIT_EMPTYSTRUCT:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0));
    return;
  case IIT_STRUCT9: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT8: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT7: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT6: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT5: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT4: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT3: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT2: {
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, StructElts));

    for (unsigned i = 0; i != StructElts; ++i)
      DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_SUBDIVIDE2_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide2Argument,
                                             ArgInfo));
    return;
  }
  case IIT_SUBDIVIDE4_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide4Argument,
                                             ArgInfo));
    return;
  }
  case IIT_VEC_ELEMENT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecElementArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SCALABLE_VEC: {
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_VEC_OF_BITCASTS_TO_INT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecOfBitcastsToInt,
                                             ArgInfo));
    return;
  }
  }
  llvm_unreachable("unhandled");
}

#define GET_INTRINSIC_GENERATOR_GLOBAL
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_GENERATOR_GLOBAL

void Intrinsic::getIntrinsicInfoTableEntries(ID id,
                                             SmallVectorImpl<IITDescriptor> &T) {
  // Check to see if the intrinsic's type was expressible by the table.
  unsigned TableVal = IIT_Table[id-1];

  // Decode the TableVal into an array of IITValues.
  SmallVector<unsigned char, 8> IITValues;
  ArrayRef<unsigned char> IITEntries;
  unsigned NextElt = 0;
  if ((TableVal >> 31) != 0) {
    // This is an offset into the IIT_LongEncodingTable.
    IITEntries = IIT_LongEncodingTable;

    // Strip sentinel bit.
    NextElt = (TableVal << 1) >> 1;
  } else {
    // Decode the TableVal into an array of IITValues. If the entry was encoded
    // into a single word in the table itself, decode it now.
    do {
      IITValues.push_back(TableVal & 0xF);
      TableVal >>= 4;
    } while (TableVal);

    IITEntries = IITValues;
    NextElt = 0;
  }

  // Okay, decode the table into the output vector of IITDescriptors.
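  // The first DecodeIITType call below decodes the return type; each
  // subsequent call decodes one parameter, until the encoding is exhausted or
  // an IIT_Done terminator is hit.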
  DecodeIITType(NextElt, IITEntries, IIT_Done, T);
  while (NextElt != IITEntries.size() && IITEntries[NextElt] != 0)
    DecodeIITType(NextElt, IITEntries, IIT_Done, T);
}

static Type *DecodeFixedType(ArrayRef<Intrinsic::IITDescriptor> &Infos,
                             ArrayRef<Type*> Tys, LLVMContext &Context) {
  using namespace Intrinsic;

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return Type::getVoidTy(Context);
  case IITDescriptor::VarArg: return Type::getVoidTy(Context);
  case IITDescriptor::MMX: return Type::getX86_MMXTy(Context);
  case IITDescriptor::AMX: return Type::getX86_AMXTy(Context);
  case IITDescriptor::Token: return Type::getTokenTy(Context);
  case IITDescriptor::Metadata: return Type::getMetadataTy(Context);
  case IITDescriptor::Half: return Type::getHalfTy(Context);
  case IITDescriptor::BFloat: return Type::getBFloatTy(Context);
  case IITDescriptor::Float: return Type::getFloatTy(Context);
  case IITDescriptor::Double: return Type::getDoubleTy(Context);
  case IITDescriptor::Quad: return Type::getFP128Ty(Context);

  case IITDescriptor::Integer:
    return IntegerType::get(Context, D.Integer_Width);
  case IITDescriptor::Vector:
    return VectorType::get(DecodeFixedType(Infos, Tys, Context),
                           D.Vector_Width);
  case IITDescriptor::Pointer:
    return PointerType::get(DecodeFixedType(Infos, Tys, Context),
                            D.Pointer_AddressSpace);
  case IITDescriptor::Struct: {
    SmallVector<Type *, 8> Elts;
    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      Elts.push_back(DecodeFixedType(Infos, Tys, Context));
    return StructType::get(Context, Elts);
  }
  case IITDescriptor::Argument:
    return Tys[D.getArgumentNumber()];
  case IITDescriptor::ExtendArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getExtendedElementVectorType(VTy);

    return IntegerType::get(Context, 2 * cast<IntegerType>(Ty)->getBitWidth());
  }
  case IITDescriptor::TruncArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getTruncatedElementVectorType(VTy);

    IntegerType *ITy = cast<IntegerType>(Ty);
    assert(ITy->getBitWidth() % 2 == 0);
    return IntegerType::get(Context, ITy->getBitWidth() / 2);
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
    return VectorType::getSubdividedVectorType(VTy, SubDivs);
  }
  case IITDescriptor::HalfVecArgument:
    return VectorType::getHalfElementsVectorType(
        cast<VectorType>(Tys[D.getArgumentNumber()]));
  case IITDescriptor::SameVecWidthArgument: {
    Type *EltTy = DecodeFixedType(Infos, Tys, Context);
    Type *Ty = Tys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::get(EltTy, VTy->getElementCount());
    return EltTy;
  }
  case IITDescriptor::PtrToArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    return PointerType::getUnqual(Ty);
  }
  case IITDescriptor::PtrToElt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    if (!VTy)
      llvm_unreachable("Expected an argument of Vector Type");
    Type *EltTy = VTy->getElementType();
    return PointerType::getUnqual(EltTy);
  }
  case IITDescriptor::VecElementArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VTy->getElementType();
    llvm_unreachable("Expected an argument of Vector Type");
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    return VectorType::getInteger(VTy);
  }
  case IITDescriptor::VecOfAnyPtrsToElt:
    // Return the overloaded type (which determines the pointer's address
    // space).
    return Tys[D.getOverloadArgNumber()];
  }
  llvm_unreachable("unhandled");
}

FunctionType *Intrinsic::getType(LLVMContext &Context,
                                 ID id, ArrayRef<Type*> Tys) {
  SmallVector<IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(id, Table);

  ArrayRef<IITDescriptor> TableRef = Table;
  Type *ResultTy = DecodeFixedType(TableRef, Tys, Context);

  SmallVector<Type*, 8> ArgTys;
  while (!TableRef.empty())
    ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context));

  // DecodeFixedType returns Void for IITDescriptor::Void and
  // IITDescriptor::VarArg. If we see a void type as the type of the last
  // argument, it is a vararg intrinsic.
  if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) {
    ArgTys.pop_back();
    return FunctionType::get(ResultTy, ArgTys, true);
  }
  return FunctionType::get(ResultTy, ArgTys, false);
}

bool Intrinsic::isOverloaded(ID id) {
#define GET_INTRINSIC_OVERLOAD_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_OVERLOAD_TABLE
}

bool Intrinsic::isLeaf(ID id) {
  switch (id) {
  default:
    return true;

  case Intrinsic::experimental_gc_statepoint:
  case Intrinsic::experimental_patchpoint_void:
  case Intrinsic::experimental_patchpoint_i64:
    return false;
  }
}

/// This defines the "Intrinsic::getAttributes(ID id)" method.
#define GET_INTRINSIC_ATTRIBUTES
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_ATTRIBUTES

Function *Intrinsic::getDeclaration(Module *M, ID id, ArrayRef<Type*> Tys) {
  // There can never be multiple globals with the same name of different types,
  // because intrinsics must be a specific type.
  return cast<Function>(
      M->getOrInsertFunction(
           Tys.empty() ? getName(id) : getName(id, Tys),
           getType(M->getContext(), id, Tys))
          .getCallee());
}

// This defines the "Intrinsic::getIntrinsicForGCCBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN

// This defines the "Intrinsic::getIntrinsicForMSBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_MS_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_MS_BUILTIN

using DeferredIntrinsicMatchPair =
    std::pair<Type *, ArrayRef<Intrinsic::IITDescriptor>>;

static bool matchIntrinsicType(
    Type *Ty, ArrayRef<Intrinsic::IITDescriptor> &Infos,
    SmallVectorImpl<Type *> &ArgTys,
    SmallVectorImpl<DeferredIntrinsicMatchPair> &DeferredChecks,
    bool IsDeferredCheck) {
  using namespace Intrinsic;

  // If we ran out of descriptors, there are too many arguments.
  if (Infos.empty()) return true;

  // Do this before slicing off the 'front' part.
  auto InfosRef = Infos;
  auto DeferCheck = [&DeferredChecks, &InfosRef](Type *T) {
    DeferredChecks.emplace_back(T, InfosRef);
    return false;
  };

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return !Ty->isVoidTy();
  case IITDescriptor::VarArg: return true;
  case IITDescriptor::MMX: return !Ty->isX86_MMXTy();
  case IITDescriptor::AMX: return !Ty->isX86_AMXTy();
  case IITDescriptor::Token: return !Ty->isTokenTy();
  case IITDescriptor::Metadata: return !Ty->isMetadataTy();
  case IITDescriptor::Half: return !Ty->isHalfTy();
  case IITDescriptor::BFloat: return !Ty->isBFloatTy();
  case IITDescriptor::Float: return !Ty->isFloatTy();
  case IITDescriptor::Double: return !Ty->isDoubleTy();
  case IITDescriptor::Quad: return !Ty->isFP128Ty();
  case IITDescriptor::Integer: return !Ty->isIntegerTy(D.Integer_Width);
  case IITDescriptor::Vector: {
    VectorType *VT = dyn_cast<VectorType>(Ty);
    return !VT || VT->getElementCount() != D.Vector_Width ||
           matchIntrinsicType(VT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }
  case IITDescriptor::Pointer: {
    PointerType *PT = dyn_cast<PointerType>(Ty);
    return !PT || PT->getAddressSpace() != D.Pointer_AddressSpace ||
           matchIntrinsicType(PT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }

  case IITDescriptor::Struct: {
    StructType *ST = dyn_cast<StructType>(Ty);
    if (!ST || ST->getNumElements() != D.Struct_NumElements)
      return true;

    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      if (matchIntrinsicType(ST->getElementType(i), Infos, ArgTys,
                             DeferredChecks, IsDeferredCheck))
        return true;
    return false;
  }

  case IITDescriptor::Argument:
    // If this is the second occurrence of an argument,
    // verify that the later instance matches the previous instance.
    if (D.getArgumentNumber() < ArgTys.size())
      return Ty != ArgTys[D.getArgumentNumber()];

    if (D.getArgumentNumber() > ArgTys.size() ||
        D.getArgumentKind() == IITDescriptor::AK_MatchType)
      return IsDeferredCheck || DeferCheck(Ty);

    assert(D.getArgumentNumber() == ArgTys.size() && !IsDeferredCheck &&
           "Table consistency error");
    ArgTys.push_back(Ty);

    switch (D.getArgumentKind()) {
    case IITDescriptor::AK_Any:        return false; // Success
    case IITDescriptor::AK_AnyInteger: return !Ty->isIntOrIntVectorTy();
    case IITDescriptor::AK_AnyFloat:   return !Ty->isFPOrFPVectorTy();
    case IITDescriptor::AK_AnyVector:  return !isa<VectorType>(Ty);
    case IITDescriptor::AK_AnyPointer: return !isa<PointerType>(Ty);
    default:                           break;
    }
    llvm_unreachable("all argument kinds not covered");

  case IITDescriptor::ExtendArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getExtendedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), 2 * ITy->getBitWidth());
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::TruncArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getTruncatedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), ITy->getBitWidth() / 2);
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::HalfVecArgument:
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    return !isa<VectorType>(ArgTys[D.getArgumentNumber()]) ||
           VectorType::getHalfElementsVectorType(
               cast<VectorType>(ArgTys[D.getArgumentNumber()])) != Ty;
  case IITDescriptor::SameVecWidthArgument: {
    if (D.getArgumentNumber() >= ArgTys.size()) {
      // Defer check and subsequent check for the vector element type.
      Infos = Infos.slice(1);
      return IsDeferredCheck || DeferCheck(Ty);
    }
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgType = dyn_cast<VectorType>(Ty);
    // Both must be vectors of the same number of elements or neither.
    if ((ReferenceType != nullptr) != (ThisArgType != nullptr))
      return true;
    Type *EltTy = Ty;
    if (ThisArgType) {
      if (ReferenceType->getElementCount() !=
          ThisArgType->getElementCount())
        return true;
      EltTy = ThisArgType->getElementType();
    }
    return matchIntrinsicType(EltTy, Infos, ArgTys, DeferredChecks,
                              IsDeferredCheck);
  }
  case IITDescriptor::PtrToArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    Type *ReferenceType = ArgTys[D.getArgumentNumber()];
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);
    return (!ThisArgType || ThisArgType->getElementType() != ReferenceType);
  }
  case IITDescriptor::PtrToElt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    VectorType *ReferenceType =
        dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);

    return (!ThisArgType || !ReferenceType ||
            ThisArgType->getElementType() != ReferenceType->getElementType());
  }
  case IITDescriptor::VecOfAnyPtrsToElt: {
    unsigned RefArgNumber = D.getRefArgNumber();
    if (RefArgNumber >= ArgTys.size()) {
      if (IsDeferredCheck)
        return true;
      // If forward referencing, already add the pointer-vector type and
      // defer the checks for later.
      ArgTys.push_back(Ty);
      return DeferCheck(Ty);
    }

    if (!IsDeferredCheck) {
      assert(D.getOverloadArgNumber() == ArgTys.size() &&
             "Table consistency error");
      ArgTys.push_back(Ty);
    }

    // Verify the overloaded type "matches" the Ref type: Ty must be a vector
    // with the same width as Ref, composed of pointers to the same element
    // type as Ref.
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType ||
        (ReferenceType->getElementCount() != ThisArgVecTy->getElementCount()))
      return true;
    PointerType *ThisArgEltTy =
        dyn_cast<PointerType>(ThisArgVecTy->getElementType());
    if (!ThisArgEltTy)
      return true;
    return ThisArgEltTy->getElementType() != ReferenceType->getElementType();
  }
  case IITDescriptor::VecElementArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck ? true : DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    return !ReferenceType || Ty != ReferenceType->getElementType();
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(NewTy)) {
      int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
      NewTy = VectorType::getSubdividedVectorType(VTy, SubDivs);
      return Ty != NewTy;
    }
    return true;
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType)
      return true;
    return ThisArgVecTy != VectorType::getInteger(ReferenceType);
  }
  }
  llvm_unreachable("unhandled");
}

Intrinsic::MatchIntrinsicTypesResult
Intrinsic::matchIntrinsicSignature(FunctionType *FTy,
                                   ArrayRef<Intrinsic::IITDescriptor> &Infos,
                                   SmallVectorImpl<Type *> &ArgTys) {
  SmallVector<DeferredIntrinsicMatchPair, 2> DeferredChecks;
  if (matchIntrinsicType(FTy->getReturnType(), Infos, ArgTys, DeferredChecks,
                         false))
    return MatchIntrinsicTypes_NoMatchRet;

  unsigned NumDeferredReturnChecks = DeferredChecks.size();

  for (auto Ty : FTy->params())
    if (matchIntrinsicType(Ty, Infos, ArgTys, DeferredChecks, false))
      return MatchIntrinsicTypes_NoMatchArg;

  for (unsigned I = 0, E = DeferredChecks.size(); I != E; ++I) {
    DeferredIntrinsicMatchPair &Check = DeferredChecks[I];
    if (matchIntrinsicType(Check.first, Check.second, ArgTys, DeferredChecks,
                           true))
      return I < NumDeferredReturnChecks ? MatchIntrinsicTypes_NoMatchRet
                                         : MatchIntrinsicTypes_NoMatchArg;
  }

  return MatchIntrinsicTypes_Match;
}

bool
Intrinsic::matchIntrinsicVarArg(bool isVarArg,
                                ArrayRef<Intrinsic::IITDescriptor> &Infos) {
  // If there are no descriptors left, then it can't be a vararg.
  if (Infos.empty())
    return isVarArg;

  // There should be only one descriptor remaining at this point.
  if (Infos.size() != 1)
    return true;

  // Check and verify the descriptor.
  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);
  if (D.Kind == IITDescriptor::VarArg)
    return !isVarArg;

  return true;
}

bool Intrinsic::getIntrinsicSignature(Function *F,
                                      SmallVectorImpl<Type *> &ArgTys) {
  Intrinsic::ID ID = F->getIntrinsicID();
  if (!ID)
    return false;

  SmallVector<Intrinsic::IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(ID, Table);
  ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;

  if (Intrinsic::matchIntrinsicSignature(F->getFunctionType(), TableRef,
                                         ArgTys) !=
      Intrinsic::MatchIntrinsicTypesResult::MatchIntrinsicTypes_Match) {
    return false;
  }
  if (Intrinsic::matchIntrinsicVarArg(F->getFunctionType()->isVarArg(),
                                      TableRef))
    return false;
  return true;
}

Optional<Function *> Intrinsic::remangleIntrinsicFunction(Function *F) {
  SmallVector<Type *, 4> ArgTys;
  if (!getIntrinsicSignature(F, ArgTys))
    return None;

  Intrinsic::ID ID = F->getIntrinsicID();
  StringRef Name = F->getName();
  if (Name == Intrinsic::getName(ID, ArgTys))
    return None;

  auto NewDecl = Intrinsic::getDeclaration(F->getParent(), ID, ArgTys);
  NewDecl->setCallingConv(F->getCallingConv());
  assert(NewDecl->getFunctionType() == F->getFunctionType() &&
         "Shouldn't change the signature");
  return NewDecl;
}

/// hasAddressTaken - returns true if there are any uses of this function
/// other than direct calls or invokes to it. Optionally ignores callback
/// uses, assume-like pointer annotation calls, and references in llvm.used
/// and llvm.compiler.used variables.
bool Function::hasAddressTaken(const User **PutOffender,
                               bool IgnoreCallbackUses,
                               bool IgnoreAssumeLikeCalls,
                               bool IgnoreLLVMUsed) const {
  for (const Use &U : uses()) {
    const User *FU = U.getUser();
    if (isa<BlockAddress>(FU))
      continue;

    if (IgnoreCallbackUses) {
      AbstractCallSite ACS(&U);
      if (ACS && ACS.isCallbackCall())
        continue;
    }

    const auto *Call = dyn_cast<CallBase>(FU);
    if (!Call) {
      if (IgnoreAssumeLikeCalls) {
        if (const auto *FI = dyn_cast<Instruction>(FU)) {
          if (FI->isCast() && !FI->user_empty() &&
              llvm::all_of(FU->users(), [](const User *U) {
                if (const auto *I = dyn_cast<IntrinsicInst>(U))
                  return I->isAssumeLikeIntrinsic();
                return false;
              }))
            continue;
        }
      }
      if (IgnoreLLVMUsed && !FU->user_empty()) {
        const User *FUU = FU;
        if (isa<BitCastOperator>(FU) && FU->hasOneUse() &&
            !FU->user_begin()->user_empty())
          FUU = *FU->user_begin();
        if (llvm::all_of(FUU->users(), [](const User *U) {
              if (const auto *GV = dyn_cast<GlobalVariable>(U))
                return GV->hasName() &&
                       (GV->getName().equals("llvm.compiler.used") ||
                        GV->getName().equals("llvm.used"));
              return false;
            }))
          continue;
      }
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
    if (!Call->isCallee(&U)) {
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
  }
  return false;
}

bool Function::isDefTriviallyDead() const {
  // Check the linkage.
  if (!hasLinkOnceLinkage() && !hasLocalLinkage() &&
      !hasAvailableExternallyLinkage())
    return false;

  // Check if the function is used by anything other than a blockaddress.
  for (const User *U : users())
    if (!isa<BlockAddress>(U))
      return false;

  return true;
}

/// callsFunctionThatReturnsTwice - Return true if the function has a call to
/// setjmp or another function that gcc recognizes as "returning twice".
bool Function::callsFunctionThatReturnsTwice() const {
  for (const Instruction &I : instructions(this))
    if (const auto *Call = dyn_cast<CallBase>(&I))
      if (Call->hasFnAttr(Attribute::ReturnsTwice))
        return true;

  return false;
}

Constant *Function::getPersonalityFn() const {
  assert(hasPersonalityFn() && getNumOperands());
  return cast<Constant>(Op<0>());
}

void Function::setPersonalityFn(Constant *Fn) {
  setHungoffOperand<0>(Fn);
  setValueSubclassDataBit(3, Fn != nullptr);
}

Constant *Function::getPrefixData() const {
  assert(hasPrefixData() && getNumOperands());
  return cast<Constant>(Op<1>());
}

void Function::setPrefixData(Constant *PrefixData) {
  setHungoffOperand<1>(PrefixData);
  setValueSubclassDataBit(1, PrefixData != nullptr);
}

Constant *Function::getPrologueData() const {
  assert(hasPrologueData() && getNumOperands());
  return cast<Constant>(Op<2>());
}

void Function::setPrologueData(Constant *PrologueData) {
  setHungoffOperand<2>(PrologueData);
  setValueSubclassDataBit(2, PrologueData != nullptr);
}

void Function::allocHungoffUselist() {
  // If we've already allocated a uselist, stop here.
  if (getNumOperands())
    return;

  allocHungoffUses(3, /*IsPhi=*/ false);
  setNumHungOffUseOperands(3);

  // Initialize the uselist with placeholder operands to allow traversal.
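  // The i1* null constants are inert placeholders; whether a slot actually
  // holds personality/prefix/prologue data is tracked by the subclass data
  // bits set in the corresponding setters, not by the operands themselves.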
  auto *CPN = ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0));
  Op<0>().set(CPN);
  Op<1>().set(CPN);
  Op<2>().set(CPN);
}

template <int Idx>
void Function::setHungoffOperand(Constant *C) {
  if (C) {
    allocHungoffUselist();
    Op<Idx>().set(C);
  } else if (getNumOperands()) {
    Op<Idx>().set(
        ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0)));
  }
}

void Function::setValueSubclassDataBit(unsigned Bit, bool On) {
  assert(Bit < 16 && "SubclassData contains only 16 bits");
  if (On)
    setValueSubclassData(getSubclassDataFromValue() | (1 << Bit));
  else
    setValueSubclassData(getSubclassDataFromValue() & ~(1 << Bit));
}

void Function::setEntryCount(ProfileCount Count,
                             const DenseSet<GlobalValue::GUID> *S) {
  assert(Count.hasValue());
#if !defined(NDEBUG)
  auto PrevCount = getEntryCount();
  assert(!PrevCount.hasValue() || PrevCount.getType() == Count.getType());
#endif

  auto ImportGUIDs = getImportGUIDs();
  if (S == nullptr && ImportGUIDs.size())
    S = &ImportGUIDs;

  MDBuilder MDB(getContext());
  setMetadata(
      LLVMContext::MD_prof,
      MDB.createFunctionEntryCount(Count.getCount(), Count.isSynthetic(), S));
}

void Function::setEntryCount(uint64_t Count, Function::ProfileCountType Type,
                             const DenseSet<GlobalValue::GUID> *Imports) {
  setEntryCount(ProfileCount(Count, Type), Imports);
}

ProfileCount Function::getEntryCount(bool AllowSynthetic) const {
  MDNode *MD = getMetadata(LLVMContext::MD_prof);
  if (MD && MD->getOperand(0))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0))) {
      if (MDS->getString().equals("function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        // A value of -1 is used for SamplePGO when there were no samples.
        // Treat this the same as unknown.
        if (Count == (uint64_t)-1)
          return ProfileCount::getInvalid();
        return ProfileCount(Count, PCT_Real);
      } else if (AllowSynthetic &&
                 MDS->getString().equals("synthetic_function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        return ProfileCount(Count, PCT_Synthetic);
      }
    }
  return ProfileCount::getInvalid();
}

DenseSet<GlobalValue::GUID> Function::getImportGUIDs() const {
  DenseSet<GlobalValue::GUID> R;
  if (MDNode *MD = getMetadata(LLVMContext::MD_prof))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0)))
      if (MDS->getString().equals("function_entry_count"))
        for (unsigned i = 2; i < MD->getNumOperands(); i++)
          R.insert(mdconst::extract<ConstantInt>(MD->getOperand(i))
                       ->getValue()
                       .getZExtValue());
  return R;
}

void Function::setSectionPrefix(StringRef Prefix) {
  MDBuilder MDB(getContext());
  setMetadata(LLVMContext::MD_section_prefix,
              MDB.createFunctionSectionPrefix(Prefix));
}

Optional<StringRef> Function::getSectionPrefix() const {
  if (MDNode *MD = getMetadata(LLVMContext::MD_section_prefix)) {
    assert(cast<MDString>(MD->getOperand(0))
               ->getString()
               .equals("function_section_prefix") &&
           "Metadata does not match");
    return cast<MDString>(MD->getOperand(1))->getString();
  }
  return None;
}

bool Function::nullPointerIsDefined() const {
  return hasFnAttribute(Attribute::NullPointerIsValid);
}

bool llvm::NullPointerIsDefined(const Function *F, unsigned AS) {
  if (F && F->nullPointerIsDefined())
    return true;

  // Conservatively treat null as potentially dereferenceable in any non-zero
  // address space.
  if (AS != 0)
    return true;

  return false;
}
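// A minimal sketch of how a client might consume the entry-count API defined
// above; the helper name hasRealEntryCount is illustrative only.
LLVM_ATTRIBUTE_UNUSED
static bool hasRealEntryCount(const Function &F) {
  // getEntryCount() returns an invalid ProfileCount when no
  // "function_entry_count" metadata is attached; synthetic counts are only
  // reported when AllowSynthetic is true.
  ProfileCount Count = F.getEntryCount(/*AllowSynthetic=*/false);
  return Count.hasValue() && Count.getType() == Function::PCT_Real;
}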