//===- Function.cpp - Implement the Global object classes ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Function class for the IR library.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Function.h"
#include "SymbolTableListTraitsImpl.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/AbstractCallSite.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/IntrinsicsBPF.h"
#include "llvm/IR/IntrinsicsHexagon.h"
#include "llvm/IR/IntrinsicsMips.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/IntrinsicsPowerPC.h"
#include "llvm/IR/IntrinsicsR600.h"
#include "llvm/IR/IntrinsicsRISCV.h"
#include "llvm/IR/IntrinsicsS390.h"
#include "llvm/IR/IntrinsicsVE.h"
#include "llvm/IR/IntrinsicsWebAssembly.h"
#include "llvm/IR/IntrinsicsX86.h"
#include "llvm/IR/IntrinsicsXCore.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/SymbolTableListTraits.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

using namespace llvm;
using ProfileCount = Function::ProfileCount;

// Explicit instantiations of SymbolTableListTraits since some of the methods
// are not in the public header file...
template class llvm::SymbolTableListTraits<BasicBlock>;

//===----------------------------------------------------------------------===//
// Argument Implementation
//===----------------------------------------------------------------------===//

Argument::Argument(Type *Ty, const Twine &Name, Function *Par, unsigned ArgNo)
    : Value(Ty, Value::ArgumentVal), Parent(Par), ArgNo(ArgNo) {
  setName(Name);
}

void Argument::setParent(Function *parent) {
  Parent = parent;
}

bool Argument::hasNonNullAttr(bool AllowUndefOrPoison) const {
  if (!getType()->isPointerTy()) return false;
  if (getParent()->hasParamAttribute(getArgNo(), Attribute::NonNull) &&
      (AllowUndefOrPoison ||
       getParent()->hasParamAttribute(getArgNo(), Attribute::NoUndef)))
    return true;
  else if (getDereferenceableBytes() > 0 &&
           !NullPointerIsDefined(getParent(),
                                 getType()->getPointerAddressSpace()))
    return true;
  return false;
}

bool Argument::hasByValAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::ByVal);
}

bool Argument::hasByRefAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::ByRef);
}

bool Argument::hasSwiftSelfAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftSelf);
}

bool Argument::hasSwiftErrorAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftError);
}

bool Argument::hasInAllocaAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::InAlloca);
}

bool Argument::hasPreallocatedAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::Preallocated);
}

bool Argument::hasPassPointeeByValueCopyAttr() const {
  if (!getType()->isPointerTy()) return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated);
}

bool Argument::hasPointeeInMemoryValueAttr() const {
  if (!getType()->isPointerTy())
    return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::StructRet) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ByRef);
}

/// For a byval, sret, inalloca, or preallocated parameter, get the in-memory
/// parameter type.
static Type *getMemoryParamAllocType(AttributeSet ParamAttrs, Type *ArgTy) {
  // FIXME: All the type carrying attributes are mutually exclusive, so there
  // should be a single query to get the stored type that handles any of them.
  if (Type *ByValTy = ParamAttrs.getByValType())
    return ByValTy;
  if (Type *ByRefTy = ParamAttrs.getByRefType())
    return ByRefTy;
  if (Type *PreAllocTy = ParamAttrs.getPreallocatedType())
    return PreAllocTy;

  // FIXME: sret and inalloca always depend on the pointee element type. It's
  // also possible for byval to miss it.
  if (ParamAttrs.hasAttribute(Attribute::InAlloca) ||
      ParamAttrs.hasAttribute(Attribute::ByVal) ||
      ParamAttrs.hasAttribute(Attribute::StructRet) ||
      ParamAttrs.hasAttribute(Attribute::Preallocated))
    return cast<PointerType>(ArgTy)->getElementType();

  return nullptr;
}

uint64_t Argument::getPassPointeeByValueCopySize(const DataLayout &DL) const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttributes(getArgNo());
  if (Type *MemTy = getMemoryParamAllocType(ParamAttrs, getType()))
    return DL.getTypeAllocSize(MemTy);
  return 0;
}

Type *Argument::getPointeeInMemoryValueType() const {
  AttributeSet ParamAttrs =
      getParent()->getAttributes().getParamAttributes(getArgNo());
  return getMemoryParamAllocType(ParamAttrs, getType());
}

unsigned Argument::getParamAlignment() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlignment(getArgNo());
}

MaybeAlign Argument::getParamAlign() const {
  assert(getType()->isPointerTy() && "Only pointers have alignments");
  return getParent()->getParamAlign(getArgNo());
}

Type *Argument::getParamByValType() const {
  assert(getType()->isPointerTy() && "Only pointers have byval types");
  return getParent()->getParamByValType(getArgNo());
}

Type *Argument::getParamStructRetType() const {
  assert(getType()->isPointerTy() && "Only pointers have sret types");
  return getParent()->getParamStructRetType(getArgNo());
}

Type *Argument::getParamByRefType() const {
  assert(getType()->isPointerTy() && "Only pointers have byref types");
  return getParent()->getParamByRefType(getArgNo());
}

uint64_t Argument::getDereferenceableBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableBytes(getArgNo());
}

uint64_t Argument::getDereferenceableOrNullBytes() const {
  assert(getType()->isPointerTy() &&
         "Only pointers have dereferenceable bytes");
  return getParent()->getParamDereferenceableOrNullBytes(getArgNo());
}

bool Argument::hasNestAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::Nest);
}

bool Argument::hasNoAliasAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoAlias);
}

bool Argument::hasNoCaptureAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::NoCapture);
}

bool Argument::hasStructRetAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::StructRet);
}

bool Argument::hasInRegAttr() const {
  return hasAttribute(Attribute::InReg);
}

bool Argument::hasReturnedAttr() const {
  return hasAttribute(Attribute::Returned);
}

bool Argument::hasZExtAttr() const {
  return hasAttribute(Attribute::ZExt);
}

bool Argument::hasSExtAttr() const {
  return hasAttribute(Attribute::SExt);
}

bool Argument::onlyReadsMemory() const {
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ReadOnly) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ReadNone);
}

void Argument::addAttrs(AttrBuilder &B) {
  AttributeList AL = getParent()->getAttributes();
  AL = AL.addParamAttributes(Parent->getContext(), getArgNo(), B);
  getParent()->setAttributes(AL);
}

void Argument::addAttr(Attribute::AttrKind Kind) {
  getParent()->addParamAttr(getArgNo(), Kind);
}

void Argument::addAttr(Attribute Attr) {
  getParent()->addParamAttr(getArgNo(), Attr);
}

void Argument::removeAttr(Attribute::AttrKind Kind) {
  getParent()->removeParamAttr(getArgNo(), Kind);
}

bool Argument::hasAttribute(Attribute::AttrKind Kind) const {
  return getParent()->hasParamAttribute(getArgNo(), Kind);
}

Attribute Argument::getAttribute(Attribute::AttrKind Kind) const {
  return getParent()->getParamAttribute(getArgNo(), Kind);
}

//===----------------------------------------------------------------------===//
// Helper Methods in Function
//===----------------------------------------------------------------------===//

LLVMContext &Function::getContext() const {
  return getType()->getContext();
}

unsigned Function::getInstructionCount() const {
  unsigned NumInstrs = 0;
  for (const BasicBlock &BB : BasicBlocks)
    NumInstrs += std::distance(BB.instructionsWithoutDebug().begin(),
                               BB.instructionsWithoutDebug().end());
  return NumInstrs;
}

Function *Function::Create(FunctionType *Ty, LinkageTypes Linkage,
                           const Twine &N, Module &M) {
  return Create(Ty, Linkage, M.getDataLayout().getProgramAddressSpace(), N, &M);
}

void Function::removeFromParent() {
  getParent()->getFunctionList().remove(getIterator());
}

void Function::eraseFromParent() {
  getParent()->getFunctionList().erase(getIterator());
}

//===----------------------------------------------------------------------===//
// Function Implementation
//===----------------------------------------------------------------------===//

static unsigned computeAddrSpace(unsigned AddrSpace, Module *M) {
  // If AS == -1 and we are passed a valid module pointer we place the function
  // in the program address space. Otherwise we default to AS0.
  if (AddrSpace == static_cast<unsigned>(-1))
    return M ? M->getDataLayout().getProgramAddressSpace() : 0;
  return AddrSpace;
}

Function::Function(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace,
                   const Twine &name, Module *ParentModule)
    : GlobalObject(Ty, Value::FunctionVal,
                   OperandTraits<Function>::op_begin(this), 0, Linkage, name,
                   computeAddrSpace(AddrSpace, ParentModule)),
      NumArgs(Ty->getNumParams()) {
  assert(FunctionType::isValidReturnType(getReturnType()) &&
         "invalid return type");
  setGlobalObjectSubClassData(0);

  // We only need a symbol table for a function if the context keeps value names
  if (!getContext().shouldDiscardValueNames())
    SymTab = std::make_unique<ValueSymbolTable>();

  // If the function has arguments, mark them as lazily built.
  if (Ty->getNumParams())
    setValueSubclassData(1); // Set the "has lazy arguments" bit.

  if (ParentModule)
    ParentModule->getFunctionList().push_back(this);

  HasLLVMReservedName = getName().startswith("llvm.");
  // Ensure intrinsics have the right parameter attributes.
  // Note, the IntID field will have been set in Value::setName if this function
  // name is a valid intrinsic ID.
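  // For example, a declaration named "llvm.memcpy.p0i8.p0i8.i64" reaches this
  // point with IntID already set to the memcpy intrinsic, so the call below
  // installs that intrinsic's canonical attribute list.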
  if (IntID)
    setAttributes(Intrinsic::getAttributes(getContext(), IntID));
}

Function::~Function() {
  dropAllReferences(); // After this it is safe to delete instructions.

  // Delete all of the method arguments and unlink from symbol table...
  if (Arguments)
    clearArguments();

  // Remove the function from the on-the-side GC table.
  clearGC();
}

void Function::BuildLazyArguments() const {
  // Create the arguments vector, all arguments start out unnamed.
  auto *FT = getFunctionType();
  if (NumArgs > 0) {
    Arguments = std::allocator<Argument>().allocate(NumArgs);
    for (unsigned i = 0, e = NumArgs; i != e; ++i) {
      Type *ArgTy = FT->getParamType(i);
      assert(!ArgTy->isVoidTy() && "Cannot have void typed arguments!");
      new (Arguments + i) Argument(ArgTy, "", const_cast<Function *>(this), i);
    }
  }

  // Clear the lazy arguments bit.
  unsigned SDC = getSubclassDataFromValue();
  SDC &= ~(1 << 0);
  const_cast<Function*>(this)->setValueSubclassData(SDC);
  assert(!hasLazyArguments());
}

static MutableArrayRef<Argument> makeArgArray(Argument *Args, size_t Count) {
  return MutableArrayRef<Argument>(Args, Count);
}

bool Function::isConstrainedFPIntrinsic() const {
  switch (getIntrinsicID()) {
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
#undef INSTRUCTION
  default:
    return false;
  }
}

void Function::clearArguments() {
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    A.setName("");
    A.~Argument();
  }
  std::allocator<Argument>().deallocate(Arguments, NumArgs);
  Arguments = nullptr;
}

void Function::stealArgumentListFrom(Function &Src) {
  assert(isDeclaration() && "Expected no references to current arguments");

  // Drop the current arguments, if any, and set the lazy argument bit.
  if (!hasLazyArguments()) {
    assert(llvm::all_of(makeArgArray(Arguments, NumArgs),
                        [](const Argument &A) { return A.use_empty(); }) &&
           "Expected arguments to be unused in declaration");
    clearArguments();
    setValueSubclassData(getSubclassDataFromValue() | (1 << 0));
  }

  // Nothing to steal if Src has lazy arguments.
  if (Src.hasLazyArguments())
    return;

  // Steal arguments from Src, and fix the lazy argument bits.
  assert(arg_size() == Src.arg_size());
  Arguments = Src.Arguments;
  Src.Arguments = nullptr;
  for (Argument &A : makeArgArray(Arguments, NumArgs)) {
    // FIXME: This does the work of transferNodesFromList inefficiently.
    SmallString<128> Name;
    if (A.hasName())
      Name = A.getName();
    if (!Name.empty())
      A.setName("");
    A.setParent(this);
    if (!Name.empty())
      A.setName(Name);
  }

  setValueSubclassData(getSubclassDataFromValue() & ~(1 << 0));
  assert(!hasLazyArguments());
  Src.setValueSubclassData(Src.getSubclassDataFromValue() | (1 << 0));
}

// dropAllReferences() - This function causes all the subinstructions to "let
// go" of all references that they are maintaining.  This allows one to
// 'delete' a whole class at a time, even though there may be circular
// references... first all references are dropped, and all use counts go to
// zero.  Then everything is deleted for real.  Note that no operations are
// valid on an object that has "dropped all references", except operator
// delete.
//
void Function::dropAllReferences() {
  setIsMaterializable(false);

  for (BasicBlock &BB : *this)
    BB.dropAllReferences();

  // Delete all basic blocks. They are now unused, except possibly by
  // blockaddresses, but BasicBlock's destructor takes care of those.
  while (!BasicBlocks.empty())
    BasicBlocks.begin()->eraseFromParent();

  // Drop uses of any optional data (real or placeholder).
  if (getNumOperands()) {
    User::dropAllReferences();
    setNumHungOffUseOperands(0);
    setValueSubclassData(getSubclassDataFromValue() & ~0xe);
  }

  // Metadata is stored in a side-table.
  clearMetadata();
}

void Function::addAttribute(unsigned i, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::addAttribute(unsigned i, Attribute Attr) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttribute(getContext(), i, Attr);
  setAttributes(PAL);
}

void Function::addAttributes(unsigned i, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addAttributes(getContext(), i, Attrs);
  setAttributes(PAL);
}

void Function::addParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::addParamAttr(unsigned ArgNo, Attribute Attr) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttribute(getContext(), ArgNo, Attr);
  setAttributes(PAL);
}

void Function::addParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addParamAttributes(getContext(), ArgNo, Attrs);
  setAttributes(PAL);
}

void Function::removeAttribute(unsigned i, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::removeAttribute(unsigned i, StringRef Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttribute(getContext(), i, Kind);
  setAttributes(PAL);
}

void Function::removeAttributes(unsigned i, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeAttributes(getContext(), i, Attrs);
  setAttributes(PAL);
}

void Function::removeParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::removeParamAttr(unsigned ArgNo, StringRef Kind) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind);
  setAttributes(PAL);
}

void Function::removeParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) {
  AttributeList PAL = getAttributes();
  PAL = PAL.removeParamAttributes(getContext(), ArgNo, Attrs);
  setAttributes(PAL);
}

void Function::addDereferenceableAttr(unsigned i, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableAttr(getContext(), i, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableParamAttr(unsigned ArgNo, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableParamAttr(getContext(), ArgNo, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableOrNullAttr(unsigned i, uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableOrNullAttr(getContext(), i, Bytes);
  setAttributes(PAL);
}

void Function::addDereferenceableOrNullParamAttr(unsigned ArgNo,
                                                 uint64_t Bytes) {
  AttributeList PAL = getAttributes();
  PAL = PAL.addDereferenceableOrNullParamAttr(getContext(), ArgNo, Bytes);
  setAttributes(PAL);
}

DenormalMode Function::getDenormalMode(const fltSemantics &FPType) const {
  if (&FPType == &APFloat::IEEEsingle()) {
    Attribute Attr = getFnAttribute("denormal-fp-math-f32");
    StringRef Val = Attr.getValueAsString();
    if (!Val.empty())
      return parseDenormalFPAttribute(Val);

    // If the f32 variant of the attribute isn't specified, try to use the
    // generic one.
  }

  Attribute Attr = getFnAttribute("denormal-fp-math");
  return parseDenormalFPAttribute(Attr.getValueAsString());
}

const std::string &Function::getGC() const {
  assert(hasGC() && "Function has no collector");
  return getContext().getGC(*this);
}

void Function::setGC(std::string Str) {
  setValueSubclassDataBit(14, !Str.empty());
  getContext().setGC(*this, std::move(Str));
}

void Function::clearGC() {
  if (!hasGC())
    return;
  getContext().deleteGC(*this);
  setValueSubclassDataBit(14, false);
}

bool Function::hasStackProtectorFnAttr() const {
  return hasFnAttribute(Attribute::StackProtect) ||
         hasFnAttribute(Attribute::StackProtectStrong) ||
         hasFnAttribute(Attribute::StackProtectReq);
}

/// Copy all additional attributes (those not needed to create a Function) from
/// the Function Src to this one.
void Function::copyAttributesFrom(const Function *Src) {
  GlobalObject::copyAttributesFrom(Src);
  setCallingConv(Src->getCallingConv());
  setAttributes(Src->getAttributes());
  if (Src->hasGC())
    setGC(Src->getGC());
  else
    clearGC();
  if (Src->hasPersonalityFn())
    setPersonalityFn(Src->getPersonalityFn());
  if (Src->hasPrefixData())
    setPrefixData(Src->getPrefixData());
  if (Src->hasPrologueData())
    setPrologueData(Src->getPrologueData());
}

/// Table of string intrinsic names indexed by enum value.
static const char * const IntrinsicNameTable[] = {
  "not_intrinsic",
#define GET_INTRINSIC_NAME_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_NAME_TABLE
};

/// Table of per-target intrinsic name tables.
#define GET_INTRINSIC_TARGET_DATA
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_TARGET_DATA

bool Function::isTargetIntrinsic(Intrinsic::ID IID) {
  return IID > TargetInfos[0].Count;
}

bool Function::isTargetIntrinsic() const {
  return isTargetIntrinsic(IntID);
}

/// Find the segment of \c IntrinsicNameTable for intrinsics with the same
/// target as \c Name, or the generic table if \c Name is not target specific.
///
/// Returns the relevant slice of \c IntrinsicNameTable
static ArrayRef<const char *> findTargetSubtable(StringRef Name) {
  assert(Name.startswith("llvm."));

  ArrayRef<IntrinsicTargetInfo> Targets(TargetInfos);
  // Drop "llvm." and take the first dotted component. That will be the target
  // if this is target specific.
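  // For example, "llvm.aarch64.neon.saddlp" yields the component "aarch64",
  // while a generic name such as "llvm.memcpy" yields "memcpy", which matches
  // no per-target table and falls back to the generic set below.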
  StringRef Target = Name.drop_front(5).split('.').first;
  auto It = partition_point(
      Targets, [=](const IntrinsicTargetInfo &TI) { return TI.Name < Target; });
  // We've either found the target or just fall back to the generic set, which
  // is always first.
  const auto &TI = It != Targets.end() && It->Name == Target ? *It : Targets[0];
  return makeArrayRef(&IntrinsicNameTable[1] + TI.Offset, TI.Count);
}

/// This does the actual lookup of an intrinsic ID which
/// matches the given function name.
Intrinsic::ID Function::lookupIntrinsicID(StringRef Name) {
  ArrayRef<const char *> NameTable = findTargetSubtable(Name);
  int Idx = Intrinsic::lookupLLVMIntrinsicByName(NameTable, Name);
  if (Idx == -1)
    return Intrinsic::not_intrinsic;

  // Intrinsic IDs correspond to the location in IntrinsicNameTable, but we have
  // an index into a sub-table.
  int Adjust = NameTable.data() - IntrinsicNameTable;
  Intrinsic::ID ID = static_cast<Intrinsic::ID>(Idx + Adjust);

  // If the intrinsic is not overloaded, require an exact match. If it is
  // overloaded, require either exact or prefix match.
  const auto MatchSize = strlen(NameTable[Idx]);
  assert(Name.size() >= MatchSize && "Expected either exact or prefix match");
  bool IsExactMatch = Name.size() == MatchSize;
  return IsExactMatch || Intrinsic::isOverloaded(ID) ? ID
                                                     : Intrinsic::not_intrinsic;
}

void Function::recalculateIntrinsicID() {
  StringRef Name = getName();
  if (!Name.startswith("llvm.")) {
    HasLLVMReservedName = false;
    IntID = Intrinsic::not_intrinsic;
    return;
  }
  HasLLVMReservedName = true;
  IntID = lookupIntrinsicID(Name);
}

/// Returns a stable mangling for the type specified for use in the name
/// mangling scheme used by 'any' types in intrinsic signatures. The mangling
/// of named types is simply their name. Manglings for unnamed types consist
/// of a prefix ('p' for pointers, 'a' for arrays, 'f_' for functions)
/// combined with the mangling of their component types. A vararg function
/// type will have a suffix of 'vararg'. Since function types can contain
/// other function types, we close a function type mangling with suffix 'f'
/// which can't be confused with its prefix. This ensures we don't have
/// collisions between two unrelated function types. Otherwise, you might
/// parse ffXX as f(fXX) or f(fX)X. (X is a placeholder for any other type.)
///
static std::string getMangledTypeStr(Type* Ty) {
  std::string Result;
  if (PointerType* PTyp = dyn_cast<PointerType>(Ty)) {
    Result += "p" + utostr(PTyp->getAddressSpace()) +
              getMangledTypeStr(PTyp->getElementType());
  } else if (ArrayType* ATyp = dyn_cast<ArrayType>(Ty)) {
    Result += "a" + utostr(ATyp->getNumElements()) +
              getMangledTypeStr(ATyp->getElementType());
  } else if (StructType *STyp = dyn_cast<StructType>(Ty)) {
    if (!STyp->isLiteral()) {
      Result += "s_";
      Result += STyp->getName();
    } else {
      Result += "sl_";
      for (auto Elem : STyp->elements())
        Result += getMangledTypeStr(Elem);
    }
    // Ensure nested structs are distinguishable.
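    // For example, the literal struct {i32, float} mangles to "sl_i32f32"
    // followed by the closing "s" appended below.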
    Result += "s";
  } else if (FunctionType *FT = dyn_cast<FunctionType>(Ty)) {
    Result += "f_" + getMangledTypeStr(FT->getReturnType());
    for (size_t i = 0; i < FT->getNumParams(); i++)
      Result += getMangledTypeStr(FT->getParamType(i));
    if (FT->isVarArg())
      Result += "vararg";
    // Ensure nested function types are distinguishable.
    Result += "f";
  } else if (VectorType* VTy = dyn_cast<VectorType>(Ty)) {
    ElementCount EC = VTy->getElementCount();
    if (EC.isScalable())
      Result += "nx";
    Result += "v" + utostr(EC.getKnownMinValue()) +
              getMangledTypeStr(VTy->getElementType());
  } else if (Ty) {
    switch (Ty->getTypeID()) {
    default: llvm_unreachable("Unhandled type");
    case Type::VoidTyID: Result += "isVoid"; break;
    case Type::MetadataTyID: Result += "Metadata"; break;
    case Type::HalfTyID: Result += "f16"; break;
    case Type::BFloatTyID: Result += "bf16"; break;
    case Type::FloatTyID: Result += "f32"; break;
    case Type::DoubleTyID: Result += "f64"; break;
    case Type::X86_FP80TyID: Result += "f80"; break;
    case Type::FP128TyID: Result += "f128"; break;
    case Type::PPC_FP128TyID: Result += "ppcf128"; break;
    case Type::X86_MMXTyID: Result += "x86mmx"; break;
    case Type::X86_AMXTyID: Result += "x86amx"; break;
    case Type::IntegerTyID:
      Result += "i" + utostr(cast<IntegerType>(Ty)->getBitWidth());
      break;
    }
  }
  return Result;
}

StringRef Intrinsic::getName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert(!Intrinsic::isOverloaded(id) &&
         "This version of getName does not support overloading");
  return IntrinsicNameTable[id];
}

std::string Intrinsic::getName(ID id, ArrayRef<Type*> Tys) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert((Tys.empty() || Intrinsic::isOverloaded(id)) &&
         "This version of getName is for overloaded intrinsics only");
  std::string Result(IntrinsicNameTable[id]);
  for (Type *Ty : Tys) {
    Result += "." + getMangledTypeStr(Ty);
  }
  return Result;
}

/// IIT_Info - These are enumerators that describe the entries returned by the
/// getIntrinsicInfoTableEntries function.
///
/// NOTE: This must be kept in sync with the copy in TblGen/IntrinsicEmitter!
enum IIT_Info {
  // Common values should be encoded with 0-15.
  IIT_Done = 0,
  IIT_I1 = 1,
  IIT_I8 = 2,
  IIT_I16 = 3,
  IIT_I32 = 4,
  IIT_I64 = 5,
  IIT_F16 = 6,
  IIT_F32 = 7,
  IIT_F64 = 8,
  IIT_V2 = 9,
  IIT_V4 = 10,
  IIT_V8 = 11,
  IIT_V16 = 12,
  IIT_V32 = 13,
  IIT_PTR = 14,
  IIT_ARG = 15,

  // Values from 16+ are only encodable with the inefficient encoding.
  IIT_V64 = 16,
  IIT_MMX = 17,
  IIT_TOKEN = 18,
  IIT_METADATA = 19,
  IIT_EMPTYSTRUCT = 20,
  IIT_STRUCT2 = 21,
  IIT_STRUCT3 = 22,
  IIT_STRUCT4 = 23,
  IIT_STRUCT5 = 24,
  IIT_EXTEND_ARG = 25,
  IIT_TRUNC_ARG = 26,
  IIT_ANYPTR = 27,
  IIT_V1 = 28,
  IIT_VARARG = 29,
  IIT_HALF_VEC_ARG = 30,
  IIT_SAME_VEC_WIDTH_ARG = 31,
  IIT_PTR_TO_ARG = 32,
  IIT_PTR_TO_ELT = 33,
  IIT_VEC_OF_ANYPTRS_TO_ELT = 34,
  IIT_I128 = 35,
  IIT_V512 = 36,
  IIT_V1024 = 37,
  IIT_STRUCT6 = 38,
  IIT_STRUCT7 = 39,
  IIT_STRUCT8 = 40,
  IIT_F128 = 41,
  IIT_VEC_ELEMENT = 42,
  IIT_SCALABLE_VEC = 43,
  IIT_SUBDIVIDE2_ARG = 44,
  IIT_SUBDIVIDE4_ARG = 45,
  IIT_VEC_OF_BITCASTS_TO_INT = 46,
  IIT_V128 = 47,
  IIT_BF16 = 48,
  IIT_STRUCT9 = 49,
  IIT_V256 = 50,
  IIT_AMX = 51
};

static void DecodeIITType(unsigned &NextElt, ArrayRef<unsigned char> Infos,
                          IIT_Info LastInfo,
                          SmallVectorImpl<Intrinsic::IITDescriptor> &OutputTable) {
  using namespace Intrinsic;

  bool IsScalableVector = (LastInfo == IIT_SCALABLE_VEC);

  IIT_Info Info = IIT_Info(Infos[NextElt++]);
  unsigned StructElts = 2;

  switch (Info) {
  case IIT_Done:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Void, 0));
    return;
  case IIT_VARARG:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VarArg, 0));
    return;
  case IIT_MMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::MMX, 0));
    return;
  case IIT_AMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AMX, 0));
    return;
  case IIT_TOKEN:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Token, 0));
    return;
  case IIT_METADATA:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Metadata, 0));
    return;
  case IIT_F16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Half, 0));
    return;
  case IIT_BF16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::BFloat, 0));
    return;
  case IIT_F32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Float, 0));
    return;
  case IIT_F64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Double, 0));
    return;
  case IIT_F128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Quad, 0));
    return;
  case IIT_I1:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 1));
    return;
  case IIT_I8:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8));
    return;
  case IIT_I16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 16));
    return;
  case IIT_I32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 32));
    return;
  case IIT_I64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 64));
    return;
  case IIT_I128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 128));
    return;
  case IIT_V1:
    OutputTable.push_back(IITDescriptor::getVector(1, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V2:
    OutputTable.push_back(IITDescriptor::getVector(2, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V4:
    OutputTable.push_back(IITDescriptor::getVector(4, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V8:
    OutputTable.push_back(IITDescriptor::getVector(8, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V16:
    OutputTable.push_back(IITDescriptor::getVector(16, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V32:
    OutputTable.push_back(IITDescriptor::getVector(32, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V64:
    OutputTable.push_back(IITDescriptor::getVector(64, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V128:
    OutputTable.push_back(IITDescriptor::getVector(128, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V256:
    OutputTable.push_back(IITDescriptor::getVector(256, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V512:
    OutputTable.push_back(IITDescriptor::getVector(512, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_V1024:
    OutputTable.push_back(IITDescriptor::getVector(1024, IsScalableVector));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_PTR:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 0));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  case IIT_ANYPTR: { // [ANYPTR addrspace, subtype]
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer,
                                             Infos[NextElt++]));
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Argument, ArgInfo));
    return;
  }
  case IIT_EXTEND_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::ExtendArgument,
                                             ArgInfo));
    return;
  }
  case IIT_TRUNC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::TruncArgument,
                                             ArgInfo));
    return;
  }
  case IIT_HALF_VEC_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::HalfVecArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SAME_VEC_WIDTH_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::SameVecWidthArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToArgument,
                                             ArgInfo));
    return;
  }
  case IIT_PTR_TO_ELT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToElt, ArgInfo));
    return;
  }
  case IIT_VEC_OF_ANYPTRS_TO_ELT: {
    unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(
        IITDescriptor::get(IITDescriptor::VecOfAnyPtrsToElt, ArgNo, RefNo));
    return;
  }
  case IIT_EMPTYSTRUCT:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0));
    return;
  case IIT_STRUCT9: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT8: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT7: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT6: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT5: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT4: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT3: ++StructElts; LLVM_FALLTHROUGH;
  case IIT_STRUCT2: {
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, StructElts));

    for (unsigned i = 0; i != StructElts; ++i)
      DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_SUBDIVIDE2_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide2Argument,
                                             ArgInfo));
    return;
  }
  case IIT_SUBDIVIDE4_ARG: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide4Argument,
                                             ArgInfo));
    return;
  }
  case IIT_VEC_ELEMENT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecElementArgument,
                                             ArgInfo));
    return;
  }
  case IIT_SCALABLE_VEC: {
    DecodeIITType(NextElt, Infos, Info, OutputTable);
    return;
  }
  case IIT_VEC_OF_BITCASTS_TO_INT: {
    unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]);
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecOfBitcastsToInt,
                                             ArgInfo));
    return;
  }
  }
  llvm_unreachable("unhandled");
}

#define GET_INTRINSIC_GENERATOR_GLOBAL
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_GENERATOR_GLOBAL

void Intrinsic::getIntrinsicInfoTableEntries(ID id,
                                             SmallVectorImpl<IITDescriptor> &T) {
  // Check to see if the intrinsic's type was expressible by the table.
  unsigned TableVal = IIT_Table[id-1];

  // Decode the TableVal into an array of IITValues.
  SmallVector<unsigned char, 8> IITValues;
  ArrayRef<unsigned char> IITEntries;
  unsigned NextElt = 0;
  if ((TableVal >> 31) != 0) {
    // This is an offset into the IIT_LongEncodingTable.
    IITEntries = IIT_LongEncodingTable;

    // Strip sentinel bit.
    NextElt = (TableVal << 1) >> 1;
  } else {
    // Decode the TableVal into an array of IITValues. If the entry was encoded
    // into a single word in the table itself, decode it now.
    do {
      IITValues.push_back(TableVal & 0xF);
      TableVal >>= 4;
    } while (TableVal);

    IITEntries = IITValues;
    NextElt = 0;
  }

  // Okay, decode the table into the output vector of IITDescriptors.
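  // The first call decodes the return type; the loop below then decodes one
  // entry per parameter type until the encoding is exhausted (or a 0
  // terminator is hit in the long-encoding table).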
  DecodeIITType(NextElt, IITEntries, IIT_Done, T);
  while (NextElt != IITEntries.size() && IITEntries[NextElt] != 0)
    DecodeIITType(NextElt, IITEntries, IIT_Done, T);
}

static Type *DecodeFixedType(ArrayRef<Intrinsic::IITDescriptor> &Infos,
                             ArrayRef<Type*> Tys, LLVMContext &Context) {
  using namespace Intrinsic;

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return Type::getVoidTy(Context);
  case IITDescriptor::VarArg: return Type::getVoidTy(Context);
  case IITDescriptor::MMX: return Type::getX86_MMXTy(Context);
  case IITDescriptor::AMX: return Type::getX86_AMXTy(Context);
  case IITDescriptor::Token: return Type::getTokenTy(Context);
  case IITDescriptor::Metadata: return Type::getMetadataTy(Context);
  case IITDescriptor::Half: return Type::getHalfTy(Context);
  case IITDescriptor::BFloat: return Type::getBFloatTy(Context);
  case IITDescriptor::Float: return Type::getFloatTy(Context);
  case IITDescriptor::Double: return Type::getDoubleTy(Context);
  case IITDescriptor::Quad: return Type::getFP128Ty(Context);

  case IITDescriptor::Integer:
    return IntegerType::get(Context, D.Integer_Width);
  case IITDescriptor::Vector:
    return VectorType::get(DecodeFixedType(Infos, Tys, Context),
                           D.Vector_Width);
  case IITDescriptor::Pointer:
    return PointerType::get(DecodeFixedType(Infos, Tys, Context),
                            D.Pointer_AddressSpace);
  case IITDescriptor::Struct: {
    SmallVector<Type *, 8> Elts;
    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      Elts.push_back(DecodeFixedType(Infos, Tys, Context));
    return StructType::get(Context, Elts);
  }
  case IITDescriptor::Argument:
    return Tys[D.getArgumentNumber()];
  case IITDescriptor::ExtendArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getExtendedElementVectorType(VTy);

    return IntegerType::get(Context, 2 * cast<IntegerType>(Ty)->getBitWidth());
  }
  case IITDescriptor::TruncArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::getTruncatedElementVectorType(VTy);

    IntegerType *ITy = cast<IntegerType>(Ty);
    assert(ITy->getBitWidth() % 2 == 0);
    return IntegerType::get(Context, ITy->getBitWidth() / 2);
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
    return VectorType::getSubdividedVectorType(VTy, SubDivs);
  }
  case IITDescriptor::HalfVecArgument:
    return VectorType::getHalfElementsVectorType(
        cast<VectorType>(Tys[D.getArgumentNumber()]));
  case IITDescriptor::SameVecWidthArgument: {
    Type *EltTy = DecodeFixedType(Infos, Tys, Context);
    Type *Ty = Tys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(Ty))
      return VectorType::get(EltTy, VTy->getElementCount());
    return EltTy;
  }
  case IITDescriptor::PtrToArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    return PointerType::getUnqual(Ty);
  }
  case IITDescriptor::PtrToElt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    if (!VTy)
      llvm_unreachable("Expected an argument of Vector Type");
    Type *EltTy = VTy->getElementType();
    return PointerType::getUnqual(EltTy);
  }
  case IITDescriptor::VecElementArgument: {
    Type *Ty = Tys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(Ty))
      return VTy->getElementType();
    llvm_unreachable("Expected an argument of Vector Type");
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    Type *Ty = Tys[D.getArgumentNumber()];
    VectorType *VTy = dyn_cast<VectorType>(Ty);
    assert(VTy && "Expected an argument of Vector Type");
    return VectorType::getInteger(VTy);
  }
  case IITDescriptor::VecOfAnyPtrsToElt:
    // Return the overloaded type (which determines the pointer's address
    // space).
    return Tys[D.getOverloadArgNumber()];
  }
  llvm_unreachable("unhandled");
}

FunctionType *Intrinsic::getType(LLVMContext &Context,
                                 ID id, ArrayRef<Type*> Tys) {
  SmallVector<IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(id, Table);

  ArrayRef<IITDescriptor> TableRef = Table;
  Type *ResultTy = DecodeFixedType(TableRef, Tys, Context);

  SmallVector<Type*, 8> ArgTys;
  while (!TableRef.empty())
    ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context));

  // DecodeFixedType returns Void for IITDescriptor::Void and
  // IITDescriptor::VarArg. If we see a void type as the type of the last
  // argument, it is a vararg intrinsic.
  if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) {
    ArgTys.pop_back();
    return FunctionType::get(ResultTy, ArgTys, true);
  }
  return FunctionType::get(ResultTy, ArgTys, false);
}

bool Intrinsic::isOverloaded(ID id) {
#define GET_INTRINSIC_OVERLOAD_TABLE
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_OVERLOAD_TABLE
}

bool Intrinsic::isLeaf(ID id) {
  switch (id) {
  default:
    return true;

  case Intrinsic::experimental_gc_statepoint:
  case Intrinsic::experimental_patchpoint_void:
  case Intrinsic::experimental_patchpoint_i64:
    return false;
  }
}

/// This defines the "Intrinsic::getAttributes(ID id)" method.
#define GET_INTRINSIC_ATTRIBUTES
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_INTRINSIC_ATTRIBUTES

Function *Intrinsic::getDeclaration(Module *M, ID id, ArrayRef<Type*> Tys) {
  // There can never be multiple globals with the same name of different types,
  // because intrinsics must be a specific type.
  return cast<Function>(
      M->getOrInsertFunction(Tys.empty() ? getName(id) : getName(id, Tys),
                             getType(M->getContext(), id, Tys))
          .getCallee());
}

// This defines the "Intrinsic::getIntrinsicForGCCBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN

// This defines the "Intrinsic::getIntrinsicForMSBuiltin()" method.
#define GET_LLVM_INTRINSIC_FOR_MS_BUILTIN
#include "llvm/IR/IntrinsicImpl.inc"
#undef GET_LLVM_INTRINSIC_FOR_MS_BUILTIN

using DeferredIntrinsicMatchPair =
    std::pair<Type *, ArrayRef<Intrinsic::IITDescriptor>>;

static bool matchIntrinsicType(
    Type *Ty, ArrayRef<Intrinsic::IITDescriptor> &Infos,
    SmallVectorImpl<Type *> &ArgTys,
    SmallVectorImpl<DeferredIntrinsicMatchPair> &DeferredChecks,
    bool IsDeferredCheck) {
  using namespace Intrinsic;

  // If we ran out of descriptors, there are too many arguments.
  if (Infos.empty()) return true;

  // Do this before slicing off the 'front' part
  auto InfosRef = Infos;
  auto DeferCheck = [&DeferredChecks, &InfosRef](Type *T) {
    DeferredChecks.emplace_back(T, InfosRef);
    return false;
  };

  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);

  switch (D.Kind) {
  case IITDescriptor::Void: return !Ty->isVoidTy();
  case IITDescriptor::VarArg: return true;
  case IITDescriptor::MMX: return !Ty->isX86_MMXTy();
  case IITDescriptor::AMX: return !Ty->isX86_AMXTy();
  case IITDescriptor::Token: return !Ty->isTokenTy();
  case IITDescriptor::Metadata: return !Ty->isMetadataTy();
  case IITDescriptor::Half: return !Ty->isHalfTy();
  case IITDescriptor::BFloat: return !Ty->isBFloatTy();
  case IITDescriptor::Float: return !Ty->isFloatTy();
  case IITDescriptor::Double: return !Ty->isDoubleTy();
  case IITDescriptor::Quad: return !Ty->isFP128Ty();
  case IITDescriptor::Integer: return !Ty->isIntegerTy(D.Integer_Width);
  case IITDescriptor::Vector: {
    VectorType *VT = dyn_cast<VectorType>(Ty);
    return !VT || VT->getElementCount() != D.Vector_Width ||
           matchIntrinsicType(VT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }
  case IITDescriptor::Pointer: {
    PointerType *PT = dyn_cast<PointerType>(Ty);
    return !PT || PT->getAddressSpace() != D.Pointer_AddressSpace ||
           matchIntrinsicType(PT->getElementType(), Infos, ArgTys,
                              DeferredChecks, IsDeferredCheck);
  }

  case IITDescriptor::Struct: {
    StructType *ST = dyn_cast<StructType>(Ty);
    if (!ST || ST->getNumElements() != D.Struct_NumElements)
      return true;

    for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i)
      if (matchIntrinsicType(ST->getElementType(i), Infos, ArgTys,
                             DeferredChecks, IsDeferredCheck))
        return true;
    return false;
  }

  case IITDescriptor::Argument:
    // If this is the second occurrence of an argument,
    // verify that the later instance matches the previous instance.
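    // For example, an operand declared with LLVMMatchType<0> in the .td file
    // lands here and must be exactly the type already recorded for overload
    // slot 0; forward references are deferred below.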
    if (D.getArgumentNumber() < ArgTys.size())
      return Ty != ArgTys[D.getArgumentNumber()];

    if (D.getArgumentNumber() > ArgTys.size() ||
        D.getArgumentKind() == IITDescriptor::AK_MatchType)
      return IsDeferredCheck || DeferCheck(Ty);

    assert(D.getArgumentNumber() == ArgTys.size() && !IsDeferredCheck &&
           "Table consistency error");
    ArgTys.push_back(Ty);

    switch (D.getArgumentKind()) {
    case IITDescriptor::AK_Any: return false; // Success
    case IITDescriptor::AK_AnyInteger: return !Ty->isIntOrIntVectorTy();
    case IITDescriptor::AK_AnyFloat: return !Ty->isFPOrFPVectorTy();
    case IITDescriptor::AK_AnyVector: return !isa<VectorType>(Ty);
    case IITDescriptor::AK_AnyPointer: return !isa<PointerType>(Ty);
    default: break;
    }
    llvm_unreachable("all argument kinds not covered");

  case IITDescriptor::ExtendArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getExtendedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), 2 * ITy->getBitWidth());
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::TruncArgument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (VectorType *VTy = dyn_cast<VectorType>(NewTy))
      NewTy = VectorType::getTruncatedElementVectorType(VTy);
    else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy))
      NewTy = IntegerType::get(ITy->getContext(), ITy->getBitWidth() / 2);
    else
      return true;

    return Ty != NewTy;
  }
  case IITDescriptor::HalfVecArgument:
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    return !isa<VectorType>(ArgTys[D.getArgumentNumber()]) ||
           VectorType::getHalfElementsVectorType(
               cast<VectorType>(ArgTys[D.getArgumentNumber()])) != Ty;
  case IITDescriptor::SameVecWidthArgument: {
    if (D.getArgumentNumber() >= ArgTys.size()) {
      // Defer check and subsequent check for the vector element type.
      Infos = Infos.slice(1);
      return IsDeferredCheck || DeferCheck(Ty);
    }
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgType = dyn_cast<VectorType>(Ty);
    // Both must be vectors of the same number of elements or neither.
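    // For example, a mask declared as LLVMScalarOrSameVectorWidth<0,
    // llvm_i1_ty> must be <8 x i1> when overload #0 is <8 x i32>, or a plain
    // i1 when overload #0 is scalar.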
    if ((ReferenceType != nullptr) != (ThisArgType != nullptr))
      return true;
    Type *EltTy = Ty;
    if (ThisArgType) {
      if (ReferenceType->getElementCount() !=
          ThisArgType->getElementCount())
        return true;
      EltTy = ThisArgType->getElementType();
    }
    return matchIntrinsicType(EltTy, Infos, ArgTys, DeferredChecks,
                              IsDeferredCheck);
  }
  case IITDescriptor::PtrToArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    Type *ReferenceType = ArgTys[D.getArgumentNumber()];
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);
    return (!ThisArgType || ThisArgType->getElementType() != ReferenceType);
  }
  case IITDescriptor::PtrToElt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    VectorType *ReferenceType =
        dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    PointerType *ThisArgType = dyn_cast<PointerType>(Ty);

    return (!ThisArgType || !ReferenceType ||
            ThisArgType->getElementType() != ReferenceType->getElementType());
  }
  case IITDescriptor::VecOfAnyPtrsToElt: {
    unsigned RefArgNumber = D.getRefArgNumber();
    if (RefArgNumber >= ArgTys.size()) {
      if (IsDeferredCheck)
        return true;
      // If forward referencing, already add the pointer-vector type and
      // defer the checks for later.
      ArgTys.push_back(Ty);
      return DeferCheck(Ty);
    }

    if (!IsDeferredCheck) {
      assert(D.getOverloadArgNumber() == ArgTys.size() &&
             "Table consistency error");
      ArgTys.push_back(Ty);
    }

    // Verify the overloaded type "matches" the Ref type.
    // i.e. Ty is a vector with the same width as Ref.
    // Composed of pointers to the same element type as Ref.
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType ||
        (ReferenceType->getElementCount() != ThisArgVecTy->getElementCount()))
      return true;
    PointerType *ThisArgEltTy =
        dyn_cast<PointerType>(ThisArgVecTy->getElementType());
    if (!ThisArgEltTy)
      return true;
    return ThisArgEltTy->getElementType() != ReferenceType->getElementType();
  }
  case IITDescriptor::VecElementArgument: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck ? true : DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    return !ReferenceType || Ty != ReferenceType->getElementType();
  }
  case IITDescriptor::Subdivide2Argument:
  case IITDescriptor::Subdivide4Argument: {
    // If this is a forward reference, defer the check for later.
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);

    Type *NewTy = ArgTys[D.getArgumentNumber()];
    if (auto *VTy = dyn_cast<VectorType>(NewTy)) {
      int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 1 : 2;
      NewTy = VectorType::getSubdividedVectorType(VTy, SubDivs);
      return Ty != NewTy;
    }
    return true;
  }
  case IITDescriptor::VecOfBitcastsToInt: {
    if (D.getArgumentNumber() >= ArgTys.size())
      return IsDeferredCheck || DeferCheck(Ty);
    auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]);
    auto *ThisArgVecTy = dyn_cast<VectorType>(Ty);
    if (!ThisArgVecTy || !ReferenceType)
      return true;
    return ThisArgVecTy != VectorType::getInteger(ReferenceType);
  }
  }
  llvm_unreachable("unhandled");
}

Intrinsic::MatchIntrinsicTypesResult
Intrinsic::matchIntrinsicSignature(FunctionType *FTy,
                                   ArrayRef<Intrinsic::IITDescriptor> &Infos,
                                   SmallVectorImpl<Type *> &ArgTys) {
  SmallVector<DeferredIntrinsicMatchPair, 2> DeferredChecks;
  if (matchIntrinsicType(FTy->getReturnType(), Infos, ArgTys, DeferredChecks,
                         false))
    return MatchIntrinsicTypes_NoMatchRet;

  unsigned NumDeferredReturnChecks = DeferredChecks.size();

  for (auto Ty : FTy->params())
    if (matchIntrinsicType(Ty, Infos, ArgTys, DeferredChecks, false))
      return MatchIntrinsicTypes_NoMatchArg;

  for (unsigned I = 0, E = DeferredChecks.size(); I != E; ++I) {
    DeferredIntrinsicMatchPair &Check = DeferredChecks[I];
    if (matchIntrinsicType(Check.first, Check.second, ArgTys, DeferredChecks,
                           true))
      return I < NumDeferredReturnChecks ? MatchIntrinsicTypes_NoMatchRet
                                         : MatchIntrinsicTypes_NoMatchArg;
  }

  return MatchIntrinsicTypes_Match;
}

bool
Intrinsic::matchIntrinsicVarArg(bool isVarArg,
                                ArrayRef<Intrinsic::IITDescriptor> &Infos) {
  // If there are no descriptors left, then it can't be a vararg.
  if (Infos.empty())
    return isVarArg;

  // There should be only one descriptor remaining at this point.
  if (Infos.size() != 1)
    return true;

  // Check and verify the descriptor.
  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);
  if (D.Kind == IITDescriptor::VarArg)
    return !isVarArg;

  return true;
}

bool Intrinsic::getIntrinsicSignature(Function *F,
                                      SmallVectorImpl<Type *> &ArgTys) {
  Intrinsic::ID ID = F->getIntrinsicID();
  if (!ID)
    return false;

  SmallVector<Intrinsic::IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(ID, Table);
  ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;

  if (Intrinsic::matchIntrinsicSignature(F->getFunctionType(), TableRef,
                                         ArgTys) !=
      Intrinsic::MatchIntrinsicTypesResult::MatchIntrinsicTypes_Match) {
    return false;
  }
  if (Intrinsic::matchIntrinsicVarArg(F->getFunctionType()->isVarArg(),
                                      TableRef))
    return false;
  return true;
}

Optional<Function *> Intrinsic::remangleIntrinsicFunction(Function *F) {
  SmallVector<Type *, 4> ArgTys;
  if (!getIntrinsicSignature(F, ArgTys))
    return None;

  Intrinsic::ID ID = F->getIntrinsicID();
  StringRef Name = F->getName();
  if (Name == Intrinsic::getName(ID, ArgTys))
    return None;

  auto NewDecl = Intrinsic::getDeclaration(F->getParent(), ID, ArgTys);
  NewDecl->setCallingConv(F->getCallingConv());
  assert(NewDecl->getFunctionType() == F->getFunctionType() &&
         "Shouldn't change the signature");
  return NewDecl;
}

/// hasAddressTaken - returns true if there are any uses of this function
/// other than direct calls or invokes to it. Optionally ignores callback
/// uses and assume-like pointer annotation calls.
bool Function::hasAddressTaken(const User **PutOffender,
                               bool IgnoreCallbackUses,
                               bool IgnoreAssumeLikeCalls) const {
  for (const Use &U : uses()) {
    const User *FU = U.getUser();
    if (isa<BlockAddress>(FU))
      continue;

    if (IgnoreCallbackUses) {
      AbstractCallSite ACS(&U);
      if (ACS && ACS.isCallbackCall())
        continue;
    }

    const auto *Call = dyn_cast<CallBase>(FU);
    if (!Call) {
      if (IgnoreAssumeLikeCalls) {
        if (const auto *FI = dyn_cast<Instruction>(FU)) {
          if (FI->isCast() && !FI->user_empty() &&
              llvm::all_of(FU->users(), [](const User *U) {
                if (const auto *I = dyn_cast<IntrinsicInst>(U))
                  return I->isAssumeLikeIntrinsic();
                return false;
              }))
            continue;
        }
      }
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
    if (!Call->isCallee(&U)) {
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
  }
  return false;
}

bool Function::isDefTriviallyDead() const {
  // Check the linkage
  if (!hasLinkOnceLinkage() && !hasLocalLinkage() &&
      !hasAvailableExternallyLinkage())
    return false;

  // Check if the function is used by anything other than a blockaddress.
  for (const User *U : users())
    if (!isa<BlockAddress>(U))
      return false;

  return true;
}

/// callsFunctionThatReturnsTwice - Return true if the function has a call to
/// setjmp or other function that gcc recognizes as "returning twice".

bool Function::isDefTriviallyDead() const {
  // Check the linkage
  if (!hasLinkOnceLinkage() && !hasLocalLinkage() &&
      !hasAvailableExternallyLinkage())
    return false;

  // Check if the function is used by anything other than a blockaddress.
  for (const User *U : users())
    if (!isa<BlockAddress>(U))
      return false;

  return true;
}

/// callsFunctionThatReturnsTwice - Return true if the function has a call to
/// setjmp or another function that gcc recognizes as "returning twice".
bool Function::callsFunctionThatReturnsTwice() const {
  for (const Instruction &I : instructions(this))
    if (const auto *Call = dyn_cast<CallBase>(&I))
      if (Call->hasFnAttr(Attribute::ReturnsTwice))
        return true;

  return false;
}

Constant *Function::getPersonalityFn() const {
  assert(hasPersonalityFn() && getNumOperands());
  return cast<Constant>(Op<0>());
}

void Function::setPersonalityFn(Constant *Fn) {
  setHungoffOperand<0>(Fn);
  setValueSubclassDataBit(3, Fn != nullptr);
}

Constant *Function::getPrefixData() const {
  assert(hasPrefixData() && getNumOperands());
  return cast<Constant>(Op<1>());
}

void Function::setPrefixData(Constant *PrefixData) {
  setHungoffOperand<1>(PrefixData);
  setValueSubclassDataBit(1, PrefixData != nullptr);
}

Constant *Function::getPrologueData() const {
  assert(hasPrologueData() && getNumOperands());
  return cast<Constant>(Op<2>());
}

void Function::setPrologueData(Constant *PrologueData) {
  setHungoffOperand<2>(PrologueData);
  setValueSubclassDataBit(2, PrologueData != nullptr);
}

void Function::allocHungoffUselist() {
  // If we've already allocated a uselist, stop here.
  if (getNumOperands())
    return;

  allocHungoffUses(3, /*IsPhi=*/ false);
  setNumHungOffUseOperands(3);

  // Initialize the uselist with placeholder operands to allow traversal.
  auto *CPN = ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0));
  Op<0>().set(CPN);
  Op<1>().set(CPN);
  Op<2>().set(CPN);
}

template <int Idx>
void Function::setHungoffOperand(Constant *C) {
  if (C) {
    allocHungoffUselist();
    Op<Idx>().set(C);
  } else if (getNumOperands()) {
    Op<Idx>().set(
        ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0)));
  }
}

void Function::setValueSubclassDataBit(unsigned Bit, bool On) {
  assert(Bit < 16 && "SubclassData contains only 16 bits");
  if (On)
    setValueSubclassData(getSubclassDataFromValue() | (1 << Bit));
  else
    setValueSubclassData(getSubclassDataFromValue() & ~(1 << Bit));
}

void Function::setEntryCount(ProfileCount Count,
                             const DenseSet<GlobalValue::GUID> *S) {
  assert(Count.hasValue());
#if !defined(NDEBUG)
  auto PrevCount = getEntryCount();
  assert(!PrevCount.hasValue() || PrevCount.getType() == Count.getType());
#endif

  auto ImportGUIDs = getImportGUIDs();
  if (S == nullptr && ImportGUIDs.size())
    S = &ImportGUIDs;

  MDBuilder MDB(getContext());
  setMetadata(
      LLVMContext::MD_prof,
      MDB.createFunctionEntryCount(Count.getCount(), Count.isSynthetic(), S));
}

void Function::setEntryCount(uint64_t Count, Function::ProfileCountType Type,
                             const DenseSet<GlobalValue::GUID> *Imports) {
  setEntryCount(ProfileCount(Count, Type), Imports);
}

ProfileCount Function::getEntryCount(bool AllowSynthetic) const {
  MDNode *MD = getMetadata(LLVMContext::MD_prof);
  if (MD && MD->getOperand(0))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0))) {
      if (MDS->getString().equals("function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        // A value of -1 is used for SamplePGO when there were no samples.
        // Treat this the same as unknown.
        if (Count == (uint64_t)-1)
          return ProfileCount::getInvalid();
        return ProfileCount(Count, PCT_Real);
      } else if (AllowSynthetic &&
                 MDS->getString().equals("synthetic_function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        return ProfileCount(Count, PCT_Synthetic);
      }
    }
  return ProfileCount::getInvalid();
}
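
// Illustrative sketch (not part of the original file): entry counts round-trip
// through the !prof "function_entry_count" metadata written and read above.
// `F` is a hypothetical Function reference.
//
//   F.setEntryCount(1000, Function::PCT_Real);
//   ProfileCount Count = F.getEntryCount();
//   if (Count.hasValue() && !Count.isSynthetic()) {
//     uint64_t EntryCount = Count.getCount(); // 1000 for the call above.
//     (void)EntryCount;
//   }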

DenseSet<GlobalValue::GUID> Function::getImportGUIDs() const {
  DenseSet<GlobalValue::GUID> R;
  if (MDNode *MD = getMetadata(LLVMContext::MD_prof))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0)))
      if (MDS->getString().equals("function_entry_count"))
        for (unsigned i = 2; i < MD->getNumOperands(); i++)
          R.insert(mdconst::extract<ConstantInt>(MD->getOperand(i))
                       ->getValue()
                       .getZExtValue());
  return R;
}

void Function::setSectionPrefix(StringRef Prefix) {
  MDBuilder MDB(getContext());
  setMetadata(LLVMContext::MD_section_prefix,
              MDB.createFunctionSectionPrefix(Prefix));
}

Optional<StringRef> Function::getSectionPrefix() const {
  if (MDNode *MD = getMetadata(LLVMContext::MD_section_prefix)) {
    assert(cast<MDString>(MD->getOperand(0))
               ->getString()
               .equals("function_section_prefix") &&
           "Metadata does not match");
    return cast<MDString>(MD->getOperand(1))->getString();
  }
  return None;
}

bool Function::nullPointerIsDefined() const {
  return hasFnAttribute(Attribute::NullPointerIsValid);
}

bool llvm::NullPointerIsDefined(const Function *F, unsigned AS) {
  if (F && F->nullPointerIsDefined())
    return true;

  if (AS != 0)
    return true;

  return false;
}
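
// Illustrative sketch (not part of the original file): llvm::NullPointerIsDefined
// above is the usual query for whether a null pointer may be a valid address.
// `F` is a hypothetical (possibly null) Function pointer and `PtrTy` a
// hypothetical pointer type.
//
//   // In address space 0 the answer follows F's null-pointer-is-valid
//   // attribute; any non-zero address space is conservatively treated as
//   // possibly having a valid null.
//   if (!NullPointerIsDefined(F, PtrTy->getPointerAddressSpace())) {
//     // Dereferencing a null pointer of this type inside F would be
//     // undefined behavior, so a dereferenceable argument cannot be null.
//   }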