//===- Function.cpp - Implement the Global object classes ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Function class for the IR library.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Function.h"
#include "SymbolTableListTraitsImpl.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/IR/AbstractCallSite.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsARM.h"
#include "llvm/IR/IntrinsicsBPF.h"
#include "llvm/IR/IntrinsicsHexagon.h"
#include "llvm/IR/IntrinsicsMips.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
#include "llvm/IR/IntrinsicsPowerPC.h"
#include "llvm/IR/IntrinsicsR600.h"
#include "llvm/IR/IntrinsicsRISCV.h"
#include "llvm/IR/IntrinsicsS390.h"
#include "llvm/IR/IntrinsicsVE.h"
#include "llvm/IR/IntrinsicsWebAssembly.h"
#include "llvm/IR/IntrinsicsX86.h"
#include "llvm/IR/IntrinsicsXCore.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/SymbolTableListTraits.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

using namespace llvm;
using ProfileCount = Function::ProfileCount;

// Explicit instantiations of SymbolTableListTraits since some of the methods
// are not in the public header file...
template class llvm::SymbolTableListTraits<BasicBlock>;

//===----------------------------------------------------------------------===//
// Argument Implementation
//===----------------------------------------------------------------------===//

Argument::Argument(Type *Ty, const Twine &Name, Function *Par, unsigned ArgNo)
    : Value(Ty, Value::ArgumentVal), Parent(Par), ArgNo(ArgNo) {
  setName(Name);
}

void Argument::setParent(Function *parent) {
  Parent = parent;
}

bool Argument::hasNonNullAttr(bool AllowUndefOrPoison) const {
  if (!getType()->isPointerTy()) return false;
  if (getParent()->hasParamAttribute(getArgNo(), Attribute::NonNull) &&
      (AllowUndefOrPoison ||
       getParent()->hasParamAttribute(getArgNo(), Attribute::NoUndef)))
    return true;
  else if (getDereferenceableBytes() > 0 &&
           !NullPointerIsDefined(getParent(),
                                 getType()->getPointerAddressSpace()))
    return true;
  return false;
}

bool Argument::hasByValAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::ByVal);
}

bool Argument::hasByRefAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::ByRef);
}

bool Argument::hasSwiftSelfAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftSelf);
}

bool Argument::hasSwiftErrorAttr() const {
  return getParent()->hasParamAttribute(getArgNo(), Attribute::SwiftError);
}

bool Argument::hasInAllocaAttr() const {
  if (!getType()->isPointerTy()) return false;
  return hasAttribute(Attribute::InAlloca);
}

bool Argument::hasPreallocatedAttr() const {
  if (!getType()->isPointerTy())
    return false;
  return hasAttribute(Attribute::Preallocated);
}

bool Argument::hasPassPointeeByValueCopyAttr() const {
  if (!getType()->isPointerTy()) return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated);
}

bool Argument::hasPointeeInMemoryValueAttr() const {
  if (!getType()->isPointerTy())
    return false;
  AttributeList Attrs = getParent()->getAttributes();
  return Attrs.hasParamAttribute(getArgNo(), Attribute::ByVal) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::StructRet) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::InAlloca) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::Preallocated) ||
         Attrs.hasParamAttribute(getArgNo(), Attribute::ByRef);
}

/// For a byval, sret, inalloca, or preallocated parameter, get the in-memory
/// parameter type.
static Type *getMemoryParamAllocType(AttributeSet ParamAttrs, Type *ArgTy) {
  // FIXME: All the type-carrying attributes are mutually exclusive, so there
  // should be a single query to get the stored type that handles any of them.
  if (Type *ByValTy = ParamAttrs.getByValType())
    return ByValTy;
  if (Type *ByRefTy = ParamAttrs.getByRefType())
    return ByRefTy;
  if (Type *PreAllocTy = ParamAttrs.getPreallocatedType())
    return PreAllocTy;

  // FIXME: sret and inalloca always depend on the pointee element type. It's
  // also possible for byval to miss it.
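  // Illustrative example (with a hypothetical %struct.S): an argument of type
  // %struct.S* carrying 'sret' or 'inalloca' resolves to %struct.S below via
  // the pointer's element type.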
168 if (ParamAttrs.hasAttribute(Attribute::InAlloca) || 169 ParamAttrs.hasAttribute(Attribute::ByVal) || 170 ParamAttrs.hasAttribute(Attribute::StructRet) || 171 ParamAttrs.hasAttribute(Attribute::Preallocated)) 172 return cast<PointerType>(ArgTy)->getElementType(); 173 174 return nullptr; 175 } 176 177 uint64_t Argument::getPassPointeeByValueCopySize(const DataLayout &DL) const { 178 AttributeSet ParamAttrs = 179 getParent()->getAttributes().getParamAttributes(getArgNo()); 180 if (Type *MemTy = getMemoryParamAllocType(ParamAttrs, getType())) 181 return DL.getTypeAllocSize(MemTy); 182 return 0; 183 } 184 185 Type *Argument::getPointeeInMemoryValueType() const { 186 AttributeSet ParamAttrs = 187 getParent()->getAttributes().getParamAttributes(getArgNo()); 188 return getMemoryParamAllocType(ParamAttrs, getType()); 189 } 190 191 unsigned Argument::getParamAlignment() const { 192 assert(getType()->isPointerTy() && "Only pointers have alignments"); 193 return getParent()->getParamAlignment(getArgNo()); 194 } 195 196 MaybeAlign Argument::getParamAlign() const { 197 assert(getType()->isPointerTy() && "Only pointers have alignments"); 198 return getParent()->getParamAlign(getArgNo()); 199 } 200 201 Type *Argument::getParamByValType() const { 202 assert(getType()->isPointerTy() && "Only pointers have byval types"); 203 return getParent()->getParamByValType(getArgNo()); 204 } 205 206 Type *Argument::getParamStructRetType() const { 207 assert(getType()->isPointerTy() && "Only pointers have sret types"); 208 return getParent()->getParamStructRetType(getArgNo()); 209 } 210 211 Type *Argument::getParamByRefType() const { 212 assert(getType()->isPointerTy() && "Only pointers have byref types"); 213 return getParent()->getParamByRefType(getArgNo()); 214 } 215 216 uint64_t Argument::getDereferenceableBytes() const { 217 assert(getType()->isPointerTy() && 218 "Only pointers have dereferenceable bytes"); 219 return getParent()->getParamDereferenceableBytes(getArgNo()); 220 } 221 222 uint64_t Argument::getDereferenceableOrNullBytes() const { 223 assert(getType()->isPointerTy() && 224 "Only pointers have dereferenceable bytes"); 225 return getParent()->getParamDereferenceableOrNullBytes(getArgNo()); 226 } 227 228 bool Argument::hasNestAttr() const { 229 if (!getType()->isPointerTy()) return false; 230 return hasAttribute(Attribute::Nest); 231 } 232 233 bool Argument::hasNoAliasAttr() const { 234 if (!getType()->isPointerTy()) return false; 235 return hasAttribute(Attribute::NoAlias); 236 } 237 238 bool Argument::hasNoCaptureAttr() const { 239 if (!getType()->isPointerTy()) return false; 240 return hasAttribute(Attribute::NoCapture); 241 } 242 243 bool Argument::hasNoFreeAttr() const { 244 if (!getType()->isPointerTy()) return false; 245 return hasAttribute(Attribute::NoFree); 246 } 247 248 bool Argument::hasStructRetAttr() const { 249 if (!getType()->isPointerTy()) return false; 250 return hasAttribute(Attribute::StructRet); 251 } 252 253 bool Argument::hasInRegAttr() const { 254 return hasAttribute(Attribute::InReg); 255 } 256 257 bool Argument::hasReturnedAttr() const { 258 return hasAttribute(Attribute::Returned); 259 } 260 261 bool Argument::hasZExtAttr() const { 262 return hasAttribute(Attribute::ZExt); 263 } 264 265 bool Argument::hasSExtAttr() const { 266 return hasAttribute(Attribute::SExt); 267 } 268 269 bool Argument::onlyReadsMemory() const { 270 AttributeList Attrs = getParent()->getAttributes(); 271 return Attrs.hasParamAttribute(getArgNo(), Attribute::ReadOnly) || 272 
Attrs.hasParamAttribute(getArgNo(), Attribute::ReadNone); 273 } 274 275 void Argument::addAttrs(AttrBuilder &B) { 276 AttributeList AL = getParent()->getAttributes(); 277 AL = AL.addParamAttributes(Parent->getContext(), getArgNo(), B); 278 getParent()->setAttributes(AL); 279 } 280 281 void Argument::addAttr(Attribute::AttrKind Kind) { 282 getParent()->addParamAttr(getArgNo(), Kind); 283 } 284 285 void Argument::addAttr(Attribute Attr) { 286 getParent()->addParamAttr(getArgNo(), Attr); 287 } 288 289 void Argument::removeAttr(Attribute::AttrKind Kind) { 290 getParent()->removeParamAttr(getArgNo(), Kind); 291 } 292 293 bool Argument::hasAttribute(Attribute::AttrKind Kind) const { 294 return getParent()->hasParamAttribute(getArgNo(), Kind); 295 } 296 297 Attribute Argument::getAttribute(Attribute::AttrKind Kind) const { 298 return getParent()->getParamAttribute(getArgNo(), Kind); 299 } 300 301 //===----------------------------------------------------------------------===// 302 // Helper Methods in Function 303 //===----------------------------------------------------------------------===// 304 305 LLVMContext &Function::getContext() const { 306 return getType()->getContext(); 307 } 308 309 unsigned Function::getInstructionCount() const { 310 unsigned NumInstrs = 0; 311 for (const BasicBlock &BB : BasicBlocks) 312 NumInstrs += std::distance(BB.instructionsWithoutDebug().begin(), 313 BB.instructionsWithoutDebug().end()); 314 return NumInstrs; 315 } 316 317 Function *Function::Create(FunctionType *Ty, LinkageTypes Linkage, 318 const Twine &N, Module &M) { 319 return Create(Ty, Linkage, M.getDataLayout().getProgramAddressSpace(), N, &M); 320 } 321 322 void Function::removeFromParent() { 323 getParent()->getFunctionList().remove(getIterator()); 324 } 325 326 void Function::eraseFromParent() { 327 getParent()->getFunctionList().erase(getIterator()); 328 } 329 330 //===----------------------------------------------------------------------===// 331 // Function Implementation 332 //===----------------------------------------------------------------------===// 333 334 static unsigned computeAddrSpace(unsigned AddrSpace, Module *M) { 335 // If AS == -1 and we are passed a valid module pointer we place the function 336 // in the program address space. Otherwise we default to AS0. 337 if (AddrSpace == static_cast<unsigned>(-1)) 338 return M ? M->getDataLayout().getProgramAddressSpace() : 0; 339 return AddrSpace; 340 } 341 342 Function::Function(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, 343 const Twine &name, Module *ParentModule) 344 : GlobalObject(Ty, Value::FunctionVal, 345 OperandTraits<Function>::op_begin(this), 0, Linkage, name, 346 computeAddrSpace(AddrSpace, ParentModule)), 347 NumArgs(Ty->getNumParams()) { 348 assert(FunctionType::isValidReturnType(getReturnType()) && 349 "invalid return type"); 350 setGlobalObjectSubClassData(0); 351 352 // We only need a symbol table for a function if the context keeps value names 353 if (!getContext().shouldDiscardValueNames()) 354 SymTab = std::make_unique<ValueSymbolTable>(); 355 356 // If the function has arguments, mark them as lazily built. 357 if (Ty->getNumParams()) 358 setValueSubclassData(1); // Set the "has lazy arguments" bit. 359 360 if (ParentModule) 361 ParentModule->getFunctionList().push_back(this); 362 363 HasLLVMReservedName = getName().startswith("llvm."); 364 // Ensure intrinsics have the right parameter attributes. 
365 // Note, the IntID field will have been set in Value::setName if this function 366 // name is a valid intrinsic ID. 367 if (IntID) 368 setAttributes(Intrinsic::getAttributes(getContext(), IntID)); 369 } 370 371 Function::~Function() { 372 dropAllReferences(); // After this it is safe to delete instructions. 373 374 // Delete all of the method arguments and unlink from symbol table... 375 if (Arguments) 376 clearArguments(); 377 378 // Remove the function from the on-the-side GC table. 379 clearGC(); 380 } 381 382 void Function::BuildLazyArguments() const { 383 // Create the arguments vector, all arguments start out unnamed. 384 auto *FT = getFunctionType(); 385 if (NumArgs > 0) { 386 Arguments = std::allocator<Argument>().allocate(NumArgs); 387 for (unsigned i = 0, e = NumArgs; i != e; ++i) { 388 Type *ArgTy = FT->getParamType(i); 389 assert(!ArgTy->isVoidTy() && "Cannot have void typed arguments!"); 390 new (Arguments + i) Argument(ArgTy, "", const_cast<Function *>(this), i); 391 } 392 } 393 394 // Clear the lazy arguments bit. 395 unsigned SDC = getSubclassDataFromValue(); 396 SDC &= ~(1 << 0); 397 const_cast<Function*>(this)->setValueSubclassData(SDC); 398 assert(!hasLazyArguments()); 399 } 400 401 static MutableArrayRef<Argument> makeArgArray(Argument *Args, size_t Count) { 402 return MutableArrayRef<Argument>(Args, Count); 403 } 404 405 bool Function::isConstrainedFPIntrinsic() const { 406 switch (getIntrinsicID()) { 407 #define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC) \ 408 case Intrinsic::INTRINSIC: 409 #include "llvm/IR/ConstrainedOps.def" 410 return true; 411 #undef INSTRUCTION 412 default: 413 return false; 414 } 415 } 416 417 void Function::clearArguments() { 418 for (Argument &A : makeArgArray(Arguments, NumArgs)) { 419 A.setName(""); 420 A.~Argument(); 421 } 422 std::allocator<Argument>().deallocate(Arguments, NumArgs); 423 Arguments = nullptr; 424 } 425 426 void Function::stealArgumentListFrom(Function &Src) { 427 assert(isDeclaration() && "Expected no references to current arguments"); 428 429 // Drop the current arguments, if any, and set the lazy argument bit. 430 if (!hasLazyArguments()) { 431 assert(llvm::all_of(makeArgArray(Arguments, NumArgs), 432 [](const Argument &A) { return A.use_empty(); }) && 433 "Expected arguments to be unused in declaration"); 434 clearArguments(); 435 setValueSubclassData(getSubclassDataFromValue() | (1 << 0)); 436 } 437 438 // Nothing to steal if Src has lazy arguments. 439 if (Src.hasLazyArguments()) 440 return; 441 442 // Steal arguments from Src, and fix the lazy argument bits. 443 assert(arg_size() == Src.arg_size()); 444 Arguments = Src.Arguments; 445 Src.Arguments = nullptr; 446 for (Argument &A : makeArgArray(Arguments, NumArgs)) { 447 // FIXME: This does the work of transferNodesFromList inefficiently. 448 SmallString<128> Name; 449 if (A.hasName()) 450 Name = A.getName(); 451 if (!Name.empty()) 452 A.setName(""); 453 A.setParent(this); 454 if (!Name.empty()) 455 A.setName(Name); 456 } 457 458 setValueSubclassData(getSubclassDataFromValue() & ~(1 << 0)); 459 assert(!hasLazyArguments()); 460 Src.setValueSubclassData(Src.getSubclassDataFromValue() | (1 << 0)); 461 } 462 463 // dropAllReferences() - This function causes all the subinstructions to "let 464 // go" of all references that they are maintaining. This allows one to 465 // 'delete' a whole class at a time, even though there may be circular 466 // references... first all references are dropped, and all use counts go to 467 // zero. Then everything is deleted for real. 
Note that no operations are 468 // valid on an object that has "dropped all references", except operator 469 // delete. 470 // 471 void Function::dropAllReferences() { 472 setIsMaterializable(false); 473 474 for (BasicBlock &BB : *this) 475 BB.dropAllReferences(); 476 477 // Delete all basic blocks. They are now unused, except possibly by 478 // blockaddresses, but BasicBlock's destructor takes care of those. 479 while (!BasicBlocks.empty()) 480 BasicBlocks.begin()->eraseFromParent(); 481 482 // Drop uses of any optional data (real or placeholder). 483 if (getNumOperands()) { 484 User::dropAllReferences(); 485 setNumHungOffUseOperands(0); 486 setValueSubclassData(getSubclassDataFromValue() & ~0xe); 487 } 488 489 // Metadata is stored in a side-table. 490 clearMetadata(); 491 } 492 493 void Function::addAttribute(unsigned i, Attribute::AttrKind Kind) { 494 AttributeList PAL = getAttributes(); 495 PAL = PAL.addAttribute(getContext(), i, Kind); 496 setAttributes(PAL); 497 } 498 499 void Function::addAttribute(unsigned i, Attribute Attr) { 500 AttributeList PAL = getAttributes(); 501 PAL = PAL.addAttribute(getContext(), i, Attr); 502 setAttributes(PAL); 503 } 504 505 void Function::addAttributes(unsigned i, const AttrBuilder &Attrs) { 506 AttributeList PAL = getAttributes(); 507 PAL = PAL.addAttributes(getContext(), i, Attrs); 508 setAttributes(PAL); 509 } 510 511 void Function::addParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) { 512 AttributeList PAL = getAttributes(); 513 PAL = PAL.addParamAttribute(getContext(), ArgNo, Kind); 514 setAttributes(PAL); 515 } 516 517 void Function::addParamAttr(unsigned ArgNo, Attribute Attr) { 518 AttributeList PAL = getAttributes(); 519 PAL = PAL.addParamAttribute(getContext(), ArgNo, Attr); 520 setAttributes(PAL); 521 } 522 523 void Function::addParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) { 524 AttributeList PAL = getAttributes(); 525 PAL = PAL.addParamAttributes(getContext(), ArgNo, Attrs); 526 setAttributes(PAL); 527 } 528 529 void Function::removeAttribute(unsigned i, Attribute::AttrKind Kind) { 530 AttributeList PAL = getAttributes(); 531 PAL = PAL.removeAttribute(getContext(), i, Kind); 532 setAttributes(PAL); 533 } 534 535 void Function::removeAttribute(unsigned i, StringRef Kind) { 536 AttributeList PAL = getAttributes(); 537 PAL = PAL.removeAttribute(getContext(), i, Kind); 538 setAttributes(PAL); 539 } 540 541 void Function::removeAttributes(unsigned i, const AttrBuilder &Attrs) { 542 AttributeList PAL = getAttributes(); 543 PAL = PAL.removeAttributes(getContext(), i, Attrs); 544 setAttributes(PAL); 545 } 546 547 void Function::removeParamAttr(unsigned ArgNo, Attribute::AttrKind Kind) { 548 AttributeList PAL = getAttributes(); 549 PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind); 550 setAttributes(PAL); 551 } 552 553 void Function::removeParamAttr(unsigned ArgNo, StringRef Kind) { 554 AttributeList PAL = getAttributes(); 555 PAL = PAL.removeParamAttribute(getContext(), ArgNo, Kind); 556 setAttributes(PAL); 557 } 558 559 void Function::removeParamAttrs(unsigned ArgNo, const AttrBuilder &Attrs) { 560 AttributeList PAL = getAttributes(); 561 PAL = PAL.removeParamAttributes(getContext(), ArgNo, Attrs); 562 setAttributes(PAL); 563 } 564 565 void Function::removeParamUndefImplyingAttrs(unsigned ArgNo) { 566 AttributeList PAL = getAttributes(); 567 PAL = PAL.removeParamUndefImplyingAttributes(getContext(), ArgNo); 568 setAttributes(PAL); 569 } 570 571 void Function::addDereferenceableAttr(unsigned i, uint64_t Bytes) { 572 
AttributeList PAL = getAttributes(); 573 PAL = PAL.addDereferenceableAttr(getContext(), i, Bytes); 574 setAttributes(PAL); 575 } 576 577 void Function::addDereferenceableParamAttr(unsigned ArgNo, uint64_t Bytes) { 578 AttributeList PAL = getAttributes(); 579 PAL = PAL.addDereferenceableParamAttr(getContext(), ArgNo, Bytes); 580 setAttributes(PAL); 581 } 582 583 void Function::addDereferenceableOrNullAttr(unsigned i, uint64_t Bytes) { 584 AttributeList PAL = getAttributes(); 585 PAL = PAL.addDereferenceableOrNullAttr(getContext(), i, Bytes); 586 setAttributes(PAL); 587 } 588 589 void Function::addDereferenceableOrNullParamAttr(unsigned ArgNo, 590 uint64_t Bytes) { 591 AttributeList PAL = getAttributes(); 592 PAL = PAL.addDereferenceableOrNullParamAttr(getContext(), ArgNo, Bytes); 593 setAttributes(PAL); 594 } 595 596 DenormalMode Function::getDenormalMode(const fltSemantics &FPType) const { 597 if (&FPType == &APFloat::IEEEsingle()) { 598 Attribute Attr = getFnAttribute("denormal-fp-math-f32"); 599 StringRef Val = Attr.getValueAsString(); 600 if (!Val.empty()) 601 return parseDenormalFPAttribute(Val); 602 603 // If the f32 variant of the attribute isn't specified, try to use the 604 // generic one. 605 } 606 607 Attribute Attr = getFnAttribute("denormal-fp-math"); 608 return parseDenormalFPAttribute(Attr.getValueAsString()); 609 } 610 611 const std::string &Function::getGC() const { 612 assert(hasGC() && "Function has no collector"); 613 return getContext().getGC(*this); 614 } 615 616 void Function::setGC(std::string Str) { 617 setValueSubclassDataBit(14, !Str.empty()); 618 getContext().setGC(*this, std::move(Str)); 619 } 620 621 void Function::clearGC() { 622 if (!hasGC()) 623 return; 624 getContext().deleteGC(*this); 625 setValueSubclassDataBit(14, false); 626 } 627 628 bool Function::hasStackProtectorFnAttr() const { 629 return hasFnAttribute(Attribute::StackProtect) || 630 hasFnAttribute(Attribute::StackProtectStrong) || 631 hasFnAttribute(Attribute::StackProtectReq); 632 } 633 634 /// Copy all additional attributes (those not needed to create a Function) from 635 /// the Function Src to this one. 636 void Function::copyAttributesFrom(const Function *Src) { 637 GlobalObject::copyAttributesFrom(Src); 638 setCallingConv(Src->getCallingConv()); 639 setAttributes(Src->getAttributes()); 640 if (Src->hasGC()) 641 setGC(Src->getGC()); 642 else 643 clearGC(); 644 if (Src->hasPersonalityFn()) 645 setPersonalityFn(Src->getPersonalityFn()); 646 if (Src->hasPrefixData()) 647 setPrefixData(Src->getPrefixData()); 648 if (Src->hasPrologueData()) 649 setPrologueData(Src->getPrologueData()); 650 } 651 652 /// Table of string intrinsic names indexed by enum value. 653 static const char * const IntrinsicNameTable[] = { 654 "not_intrinsic", 655 #define GET_INTRINSIC_NAME_TABLE 656 #include "llvm/IR/IntrinsicImpl.inc" 657 #undef GET_INTRINSIC_NAME_TABLE 658 }; 659 660 /// Table of per-target intrinsic name tables. 661 #define GET_INTRINSIC_TARGET_DATA 662 #include "llvm/IR/IntrinsicImpl.inc" 663 #undef GET_INTRINSIC_TARGET_DATA 664 665 bool Function::isTargetIntrinsic(Intrinsic::ID IID) { 666 return IID > TargetInfos[0].Count; 667 } 668 669 bool Function::isTargetIntrinsic() const { 670 return isTargetIntrinsic(IntID); 671 } 672 673 /// Find the segment of \c IntrinsicNameTable for intrinsics with the same 674 /// target as \c Name, or the generic table if \c Name is not target specific. 
///
/// Returns the relevant slice of \c IntrinsicNameTable
static ArrayRef<const char *> findTargetSubtable(StringRef Name) {
  assert(Name.startswith("llvm."));

  ArrayRef<IntrinsicTargetInfo> Targets(TargetInfos);
  // Drop "llvm." and take the first dotted component. That will be the target
  // if this is target specific.
  StringRef Target = Name.drop_front(5).split('.').first;
  auto It = partition_point(
      Targets, [=](const IntrinsicTargetInfo &TI) { return TI.Name < Target; });
  // We've either found the target, or we fall back to the generic set, which
  // is always first.
  const auto &TI = It != Targets.end() && It->Name == Target ? *It : Targets[0];
  return makeArrayRef(&IntrinsicNameTable[1] + TI.Offset, TI.Count);
}

/// This does the actual lookup of an intrinsic ID which
/// matches the given function name.
Intrinsic::ID Function::lookupIntrinsicID(StringRef Name) {
  ArrayRef<const char *> NameTable = findTargetSubtable(Name);
  int Idx = Intrinsic::lookupLLVMIntrinsicByName(NameTable, Name);
  if (Idx == -1)
    return Intrinsic::not_intrinsic;

  // Intrinsic IDs correspond to the location in IntrinsicNameTable, but we
  // have an index into a sub-table.
  int Adjust = NameTable.data() - IntrinsicNameTable;
  Intrinsic::ID ID = static_cast<Intrinsic::ID>(Idx + Adjust);

  // If the intrinsic is not overloaded, require an exact match. If it is
  // overloaded, require either exact or prefix match.
  const auto MatchSize = strlen(NameTable[Idx]);
  assert(Name.size() >= MatchSize && "Expected either exact or prefix match");
  bool IsExactMatch = Name.size() == MatchSize;
  return IsExactMatch || Intrinsic::isOverloaded(ID) ? ID
                                                     : Intrinsic::not_intrinsic;
}

void Function::recalculateIntrinsicID() {
  StringRef Name = getName();
  if (!Name.startswith("llvm.")) {
    HasLLVMReservedName = false;
    IntID = Intrinsic::not_intrinsic;
    return;
  }
  HasLLVMReservedName = true;
  IntID = lookupIntrinsicID(Name);
}

/// Returns a stable mangling for the type specified for use in the name
/// mangling scheme used by 'any' types in intrinsic signatures. The mangling
/// of named types is simply their name. Manglings for unnamed types consist
/// of a prefix ('p' for pointers, 'a' for arrays, 'f_' for functions)
/// combined with the mangling of their component types. A vararg function
/// type will have a suffix of 'vararg'. Since function types can contain
/// other function types, we close a function type mangling with suffix 'f'
/// which can't be confused with its prefix. This ensures we don't have
/// collisions between two unrelated function types. Otherwise, you might
/// parse ffXX as f(fXX) or f(fX)X. (X is a placeholder for any other type.)
/// The HasUnnamedType boolean is set if an unnamed type was encountered,
/// indicating that extra care must be taken to ensure a unique name.
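///
/// For example (illustrative, not exhaustive): under this scheme a fixed
/// vector <4 x float> mangles to "v4f32", a pointer to it in address space 0
/// to "p0v4f32", and a literal struct {i32, i64} to "sl_i32i64s".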
static std::string getMangledTypeStr(Type *Ty, bool &HasUnnamedType) {
  std::string Result;
  if (PointerType *PTyp = dyn_cast<PointerType>(Ty)) {
    Result += "p" + utostr(PTyp->getAddressSpace()) +
              getMangledTypeStr(PTyp->getElementType(), HasUnnamedType);
  } else if (ArrayType *ATyp = dyn_cast<ArrayType>(Ty)) {
    Result += "a" + utostr(ATyp->getNumElements()) +
              getMangledTypeStr(ATyp->getElementType(), HasUnnamedType);
  } else if (StructType *STyp = dyn_cast<StructType>(Ty)) {
    if (!STyp->isLiteral()) {
      Result += "s_";
      if (STyp->hasName())
        Result += STyp->getName();
      else
        HasUnnamedType = true;
    } else {
      Result += "sl_";
      for (auto Elem : STyp->elements())
        Result += getMangledTypeStr(Elem, HasUnnamedType);
    }
    // Ensure nested structs are distinguishable.
    Result += "s";
  } else if (FunctionType *FT = dyn_cast<FunctionType>(Ty)) {
    Result += "f_" + getMangledTypeStr(FT->getReturnType(), HasUnnamedType);
    for (size_t i = 0; i < FT->getNumParams(); i++)
      Result += getMangledTypeStr(FT->getParamType(i), HasUnnamedType);
    if (FT->isVarArg())
      Result += "vararg";
    // Ensure nested function types are distinguishable.
    Result += "f";
  } else if (VectorType *VTy = dyn_cast<VectorType>(Ty)) {
    ElementCount EC = VTy->getElementCount();
    if (EC.isScalable())
      Result += "nx";
    Result += "v" + utostr(EC.getKnownMinValue()) +
              getMangledTypeStr(VTy->getElementType(), HasUnnamedType);
  } else if (Ty) {
    switch (Ty->getTypeID()) {
    default: llvm_unreachable("Unhandled type");
    case Type::VoidTyID:      Result += "isVoid";   break;
    case Type::MetadataTyID:  Result += "Metadata"; break;
    case Type::HalfTyID:      Result += "f16";      break;
    case Type::BFloatTyID:    Result += "bf16";     break;
    case Type::FloatTyID:     Result += "f32";      break;
    case Type::DoubleTyID:    Result += "f64";      break;
    case Type::X86_FP80TyID:  Result += "f80";      break;
    case Type::FP128TyID:     Result += "f128";     break;
    case Type::PPC_FP128TyID: Result += "ppcf128";  break;
    case Type::X86_MMXTyID:   Result += "x86mmx";   break;
    case Type::X86_AMXTyID:   Result += "x86amx";   break;
    case Type::IntegerTyID:
      Result += "i" + utostr(cast<IntegerType>(Ty)->getBitWidth());
      break;
    }
  }
  return Result;
}

StringRef Intrinsic::getName(ID id) {
  assert(id < num_intrinsics && "Invalid intrinsic ID!");
  assert(!Intrinsic::isOverloaded(id) &&
         "This version of getName does not support overloading");
  return IntrinsicNameTable[id];
}

std::string Intrinsic::getName(ID Id, ArrayRef<Type *> Tys, Module *M,
                               FunctionType *FT) {
  assert(Id < num_intrinsics && "Invalid intrinsic ID!");
  assert((Tys.empty() || Intrinsic::isOverloaded(Id)) &&
         "This version of getName is for overloaded intrinsics only");
  bool HasUnnamedType = false;
  std::string Result(IntrinsicNameTable[Id]);
  for (Type *Ty : Tys) {
    Result += "." + getMangledTypeStr(Ty, HasUnnamedType);
  }
  assert((M || !HasUnnamedType) && "unnamed types need a module");
  if (M && HasUnnamedType) {
    if (!FT)
      FT = getType(M->getContext(), Id, Tys);
    else
      assert((FT == getType(M->getContext(), Id, Tys)) &&
             "Provided FunctionType must match arguments");
    return M->getUniqueIntrinsicName(Result, Id, FT);
  }
  return Result;
}

std::string Intrinsic::getName(ID Id, ArrayRef<Type *> Tys) {
  return getName(Id, Tys, nullptr, nullptr);
}

/// IIT_Info - These are enumerators that describe the entries returned by the
/// getIntrinsicInfoTableEntries function.
///
/// NOTE: This must be kept in sync with the copy in TblGen/IntrinsicEmitter!
enum IIT_Info {
  // Common values should be encoded with 0-15.
  IIT_Done = 0,
  IIT_I1 = 1,
  IIT_I8 = 2,
  IIT_I16 = 3,
  IIT_I32 = 4,
  IIT_I64 = 5,
  IIT_F16 = 6,
  IIT_F32 = 7,
  IIT_F64 = 8,
  IIT_V2 = 9,
  IIT_V4 = 10,
  IIT_V8 = 11,
  IIT_V16 = 12,
  IIT_V32 = 13,
  IIT_PTR = 14,
  IIT_ARG = 15,

  // Values from 16+ are only encodable with the inefficient encoding.
  IIT_V64 = 16,
  IIT_MMX = 17,
  IIT_TOKEN = 18,
  IIT_METADATA = 19,
  IIT_EMPTYSTRUCT = 20,
  IIT_STRUCT2 = 21,
  IIT_STRUCT3 = 22,
  IIT_STRUCT4 = 23,
  IIT_STRUCT5 = 24,
  IIT_EXTEND_ARG = 25,
  IIT_TRUNC_ARG = 26,
  IIT_ANYPTR = 27,
  IIT_V1 = 28,
  IIT_VARARG = 29,
  IIT_HALF_VEC_ARG = 30,
  IIT_SAME_VEC_WIDTH_ARG = 31,
  IIT_PTR_TO_ARG = 32,
  IIT_PTR_TO_ELT = 33,
  IIT_VEC_OF_ANYPTRS_TO_ELT = 34,
  IIT_I128 = 35,
  IIT_V512 = 36,
  IIT_V1024 = 37,
  IIT_STRUCT6 = 38,
  IIT_STRUCT7 = 39,
  IIT_STRUCT8 = 40,
  IIT_F128 = 41,
  IIT_VEC_ELEMENT = 42,
  IIT_SCALABLE_VEC = 43,
  IIT_SUBDIVIDE2_ARG = 44,
  IIT_SUBDIVIDE4_ARG = 45,
  IIT_VEC_OF_BITCASTS_TO_INT = 46,
  IIT_V128 = 47,
  IIT_BF16 = 48,
  IIT_STRUCT9 = 49,
  IIT_V256 = 50,
  IIT_AMX = 51
};

static void DecodeIITType(unsigned &NextElt, ArrayRef<unsigned char> Infos,
                          IIT_Info LastInfo,
                          SmallVectorImpl<Intrinsic::IITDescriptor> &OutputTable) {
  using namespace Intrinsic;

  bool IsScalableVector = (LastInfo == IIT_SCALABLE_VEC);

  IIT_Info Info = IIT_Info(Infos[NextElt++]);
  unsigned StructElts = 2;

  switch (Info) {
  case IIT_Done:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Void, 0));
    return;
  case IIT_VARARG:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::VarArg, 0));
    return;
  case IIT_MMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::MMX, 0));
    return;
  case IIT_AMX:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::AMX, 0));
    return;
  case IIT_TOKEN:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Token, 0));
    return;
  case IIT_METADATA:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Metadata, 0));
    return;
  case IIT_F16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Half, 0));
    return;
  case IIT_BF16:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::BFloat, 0));
    return;
  case IIT_F32:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Float, 0));
    return;
  case IIT_F64:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Double, 0));
    return;
  case IIT_F128:
    OutputTable.push_back(IITDescriptor::get(IITDescriptor::Quad, 0));
    return;
  case IIT_I1:
OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 1)); 936 return; 937 case IIT_I8: 938 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 8)); 939 return; 940 case IIT_I16: 941 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer,16)); 942 return; 943 case IIT_I32: 944 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 32)); 945 return; 946 case IIT_I64: 947 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 64)); 948 return; 949 case IIT_I128: 950 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Integer, 128)); 951 return; 952 case IIT_V1: 953 OutputTable.push_back(IITDescriptor::getVector(1, IsScalableVector)); 954 DecodeIITType(NextElt, Infos, Info, OutputTable); 955 return; 956 case IIT_V2: 957 OutputTable.push_back(IITDescriptor::getVector(2, IsScalableVector)); 958 DecodeIITType(NextElt, Infos, Info, OutputTable); 959 return; 960 case IIT_V4: 961 OutputTable.push_back(IITDescriptor::getVector(4, IsScalableVector)); 962 DecodeIITType(NextElt, Infos, Info, OutputTable); 963 return; 964 case IIT_V8: 965 OutputTable.push_back(IITDescriptor::getVector(8, IsScalableVector)); 966 DecodeIITType(NextElt, Infos, Info, OutputTable); 967 return; 968 case IIT_V16: 969 OutputTable.push_back(IITDescriptor::getVector(16, IsScalableVector)); 970 DecodeIITType(NextElt, Infos, Info, OutputTable); 971 return; 972 case IIT_V32: 973 OutputTable.push_back(IITDescriptor::getVector(32, IsScalableVector)); 974 DecodeIITType(NextElt, Infos, Info, OutputTable); 975 return; 976 case IIT_V64: 977 OutputTable.push_back(IITDescriptor::getVector(64, IsScalableVector)); 978 DecodeIITType(NextElt, Infos, Info, OutputTable); 979 return; 980 case IIT_V128: 981 OutputTable.push_back(IITDescriptor::getVector(128, IsScalableVector)); 982 DecodeIITType(NextElt, Infos, Info, OutputTable); 983 return; 984 case IIT_V256: 985 OutputTable.push_back(IITDescriptor::getVector(256, IsScalableVector)); 986 DecodeIITType(NextElt, Infos, Info, OutputTable); 987 return; 988 case IIT_V512: 989 OutputTable.push_back(IITDescriptor::getVector(512, IsScalableVector)); 990 DecodeIITType(NextElt, Infos, Info, OutputTable); 991 return; 992 case IIT_V1024: 993 OutputTable.push_back(IITDescriptor::getVector(1024, IsScalableVector)); 994 DecodeIITType(NextElt, Infos, Info, OutputTable); 995 return; 996 case IIT_PTR: 997 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 0)); 998 DecodeIITType(NextElt, Infos, Info, OutputTable); 999 return; 1000 case IIT_ANYPTR: { // [ANYPTR addrspace, subtype] 1001 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Pointer, 1002 Infos[NextElt++])); 1003 DecodeIITType(NextElt, Infos, Info, OutputTable); 1004 return; 1005 } 1006 case IIT_ARG: { 1007 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1008 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Argument, ArgInfo)); 1009 return; 1010 } 1011 case IIT_EXTEND_ARG: { 1012 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1013 OutputTable.push_back(IITDescriptor::get(IITDescriptor::ExtendArgument, 1014 ArgInfo)); 1015 return; 1016 } 1017 case IIT_TRUNC_ARG: { 1018 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1019 OutputTable.push_back(IITDescriptor::get(IITDescriptor::TruncArgument, 1020 ArgInfo)); 1021 return; 1022 } 1023 case IIT_HALF_VEC_ARG: { 1024 unsigned ArgInfo = (NextElt == Infos.size() ? 
0 : Infos[NextElt++]); 1025 OutputTable.push_back(IITDescriptor::get(IITDescriptor::HalfVecArgument, 1026 ArgInfo)); 1027 return; 1028 } 1029 case IIT_SAME_VEC_WIDTH_ARG: { 1030 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1031 OutputTable.push_back(IITDescriptor::get(IITDescriptor::SameVecWidthArgument, 1032 ArgInfo)); 1033 return; 1034 } 1035 case IIT_PTR_TO_ARG: { 1036 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1037 OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToArgument, 1038 ArgInfo)); 1039 return; 1040 } 1041 case IIT_PTR_TO_ELT: { 1042 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1043 OutputTable.push_back(IITDescriptor::get(IITDescriptor::PtrToElt, ArgInfo)); 1044 return; 1045 } 1046 case IIT_VEC_OF_ANYPTRS_TO_ELT: { 1047 unsigned short ArgNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1048 unsigned short RefNo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1049 OutputTable.push_back( 1050 IITDescriptor::get(IITDescriptor::VecOfAnyPtrsToElt, ArgNo, RefNo)); 1051 return; 1052 } 1053 case IIT_EMPTYSTRUCT: 1054 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct, 0)); 1055 return; 1056 case IIT_STRUCT9: ++StructElts; LLVM_FALLTHROUGH; 1057 case IIT_STRUCT8: ++StructElts; LLVM_FALLTHROUGH; 1058 case IIT_STRUCT7: ++StructElts; LLVM_FALLTHROUGH; 1059 case IIT_STRUCT6: ++StructElts; LLVM_FALLTHROUGH; 1060 case IIT_STRUCT5: ++StructElts; LLVM_FALLTHROUGH; 1061 case IIT_STRUCT4: ++StructElts; LLVM_FALLTHROUGH; 1062 case IIT_STRUCT3: ++StructElts; LLVM_FALLTHROUGH; 1063 case IIT_STRUCT2: { 1064 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Struct,StructElts)); 1065 1066 for (unsigned i = 0; i != StructElts; ++i) 1067 DecodeIITType(NextElt, Infos, Info, OutputTable); 1068 return; 1069 } 1070 case IIT_SUBDIVIDE2_ARG: { 1071 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1072 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide2Argument, 1073 ArgInfo)); 1074 return; 1075 } 1076 case IIT_SUBDIVIDE4_ARG: { 1077 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1078 OutputTable.push_back(IITDescriptor::get(IITDescriptor::Subdivide4Argument, 1079 ArgInfo)); 1080 return; 1081 } 1082 case IIT_VEC_ELEMENT: { 1083 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1084 OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecElementArgument, 1085 ArgInfo)); 1086 return; 1087 } 1088 case IIT_SCALABLE_VEC: { 1089 DecodeIITType(NextElt, Infos, Info, OutputTable); 1090 return; 1091 } 1092 case IIT_VEC_OF_BITCASTS_TO_INT: { 1093 unsigned ArgInfo = (NextElt == Infos.size() ? 0 : Infos[NextElt++]); 1094 OutputTable.push_back(IITDescriptor::get(IITDescriptor::VecOfBitcastsToInt, 1095 ArgInfo)); 1096 return; 1097 } 1098 } 1099 llvm_unreachable("unhandled"); 1100 } 1101 1102 #define GET_INTRINSIC_GENERATOR_GLOBAL 1103 #include "llvm/IR/IntrinsicImpl.inc" 1104 #undef GET_INTRINSIC_GENERATOR_GLOBAL 1105 1106 void Intrinsic::getIntrinsicInfoTableEntries(ID id, 1107 SmallVectorImpl<IITDescriptor> &T){ 1108 // Check to see if the intrinsic's type was expressible by the table. 1109 unsigned TableVal = IIT_Table[id-1]; 1110 1111 // Decode the TableVal into an array of IITValues. 1112 SmallVector<unsigned char, 8> IITValues; 1113 ArrayRef<unsigned char> IITEntries; 1114 unsigned NextElt = 0; 1115 if ((TableVal >> 31) != 0) { 1116 // This is an offset into the IIT_LongEncodingTable. 
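    // Illustrative: the top bit of TableVal is only the long-encoding
    // sentinel, so a table value of 0x8000002A, for example, decodes to
    // starting offset 0x2A == 42 in the long table.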
1117 IITEntries = IIT_LongEncodingTable; 1118 1119 // Strip sentinel bit. 1120 NextElt = (TableVal << 1) >> 1; 1121 } else { 1122 // Decode the TableVal into an array of IITValues. If the entry was encoded 1123 // into a single word in the table itself, decode it now. 1124 do { 1125 IITValues.push_back(TableVal & 0xF); 1126 TableVal >>= 4; 1127 } while (TableVal); 1128 1129 IITEntries = IITValues; 1130 NextElt = 0; 1131 } 1132 1133 // Okay, decode the table into the output vector of IITDescriptors. 1134 DecodeIITType(NextElt, IITEntries, IIT_Done, T); 1135 while (NextElt != IITEntries.size() && IITEntries[NextElt] != 0) 1136 DecodeIITType(NextElt, IITEntries, IIT_Done, T); 1137 } 1138 1139 static Type *DecodeFixedType(ArrayRef<Intrinsic::IITDescriptor> &Infos, 1140 ArrayRef<Type*> Tys, LLVMContext &Context) { 1141 using namespace Intrinsic; 1142 1143 IITDescriptor D = Infos.front(); 1144 Infos = Infos.slice(1); 1145 1146 switch (D.Kind) { 1147 case IITDescriptor::Void: return Type::getVoidTy(Context); 1148 case IITDescriptor::VarArg: return Type::getVoidTy(Context); 1149 case IITDescriptor::MMX: return Type::getX86_MMXTy(Context); 1150 case IITDescriptor::AMX: return Type::getX86_AMXTy(Context); 1151 case IITDescriptor::Token: return Type::getTokenTy(Context); 1152 case IITDescriptor::Metadata: return Type::getMetadataTy(Context); 1153 case IITDescriptor::Half: return Type::getHalfTy(Context); 1154 case IITDescriptor::BFloat: return Type::getBFloatTy(Context); 1155 case IITDescriptor::Float: return Type::getFloatTy(Context); 1156 case IITDescriptor::Double: return Type::getDoubleTy(Context); 1157 case IITDescriptor::Quad: return Type::getFP128Ty(Context); 1158 1159 case IITDescriptor::Integer: 1160 return IntegerType::get(Context, D.Integer_Width); 1161 case IITDescriptor::Vector: 1162 return VectorType::get(DecodeFixedType(Infos, Tys, Context), 1163 D.Vector_Width); 1164 case IITDescriptor::Pointer: 1165 return PointerType::get(DecodeFixedType(Infos, Tys, Context), 1166 D.Pointer_AddressSpace); 1167 case IITDescriptor::Struct: { 1168 SmallVector<Type *, 8> Elts; 1169 for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i) 1170 Elts.push_back(DecodeFixedType(Infos, Tys, Context)); 1171 return StructType::get(Context, Elts); 1172 } 1173 case IITDescriptor::Argument: 1174 return Tys[D.getArgumentNumber()]; 1175 case IITDescriptor::ExtendArgument: { 1176 Type *Ty = Tys[D.getArgumentNumber()]; 1177 if (VectorType *VTy = dyn_cast<VectorType>(Ty)) 1178 return VectorType::getExtendedElementVectorType(VTy); 1179 1180 return IntegerType::get(Context, 2 * cast<IntegerType>(Ty)->getBitWidth()); 1181 } 1182 case IITDescriptor::TruncArgument: { 1183 Type *Ty = Tys[D.getArgumentNumber()]; 1184 if (VectorType *VTy = dyn_cast<VectorType>(Ty)) 1185 return VectorType::getTruncatedElementVectorType(VTy); 1186 1187 IntegerType *ITy = cast<IntegerType>(Ty); 1188 assert(ITy->getBitWidth() % 2 == 0); 1189 return IntegerType::get(Context, ITy->getBitWidth() / 2); 1190 } 1191 case IITDescriptor::Subdivide2Argument: 1192 case IITDescriptor::Subdivide4Argument: { 1193 Type *Ty = Tys[D.getArgumentNumber()]; 1194 VectorType *VTy = dyn_cast<VectorType>(Ty); 1195 assert(VTy && "Expected an argument of Vector Type"); 1196 int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 
1 : 2; 1197 return VectorType::getSubdividedVectorType(VTy, SubDivs); 1198 } 1199 case IITDescriptor::HalfVecArgument: 1200 return VectorType::getHalfElementsVectorType(cast<VectorType>( 1201 Tys[D.getArgumentNumber()])); 1202 case IITDescriptor::SameVecWidthArgument: { 1203 Type *EltTy = DecodeFixedType(Infos, Tys, Context); 1204 Type *Ty = Tys[D.getArgumentNumber()]; 1205 if (auto *VTy = dyn_cast<VectorType>(Ty)) 1206 return VectorType::get(EltTy, VTy->getElementCount()); 1207 return EltTy; 1208 } 1209 case IITDescriptor::PtrToArgument: { 1210 Type *Ty = Tys[D.getArgumentNumber()]; 1211 return PointerType::getUnqual(Ty); 1212 } 1213 case IITDescriptor::PtrToElt: { 1214 Type *Ty = Tys[D.getArgumentNumber()]; 1215 VectorType *VTy = dyn_cast<VectorType>(Ty); 1216 if (!VTy) 1217 llvm_unreachable("Expected an argument of Vector Type"); 1218 Type *EltTy = VTy->getElementType(); 1219 return PointerType::getUnqual(EltTy); 1220 } 1221 case IITDescriptor::VecElementArgument: { 1222 Type *Ty = Tys[D.getArgumentNumber()]; 1223 if (VectorType *VTy = dyn_cast<VectorType>(Ty)) 1224 return VTy->getElementType(); 1225 llvm_unreachable("Expected an argument of Vector Type"); 1226 } 1227 case IITDescriptor::VecOfBitcastsToInt: { 1228 Type *Ty = Tys[D.getArgumentNumber()]; 1229 VectorType *VTy = dyn_cast<VectorType>(Ty); 1230 assert(VTy && "Expected an argument of Vector Type"); 1231 return VectorType::getInteger(VTy); 1232 } 1233 case IITDescriptor::VecOfAnyPtrsToElt: 1234 // Return the overloaded type (which determines the pointers address space) 1235 return Tys[D.getOverloadArgNumber()]; 1236 } 1237 llvm_unreachable("unhandled"); 1238 } 1239 1240 FunctionType *Intrinsic::getType(LLVMContext &Context, 1241 ID id, ArrayRef<Type*> Tys) { 1242 SmallVector<IITDescriptor, 8> Table; 1243 getIntrinsicInfoTableEntries(id, Table); 1244 1245 ArrayRef<IITDescriptor> TableRef = Table; 1246 Type *ResultTy = DecodeFixedType(TableRef, Tys, Context); 1247 1248 SmallVector<Type*, 8> ArgTys; 1249 while (!TableRef.empty()) 1250 ArgTys.push_back(DecodeFixedType(TableRef, Tys, Context)); 1251 1252 // DecodeFixedType returns Void for IITDescriptor::Void and IITDescriptor::VarArg 1253 // If we see void type as the type of the last argument, it is vararg intrinsic 1254 if (!ArgTys.empty() && ArgTys.back()->isVoidTy()) { 1255 ArgTys.pop_back(); 1256 return FunctionType::get(ResultTy, ArgTys, true); 1257 } 1258 return FunctionType::get(ResultTy, ArgTys, false); 1259 } 1260 1261 bool Intrinsic::isOverloaded(ID id) { 1262 #define GET_INTRINSIC_OVERLOAD_TABLE 1263 #include "llvm/IR/IntrinsicImpl.inc" 1264 #undef GET_INTRINSIC_OVERLOAD_TABLE 1265 } 1266 1267 bool Intrinsic::isLeaf(ID id) { 1268 switch (id) { 1269 default: 1270 return true; 1271 1272 case Intrinsic::experimental_gc_statepoint: 1273 case Intrinsic::experimental_patchpoint_void: 1274 case Intrinsic::experimental_patchpoint_i64: 1275 return false; 1276 } 1277 } 1278 1279 /// This defines the "Intrinsic::getAttributes(ID id)" method. 1280 #define GET_INTRINSIC_ATTRIBUTES 1281 #include "llvm/IR/IntrinsicImpl.inc" 1282 #undef GET_INTRINSIC_ATTRIBUTES 1283 1284 Function *Intrinsic::getDeclaration(Module *M, ID id, ArrayRef<Type*> Tys) { 1285 // There can never be multiple globals with the same name of different types, 1286 // because intrinsics must be a specific type. 1287 auto *FT = getType(M->getContext(), id, Tys); 1288 return cast<Function>( 1289 M->getOrInsertFunction(Tys.empty() ? 
getName(id) 1290 : getName(id, Tys, M, FT), 1291 getType(M->getContext(), id, Tys)) 1292 .getCallee()); 1293 } 1294 1295 // This defines the "Intrinsic::getIntrinsicForGCCBuiltin()" method. 1296 #define GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN 1297 #include "llvm/IR/IntrinsicImpl.inc" 1298 #undef GET_LLVM_INTRINSIC_FOR_GCC_BUILTIN 1299 1300 // This defines the "Intrinsic::getIntrinsicForMSBuiltin()" method. 1301 #define GET_LLVM_INTRINSIC_FOR_MS_BUILTIN 1302 #include "llvm/IR/IntrinsicImpl.inc" 1303 #undef GET_LLVM_INTRINSIC_FOR_MS_BUILTIN 1304 1305 using DeferredIntrinsicMatchPair = 1306 std::pair<Type *, ArrayRef<Intrinsic::IITDescriptor>>; 1307 1308 static bool matchIntrinsicType( 1309 Type *Ty, ArrayRef<Intrinsic::IITDescriptor> &Infos, 1310 SmallVectorImpl<Type *> &ArgTys, 1311 SmallVectorImpl<DeferredIntrinsicMatchPair> &DeferredChecks, 1312 bool IsDeferredCheck) { 1313 using namespace Intrinsic; 1314 1315 // If we ran out of descriptors, there are too many arguments. 1316 if (Infos.empty()) return true; 1317 1318 // Do this before slicing off the 'front' part 1319 auto InfosRef = Infos; 1320 auto DeferCheck = [&DeferredChecks, &InfosRef](Type *T) { 1321 DeferredChecks.emplace_back(T, InfosRef); 1322 return false; 1323 }; 1324 1325 IITDescriptor D = Infos.front(); 1326 Infos = Infos.slice(1); 1327 1328 switch (D.Kind) { 1329 case IITDescriptor::Void: return !Ty->isVoidTy(); 1330 case IITDescriptor::VarArg: return true; 1331 case IITDescriptor::MMX: return !Ty->isX86_MMXTy(); 1332 case IITDescriptor::AMX: return !Ty->isX86_AMXTy(); 1333 case IITDescriptor::Token: return !Ty->isTokenTy(); 1334 case IITDescriptor::Metadata: return !Ty->isMetadataTy(); 1335 case IITDescriptor::Half: return !Ty->isHalfTy(); 1336 case IITDescriptor::BFloat: return !Ty->isBFloatTy(); 1337 case IITDescriptor::Float: return !Ty->isFloatTy(); 1338 case IITDescriptor::Double: return !Ty->isDoubleTy(); 1339 case IITDescriptor::Quad: return !Ty->isFP128Ty(); 1340 case IITDescriptor::Integer: return !Ty->isIntegerTy(D.Integer_Width); 1341 case IITDescriptor::Vector: { 1342 VectorType *VT = dyn_cast<VectorType>(Ty); 1343 return !VT || VT->getElementCount() != D.Vector_Width || 1344 matchIntrinsicType(VT->getElementType(), Infos, ArgTys, 1345 DeferredChecks, IsDeferredCheck); 1346 } 1347 case IITDescriptor::Pointer: { 1348 PointerType *PT = dyn_cast<PointerType>(Ty); 1349 return !PT || PT->getAddressSpace() != D.Pointer_AddressSpace || 1350 matchIntrinsicType(PT->getElementType(), Infos, ArgTys, 1351 DeferredChecks, IsDeferredCheck); 1352 } 1353 1354 case IITDescriptor::Struct: { 1355 StructType *ST = dyn_cast<StructType>(Ty); 1356 if (!ST || ST->getNumElements() != D.Struct_NumElements) 1357 return true; 1358 1359 for (unsigned i = 0, e = D.Struct_NumElements; i != e; ++i) 1360 if (matchIntrinsicType(ST->getElementType(i), Infos, ArgTys, 1361 DeferredChecks, IsDeferredCheck)) 1362 return true; 1363 return false; 1364 } 1365 1366 case IITDescriptor::Argument: 1367 // If this is the second occurrence of an argument, 1368 // verify that the later instance matches the previous instance. 
1369 if (D.getArgumentNumber() < ArgTys.size()) 1370 return Ty != ArgTys[D.getArgumentNumber()]; 1371 1372 if (D.getArgumentNumber() > ArgTys.size() || 1373 D.getArgumentKind() == IITDescriptor::AK_MatchType) 1374 return IsDeferredCheck || DeferCheck(Ty); 1375 1376 assert(D.getArgumentNumber() == ArgTys.size() && !IsDeferredCheck && 1377 "Table consistency error"); 1378 ArgTys.push_back(Ty); 1379 1380 switch (D.getArgumentKind()) { 1381 case IITDescriptor::AK_Any: return false; // Success 1382 case IITDescriptor::AK_AnyInteger: return !Ty->isIntOrIntVectorTy(); 1383 case IITDescriptor::AK_AnyFloat: return !Ty->isFPOrFPVectorTy(); 1384 case IITDescriptor::AK_AnyVector: return !isa<VectorType>(Ty); 1385 case IITDescriptor::AK_AnyPointer: return !isa<PointerType>(Ty); 1386 default: break; 1387 } 1388 llvm_unreachable("all argument kinds not covered"); 1389 1390 case IITDescriptor::ExtendArgument: { 1391 // If this is a forward reference, defer the check for later. 1392 if (D.getArgumentNumber() >= ArgTys.size()) 1393 return IsDeferredCheck || DeferCheck(Ty); 1394 1395 Type *NewTy = ArgTys[D.getArgumentNumber()]; 1396 if (VectorType *VTy = dyn_cast<VectorType>(NewTy)) 1397 NewTy = VectorType::getExtendedElementVectorType(VTy); 1398 else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy)) 1399 NewTy = IntegerType::get(ITy->getContext(), 2 * ITy->getBitWidth()); 1400 else 1401 return true; 1402 1403 return Ty != NewTy; 1404 } 1405 case IITDescriptor::TruncArgument: { 1406 // If this is a forward reference, defer the check for later. 1407 if (D.getArgumentNumber() >= ArgTys.size()) 1408 return IsDeferredCheck || DeferCheck(Ty); 1409 1410 Type *NewTy = ArgTys[D.getArgumentNumber()]; 1411 if (VectorType *VTy = dyn_cast<VectorType>(NewTy)) 1412 NewTy = VectorType::getTruncatedElementVectorType(VTy); 1413 else if (IntegerType *ITy = dyn_cast<IntegerType>(NewTy)) 1414 NewTy = IntegerType::get(ITy->getContext(), ITy->getBitWidth() / 2); 1415 else 1416 return true; 1417 1418 return Ty != NewTy; 1419 } 1420 case IITDescriptor::HalfVecArgument: 1421 // If this is a forward reference, defer the check for later. 1422 if (D.getArgumentNumber() >= ArgTys.size()) 1423 return IsDeferredCheck || DeferCheck(Ty); 1424 return !isa<VectorType>(ArgTys[D.getArgumentNumber()]) || 1425 VectorType::getHalfElementsVectorType( 1426 cast<VectorType>(ArgTys[D.getArgumentNumber()])) != Ty; 1427 case IITDescriptor::SameVecWidthArgument: { 1428 if (D.getArgumentNumber() >= ArgTys.size()) { 1429 // Defer check and subsequent check for the vector element type. 1430 Infos = Infos.slice(1); 1431 return IsDeferredCheck || DeferCheck(Ty); 1432 } 1433 auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]); 1434 auto *ThisArgType = dyn_cast<VectorType>(Ty); 1435 // Both must be vectors of the same number of elements or neither. 
1436 if ((ReferenceType != nullptr) != (ThisArgType != nullptr)) 1437 return true; 1438 Type *EltTy = Ty; 1439 if (ThisArgType) { 1440 if (ReferenceType->getElementCount() != 1441 ThisArgType->getElementCount()) 1442 return true; 1443 EltTy = ThisArgType->getElementType(); 1444 } 1445 return matchIntrinsicType(EltTy, Infos, ArgTys, DeferredChecks, 1446 IsDeferredCheck); 1447 } 1448 case IITDescriptor::PtrToArgument: { 1449 if (D.getArgumentNumber() >= ArgTys.size()) 1450 return IsDeferredCheck || DeferCheck(Ty); 1451 Type * ReferenceType = ArgTys[D.getArgumentNumber()]; 1452 PointerType *ThisArgType = dyn_cast<PointerType>(Ty); 1453 return (!ThisArgType || ThisArgType->getElementType() != ReferenceType); 1454 } 1455 case IITDescriptor::PtrToElt: { 1456 if (D.getArgumentNumber() >= ArgTys.size()) 1457 return IsDeferredCheck || DeferCheck(Ty); 1458 VectorType * ReferenceType = 1459 dyn_cast<VectorType> (ArgTys[D.getArgumentNumber()]); 1460 PointerType *ThisArgType = dyn_cast<PointerType>(Ty); 1461 1462 return (!ThisArgType || !ReferenceType || 1463 ThisArgType->getElementType() != ReferenceType->getElementType()); 1464 } 1465 case IITDescriptor::VecOfAnyPtrsToElt: { 1466 unsigned RefArgNumber = D.getRefArgNumber(); 1467 if (RefArgNumber >= ArgTys.size()) { 1468 if (IsDeferredCheck) 1469 return true; 1470 // If forward referencing, already add the pointer-vector type and 1471 // defer the checks for later. 1472 ArgTys.push_back(Ty); 1473 return DeferCheck(Ty); 1474 } 1475 1476 if (!IsDeferredCheck){ 1477 assert(D.getOverloadArgNumber() == ArgTys.size() && 1478 "Table consistency error"); 1479 ArgTys.push_back(Ty); 1480 } 1481 1482 // Verify the overloaded type "matches" the Ref type. 1483 // i.e. Ty is a vector with the same width as Ref. 1484 // Composed of pointers to the same element type as Ref. 1485 auto *ReferenceType = dyn_cast<VectorType>(ArgTys[RefArgNumber]); 1486 auto *ThisArgVecTy = dyn_cast<VectorType>(Ty); 1487 if (!ThisArgVecTy || !ReferenceType || 1488 (ReferenceType->getElementCount() != ThisArgVecTy->getElementCount())) 1489 return true; 1490 PointerType *ThisArgEltTy = 1491 dyn_cast<PointerType>(ThisArgVecTy->getElementType()); 1492 if (!ThisArgEltTy) 1493 return true; 1494 return ThisArgEltTy->getElementType() != ReferenceType->getElementType(); 1495 } 1496 case IITDescriptor::VecElementArgument: { 1497 if (D.getArgumentNumber() >= ArgTys.size()) 1498 return IsDeferredCheck ? true : DeferCheck(Ty); 1499 auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]); 1500 return !ReferenceType || Ty != ReferenceType->getElementType(); 1501 } 1502 case IITDescriptor::Subdivide2Argument: 1503 case IITDescriptor::Subdivide4Argument: { 1504 // If this is a forward reference, defer the check for later. 1505 if (D.getArgumentNumber() >= ArgTys.size()) 1506 return IsDeferredCheck || DeferCheck(Ty); 1507 1508 Type *NewTy = ArgTys[D.getArgumentNumber()]; 1509 if (auto *VTy = dyn_cast<VectorType>(NewTy)) { 1510 int SubDivs = D.Kind == IITDescriptor::Subdivide2Argument ? 
1 : 2; 1511 NewTy = VectorType::getSubdividedVectorType(VTy, SubDivs); 1512 return Ty != NewTy; 1513 } 1514 return true; 1515 } 1516 case IITDescriptor::VecOfBitcastsToInt: { 1517 if (D.getArgumentNumber() >= ArgTys.size()) 1518 return IsDeferredCheck || DeferCheck(Ty); 1519 auto *ReferenceType = dyn_cast<VectorType>(ArgTys[D.getArgumentNumber()]); 1520 auto *ThisArgVecTy = dyn_cast<VectorType>(Ty); 1521 if (!ThisArgVecTy || !ReferenceType) 1522 return true; 1523 return ThisArgVecTy != VectorType::getInteger(ReferenceType); 1524 } 1525 } 1526 llvm_unreachable("unhandled"); 1527 } 1528 1529 Intrinsic::MatchIntrinsicTypesResult 1530 Intrinsic::matchIntrinsicSignature(FunctionType *FTy, 1531 ArrayRef<Intrinsic::IITDescriptor> &Infos, 1532 SmallVectorImpl<Type *> &ArgTys) { 1533 SmallVector<DeferredIntrinsicMatchPair, 2> DeferredChecks; 1534 if (matchIntrinsicType(FTy->getReturnType(), Infos, ArgTys, DeferredChecks, 1535 false)) 1536 return MatchIntrinsicTypes_NoMatchRet; 1537 1538 unsigned NumDeferredReturnChecks = DeferredChecks.size(); 1539 1540 for (auto Ty : FTy->params()) 1541 if (matchIntrinsicType(Ty, Infos, ArgTys, DeferredChecks, false)) 1542 return MatchIntrinsicTypes_NoMatchArg; 1543 1544 for (unsigned I = 0, E = DeferredChecks.size(); I != E; ++I) { 1545 DeferredIntrinsicMatchPair &Check = DeferredChecks[I]; 1546 if (matchIntrinsicType(Check.first, Check.second, ArgTys, DeferredChecks, 1547 true)) 1548 return I < NumDeferredReturnChecks ? MatchIntrinsicTypes_NoMatchRet 1549 : MatchIntrinsicTypes_NoMatchArg; 1550 } 1551 1552 return MatchIntrinsicTypes_Match; 1553 } 1554 1555 bool 1556 Intrinsic::matchIntrinsicVarArg(bool isVarArg, 1557 ArrayRef<Intrinsic::IITDescriptor> &Infos) { 1558 // If there are no descriptors left, then it can't be a vararg. 1559 if (Infos.empty()) 1560 return isVarArg; 1561 1562 // There should be only one descriptor remaining at this point. 1563 if (Infos.size() != 1) 1564 return true; 1565 1566 // Check and verify the descriptor. 
bool
Intrinsic::matchIntrinsicVarArg(bool isVarArg,
                                ArrayRef<Intrinsic::IITDescriptor> &Infos) {
  // If there are no descriptors left, then it can't be a vararg.
  if (Infos.empty())
    return isVarArg;

  // There should be only one descriptor remaining at this point.
  if (Infos.size() != 1)
    return true;

  // Check the remaining descriptor.
  IITDescriptor D = Infos.front();
  Infos = Infos.slice(1);
  if (D.Kind == IITDescriptor::VarArg)
    return !isVarArg;

  return true;
}

bool Intrinsic::getIntrinsicSignature(Function *F,
                                      SmallVectorImpl<Type *> &ArgTys) {
  Intrinsic::ID ID = F->getIntrinsicID();
  if (!ID)
    return false;

  SmallVector<Intrinsic::IITDescriptor, 8> Table;
  getIntrinsicInfoTableEntries(ID, Table);
  ArrayRef<Intrinsic::IITDescriptor> TableRef = Table;

  if (Intrinsic::matchIntrinsicSignature(F->getFunctionType(), TableRef,
                                         ArgTys) !=
      Intrinsic::MatchIntrinsicTypesResult::MatchIntrinsicTypes_Match) {
    return false;
  }
  if (Intrinsic::matchIntrinsicVarArg(F->getFunctionType()->isVarArg(),
                                      TableRef))
    return false;
  return true;
}

Optional<Function *> Intrinsic::remangleIntrinsicFunction(Function *F) {
  SmallVector<Type *, 4> ArgTys;
  if (!getIntrinsicSignature(F, ArgTys))
    return None;

  Intrinsic::ID ID = F->getIntrinsicID();
  StringRef Name = F->getName();
  if (Name ==
      Intrinsic::getName(ID, ArgTys, F->getParent(), F->getFunctionType()))
    return None;

  auto NewDecl = Intrinsic::getDeclaration(F->getParent(), ID, ArgTys);
  NewDecl->setCallingConv(F->getCallingConv());
  assert(NewDecl->getFunctionType() == F->getFunctionType() &&
         "Shouldn't change the signature");
  return NewDecl;
}

/// hasAddressTaken - returns true if there are any uses of this function
/// other than direct calls or invokes to it. Optionally ignores callback
/// uses, assume-like pointer annotation calls, and references in llvm.used
/// and llvm.compiler.used variables.
bool Function::hasAddressTaken(const User **PutOffender,
                               bool IgnoreCallbackUses,
                               bool IgnoreAssumeLikeCalls,
                               bool IgnoreLLVMUsed) const {
  for (const Use &U : uses()) {
    const User *FU = U.getUser();
    if (isa<BlockAddress>(FU))
      continue;

    if (IgnoreCallbackUses) {
      AbstractCallSite ACS(&U);
      if (ACS && ACS.isCallbackCall())
        continue;
    }

    const auto *Call = dyn_cast<CallBase>(FU);
    if (!Call) {
      if (IgnoreAssumeLikeCalls) {
        if (const auto *FI = dyn_cast<Instruction>(FU)) {
          if (FI->isCast() && !FI->user_empty() &&
              llvm::all_of(FU->users(), [](const User *U) {
                if (const auto *I = dyn_cast<IntrinsicInst>(U))
                  return I->isAssumeLikeIntrinsic();
                return false;
              }))
            continue;
        }
      }
      if (IgnoreLLVMUsed && !FU->user_empty()) {
        const User *FUU = FU;
        if (isa<BitCastOperator>(FU) && FU->hasOneUse() &&
            !FU->user_begin()->user_empty())
          FUU = *FU->user_begin();
        if (llvm::all_of(FUU->users(), [](const User *U) {
              if (const auto *GV = dyn_cast<GlobalVariable>(U))
                return GV->hasName() &&
                       (GV->getName().equals("llvm.compiler.used") ||
                        GV->getName().equals("llvm.used"));
              return false;
            }))
          continue;
      }
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
    if (!Call->isCallee(&U)) {
      if (PutOffender)
        *PutOffender = FU;
      return true;
    }
  }
  return false;
}
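
// Illustrative usage sketch for hasAddressTaken above (hypothetical caller,
// not exercised in this file): transforms that rewrite a function's signature
// typically bail out when its address escapes, and can report the first
// offending user for diagnostics:
//
//   const User *Offender = nullptr;
//   if (!F.hasAddressTaken(&Offender)) {
//     // Every use of F is a direct call or invoke, so the signature can be
//     // rewritten without chasing indirect users.
//   } else if (Offender) {
//     // Offender points at the non-call/non-invoke use that was found.
//   }
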
bool Function::isDefTriviallyDead() const {
  // Check the linkage.
  if (!hasLinkOnceLinkage() && !hasLocalLinkage() &&
      !hasAvailableExternallyLinkage())
    return false;

  // Check if the function is used by anything other than a blockaddress.
  for (const User *U : users())
    if (!isa<BlockAddress>(U))
      return false;

  return true;
}

/// callsFunctionThatReturnsTwice - Return true if the function has a call to
/// setjmp or another function that GCC recognizes as "returning twice".
bool Function::callsFunctionThatReturnsTwice() const {
  for (const Instruction &I : instructions(this))
    if (const auto *Call = dyn_cast<CallBase>(&I))
      if (Call->hasFnAttr(Attribute::ReturnsTwice))
        return true;

  return false;
}

Constant *Function::getPersonalityFn() const {
  assert(hasPersonalityFn() && getNumOperands());
  return cast<Constant>(Op<0>());
}

void Function::setPersonalityFn(Constant *Fn) {
  setHungoffOperand<0>(Fn);
  setValueSubclassDataBit(3, Fn != nullptr);
}

Constant *Function::getPrefixData() const {
  assert(hasPrefixData() && getNumOperands());
  return cast<Constant>(Op<1>());
}

void Function::setPrefixData(Constant *PrefixData) {
  setHungoffOperand<1>(PrefixData);
  setValueSubclassDataBit(1, PrefixData != nullptr);
}

Constant *Function::getPrologueData() const {
  assert(hasPrologueData() && getNumOperands());
  return cast<Constant>(Op<2>());
}

void Function::setPrologueData(Constant *PrologueData) {
  setHungoffOperand<2>(PrologueData);
  setValueSubclassDataBit(2, PrologueData != nullptr);
}
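
// The three optional constants above are stored as "hung off" operands of the
// Function: operand 0 holds the personality function, operand 1 the prefix
// data, and operand 2 the prologue data, with bits 3, 1 and 2 of the value
// subclass data recording which of them are present. A typical round trip
// (hypothetical caller; PersonalityCallee is assumed to be a suitable
// Constant):
//
//   F.setPersonalityFn(PersonalityCallee); // allocates the uselist lazily
//   if (F.hasPersonalityFn())
//     Constant *Per = F.getPersonalityFn();
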
void Function::allocHungoffUselist() {
  // If we've already allocated a uselist, stop here.
  if (getNumOperands())
    return;

  allocHungoffUses(3, /*IsPhi=*/false);
  setNumHungOffUseOperands(3);

  // Initialize the uselist with placeholder operands to allow traversal.
  auto *CPN = ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0));
  Op<0>().set(CPN);
  Op<1>().set(CPN);
  Op<2>().set(CPN);
}

template <int Idx>
void Function::setHungoffOperand(Constant *C) {
  if (C) {
    allocHungoffUselist();
    Op<Idx>().set(C);
  } else if (getNumOperands()) {
    Op<Idx>().set(
        ConstantPointerNull::get(Type::getInt1PtrTy(getContext(), 0)));
  }
}

void Function::setValueSubclassDataBit(unsigned Bit, bool On) {
  assert(Bit < 16 && "SubclassData contains only 16 bits");
  if (On)
    setValueSubclassData(getSubclassDataFromValue() | (1 << Bit));
  else
    setValueSubclassData(getSubclassDataFromValue() & ~(1 << Bit));
}

void Function::setEntryCount(ProfileCount Count,
                             const DenseSet<GlobalValue::GUID> *S) {
  assert(Count.hasValue());
#if !defined(NDEBUG)
  auto PrevCount = getEntryCount();
  assert(!PrevCount.hasValue() || PrevCount.getType() == Count.getType());
#endif

  auto ImportGUIDs = getImportGUIDs();
  if (S == nullptr && ImportGUIDs.size())
    S = &ImportGUIDs;

  MDBuilder MDB(getContext());
  setMetadata(
      LLVMContext::MD_prof,
      MDB.createFunctionEntryCount(Count.getCount(), Count.isSynthetic(), S));
}

void Function::setEntryCount(uint64_t Count, Function::ProfileCountType Type,
                             const DenseSet<GlobalValue::GUID> *Imports) {
  setEntryCount(ProfileCount(Count, Type), Imports);
}
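
// Illustrative round trip (hypothetical caller, not exercised in this file):
// the setters above encode the count as !prof metadata of the form
// !{!"function_entry_count", i64 N, GUIDs...} for real counts (or
// "synthetic_function_entry_count" for synthetic ones), which getEntryCount
// below decodes again:
//
//   F.setEntryCount(1000, Function::PCT_Real);
//   ProfileCount PC = F.getEntryCount();
//   if (PC.hasValue())
//     assert(PC.getCount() == 1000 && !PC.isSynthetic());
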
ProfileCount Function::getEntryCount(bool AllowSynthetic) const {
  MDNode *MD = getMetadata(LLVMContext::MD_prof);
  if (MD && MD->getOperand(0))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0))) {
      if (MDS->getString().equals("function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        // A value of -1 is used for SamplePGO when there were no samples.
        // Treat this the same as unknown.
        if (Count == (uint64_t)-1)
          return ProfileCount::getInvalid();
        return ProfileCount(Count, PCT_Real);
      } else if (AllowSynthetic &&
                 MDS->getString().equals("synthetic_function_entry_count")) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(1));
        uint64_t Count = CI->getValue().getZExtValue();
        return ProfileCount(Count, PCT_Synthetic);
      }
    }
  return ProfileCount::getInvalid();
}

DenseSet<GlobalValue::GUID> Function::getImportGUIDs() const {
  DenseSet<GlobalValue::GUID> R;
  if (MDNode *MD = getMetadata(LLVMContext::MD_prof))
    if (MDString *MDS = dyn_cast<MDString>(MD->getOperand(0)))
      if (MDS->getString().equals("function_entry_count"))
        for (unsigned i = 2; i < MD->getNumOperands(); i++)
          R.insert(mdconst::extract<ConstantInt>(MD->getOperand(i))
                       ->getValue()
                       .getZExtValue());
  return R;
}

void Function::setSectionPrefix(StringRef Prefix) {
  MDBuilder MDB(getContext());
  setMetadata(LLVMContext::MD_section_prefix,
              MDB.createFunctionSectionPrefix(Prefix));
}

Optional<StringRef> Function::getSectionPrefix() const {
  if (MDNode *MD = getMetadata(LLVMContext::MD_section_prefix)) {
    assert(cast<MDString>(MD->getOperand(0))
               ->getString()
               .equals("function_section_prefix") &&
           "Metadata does not match");
    return cast<MDString>(MD->getOperand(1))->getString();
  }
  return None;
}

bool Function::nullPointerIsDefined() const {
  return hasFnAttribute(Attribute::NullPointerIsValid);
}

bool llvm::NullPointerIsDefined(const Function *F, unsigned AS) {
  if (F && F->nullPointerIsDefined())
    return true;

  if (AS != 0)
    return true;

  return false;
}
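
// Illustrative usage sketch (hypothetical caller, not exercised in this file):
// before treating an access through a possibly-null pointer as undefined
// behavior, a transform should consult both the enclosing function's
// attributes and the pointer's address space:
//
//   bool NullMayBeValid = llvm::NullPointerIsDefined(
//       &F, PtrTy->getPointerAddressSpace());
//   // Only when NullMayBeValid is false may the access be treated as
//   // undefined behavior on a null pointer.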