//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, Descriptor::GlobalMD, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  new (G->block()->rawData()) InlineDescriptor(Desc);
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) const {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  if (auto It = GlobalIndices.find(VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  return std::nullopt;
}

std::optional<unsigned> Program::getGlobal(const Expr *E) {
  if (auto It = GlobalIndices.find(E); It != GlobalIndices.end())
    return It->second;
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateDummy(const DeclTy &D) {
  assert(D);
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(D.getOpaqueValue());
      It != DummyVariables.end())
    return It->second;

  QualType QT;
  bool IsWeak = false;
  if (const auto *E = D.dyn_cast<const Expr *>()) {
    QT = E->getType();
  } else {
    const ValueDecl *VD = cast<ValueDecl>(D.get<const Decl *>());
    IsWeak = VD->isWeak();
    QT = VD->getType();
    if (const auto *RT = QT->getAs<ReferenceType>())
      QT = RT->getPointeeType();
  }
  assert(!QT.isNull());

  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(QT))
    Desc = createDescriptor(D, *T, std::nullopt, /*IsTemporary=*/true,
                            /*IsMutable=*/false);
  else
    Desc = createDescriptor(D, QT.getTypePtr(), std::nullopt,
                            /*IsTemporary=*/true, /*IsMutable=*/false);
  if (!Desc)
    Desc = allocateDescriptor(D);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false, IsWeak);
  G->block()->invokeCtor();

  Globals.push_back(G);
  DummyVariables[D.getOpaqueValue()] = I;
  return I;
}

std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  bool IsStatic, IsExtern;
  bool IsWeak = VD->isWeak();
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }

  // Register all previous declarations as well. For extern blocks, just
  // replace the index with the new variable.
  if (auto Idx =
          createGlobal(VD, VD->getType(), IsStatic, IsExtern, IsWeak, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
      if (P != VD) {
        unsigned PIdx = GlobalIndices[P];
        if (Globals[PIdx]->block()->isExtern())
          Globals[PIdx] = Globals[*Idx];
      }
      GlobalIndices[P] = *Idx;
    }
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  if (auto Idx = getGlobal(E))
    return Idx;
  if (auto Idx = createGlobal(E, E->getType(), /*isStatic=*/true,
                              /*isExtern=*/false, /*IsWeak=*/false)) {
    GlobalIndices[E] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              bool IsWeak, const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (std::optional<PrimType> T = Ctx.classify(Ty))
    Desc = createDescriptor(D, *T, Descriptor::GlobalMD, IsConst, IsTemporary);
  else
    Desc = createDescriptor(D, Ty.getTypePtr(), Descriptor::GlobalMD, IsConst,
                            IsTemporary);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize()) Global(
      Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern, IsWeak);
  G->block()->invokeCtor();

  // Initialize InlineDescriptor fields.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Return an existing record if available. Otherwise, we insert nullptr now
  // and replace that later, so recursive calls to this function with the same
  // RecordDecl don't run into infinite recursion.
  auto [It, Inserted] = Records.try_emplace(RD);
  if (!Inserted)
    return It->second;

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      BaseSize += align(sizeof(InlineDescriptor));
      Bases.push_back({BD, BaseSize, Desc, BR});
      BaseSize += align(BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      VirtSize += align(sizeof(InlineDescriptor));
      VirtBases.push_back({BD, VirtSize, Desc, BR});
      VirtSize += align(BR->getSize());
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    FD = FD->getFirstDecl();
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's so the FieldDecl's getFieldIndex() matches.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (const auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize = ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType) ||
        isa<VariableArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(),
                                                  MDSize, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  // Same with vector types.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    PrimType ElemTy = *Ctx.classify(VT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, VT->getNumElements(), IsConst,
                              IsTemporary, IsMutable);
  }

  return nullptr;
}