//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, Descriptor::GlobalMD, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  new (G->block()->rawData()) InlineDescriptor(Desc);
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) const {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  if (auto It = GlobalIndices.find(VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  return std::nullopt;
}

std::optional<unsigned> Program::getGlobal(const Expr *E) {
  if (auto It = GlobalIndices.find(E); It != GlobalIndices.end())
    return It->second;
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateDummy(const DeclTy &D) {
  assert(D);
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(D.getOpaqueValue());
      It != DummyVariables.end())
    return It->second;

  QualType QT;
  bool IsWeak = false;
  if (const auto *E = D.dyn_cast<const Expr *>()) {
    QT = E->getType();
  } else {
    const ValueDecl *VD = cast<ValueDecl>(D.get<const Decl *>());
    IsWeak = VD->isWeak();
    QT = VD->getType();
    if (const auto *RT = QT->getAs<ReferenceType>())
      QT = RT->getPointeeType();
  }
  assert(!QT.isNull());

  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(QT))
    Desc = createDescriptor(D, *T, std::nullopt, /*IsTemporary=*/true,
                            /*IsMutable=*/false);
  else
    Desc = createDescriptor(D, QT.getTypePtr(), std::nullopt,
                            /*IsTemporary=*/true, /*IsMutable=*/false);
  if (!Desc)
    Desc = allocateDescriptor(D);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false, IsWeak);
  G->block()->invokeCtor();

  Globals.push_back(G);
  DummyVariables[D.getOpaqueValue()] = I;
  return I;
}

std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  bool IsStatic, IsExtern;
  bool IsWeak = VD->isWeak();
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }

  // Register all previous declarations as well. For extern blocks, just replace
  // the index with the new variable.
  if (auto Idx =
          createGlobal(VD, VD->getType(), IsStatic, IsExtern, IsWeak, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
      if (P != VD) {
        unsigned PIdx = GlobalIndices[P];
        if (Globals[PIdx]->block()->isExtern())
          Globals[PIdx] = Globals[*Idx];
      }
      GlobalIndices[P] = *Idx;
    }
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  if (auto Idx = getGlobal(E))
    return Idx;
  if (auto Idx = createGlobal(E, E->getType(), /*isStatic=*/true,
                              /*isExtern=*/false, /*IsWeak=*/false)) {
    GlobalIndices[E] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              bool IsWeak, const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (std::optional<PrimType> T = Ctx.classify(Ty))
    Desc = createDescriptor(D, *T, Descriptor::GlobalMD, IsConst, IsTemporary);
  else
    Desc = createDescriptor(D, Ty.getTypePtr(), Descriptor::GlobalMD, IsConst,
                            IsTemporary);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize()) Global(
      Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern, IsWeak);
  G->block()->invokeCtor();

  // Initialize InlineDescriptor fields.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      BaseSize += align(sizeof(InlineDescriptor));
      Bases.push_back({BD, BaseSize, Desc, BR});
      BaseSize += align(BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      VirtSize += align(sizeof(InlineDescriptor));
      VirtBases.push_back({BD, VirtSize, Desc, BR});
      VirtSize += align(BR->getSize());
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    FD = FD->getFirstDecl();
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's so the FieldDecl's getFieldIndex() matches.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (const auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize = ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType) ||
        isa<VariableArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(),
                                                  MDSize, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  // Same with vector types.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    PrimType ElemTy = *Ctx.classify(VT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, VT->getNumElements(), IsConst,
                              IsTemporary, IsMutable);
  }

  return nullptr;
}