//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, Descriptor::GlobalMD, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  new (G->block()->rawData()) InlineDescriptor(Desc);
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) const {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}
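
// Note that if no index exists for VD itself but one of its previous
// declarations has one, the mapping is copied to VD, yet std::nullopt is
// still returned: the caller decides whether a new global gets created.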
std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  if (auto It = GlobalIndices.find(VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  return std::nullopt;
}

std::optional<unsigned> Program::getGlobal(const Expr *E) {
  if (auto It = GlobalIndices.find(E); It != GlobalIndices.end())
    return It->second;
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateDummy(const DeclTy &D) {
  assert(D);
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(D.getOpaqueValue());
      It != DummyVariables.end())
    return It->second;

  QualType QT;
  if (const auto *E = D.dyn_cast<const Expr *>()) {
    QT = E->getType();
  } else {
    const ValueDecl *VD = cast<ValueDecl>(D.get<const Decl *>());
    QT = VD->getType();
    if (const auto *RT = QT->getAs<ReferenceType>())
      QT = RT->getPointeeType();
  }
  assert(!QT.isNull());

  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(QT))
    Desc = createDescriptor(D, *T, std::nullopt, /*IsTemporary=*/true,
                            /*IsMutable=*/false);
  else
    Desc = createDescriptor(D, QT.getTypePtr(), std::nullopt,
                            /*IsTemporary=*/true, /*IsMutable=*/false);
  if (!Desc)
    Desc = allocateDescriptor(D);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false);
  G->block()->invokeCtor();

  Globals.push_back(G);
  DummyVariables[D.getOpaqueValue()] = I;
  return I;
}

std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  bool IsStatic, IsExtern;
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }
  if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl())
      GlobalIndices[P] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  if (auto Idx = getGlobal(E))
    return Idx;
  if (auto Idx = createGlobal(E, E->getType(), /*isStatic=*/true,
                              /*isExtern=*/false)) {
    GlobalIndices[E] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}
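
// Worker for global creation: allocates a descriptor plus a backing Global
// block whose raw data starts with a GlobalInlineDescriptor tracking the
// initialization state of the global.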
std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (std::optional<PrimType> T = Ctx.classify(Ty))
    Desc = createDescriptor(D, *T, Descriptor::GlobalMD, IsConst, IsTemporary);
  else
    Desc = createDescriptor(D, Ty.getTypePtr(), Descriptor::GlobalMD, IsConst,
                            IsTemporary);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern);
  G->block()->invokeCtor();

  // Initialize InlineDescriptor fields.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      BaseSize += align(sizeof(InlineDescriptor));
      Bases.push_back({BD, BaseSize, Desc, BR});
      BaseSize += align(BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      VirtSize += align(sizeof(InlineDescriptor));
      VirtBases.push_back({BD, VirtSize, Desc, BR});
      VirtSize += align(BR->getSize());
    }
  }
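
  // Layout note: each base class subobject and each field is preceded by its
  // own InlineDescriptor, so the offsets recorded in Bases, VirtBases and
  // Fields point just past that per-subobject metadata.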

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    FD = FD->getFirstDecl();
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's so the FieldDecl's getFieldIndex() matches.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (const auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize = ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType) ||
        isa<VariableArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(),
                                                  MDSize, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
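  // An atomic is laid out just like its underlying value type, so the
  // descriptor is created for the inner type directly.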
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  // Same with vector types.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    PrimType ElemTy = *Ctx.classify(VT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, VT->getNumElements(), IsConst,
                              IsTemporary, IsMutable);
  }

  return nullptr;
}