//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include <limits>

using namespace clang;
using namespace clang::interp;

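// Native pointers are host pointers the bytecode needs to refer to; they are
// stored in a side table and addressed by a small integer index. Lookups are
// deduplicated through NativePointerIndices.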
unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

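// Create a global block holding the contents of a string literal. The
// primitive type of the elements is chosen from the literal's code unit
// width, and the block is sized for getLength() + 1 elements so the null
// terminator is stored and initialized like any other element.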
unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, Descriptor::GlobalMD, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string. Note that getLength() does not include
  // the null terminator, which is why the descriptor above was created with
  // getLength() + 1 elements.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  new (G->block()->rawData()) InlineDescriptor(Desc);
  Globals.push_back(G);

  // Construct the string in storage, including the null terminator.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      Field.initialize();
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) const {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

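// Look up the global index for a declaration. If the declaration itself is
// unknown but one of its previous declarations has already been evaluated,
// the existing index is cached for VD; note that std::nullopt is still
// returned in that case, and only a direct hit returns an index.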
std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  if (auto It = GlobalIndices.find(VD); It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
    if (auto It = GlobalIndices.find(P); It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
  if (Index)
    GlobalIndices[VD] = *Index;

  return std::nullopt;
}

std::optional<unsigned> Program::getGlobal(const Expr *E) {
  if (auto It = GlobalIndices.find(E); It != GlobalIndices.end())
    return It->second;
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

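// Create (or reuse) a dummy global for a declaration or expression that
// cannot be evaluated properly. The descriptor is marked as a dummy so the
// interpreter can distinguish such blocks from real globals.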
std::optional<unsigned> Program::getOrCreateDummy(const DeclTy &D) {
  assert(D);
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(D.getOpaqueValue());
      It != DummyVariables.end())
    return It->second;

  QualType QT;
  if (const auto *E = D.dyn_cast<const Expr *>()) {
    QT = E->getType();
  } else {
    const ValueDecl *VD = cast<ValueDecl>(D.get<const Decl *>());
    QT = VD->getType();
    if (const auto *RT = QT->getAs<ReferenceType>())
      QT = RT->getPointeeType();
  }
  assert(!QT.isNull());

  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(QT))
    Desc = createDescriptor(D, *T, std::nullopt, /*IsTemporary=*/true,
                            /*IsMutable=*/false);
  else
    Desc = createDescriptor(D, QT.getTypePtr(), std::nullopt,
                            /*IsTemporary=*/true, /*IsMutable=*/false);
  if (!Desc)
    Desc = allocateDescriptor(D);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false);
  G->block()->invokeCtor();

  Globals.push_back(G);
  DummyVariables[D.getOpaqueValue()] = I;
  return I;
}

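// Create a global for a value declaration. Variables are static if they are
// globally indexed; unnamed global constants, MS GUIDs and template parameter
// objects are always static; anything else becomes an extern block.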
std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  bool IsStatic, IsExtern;
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }

  // Register the index for all previous declarations as well. If a previous
  // declaration was created as an extern block, redirect its Globals entry to
  // the newly created block.
  if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
      if (P != VD) {
        unsigned PIdx = GlobalIndices[P];
        if (Globals[PIdx]->block()->isExtern())
          Globals[PIdx] = Globals[*Idx];
      }
      GlobalIndices[P] = *Idx;
    }
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  if (auto Idx = getGlobal(E))
    return Idx;
  if (auto Idx = createGlobal(E, E->getType(), /*isStatic=*/true,
                              /*isExtern=*/false)) {
    GlobalIndices[E] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

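// Create the backing block for a global. The block's metadata area holds a
// GlobalInlineDescriptor tracking the initialization state; globals created
// without an initializer start out in the NoInitializer state.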
std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (std::optional<PrimType> T = Ctx.classify(Ty))
    Desc = createDescriptor(D, *T, Descriptor::GlobalMD, IsConst, IsTemporary);
  else
    Desc = createDescriptor(D, Ty.getTypePtr(), Descriptor::GlobalMD, IsConst,
                            IsTemporary);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern);
  G->block()->invokeCtor();

  // Initialize the GlobalInlineDescriptor stored in the block's metadata.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

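// Build the interpreter's layout for a record: non-virtual bases first, then
// fields, with virtual bases kept in a separate region. Every subobject is
// preceded by an InlineDescriptor, which is why each offset is bumped by
// align(sizeof(InlineDescriptor)) before the subobject itself.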
Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace it later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      BaseSize += align(sizeof(InlineDescriptor));
      Bases.push_back({BD, BaseSize, Desc, BR});
      BaseSize += align(BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      VirtSize += align(sizeof(InlineDescriptor));
      VirtBases.push_back({BD, VirtSize, Desc, BR});
      VirtSize += align(BR->getSize());
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    FD = FD->getFirstDecl();
    // Note that we DO create fields and descriptors for unnamed bitfields
    // here, even though we later ignore them everywhere, so that the
    // FieldDecl's getFieldIndex() still matches.

    // Reserve space for the field's InlineDescriptor before the field itself.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

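// Create a descriptor for a composite type. Records, arrays of known and
// unknown bounds, atomic, complex and vector types are handled here; any
// other type yields a null descriptor.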
Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (const auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Arrays of known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. Each element is preceded by its own
        // InlineDescriptor, so the per-element size accounts for both.
        const Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize = ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Arrays of unknown bounds - they cannot be accessed, and pointer
    // arithmetic on pointers to such objects is forbidden.
    if (isa<IncompleteArrayType>(ArrayType) ||
        isa<VariableArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(),
                                                  MDSize, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of two elements.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  // Vector types are handled the same way.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    PrimType ElemTy = *Ctx.classify(VT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, VT->getNumElements(), IsConst,
                              IsTemporary, IsMutable);
  }

  return nullptr;
}