1 //===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "Interp.h"
10 #include "Function.h"
11 #include "InterpFrame.h"
12 #include "InterpShared.h"
13 #include "InterpStack.h"
14 #include "Opcode.h"
15 #include "PrimType.h"
16 #include "Program.h"
17 #include "State.h"
18 #include "clang/AST/ASTContext.h"
19 #include "clang/AST/CXXInheritance.h"
20 #include "clang/AST/DeclObjC.h"
21 #include "clang/AST/Expr.h"
22 #include "clang/AST/ExprCXX.h"
23 #include "clang/Basic/DiagnosticSema.h"
24 #include "clang/Basic/TargetInfo.h"
25 #include "llvm/ADT/StringExtras.h"
26 
27 using namespace clang;
28 using namespace clang::interp;
29 
30 static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
31   llvm::report_fatal_error("Interpreter cannot return values");
32 }
33 
34 //===----------------------------------------------------------------------===//
35 // Jmp, Jt, Jf
36 //===----------------------------------------------------------------------===//
37 
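// Jmp adjusts the PC by Offset unconditionally; Jt and Jf pop a bool off the
// stack and take the jump only if it is true (Jt) or false (Jf), respectively.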
38 static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
39   PC += Offset;
40   return true;
41 }
42 
43 static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
44   if (S.Stk.pop<bool>()) {
45     PC += Offset;
46   }
47   return true;
48 }
49 
50 static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
51   if (!S.Stk.pop<bool>()) {
52     PC += Offset;
53   }
54   return true;
55 }
56 
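/// Diagnose a read from a variable whose initializer is unknown to the
/// interpreter, pointing at both the use and the declaration.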
57 static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
58                                        const ValueDecl *VD) {
59   const SourceInfo &E = S.Current->getSource(OpPC);
60   S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
61   S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
62 }
63 
64 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
65                                      const ValueDecl *VD);
66 static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
67                                 const ValueDecl *D) {
68   const SourceInfo &E = S.Current->getSource(OpPC);
69 
70   if (isa<ParmVarDecl>(D)) {
71     if (S.getLangOpts().CPlusPlus11) {
72       S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
73       S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
74     } else {
75       S.FFDiag(E);
76     }
77     return false;
78   }
79 
80   if (!D->getType().isConstQualified()) {
81     diagnoseNonConstVariable(S, OpPC, D);
82   } else if (const auto *VD = dyn_cast<VarDecl>(D)) {
83     if (!VD->getAnyInitializer()) {
84       diagnoseMissingInitializer(S, OpPC, VD);
85     } else {
86       const SourceInfo &Loc = S.Current->getSource(OpPC);
87       S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
88       S.Note(VD->getLocation(), diag::note_declared_at);
89     }
90   }
91 
92   return false;
93 }
94 
95 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
96                                      const ValueDecl *VD) {
97   const SourceInfo &Loc = S.Current->getSource(OpPC);
98   if (!S.getLangOpts().CPlusPlus) {
99     S.FFDiag(Loc);
100     return;
101   }
102 
103   if (const auto *VarD = dyn_cast<VarDecl>(VD);
104       VarD && VarD->getType().isConstQualified() &&
105       !VarD->getAnyInitializer()) {
106     diagnoseMissingInitializer(S, OpPC, VD);
107     return;
108   }
109 
110   // Rather random, but this is to match the diagnostic output of the current
111   // interpreter.
112   if (isa<ObjCIvarDecl>(VD))
113     return;
114 
115   if (VD->getType()->isIntegralOrEnumerationType()) {
116     S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
117     S.Note(VD->getLocation(), diag::note_declared_at);
118     return;
119   }
120 
121   S.FFDiag(Loc,
122            S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
123                                        : diag::note_constexpr_ltor_non_integral,
124            1)
125       << VD << VD->getType();
126   S.Note(VD->getLocation(), diag::note_declared_at);
127 }
128 
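/// Check that Ptr designates the active member of every union it is nested in;
/// otherwise diagnose the access of an inactive union member.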
129 static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
130                         AccessKinds AK) {
131   if (Ptr.isActive())
132     return true;
133 
134   assert(Ptr.inUnion());
135   assert(Ptr.isField() && Ptr.getField());
136 
137   Pointer U = Ptr.getBase();
138   Pointer C = Ptr;
139   while (!U.isRoot() && U.inUnion() && !U.isActive()) {
140     if (U.getField())
141       C = U;
142     U = U.getBase();
143   }
144   assert(C.isField());
145 
146   // Get the inactive field descriptor.
147   const FieldDecl *InactiveField = C.getField();
148   assert(InactiveField);
149 
150   // Consider:
151   // union U {
152   //   struct {
153   //     int x;
154   //     int y;
155   //   } a;
156   // }
157   //
158   // When activating x, we will also activate a. If we now try to read
159   // from y, we will get to CheckActive, because y is not active. In that
160   // case, our U will be a (not a union). We return here and let later code
161   // handle this.
162   if (!U.getFieldDesc()->isUnion())
163     return true;
164 
165   // Find the active field of the union.
166   const Record *R = U.getRecord();
167   assert(R && R->isUnion() && "Not a union");
168 
169   const FieldDecl *ActiveField = nullptr;
170   for (const Record::Field &F : R->fields()) {
171     const Pointer &Field = U.atField(F.Offset);
172     if (Field.isActive()) {
173       ActiveField = Field.getField();
174       break;
175     }
176   }
177 
178   const SourceInfo &Loc = S.Current->getSource(OpPC);
179   S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
180       << AK << InactiveField << !ActiveField << ActiveField;
181   return false;
182 }
183 
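/// Check access to a static lifetime-extended temporary: it must either be
/// usable in constant expressions or have been created during the current
/// evaluation.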
184 static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
185                            AccessKinds AK) {
186   if (auto ID = Ptr.getDeclID()) {
187     if (!Ptr.isStaticTemporary())
188       return true;
189 
190     const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
191         Ptr.getDeclDesc()->asExpr());
192     if (!MTE)
193       return true;
194 
195     // FIXME(perf): Since we do this check on every Load from a static
196     // temporary, it might make sense to cache the value of the
197     // isUsableInConstantExpressions call.
198     if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
199         Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
200       const SourceInfo &E = S.Current->getSource(OpPC);
201       S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
202       S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
203       return false;
204     }
205   }
206   return true;
207 }
208 
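/// Diagnose modification of a static global that is not part of the
/// declaration currently being evaluated.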
209 static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
210   if (auto ID = Ptr.getDeclID()) {
211     if (!Ptr.isStatic())
212       return true;
213 
214     if (S.P.getCurrentDecl() == ID)
215       return true;
216 
217     S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
218     return false;
219   }
220   return true;
221 }
222 
223 namespace clang {
224 namespace interp {
225 static void popArg(InterpState &S, const Expr *Arg) {
226   PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
227   TYPE_SWITCH(Ty, S.Stk.discard<T>());
228 }
229 
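/// Discard the arguments of a call from the stack; used when the call itself
/// cannot be performed. Handles variadic calls and builtins with custom type
/// checking by inspecting the call expression at the caller's return PC.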
230 void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
231                               const Function *Func) {
232   assert(S.Current);
233   assert(Func);
234 
235   if (Func->isUnevaluatedBuiltin())
236     return;
237 
238   // Some builtin functions require us to only look at the call site, since
239   // the classified parameter types do not match.
240   if (unsigned BID = Func->getBuiltinID();
241       BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
242     const auto *CE =
243         cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
244     for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
245       const Expr *A = CE->getArg(I);
246       popArg(S, A);
247     }
248     return;
249   }
250 
251   if (S.Current->Caller && Func->isVariadic()) {
252     // The CallExpr we're looking for is at the return PC of the current
253     // function, i.e. in the caller.
254     // This code path should be executed very rarely.
255     unsigned NumVarArgs;
256     const Expr *const *Args = nullptr;
257     unsigned NumArgs = 0;
258     const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
259     if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
260       Args = CE->getArgs();
261       NumArgs = CE->getNumArgs();
262     } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
263       Args = CE->getArgs();
264       NumArgs = CE->getNumArgs();
265     } else
266       assert(false && "Can't get arguments from that expression type");
267 
268     assert(NumArgs >= Func->getNumWrittenParams());
269     NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
270                             isa<CXXOperatorCallExpr>(CallSite));
271     for (unsigned I = 0; I != NumVarArgs; ++I) {
272       const Expr *A = Args[NumArgs - 1 - I];
273       popArg(S, A);
274     }
275   }
276 
277   // And in any case, remove the fixed parameters (the non-variadic ones)
278   // at the end.
279   for (PrimType Ty : Func->args_reverse())
280     TYPE_SWITCH(Ty, S.Stk.discard<T>());
281 }
282 
283 bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
284   if (!Ptr.isExtern())
285     return true;
286 
287   if (Ptr.isInitialized() ||
288       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
289     return true;
290 
291   if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
292     const auto *VD = Ptr.getDeclDesc()->asValueDecl();
293     diagnoseNonConstVariable(S, OpPC, VD);
294   }
295   return false;
296 }
297 
298 bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
299   if (!Ptr.isUnknownSizeArray())
300     return true;
301   const SourceInfo &E = S.Current->getSource(OpPC);
302   S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
303   return false;
304 }
305 
306 bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
307                AccessKinds AK) {
308   if (Ptr.isZero()) {
309     const auto &Src = S.Current->getSource(OpPC);
310 
311     if (Ptr.isField())
312       S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
313     else
314       S.FFDiag(Src, diag::note_constexpr_access_null) << AK;
315 
316     return false;
317   }
318 
319   if (!Ptr.isLive()) {
320     const auto &Src = S.Current->getSource(OpPC);
321 
322     if (Ptr.isDynamic()) {
323       S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
324     } else {
325       bool IsTemp = Ptr.isTemporary();
326       S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;
327 
328       if (IsTemp)
329         S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
330       else
331         S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
332     }
333 
334     return false;
335   }
336 
337   return true;
338 }
339 
340 bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
341   assert(Desc);
342 
343   const auto *D = Desc->asVarDecl();
344   if (!D || !D->hasGlobalStorage())
345     return true;
346 
347   if (D == S.EvaluatingDecl)
348     return true;
349 
350   if (D->isConstexpr())
351     return true;
352 
353   // If we're evaluating the initializer for a constexpr variable in C23, we may
354   // only read other constexpr variables. Abort here since this one isn't
355   // constexpr.
356   if (const auto *VD = dyn_cast_if_present<VarDecl>(S.EvaluatingDecl);
357       VD && VD->isConstexpr() && S.getLangOpts().C23)
358     return Invalid(S, OpPC);
359 
360   QualType T = D->getType();
361   bool IsConstant = T.isConstant(S.getASTContext());
362   if (T->isIntegralOrEnumerationType()) {
363     if (!IsConstant) {
364       diagnoseNonConstVariable(S, OpPC, D);
365       return false;
366     }
367     return true;
368   }
369 
370   if (IsConstant) {
371     if (S.getLangOpts().CPlusPlus) {
372       S.CCEDiag(S.Current->getLocation(OpPC),
373                 S.getLangOpts().CPlusPlus11
374                     ? diag::note_constexpr_ltor_non_constexpr
375                     : diag::note_constexpr_ltor_non_integral,
376                 1)
377           << D << T;
378       S.Note(D->getLocation(), diag::note_declared_at);
379     } else {
380       S.CCEDiag(S.Current->getLocation(OpPC));
381     }
382     return true;
383   }
384 
385   if (T->isPointerOrReferenceType()) {
386     if (!T->getPointeeType().isConstant(S.getASTContext()) ||
387         !S.getLangOpts().CPlusPlus11) {
388       diagnoseNonConstVariable(S, OpPC, D);
389       return false;
390     }
391     return true;
392   }
393 
394   diagnoseNonConstVariable(S, OpPC, D);
395   return false;
396 }
397 
398 static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
399   if (!Ptr.isStatic() || !Ptr.isBlockPointer())
400     return true;
401   return CheckConstant(S, OpPC, Ptr.getDeclDesc());
402 }
403 
404 bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
405                CheckSubobjectKind CSK) {
406   if (!Ptr.isZero())
407     return true;
408   const SourceInfo &Loc = S.Current->getSource(OpPC);
409   S.FFDiag(Loc, diag::note_constexpr_null_subobject)
410       << CSK << S.Current->getRange(OpPC);
411 
412   return false;
413 }
414 
415 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
416                 AccessKinds AK) {
417   if (!Ptr.isOnePastEnd())
418     return true;
419   const SourceInfo &Loc = S.Current->getSource(OpPC);
420   S.FFDiag(Loc, diag::note_constexpr_access_past_end)
421       << AK << S.Current->getRange(OpPC);
422   return false;
423 }
424 
425 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
426                 CheckSubobjectKind CSK) {
427   if (!Ptr.isElementPastEnd())
428     return true;
429   const SourceInfo &Loc = S.Current->getSource(OpPC);
430   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
431       << CSK << S.Current->getRange(OpPC);
432   return false;
433 }
434 
435 bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
436                     CheckSubobjectKind CSK) {
437   if (!Ptr.isOnePastEnd())
438     return true;
439 
440   const SourceInfo &Loc = S.Current->getSource(OpPC);
441   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
442       << CSK << S.Current->getRange(OpPC);
443   return false;
444 }
445 
446 bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
447                    uint32_t Offset) {
448   uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
449   uint32_t PtrOffset = Ptr.getByteOffset();
450 
451   // We subtract Offset from PtrOffset. The result must be at least
452   // MinOffset.
453   if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
454     return true;
455 
456   const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
457   QualType TargetQT = E->getType()->getPointeeType();
458   QualType MostDerivedQT = Ptr.getDeclPtr().getType();
459 
460   S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
461       << MostDerivedQT << TargetQT;
462 
463   return false;
464 }
465 
466 bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
467   assert(Ptr.isLive() && "Pointer is not live");
468   if (!Ptr.isConst() || Ptr.isMutable())
469     return true;
470 
471   // The This pointer is writable in constructors and destructors,
472   // even if isConst() returns true.
473   // TODO(perf): We could be hitting this code path quite a lot in complex
474   // constructors. Is there a better way to do this?
475   if (S.Current->getFunction()) {
476     for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
477       if (const Function *Func = Frame->getFunction();
478           Func && (Func->isConstructor() || Func->isDestructor()) &&
479           Ptr.block() == Frame->getThis().block()) {
480         return true;
481       }
482     }
483   }
484 
485   if (!Ptr.isBlockPointer())
486     return false;
487 
488   const QualType Ty = Ptr.getType();
489   const SourceInfo &Loc = S.Current->getSource(OpPC);
490   S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
491   return false;
492 }
493 
494 bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
495   assert(Ptr.isLive() && "Pointer is not live");
496   if (!Ptr.isMutable())
497     return true;
498 
499   // In C++14 onwards, it is permitted to read a mutable member whose
500   // lifetime began within the evaluation.
501   if (S.getLangOpts().CPlusPlus14 &&
502       Ptr.block()->getEvalID() == S.Ctx.getEvalID())
503     return true;
504 
505   const SourceInfo &Loc = S.Current->getSource(OpPC);
506   const FieldDecl *Field = Ptr.getField();
507   S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
508   S.Note(Field->getLocation(), diag::note_declared_at);
509   return false;
510 }
511 
512 static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
513                           AccessKinds AK) {
514   assert(Ptr.isLive());
515 
516   // FIXME: This check here might be kinda expensive. Maybe it would be better
517   // to have another field in InlineDescriptor for this?
518   if (!Ptr.isBlockPointer())
519     return true;
520 
521   QualType PtrType = Ptr.getType();
522   if (!PtrType.isVolatileQualified())
523     return true;
524 
525   const SourceInfo &Loc = S.Current->getSource(OpPC);
526   if (S.getLangOpts().CPlusPlus)
527     S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
528   else
529     S.FFDiag(Loc);
530   return false;
531 }
532 
533 bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
534                       AccessKinds AK) {
535   assert(Ptr.isLive());
536 
537   if (Ptr.isInitialized())
538     return true;
539 
540   if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
541       VD && VD->hasGlobalStorage()) {
542     const SourceInfo &Loc = S.Current->getSource(OpPC);
543     if (VD->getAnyInitializer()) {
544       S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
545       S.Note(VD->getLocation(), diag::note_declared_at);
546     } else {
547       diagnoseMissingInitializer(S, OpPC, VD);
548     }
549     return false;
550   }
551 
552   if (!S.checkingPotentialConstantExpression()) {
553     S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
554         << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
555   }
556   return false;
557 }
558 
559 bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
560   if (Ptr.isInitialized())
561     return true;
562 
563   assert(S.getLangOpts().CPlusPlus);
564   const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
565   if ((!VD->hasConstantInitialization() &&
566        VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
567       (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
568        !VD->hasICEInitializer(S.getASTContext()))) {
569     const SourceInfo &Loc = S.Current->getSource(OpPC);
570     S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
571     S.Note(VD->getLocation(), diag::note_declared_at);
572   }
573   return false;
574 }
575 
576 static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
577   if (!Ptr.isWeak())
578     return true;
579 
580   const auto *VD = Ptr.getDeclDesc()->asVarDecl();
581   assert(VD);
582   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
583       << VD;
584   S.Note(VD->getLocation(), diag::note_declared_at);
585 
586   return false;
587 }
588 
589 bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
590                AccessKinds AK) {
591   if (!CheckLive(S, OpPC, Ptr, AK))
592     return false;
593   if (!CheckConstant(S, OpPC, Ptr))
594     return false;
595   if (!CheckDummy(S, OpPC, Ptr, AK))
596     return false;
597   if (!CheckExtern(S, OpPC, Ptr))
598     return false;
599   if (!CheckRange(S, OpPC, Ptr, AK))
600     return false;
601   if (!CheckActive(S, OpPC, Ptr, AK))
602     return false;
603   if (!CheckInitialized(S, OpPC, Ptr, AK))
604     return false;
605   if (!CheckTemporary(S, OpPC, Ptr, AK))
606     return false;
607   if (!CheckWeak(S, OpPC, Ptr))
608     return false;
609   if (!CheckMutable(S, OpPC, Ptr))
610     return false;
611   if (!CheckVolatile(S, OpPC, Ptr, AK))
612     return false;
613   return true;
614 }
615 
616 /// This is not used by any of the opcodes directly. It's used by
617 /// EvalEmitter to do the final lvalue-to-rvalue conversion.
618 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
619   if (!CheckLive(S, OpPC, Ptr, AK_Read))
620     return false;
621   if (!CheckConstant(S, OpPC, Ptr))
622     return false;
623 
624   if (!CheckDummy(S, OpPC, Ptr, AK_Read))
625     return false;
626   if (!CheckExtern(S, OpPC, Ptr))
627     return false;
628   if (!CheckRange(S, OpPC, Ptr, AK_Read))
629     return false;
630   if (!CheckActive(S, OpPC, Ptr, AK_Read))
631     return false;
632   if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
633     return false;
634   if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
635     return false;
636   if (!CheckWeak(S, OpPC, Ptr))
637     return false;
638   if (!CheckMutable(S, OpPC, Ptr))
639     return false;
640   return true;
641 }
642 
643 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
644   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
645     return false;
646   if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
647     return false;
648   if (!CheckExtern(S, OpPC, Ptr))
649     return false;
650   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
651     return false;
652   if (!CheckGlobal(S, OpPC, Ptr))
653     return false;
654   if (!CheckConst(S, OpPC, Ptr))
655     return false;
656   return true;
657 }
658 
659 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
660   if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
661     return false;
662   if (!Ptr.isDummy()) {
663     if (!CheckExtern(S, OpPC, Ptr))
664       return false;
665     if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
666       return false;
667   }
668   return true;
669 }
670 
671 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
672   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
673     return false;
674   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
675     return false;
676   return true;
677 }
678 
679 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
680 
681   if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
682     const SourceLocation &Loc = S.Current->getLocation(OpPC);
683     S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
684     return false;
685   }
686 
687   if (F->isConstexpr() && F->hasBody() &&
688       (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
689     return true;
690 
691   // Implicitly constexpr.
692   if (F->isLambdaStaticInvoker())
693     return true;
694 
695   const SourceLocation &Loc = S.Current->getLocation(OpPC);
696   if (S.getLangOpts().CPlusPlus11) {
697     const FunctionDecl *DiagDecl = F->getDecl();
698 
699     // Invalid decls have been diagnosed before.
700     if (DiagDecl->isInvalidDecl())
701       return false;
702 
703     // If this function is not constexpr because it is an inherited
704     // non-constexpr constructor, diagnose that directly.
705     const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
706     if (CD && CD->isInheritingConstructor()) {
707       const auto *Inherited = CD->getInheritedConstructor().getConstructor();
708       if (!Inherited->isConstexpr())
709         DiagDecl = CD = Inherited;
710     }
711 
712     // FIXME: If DiagDecl is an implicitly-declared special member function
713     // or an inheriting constructor, we should be much more explicit about why
714     // it's not constexpr.
715     if (CD && CD->isInheritingConstructor()) {
716       S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
717           << CD->getInheritedConstructor().getConstructor()->getParent();
718       S.Note(DiagDecl->getLocation(), diag::note_declared_at);
719     } else {
720       // Don't emit anything if the function isn't defined and we're checking
721       // for a constant expression. It might be defined at the point we're
722       // actually calling it.
723       bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
724       if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
725           S.checkingPotentialConstantExpression())
726         return false;
727 
728       // If the declaration is defined, declared 'constexpr' _and_ has a body,
729       // the below diagnostic doesn't add anything useful.
730       if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
731           DiagDecl->hasBody())
732         return false;
733 
734       S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
735           << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
736 
737       if (DiagDecl->getDefinition())
738         S.Note(DiagDecl->getDefinition()->getLocation(),
739                diag::note_declared_at);
740       else
741         S.Note(DiagDecl->getLocation(), diag::note_declared_at);
742     }
743   } else {
744     S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
745   }
746 
747   return false;
748 }
749 
750 bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
751   if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
752     S.FFDiag(S.Current->getSource(OpPC),
753              diag::note_constexpr_depth_limit_exceeded)
754         << S.getLangOpts().ConstexprCallDepth;
755     return false;
756   }
757 
758   return true;
759 }
760 
761 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
762   if (!This.isZero())
763     return true;
764 
765   const SourceInfo &Loc = S.Current->getSource(OpPC);
766 
767   bool IsImplicit = false;
768   if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
769     IsImplicit = E->isImplicit();
770 
771   if (S.getLangOpts().CPlusPlus11)
772     S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
773   else
774     S.FFDiag(Loc);
775 
776   return false;
777 }
778 
779 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
780   if (!MD->isPureVirtual())
781     return true;
782   const SourceInfo &E = S.Current->getSource(OpPC);
783   S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
784   S.Note(MD->getLocation(), diag::note_declared_at);
785   return false;
786 }
787 
788 bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
789                       APFloat::opStatus Status, FPOptions FPO) {
790   // [expr.pre]p4:
791   //   If during the evaluation of an expression, the result is not
792   //   mathematically defined [...], the behavior is undefined.
793   // FIXME: C++ rules require us to not conform to IEEE 754 here.
794   if (Result.isNan()) {
795     const SourceInfo &E = S.Current->getSource(OpPC);
796     S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
797         << /*NaN=*/true << S.Current->getRange(OpPC);
798     return S.noteUndefinedBehavior();
799   }
800 
801   // In a constant context, assume that any dynamic rounding mode or FP
802   // exception state matches the default floating-point environment.
803   if (S.inConstantContext())
804     return true;
805 
806   if ((Status & APFloat::opInexact) &&
807       FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
808     // An inexact result means the value depends on the rounding mode. If the
809     // requested mode is dynamic, the evaluation cannot be done at compile time.
810     const SourceInfo &E = S.Current->getSource(OpPC);
811     S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
812     return false;
813   }
814 
815   if ((Status != APFloat::opOK) &&
816       (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
817        FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
818        FPO.getAllowFEnvAccess())) {
819     const SourceInfo &E = S.Current->getSource(OpPC);
820     S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
821     return false;
822   }
823 
824   if ((Status & APFloat::opStatus::opInvalidOp) &&
825       FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
826     const SourceInfo &E = S.Current->getSource(OpPC);
827     // There is no usefully definable result.
828     S.FFDiag(E);
829     return false;
830   }
831 
832   return true;
833 }
834 
835 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
836   if (S.getLangOpts().CPlusPlus20)
837     return true;
838 
839   const SourceInfo &E = S.Current->getSource(OpPC);
840   S.CCEDiag(E, diag::note_constexpr_new);
841   return true;
842 }
843 
844 bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
845                          DynamicAllocator::Form AllocForm,
846                          DynamicAllocator::Form DeleteForm, const Descriptor *D,
847                          const Expr *NewExpr) {
848   if (AllocForm == DeleteForm)
849     return true;
850 
851   QualType TypeToDiagnose;
852   // We need to shuffle things around a bit here to get a better diagnostic,
853   // because the expression we allocated the block for was of type int*,
854   // but we want to get the array size right.
855   if (D->isArray()) {
856     QualType ElemQT = D->getType()->getPointeeType();
857     TypeToDiagnose = S.getASTContext().getConstantArrayType(
858         ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
859         nullptr, ArraySizeModifier::Normal, 0);
860   } else
861     TypeToDiagnose = D->getType()->getPointeeType();
862 
863   const SourceInfo &E = S.Current->getSource(OpPC);
864   S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
865       << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
866       << TypeToDiagnose;
867   S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
868       << NewExpr->getSourceRange();
869   return false;
870 }
871 
872 bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
873                        const Pointer &Ptr) {
874   // The two sources we currently allow are new expressions and
875   // __builtin_operator_new calls.
876   if (isa_and_nonnull<CXXNewExpr>(Source))
877     return true;
878   if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
879       CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
880     return true;
881 
882   // Whatever this is, we didn't heap allocate it.
883   const SourceInfo &Loc = S.Current->getSource(OpPC);
884   S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
885       << Ptr.toDiagnosticString(S.getASTContext());
886 
887   if (Ptr.isTemporary())
888     S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
889   else
890     S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
891   return false;
892 }
893 
894 /// We already know the given DeclRefExpr is invalid for some reason,
895 /// now figure out why and print appropriate diagnostics.
896 bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
897   const ValueDecl *D = DR->getDecl();
898   return diagnoseUnknownDecl(S, OpPC, D);
899 }
900 
901 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
902                 AccessKinds AK) {
903   if (!Ptr.isDummy())
904     return true;
905 
906   const Descriptor *Desc = Ptr.getDeclDesc();
907   const ValueDecl *D = Desc->asValueDecl();
908   if (!D)
909     return false;
910 
911   if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
912     return diagnoseUnknownDecl(S, OpPC, D);
913 
914   assert(AK == AK_Assign);
915   if (S.getLangOpts().CPlusPlus14) {
916     const SourceInfo &E = S.Current->getSource(OpPC);
917     S.FFDiag(E, diag::note_constexpr_modify_global);
918   }
919   return false;
920 }
921 
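/// Check that arguments passed for parameters declared nonnull are not null
/// pointers; diagnose a violation of the nonnull attribute otherwise.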
922 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
923                       const CallExpr *CE, unsigned ArgSize) {
924   auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
925   auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
926   unsigned Offset = 0;
927   unsigned Index = 0;
928   for (const Expr *Arg : Args) {
929     if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
930       const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
931       if (ArgPtr.isZero()) {
932         const SourceLocation &Loc = S.Current->getLocation(OpPC);
933         S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
934         return false;
935       }
936     }
937 
938     Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
939     ++Index;
940   }
941   return true;
942 }
943 
944 // FIXME: This is similar to code we already have in Compiler.cpp.
945 // I think it makes sense to instead add the field and base destruction stuff
946 // to the destructor Function itself. Then destroying a record would really
947 // _just_ be calling its destructor. That would also help with the diagnostic
948 // difference when the destructor or a field/base fails.
949 static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
950                                 const Pointer &BasePtr,
951                                 const Descriptor *Desc) {
952   assert(Desc->isRecord());
953   const Record *R = Desc->ElemRecord;
954   assert(R);
955 
956   if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
957     const SourceInfo &Loc = S.Current->getSource(OpPC);
958     S.FFDiag(Loc, diag::note_constexpr_double_destroy);
959     return false;
960   }
961 
962   // Destructor of this record.
963   if (const CXXDestructorDecl *Dtor = R->getDestructor();
964       Dtor && !Dtor->isTrivial()) {
965     const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
966     if (!DtorFunc)
967       return false;
968 
969     S.Stk.push<Pointer>(BasePtr);
970     if (!Call(S, OpPC, DtorFunc, 0))
971       return false;
972   }
973   return true;
974 }
975 
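/// Invoke the destructors for the record (or array of records) stored in the
/// given block before it is deallocated.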
976 static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
977   assert(B);
978   const Descriptor *Desc = B->getDescriptor();
979 
980   if (Desc->isPrimitive() || Desc->isPrimitiveArray())
981     return true;
982 
983   assert(Desc->isRecord() || Desc->isCompositeArray());
984 
985   if (Desc->isCompositeArray()) {
986     const Descriptor *ElemDesc = Desc->ElemDesc;
987     assert(ElemDesc->isRecord());
988 
989     Pointer RP(const_cast<Block *>(B));
990     for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
991       if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
992         return false;
993     }
994     return true;
995   }
996 
997   assert(Desc->isRecord());
998   return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
999 }
1000 
1001 static bool hasVirtualDestructor(QualType T) {
1002   if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1003     if (const CXXDestructorDecl *DD = RD->getDestructor())
1004       return DD->isVirtual();
1005   return false;
1006 }
1007 
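/// Implements delete and delete[]: validates the operand, runs destructors,
/// deallocates the block, and checks that the allocation and deallocation
/// forms match.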
1008 bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
1009           bool IsGlobalDelete) {
1010   if (!CheckDynamicMemoryAllocation(S, OpPC))
1011     return false;
1012 
1013   const Expr *Source = nullptr;
1014   const Block *BlockToDelete = nullptr;
1015   {
1016     // Extra scope so this Pointer is destroyed before we deallocate the block,
1017     // i.e. the block has no Pointer pointing to it when we destroy it.
1018     Pointer Ptr = S.Stk.pop<Pointer>();
1019 
1020     // Deleting nullptr is always fine.
1021     if (Ptr.isZero())
1022       return true;
1023 
1024     // Remove base casts.
1025     QualType InitialType = Ptr.getType();
1026     while (Ptr.isBaseClass())
1027       Ptr = Ptr.getBase();
1028 
1029     // For the non-array case, the types must match if the static type
1030     // does not have a virtual destructor.
1031     if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
1032         !hasVirtualDestructor(InitialType)) {
1033       S.FFDiag(S.Current->getSource(OpPC),
1034                diag::note_constexpr_delete_base_nonvirt_dtor)
1035           << InitialType << Ptr.getType();
1036       return false;
1037     }
1038 
1039     if (!Ptr.isRoot() || Ptr.isOnePastEnd() || Ptr.isArrayElement()) {
1040       const SourceInfo &Loc = S.Current->getSource(OpPC);
1041       S.FFDiag(Loc, diag::note_constexpr_delete_subobject)
1042           << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd();
1043       return false;
1044     }
1045 
1046     Source = Ptr.getDeclDesc()->asExpr();
1047     BlockToDelete = Ptr.block();
1048 
1049     if (!CheckDeleteSource(S, OpPC, Source, Ptr))
1050       return false;
1051 
1052     // For a class type with a virtual destructor, the selected operator delete
1053     // is the one looked up when building the destructor.
1054     QualType AllocType = Ptr.getType();
1055     if (!DeleteIsArrayForm && !IsGlobalDelete) {
1056       auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
1057         if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1058           if (const CXXDestructorDecl *DD = RD->getDestructor())
1059             return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
1060         return nullptr;
1061       };
1062 
1063       if (const FunctionDecl *VirtualDelete =
1064               getVirtualOperatorDelete(AllocType);
1065           VirtualDelete &&
1066           !VirtualDelete->isReplaceableGlobalAllocationFunction()) {
1067         S.FFDiag(S.Current->getSource(OpPC),
1068                  diag::note_constexpr_new_non_replaceable)
1069             << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
1070         return false;
1071       }
1072     }
1073   }
1074   assert(Source);
1075   assert(BlockToDelete);
1076 
1077   // Invoke destructors before deallocating the memory.
1078   if (!RunDestructors(S, OpPC, BlockToDelete))
1079     return false;
1080 
1081   DynamicAllocator &Allocator = S.getAllocator();
1082   const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
1083   std::optional<DynamicAllocator::Form> AllocForm =
1084       Allocator.getAllocationForm(Source);
1085 
1086   if (!Allocator.deallocate(Source, BlockToDelete, S)) {
1087     // Nothing has been deallocated; this must be a double-delete.
1088     const SourceInfo &Loc = S.Current->getSource(OpPC);
1089     S.FFDiag(Loc, diag::note_constexpr_double_delete);
1090     return false;
1091   }
1092 
1093   assert(AllocForm);
1094   DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
1095                                           ? DynamicAllocator::Form::Array
1096                                           : DynamicAllocator::Form::NonArray;
1097   return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
1098                              Source);
1099 }
1100 
1101 void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
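/// Diagnose a value that lies outside the representable range of an
/// enumeration without a fixed underlying type. For example (roughly):
///   enum E { A, B, C };    // value range is [0, 3]
///   constexpr E e = E(5);  // out of range, diagnosed here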
1102                        const APSInt &Value) {
1103   llvm::APInt Min;
1104   llvm::APInt Max;
1105 
1106   if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
1107     return;
1108 
1109   ED->getValueRange(Max, Min);
1110   --Max;
1111 
1112   if (ED->getNumNegativeBits() &&
1113       (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
1114     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1115     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1116         << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
1117         << ED;
1118   } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
1119     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1120     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1121         << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
1122         << ED;
1123   }
1124 }
1125 
1126 bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
1127   assert(T);
1128   assert(!S.getLangOpts().CPlusPlus23);
1129 
1130   // C++1y: A constant initializer for an object o [...] may also invoke
1131   // constexpr constructors for o and its subobjects even if those objects
1132   // are of non-literal class types.
1133   //
1134   // C++11 missed this detail for aggregates, so classes like this:
1135   //   struct foo_t { union { int i; volatile int j; } u; };
1136   // are not (obviously) initializable like so:
1137   //   __attribute__((__require_constant_initialization__))
1138   //   static const foo_t x = {{0}};
1139   // because "i" is a subobject with non-literal initialization (due to the
1140   // volatile member of the union). See:
1141   //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
1142   // Therefore, we use the C++1y behavior.
1143 
1144   if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
1145       S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
1146     return true;
1147   }
1148 
1149   const Expr *E = S.Current->getExpr(OpPC);
1150   if (S.getLangOpts().CPlusPlus11)
1151     S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
1152   else
1153     S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
1154   return false;
1155 }
1156 
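/// A constructor call is not a constant expression if the class has virtual
/// base classes; diagnose that case here.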
1157 static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1158                              const Pointer &ThisPtr) {
1159   assert(Func->isConstructor());
1160 
1161   const Descriptor *D = ThisPtr.getFieldDesc();
1162 
1163   // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1164   // subobject of a composite array.
1165   if (!D->ElemRecord)
1166     return true;
1167 
1168   if (D->ElemRecord->getNumVirtualBases() == 0)
1169     return true;
1170 
1171   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
1172       << Func->getParentDecl();
1173   return false;
1174 }
1175 
1176 bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
1177              uint32_t VarArgSize) {
1178   if (Func->hasThisPointer()) {
1179     size_t ArgSize = Func->getArgSize() + VarArgSize;
1180     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1181     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1182 
1183     // If the current function is a lambda static invoker and
1184     // the function we're about to call is a lambda call operator,
1185     // skip the CheckInvoke, since the ThisPtr is a null pointer
1186     // anyway.
1187     if (!(S.Current->getFunction() &&
1188           S.Current->getFunction()->isLambdaStaticInvoker() &&
1189           Func->isLambdaCallOperator())) {
1190       if (!CheckInvoke(S, OpPC, ThisPtr))
1191         return false;
1192     }
1193 
1194     if (S.checkingPotentialConstantExpression())
1195       return false;
1196   }
1197 
1198   if (!CheckCallable(S, OpPC, Func))
1199     return false;
1200 
1201   if (!CheckCallDepth(S, OpPC))
1202     return false;
1203 
1204   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1205   InterpFrame *FrameBefore = S.Current;
1206   S.Current = NewFrame.get();
1207 
1208   APValue CallResult;
1209   // Note that we cannot assert(CallResult.hasValue()) here since
1210   // Ret() above only sets the APValue if the current frame doesn't
1211   // have a caller set.
1212   if (Interpret(S, CallResult)) {
1213     NewFrame.release(); // Frame was delete'd already.
1214     assert(S.Current == FrameBefore);
1215     return true;
1216   }
1217 
1218   // Interpreting the function failed somehow. Reset to
1219   // previous state.
1220   S.Current = FrameBefore;
1221   return false;
1222 }
1223 
1224 bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
1225           uint32_t VarArgSize) {
1226   assert(Func);
1227   auto cleanup = [&]() -> bool {
1228     cleanupAfterFunctionCall(S, OpPC, Func);
1229     return false;
1230   };
1231 
1232   if (Func->hasThisPointer()) {
1233     size_t ArgSize = Func->getArgSize() + VarArgSize;
1234     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1235 
1236     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1237 
1238     // If the current function is a lambda static invoker and
1239     // the function we're about to call is a lambda call operator,
1240     // skip the CheckInvoke, since the ThisPtr is a null pointer
1241     // anyway.
1242     if (S.Current->getFunction() &&
1243         S.Current->getFunction()->isLambdaStaticInvoker() &&
1244         Func->isLambdaCallOperator()) {
1245       assert(ThisPtr.isZero());
1246     } else {
1247       if (!CheckInvoke(S, OpPC, ThisPtr))
1248         return cleanup();
1249     }
1250 
1251     if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
1252       return false;
1253   }
1254 
1255   if (!CheckCallable(S, OpPC, Func))
1256     return cleanup();
1257 
1258   // FIXME: The isConstructor() check here is not always right. The current
1259   // constant evaluator is somewhat inconsistent in when it allows a function
1260   // call when checking for a constant expression.
1261   if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
1262       !Func->isConstructor())
1263     return cleanup();
1264 
1265   if (!CheckCallDepth(S, OpPC))
1266     return cleanup();
1267 
1268   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1269   InterpFrame *FrameBefore = S.Current;
1270   S.Current = NewFrame.get();
1271 
1272   InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
1273   APValue CallResult;
1274   // Note that we cannot assert(CallResult.hasValue()) here since
1275   // Ret() above only sets the APValue if the current frame doesn't
1276   // have a caller set.
1277   if (Interpret(S, CallResult)) {
1278     NewFrame.release(); // Frame was delete'd already.
1279     assert(S.Current == FrameBefore);
1280     return true;
1281   }
1282 
1283   // Interpreting the function failed somehow. Reset to
1284   // previous state.
1285   S.Current = FrameBefore;
1286   return false;
1287 }
1288 
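/// Perform a virtual call: determine the dynamic type of the object, look up
/// the final overrider, adjust the This pointer if needed, and for covariant
/// return types convert the result back to the statically expected base.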
1289 bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
1290               uint32_t VarArgSize) {
1291   assert(Func->hasThisPointer());
1292   assert(Func->isVirtual());
1293   size_t ArgSize = Func->getArgSize() + VarArgSize;
1294   size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1295   Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1296 
1297   const CXXRecordDecl *DynamicDecl = nullptr;
1298   {
1299     Pointer TypePtr = ThisPtr;
1300     while (TypePtr.isBaseClass())
1301       TypePtr = TypePtr.getBase();
1302 
1303     QualType DynamicType = TypePtr.getType();
1304     if (DynamicType->isPointerType() || DynamicType->isReferenceType())
1305       DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
1306     else
1307       DynamicDecl = DynamicType->getAsCXXRecordDecl();
1308   }
1309   assert(DynamicDecl);
1310 
1311   const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
1312   const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
1313   const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
1314       DynamicDecl, StaticDecl, InitialFunction);
1315 
1316   if (Overrider != InitialFunction) {
1317     // DR1872: An instantiated virtual constexpr function can't be called in a
1318     // constant expression (prior to C++20). We can still constant-fold such a
1319     // call.
1320     if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
1321       const Expr *E = S.Current->getExpr(OpPC);
1322       S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
1323     }
1324 
1325     Func = S.getContext().getOrCreateFunction(Overrider);
1326 
1327     const CXXRecordDecl *ThisFieldDecl =
1328         ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
1329     if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
1330       // If the function we call is further down the hierarchy than the
1331       // FieldDesc of our pointer, just walk up the hierarchy of this field
1332       // as far as we can.
1333       while (ThisPtr.isBaseClass())
1334         ThisPtr = ThisPtr.getBase();
1335     }
1336   }
1337 
1338   if (!Call(S, OpPC, Func, VarArgSize))
1339     return false;
1340 
1341   // Covariant return types. The return type of Overrider is a pointer
1342   // or reference to a class type.
1343   if (Overrider != InitialFunction &&
1344       Overrider->getReturnType()->isPointerOrReferenceType() &&
1345       InitialFunction->getReturnType()->isPointerOrReferenceType()) {
1346     QualType OverriderPointeeType =
1347         Overrider->getReturnType()->getPointeeType();
1348     QualType InitialPointeeType =
1349         InitialFunction->getReturnType()->getPointeeType();
1350     // We've called Overrider above, but calling code expects us to return what
1351     // InitialFunction returned. According to the rules for covariant return
1352     // types, what InitialFunction returns needs to be a base class of what
1353     // Overrider returns. So, we need to do an upcast here.
1354     unsigned Offset = S.getContext().collectBaseOffset(
1355         InitialPointeeType->getAsRecordDecl(),
1356         OverriderPointeeType->getAsRecordDecl());
1357     return GetPtrBasePop(S, OpPC, Offset);
1358   }
1359 
1360   return true;
1361 }
1362 
1363 bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
1364             const CallExpr *CE, uint32_t BuiltinID) {
1365   if (S.checkingPotentialConstantExpression())
1366     return false;
1367   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);
1368 
1369   InterpFrame *FrameBefore = S.Current;
1370   S.Current = NewFrame.get();
1371 
1372   if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
1373     // Release ownership of NewFrame since the frame was already deleted.
1374     NewFrame.release();
1375     // Ensure that S.Current is correctly reset to the previous frame.
1376     assert(S.Current == FrameBefore);
1377     return true;
1378   }
1379 
1380   // Interpreting the function failed somehow. Reset to
1381   // previous state.
1382   S.Current = FrameBefore;
1383   return false;
1384 }
1385 
1386 bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
1387              const CallExpr *CE) {
1388   const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();
1389 
1390   const Function *F = FuncPtr.getFunction();
1391   if (!F) {
1392     const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
1393     S.FFDiag(E, diag::note_constexpr_null_callee)
1394         << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
1395     return false;
1396   }
1397 
1398   if (!FuncPtr.isValid() || !F->getDecl())
1399     return Invalid(S, OpPC);
1400 
1401   assert(F);
1402 
1403   // This happens when the call expression has been cast to
1404   // something else, but we don't support that.
1405   if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
1406       S.Ctx.classify(CE->getType()))
1407     return false;
1408 
1409   // Check argument nullability state.
1410   if (F->hasNonNullAttr()) {
1411     if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
1412       return false;
1413   }
1414 
1415   assert(ArgSize >= F->getWrittenArgSize());
1416   uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();
1417 
1418   // We need to do this explicitly here since we don't have the necessary
1419   // information to do it automatically.
1420   if (F->isThisPointerExplicit())
1421     VarArgSize -= align(primSize(PT_Ptr));
1422 
1423   if (F->isVirtual())
1424     return CallVirt(S, OpPC, F, VarArgSize);
1425 
1426   return Call(S, OpPC, F, VarArgSize);
1427 }
1428 
1429 bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
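/// For placement new: check that the pointed-to storage is writable, large
/// enough, and of a type similar to what the new-expression allocates.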
1430                           std::optional<uint64_t> ArraySize) {
1431   const Pointer &Ptr = S.Stk.peek<Pointer>();
1432 
1433   if (!CheckStore(S, OpPC, Ptr))
1434     return false;
1435 
1436   if (!InvalidNewDeleteExpr(S, OpPC, E))
1437     return false;
1438 
1439   const auto *NewExpr = cast<CXXNewExpr>(E);
1440   QualType StorageType = Ptr.getType();
1441 
1442   if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
1443       StorageType->isPointerType()) {
1444     // FIXME: Are there other cases where this is a problem?
1445     StorageType = StorageType->getPointeeType();
1446   }
1447 
1448   const ASTContext &ASTCtx = S.getASTContext();
1449   QualType AllocType;
1450   if (ArraySize) {
1451     AllocType = ASTCtx.getConstantArrayType(
1452         NewExpr->getAllocatedType(),
1453         APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
1454         ArraySizeModifier::Normal, 0);
1455   } else {
1456     AllocType = NewExpr->getAllocatedType();
1457   }
1458 
1459   unsigned StorageSize = 1;
1460   unsigned AllocSize = 1;
1461   if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
1462     AllocSize = CAT->getZExtSize();
1463   if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
1464     StorageSize = CAT->getZExtSize();
1465 
1466   if (AllocSize > StorageSize ||
1467       !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
1468                              ASTCtx.getBaseElementType(StorageType))) {
1469     S.FFDiag(S.Current->getLocation(OpPC),
1470              diag::note_constexpr_placement_new_wrong_type)
1471         << StorageType << AllocType;
1472     return false;
1473   }
1474 
1475   // Can't activate fields in a union, unless the direct base is the union.
1476   if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
1477     return CheckActive(S, OpPC, Ptr, AK_Construct);
1478 
1479   return true;
1480 }
1481 
1482 bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
1483   assert(E);
1484 
1485   if (S.getLangOpts().CPlusPlus26)
1486     return true;
1487 
1488   const auto &Loc = S.Current->getSource(OpPC);
1489 
1490   if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
1491     const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
1492 
1493     if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
1494       // This is allowed pre-C++26, but only inside an std function.
1495       if (S.Current->isStdFunction())
1496         return true;
1497       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1498           << /*C++26 feature*/ 1 << E->getSourceRange();
1499     } else if (NewExpr->getNumPlacementArgs() == 1 &&
1500                !OperatorNew->isReservedGlobalPlacementOperator()) {
1501       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1502           << /*Unsupported*/ 0 << E->getSourceRange();
1503     } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
1504       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1505           << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
1506     }
1507   } else {
1508     const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
1509     const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
1510     if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
1511       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1512           << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
1513     }
1514   }
1515 
1516   return false;
1517 }
1518 
1519 bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1520                               const FixedPoint &FP) {
1521   const Expr *E = S.Current->getExpr(OpPC);
1522   if (S.checkingForUndefinedBehavior()) {
1523     S.getASTContext().getDiagnostics().Report(
1524         E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
1525         << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1526   }
1527   S.CCEDiag(E, diag::note_constexpr_overflow)
1528       << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1529   return S.noteUndefinedBehavior();
1530 }
1531 
1532 bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1533   const SourceInfo &Loc = S.Current->getSource(OpPC);
1534   S.FFDiag(Loc,
1535            diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1536       << Index;
1537   return false;
1538 }
1539 
1540 bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1541                                 const Pointer &Ptr, unsigned BitWidth) {
1542   if (Ptr.isDummy())
1543     return false;
1544 
1545   const SourceInfo &E = S.Current->getSource(OpPC);
1546   S.CCEDiag(E, diag::note_constexpr_invalid_cast)
1547       << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);
1548 
1549   if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1550     // Only allow based lvalue casts if they are lossless.
1551     if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
1552         BitWidth)
1553       return Invalid(S, OpPC);
1554   }
1555   return true;
1556 }
1557 
1558 bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1559   const Pointer &Ptr = S.Stk.pop<Pointer>();
1560 
1561   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1562     return false;
1563 
1564   S.Stk.push<IntegralAP<false>>(
1565       IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
1566   return true;
1567 }
1568 
1569 bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1570   const Pointer &Ptr = S.Stk.pop<Pointer>();
1571 
1572   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1573     return false;
1574 
1575   S.Stk.push<IntegralAP<true>>(
1576       IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
1577   return true;
1578 }
1579 
1580 bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
1581                   bool TargetIsUCharOrByte) {
1582   // This is always fine.
1583   if (!HasIndeterminateBits)
1584     return true;
1585 
1586   // Indeterminate bits can only be bitcast to unsigned char or std::byte.
1587   if (TargetIsUCharOrByte)
1588     return true;
1589 
1590   const Expr *E = S.Current->getExpr(OpPC);
1591   QualType ExprType = E->getType();
1592   S.FFDiag(E, diag::note_constexpr_bit_cast_indet_dest)
1593       << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
1594   return false;
1595 }
1596 
1597 // https://github.com/llvm/llvm-project/issues/102513
1598 #if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
1599 #pragma optimize("", off)
1600 #endif
1601 bool Interpret(InterpState &S, APValue &Result) {
1602   // The current stack frame when we started Interpret().
1603   // This is used by the ops to determine whether
1604   // to return from this function and thus terminate
1605   // interpretation.
1606   const InterpFrame *StartFrame = S.Current;
1607   assert(!S.Current->isRoot());
1608   CodePtr PC = S.Current->getPC();
1609 
1610   // Empty program.
1611   if (!PC)
1612     return true;
1613 
1614   for (;;) {
1615     auto Op = PC.read<Opcode>();
1616     CodePtr OpPC = PC;
1617 
1618     switch (Op) {
1619 #define GET_INTERP
1620 #include "Opcodes.inc"
1621 #undef GET_INTERP
1622     }
1623   }
1624 }
1625 // https://github.com/llvm/llvm-project/issues/102513
1626 #if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
1627 #pragma optimize("", on)
1628 #endif
1629 
1630 } // namespace interp
1631 } // namespace clang
1632