1 //===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "Interp.h"
10 #include "Function.h"
11 #include "InterpFrame.h"
12 #include "InterpShared.h"
13 #include "InterpStack.h"
14 #include "Opcode.h"
15 #include "PrimType.h"
16 #include "Program.h"
17 #include "State.h"
18 #include "clang/AST/ASTContext.h"
19 #include "clang/AST/ASTDiagnostic.h"
20 #include "clang/AST/CXXInheritance.h"
21 #include "clang/AST/DeclObjC.h"
22 #include "clang/AST/Expr.h"
23 #include "clang/AST/ExprCXX.h"
24 #include "clang/Basic/DiagnosticSema.h"
25 #include "clang/Basic/TargetInfo.h"
26 #include "llvm/ADT/APSInt.h"
27 #include "llvm/ADT/StringExtras.h"
28 #include <limits>
29 #include <vector>
30 
31 using namespace clang;
32 using namespace clang::interp;
33 
34 static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
35   llvm::report_fatal_error("Interpreter cannot return values");
36 }
37 
38 //===----------------------------------------------------------------------===//
39 // Jmp, Jt, Jf
40 //===----------------------------------------------------------------------===//
41 
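// Jmp performs an unconditional jump. Jt and Jf pop a bool off the stack and
// jump only if it is true (Jt) or false (Jf). The offset is applied directly
// to the current PC.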
42 static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
43   PC += Offset;
44   return true;
45 }
46 
47 static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
48   if (S.Stk.pop<bool>()) {
49     PC += Offset;
50   }
51   return true;
52 }
53 
54 static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
55   if (!S.Stk.pop<bool>()) {
56     PC += Offset;
57   }
58   return true;
59 }
60 
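/// Diagnose an access to a variable whose initializer is not known, pointing
/// at both the offending access and the declaration of \p VD.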
61 static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
62                                        const ValueDecl *VD) {
63   const SourceInfo &E = S.Current->getSource(OpPC);
64   S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
65   S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
66 }
67 
68 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
69                                      const ValueDecl *VD);
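/// Diagnose a reference to a declaration whose value is unknown at compile
/// time: a function parameter, a non-const variable, or a const variable
/// without an initializer. Always returns false.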
70 static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
71                                 const ValueDecl *D) {
72   const SourceInfo &E = S.Current->getSource(OpPC);
73 
74   if (isa<ParmVarDecl>(D)) {
75     if (S.getLangOpts().CPlusPlus11) {
76       S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
77       S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
78     } else {
79       S.FFDiag(E);
80     }
81     return false;
82   }
83 
84   if (!D->getType().isConstQualified())
85     diagnoseNonConstVariable(S, OpPC, D);
86   else if (const auto *VD = dyn_cast<VarDecl>(D);
87            VD && !VD->getAnyInitializer())
88     diagnoseMissingInitializer(S, OpPC, VD);
89 
90   return false;
91 }
92 
93 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
94                                      const ValueDecl *VD) {
95   const SourceInfo &Loc = S.Current->getSource(OpPC);
96   if (!S.getLangOpts().CPlusPlus) {
97     S.FFDiag(Loc);
98     return;
99   }
100 
101   if (const auto *VarD = dyn_cast<VarDecl>(VD);
102       VarD && VarD->getType().isConstQualified() &&
103       !VarD->getAnyInitializer()) {
104     diagnoseMissingInitializer(S, OpPC, VD);
105     return;
106   }
107 
108   // Rather random, but this is to match the diagnostic output of the current
109   // interpreter.
110   if (isa<ObjCIvarDecl>(VD))
111     return;
112 
113   if (VD->getType()->isIntegralOrEnumerationType()) {
114     S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
115     S.Note(VD->getLocation(), diag::note_declared_at);
116     return;
117   }
118 
119   S.FFDiag(Loc,
120            S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
121                                        : diag::note_constexpr_ltor_non_integral,
122            1)
123       << VD << VD->getType();
124   S.Note(VD->getLocation(), diag::note_declared_at);
125 }
126 
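/// Check that \p Ptr refers to the currently active member of its enclosing
/// union(s). Accesses to inactive union members are diagnosed here.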
127 static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
128                         AccessKinds AK) {
129   if (Ptr.isActive())
130     return true;
131 
132   assert(Ptr.inUnion());
133   assert(Ptr.isField() && Ptr.getField());
134 
135   Pointer U = Ptr.getBase();
136   Pointer C = Ptr;
137   while (!U.isRoot() && U.inUnion() && !U.isActive()) {
138     if (U.getField())
139       C = U;
140     U = U.getBase();
141   }
142   assert(C.isField());
143 
144   // Get the inactive field descriptor.
145   const FieldDecl *InactiveField = C.getField();
146   assert(InactiveField);
147 
148   // Consider:
149   // union U {
150   //   struct {
151   //     int x;
152   //     int y;
153   //   } a;
154   // }
155   //
156   // When activating x, we will also activate a. If we now try to read
157   // from y, we will get to CheckActive, because y is not active. In that
158   // case, our U will be a (not a union). We return here and let later code
159   // handle this.
160   if (!U.getFieldDesc()->isUnion())
161     return true;
162 
163   // Find the active field of the union.
164   const Record *R = U.getRecord();
165   assert(R && R->isUnion() && "Not a union");
166 
167   const FieldDecl *ActiveField = nullptr;
168   for (const Record::Field &F : R->fields()) {
169     const Pointer &Field = U.atField(F.Offset);
170     if (Field.isActive()) {
171       ActiveField = Field.getField();
172       break;
173     }
174   }
175 
176   const SourceInfo &Loc = S.Current->getSource(OpPC);
177   S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
178       << AK << InactiveField << !ActiveField << ActiveField;
179   return false;
180 }
181 
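/// Check that a temporary with static storage duration may be accessed. Such a
/// temporary is only accessible if it is usable in constant expressions or was
/// created during the current evaluation.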
182 static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
183                            AccessKinds AK) {
184   if (auto ID = Ptr.getDeclID()) {
185     if (!Ptr.isStaticTemporary())
186       return true;
187 
188     const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
189         Ptr.getDeclDesc()->asExpr());
190     if (!MTE)
191       return true;
192 
193     // FIXME(perf): Since we do this check on every Load from a static
194     // temporary, it might make sense to cache the value of the
195     // isUsableInConstantExpressions call.
196     if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
197         Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
198       const SourceInfo &E = S.Current->getSource(OpPC);
199       S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
200       S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
201       return false;
202     }
203   }
204   return true;
205 }
206 
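/// Check that a global may be modified. Only the global belonging to the
/// declaration currently being evaluated is mutable.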
207 static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
208   if (auto ID = Ptr.getDeclID()) {
209     if (!Ptr.isStatic())
210       return true;
211 
212     if (S.P.getCurrentDecl() == ID)
213       return true;
214 
215     S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
216     return false;
217   }
218   return true;
219 }
220 
221 namespace clang {
222 namespace interp {
223 static void popArg(InterpState &S, const Expr *Arg) {
224   PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
225   TYPE_SWITCH(Ty, S.Stk.discard<T>());
226 }
227 
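/// Remove the arguments of a function call from the stack again, e.g. when the
/// call itself cannot be performed and the callee never gets to clean them up.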
228 void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
229                               const Function *Func) {
230   assert(S.Current);
231   assert(Func);
232 
233   if (Func->isUnevaluatedBuiltin())
234     return;
235 
236   // Some builtin functions require us to only look at the call site, since
237   // the classified parameter types do not match.
238   if (unsigned BID = Func->getBuiltinID();
239       BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
240     const auto *CE =
241         cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
242     for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
243       const Expr *A = CE->getArg(I);
244       popArg(S, A);
245     }
246     return;
247   }
248 
249   if (S.Current->Caller && Func->isVariadic()) {
250     // The CallExpr we're looking for is at the return PC of the current
251     // function, i.e. in the caller.
252     // This code path should be executed very rarely.
253     unsigned NumVarArgs;
254     const Expr *const *Args = nullptr;
255     unsigned NumArgs = 0;
256     const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
257     if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
258       Args = CE->getArgs();
259       NumArgs = CE->getNumArgs();
260     } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
261       Args = CE->getArgs();
262       NumArgs = CE->getNumArgs();
263     } else
264       assert(false && "Can't get arguments from that expression type");
265 
266     assert(NumArgs >= Func->getNumWrittenParams());
267     NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
268                             isa<CXXOperatorCallExpr>(CallSite));
269     for (unsigned I = 0; I != NumVarArgs; ++I) {
270       const Expr *A = Args[NumArgs - 1 - I];
271       popArg(S, A);
272     }
273   }
274 
275   // And in any case, remove the fixed parameters (the non-variadic ones)
276   // at the end.
277   for (PrimType Ty : Func->args_reverse())
278     TYPE_SWITCH(Ty, S.Stk.discard<T>());
279 }
280 
281 bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
282   if (!Ptr.isExtern())
283     return true;
284 
285   if (Ptr.isInitialized() ||
286       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
287     return true;
288 
289   if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
290     const auto *VD = Ptr.getDeclDesc()->asValueDecl();
291     diagnoseNonConstVariable(S, OpPC, VD);
292   }
293   return false;
294 }
295 
296 bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
297   if (!Ptr.isUnknownSizeArray())
298     return true;
299   const SourceInfo &E = S.Current->getSource(OpPC);
300   S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
301   return false;
302 }
303 
304 bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
305                AccessKinds AK) {
306   if (Ptr.isZero()) {
307     const auto &Src = S.Current->getSource(OpPC);
308 
309     if (Ptr.isField())
310       S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
311     else
312       S.FFDiag(Src, diag::note_constexpr_access_null) << AK;
313 
314     return false;
315   }
316 
317   if (!Ptr.isLive()) {
318     const auto &Src = S.Current->getSource(OpPC);
319 
320     if (Ptr.isDynamic()) {
321       S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
322     } else {
323       bool IsTemp = Ptr.isTemporary();
324       S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;
325 
326       if (IsTemp)
327         S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
328       else
329         S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
330     }
331 
332     return false;
333   }
334 
335   return true;
336 }
337 
338 bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
339   assert(Desc);
340 
341   const auto *D = Desc->asVarDecl();
342   if (!D || !D->hasGlobalStorage())
343     return true;
344 
345   if (D == S.EvaluatingDecl)
346     return true;
347 
348   if (D->isConstexpr())
349     return true;
350 
351   QualType T = D->getType();
352   bool IsConstant = T.isConstant(S.getASTContext());
353   if (T->isIntegralOrEnumerationType()) {
354     if (!IsConstant) {
355       diagnoseNonConstVariable(S, OpPC, D);
356       return false;
357     }
358     return true;
359   }
360 
361   if (IsConstant) {
362     if (S.getLangOpts().CPlusPlus) {
363       S.CCEDiag(S.Current->getLocation(OpPC),
364                 S.getLangOpts().CPlusPlus11
365                     ? diag::note_constexpr_ltor_non_constexpr
366                     : diag::note_constexpr_ltor_non_integral,
367                 1)
368           << D << T;
369       S.Note(D->getLocation(), diag::note_declared_at);
370     } else {
371       S.CCEDiag(S.Current->getLocation(OpPC));
372     }
373     return true;
374   }
375 
376   if (T->isPointerOrReferenceType()) {
377     if (!T->getPointeeType().isConstant(S.getASTContext()) ||
378         !S.getLangOpts().CPlusPlus11) {
379       diagnoseNonConstVariable(S, OpPC, D);
380       return false;
381     }
382     return true;
383   }
384 
385   diagnoseNonConstVariable(S, OpPC, D);
386   return false;
387 }
388 
389 static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
390   if (!Ptr.isBlockPointer())
391     return true;
392   return CheckConstant(S, OpPC, Ptr.getDeclDesc());
393 }
394 
395 bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
396                CheckSubobjectKind CSK) {
397   if (!Ptr.isZero())
398     return true;
399   const SourceInfo &Loc = S.Current->getSource(OpPC);
400   S.FFDiag(Loc, diag::note_constexpr_null_subobject)
401       << CSK << S.Current->getRange(OpPC);
402 
403   return false;
404 }
405 
406 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
407                 AccessKinds AK) {
408   if (!Ptr.isOnePastEnd())
409     return true;
410   const SourceInfo &Loc = S.Current->getSource(OpPC);
411   S.FFDiag(Loc, diag::note_constexpr_access_past_end)
412       << AK << S.Current->getRange(OpPC);
413   return false;
414 }
415 
416 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
417                 CheckSubobjectKind CSK) {
418   if (!Ptr.isElementPastEnd())
419     return true;
420   const SourceInfo &Loc = S.Current->getSource(OpPC);
421   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
422       << CSK << S.Current->getRange(OpPC);
423   return false;
424 }
425 
426 bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
427                     CheckSubobjectKind CSK) {
428   if (!Ptr.isOnePastEnd())
429     return true;
430 
431   const SourceInfo &Loc = S.Current->getSource(OpPC);
432   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
433       << CSK << S.Current->getRange(OpPC);
434   return false;
435 }
436 
437 bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
438                    uint32_t Offset) {
439   uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
440   uint32_t PtrOffset = Ptr.getByteOffset();
441 
442   // We subtract Offset from PtrOffset. The result must be at least
443   // MinOffset.
444   if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
445     return true;
446 
447   const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
448   QualType TargetQT = E->getType()->getPointeeType();
449   QualType MostDerivedQT = Ptr.getDeclPtr().getType();
450 
451   S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
452       << MostDerivedQT << TargetQT;
453 
454   return false;
455 }
456 
457 bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
458   assert(Ptr.isLive() && "Pointer is not live");
459   if (!Ptr.isConst() || Ptr.isMutable())
460     return true;
461 
462   // The This pointer is writable in constructors and destructors,
463   // even if isConst() returns true.
464   // TODO(perf): We could be hitting this code path quite a lot in complex
465   // constructors. Is there a better way to do this?
466   if (S.Current->getFunction()) {
467     for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
468       if (const Function *Func = Frame->getFunction();
469           Func && (Func->isConstructor() || Func->isDestructor()) &&
470           Ptr.block() == Frame->getThis().block()) {
471         return true;
472       }
473     }
474   }
475 
476   if (!Ptr.isBlockPointer())
477     return false;
478 
479   const QualType Ty = Ptr.getType();
480   const SourceInfo &Loc = S.Current->getSource(OpPC);
481   S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
482   return false;
483 }
484 
485 bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
486   assert(Ptr.isLive() && "Pointer is not live");
487   if (!Ptr.isMutable())
488     return true;
489 
490   // From C++14 onwards, it is permitted to read a mutable member whose
491   // lifetime began within the evaluation.
492   if (S.getLangOpts().CPlusPlus14 &&
493       Ptr.block()->getEvalID() == S.Ctx.getEvalID())
494     return true;
495 
496   const SourceInfo &Loc = S.Current->getSource(OpPC);
497   const FieldDecl *Field = Ptr.getField();
498   S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
499   S.Note(Field->getLocation(), diag::note_declared_at);
500   return false;
501 }
502 
503 bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
504                    AccessKinds AK) {
505   assert(Ptr.isLive());
506 
507   // FIXME: This check here might be kinda expensive. Maybe it would be better
508   // to have another field in InlineDescriptor for this?
509   if (!Ptr.isBlockPointer())
510     return true;
511 
512   QualType PtrType = Ptr.getType();
513   if (!PtrType.isVolatileQualified())
514     return true;
515 
516   const SourceInfo &Loc = S.Current->getSource(OpPC);
517   if (S.getLangOpts().CPlusPlus)
518     S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
519   else
520     S.FFDiag(Loc);
521   return false;
522 }
523 
524 bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
525                       AccessKinds AK) {
526   assert(Ptr.isLive());
527 
528   if (Ptr.isInitialized())
529     return true;
530 
531   if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
532       VD && VD->hasGlobalStorage()) {
533     const SourceInfo &Loc = S.Current->getSource(OpPC);
534     if (VD->getAnyInitializer()) {
535       S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
536       S.Note(VD->getLocation(), diag::note_declared_at);
537     } else {
538       diagnoseMissingInitializer(S, OpPC, VD);
539     }
540     return false;
541   }
542 
543   if (!S.checkingPotentialConstantExpression()) {
544     S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
545         << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
546   }
547   return false;
548 }
549 
550 bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
551   if (Ptr.isInitialized())
552     return true;
553 
554   assert(S.getLangOpts().CPlusPlus);
555   const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
556   if ((!VD->hasConstantInitialization() &&
557        VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
558       (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
559        !VD->hasICEInitializer(S.getASTContext()))) {
560     const SourceInfo &Loc = S.Current->getSource(OpPC);
561     S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
562     S.Note(VD->getLocation(), diag::note_declared_at);
563   }
564   return false;
565 }
566 
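/// Check that \p Ptr does not refer to a declaration marked as weak; the value
/// of a weak variable is never known at compile time.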
567 static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
568   if (!Ptr.isWeak())
569     return true;
570 
571   const auto *VD = Ptr.getDeclDesc()->asVarDecl();
572   assert(VD);
573   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
574       << VD;
575   S.Note(VD->getLocation(), diag::note_declared_at);
576 
577   return false;
578 }
579 
580 bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
581                AccessKinds AK) {
582   if (!CheckLive(S, OpPC, Ptr, AK))
583     return false;
584   if (!CheckConstant(S, OpPC, Ptr))
585     return false;
586   if (!CheckDummy(S, OpPC, Ptr, AK))
587     return false;
588   if (!CheckExtern(S, OpPC, Ptr))
589     return false;
590   if (!CheckRange(S, OpPC, Ptr, AK))
591     return false;
592   if (!CheckActive(S, OpPC, Ptr, AK))
593     return false;
594   if (!CheckInitialized(S, OpPC, Ptr, AK))
595     return false;
596   if (!CheckTemporary(S, OpPC, Ptr, AK))
597     return false;
598   if (!CheckWeak(S, OpPC, Ptr))
599     return false;
600   if (!CheckMutable(S, OpPC, Ptr))
601     return false;
602   if (!CheckVolatile(S, OpPC, Ptr, AK))
603     return false;
604   return true;
605 }
606 
607 /// This is not used by any of the opcodes directly. It's used by
608 /// EvalEmitter to do the final lvalue-to-rvalue conversion.
609 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
610   if (!CheckLive(S, OpPC, Ptr, AK_Read))
611     return false;
612   if (!CheckConstant(S, OpPC, Ptr))
613     return false;
614 
615   if (!CheckDummy(S, OpPC, Ptr, AK_Read))
616     return false;
617   if (!CheckExtern(S, OpPC, Ptr))
618     return false;
619   if (!CheckRange(S, OpPC, Ptr, AK_Read))
620     return false;
621   if (!CheckActive(S, OpPC, Ptr, AK_Read))
622     return false;
623   if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
624     return false;
625   if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
626     return false;
627   if (!CheckWeak(S, OpPC, Ptr))
628     return false;
629   if (!CheckMutable(S, OpPC, Ptr))
630     return false;
631   return true;
632 }
633 
634 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
635   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
636     return false;
637   if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
638     return false;
639   if (!CheckExtern(S, OpPC, Ptr))
640     return false;
641   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
642     return false;
643   if (!CheckGlobal(S, OpPC, Ptr))
644     return false;
645   if (!CheckConst(S, OpPC, Ptr))
646     return false;
647   return true;
648 }
649 
650 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
651   if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
652     return false;
653   if (!Ptr.isDummy()) {
654     if (!CheckExtern(S, OpPC, Ptr))
655       return false;
656     if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
657       return false;
658   }
659   return true;
660 }
661 
662 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
663   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
664     return false;
665   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
666     return false;
667   return true;
668 }
669 
670 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
671 
672   if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
673     const SourceLocation &Loc = S.Current->getLocation(OpPC);
674     S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
675     return false;
676   }
677 
678   if (F->isConstexpr() && F->hasBody() &&
679       (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
680     return true;
681 
682   // Implicitly constexpr.
683   if (F->isLambdaStaticInvoker())
684     return true;
685 
686   const SourceLocation &Loc = S.Current->getLocation(OpPC);
687   if (S.getLangOpts().CPlusPlus11) {
688     const FunctionDecl *DiagDecl = F->getDecl();
689 
690     // Invalid decls have been diagnosed before.
691     if (DiagDecl->isInvalidDecl())
692       return false;
693 
694     // If this function is not constexpr because it is an inherited
695     // non-constexpr constructor, diagnose that directly.
696     const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
697     if (CD && CD->isInheritingConstructor()) {
698       const auto *Inherited = CD->getInheritedConstructor().getConstructor();
699       if (!Inherited->isConstexpr())
700         DiagDecl = CD = Inherited;
701     }
702 
703     // FIXME: If DiagDecl is an implicitly-declared special member function
704     // or an inheriting constructor, we should be much more explicit about why
705     // it's not constexpr.
706     if (CD && CD->isInheritingConstructor()) {
707       S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
708           << CD->getInheritedConstructor().getConstructor()->getParent();
709       S.Note(DiagDecl->getLocation(), diag::note_declared_at);
710     } else {
711       // Don't emit anything if the function isn't defined and we're checking
712       // for a constant expression. It might be defined at the point we're
713       // actually calling it.
714       bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
715       if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
716           S.checkingPotentialConstantExpression())
717         return false;
718 
719       // If the declaration is defined, declared 'constexpr' _and_ has a body,
720       // the below diagnostic doesn't add anything useful.
721       if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
722           DiagDecl->hasBody())
723         return false;
724 
725       S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
726           << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
727 
728       if (DiagDecl->getDefinition())
729         S.Note(DiagDecl->getDefinition()->getLocation(),
730                diag::note_declared_at);
731       else
732         S.Note(DiagDecl->getLocation(), diag::note_declared_at);
733     }
734   } else {
735     S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
736   }
737 
738   return false;
739 }
740 
741 bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
742   if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
743     S.FFDiag(S.Current->getSource(OpPC),
744              diag::note_constexpr_depth_limit_exceeded)
745         << S.getLangOpts().ConstexprCallDepth;
746     return false;
747   }
748 
749   return true;
750 }
751 
752 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
753   if (!This.isZero())
754     return true;
755 
756   const SourceInfo &Loc = S.Current->getSource(OpPC);
757 
758   bool IsImplicit = false;
759   if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
760     IsImplicit = E->isImplicit();
761 
762   if (S.getLangOpts().CPlusPlus11)
763     S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
764   else
765     S.FFDiag(Loc);
766 
767   return false;
768 }
769 
770 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
771   if (!MD->isPureVirtual())
772     return true;
773   const SourceInfo &E = S.Current->getSource(OpPC);
774   S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
775   S.Note(MD->getLocation(), diag::note_declared_at);
776   return false;
777 }
778 
779 bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
780                       APFloat::opStatus Status, FPOptions FPO) {
781   // [expr.pre]p4:
782   //   If during the evaluation of an expression, the result is not
783   //   mathematically defined [...], the behavior is undefined.
784   // FIXME: C++ rules require us to not conform to IEEE 754 here.
785   if (Result.isNan()) {
786     const SourceInfo &E = S.Current->getSource(OpPC);
787     S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
788         << /*NaN=*/true << S.Current->getRange(OpPC);
789     return S.noteUndefinedBehavior();
790   }
791 
792   // In a constant context, assume that any dynamic rounding mode or FP
793   // exception state matches the default floating-point environment.
794   if (S.inConstantContext())
795     return true;
796 
797   if ((Status & APFloat::opInexact) &&
798       FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
799     // An inexact result means that it depends on the rounding mode. If the
800     // requested mode is dynamic, the evaluation cannot be done at compile time.
801     const SourceInfo &E = S.Current->getSource(OpPC);
802     S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
803     return false;
804   }
805 
806   if ((Status != APFloat::opOK) &&
807       (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
808        FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
809        FPO.getAllowFEnvAccess())) {
810     const SourceInfo &E = S.Current->getSource(OpPC);
811     S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
812     return false;
813   }
814 
815   if ((Status & APFloat::opStatus::opInvalidOp) &&
816       FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
817     const SourceInfo &E = S.Current->getSource(OpPC);
818     // There is no usefully definable result.
819     S.FFDiag(E);
820     return false;
821   }
822 
823   return true;
824 }
825 
826 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
827   if (S.getLangOpts().CPlusPlus20)
828     return true;
829 
830   const SourceInfo &E = S.Current->getSource(OpPC);
831   S.CCEDiag(E, diag::note_constexpr_new);
832   return true;
833 }
834 
835 bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
836                          DynamicAllocator::Form AllocForm,
837                          DynamicAllocator::Form DeleteForm, const Descriptor *D,
838                          const Expr *NewExpr) {
839   if (AllocForm == DeleteForm)
840     return true;
841 
842   QualType TypeToDiagnose;
843   // We need to shuffle things around a bit here to get a better diagnostic,
844   // because the expression we allocated the block for was of type int*,
845   // but we want to get the array size right.
846   if (D->isArray()) {
847     QualType ElemQT = D->getType()->getPointeeType();
848     TypeToDiagnose = S.getASTContext().getConstantArrayType(
849         ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
850         nullptr, ArraySizeModifier::Normal, 0);
851   } else
852     TypeToDiagnose = D->getType()->getPointeeType();
853 
854   const SourceInfo &E = S.Current->getSource(OpPC);
855   S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
856       << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
857       << TypeToDiagnose;
858   S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
859       << NewExpr->getSourceRange();
860   return false;
861 }
862 
863 bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
864                        const Pointer &Ptr) {
865   // The two sources we currently allow are new expressions and
866   // __builtin_operator_new calls.
867   if (isa_and_nonnull<CXXNewExpr>(Source))
868     return true;
869   if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
870       CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
871     return true;
872 
873   // Whatever this is, we didn't heap allocate it.
874   const SourceInfo &Loc = S.Current->getSource(OpPC);
875   S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
876       << Ptr.toDiagnosticString(S.getASTContext());
877 
878   if (Ptr.isTemporary())
879     S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
880   else
881     S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
882   return false;
883 }
884 
885 /// We already know the given DeclRefExpr is invalid for some reason,
886 /// now figure out why and print appropriate diagnostics.
887 bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
888   const ValueDecl *D = DR->getDecl();
889   return diagnoseUnknownDecl(S, OpPC, D);
890 }
891 
892 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
893                 AccessKinds AK) {
894   if (!Ptr.isDummy())
895     return true;
896 
897   const Descriptor *Desc = Ptr.getDeclDesc();
898   const ValueDecl *D = Desc->asValueDecl();
899   if (!D)
900     return false;
901 
902   if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
903     return diagnoseUnknownDecl(S, OpPC, D);
904 
905   assert(AK == AK_Assign);
906   if (S.getLangOpts().CPlusPlus14) {
907     const SourceInfo &E = S.Current->getSource(OpPC);
908     S.FFDiag(E, diag::note_constexpr_modify_global);
909   }
910   return false;
911 }
912 
913 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
914                       const CallExpr *CE, unsigned ArgSize) {
915   auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
916   auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
917   unsigned Offset = 0;
918   unsigned Index = 0;
919   for (const Expr *Arg : Args) {
920     if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
921       const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
922       if (ArgPtr.isZero()) {
923         const SourceLocation &Loc = S.Current->getLocation(OpPC);
924         S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
925         return false;
926       }
927     }
928 
929     Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
930     ++Index;
931   }
932   return true;
933 }
934 
935 // FIXME: This is similar to code we already have in Compiler.cpp.
936 // I think it makes sense to instead add the field and base destruction stuff
937 // to the destructor Function itself. Then destroying a record would really
938 // _just_ be calling its destructor. That would also help with the diagnostic
939 // difference when the destructor or a field/base fails.
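/// Run the destructor of the record described by \p Desc at \p BasePtr, if it
/// has a non-trivial one.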
940 static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
941                                 const Pointer &BasePtr,
942                                 const Descriptor *Desc) {
943   assert(Desc->isRecord());
944   const Record *R = Desc->ElemRecord;
945   assert(R);
946 
947   if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
948     const SourceInfo &Loc = S.Current->getSource(OpPC);
949     S.FFDiag(Loc, diag::note_constexpr_double_destroy);
950     return false;
951   }
952 
953   // Destructor of this record.
954   if (const CXXDestructorDecl *Dtor = R->getDestructor();
955       Dtor && !Dtor->isTrivial()) {
956     const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
957     if (!DtorFunc)
958       return false;
959 
960     S.Stk.push<Pointer>(BasePtr);
961     if (!Call(S, OpPC, DtorFunc, 0))
962       return false;
963   }
964   return true;
965 }
966 
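/// Run the destructors for the contents of block \p B: nothing to do for
/// primitives, otherwise destroy the record or each record of a composite
/// array.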
967 static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
968   assert(B);
969   const Descriptor *Desc = B->getDescriptor();
970 
971   if (Desc->isPrimitive() || Desc->isPrimitiveArray())
972     return true;
973 
974   assert(Desc->isRecord() || Desc->isCompositeArray());
975 
976   if (Desc->isCompositeArray()) {
977     const Descriptor *ElemDesc = Desc->ElemDesc;
978     assert(ElemDesc->isRecord());
979 
980     Pointer RP(const_cast<Block *>(B));
981     for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
982       if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
983         return false;
984     }
985     return true;
986   }
987 
988   assert(Desc->isRecord());
989   return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
990 }
991 
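/// Implements a delete expression: check the deleted pointer, run any
/// destructors and deallocate the dynamic block, diagnosing mismatched
/// new/delete forms along the way.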
992 bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
993           bool IsGlobalDelete) {
994   if (!CheckDynamicMemoryAllocation(S, OpPC))
995     return false;
996 
997   const Expr *Source = nullptr;
998   const Block *BlockToDelete = nullptr;
999   {
1000     // Extra scope so that this Pointer doesn't point to the block anymore
1001     // when we destroy the block below.
1002     Pointer Ptr = S.Stk.pop<Pointer>();
1003 
1004     // Deleting nullptr is always fine.
1005     if (Ptr.isZero())
1006       return true;
1007 
1008     // Remove base casts.
1009     while (Ptr.isBaseClass())
1010       Ptr = Ptr.getBase();
1011 
1012     if (!Ptr.isRoot() || Ptr.isOnePastEnd() || Ptr.isArrayElement()) {
1013       const SourceInfo &Loc = S.Current->getSource(OpPC);
1014       S.FFDiag(Loc, diag::note_constexpr_delete_subobject)
1015           << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd();
1016       return false;
1017     }
1018 
1019     Source = Ptr.getDeclDesc()->asExpr();
1020     BlockToDelete = Ptr.block();
1021 
1022     if (!CheckDeleteSource(S, OpPC, Source, Ptr))
1023       return false;
1024 
1025     // For a class type with a virtual destructor, the selected operator delete
1026     // is the one looked up when building the destructor.
1027     QualType AllocType = Ptr.getType();
1028     if (!DeleteIsArrayForm && !IsGlobalDelete) {
1029       auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
1030         if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1031           if (const CXXDestructorDecl *DD = RD->getDestructor())
1032             return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
1033         return nullptr;
1034       };
1035 
1037       if (const FunctionDecl *VirtualDelete =
1038               getVirtualOperatorDelete(AllocType);
1039           VirtualDelete &&
1040           !VirtualDelete->isReplaceableGlobalAllocationFunction()) {
1041         S.FFDiag(S.Current->getSource(OpPC),
1042                  diag::note_constexpr_new_non_replaceable)
1043             << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
1044         return false;
1045       }
1046     }
1047   }
1048   assert(Source);
1049   assert(BlockToDelete);
1050 
1051   // Invoke destructors before deallocating the memory.
1052   if (!RunDestructors(S, OpPC, BlockToDelete))
1053     return false;
1054 
1055   DynamicAllocator &Allocator = S.getAllocator();
1056   const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
1057   std::optional<DynamicAllocator::Form> AllocForm =
1058       Allocator.getAllocationForm(Source);
1059 
1060   if (!Allocator.deallocate(Source, BlockToDelete, S)) {
1061     // Nothing has been deallocated; this must be a double-delete.
1062     const SourceInfo &Loc = S.Current->getSource(OpPC);
1063     S.FFDiag(Loc, diag::note_constexpr_double_delete);
1064     return false;
1065   }
1066 
1067   assert(AllocForm);
1068   DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
1069                                           ? DynamicAllocator::Form::Array
1070                                           : DynamicAllocator::Form::NonArray;
1071   return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
1072                              Source);
1073 }
1074 
1075 void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
1076                        const APSInt &Value) {
1077   llvm::APInt Min;
1078   llvm::APInt Max;
1079 
1080   if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
1081     return;
1082 
1083   ED->getValueRange(Max, Min);
1084   --Max;
1085 
1086   if (ED->getNumNegativeBits() &&
1087       (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
1088     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1089     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1090         << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
1091         << ED;
1092   } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
1093     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1094     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1095         << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
1096         << ED;
1097   }
1098 }
1099 
1100 bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
1101   assert(T);
1102   assert(!S.getLangOpts().CPlusPlus23);
1103 
1104   // C++1y: A constant initializer for an object o [...] may also invoke
1105   // constexpr constructors for o and its subobjects even if those objects
1106   // are of non-literal class types.
1107   //
1108   // C++11 missed this detail for aggregates, so classes like this:
1109   //   struct foo_t { union { int i; volatile int j; } u; };
1110   // are not (obviously) initializable like so:
1111   //   __attribute__((__require_constant_initialization__))
1112   //   static const foo_t x = {{0}};
1113   // because "i" is a subobject with non-literal initialization (due to the
1114   // volatile member of the union). See:
1115   //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
1116   // Therefore, we use the C++1y behavior.
1117 
1118   if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
1119       S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
1120     return true;
1121   }
1122 
1123   const Expr *E = S.Current->getExpr(OpPC);
1124   if (S.getLangOpts().CPlusPlus11)
1125     S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
1126   else
1127     S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
1128   return false;
1129 }
1130 
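/// Check that a constructor may be invoked for the object \p ThisPtr points
/// to; constructors of classes with virtual bases are rejected.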
1131 static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1132                              const Pointer &ThisPtr) {
1133   assert(Func->isConstructor());
1134 
1135   const Descriptor *D = ThisPtr.getFieldDesc();
1136 
1137   // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1138   // subobject of a composite array.
1139   if (!D->ElemRecord)
1140     return true;
1141 
1142   if (D->ElemRecord->getNumVirtualBases() == 0)
1143     return true;
1144 
1145   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
1146       << Func->getParentDecl();
1147   return false;
1148 }
1149 
1150 bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
1151              uint32_t VarArgSize) {
1152   if (Func->hasThisPointer()) {
1153     size_t ArgSize = Func->getArgSize() + VarArgSize;
1154     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1155     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1156 
1157     // If the current function is a lambda static invoker and
1158     // the function we're about to call is a lambda call operator,
1159     // skip the CheckInvoke, since the ThisPtr is a null pointer
1160     // anyway.
1161     if (!(S.Current->getFunction() &&
1162           S.Current->getFunction()->isLambdaStaticInvoker() &&
1163           Func->isLambdaCallOperator())) {
1164       if (!CheckInvoke(S, OpPC, ThisPtr))
1165         return false;
1166     }
1167 
1168     if (S.checkingPotentialConstantExpression())
1169       return false;
1170   }
1171 
1172   if (!CheckCallable(S, OpPC, Func))
1173     return false;
1174 
1175   if (!CheckCallDepth(S, OpPC))
1176     return false;
1177 
1178   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1179   InterpFrame *FrameBefore = S.Current;
1180   S.Current = NewFrame.get();
1181 
1182   APValue CallResult;
1183   // Note that we cannot assert(CallResult.hasValue()) here since
1184   // Ret() above only sets the APValue if the current frame doesn't
1185   // have a caller set.
1186   if (Interpret(S, CallResult)) {
1187     NewFrame.release(); // Frame was delete'd already.
1188     assert(S.Current == FrameBefore);
1189     return true;
1190   }
1191 
1192   // Interpreting the function failed somehow. Reset to
1193   // previous state.
1194   S.Current = FrameBefore;
1195   return false;
1196 }
1197 
1198 bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
1199           uint32_t VarArgSize) {
1200   assert(Func);
1201   auto cleanup = [&]() -> bool {
1202     cleanupAfterFunctionCall(S, OpPC, Func);
1203     return false;
1204   };
1205 
1206   if (Func->hasThisPointer()) {
1207     size_t ArgSize = Func->getArgSize() + VarArgSize;
1208     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1209 
1210     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1211 
1212     // If the current function is a lambda static invoker and
1213     // the function we're about to call is a lambda call operator,
1214     // skip the CheckInvoke, since the ThisPtr is a null pointer
1215     // anyway.
1216     if (S.Current->getFunction() &&
1217         S.Current->getFunction()->isLambdaStaticInvoker() &&
1218         Func->isLambdaCallOperator()) {
1219       assert(ThisPtr.isZero());
1220     } else {
1221       if (!CheckInvoke(S, OpPC, ThisPtr))
1222         return cleanup();
1223     }
1224 
1225     if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
1226       return false;
1227   }
1228 
1229   if (!CheckCallable(S, OpPC, Func))
1230     return cleanup();
1231 
1232   // FIXME: The isConstructor() check here is not always right. The current
1233   // constant evaluator is somewhat inconsistent in when it allows a function
1234   // call when checking for a constant expression.
1235   if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
1236       !Func->isConstructor())
1237     return cleanup();
1238 
1239   if (!CheckCallDepth(S, OpPC))
1240     return cleanup();
1241 
1242   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1243   InterpFrame *FrameBefore = S.Current;
1244   S.Current = NewFrame.get();
1245 
1246   InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
1247   APValue CallResult;
1248   // Note that we cannot assert(CallResult.hasValue()) here since
1249   // Ret() above only sets the APValue if the current frame doesn't
1250   // have a caller set.
1251   if (Interpret(S, CallResult)) {
1252     NewFrame.release(); // Frame was delete'd already.
1253     assert(S.Current == FrameBefore);
1254     return true;
1255   }
1256 
1257   // Interpreting the function failed somehow. Reset to
1258   // previous state.
1259   S.Current = FrameBefore;
1260   return false;
1261 }
1262 
1263 bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
1264               uint32_t VarArgSize) {
1265   assert(Func->hasThisPointer());
1266   assert(Func->isVirtual());
1267   size_t ArgSize = Func->getArgSize() + VarArgSize;
1268   size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1269   Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1270 
1271   const CXXRecordDecl *DynamicDecl = nullptr;
1272   {
1273     Pointer TypePtr = ThisPtr;
1274     while (TypePtr.isBaseClass())
1275       TypePtr = TypePtr.getBase();
1276 
1277     QualType DynamicType = TypePtr.getType();
1278     if (DynamicType->isPointerType() || DynamicType->isReferenceType())
1279       DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
1280     else
1281       DynamicDecl = DynamicType->getAsCXXRecordDecl();
1282   }
1283   assert(DynamicDecl);
1284 
1285   const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
1286   const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
1287   const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
1288       DynamicDecl, StaticDecl, InitialFunction);
1289 
1290   if (Overrider != InitialFunction) {
1291     // DR1872: An instantiated virtual constexpr function can't be called in a
1292     // constant expression (prior to C++20). We can still constant-fold such a
1293     // call.
1294     if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
1295       const Expr *E = S.Current->getExpr(OpPC);
1296       S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
1297     }
1298 
1299     Func = S.getContext().getOrCreateFunction(Overrider);
1300 
1301     const CXXRecordDecl *ThisFieldDecl =
1302         ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
1303     if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
1304       // If the function we call is further DOWN the hierarchy than the
1305       // FieldDesc of our pointer, go up the hierarchy of this field as far
1306       // as we can.
1307       while (ThisPtr.isBaseClass())
1308         ThisPtr = ThisPtr.getBase();
1309     }
1310   }
1311 
1312   if (!Call(S, OpPC, Func, VarArgSize))
1313     return false;
1314 
1315   // Covariant return types. The return type of Overrider is a pointer
1316   // or reference to a class type.
1317   if (Overrider != InitialFunction &&
1318       Overrider->getReturnType()->isPointerOrReferenceType() &&
1319       InitialFunction->getReturnType()->isPointerOrReferenceType()) {
1320     QualType OverriderPointeeType =
1321         Overrider->getReturnType()->getPointeeType();
1322     QualType InitialPointeeType =
1323         InitialFunction->getReturnType()->getPointeeType();
1324     // We've called Overrider above, but calling code expects us to return what
1325     // InitialFunction returned. According to the rules for covariant return
1326     // types, what InitialFunction returns needs to be a base class of what
1327     // Overrider returns. So, we need to do an upcast here.
1328     unsigned Offset = S.getContext().collectBaseOffset(
1329         InitialPointeeType->getAsRecordDecl(),
1330         OverriderPointeeType->getAsRecordDecl());
1331     return GetPtrBasePop(S, OpPC, Offset);
1332   }
1333 
1334   return true;
1335 }
1336 
1337 bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
1338             const CallExpr *CE, uint32_t BuiltinID) {
1339   if (S.checkingPotentialConstantExpression())
1340     return false;
1341   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);
1342 
1343   InterpFrame *FrameBefore = S.Current;
1344   S.Current = NewFrame.get();
1345 
1346   if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
1347     NewFrame.release();
1348     return true;
1349   }
1350   S.Current = FrameBefore;
1351   return false;
1352 }
1353 
1354 bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
1355              const CallExpr *CE) {
1356   const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();
1357 
1358   const Function *F = FuncPtr.getFunction();
1359   if (!F) {
1360     const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
1361     S.FFDiag(E, diag::note_constexpr_null_callee)
1362         << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
1363     return false;
1364   }
1365 
1366   if (!FuncPtr.isValid() || !F->getDecl())
1367     return Invalid(S, OpPC);
1368 
1369   assert(F);
1370 
1371   // This happens when the call expression has been cast to
1372   // something else, but we don't support that.
1373   if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
1374       S.Ctx.classify(CE->getType()))
1375     return false;
1376 
1377   // Check argument nullability state.
1378   if (F->hasNonNullAttr()) {
1379     if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
1380       return false;
1381   }
1382 
1383   assert(ArgSize >= F->getWrittenArgSize());
1384   uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();
1385 
1386   // We need to do this explicitly here since we don't have the necessary
1387   // information to do it automatically.
1388   if (F->isThisPointerExplicit())
1389     VarArgSize -= align(primSize(PT_Ptr));
1390 
1391   if (F->isVirtual())
1392     return CallVirt(S, OpPC, F, VarArgSize);
1393 
1394   return Call(S, OpPC, F, VarArgSize);
1395 }
1396 
1397 bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
1398                           std::optional<uint64_t> ArraySize) {
1399   const Pointer &Ptr = S.Stk.peek<Pointer>();
1400 
1401   if (!CheckStore(S, OpPC, Ptr))
1402     return false;
1403 
1404   if (!InvalidNewDeleteExpr(S, OpPC, E))
1405     return false;
1406 
1407   const auto *NewExpr = cast<CXXNewExpr>(E);
1408   QualType StorageType = Ptr.getType();
1409 
1410   if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
1411       StorageType->isPointerType()) {
1412     // FIXME: Are there other cases where this is a problem?
1413     StorageType = StorageType->getPointeeType();
1414   }
1415 
1416   const ASTContext &ASTCtx = S.getASTContext();
1417   QualType AllocType;
1418   if (ArraySize) {
1419     AllocType = ASTCtx.getConstantArrayType(
1420         NewExpr->getAllocatedType(),
1421         APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
1422         ArraySizeModifier::Normal, 0);
1423   } else {
1424     AllocType = NewExpr->getAllocatedType();
1425   }
1426 
1427   unsigned StorageSize = 1;
1428   unsigned AllocSize = 1;
1429   if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
1430     AllocSize = CAT->getZExtSize();
1431   if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
1432     StorageSize = CAT->getZExtSize();
1433 
1434   if (AllocSize > StorageSize ||
1435       !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
1436                              ASTCtx.getBaseElementType(StorageType))) {
1437     S.FFDiag(S.Current->getLocation(OpPC),
1438              diag::note_constexpr_placement_new_wrong_type)
1439         << StorageType << AllocType;
1440     return false;
1441   }
1442   return true;
1443 }
1444 
1445 bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
1446   assert(E);
1447   const auto &Loc = S.Current->getSource(OpPC);
1448 
1449   if (S.getLangOpts().CPlusPlus26)
1450     return true;
1451 
1452   if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
1453     const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
1454 
1455     if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
1456       // This is allowed pre-C++26, but only inside a std function.
1457       if (S.Current->isStdFunction())
1458         return true;
1459       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1460           << /*C++26 feature*/ 1 << E->getSourceRange();
1461     } else if (NewExpr->getNumPlacementArgs() == 1 &&
1462                !OperatorNew->isReservedGlobalPlacementOperator()) {
1463       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1464           << /*Unsupported*/ 0 << E->getSourceRange();
1465     } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
1466       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1467           << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
1468     }
1469   } else {
1470     const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
1471     const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
1472     if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
1473       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1474           << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
1475     }
1476   }
1477 
1478   return false;
1479 }
1480 
1481 bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1482                               const FixedPoint &FP) {
1483   const Expr *E = S.Current->getExpr(OpPC);
1484   if (S.checkingForUndefinedBehavior()) {
1485     S.getASTContext().getDiagnostics().Report(
1486         E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
1487         << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1488   }
1489   S.CCEDiag(E, diag::note_constexpr_overflow)
1490       << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1491   return S.noteUndefinedBehavior();
1492 }
1493 
1494 bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1495   const SourceInfo &Loc = S.Current->getSource(OpPC);
1496   S.FFDiag(Loc,
1497            diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1498       << Index;
1499   return false;
1500 }
1501 
1502 bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1503                                 const Pointer &Ptr, unsigned BitWidth) {
1504   if (Ptr.isDummy())
1505     return false;
1506 
1507   const SourceInfo &E = S.Current->getSource(OpPC);
1508   S.CCEDiag(E, diag::note_constexpr_invalid_cast)
1509       << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);
1510 
1511   if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1512     // Only allow based lvalue casts if they are lossless.
1513     if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
1514         BitWidth)
1515       return Invalid(S, OpPC);
1516   }
1517   return true;
1518 }
1519 
1520 bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1521   const Pointer &Ptr = S.Stk.pop<Pointer>();
1522 
1523   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1524     return false;
1525 
1526   S.Stk.push<IntegralAP<false>>(
1527       IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
1528   return true;
1529 }
1530 
1531 bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1532   const Pointer &Ptr = S.Stk.pop<Pointer>();
1533 
1534   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1535     return false;
1536 
1537   S.Stk.push<IntegralAP<true>>(
1538       IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
1539   return true;
1540 }
1541 
1542 // https://github.com/llvm/llvm-project/issues/102513
1543 #if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
1544 #pragma optimize("", off)
1545 #endif
1546 bool Interpret(InterpState &S, APValue &Result) {
1547   // The current stack frame when we started Interpret().
1548   // This is being used by the ops to determine whether
1549   // to return from this function and thus terminate
1550   // interpretation.
1551   const InterpFrame *StartFrame = S.Current;
1552   assert(!S.Current->isRoot());
1553   CodePtr PC = S.Current->getPC();
1554 
1555   // Empty program.
1556   if (!PC)
1557     return true;
1558 
1559   for (;;) {
1560     auto Op = PC.read<Opcode>();
1561     CodePtr OpPC = PC;
1562 
1563     switch (Op) {
1564 #define GET_INTERP
1565 #include "Opcodes.inc"
1566 #undef GET_INTERP
1567     }
1568   }
1569 }
1570 // https://github.com/llvm/llvm-project/issues/102513
1571 #if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
1572 #pragma optimize("", on)
1573 #endif
1574 
1575 } // namespace interp
1576 } // namespace clang
1577