xref: /llvm-project/clang/lib/AST/ByteCode/Interp.cpp (revision 87b6ec3be6b80f8e35d2eaea468e6bca79e79c2e)
1 //===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "Interp.h"
10 #include "Function.h"
11 #include "InterpFrame.h"
12 #include "InterpShared.h"
13 #include "InterpStack.h"
14 #include "Opcode.h"
15 #include "PrimType.h"
16 #include "Program.h"
17 #include "State.h"
18 #include "clang/AST/ASTContext.h"
19 #include "clang/AST/ASTDiagnostic.h"
20 #include "clang/AST/CXXInheritance.h"
21 #include "clang/AST/DeclObjC.h"
22 #include "clang/AST/Expr.h"
23 #include "clang/AST/ExprCXX.h"
24 #include "clang/Basic/DiagnosticSema.h"
25 #include "clang/Basic/TargetInfo.h"
26 #include "llvm/ADT/APSInt.h"
27 #include "llvm/ADT/StringExtras.h"
28 #include <limits>
29 #include <vector>
30 
31 using namespace clang;
32 using namespace clang::interp;
33 
/// Opcode handler for returning a value out of the interpreter loop.
/// The bytecode interpreter itself never materializes a result this way
/// (EvalEmitter does the final conversion), so reaching this is fatal.
static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
  llvm::report_fatal_error("Interpreter cannot return values");
}
37 
38 //===----------------------------------------------------------------------===//
39 // Jmp, Jt, Jf
40 //===----------------------------------------------------------------------===//
41 
/// Unconditional jump: advance PC by the (possibly negative) Offset.
/// Returns true so interpretation continues.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}
46 
47 static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
48   if (S.Stk.pop<bool>()) {
49     PC += Offset;
50   }
51   return true;
52 }
53 
54 static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
55   if (!S.Stk.pop<bool>()) {
56     PC += Offset;
57   }
58   return true;
59 }
60 
/// Emit a note that \p VD was accessed but its initializer is unknown,
/// followed by a note pointing at the declaration itself.
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}
67 
68 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
69                                      const ValueDecl *VD);
/// Diagnose an access to a declaration whose value the interpreter does not
/// know: a function parameter referenced outside a call, or a variable that
/// is non-const, uninitialized, or has a non-constant initializer.
/// Always returns false so callers can simply forward the result.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  if (isa<ParmVarDecl>(D)) {
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      // Pre-C++11 has no dedicated note for this; emit the generic one.
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, D);
  } else if (const auto *VD = dyn_cast<VarDecl>(D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const and initialized, but the initializer was not usable as a
      // constant expression.
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    }
  }

  return false;
}
98 
/// Diagnose a read of a variable that is not usable in a constant
/// expression, choosing among several notes to match the wording of the
/// current (tree-based) constant interpreter.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    // C mode: only the generic diagnostic is emitted.
    S.FFDiag(Loc);
    return;
  }

  // A const variable with no initializer at all: report the missing
  // initializer rather than the const-ness.
  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}
132 
/// Check that \p Ptr designates the active member of every union on its
/// access path; if not, diagnose the read/write of an inactive union member
/// for access kind \p AK and return false.
static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  // Walk up from Ptr to the nearest enclosing object that is either active
  // or the root. C tracks the child of U along the way, i.e. the outermost
  // inactive field we will name in the diagnostic.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  // !ActiveField distinguishes "no member is active" from "member X is
  // active" in the note's wording.
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}
187 
/// Check an access to a static lifetime-extended temporary
/// (MaterializeTemporaryExpr). Such a temporary may only be accessed if it
/// is usable in constant expressions or was created during the current
/// evaluation; otherwise diagnose and return false.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}
212 
213 static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
214   if (auto ID = Ptr.getDeclID()) {
215     if (!Ptr.isStatic())
216       return true;
217 
218     if (S.P.getCurrentDecl() == ID)
219       return true;
220 
221     S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
222     return false;
223   }
224   return true;
225 }
226 
227 namespace clang {
228 namespace interp {
/// Discard the value that argument expression \p Arg left on the stack,
/// using its classified primitive type (composites classify as PT_Ptr).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
233 
/// Pop all call arguments of \p Func off the interpreter stack after the
/// call returns. Handles three cases: builtins with custom typechecking
/// (pop by call-site argument types), variadic functions (pop the extra
/// variadic arguments first), and finally the fixed parameters.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  // Unevaluated builtins never had their arguments pushed.
  if (Func->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // the classified parameter types do not match.
  if (unsigned BID = Func->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    // Pop in reverse order, matching how they were pushed.
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls, the object argument is written at the call site
    // but is not a variadic argument.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}
286 
/// Check that \p Ptr does not refer to an extern declaration whose value is
/// unknown. Initialized externs and the declaration currently being
/// evaluated are allowed; otherwise diagnose (in C++, when not merely
/// checking for a potential constant expression) and return false.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}
301 
302 bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
303   if (!Ptr.isUnknownSizeArray())
304     return true;
305   const SourceInfo &E = S.Current->getSource(OpPC);
306   S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
307   return false;
308 }
309 
/// Check that \p Ptr refers to a live object: not null and not one whose
/// lifetime has ended (destroyed temporary/local or deleted allocation).
/// Diagnoses with access kind \p AK and returns false on failure.
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    // A null field subobject gets a more specific note than a plain null
    // access.
    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      // Heap allocation that has already been deleted.
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}
343 
/// Check whether the global variable described by \p Desc may be read in a
/// constant expression. Constexpr variables always may; for others the
/// outcome depends on type, const-ness and language mode, mirroring the
/// tree-based evaluator's rules and diagnostics.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  // Reading the variable whose initializer we're currently evaluating is
  // fine.
  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we
  // may only read other constexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    // Const but not constexpr: allowed, but only with a CCE diagnostic
    // (rejected in strict constant-expression contexts).
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    // Non-const pointers/references are only readable in C++11 onwards and
    // only when the pointee is const.
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}
401 
402 static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
403   if (!Ptr.isStatic() || !Ptr.isBlockPointer())
404     return true;
405   return CheckConstant(S, OpPC, Ptr.getDeclDesc());
406 }
407 
408 bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
409                CheckSubobjectKind CSK) {
410   if (!Ptr.isZero())
411     return true;
412   const SourceInfo &Loc = S.Current->getSource(OpPC);
413   S.FFDiag(Loc, diag::note_constexpr_null_subobject)
414       << CSK << S.Current->getRange(OpPC);
415 
416   return false;
417 }
418 
419 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
420                 AccessKinds AK) {
421   if (!Ptr.isOnePastEnd())
422     return true;
423   const SourceInfo &Loc = S.Current->getSource(OpPC);
424   S.FFDiag(Loc, diag::note_constexpr_access_past_end)
425       << AK << S.Current->getRange(OpPC);
426   return false;
427 }
428 
429 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
430                 CheckSubobjectKind CSK) {
431   if (!Ptr.isElementPastEnd())
432     return true;
433   const SourceInfo &Loc = S.Current->getSource(OpPC);
434   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
435       << CSK << S.Current->getRange(OpPC);
436   return false;
437 }
438 
439 bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
440                     CheckSubobjectKind CSK) {
441   if (!Ptr.isOnePastEnd())
442     return true;
443 
444   const SourceInfo &Loc = S.Current->getSource(OpPC);
445   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
446       << CSK << S.Current->getRange(OpPC);
447   return false;
448 }
449 
/// Check that a derived-to-base adjustment by \p Offset bytes stays within
/// the object \p Ptr points into (i.e. the downcast it undoes was valid);
/// diagnose an invalid downcast otherwise.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  // Byte offsets below the metadata region are not part of the object.
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}
469 
/// Check that the const-qualified object \p Ptr points to may be modified.
/// Writes through `this` are permitted while a constructor or destructor
/// for that object is on the call stack; otherwise diagnose and fail.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  // Non-block pointers fail silently; there is no type to name in a note.
  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}
497 
/// Check that a read of a `mutable` field through \p Ptr is permitted.
/// C++14 and later allow it when the object's lifetime began within the
/// current evaluation; otherwise diagnose and return false.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}
515 
/// Check that \p Ptr does not access a volatile-qualified object; such
/// accesses are never permitted in constant expressions. Diagnoses with
/// access kind \p AK and returns false on failure.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    // C mode has no dedicated note for volatile accesses.
    S.FFDiag(Loc);
  return false;
}
536 
/// Check that the object \p Ptr points to has been initialized before it is
/// accessed with kind \p AK. Globals get a tailored note (non-constant or
/// missing initializer); locals get a generic uninitialized-access note
/// unless we're merely probing for a potential constant expression.
bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && VD->hasGlobalStorage()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      // Has an initializer, but it wasn't a constant expression.
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}
562 
/// Check that the global \p Ptr refers to has been initialized. On failure,
/// a note is only emitted when the variable could plausibly have been
/// usable in a constant expression (or for the OpenCL pre-C++11 ICE case);
/// returns false either way.
bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}
579 
580 static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
581   if (!Ptr.isWeak())
582     return true;
583 
584   const auto *VD = Ptr.getDeclDesc()->asVarDecl();
585   assert(VD);
586   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
587       << VD;
588   S.Note(VD->getLocation(), diag::note_declared_at);
589 
590   return false;
591 }
592 
/// Run all checks required before loading a value through \p Ptr with
/// access kind \p AK. The order of the individual checks is deliberate; it
/// determines which diagnostic wins when several would apply.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}
619 
/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// Same checks as CheckLoad with AK_Read, except that volatile accesses are
/// not rejected here.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}
646 
/// Run all checks required before storing a value through \p Ptr
/// (AK_Assign): liveness, dummy/extern status, range, global mutability and
/// const-ness, in that diagnostic-priority order.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  return true;
}
662 
663 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
664   if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
665     return false;
666   if (!Ptr.isDummy()) {
667     if (!CheckExtern(S, OpPC, Ptr))
668       return false;
669     if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
670       return false;
671   }
672   return true;
673 }
674 
675 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
676   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
677     return false;
678   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
679     return false;
680   return true;
681 }
682 
/// Check whether function \p F may be called in a constant expression:
/// constexpr with a body (or an implicitly-constexpr lambda static
/// invoker), and not a virtual call before C++20. On failure, emits the
/// diagnostic that best explains why the function is not usable.
bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {

  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
    return false;
  }

  if (F->isConstexpr() && F->hasBody() &&
      (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  const SourceLocation &Loc = S.Current->getLocation(OpPC);
  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      // Point at the definition when there is one; otherwise the
      // declaration.
      if (DiagDecl->getDefinition())
        S.Note(DiagDecl->getDefinition()->getLocation(),
               diag::note_declared_at);
      else
        S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    }
  } else {
    S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}
753 
754 bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
755   if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
756     S.FFDiag(S.Current->getSource(OpPC),
757              diag::note_constexpr_depth_limit_exceeded)
758         << S.getLangOpts().ConstexprCallDepth;
759     return false;
760   }
761 
762   return true;
763 }
764 
765 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
766   if (!This.isZero())
767     return true;
768 
769   const SourceInfo &Loc = S.Current->getSource(OpPC);
770 
771   bool IsImplicit = false;
772   if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
773     IsImplicit = E->isImplicit();
774 
775   if (S.getLangOpts().CPlusPlus11)
776     S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
777   else
778     S.FFDiag(Loc);
779 
780   return false;
781 }
782 
783 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
784   if (!MD->isPureVirtual())
785     return true;
786   const SourceInfo &E = S.Current->getSource(OpPC);
787   S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
788   S.Note(MD->getLocation(), diag::note_declared_at);
789   return false;
790 }
791 
/// Validate the result of a floating-point operation under the active
/// floating-point options \p FPO. Rejects NaN results (as UB), and outside
/// constant contexts rejects results that depend on a dynamic rounding mode
/// or raise FP exceptions that are not being ignored.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  // Any non-OK status under strict FP semantics (dynamic rounding, observed
  // exceptions, or FP-environment access) is not evaluable at compile time.
  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    // There is no usefully definable result.
    S.FFDiag(E);
    return false;
  }

  return true;
}
838 
839 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
840   if (S.getLangOpts().CPlusPlus20)
841     return true;
842 
843   const SourceInfo &E = S.Current->getSource(OpPC);
844   S.CCEDiag(E, diag::note_constexpr_new);
845   return true;
846 }
847 
/// Check that an allocation and the corresponding deallocation used
/// matching forms (e.g. `new[]`/`delete[]` vs. `new`/`delete`). On a
/// mismatch, diagnose with a reconstructed allocated type and a note at
/// the originating new-expression.
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose;
  // We need to shuffle things around a bit here to get a better diagnostic,
  // because the expression we allocated the block for was of type int*,
  // but we want to get the array size right.
  if (D->isArray()) {
    QualType ElemQT = D->getType()->getPointeeType();
    TypeToDiagnose = S.getASTContext().getConstantArrayType(
        ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
        nullptr, ArraySizeModifier::Normal, 0);
  } else
    TypeToDiagnose = D->getType()->getPointeeType();

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}
875 
/// Check that the object being deleted was actually heap-allocated during
/// this evaluation, i.e. its \p Source is a new-expression or a
/// __builtin_operator_new call; otherwise diagnose the bogus delete.
bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // The two sources we currently allow are new expressions and
  // __builtin_operator_new calls.
  if (isa_and_nonnull<CXXNewExpr>(Source))
    return true;
  if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(S.getASTContext());

  if (Ptr.isTemporary())
    S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
  else
    S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
  return false;
}
897 
/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
/// Always returns false (the forwarded diagnoseUnknownDecl result).
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}
904 
905 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
906                 AccessKinds AK) {
907   if (!Ptr.isDummy())
908     return true;
909 
910   const Descriptor *Desc = Ptr.getDeclDesc();
911   const ValueDecl *D = Desc->asValueDecl();
912   if (!D)
913     return false;
914 
915   if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
916     return diagnoseUnknownDecl(S, OpPC, D);
917 
918   assert(AK == AK_Assign);
919   if (S.getLangOpts().CPlusPlus14) {
920     const SourceInfo &E = S.Current->getSource(OpPC);
921     S.FFDiag(E, diag::note_constexpr_modify_global);
922   }
923   return false;
924 }
925 
926 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
927                       const CallExpr *CE, unsigned ArgSize) {
928   auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
929   auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
930   unsigned Offset = 0;
931   unsigned Index = 0;
932   for (const Expr *Arg : Args) {
933     if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
934       const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
935       if (ArgPtr.isZero()) {
936         const SourceLocation &Loc = S.Current->getLocation(OpPC);
937         S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
938         return false;
939       }
940     }
941 
942     Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
943     ++Index;
944   }
945   return true;
946 }
947 
// FIXME: This is similar to code we already have in Compiler.cpp.
// I think it makes sense to instead add the field and base destruction stuff
// to the destructor Function itself. Then destroying a record would really
// _just_ be calling its destructor. That would also help with the diagnostic
// difference when the destructor or a field/base fails.
/// Run the (non-trivial) destructor of the record pointed to by \p BasePtr.
/// \p Desc describes the record (or the element record of an array).
/// Returns false if destruction failed or was diagnosed as invalid.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // Destroying the object a destructor is currently running on is a
  // double-destroy.
  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    // The instance pointer is passed on the stack, then the destructor is
    // interpreted like any other member call.
    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}
979 
980 static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
981   assert(B);
982   const Descriptor *Desc = B->getDescriptor();
983 
984   if (Desc->isPrimitive() || Desc->isPrimitiveArray())
985     return true;
986 
987   assert(Desc->isRecord() || Desc->isCompositeArray());
988 
989   if (Desc->isCompositeArray()) {
990     const Descriptor *ElemDesc = Desc->ElemDesc;
991     assert(ElemDesc->isRecord());
992 
993     Pointer RP(const_cast<Block *>(B));
994     for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
995       if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
996         return false;
997     }
998     return true;
999   }
1000 
1001   assert(Desc->isRecord());
1002   return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
1003 }
1004 
/// Interpret a delete expression: validate the pointer being deleted, run
/// destructors, deallocate the block and check that the delete form matches
/// the allocation form (new/delete vs new[]/delete[]).
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    while (Ptr.isBaseClass())
      Ptr = Ptr.getBase();

    // delete must be applied to a pointer to the complete object, not to a
    // subobject, a one-past-the-end pointer, or an array element.
    if (!Ptr.isRoot() || Ptr.isOnePastEnd() || Ptr.isArrayElement()) {
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    QualType AllocType = Ptr.getType();
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      // A non-replaceable operator delete selected via a virtual destructor
      // makes the deletion invalid in a constant expression.
      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete->isReplaceableGlobalAllocationFunction()) {
        S.FFDiag(S.Current->getSource(OpPC),
                 diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, BlockToDelete))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();
  const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
  // Query the allocation form before deallocating; afterwards the allocator
  // no longer knows about this allocation.
  std::optional<DynamicAllocator::Form> AllocForm =
      Allocator.getAllocationForm(Source);

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_delete);
    return false;
  }

  assert(AllocForm);
  DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
                                          ? DynamicAllocator::Form::Array
                                          : DynamicAllocator::Form::NonArray;
  return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
                             Source);
}
1086 
1087 void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
1088                        const APSInt &Value) {
1089   llvm::APInt Min;
1090   llvm::APInt Max;
1091 
1092   if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
1093     return;
1094 
1095   ED->getValueRange(Max, Min);
1096   --Max;
1097 
1098   if (ED->getNumNegativeBits() &&
1099       (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
1100     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1101     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1102         << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
1103         << ED;
1104   } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
1105     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1106     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1107         << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
1108         << ED;
1109   }
1110 }
1111 
1112 bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
1113   assert(T);
1114   assert(!S.getLangOpts().CPlusPlus23);
1115 
1116   // C++1y: A constant initializer for an object o [...] may also invoke
1117   // constexpr constructors for o and its subobjects even if those objects
1118   // are of non-literal class types.
1119   //
1120   // C++11 missed this detail for aggregates, so classes like this:
1121   //   struct foo_t { union { int i; volatile int j; } u; };
1122   // are not (obviously) initializable like so:
1123   //   __attribute__((__require_constant_initialization__))
1124   //   static const foo_t x = {{0}};
1125   // because "i" is a subobject with non-literal initialization (due to the
1126   // volatile member of the union). See:
1127   //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
1128   // Therefore, we use the C++1y behavior.
1129 
1130   if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
1131       S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
1132     return true;
1133   }
1134 
1135   const Expr *E = S.Current->getExpr(OpPC);
1136   if (S.getLangOpts().CPlusPlus11)
1137     S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
1138   else
1139     S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
1140   return false;
1141 }
1142 
1143 static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1144                              const Pointer &ThisPtr) {
1145   assert(Func->isConstructor());
1146 
1147   const Descriptor *D = ThisPtr.getFieldDesc();
1148 
1149   // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1150   // subobject of a composite array.
1151   if (!D->ElemRecord)
1152     return true;
1153 
1154   if (D->ElemRecord->getNumVirtualBases() == 0)
1155     return true;
1156 
1157   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
1158       << Func->getParentDecl();
1159   return false;
1160 }
1161 
/// Interpret a call to \p Func with \p VarArgSize bytes of variadic
/// arguments already on the stack.
/// NOTE(review): unlike Call(), failure paths here do not run
/// cleanupAfterFunctionCall() — confirm whether that asymmetry is
/// intentional for the CallVar opcode.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // With RVO, the return-value pointer sits on top of the instance pointer.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  // Enter a new frame; on success the callee has already destroyed it.
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
1209 
/// Interpret a call to \p Func with its arguments (and, for member calls,
/// the instance pointer) already on the stack.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  // If we bail out before entering the new frame, the arguments already on
  // the stack still need to be cleaned up.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // With RVO, the return-value pointer sits on top of the instance pointer.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  // Enter a new frame; on success the callee has already destroyed it.
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) functions force a constant context for the
  // duration of the call.
  InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}
1274 
/// Interpret a virtual call: determine the dynamic type of the instance
/// pointer, find the final overrider of \p Func in that type and dispatch
/// to it, handling covariant return types.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  // With RVO, the return-value pointer sits on top of the instance pointer.
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  // Walk up all base-class casts to find the most-derived object and take
  // its type as the dynamic type.
  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        InitialPointeeType->getAsRecordDecl(),
        OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Offset);
  }

  return true;
}
1348 
1349 bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
1350             const CallExpr *CE, uint32_t BuiltinID) {
1351   if (S.checkingPotentialConstantExpression())
1352     return false;
1353   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);
1354 
1355   InterpFrame *FrameBefore = S.Current;
1356   S.Current = NewFrame.get();
1357 
1358   if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
1359     NewFrame.release();
1360     return true;
1361   }
1362   S.Current = FrameBefore;
1363   return false;
1364 }
1365 
1366 bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
1367              const CallExpr *CE) {
1368   const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();
1369 
1370   const Function *F = FuncPtr.getFunction();
1371   if (!F) {
1372     const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
1373     S.FFDiag(E, diag::note_constexpr_null_callee)
1374         << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
1375     return false;
1376   }
1377 
1378   if (!FuncPtr.isValid() || !F->getDecl())
1379     return Invalid(S, OpPC);
1380 
1381   assert(F);
1382 
1383   // This happens when the call expression has been cast to
1384   // something else, but we don't support that.
1385   if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
1386       S.Ctx.classify(CE->getType()))
1387     return false;
1388 
1389   // Check argument nullability state.
1390   if (F->hasNonNullAttr()) {
1391     if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
1392       return false;
1393   }
1394 
1395   assert(ArgSize >= F->getWrittenArgSize());
1396   uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();
1397 
1398   // We need to do this explicitly here since we don't have the necessary
1399   // information to do it automatically.
1400   if (F->isThisPointerExplicit())
1401     VarArgSize -= align(primSize(PT_Ptr));
1402 
1403   if (F->isVirtual())
1404     return CallVirt(S, OpPC, F, VarArgSize);
1405 
1406   return Call(S, OpPC, F, VarArgSize);
1407 }
1408 
/// Check that a placement-new expression \p E constructs into storage whose
/// type and size match the allocated type. \p ArraySize, if present, is the
/// number of elements being allocated.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  if (!CheckStore(S, OpPC, Ptr))
    return false;

  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(E);
  QualType StorageType = Ptr.getType();

  // If the storage itself came from a new expression of pointer type, the
  // allocation we're constructing into is the pointee.
  if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
      StorageType->isPointerType()) {
    // FIXME: Are there other cases where this is a problem?
    StorageType = StorageType->getPointeeType();
  }

  // Build the type being allocated; for arrays, wrap the element type into
  // a constant array of the requested size.
  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        NewExpr->getAllocatedType(),
        APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
        ArraySizeModifier::Normal, 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // Compare element counts (1 for non-array types) and element types.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
                             ASTCtx.getBaseElementType(StorageType))) {
    S.FFDiag(S.Current->getLocation(OpPC),
             diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK_Construct);

  return true;
}
1461 
1462 bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
1463   assert(E);
1464   const auto &Loc = S.Current->getSource(OpPC);
1465 
1466   if (S.getLangOpts().CPlusPlus26)
1467     return true;
1468 
1469   if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
1470     const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
1471 
1472     if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
1473       // This is allowed pre-C++26, but only an std function.
1474       if (S.Current->isStdFunction())
1475         return true;
1476       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1477           << /*C++26 feature*/ 1 << E->getSourceRange();
1478     } else if (NewExpr->getNumPlacementArgs() == 1 &&
1479                !OperatorNew->isReservedGlobalPlacementOperator()) {
1480       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1481           << /*Unsupported*/ 0 << E->getSourceRange();
1482     } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
1483       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1484           << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
1485     }
1486   } else {
1487     const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
1488     const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
1489     if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
1490       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1491           << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
1492     }
1493   }
1494 
1495   return false;
1496 }
1497 
1498 bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1499                               const FixedPoint &FP) {
1500   const Expr *E = S.Current->getExpr(OpPC);
1501   if (S.checkingForUndefinedBehavior()) {
1502     S.getASTContext().getDiagnostics().Report(
1503         E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
1504         << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1505   }
1506   S.CCEDiag(E, diag::note_constexpr_overflow)
1507       << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1508   return S.noteUndefinedBehavior();
1509 }
1510 
1511 bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1512   const SourceInfo &Loc = S.Current->getSource(OpPC);
1513   S.FFDiag(Loc,
1514            diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1515       << Index;
1516   return false;
1517 }
1518 
1519 bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1520                                 const Pointer &Ptr, unsigned BitWidth) {
1521   if (Ptr.isDummy())
1522     return false;
1523 
1524   const SourceInfo &E = S.Current->getSource(OpPC);
1525   S.CCEDiag(E, diag::note_constexpr_invalid_cast)
1526       << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);
1527 
1528   if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1529     // Only allow based lvalue casts if they are lossless.
1530     if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
1531         BitWidth)
1532       return Invalid(S, OpPC);
1533   }
1534   return true;
1535 }
1536 
1537 bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1538   const Pointer &Ptr = S.Stk.pop<Pointer>();
1539 
1540   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1541     return false;
1542 
1543   S.Stk.push<IntegralAP<false>>(
1544       IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
1545   return true;
1546 }
1547 
1548 bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1549   const Pointer &Ptr = S.Stk.pop<Pointer>();
1550 
1551   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1552     return false;
1553 
1554   S.Stk.push<IntegralAP<true>>(
1555       IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
1556   return true;
1557 }
1558 
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", off)
#endif
/// Main interpreter dispatch loop: executes opcodes starting at the current
/// frame's PC until an opcode handler returns from this function.
bool Interpret(InterpState &S, APValue &Result) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  // Read an opcode, then execute the handler generated from Opcodes.inc.
  // Handlers advance PC themselves; OpPC is the position just past the
  // opcode, used for diagnostics and operand decoding.
  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", on)
#endif
1591 
1592 } // namespace interp
1593 } // namespace clang
1594