1 //===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "Interp.h"
10 #include "Function.h"
11 #include "InterpFrame.h"
12 #include "InterpShared.h"
13 #include "InterpStack.h"
14 #include "Opcode.h"
15 #include "PrimType.h"
16 #include "Program.h"
17 #include "State.h"
18 #include "clang/AST/ASTContext.h"
19 #include "clang/AST/ASTDiagnostic.h"
20 #include "clang/AST/CXXInheritance.h"
21 #include "clang/AST/DeclObjC.h"
22 #include "clang/AST/Expr.h"
23 #include "clang/AST/ExprCXX.h"
24 #include "clang/Basic/DiagnosticSema.h"
25 #include "clang/Basic/TargetInfo.h"
26 #include "llvm/ADT/APSInt.h"
27 #include "llvm/ADT/StringExtras.h"
28 #include <limits>
29 #include <vector>
30 
31 using namespace clang;
32 using namespace clang::interp;
33 
34 static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
35   llvm::report_fatal_error("Interpreter cannot return values");
36 }
37 
38 //===----------------------------------------------------------------------===//
39 // Jmp, Jt, Jf
40 //===----------------------------------------------------------------------===//
41 
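// Jmp adjusts the PC unconditionally. Jt and Jf pop a bool from the stack and
// add Offset to the PC only when the value is true (Jt) or false (Jf). All
// three always return true, since taking a branch cannot itself fail.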
42 static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
43   PC += Offset;
44   return true;
45 }
46 
47 static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
48   if (S.Stk.pop<bool>()) {
49     PC += Offset;
50   }
51   return true;
52 }
53 
54 static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
55   if (!S.Stk.pop<bool>()) {
56     PC += Offset;
57   }
58   return true;
59 }
60 
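// Diagnose a read from a variable whose initializer is not visible to the
// evaluator. An illustrative trigger (sketch, not from this file):
//   extern const int N;          // declared, but no initializer in this TU
//   constexpr int M = N;         // error: initializer of 'N' is unknown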
61 static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
62                                        const ValueDecl *VD) {
63   const SourceInfo &E = S.Current->getSource(OpPC);
64   S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
65   S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
66 }
67 
68 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
69                                      const ValueDecl *VD);
70 static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
71                                 const ValueDecl *D) {
72   const SourceInfo &E = S.Current->getSource(OpPC);
73 
74   if (isa<ParmVarDecl>(D)) {
75     if (S.getLangOpts().CPlusPlus11) {
76       S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
77       S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
78     } else {
79       S.FFDiag(E);
80     }
81     return false;
82   }
83 
84   if (!D->getType().isConstQualified())
85     diagnoseNonConstVariable(S, OpPC, D);
86   else if (const auto *VD = dyn_cast<VarDecl>(D);
87            VD && !VD->getAnyInitializer())
88     diagnoseMissingInitializer(S, OpPC, VD);
89 
90   return false;
91 }
92 
93 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
94                                      const ValueDecl *VD) {
95   const SourceInfo &Loc = S.Current->getSource(OpPC);
96   if (!S.getLangOpts().CPlusPlus) {
97     S.FFDiag(Loc);
98     return;
99   }
100 
101   if (const auto *VarD = dyn_cast<VarDecl>(VD);
102       VarD && VarD->getType().isConstQualified() &&
103       !VarD->getAnyInitializer()) {
104     diagnoseMissingInitializer(S, OpPC, VD);
105     return;
106   }
107 
108   // Rather random, but this is to match the diagnostic output of the current
109   // interpreter.
110   if (isa<ObjCIvarDecl>(VD))
111     return;
112 
113   if (VD->getType()->isIntegralOrEnumerationType()) {
114     S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
115     S.Note(VD->getLocation(), diag::note_declared_at);
116     return;
117   }
118 
119   S.FFDiag(Loc,
120            S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
121                                        : diag::note_constexpr_ltor_non_integral,
122            1)
123       << VD << VD->getType();
124   S.Note(VD->getLocation(), diag::note_declared_at);
125 }
126 
127 static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
128                         AccessKinds AK) {
129   if (Ptr.isActive())
130     return true;
131 
132   assert(Ptr.inUnion());
133   assert(Ptr.isField() && Ptr.getField());
134 
135   Pointer U = Ptr.getBase();
136   Pointer C = Ptr;
137   while (!U.isRoot() && U.inUnion() && !U.isActive()) {
138     if (U.getField())
139       C = U;
140     U = U.getBase();
141   }
142   assert(C.isField());
143 
144   // Get the inactive field descriptor.
145   const FieldDecl *InactiveField = C.getField();
146   assert(InactiveField);
147 
148   // Consider:
149   // union U {
150   //   struct {
151   //     int x;
152   //     int y;
153   //   } a;
154   // }
155   //
156   // When activating x, we will also activate a. If we now try to read
157   // from y, we will get to CheckActive, because y is not active. In that
158   // case, our U will be the struct member a (not a union). We return here
159   // and let later code handle this.
160   if (!U.getFieldDesc()->isUnion())
161     return true;
162 
163   // Find the active field of the union.
164   const Record *R = U.getRecord();
165   assert(R && R->isUnion() && "Not a union");
166 
167   const FieldDecl *ActiveField = nullptr;
168   for (const Record::Field &F : R->fields()) {
169     const Pointer &Field = U.atField(F.Offset);
170     if (Field.isActive()) {
171       ActiveField = Field.getField();
172       break;
173     }
174   }
175 
176   const SourceInfo &Loc = S.Current->getSource(OpPC);
177   S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
178       << AK << InactiveField << !ActiveField << ActiveField;
179   return false;
180 }
181 
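// Check an access to a lifetime-extended temporary with static storage
// duration. A temporary created outside the current evaluation may only be
// accessed if its MaterializeTemporaryExpr is usable in constant expressions,
// e.g. (illustrative) when bound to a constexpr reference:
//   static constexpr const int &R = 42;  // reading R later is fine
//   static const int &Q = rand();        // reading Q here would be diagnosed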
182 static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
183                            AccessKinds AK) {
184   if (auto ID = Ptr.getDeclID()) {
185     if (!Ptr.isStaticTemporary())
186       return true;
187 
188     const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
189         Ptr.getDeclDesc()->asExpr());
190     if (!MTE)
191       return true;
192 
193     // FIXME(perf): Since we do this check on every Load from a static
194     // temporary, it might make sense to cache the value of the
195     // isUsableInConstantExpressions call.
196     if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
197         Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
198       const SourceInfo &E = S.Current->getSource(OpPC);
199       S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
200       S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
201       return false;
202     }
203   }
204   return true;
205 }
206 
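// A static global may only be modified while it is itself the declaration
// currently being evaluated; any other write is diagnosed with
// note_constexpr_modify_global.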
207 static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
208   if (auto ID = Ptr.getDeclID()) {
209     if (!Ptr.isStatic())
210       return true;
211 
212     if (S.P.getCurrentDecl() == ID)
213       return true;
214 
215     S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
216     return false;
217   }
218   return true;
219 }
220 
221 namespace clang {
222 namespace interp {
223 static void popArg(InterpState &S, const Expr *Arg) {
224   PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
225   TYPE_SWITCH(Ty, S.Stk.discard<T>());
226 }
227 
228 void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
229                               const Function *Func) {
230   assert(S.Current);
231   assert(Func);
232 
233   if (Func->isUnevaluatedBuiltin())
234     return;
235 
236   // Some builtin functions require us to only look at the call site, since
237   // the classified parameter types do not match.
238   if (unsigned BID = Func->getBuiltinID();
239       BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
240     const auto *CE =
241         cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
242     for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
243       const Expr *A = CE->getArg(I);
244       popArg(S, A);
245     }
246     return;
247   }
248 
249   if (S.Current->Caller && Func->isVariadic()) {
250     // The CallExpr we're looking for is at the return PC of the current
251     // function, i.e. in the caller.
252     // This code path should be executed very rarely.
253     unsigned NumVarArgs;
254     const Expr *const *Args = nullptr;
255     unsigned NumArgs = 0;
256     const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
257     if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
258       Args = CE->getArgs();
259       NumArgs = CE->getNumArgs();
260     } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
261       Args = CE->getArgs();
262       NumArgs = CE->getNumArgs();
263     } else
264       assert(false && "Can't get arguments from that expression type");
265 
266     assert(NumArgs >= Func->getNumWrittenParams());
267     NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
268                             isa<CXXOperatorCallExpr>(CallSite));
269     for (unsigned I = 0; I != NumVarArgs; ++I) {
270       const Expr *A = Args[NumArgs - 1 - I];
271       popArg(S, A);
272     }
273   }
274 
275   // And in any case, remove the fixed parameters (the non-variadic ones)
276   // at the end.
277   for (PrimType Ty : Func->args_reverse())
278     TYPE_SWITCH(Ty, S.Stk.discard<T>());
279 }
280 
281 bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
282   if (!Ptr.isExtern())
283     return true;
284 
285   if (Ptr.isInitialized() ||
286       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
287     return true;
288 
289   if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
290     const auto *VD = Ptr.getDeclDesc()->asValueDecl();
291     diagnoseNonConstVariable(S, OpPC, VD);
292   }
293   return false;
294 }
295 
296 bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
297   if (!Ptr.isUnknownSizeArray())
298     return true;
299   const SourceInfo &E = S.Current->getSource(OpPC);
300   S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
301   return false;
302 }
303 
304 bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
305                AccessKinds AK) {
306   if (Ptr.isZero()) {
307     const auto &Src = S.Current->getSource(OpPC);
308 
309     if (Ptr.isField())
310       S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
311     else
312       S.FFDiag(Src, diag::note_constexpr_access_null) << AK;
313 
314     return false;
315   }
316 
317   if (!Ptr.isLive()) {
318     const auto &Src = S.Current->getSource(OpPC);
319 
320     if (Ptr.isDynamic()) {
321       S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
322     } else {
323       bool IsTemp = Ptr.isTemporary();
324       S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;
325 
326       if (IsTemp)
327         S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
328       else
329         S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
330     }
331 
332     return false;
333   }
334 
335   return true;
336 }
337 
338 bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
339   assert(Desc);
340 
341   const auto *D = Desc->asVarDecl();
342   if (!D || !D->hasGlobalStorage())
343     return true;
344 
345   if (D == S.EvaluatingDecl)
346     return true;
347 
348   if (D->isConstexpr())
349     return true;
350 
351   // If we're evaluating the initializer for a constexpr variable in C23, we may
352   // only read other constexpr variables. Abort here since this one isn't
353   // constexpr.
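  // Illustrative (C23): given a file-scope `int g = 1;`, evaluating
  // `constexpr int c = g;` ends up here because `g` is not constexpr.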
354   if (const auto *VD = dyn_cast_if_present<VarDecl>(S.EvaluatingDecl);
355       VD && VD->isConstexpr() && S.getLangOpts().C23)
356     return Invalid(S, OpPC);
357 
358   QualType T = D->getType();
359   bool IsConstant = T.isConstant(S.getASTContext());
360   if (T->isIntegralOrEnumerationType()) {
361     if (!IsConstant) {
362       diagnoseNonConstVariable(S, OpPC, D);
363       return false;
364     }
365     return true;
366   }
367 
368   if (IsConstant) {
369     if (S.getLangOpts().CPlusPlus) {
370       S.CCEDiag(S.Current->getLocation(OpPC),
371                 S.getLangOpts().CPlusPlus11
372                     ? diag::note_constexpr_ltor_non_constexpr
373                     : diag::note_constexpr_ltor_non_integral,
374                 1)
375           << D << T;
376       S.Note(D->getLocation(), diag::note_declared_at);
377     } else {
378       S.CCEDiag(S.Current->getLocation(OpPC));
379     }
380     return true;
381   }
382 
383   if (T->isPointerOrReferenceType()) {
384     if (!T->getPointeeType().isConstant(S.getASTContext()) ||
385         !S.getLangOpts().CPlusPlus11) {
386       diagnoseNonConstVariable(S, OpPC, D);
387       return false;
388     }
389     return true;
390   }
391 
392   diagnoseNonConstVariable(S, OpPC, D);
393   return false;
394 }
395 
396 static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
397   if (!Ptr.isBlockPointer())
398     return true;
399   return CheckConstant(S, OpPC, Ptr.getDeclDesc());
400 }
401 
402 bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
403                CheckSubobjectKind CSK) {
404   if (!Ptr.isZero())
405     return true;
406   const SourceInfo &Loc = S.Current->getSource(OpPC);
407   S.FFDiag(Loc, diag::note_constexpr_null_subobject)
408       << CSK << S.Current->getRange(OpPC);
409 
410   return false;
411 }
412 
413 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
414                 AccessKinds AK) {
415   if (!Ptr.isOnePastEnd())
416     return true;
417   const SourceInfo &Loc = S.Current->getSource(OpPC);
418   S.FFDiag(Loc, diag::note_constexpr_access_past_end)
419       << AK << S.Current->getRange(OpPC);
420   return false;
421 }
422 
423 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
424                 CheckSubobjectKind CSK) {
425   if (!Ptr.isElementPastEnd())
426     return true;
427   const SourceInfo &Loc = S.Current->getSource(OpPC);
428   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
429       << CSK << S.Current->getRange(OpPC);
430   return false;
431 }
432 
433 bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
434                     CheckSubobjectKind CSK) {
435   if (!Ptr.isOnePastEnd())
436     return true;
437 
438   const SourceInfo &Loc = S.Current->getSource(OpPC);
439   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
440       << CSK << S.Current->getRange(OpPC);
441   return false;
442 }
443 
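// Check that a base-to-derived cast leaves the pointer within the object it
// refers to. A sketch of an invalid downcast this rejects:
//   struct A {}; struct B : A {};
//   constexpr A a;
//   constexpr const B &b = static_cast<const B&>(a); // 'a' is not a 'B'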
444 bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
445                    uint32_t Offset) {
446   uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
447   uint32_t PtrOffset = Ptr.getByteOffset();
448 
449   // We subtract Offset from PtrOffset. The result must be at least
450   // MinOffset.
451   if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
452     return true;
453 
454   const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
455   QualType TargetQT = E->getType()->getPointeeType();
456   QualType MostDerivedQT = Ptr.getDeclPtr().getType();
457 
458   S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
459       << MostDerivedQT << TargetQT;
460 
461   return false;
462 }
463 
464 bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
465   assert(Ptr.isLive() && "Pointer is not live");
466   if (!Ptr.isConst() || Ptr.isMutable())
467     return true;
468 
469   // The This pointer is writable in constructors and destructors,
470   // even if isConst() returns true.
471   // TODO(perf): We could be hitting this code path quite a lot in complex
472   // constructors. Is there a better way to do this?
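  // Illustrative: `constexpr Foo() { Val = 1; }` must be able to assign to its
  // members even though the Foo object under construction is itself const.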
473   if (S.Current->getFunction()) {
474     for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
475       if (const Function *Func = Frame->getFunction();
476           Func && (Func->isConstructor() || Func->isDestructor()) &&
477           Ptr.block() == Frame->getThis().block()) {
478         return true;
479       }
480     }
481   }
482 
483   if (!Ptr.isBlockPointer())
484     return false;
485 
486   const QualType Ty = Ptr.getType();
487   const SourceInfo &Loc = S.Current->getSource(OpPC);
488   S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
489   return false;
490 }
491 
492 bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
493   assert(Ptr.isLive() && "Pointer is not live");
494   if (!Ptr.isMutable())
495     return true;
496 
497   // From C++14 onwards, it is permitted to read a mutable member whose
498   // lifetime began within the evaluation.
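  // Illustrative sketch:
  //   constexpr int f() {
  //     struct S { mutable int M = 1; } Loc;
  //     return Loc.M; // OK since C++14: Loc's lifetime began in this evaluation
  //   }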
499   if (S.getLangOpts().CPlusPlus14 &&
500       Ptr.block()->getEvalID() == S.Ctx.getEvalID())
501     return true;
502 
503   const SourceInfo &Loc = S.Current->getSource(OpPC);
504   const FieldDecl *Field = Ptr.getField();
505   S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
506   S.Note(Field->getLocation(), diag::note_declared_at);
507   return false;
508 }
509 
510 bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
511                    AccessKinds AK) {
512   assert(Ptr.isLive());
513 
514   // FIXME: This check here might be kinda expensive. Maybe it would be better
515   // to have another field in InlineDescriptor for this?
516   if (!Ptr.isBlockPointer())
517     return true;
518 
519   QualType PtrType = Ptr.getType();
520   if (!PtrType.isVolatileQualified())
521     return true;
522 
523   const SourceInfo &Loc = S.Current->getSource(OpPC);
524   if (S.getLangOpts().CPlusPlus)
525     S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
526   else
527     S.FFDiag(Loc);
528   return false;
529 }
530 
531 bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
532                       AccessKinds AK) {
533   assert(Ptr.isLive());
534 
535   if (Ptr.isInitialized())
536     return true;
537 
538   if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
539       VD && VD->hasGlobalStorage()) {
540     const SourceInfo &Loc = S.Current->getSource(OpPC);
541     if (VD->getAnyInitializer()) {
542       S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
543       S.Note(VD->getLocation(), diag::note_declared_at);
544     } else {
545       diagnoseMissingInitializer(S, OpPC, VD);
546     }
547     return false;
548   }
549 
550   if (!S.checkingPotentialConstantExpression()) {
551     S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
552         << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
553   }
554   return false;
555 }
556 
557 bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
558   if (Ptr.isInitialized())
559     return true;
560 
561   assert(S.getLangOpts().CPlusPlus);
562   const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
563   if ((!VD->hasConstantInitialization() &&
564        VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
565       (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
566        !VD->hasICEInitializer(S.getASTContext()))) {
567     const SourceInfo &Loc = S.Current->getSource(OpPC);
568     S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
569     S.Note(VD->getLocation(), diag::note_declared_at);
570   }
571   return false;
572 }
573 
574 static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
575   if (!Ptr.isWeak())
576     return true;
577 
578   const auto *VD = Ptr.getDeclDesc()->asVarDecl();
579   assert(VD);
580   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
581       << VD;
582   S.Note(VD->getLocation(), diag::note_declared_at);
583 
584   return false;
585 }
586 
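// Full battery of checks run before loading a value through Ptr
// (lvalue-to-rvalue conversion); each helper below emits its own diagnostic
// when it fails.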
587 bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
588                AccessKinds AK) {
589   if (!CheckLive(S, OpPC, Ptr, AK))
590     return false;
591   if (!CheckConstant(S, OpPC, Ptr))
592     return false;
593   if (!CheckDummy(S, OpPC, Ptr, AK))
594     return false;
595   if (!CheckExtern(S, OpPC, Ptr))
596     return false;
597   if (!CheckRange(S, OpPC, Ptr, AK))
598     return false;
599   if (!CheckActive(S, OpPC, Ptr, AK))
600     return false;
601   if (!CheckInitialized(S, OpPC, Ptr, AK))
602     return false;
603   if (!CheckTemporary(S, OpPC, Ptr, AK))
604     return false;
605   if (!CheckWeak(S, OpPC, Ptr))
606     return false;
607   if (!CheckMutable(S, OpPC, Ptr))
608     return false;
609   if (!CheckVolatile(S, OpPC, Ptr, AK))
610     return false;
611   return true;
612 }
613 
614 /// This is not used by any of the opcodes directly. It's used by
615 /// EvalEmitter to do the final lvalue-to-rvalue conversion.
616 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
617   if (!CheckLive(S, OpPC, Ptr, AK_Read))
618     return false;
619   if (!CheckConstant(S, OpPC, Ptr))
620     return false;
621 
622   if (!CheckDummy(S, OpPC, Ptr, AK_Read))
623     return false;
624   if (!CheckExtern(S, OpPC, Ptr))
625     return false;
626   if (!CheckRange(S, OpPC, Ptr, AK_Read))
627     return false;
628   if (!CheckActive(S, OpPC, Ptr, AK_Read))
629     return false;
630   if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
631     return false;
632   if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
633     return false;
634   if (!CheckWeak(S, OpPC, Ptr))
635     return false;
636   if (!CheckMutable(S, OpPC, Ptr))
637     return false;
638   return true;
639 }
640 
641 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
642   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
643     return false;
644   if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
645     return false;
646   if (!CheckExtern(S, OpPC, Ptr))
647     return false;
648   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
649     return false;
650   if (!CheckGlobal(S, OpPC, Ptr))
651     return false;
652   if (!CheckConst(S, OpPC, Ptr))
653     return false;
654   return true;
655 }
656 
657 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
658   if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
659     return false;
660   if (!Ptr.isDummy()) {
661     if (!CheckExtern(S, OpPC, Ptr))
662       return false;
663     if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
664       return false;
665   }
666   return true;
667 }
668 
669 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
670   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
671     return false;
672   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
673     return false;
674   return true;
675 }
676 
677 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
678 
679   if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
680     const SourceLocation &Loc = S.Current->getLocation(OpPC);
681     S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
682     return false;
683   }
684 
685   if (F->isConstexpr() && F->hasBody() &&
686       (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
687     return true;
688 
689   // Implicitly constexpr.
690   if (F->isLambdaStaticInvoker())
691     return true;
692 
693   const SourceLocation &Loc = S.Current->getLocation(OpPC);
694   if (S.getLangOpts().CPlusPlus11) {
695     const FunctionDecl *DiagDecl = F->getDecl();
696 
697     // Invalid decls have been diagnosed before.
698     if (DiagDecl->isInvalidDecl())
699       return false;
700 
701     // If this function is not constexpr because it is an inherited
702     // non-constexpr constructor, diagnose that directly.
703     const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
704     if (CD && CD->isInheritingConstructor()) {
705       const auto *Inherited = CD->getInheritedConstructor().getConstructor();
706       if (!Inherited->isConstexpr())
707         DiagDecl = CD = Inherited;
708     }
709 
710     // FIXME: If DiagDecl is an implicitly-declared special member function
711     // or an inheriting constructor, we should be much more explicit about why
712     // it's not constexpr.
713     if (CD && CD->isInheritingConstructor()) {
714       S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
715           << CD->getInheritedConstructor().getConstructor()->getParent();
716       S.Note(DiagDecl->getLocation(), diag::note_declared_at);
717     } else {
718       // Don't emit anything if the function isn't defined and we're checking
719       // for a constant expression. It might be defined at the point we're
720       // actually calling it.
721       bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
722       if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
723           S.checkingPotentialConstantExpression())
724         return false;
725 
726       // If the declaration is defined, declared 'constexpr' _and_ has a body,
727       // the below diagnostic doesn't add anything useful.
728       if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
729           DiagDecl->hasBody())
730         return false;
731 
732       S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
733           << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
734 
735       if (DiagDecl->getDefinition())
736         S.Note(DiagDecl->getDefinition()->getLocation(),
737                diag::note_declared_at);
738       else
739         S.Note(DiagDecl->getLocation(), diag::note_declared_at);
740     }
741   } else {
742     S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
743   }
744 
745   return false;
746 }
747 
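// Enforce the constexpr call-depth limit (LangOptions::ConstexprCallDepth,
// controlled by -fconstexpr-depth; the default is 512).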
748 bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
749   if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
750     S.FFDiag(S.Current->getSource(OpPC),
751              diag::note_constexpr_depth_limit_exceeded)
752         << S.getLangOpts().ConstexprCallDepth;
753     return false;
754   }
755 
756   return true;
757 }
758 
759 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
760   if (!This.isZero())
761     return true;
762 
763   const SourceInfo &Loc = S.Current->getSource(OpPC);
764 
765   bool IsImplicit = false;
766   if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
767     IsImplicit = E->isImplicit();
768 
769   if (S.getLangOpts().CPlusPlus11)
770     S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
771   else
772     S.FFDiag(Loc);
773 
774   return false;
775 }
776 
777 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
778   if (!MD->isPureVirtual())
779     return true;
780   const SourceInfo &E = S.Current->getSource(OpPC);
781   S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
782   S.Note(MD->getLocation(), diag::note_declared_at);
783   return false;
784 }
785 
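// Validate the APFloat status flags produced by a floating-point operation.
// Illustrative: `constexpr float F = 0.0f / 0.0f;` yields a NaN and is
// diagnosed below; outside a constant context, dynamic rounding modes and
// non-ignored FP exceptions also make the result unusable.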
786 bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
787                       APFloat::opStatus Status, FPOptions FPO) {
788   // [expr.pre]p4:
789   //   If during the evaluation of an expression, the result is not
790   //   mathematically defined [...], the behavior is undefined.
791   // FIXME: C++ rules require us to not conform to IEEE 754 here.
792   if (Result.isNan()) {
793     const SourceInfo &E = S.Current->getSource(OpPC);
794     S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
795         << /*NaN=*/true << S.Current->getRange(OpPC);
796     return S.noteUndefinedBehavior();
797   }
798 
799   // In a constant context, assume that any dynamic rounding mode or FP
800   // exception state matches the default floating-point environment.
801   if (S.inConstantContext())
802     return true;
803 
804   if ((Status & APFloat::opInexact) &&
805       FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
806     // An inexact result means the value depends on the rounding mode. If the
807     // requested mode is dynamic, the evaluation cannot be done at compile time.
808     const SourceInfo &E = S.Current->getSource(OpPC);
809     S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
810     return false;
811   }
812 
813   if ((Status != APFloat::opOK) &&
814       (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
815        FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
816        FPO.getAllowFEnvAccess())) {
817     const SourceInfo &E = S.Current->getSource(OpPC);
818     S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
819     return false;
820   }
821 
822   if ((Status & APFloat::opStatus::opInvalidOp) &&
823       FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
824     const SourceInfo &E = S.Current->getSource(OpPC);
825     // There is no usefully definable result.
826     S.FFDiag(E);
827     return false;
828   }
829 
830   return true;
831 }
832 
833 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
834   if (S.getLangOpts().CPlusPlus20)
835     return true;
836 
837   const SourceInfo &E = S.Current->getSource(OpPC);
838   S.CCEDiag(E, diag::note_constexpr_new);
839   return true;
840 }
841 
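// Diagnose a mismatch between the allocation and deallocation form,
// e.g. (illustrative) memory obtained from `new int[3]` being released with
// `delete p` instead of `delete[] p`.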
842 bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
843                          DynamicAllocator::Form AllocForm,
844                          DynamicAllocator::Form DeleteForm, const Descriptor *D,
845                          const Expr *NewExpr) {
846   if (AllocForm == DeleteForm)
847     return true;
848 
849   QualType TypeToDiagnose;
850   // We need to shuffle things around a bit here to get a better diagnostic,
851   // because the expression we allocated the block for was of type int*,
852   // but we want to get the array size right.
853   if (D->isArray()) {
854     QualType ElemQT = D->getType()->getPointeeType();
855     TypeToDiagnose = S.getASTContext().getConstantArrayType(
856         ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
857         nullptr, ArraySizeModifier::Normal, 0);
858   } else
859     TypeToDiagnose = D->getType()->getPointeeType();
860 
861   const SourceInfo &E = S.Current->getSource(OpPC);
862   S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
863       << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
864       << TypeToDiagnose;
865   S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
866       << NewExpr->getSourceRange();
867   return false;
868 }
869 
870 bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
871                        const Pointer &Ptr) {
872   // The two sources we currently allow are new expressions and
873   // __builtin_operator_new calls.
874   if (isa_and_nonnull<CXXNewExpr>(Source))
875     return true;
876   if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
877       CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
878     return true;
879 
880   // Whatever this is, we didn't heap allocate it.
881   const SourceInfo &Loc = S.Current->getSource(OpPC);
882   S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
883       << Ptr.toDiagnosticString(S.getASTContext());
884 
885   if (Ptr.isTemporary())
886     S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
887   else
888     S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
889   return false;
890 }
891 
892 /// We already know the given DeclRefExpr is invalid for some reason,
893 /// now figure out why and print appropriate diagnostics.
894 bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
895   const ValueDecl *D = DR->getDecl();
896   return diagnoseUnknownDecl(S, OpPC, D);
897 }
898 
899 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
900                 AccessKinds AK) {
901   if (!Ptr.isDummy())
902     return true;
903 
904   const Descriptor *Desc = Ptr.getDeclDesc();
905   const ValueDecl *D = Desc->asValueDecl();
906   if (!D)
907     return false;
908 
909   if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
910     return diagnoseUnknownDecl(S, OpPC, D);
911 
912   assert(AK == AK_Assign);
913   if (S.getLangOpts().CPlusPlus14) {
914     const SourceInfo &E = S.Current->getSource(OpPC);
915     S.FFDiag(E, diag::note_constexpr_modify_global);
916   }
917   return false;
918 }
919 
920 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
921                       const CallExpr *CE, unsigned ArgSize) {
922   auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
923   auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
924   unsigned Offset = 0;
925   unsigned Index = 0;
926   for (const Expr *Arg : Args) {
927     if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
928       const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
929       if (ArgPtr.isZero()) {
930         const SourceLocation &Loc = S.Current->getLocation(OpPC);
931         S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
932         return false;
933       }
934     }
935 
936     Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
937     ++Index;
938   }
939   return true;
940 }
941 
942 // FIXME: This is similar to code we already have in Compiler.cpp.
943 // I think it makes sense to instead add the field and base destruction stuff
944 // to the destructor Function itself. Then destroying a record would really
945 // _just_ be calling its destructor. That would also help with the diagnostic
946 // difference when the destructor or a field/base fails.
947 static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
948                                 const Pointer &BasePtr,
949                                 const Descriptor *Desc) {
950   assert(Desc->isRecord());
951   const Record *R = Desc->ElemRecord;
952   assert(R);
953 
954   if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
955     const SourceInfo &Loc = S.Current->getSource(OpPC);
956     S.FFDiag(Loc, diag::note_constexpr_double_destroy);
957     return false;
958   }
959 
960   // Destructor of this record.
961   if (const CXXDestructorDecl *Dtor = R->getDestructor();
962       Dtor && !Dtor->isTrivial()) {
963     const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
964     if (!DtorFunc)
965       return false;
966 
967     S.Stk.push<Pointer>(BasePtr);
968     if (!Call(S, OpPC, DtorFunc, 0))
969       return false;
970   }
971   return true;
972 }
973 
974 static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
975   assert(B);
976   const Descriptor *Desc = B->getDescriptor();
977 
978   if (Desc->isPrimitive() || Desc->isPrimitiveArray())
979     return true;
980 
981   assert(Desc->isRecord() || Desc->isCompositeArray());
982 
983   if (Desc->isCompositeArray()) {
984     const Descriptor *ElemDesc = Desc->ElemDesc;
985     assert(ElemDesc->isRecord());
986 
987     Pointer RP(const_cast<Block *>(B));
988     for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
989       if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
990         return false;
991     }
992     return true;
993   }
994 
995   assert(Desc->isRecord());
996   return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
997 }
998 
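// Implements delete / delete[]: pops the pointer, verifies that it refers to
// the start of a heap allocation, runs the destructors, deallocates the block,
// and finally checks that the new and delete forms match.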
999 bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
1000           bool IsGlobalDelete) {
1001   if (!CheckDynamicMemoryAllocation(S, OpPC))
1002     return false;
1003 
1004   const Expr *Source = nullptr;
1005   const Block *BlockToDelete = nullptr;
1006   {
1007     // Extra scope so the Pointer is destroyed, and no longer points at the
1008     // block, before we deallocate that block below.
1009     Pointer Ptr = S.Stk.pop<Pointer>();
1010 
1011     // Deleting nullptr is always fine.
1012     if (Ptr.isZero())
1013       return true;
1014 
1015     // Remove base casts.
1016     while (Ptr.isBaseClass())
1017       Ptr = Ptr.getBase();
1018 
1019     if (!Ptr.isRoot() || Ptr.isOnePastEnd() || Ptr.isArrayElement()) {
1020       const SourceInfo &Loc = S.Current->getSource(OpPC);
1021       S.FFDiag(Loc, diag::note_constexpr_delete_subobject)
1022           << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd();
1023       return false;
1024     }
1025 
1026     Source = Ptr.getDeclDesc()->asExpr();
1027     BlockToDelete = Ptr.block();
1028 
1029     if (!CheckDeleteSource(S, OpPC, Source, Ptr))
1030       return false;
1031 
1032     // For a class type with a virtual destructor, the selected operator delete
1033     // is the one looked up when building the destructor.
1034     QualType AllocType = Ptr.getType();
1035     if (!DeleteIsArrayForm && !IsGlobalDelete) {
1036       auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
1037         if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
1038           if (const CXXDestructorDecl *DD = RD->getDestructor())
1039             return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
1040         return nullptr;
1041       };
1042 
1043       if (const FunctionDecl *VirtualDelete =
1044               getVirtualOperatorDelete(AllocType);
1045           VirtualDelete &&
1046           !VirtualDelete->isReplaceableGlobalAllocationFunction()) {
1047         S.FFDiag(S.Current->getSource(OpPC),
1048                  diag::note_constexpr_new_non_replaceable)
1049             << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
1050         return false;
1051       }
1052     }
1053   }
1054   assert(Source);
1055   assert(BlockToDelete);
1056 
1057   // Invoke destructors before deallocating the memory.
1058   if (!RunDestructors(S, OpPC, BlockToDelete))
1059     return false;
1060 
1061   DynamicAllocator &Allocator = S.getAllocator();
1062   const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
1063   std::optional<DynamicAllocator::Form> AllocForm =
1064       Allocator.getAllocationForm(Source);
1065 
1066   if (!Allocator.deallocate(Source, BlockToDelete, S)) {
1067     // Nothing has been deallocated, so this must be a double-delete.
1068     const SourceInfo &Loc = S.Current->getSource(OpPC);
1069     S.FFDiag(Loc, diag::note_constexpr_double_delete);
1070     return false;
1071   }
1072 
1073   assert(AllocForm);
1074   DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
1075                                           ? DynamicAllocator::Form::Array
1076                                           : DynamicAllocator::Form::NonArray;
1077   return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
1078                              Source);
1079 }
1080 
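// Diagnose storing an out-of-range value into an enumeration without a fixed
// underlying type (CWG 1766). Illustrative: for `enum E { A, B };` the valid
// value range is [0, 1], so `static_cast<E>(2)` is flagged here.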
1081 void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
1082                        const APSInt &Value) {
1083   llvm::APInt Min;
1084   llvm::APInt Max;
1085 
1086   if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
1087     return;
1088 
1089   ED->getValueRange(Max, Min);
1090   --Max;
1091 
1092   if (ED->getNumNegativeBits() &&
1093       (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
1094     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1095     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1096         << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
1097         << ED;
1098   } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
1099     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1100     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1101         << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
1102         << ED;
1103   }
1104 }
1105 
1106 bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
1107   assert(T);
1108   assert(!S.getLangOpts().CPlusPlus23);
1109 
1110   // C++1y: A constant initializer for an object o [...] may also invoke
1111   // constexpr constructors for o and its subobjects even if those objects
1112   // are of non-literal class types.
1113   //
1114   // C++11 missed this detail for aggregates, so classes like this:
1115   //   struct foo_t { union { int i; volatile int j; } u; };
1116   // are not (obviously) initializable like so:
1117   //   __attribute__((__require_constant_initialization__))
1118   //   static const foo_t x = {{0}};
1119   // because "i" is a subobject with non-literal initialization (due to the
1120   // volatile member of the union). See:
1121   //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
1122   // Therefore, we use the C++1y behavior.
1123 
1124   if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
1125       S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
1126     return true;
1127   }
1128 
1129   const Expr *E = S.Current->getExpr(OpPC);
1130   if (S.getLangOpts().CPlusPlus11)
1131     S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
1132   else
1133     S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
1134   return false;
1135 }
1136 
1137 static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1138                              const Pointer &ThisPtr) {
1139   assert(Func->isConstructor());
1140 
1141   const Descriptor *D = ThisPtr.getFieldDesc();
1142 
1143   // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1144   // subobject of a composite array.
1145   if (!D->ElemRecord)
1146     return true;
1147 
1148   if (D->ElemRecord->getNumVirtualBases() == 0)
1149     return true;
1150 
1151   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
1152       << Func->getParentDecl();
1153   return false;
1154 }
1155 
1156 bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
1157              uint32_t VarArgSize) {
1158   if (Func->hasThisPointer()) {
1159     size_t ArgSize = Func->getArgSize() + VarArgSize;
1160     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1161     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1162 
1163     // If the current function is a lambda static invoker and
1164     // the function we're about to call is a lambda call operator,
1165     // skip the CheckInvoke, since the ThisPtr is a null pointer
1166     // anyway.
1167     if (!(S.Current->getFunction() &&
1168           S.Current->getFunction()->isLambdaStaticInvoker() &&
1169           Func->isLambdaCallOperator())) {
1170       if (!CheckInvoke(S, OpPC, ThisPtr))
1171         return false;
1172     }
1173 
1174     if (S.checkingPotentialConstantExpression())
1175       return false;
1176   }
1177 
1178   if (!CheckCallable(S, OpPC, Func))
1179     return false;
1180 
1181   if (!CheckCallDepth(S, OpPC))
1182     return false;
1183 
1184   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1185   InterpFrame *FrameBefore = S.Current;
1186   S.Current = NewFrame.get();
1187 
1188   APValue CallResult;
1189   // Note that we cannot assert(CallResult.hasValue()) here since
1190   // Ret() above only sets the APValue if the current frame doesn't
1191   // have a caller set.
1192   if (Interpret(S, CallResult)) {
1193     NewFrame.release(); // Frame was delete'd already.
1194     assert(S.Current == FrameBefore);
1195     return true;
1196   }
1197 
1198   // Interpreting the function failed somehow. Reset to
1199   // previous state.
1200   S.Current = FrameBefore;
1201   return false;
1202 }
1203 
1204 bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
1205           uint32_t VarArgSize) {
1206   assert(Func);
1207   auto cleanup = [&]() -> bool {
1208     cleanupAfterFunctionCall(S, OpPC, Func);
1209     return false;
1210   };
1211 
1212   if (Func->hasThisPointer()) {
1213     size_t ArgSize = Func->getArgSize() + VarArgSize;
1214     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1215 
1216     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1217 
1218     // If the current function is a lambda static invoker and
1219     // the function we're about to call is a lambda call operator,
1220     // skip the CheckInvoke, since the ThisPtr is a null pointer
1221     // anyway.
1222     if (S.Current->getFunction() &&
1223         S.Current->getFunction()->isLambdaStaticInvoker() &&
1224         Func->isLambdaCallOperator()) {
1225       assert(ThisPtr.isZero());
1226     } else {
1227       if (!CheckInvoke(S, OpPC, ThisPtr))
1228         return cleanup();
1229     }
1230 
1231     if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
1232       return false;
1233   }
1234 
1235   if (!CheckCallable(S, OpPC, Func))
1236     return cleanup();
1237 
1238   // FIXME: The isConstructor() check here is not always right. The current
1239   // constant evaluator is somewhat inconsistent in when it allows a function
1240   // call when checking for a constant expression.
1241   if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
1242       !Func->isConstructor())
1243     return cleanup();
1244 
1245   if (!CheckCallDepth(S, OpPC))
1246     return cleanup();
1247 
1248   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1249   InterpFrame *FrameBefore = S.Current;
1250   S.Current = NewFrame.get();
1251 
1252   InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
1253   APValue CallResult;
1254   // Note that we cannot assert(CallResult.hasValue()) here since
1255   // Ret() above only sets the APValue if the current frame doesn't
1256   // have a caller set.
1257   if (Interpret(S, CallResult)) {
1258     NewFrame.release(); // Frame was delete'd already.
1259     assert(S.Current == FrameBefore);
1260     return true;
1261   }
1262 
1263   // Interpreting the function failed somehow. Reset to
1264   // previous state.
1265   S.Current = FrameBefore;
1266   return false;
1267 }
1268 
1269 bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
1270               uint32_t VarArgSize) {
1271   assert(Func->hasThisPointer());
1272   assert(Func->isVirtual());
1273   size_t ArgSize = Func->getArgSize() + VarArgSize;
1274   size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1275   Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1276 
1277   const CXXRecordDecl *DynamicDecl = nullptr;
1278   {
1279     Pointer TypePtr = ThisPtr;
1280     while (TypePtr.isBaseClass())
1281       TypePtr = TypePtr.getBase();
1282 
1283     QualType DynamicType = TypePtr.getType();
1284     if (DynamicType->isPointerType() || DynamicType->isReferenceType())
1285       DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
1286     else
1287       DynamicDecl = DynamicType->getAsCXXRecordDecl();
1288   }
1289   assert(DynamicDecl);
1290 
1291   const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
1292   const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
1293   const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
1294       DynamicDecl, StaticDecl, InitialFunction);
1295 
1296   if (Overrider != InitialFunction) {
1297     // DR1872: An instantiated virtual constexpr function can't be called in a
1298     // constant expression (prior to C++20). We can still constant-fold such a
1299     // call.
1300     if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
1301       const Expr *E = S.Current->getExpr(OpPC);
1302       S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
1303     }
1304 
1305     Func = S.getContext().getOrCreateFunction(Overrider);
1306 
1307     const CXXRecordDecl *ThisFieldDecl =
1308         ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
1309     if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
1310       // If the function we call is further DOWN the hierarchy than the
1311       // FieldDesc of our pointer, walk up the hierarchy of this field as far
1312       // as we can.
1313       while (ThisPtr.isBaseClass())
1314         ThisPtr = ThisPtr.getBase();
1315     }
1316   }
1317 
1318   if (!Call(S, OpPC, Func, VarArgSize))
1319     return false;
1320 
1321   // Covariant return types. The return type of Overrider is a pointer
1322   // or reference to a class type.
1323   if (Overrider != InitialFunction &&
1324       Overrider->getReturnType()->isPointerOrReferenceType() &&
1325       InitialFunction->getReturnType()->isPointerOrReferenceType()) {
1326     QualType OverriderPointeeType =
1327         Overrider->getReturnType()->getPointeeType();
1328     QualType InitialPointeeType =
1329         InitialFunction->getReturnType()->getPointeeType();
1330     // We've called Overrider above, but calling code expects us to return what
1331     // InitialFunction returned. According to the rules for covariant return
1332     // types, what InitialFunction returns needs to be a base class of what
1333     // Overrider returns. So, we need to do an upcast here.
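    // Illustrative: if Base::clone() returns Base* and Derived::clone()
    // returns Derived*, the Derived* produced by the override is adjusted back
    // to its Base subobject here.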
1334     unsigned Offset = S.getContext().collectBaseOffset(
1335         InitialPointeeType->getAsRecordDecl(),
1336         OverriderPointeeType->getAsRecordDecl());
1337     return GetPtrBasePop(S, OpPC, Offset);
1338   }
1339 
1340   return true;
1341 }
1342 
1343 bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
1344             const CallExpr *CE, uint32_t BuiltinID) {
1345   if (S.checkingPotentialConstantExpression())
1346     return false;
1347   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);
1348 
1349   InterpFrame *FrameBefore = S.Current;
1350   S.Current = NewFrame.get();
1351 
1352   if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
1353     NewFrame.release();
1354     return true;
1355   }
1356   S.Current = FrameBefore;
1357   return false;
1358 }
1359 
1360 bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
1361              const CallExpr *CE) {
1362   const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();
1363 
1364   const Function *F = FuncPtr.getFunction();
1365   if (!F) {
1366     const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
1367     S.FFDiag(E, diag::note_constexpr_null_callee)
1368         << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
1369     return false;
1370   }
1371 
1372   if (!FuncPtr.isValid() || !F->getDecl())
1373     return Invalid(S, OpPC);
1374 
1375   assert(F);
1376 
1377   // This happens when the call expression has been cast to
1378   // something else, but we don't support that.
1379   if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
1380       S.Ctx.classify(CE->getType()))
1381     return false;
1382 
1383   // Check argument nullability state.
1384   if (F->hasNonNullAttr()) {
1385     if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
1386       return false;
1387   }
1388 
1389   assert(ArgSize >= F->getWrittenArgSize());
1390   uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();
1391 
1392   // We need to do this explicitly here since we don't have the necessary
1393   // information to do it automatically.
1394   if (F->isThisPointerExplicit())
1395     VarArgSize -= align(primSize(PT_Ptr));
1396 
1397   if (F->isVirtual())
1398     return CallVirt(S, OpPC, F, VarArgSize);
1399 
1400   return Call(S, OpPC, F, VarArgSize);
1401 }
1402 
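// For placement new, verify that the type (and array bound) being constructed
// fits the storage the pointer refers to. Illustrative: constructing a `float`
// into storage declared as `int`, or five elements into a buffer of three, is
// rejected with note_constexpr_placement_new_wrong_type.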
1403 bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
1404                           std::optional<uint64_t> ArraySize) {
1405   const Pointer &Ptr = S.Stk.peek<Pointer>();
1406 
1407   if (!CheckStore(S, OpPC, Ptr))
1408     return false;
1409 
1410   if (!InvalidNewDeleteExpr(S, OpPC, E))
1411     return false;
1412 
1413   const auto *NewExpr = cast<CXXNewExpr>(E);
1414   QualType StorageType = Ptr.getType();
1415 
1416   if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
1417       StorageType->isPointerType()) {
1418     // FIXME: Are there other cases where this is a problem?
1419     StorageType = StorageType->getPointeeType();
1420   }
1421 
1422   const ASTContext &ASTCtx = S.getASTContext();
1423   QualType AllocType;
1424   if (ArraySize) {
1425     AllocType = ASTCtx.getConstantArrayType(
1426         NewExpr->getAllocatedType(),
1427         APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
1428         ArraySizeModifier::Normal, 0);
1429   } else {
1430     AllocType = NewExpr->getAllocatedType();
1431   }
1432 
1433   unsigned StorageSize = 1;
1434   unsigned AllocSize = 1;
1435   if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
1436     AllocSize = CAT->getZExtSize();
1437   if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
1438     StorageSize = CAT->getZExtSize();
1439 
1440   if (AllocSize > StorageSize ||
1441       !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
1442                              ASTCtx.getBaseElementType(StorageType))) {
1443     S.FFDiag(S.Current->getLocation(OpPC),
1444              diag::note_constexpr_placement_new_wrong_type)
1445         << StorageType << AllocType;
1446     return false;
1447   }
1448   return true;
1449 }
1450 
1451 bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
1452   assert(E);
1453   const auto &Loc = S.Current->getSource(OpPC);
1454 
1455   if (S.getLangOpts().CPlusPlus26)
1456     return true;
1457 
1458   if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
1459     const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
1460 
1461     if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
1462       // This is allowed pre-C++26, but only inside an std function.
1463       if (S.Current->isStdFunction())
1464         return true;
1465       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1466           << /*C++26 feature*/ 1 << E->getSourceRange();
1467     } else if (NewExpr->getNumPlacementArgs() == 1 &&
1468                !OperatorNew->isReservedGlobalPlacementOperator()) {
1469       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1470           << /*Unsupported*/ 0 << E->getSourceRange();
1471     } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
1472       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1473           << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
1474     }
1475   } else {
1476     const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
1477     const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
1478     if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
1479       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1480           << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
1481     }
1482   }
1483 
1484   return false;
1485 }
1486 
1487 bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1488                               const FixedPoint &FP) {
1489   const Expr *E = S.Current->getExpr(OpPC);
1490   if (S.checkingForUndefinedBehavior()) {
1491     S.getASTContext().getDiagnostics().Report(
1492         E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
1493         << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1494   }
1495   S.CCEDiag(E, diag::note_constexpr_overflow)
1496       << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1497   return S.noteUndefinedBehavior();
1498 }
1499 
1500 bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1501   const SourceInfo &Loc = S.Current->getSource(OpPC);
1502   S.FFDiag(Loc,
1503            diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1504       << Index;
1505   return false;
1506 }
1507 
1508 bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1509                                 const Pointer &Ptr, unsigned BitWidth) {
1510   if (Ptr.isDummy())
1511     return false;
1512 
1513   const SourceInfo &E = S.Current->getSource(OpPC);
1514   S.CCEDiag(E, diag::note_constexpr_invalid_cast)
1515       << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);
1516 
1517   if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1518     // Only allow casts of object (lvalue) pointers if they are lossless.
1519     if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
1520         BitWidth)
1521       return Invalid(S, OpPC);
1522   }
1523   return true;
1524 }
1525 
1526 bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1527   const Pointer &Ptr = S.Stk.pop<Pointer>();
1528 
1529   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1530     return false;
1531 
1532   S.Stk.push<IntegralAP<false>>(
1533       IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
1534   return true;
1535 }
1536 
1537 bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1538   const Pointer &Ptr = S.Stk.pop<Pointer>();
1539 
1540   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1541     return false;
1542 
1543   S.Stk.push<IntegralAP<true>>(
1544       IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
1545   return true;
1546 }
1547 
1548 // https://github.com/llvm/llvm-project/issues/102513
1549 #if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
1550 #pragma optimize("", off)
1551 #endif
1552 bool Interpret(InterpState &S, APValue &Result) {
1553   // The current stack frame when we started Interpret().
1554   // This is being used by the ops to determine whether
1555   // to return from this function and thus terminate
1556   // interpretation.
1557   const InterpFrame *StartFrame = S.Current;
1558   assert(!S.Current->isRoot());
1559   CodePtr PC = S.Current->getPC();
1560 
1561   // Empty program.
1562   if (!PC)
1563     return true;
1564 
1565   for (;;) {
1566     auto Op = PC.read<Opcode>();
1567     CodePtr OpPC = PC;
1568 
1569     switch (Op) {
1570 #define GET_INTERP
1571 #include "Opcodes.inc"
1572 #undef GET_INTERP
1573     }
1574   }
1575 }
1576 // https://github.com/llvm/llvm-project/issues/102513
1577 #if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
1578 #pragma optimize("", on)
1579 #endif
1580 
1581 } // namespace interp
1582 } // namespace clang
1583