//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/ASTDiagnostic.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/ADT/StringExtras.h"
#include <limits>
#include <vector>

using namespace clang;
using namespace clang::interp;

static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

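// Jmp adjusts the PC unconditionally; Jt and Jf pop a boolean condition off
// the stack and take the jump only if it is true or false, respectively.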
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

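/// Diagnose a read of a variable whose initializer is not known to the
/// evaluator, noting both the access site and the declaration.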
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  if (isa<ParmVarDecl>(D)) {
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified())
    diagnoseNonConstVariable(S, OpPC, D);
  else if (const auto *VD = dyn_cast<VarDecl>(D);
           VD && !VD->getAnyInitializer())
    diagnoseMissingInitializer(S, OpPC, VD);

  return false;
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  if (!S.getLangOpts().CPlusPlus)
    return;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

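/// Check that Ptr designates the active member of every union it is nested
/// in; otherwise diagnose the access to an inactive union member.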
static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // };
  //
  // When activating 'x', we will also activate 'a'. If we now try to read
  // from 'y', we end up in CheckActive because 'y' is not active. In that
  // case, our U will be 'a' (which is not a union). We return here and let
  // later code handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

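/// Check an access to a lifetime-extended temporary with static storage
/// duration. Such a temporary may only be accessed if it is usable in
/// constant expressions or was created during the current evaluation.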
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

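/// A global may only be modified while its own initializer is being
/// evaluated; diagnose any other modification.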
static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

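/// Discard the arguments that were pushed onto the stack for a call to Func,
/// including any variadic arguments, so the operand stack stays balanced,
/// e.g. when one of the pre-call checks fails.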
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (Func->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // their classified parameter types do not match the arguments there.
  if (unsigned BID = Func->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

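/// An extern object that is not initialized cannot be accessed in a constant
/// expression, unless it is the declaration currently being evaluated.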
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}

bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

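/// Check whether reading from the global variable described by Desc is
/// allowed in a constant expression (it is, e.g., for constexpr variables
/// and const integral or enumeration variables); diagnose it otherwise.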
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isBlockPointer())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_past_end)
      << AK << S.Current->getRange(OpPC);
  return false;
}

bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

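/// Check that a const object is not being modified. The object a constructor
/// or destructor is currently running on is exempt: it is writable even
/// though isConst() returns true for it.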
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    S.FFDiag(Loc);
  return false;
}

bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && VD->hasGlobalStorage()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

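/// A weak variable may be overridden at link time, so its value cannot be
/// relied upon during constant evaluation; diagnose reads of it.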
static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isWeak())
    return true;

  const auto *VD = Ptr.getDeclDesc()->asVarDecl();
  assert(VD);
  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(VD->getLocation(), diag::note_declared_at);

  return false;
}

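/// Aggregate check performed before loading a value through Ptr: liveness,
/// constness, initialization, active union member, lifetime and more.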
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}

bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  return true;
}

bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
    return false;
  if (!Ptr.isDummy()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
      return false;
  }
  return true;
}

bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  return true;
}

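/// Check whether F may be called in the current evaluation: it must (with a
/// few exceptions, such as lambda static invokers) be a constexpr function
/// with a body. Emits the appropriate note otherwise.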
bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {

  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
    return false;
  }

  if (F->isConstexpr() && F->hasBody() &&
      (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  const SourceLocation &Loc = S.Current->getLocation(OpPC);
  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      if (DiagDecl->getDefinition())
        S.Note(DiagDecl->getDefinition()->getLocation(),
               diag::note_declared_at);
      else
        S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    }
  } else {
    S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}

bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
  if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_depth_limit_exceeded)
        << S.getLangOpts().ConstexprCallDepth;
    return false;
  }

  return true;
}

bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
  if (!This.isZero())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);

  bool IsImplicit = false;
  if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
    IsImplicit = E->isImplicit();

  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
  else
    S.FFDiag(Loc);

  return false;
}

bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
  if (!MD->isPureVirtual())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
  S.Note(MD->getLocation(), diag::note_declared_at);
  return false;
}

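/// Check the result and status flags of a floating-point operation against
/// the in-effect FP options. NaN results and operations whose outcome
/// depends on the dynamic rounding mode or floating-point environment are
/// diagnosed.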
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // An inexact result means that it depends on the rounding mode. If the
    // requested mode is dynamic, the evaluation cannot be done at compile
    // time.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    // There is no usefully definable result.
    S.FFDiag(E);
    return false;
  }

  return true;
}

bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
  if (S.getLangOpts().CPlusPlus20)
    return true;

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.CCEDiag(E, diag::note_constexpr_new);
  return true;
}

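/// Diagnose a mismatch between the form used to allocate a block and the form
/// used to deallocate it, e.g. (illustrative sketch) pairing an array new
/// with a non-array delete:
///
///   constexpr bool f() {
///     int *p = new int[3];
///     delete p; // should be delete[] p
///     return true;
///   }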
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose;
  // We need to shuffle things around a bit here to get a better diagnostic:
  // the expression we allocated the block for had pointer type (e.g. int*),
  // but for arrays we want the diagnostic to show the array type with the
  // right element count.
  if (D->isArray()) {
    QualType ElemQT = D->getType()->getPointeeType();
    TypeToDiagnose = S.getASTContext().getConstantArrayType(
        ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
        nullptr, ArraySizeModifier::Normal, 0);
  } else
    TypeToDiagnose = D->getType()->getPointeeType();

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}

bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // The two sources we currently allow are new expressions and
  // __builtin_operator_new calls.
  if (isa_and_nonnull<CXXNewExpr>(Source))
    return true;
  if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(S.getASTContext());

  if (Ptr.isTemporary())
    S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
  else
    S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
  return false;
}

/// We already know the given DeclRefExpr is invalid for some reason;
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}

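/// A dummy pointer refers to a declaration the evaluator knows about but has
/// no value for. Reads, increments and decrements are diagnosed via
/// diagnoseUnknownDecl(); assignments are diagnosed as modifying a global.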
bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isDummy())
    return true;

  const Descriptor *Desc = Ptr.getDeclDesc();
  const ValueDecl *D = Desc->asValueDecl();
  if (!D)
    return false;

  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  assert(AK == AK_Assign);
  if (S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_modify_global);
  }
  return false;
}

bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *CE, unsigned ArgSize) {
  auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(OpPC);
        S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
        return false;
      }
    }

    Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
    ++Index;
  }
  return true;
}

// FIXME: This is similar to code we already have in Compiler.cpp.
// I think it makes sense to instead add the field and base destruction stuff
// to the destructor Function itself. Then destroying a record would really
// _just_ be calling its destructor. That would also help with the diagnostic
// difference when the destructor or a field/base fails.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}

bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    Pointer RP(const_cast<Block *>(B));
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
}

void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;

  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  ED->getValueRange(Max, Min);
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}

bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
    }
  }

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent about when it allows a
  // function call while checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

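/// Perform a virtual call: determine the dynamic type of the instance the
/// 'this' pointer refers to, find the overriding function and dispatch to it,
/// adjusting 'this' and the returned pointer (covariant return types) where
/// necessary.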
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than what the
      // FieldDesc of our pointer describes, walk up the hierarchy of this
      // field as far as we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        InitialPointeeType->getAsRecordDecl(),
        OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Offset);
  }

  return true;
}

bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
            const CallExpr *CE, uint32_t BuiltinID) {
  if (S.checkingPotentialConstantExpression())
    return false;
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);

  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
    NewFrame.release();
    return true;
  }
  S.Current = FrameBefore;
  return false;
}

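/// Call through a function pointer popped from the stack, diagnosing null or
/// otherwise invalid callees.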
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();

  const Function *F = FuncPtr.getFunction();
  if (!F) {
    const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
    S.FFDiag(E, diag::note_constexpr_null_callee)
        << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
    return false;
  }

  if (!FuncPtr.isValid() || !F->getDecl())
    return Invalid(S, OpPC);

  assert(F);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
      S.Ctx.classify(CE->getType()))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(primSize(PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, F, VarArgSize);

  return Call(S, OpPC, F, VarArgSize);
}

bool Interpret(InterpState &S, APValue &Result) {
  // The current stack frame when we started Interpret().
  // This is used by the ops to determine whether to return from this
  // function and thus terminate interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

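  // Main dispatch loop: read the next opcode and execute its handler, which
  // is generated into Opcodes.inc.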
  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}

} // namespace interp
} // namespace clang