xref: /llvm-project/clang/lib/AST/ByteCode/Interp.cpp (revision e637a5c9fef866158018dcaecc3c385d157460f5)
1 //===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "Interp.h"
10 #include "Function.h"
11 #include "InterpFrame.h"
12 #include "InterpShared.h"
13 #include "InterpStack.h"
14 #include "Opcode.h"
15 #include "PrimType.h"
16 #include "Program.h"
17 #include "State.h"
18 #include "clang/AST/ASTContext.h"
19 #include "clang/AST/ASTDiagnostic.h"
20 #include "clang/AST/CXXInheritance.h"
21 #include "clang/AST/DeclObjC.h"
22 #include "clang/AST/Expr.h"
23 #include "clang/AST/ExprCXX.h"
24 #include "clang/Basic/DiagnosticSema.h"
25 #include "clang/Basic/TargetInfo.h"
26 #include "llvm/ADT/APSInt.h"
27 #include "llvm/ADT/StringExtras.h"
28 #include <limits>
29 #include <vector>
30 
31 using namespace clang;
32 using namespace clang::interp;
33 
34 static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
35   llvm::report_fatal_error("Interpreter cannot return values");
36 }
37 
38 //===----------------------------------------------------------------------===//
39 // Jmp, Jt, Jf
40 //===----------------------------------------------------------------------===//
41 
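// Jmp adds a signed Offset to the PC unconditionally; Jt and Jf pop a bool
// from the stack and only add the Offset if it is true (Jt) or false (Jf).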
42 static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
43   PC += Offset;
44   return true;
45 }
46 
47 static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
48   if (S.Stk.pop<bool>()) {
49     PC += Offset;
50   }
51   return true;
52 }
53 
54 static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
55   if (!S.Stk.pop<bool>()) {
56     PC += Offset;
57   }
58   return true;
59 }
60 
61 static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
62                                        const ValueDecl *VD) {
63   const SourceInfo &E = S.Current->getSource(OpPC);
64   S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
65   S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
66 }
67 
68 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
69                                      const ValueDecl *VD);
70 static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
71                                 const ValueDecl *D) {
72   const SourceInfo &E = S.Current->getSource(OpPC);
73 
74   if (isa<ParmVarDecl>(D)) {
75     if (S.getLangOpts().CPlusPlus11) {
76       S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
77       S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
78     } else {
79       S.FFDiag(E);
80     }
81     return false;
82   }
83 
84   if (!D->getType().isConstQualified())
85     diagnoseNonConstVariable(S, OpPC, D);
86   else if (const auto *VD = dyn_cast<VarDecl>(D);
87            VD && !VD->getAnyInitializer())
88     diagnoseMissingInitializer(S, OpPC, VD);
89 
90   return false;
91 }
92 
93 static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
94                                      const ValueDecl *VD) {
95   const SourceInfo &Loc = S.Current->getSource(OpPC);
96   if (!S.getLangOpts().CPlusPlus) {
97     S.FFDiag(Loc);
98     return;
99   }
100 
101   if (const auto *VarD = dyn_cast<VarDecl>(VD);
102       VarD && VarD->getType().isConstQualified() &&
103       !VarD->getAnyInitializer()) {
104     diagnoseMissingInitializer(S, OpPC, VD);
105     return;
106   }
107 
108   // Rather random, but this is to match the diagnostic output of the current
109   // interpreter.
110   if (isa<ObjCIvarDecl>(VD))
111     return;
112 
113   if (VD->getType()->isIntegralOrEnumerationType()) {
114     S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
115     S.Note(VD->getLocation(), diag::note_declared_at);
116     return;
117   }
118 
119   S.FFDiag(Loc,
120            S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
121                                        : diag::note_constexpr_ltor_non_integral,
122            1)
123       << VD << VD->getType();
124   S.Note(VD->getLocation(), diag::note_declared_at);
125 }
126 
127 static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
128                         AccessKinds AK) {
129   if (Ptr.isActive())
130     return true;
131 
132   assert(Ptr.inUnion());
133   assert(Ptr.isField() && Ptr.getField());
134 
135   Pointer U = Ptr.getBase();
136   Pointer C = Ptr;
137   while (!U.isRoot() && U.inUnion() && !U.isActive()) {
138     if (U.getField())
139       C = U;
140     U = U.getBase();
141   }
142   assert(C.isField());
143 
144   // Get the inactive field descriptor.
145   const FieldDecl *InactiveField = C.getField();
146   assert(InactiveField);
147 
148   // Consider:
149   // union U {
150   //   struct {
151   //     int x;
152   //     int y;
153   //   } a;
154   // }
155   //
156   // When activating x, we will also activate a. If we now try to read
157   // from y, we will get to CheckActive, because y is not active. In that
158   // case, our U will be 'a' (not a union). We return here and let later code
159   // handle this.
160   if (!U.getFieldDesc()->isUnion())
161     return true;
162 
163   // Find the active field of the union.
164   const Record *R = U.getRecord();
165   assert(R && R->isUnion() && "Not a union");
166 
167   const FieldDecl *ActiveField = nullptr;
168   for (const Record::Field &F : R->fields()) {
169     const Pointer &Field = U.atField(F.Offset);
170     if (Field.isActive()) {
171       ActiveField = Field.getField();
172       break;
173     }
174   }
175 
176   const SourceInfo &Loc = S.Current->getSource(OpPC);
177   S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
178       << AK << InactiveField << !ActiveField << ActiveField;
179   return false;
180 }
181 
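// Illustrative (hypothetical) case for the check below: a lifetime-extended
// temporary with static storage duration, e.g.
//   static int &&R = 1;
// The temporary is a non-const int, so it is not usable in constant
// expressions, and reading it through R in a later constant evaluation is
// diagnosed.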
182 static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
183                            AccessKinds AK) {
184   if (auto ID = Ptr.getDeclID()) {
185     if (!Ptr.isStaticTemporary())
186       return true;
187 
188     const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
189         Ptr.getDeclDesc()->asExpr());
190     if (!MTE)
191       return true;
192 
193     // FIXME(perf): Since we do this check on every Load from a static
194     // temporary, it might make sense to cache the value of the
195     // isUsableInConstantExpressions call.
196     if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
197         Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
198       const SourceInfo &E = S.Current->getSource(OpPC);
199       S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
200       S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
201       return false;
202     }
203   }
204   return true;
205 }
206 
207 static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
208   if (auto ID = Ptr.getDeclID()) {
209     if (!Ptr.isStatic())
210       return true;
211 
212     if (S.P.getCurrentDecl() == ID)
213       return true;
214 
215     S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
216     return false;
217   }
218   return true;
219 }
220 
221 namespace clang {
222 namespace interp {
223 static void popArg(InterpState &S, const Expr *Arg) {
224   PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
225   TYPE_SWITCH(Ty, S.Stk.discard<T>());
226 }
227 
228 void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
229                               const Function *Func) {
230   assert(S.Current);
231   assert(Func);
232 
233   if (Func->isUnevaluatedBuiltin())
234     return;
235 
236   // Some builtin functions require us to only look at the call site, since
237   // the classified parameter types do not match.
238   if (unsigned BID = Func->getBuiltinID();
239       BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
240     const auto *CE =
241         cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
242     for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
243       const Expr *A = CE->getArg(I);
244       popArg(S, A);
245     }
246     return;
247   }
248 
249   if (S.Current->Caller && Func->isVariadic()) {
250     // The CallExpr we're looking for is at the return PC of the current
251     // function, i.e. in the caller.
252     // This code path should be executed very rarely.
253     unsigned NumVarArgs;
254     const Expr *const *Args = nullptr;
255     unsigned NumArgs = 0;
256     const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
257     if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
258       Args = CE->getArgs();
259       NumArgs = CE->getNumArgs();
260     } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
261       Args = CE->getArgs();
262       NumArgs = CE->getNumArgs();
263     } else
264       assert(false && "Can't get arguments from that expression type");
265 
266     assert(NumArgs >= Func->getNumWrittenParams());
267     NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
268                             isa<CXXOperatorCallExpr>(CallSite));
269     for (unsigned I = 0; I != NumVarArgs; ++I) {
270       const Expr *A = Args[NumArgs - 1 - I];
271       popArg(S, A);
272     }
273   }
274 
275   // And in any case, remove the fixed parameters (the non-variadic ones)
276   // at the end.
277   for (PrimType Ty : Func->args_reverse())
278     TYPE_SWITCH(Ty, S.Stk.discard<T>());
279 }
280 
281 bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
282   if (!Ptr.isExtern())
283     return true;
284 
285   if (Ptr.isInitialized() ||
286       (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
287     return true;
288 
289   if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
290     const auto *VD = Ptr.getDeclDesc()->asValueDecl();
291     diagnoseNonConstVariable(S, OpPC, VD);
292   }
293   return false;
294 }
295 
296 bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
297   if (!Ptr.isUnknownSizeArray())
298     return true;
299   const SourceInfo &E = S.Current->getSource(OpPC);
300   S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
301   return false;
302 }
303 
304 bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
305                AccessKinds AK) {
306   if (Ptr.isZero()) {
307     const auto &Src = S.Current->getSource(OpPC);
308 
309     if (Ptr.isField())
310       S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
311     else
312       S.FFDiag(Src, diag::note_constexpr_access_null) << AK;
313 
314     return false;
315   }
316 
317   if (!Ptr.isLive()) {
318     const auto &Src = S.Current->getSource(OpPC);
319 
320     if (Ptr.isDynamic()) {
321       S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
322     } else {
323       bool IsTemp = Ptr.isTemporary();
324       S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;
325 
326       if (IsTemp)
327         S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
328       else
329         S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
330     }
331 
332     return false;
333   }
334 
335   return true;
336 }
337 
338 bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
339   assert(Desc);
340 
341   const auto *D = Desc->asVarDecl();
342   if (!D || !D->hasGlobalStorage())
343     return true;
344 
345   if (D == S.EvaluatingDecl)
346     return true;
347 
348   if (D->isConstexpr())
349     return true;
350 
351   QualType T = D->getType();
352   bool IsConstant = T.isConstant(S.getASTContext());
353   if (T->isIntegralOrEnumerationType()) {
354     if (!IsConstant) {
355       diagnoseNonConstVariable(S, OpPC, D);
356       return false;
357     }
358     return true;
359   }
360 
361   if (IsConstant) {
362     if (S.getLangOpts().CPlusPlus) {
363       S.CCEDiag(S.Current->getLocation(OpPC),
364                 S.getLangOpts().CPlusPlus11
365                     ? diag::note_constexpr_ltor_non_constexpr
366                     : diag::note_constexpr_ltor_non_integral,
367                 1)
368           << D << T;
369       S.Note(D->getLocation(), diag::note_declared_at);
370     } else {
371       S.CCEDiag(S.Current->getLocation(OpPC));
372     }
373     return true;
374   }
375 
376   if (T->isPointerOrReferenceType()) {
377     if (!T->getPointeeType().isConstant(S.getASTContext()) ||
378         !S.getLangOpts().CPlusPlus11) {
379       diagnoseNonConstVariable(S, OpPC, D);
380       return false;
381     }
382     return true;
383   }
384 
385   diagnoseNonConstVariable(S, OpPC, D);
386   return false;
387 }
388 
389 static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
390   if (!Ptr.isBlockPointer())
391     return true;
392   return CheckConstant(S, OpPC, Ptr.getDeclDesc());
393 }
394 
395 bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
396                CheckSubobjectKind CSK) {
397   if (!Ptr.isZero())
398     return true;
399   const SourceInfo &Loc = S.Current->getSource(OpPC);
400   S.FFDiag(Loc, diag::note_constexpr_null_subobject)
401       << CSK << S.Current->getRange(OpPC);
402 
403   return false;
404 }
405 
406 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
407                 AccessKinds AK) {
408   if (!Ptr.isOnePastEnd())
409     return true;
410   const SourceInfo &Loc = S.Current->getSource(OpPC);
411   S.FFDiag(Loc, diag::note_constexpr_access_past_end)
412       << AK << S.Current->getRange(OpPC);
413   return false;
414 }
415 
416 bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
417                 CheckSubobjectKind CSK) {
418   if (!Ptr.isElementPastEnd())
419     return true;
420   const SourceInfo &Loc = S.Current->getSource(OpPC);
421   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
422       << CSK << S.Current->getRange(OpPC);
423   return false;
424 }
425 
426 bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
427                     CheckSubobjectKind CSK) {
428   if (!Ptr.isOnePastEnd())
429     return true;
430 
431   const SourceInfo &Loc = S.Current->getSource(OpPC);
432   S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
433       << CSK << S.Current->getRange(OpPC);
434   return false;
435 }
436 
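// Illustrative invalid downcast rejected below (hypothetical types):
//   struct A {}; struct B : A {};
//   constexpr A a;
//   constexpr const B &b = static_cast<const B &>(a); // 'a' is not a 'B'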
437 bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
438                    uint32_t Offset) {
439   uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
440   uint32_t PtrOffset = Ptr.getByteOffset();
441 
442   // We subtract Offset from PtrOffset. The result must be at least
443   // MinOffset.
444   if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
445     return true;
446 
447   const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
448   QualType TargetQT = E->getType()->getPointeeType();
449   QualType MostDerivedQT = Ptr.getDeclPtr().getType();
450 
451   S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
452       << MostDerivedQT << TargetQT;
453 
454   return false;
455 }
456 
457 bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
458   assert(Ptr.isLive() && "Pointer is not live");
459   if (!Ptr.isConst() || Ptr.isMutable())
460     return true;
461 
462   // The This pointer is writable in constructors and destructors,
463   // even if isConst() returns true.
464   // TODO(perf): We could be hitting this code path quite a lot in complex
465   // constructors. Is there a better way to do this?
466   if (S.Current->getFunction()) {
467     for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
468       if (const Function *Func = Frame->getFunction();
469           Func && (Func->isConstructor() || Func->isDestructor()) &&
470           Ptr.block() == Frame->getThis().block()) {
471         return true;
472       }
473     }
474   }
475 
476   if (!Ptr.isBlockPointer())
477     return false;
478 
479   const QualType Ty = Ptr.getType();
480   const SourceInfo &Loc = S.Current->getSource(OpPC);
481   S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
482   return false;
483 }
484 
485 bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
486   assert(Ptr.isLive() && "Pointer is not live");
487   if (!Ptr.isMutable())
488     return true;
489 
490   // From C++14 onwards, it is permitted to read a mutable member whose
491   // lifetime began within the evaluation.
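  // Illustrative (hypothetical) case: given 'struct S { mutable int M = 1; }',
  // a local S constructed during the current evaluation may have M read,
  // whereas reading M of a preexisting constexpr S object is diagnosed below.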
492   if (S.getLangOpts().CPlusPlus14 &&
493       Ptr.block()->getEvalID() == S.Ctx.getEvalID())
494     return true;
495 
496   const SourceInfo &Loc = S.Current->getSource(OpPC);
497   const FieldDecl *Field = Ptr.getField();
498   S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
499   S.Note(Field->getLocation(), diag::note_declared_at);
500   return false;
501 }
502 
503 bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
504                    AccessKinds AK) {
505   assert(Ptr.isLive());
506 
507   // FIXME: This check here might be kinda expensive. Maybe it would be better
508   // to have another field in InlineDescriptor for this?
509   if (!Ptr.isBlockPointer())
510     return true;
511 
512   QualType PtrType = Ptr.getType();
513   if (!PtrType.isVolatileQualified())
514     return true;
515 
516   const SourceInfo &Loc = S.Current->getSource(OpPC);
517   if (S.getLangOpts().CPlusPlus)
518     S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
519   else
520     S.FFDiag(Loc);
521   return false;
522 }
523 
524 bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
525                       AccessKinds AK) {
526   assert(Ptr.isLive());
527 
528   if (Ptr.isInitialized())
529     return true;
530 
531   if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
532       VD && VD->hasGlobalStorage()) {
533     const SourceInfo &Loc = S.Current->getSource(OpPC);
534     if (VD->getAnyInitializer()) {
535       S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
536       S.Note(VD->getLocation(), diag::note_declared_at);
537     } else {
538       diagnoseMissingInitializer(S, OpPC, VD);
539     }
540     return false;
541   }
542 
543   if (!S.checkingPotentialConstantExpression()) {
544     S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
545         << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
546   }
547   return false;
548 }
549 
550 bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
551   if (Ptr.isInitialized())
552     return true;
553 
554   assert(S.getLangOpts().CPlusPlus);
555   const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
556   if ((!VD->hasConstantInitialization() &&
557        VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
558       (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
559        !VD->hasICEInitializer(S.getASTContext()))) {
560     const SourceInfo &Loc = S.Current->getSource(OpPC);
561     S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
562     S.Note(VD->getLocation(), diag::note_declared_at);
563   }
564   return false;
565 }
566 
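// Illustrative (hypothetical) weak variable: its initializer may be overridden
// at link time, so its value cannot be read in a constant expression:
//   __attribute__((weak)) extern const int W = 3;
//   constexpr int I = W; // diagnosed below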
567 static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
568   if (!Ptr.isWeak())
569     return true;
570 
571   const auto *VD = Ptr.getDeclDesc()->asVarDecl();
572   assert(VD);
573   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
574       << VD;
575   S.Note(VD->getLocation(), diag::note_declared_at);
576 
577   return false;
578 }
579 
580 bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
581                AccessKinds AK) {
582   if (!CheckLive(S, OpPC, Ptr, AK))
583     return false;
584   if (!CheckConstant(S, OpPC, Ptr))
585     return false;
586   if (!CheckDummy(S, OpPC, Ptr, AK))
587     return false;
588   if (!CheckExtern(S, OpPC, Ptr))
589     return false;
590   if (!CheckRange(S, OpPC, Ptr, AK))
591     return false;
592   if (!CheckActive(S, OpPC, Ptr, AK))
593     return false;
594   if (!CheckInitialized(S, OpPC, Ptr, AK))
595     return false;
596   if (!CheckTemporary(S, OpPC, Ptr, AK))
597     return false;
598   if (!CheckWeak(S, OpPC, Ptr))
599     return false;
600   if (!CheckMutable(S, OpPC, Ptr))
601     return false;
602   if (!CheckVolatile(S, OpPC, Ptr, AK))
603     return false;
604   return true;
605 }
606 
607 /// This is not used by any of the opcodes directly. It's used by
608 /// EvalEmitter to do the final lvalue-to-rvalue conversion.
609 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
610   if (!CheckLive(S, OpPC, Ptr, AK_Read))
611     return false;
612   if (!CheckConstant(S, OpPC, Ptr))
613     return false;
614 
615   if (!CheckDummy(S, OpPC, Ptr, AK_Read))
616     return false;
617   if (!CheckExtern(S, OpPC, Ptr))
618     return false;
619   if (!CheckRange(S, OpPC, Ptr, AK_Read))
620     return false;
621   if (!CheckActive(S, OpPC, Ptr, AK_Read))
622     return false;
623   if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
624     return false;
625   if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
626     return false;
627   if (!CheckWeak(S, OpPC, Ptr))
628     return false;
629   if (!CheckMutable(S, OpPC, Ptr))
630     return false;
631   return true;
632 }
633 
634 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
635   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
636     return false;
637   if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
638     return false;
639   if (!CheckExtern(S, OpPC, Ptr))
640     return false;
641   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
642     return false;
643   if (!CheckGlobal(S, OpPC, Ptr))
644     return false;
645   if (!CheckConst(S, OpPC, Ptr))
646     return false;
647   return true;
648 }
649 
650 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
651   if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
652     return false;
653   if (!Ptr.isDummy()) {
654     if (!CheckExtern(S, OpPC, Ptr))
655       return false;
656     if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
657       return false;
658   }
659   return true;
660 }
661 
662 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
663   if (!CheckLive(S, OpPC, Ptr, AK_Assign))
664     return false;
665   if (!CheckRange(S, OpPC, Ptr, AK_Assign))
666     return false;
667   return true;
668 }
669 
670 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {
671 
672   if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
673     const SourceLocation &Loc = S.Current->getLocation(OpPC);
674     S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
675     return false;
676   }
677 
678   if (F->isConstexpr() && F->hasBody() &&
679       (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
680     return true;
681 
682   // Implicitly constexpr.
683   if (F->isLambdaStaticInvoker())
684     return true;
685 
686   const SourceLocation &Loc = S.Current->getLocation(OpPC);
687   if (S.getLangOpts().CPlusPlus11) {
688     const FunctionDecl *DiagDecl = F->getDecl();
689 
690     // Invalid decls have been diagnosed before.
691     if (DiagDecl->isInvalidDecl())
692       return false;
693 
694     // If this function is not constexpr because it is an inherited
695     // non-constexpr constructor, diagnose that directly.
696     const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
697     if (CD && CD->isInheritingConstructor()) {
698       const auto *Inherited = CD->getInheritedConstructor().getConstructor();
699       if (!Inherited->isConstexpr())
700         DiagDecl = CD = Inherited;
701     }
702 
703     // FIXME: If DiagDecl is an implicitly-declared special member function
704     // or an inheriting constructor, we should be much more explicit about why
705     // it's not constexpr.
706     if (CD && CD->isInheritingConstructor()) {
707       S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
708           << CD->getInheritedConstructor().getConstructor()->getParent();
709       S.Note(DiagDecl->getLocation(), diag::note_declared_at);
710     } else {
711       // Don't emit anything if the function isn't defined and we're checking
712       // for a constant expression. It might be defined at the point we're
713       // actually calling it.
714       bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
715       if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
716           S.checkingPotentialConstantExpression())
717         return false;
718 
719       // If the declaration is defined, declared 'constexpr' _and_ has a body,
720       // the below diagnostic doesn't add anything useful.
721       if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
722           DiagDecl->hasBody())
723         return false;
724 
725       S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
726           << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;
727 
728       if (DiagDecl->getDefinition())
729         S.Note(DiagDecl->getDefinition()->getLocation(),
730                diag::note_declared_at);
731       else
732         S.Note(DiagDecl->getLocation(), diag::note_declared_at);
733     }
734   } else {
735     S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
736   }
737 
738   return false;
739 }
740 
741 bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
742   if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
743     S.FFDiag(S.Current->getSource(OpPC),
744              diag::note_constexpr_depth_limit_exceeded)
745         << S.getLangOpts().ConstexprCallDepth;
746     return false;
747   }
748 
749   return true;
750 }
751 
752 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
753   if (!This.isZero())
754     return true;
755 
756   const SourceInfo &Loc = S.Current->getSource(OpPC);
757 
758   bool IsImplicit = false;
759   if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
760     IsImplicit = E->isImplicit();
761 
762   if (S.getLangOpts().CPlusPlus11)
763     S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
764   else
765     S.FFDiag(Loc);
766 
767   return false;
768 }
769 
770 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
771   if (!MD->isPureVirtual())
772     return true;
773   const SourceInfo &E = S.Current->getSource(OpPC);
774   S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
775   S.Note(MD->getLocation(), diag::note_declared_at);
776   return false;
777 }
778 
779 bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
780                       APFloat::opStatus Status, FPOptions FPO) {
781   // [expr.pre]p4:
782   //   If during the evaluation of an expression, the result is not
783   //   mathematically defined [...], the behavior is undefined.
784   // FIXME: C++ rules require us to not conform to IEEE 754 here.
785   if (Result.isNan()) {
786     const SourceInfo &E = S.Current->getSource(OpPC);
787     S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
788         << /*NaN=*/true << S.Current->getRange(OpPC);
789     return S.noteUndefinedBehavior();
790   }
791 
792   // In a constant context, assume that any dynamic rounding mode or FP
793   // exception state matches the default floating-point environment.
794   if (S.inConstantContext())
795     return true;
796 
797   if ((Status & APFloat::opInexact) &&
798       FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
799     // Inexact result means that it depends on rounding mode. If the requested
800     // mode is dynamic, the evaluation cannot be performed at compile time.
801     const SourceInfo &E = S.Current->getSource(OpPC);
802     S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
803     return false;
804   }
805 
806   if ((Status != APFloat::opOK) &&
807       (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
808        FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
809        FPO.getAllowFEnvAccess())) {
810     const SourceInfo &E = S.Current->getSource(OpPC);
811     S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
812     return false;
813   }
814 
815   if ((Status & APFloat::opStatus::opInvalidOp) &&
816       FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
817     const SourceInfo &E = S.Current->getSource(OpPC);
818     // There is no usefully definable result.
819     S.FFDiag(E);
820     return false;
821   }
822 
823   return true;
824 }
825 
826 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
827   if (S.getLangOpts().CPlusPlus20)
828     return true;
829 
830   const SourceInfo &E = S.Current->getSource(OpPC);
831   S.CCEDiag(E, diag::note_constexpr_new);
832   return true;
833 }
834 
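// Illustrative mismatch diagnosed below (hypothetical code; constexpr
// allocation requires C++20):
//   constexpr bool f() { int *P = new int[3]; delete P; return true; }
// The storage is allocated with array 'new[]' but released with non-array
// 'delete'.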
835 bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
836                          DynamicAllocator::Form AllocForm,
837                          DynamicAllocator::Form DeleteForm, const Descriptor *D,
838                          const Expr *NewExpr) {
839   if (AllocForm == DeleteForm)
840     return true;
841 
842   QualType TypeToDiagnose;
843   // We need to shuffle things around a bit here to get a better diagnostic,
844   // because the expression we allocated the block for was of type int*,
845   // but we want to get the array size right.
846   if (D->isArray()) {
847     QualType ElemQT = D->getType()->getPointeeType();
848     TypeToDiagnose = S.getASTContext().getConstantArrayType(
849         ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
850         nullptr, ArraySizeModifier::Normal, 0);
851   } else
852     TypeToDiagnose = D->getType()->getPointeeType();
853 
854   const SourceInfo &E = S.Current->getSource(OpPC);
855   S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
856       << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
857       << TypeToDiagnose;
858   S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
859       << NewExpr->getSourceRange();
860   return false;
861 }
862 
863 bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
864                        const Pointer &Ptr) {
865   // The two sources we currently allow are new expressions and
866   // __builtin_operator_new calls.
867   if (isa_and_nonnull<CXXNewExpr>(Source))
868     return true;
869   if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
870       CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
871     return true;
872 
873   // Whatever this is, we didn't heap allocate it.
874   const SourceInfo &Loc = S.Current->getSource(OpPC);
875   S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
876       << Ptr.toDiagnosticString(S.getASTContext());
877 
878   if (Ptr.isTemporary())
879     S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
880   else
881     S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
882   return false;
883 }
884 
885 /// We already know the given DeclRefExpr is invalid for some reason,
886 /// now figure out why and print appropriate diagnostics.
887 bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
888   const ValueDecl *D = DR->getDecl();
889   return diagnoseUnknownDecl(S, OpPC, D);
890 }
891 
892 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
893                 AccessKinds AK) {
894   if (!Ptr.isDummy())
895     return true;
896 
897   const Descriptor *Desc = Ptr.getDeclDesc();
898   const ValueDecl *D = Desc->asValueDecl();
899   if (!D)
900     return false;
901 
902   if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
903     return diagnoseUnknownDecl(S, OpPC, D);
904 
905   assert(AK == AK_Assign);
906   if (S.getLangOpts().CPlusPlus14) {
907     const SourceInfo &E = S.Current->getSource(OpPC);
908     S.FFDiag(E, diag::note_constexpr_modify_global);
909   }
910   return false;
911 }
912 
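// Illustrative (hypothetical) violation of a nonnull attribute:
//   [[gnu::nonnull]] constexpr int deref(int *P) { return *P; }
//   constexpr int I = deref(nullptr); // null argument diagnosed below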
913 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
914                       const CallExpr *CE, unsigned ArgSize) {
915   auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
916   auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
917   unsigned Offset = 0;
918   unsigned Index = 0;
919   for (const Expr *Arg : Args) {
920     if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
921       const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
922       if (ArgPtr.isZero()) {
923         const SourceLocation &Loc = S.Current->getLocation(OpPC);
924         S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
925         return false;
926       }
927     }
928 
929     Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
930     ++Index;
931   }
932   return true;
933 }
934 
935 // FIXME: This is similar to code we already have in Compiler.cpp.
936 // I think it makes sense to instead add the field and base destruction stuff
937 // to the destructor Function itself. Then destroying a record would really
938 // _just_ be calling its destructor. That would also help with the diagnostic
939 // difference when the destructor or a field/base fails.
940 static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
941                                 const Pointer &BasePtr,
942                                 const Descriptor *Desc) {
943   assert(Desc->isRecord());
944   const Record *R = Desc->ElemRecord;
945   assert(R);
946 
947   if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
948     const SourceInfo &Loc = S.Current->getSource(OpPC);
949     S.FFDiag(Loc, diag::note_constexpr_double_destroy);
950     return false;
951   }
952 
953   // Destructor of this record.
954   if (const CXXDestructorDecl *Dtor = R->getDestructor();
955       Dtor && !Dtor->isTrivial()) {
956     const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
957     if (!DtorFunc)
958       return false;
959 
960     S.Stk.push<Pointer>(BasePtr);
961     if (!Call(S, OpPC, DtorFunc, 0))
962       return false;
963   }
964   return true;
965 }
966 
967 bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
968   assert(B);
969   const Descriptor *Desc = B->getDescriptor();
970 
971   if (Desc->isPrimitive() || Desc->isPrimitiveArray())
972     return true;
973 
974   assert(Desc->isRecord() || Desc->isCompositeArray());
975 
976   if (Desc->isCompositeArray()) {
977     const Descriptor *ElemDesc = Desc->ElemDesc;
978     assert(ElemDesc->isRecord());
979 
980     Pointer RP(const_cast<Block *>(B));
981     for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
982       if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
983         return false;
984     }
985     return true;
986   }
987 
988   assert(Desc->isRecord());
989   return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
990 }
991 
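// Illustrative out-of-range value for an unscoped enum (hypothetical):
//   enum E { A = 0, B = 1 };           // representable range is [0, 1]
//   constexpr E V = static_cast<E>(4); // 4 is outside that range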
992 void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
993                        const APSInt &Value) {
994   llvm::APInt Min;
995   llvm::APInt Max;
996 
997   if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
998     return;
999 
1000   ED->getValueRange(Max, Min);
1001   --Max;
1002 
1003   if (ED->getNumNegativeBits() &&
1004       (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
1005     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1006     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1007         << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
1008         << ED;
1009   } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
1010     const SourceLocation &Loc = S.Current->getLocation(OpPC);
1011     S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
1012         << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
1013         << ED;
1014   }
1015 }
1016 
1017 bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
1018   assert(T);
1019   assert(!S.getLangOpts().CPlusPlus23);
1020 
1021   // C++1y: A constant initializer for an object o [...] may also invoke
1022   // constexpr constructors for o and its subobjects even if those objects
1023   // are of non-literal class types.
1024   //
1025   // C++11 missed this detail for aggregates, so classes like this:
1026   //   struct foo_t { union { int i; volatile int j; } u; };
1027   // are not (obviously) initializable like so:
1028   //   __attribute__((__require_constant_initialization__))
1029   //   static const foo_t x = {{0}};
1030   // because "i" is a subobject with non-literal initialization (due to the
1031   // volatile member of the union). See:
1032   //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
1033   // Therefore, we use the C++1y behavior.
1034 
1035   if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
1036       S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
1037     return true;
1038   }
1039 
1040   const Expr *E = S.Current->getExpr(OpPC);
1041   if (S.getLangOpts().CPlusPlus11)
1042     S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
1043   else
1044     S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
1045   return false;
1046 }
1047 
1048 static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
1049                              const Pointer &ThisPtr) {
1050   assert(Func->isConstructor());
1051 
1052   const Descriptor *D = ThisPtr.getFieldDesc();
1053 
1054   // FIXME: I think this case is not 100% correct. E.g. a pointer into a
1055   // subobject of a composite array.
1056   if (!D->ElemRecord)
1057     return true;
1058 
1059   if (D->ElemRecord->getNumVirtualBases() == 0)
1060     return true;
1061 
1062   S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
1063       << Func->getParentDecl();
1064   return false;
1065 }
1066 
1067 bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
1068              uint32_t VarArgSize) {
1069   if (Func->hasThisPointer()) {
1070     size_t ArgSize = Func->getArgSize() + VarArgSize;
1071     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1072     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1073 
1074     // If the current function is a lambda static invoker and
1075     // the function we're about to call is a lambda call operator,
1076     // skip the CheckInvoke, since the ThisPtr is a null pointer
1077     // anyway.
1078     if (!(S.Current->getFunction() &&
1079           S.Current->getFunction()->isLambdaStaticInvoker() &&
1080           Func->isLambdaCallOperator())) {
1081       if (!CheckInvoke(S, OpPC, ThisPtr))
1082         return false;
1083     }
1084 
1085     if (S.checkingPotentialConstantExpression())
1086       return false;
1087   }
1088 
1089   if (!CheckCallable(S, OpPC, Func))
1090     return false;
1091 
1092   if (!CheckCallDepth(S, OpPC))
1093     return false;
1094 
1095   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1096   InterpFrame *FrameBefore = S.Current;
1097   S.Current = NewFrame.get();
1098 
1099   APValue CallResult;
1100   // Note that we cannot assert(CallResult.hasValue()) here since
1101   // Ret() above only sets the APValue if the current frame doesn't
1102   // have a caller set.
1103   if (Interpret(S, CallResult)) {
1104     NewFrame.release(); // Frame was delete'd already.
1105     assert(S.Current == FrameBefore);
1106     return true;
1107   }
1108 
1109   // Interpreting the function failed somehow. Reset to
1110   // previous state.
1111   S.Current = FrameBefore;
1112   return false;
1113 }
1114 
1115 bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
1116           uint32_t VarArgSize) {
1117   assert(Func);
1118   auto cleanup = [&]() -> bool {
1119     cleanupAfterFunctionCall(S, OpPC, Func);
1120     return false;
1121   };
1122 
1123   if (Func->hasThisPointer()) {
1124     size_t ArgSize = Func->getArgSize() + VarArgSize;
1125     size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1126 
1127     const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1128 
1129     // If the current function is a lambda static invoker and
1130     // the function we're about to call is a lambda call operator,
1131     // skip the CheckInvoke, since the ThisPtr is a null pointer
1132     // anyway.
1133     if (S.Current->getFunction() &&
1134         S.Current->getFunction()->isLambdaStaticInvoker() &&
1135         Func->isLambdaCallOperator()) {
1136       assert(ThisPtr.isZero());
1137     } else {
1138       if (!CheckInvoke(S, OpPC, ThisPtr))
1139         return cleanup();
1140     }
1141 
1142     if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
1143       return false;
1144   }
1145 
1146   if (!CheckCallable(S, OpPC, Func))
1147     return cleanup();
1148 
1149   // FIXME: The isConstructor() check here is not always right. The current
1150   // constant evaluator is somewhat inconsistent in when it allows a function
1151   // call when checking for a constant expression.
1152   if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
1153       !Func->isConstructor())
1154     return cleanup();
1155 
1156   if (!CheckCallDepth(S, OpPC))
1157     return cleanup();
1158 
1159   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
1160   InterpFrame *FrameBefore = S.Current;
1161   S.Current = NewFrame.get();
1162 
1163   InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
1164   APValue CallResult;
1165   // Note that we cannot assert(CallResult.hasValue()) here since
1166   // Ret() above only sets the APValue if the current frame doesn't
1167   // have a caller set.
1168   if (Interpret(S, CallResult)) {
1169     NewFrame.release(); // Frame was delete'd already.
1170     assert(S.Current == FrameBefore);
1171     return true;
1172   }
1173 
1174   // Interpreting the function failed somehow. Reset to
1175   // previous state.
1176   S.Current = FrameBefore;
1177   return false;
1178 }
1179 
1180 bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
1181               uint32_t VarArgSize) {
1182   assert(Func->hasThisPointer());
1183   assert(Func->isVirtual());
1184   size_t ArgSize = Func->getArgSize() + VarArgSize;
1185   size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
1186   Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);
1187 
1188   const CXXRecordDecl *DynamicDecl = nullptr;
1189   {
1190     Pointer TypePtr = ThisPtr;
1191     while (TypePtr.isBaseClass())
1192       TypePtr = TypePtr.getBase();
1193 
1194     QualType DynamicType = TypePtr.getType();
1195     if (DynamicType->isPointerType() || DynamicType->isReferenceType())
1196       DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
1197     else
1198       DynamicDecl = DynamicType->getAsCXXRecordDecl();
1199   }
1200   assert(DynamicDecl);
1201 
1202   const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
1203   const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
1204   const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
1205       DynamicDecl, StaticDecl, InitialFunction);
1206 
1207   if (Overrider != InitialFunction) {
1208     // DR1872: An instantiated virtual constexpr function can't be called in a
1209     // constant expression (prior to C++20). We can still constant-fold such a
1210     // call.
1211     if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
1212       const Expr *E = S.Current->getExpr(OpPC);
1213       S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
1214     }
1215 
1216     Func = S.getContext().getOrCreateFunction(Overrider);
1217 
1218     const CXXRecordDecl *ThisFieldDecl =
1219         ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
1220     if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
1221       // If the function we call is further DOWN the hierarchy than the
1222       // FieldDesc of our pointer, just go up the hierarchy of this field
1223       // as far as we can.
1224       while (ThisPtr.isBaseClass())
1225         ThisPtr = ThisPtr.getBase();
1226     }
1227   }
1228 
1229   if (!Call(S, OpPC, Func, VarArgSize))
1230     return false;
1231 
1232   // Covariant return types. The return type of Overrider is a pointer
1233   // or reference to a class type.
1234   if (Overrider != InitialFunction &&
1235       Overrider->getReturnType()->isPointerOrReferenceType() &&
1236       InitialFunction->getReturnType()->isPointerOrReferenceType()) {
1237     QualType OverriderPointeeType =
1238         Overrider->getReturnType()->getPointeeType();
1239     QualType InitialPointeeType =
1240         InitialFunction->getReturnType()->getPointeeType();
1241     // We've called Overrider above, but calling code expects us to return what
1242     // InitialFunction returned. According to the rules for covariant return
1243     // types, what InitialFunction returns needs to be a base class of what
1244     // Overrider returns. So, we need to do an upcast here.
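    // Hypothetical covariant override:
    //   struct B { virtual B *get(); };
    //   struct D : B { D *get() override; };
    // Here the Overrider (D::get) returns D*, while the caller of the
    // InitialFunction (B::get) expects a B*, so the result is upcast below.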
1245     unsigned Offset = S.getContext().collectBaseOffset(
1246         InitialPointeeType->getAsRecordDecl(),
1247         OverriderPointeeType->getAsRecordDecl());
1248     return GetPtrBasePop(S, OpPC, Offset);
1249   }
1250 
1251   return true;
1252 }
1253 
1254 bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
1255             const CallExpr *CE, uint32_t BuiltinID) {
1256   if (S.checkingPotentialConstantExpression())
1257     return false;
1258   auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);
1259 
1260   InterpFrame *FrameBefore = S.Current;
1261   S.Current = NewFrame.get();
1262 
1263   if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
1264     NewFrame.release();
1265     return true;
1266   }
1267   S.Current = FrameBefore;
1268   return false;
1269 }
1270 
1271 bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
1272              const CallExpr *CE) {
1273   const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();
1274 
1275   const Function *F = FuncPtr.getFunction();
1276   if (!F) {
1277     const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
1278     S.FFDiag(E, diag::note_constexpr_null_callee)
1279         << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
1280     return false;
1281   }
1282 
1283   if (!FuncPtr.isValid() || !F->getDecl())
1284     return Invalid(S, OpPC);
1285 
1286   assert(F);
1287 
1288   // This happens when the call expression has been cast to
1289   // something else, but we don't support that.
1290   if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
1291       S.Ctx.classify(CE->getType()))
1292     return false;
1293 
1294   // Check argument nullability state.
1295   if (F->hasNonNullAttr()) {
1296     if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
1297       return false;
1298   }
1299 
1300   assert(ArgSize >= F->getWrittenArgSize());
1301   uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();
1302 
1303   // We need to do this explicitly here since we don't have the necessary
1304   // information to do it automatically.
1305   if (F->isThisPointerExplicit())
1306     VarArgSize -= align(primSize(PT_Ptr));
1307 
1308   if (F->isVirtual())
1309     return CallVirt(S, OpPC, F, VarArgSize);
1310 
1311   return Call(S, OpPC, F, VarArgSize);
1312 }
1313 
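// Illustrative mismatch diagnosed below (hypothetical, constexpr placement
// new): constructing a 'float' in storage that was allocated for an 'int', or
// constructing more array elements than the storage provides.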
1314 bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
1315                           std::optional<uint64_t> ArraySize) {
1316   const Pointer &Ptr = S.Stk.peek<Pointer>();
1317 
1318   if (!CheckStore(S, OpPC, Ptr))
1319     return false;
1320 
1321   if (!InvalidNewDeleteExpr(S, OpPC, E))
1322     return false;
1323 
1324   const auto *NewExpr = cast<CXXNewExpr>(E);
1325   QualType StorageType = Ptr.getType();
1326 
1327   if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
1328       StorageType->isPointerType()) {
1329     // FIXME: Are there other cases where this is a problem?
1330     StorageType = StorageType->getPointeeType();
1331   }
1332 
1333   const ASTContext &ASTCtx = S.getASTContext();
1334   QualType AllocType;
1335   if (ArraySize) {
1336     AllocType = ASTCtx.getConstantArrayType(
1337         NewExpr->getAllocatedType(),
1338         APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
1339         ArraySizeModifier::Normal, 0);
1340   } else {
1341     AllocType = NewExpr->getAllocatedType();
1342   }
1343 
1344   unsigned StorageSize = 1;
1345   unsigned AllocSize = 1;
1346   if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
1347     AllocSize = CAT->getZExtSize();
1348   if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
1349     StorageSize = CAT->getZExtSize();
1350 
1351   if (AllocSize > StorageSize ||
1352       !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
1353                              ASTCtx.getBaseElementType(StorageType))) {
1354     S.FFDiag(S.Current->getLocation(OpPC),
1355              diag::note_constexpr_placement_new_wrong_type)
1356         << StorageType << AllocType;
1357     return false;
1358   }
1359   return true;
1360 }
1361 
1362 bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
1363   assert(E);
1364   const auto &Loc = S.Current->getSource(OpPC);
1365 
1366   if (S.getLangOpts().CPlusPlus26)
1367     return true;
1368 
1369   if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
1370     const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();
1371 
1372     if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
1373       // This is allowed pre-C++26, but only inside an std function.
1374       if (S.Current->isStdFunction())
1375         return true;
1376       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1377           << /*C++26 feature*/ 1 << E->getSourceRange();
1378     } else if (NewExpr->getNumPlacementArgs() == 1 &&
1379                !OperatorNew->isReservedGlobalPlacementOperator()) {
1380       S.FFDiag(Loc, diag::note_constexpr_new_placement)
1381           << /*Unsupported*/ 0 << E->getSourceRange();
1382     } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
1383       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1384           << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
1385     }
1386   } else {
1387     const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
1388     const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
1389     if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
1390       S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
1391           << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
1392     }
1393   }
1394 
1395   return false;
1396 }
1397 
1398 bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
1399                               const FixedPoint &FP) {
1400   const Expr *E = S.Current->getExpr(OpPC);
1401   if (S.checkingForUndefinedBehavior()) {
1402     S.getASTContext().getDiagnostics().Report(
1403         E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
1404         << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1405   }
1406   S.CCEDiag(E, diag::note_constexpr_overflow)
1407       << FP.toDiagnosticString(S.getASTContext()) << E->getType();
1408   return S.noteUndefinedBehavior();
1409 }
1410 
1411 bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
1412   const SourceInfo &Loc = S.Current->getSource(OpPC);
1413   S.FFDiag(Loc,
1414            diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
1415       << Index;
1416   return false;
1417 }
1418 
1419 bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
1420                                 const Pointer &Ptr, unsigned BitWidth) {
1421   if (Ptr.isDummy())
1422     return false;
1423 
1424   const SourceInfo &E = S.Current->getSource(OpPC);
1425   S.CCEDiag(E, diag::note_constexpr_invalid_cast)
1426       << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);
1427 
1428   if (Ptr.isBlockPointer() && !Ptr.isZero()) {
1429     // Only allow based lvalue casts if they are lossless.
1430     if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
1431         BitWidth)
1432       return Invalid(S, OpPC);
1433   }
1434   return true;
1435 }
1436 
1437 bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1438   const Pointer &Ptr = S.Stk.pop<Pointer>();
1439 
1440   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1441     return false;
1442 
1443   S.Stk.push<IntegralAP<false>>(
1444       IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
1445   return true;
1446 }
1447 
1448 bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
1449   const Pointer &Ptr = S.Stk.pop<Pointer>();
1450 
1451   if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
1452     return false;
1453 
1454   S.Stk.push<IntegralAP<true>>(
1455       IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
1456   return true;
1457 }
1458 
1459 // https://github.com/llvm/llvm-project/issues/102513
1460 #if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
1461 #pragma optimize("", off)
1462 #endif
1463 bool Interpret(InterpState &S, APValue &Result) {
1464   // The current stack frame when we started Interpret().
1465   // This is used by the ops to determine whether
1466   // to return from this function and thus terminate
1467   // interpretation.
1468   const InterpFrame *StartFrame = S.Current;
1469   assert(!S.Current->isRoot());
1470   CodePtr PC = S.Current->getPC();
1471 
1472   // Empty program.
1473   if (!PC)
1474     return true;
1475 
1476   for (;;) {
1477     auto Op = PC.read<Opcode>();
1478     CodePtr OpPC = PC;
1479 
1480     switch (Op) {
1481 #define GET_INTERP
1482 #include "Opcodes.inc"
1483 #undef GET_INTERP
1484     }
1485   }
1486 }
1487 // https://github.com/llvm/llvm-project/issues/102513
1488 #if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
1489 #pragma optimize("", on)
1490 #endif
1491 
1492 } // namespace interp
1493 } // namespace clang
1494