//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/ASTDiagnostic.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/ADT/StringExtras.h"
#include <limits>
#include <vector>

using namespace clang;
using namespace clang::interp;

/// Always aborts with a fatal error: the interpreter cannot return values
/// through this path.
static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

/// Unconditional jump: advance PC by Offset.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

/// Jump-if-true: pop a bool from the stack, jump if it is true.
static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Jump-if-false: pop a bool from the stack, jump if it is false.
static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Emit a note that the initializer of the given variable is unknown,
/// plus a note pointing at its declaration.
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);

/// Emit the most specific diagnostic for a declaration whose value is
/// unknown to the interpreter. Always returns false so callers can
/// directly propagate the failure.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  // Function parameters: their value is simply unknown in this context.
  if (isa<ParmVarDecl>(D)) {
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified())
    diagnoseNonConstVariable(S, OpPC, D);
  else if (const auto *VD = dyn_cast<VarDecl>(D);
           VD && !VD->getAnyInitializer())
    diagnoseMissingInitializer(S, OpPC, VD);

  return false;
}

/// Diagnose an access to a variable that is not usable in a constant
/// expression: const-but-uninitialized, non-const integral, or otherwise
/// non-constexpr/non-integral.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  // No diagnostic is emitted outside of C++ mode.
  if (!S.getLangOpts().CPlusPlus)
    return;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

/// Check that Ptr refers to the active member of its enclosing union(s);
/// diagnose reads of an inactive union member with a note naming the
/// currently-active field (if any).
static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  // Walk up from Ptr to find the innermost inactive union member on the path.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

/// Check that a static temporary (a lifetime-extended
/// MaterializeTemporaryExpr) may be accessed in the current evaluation.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

/// Check that a static global may be modified: only globals belonging to
/// the declaration currently being evaluated are writable.
static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {
/// Discard one argument value from the stack, using the argument's
/// classified primitive type (pointers for non-primitives).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Pop the argument values of the just-returned function off the stack:
/// custom-typechecked builtin args, then variadic args, then the fixed
/// parameters.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC) {
  assert(S.Current);
  const Function *CurFunc = S.Current->getFunction();
  assert(CurFunc);

  if (CurFunc->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // the classified parameter types do not match.
  if (unsigned BID = CurFunc->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && CurFunc->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= CurFunc->getNumWrittenParams());
    NumVarArgs = NumArgs - (CurFunc->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  S.Current->popArgs();
}

/// Check that an extern declaration may be accessed: it must be
/// initialized, or be the declaration currently being evaluated.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}

/// Diagnose indexing into an array of unknown size.
bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

/// Check that the pointed-to object is alive: not null, not deleted, and
/// its lifetime has not ended.
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

/// Check that a global variable described by Desc is readable in a
/// constant expression: constexpr, constant integral, constant
/// pointer/reference to constant, etc.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    // const-but-not-constexpr: only a CCE diagnostic; evaluation may
    // still continue (constant folding).
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

/// Pointer overload: only block pointers refer to declarations that need
/// the constant check.
static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isBlockPointer())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

/// Diagnose access to a subobject through a null pointer.
bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

/// Diagnose an access through a one-past-the-end pointer.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_past_end)
      << AK << S.Current->getRange(OpPC);
  return false;
}

/// Diagnose a subobject access past the end of an array element.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Diagnose taking a subobject of a one-past-the-end pointer.
bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Check that a downcast by Offset stays within the object's bounds;
/// otherwise diagnose the invalid downcast with source and target types.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

/// Check that a const object is not being modified; `this` is writable
/// inside constructors and destructors even when isConst() is true.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

/// Check reads of mutable members; allowed in C++14+ when the object's
/// lifetime began within the current evaluation.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

/// Diagnose accesses to volatile-qualified objects.
bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    S.FFDiag(Loc);
  return false;
}

/// Check that the object being accessed has been initialized; globals get
/// a decl-specific note, locals a generic "uninitialized" note.
bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && VD->hasGlobalStorage()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

/// Check that a global has been initialized; always fails when it has not,
/// diagnosing non-constant initialization where appropriate.
bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

/// Full validity check for a load: liveness, constness, dummy/extern
/// status, range, union activity, initialization, temporaries, mutable
/// members and volatility.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
/// Identical to CheckLoad except it skips the volatility check.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}

/// Full validity check for a store: liveness, dummy/extern status, range,
/// global mutability and constness.
bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  return true;
}

/// Check that a member call can be invoked on the given object.
bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
    return false;
  if (!Ptr.isDummy()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
      return false;
  }
  return true;
}

/// Check that the object being initialized is alive and in range.
bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  return true;
}

/// Check that the function F may be called in a constant expression;
/// on failure emit the most specific note explaining why it cannot.
bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {

  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
    return false;
  }

  if (F->isConstexpr() && F->hasBody() &&
      (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  const SourceLocation &Loc = S.Current->getLocation(OpPC);
  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      if (DiagDecl->getDefinition())
        S.Note(DiagDecl->getDefinition()->getLocation(),
               diag::note_declared_at);
      else
        S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    }
  } else {
    S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}

/// Enforce the implementation-defined constexpr call-depth limit.
bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
  if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_depth_limit_exceeded)
        << S.getLangOpts().ConstexprCallDepth;
    return false;
  }

  return true;
}

/// Check that the `this` pointer of a member access is non-null.
bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
  if (!This.isZero())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);

  bool IsImplicit = false;
  if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
    IsImplicit = E->isImplicit();

  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
  else
    S.FFDiag(Loc);

  return false;
}

/// Diagnose calls to pure virtual functions.
bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
  if (!MD->isPureVirtual())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
  S.Note(MD->getLocation(), diag::note_declared_at);
  return false;
}

/// Validate the result of a floating-point operation against the FP
/// environment: NaN results, dynamic rounding modes and strict FP
/// exception handling are rejected as appropriate.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    // There is no usefully definable result.
    S.FFDiag(E);
    return false;
  }

  return true;
}

/// Before C++20, dynamic allocation in a constant expression gets a
/// constant-folding diagnostic but evaluation continues.
bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
  if (S.getLangOpts().CPlusPlus20)
    return true;

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.CCEDiag(E, diag::note_constexpr_new);
  return true;
}

/// Diagnose new/delete mismatches (new[] with delete, new with delete[]),
/// reconstructing the allocated type for the diagnostic.
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC, bool NewWasArray,
                         bool DeleteIsArray, const Descriptor *D,
                         const Expr *NewExpr) {
  if (NewWasArray == DeleteIsArray)
    return true;

  QualType TypeToDiagnose;
  // We need to shuffle things around a bit here to get a better diagnostic,
  // because the expression we allocated the block for was of type int*,
  // but we want to get the array size right.
  if (D->isArray()) {
    QualType ElemQT = D->getType()->getPointeeType();
    TypeToDiagnose = S.getASTContext().getConstantArrayType(
        ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
        nullptr, ArraySizeModifier::Normal, 0);
  } else
    TypeToDiagnose = D->getType()->getPointeeType();

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
      << DeleteIsArray << 0 << TypeToDiagnose;
  S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}

/// Check that a deleted pointer actually came from a new-expression.
bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  if (Source && isa<CXXNewExpr>(Source))
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(S.getASTContext());

  if (Ptr.isTemporary())
    S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
  else
    S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
  return false;
}

/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}

/// Check accesses through dummy pointers (placeholders for declarations
/// whose value is unknown). Reads/increments/decrements are diagnosed per
/// declaration; assignments get a "modification of global" note in C++11+.
/// Always fails for dummy pointers.
bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isDummy())
    return true;

  const Descriptor *Desc = Ptr.getDeclDesc();
  const ValueDecl *D = Desc->asValueDecl();
  if (!D)
    return false;

  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  assert(AK == AK_Assign);
  if (S.getLangOpts().CPlusPlus11) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_modify_global);
  }
  return false;
}

/// Check that no argument the callee declares as nonnull is a null
/// pointer. Walks the stacked argument values in call order, tracking the
/// byte offset of each argument relative to ArgSize.
bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *CE, unsigned ArgSize) {
  auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(OpPC);
        S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
        return false;
      }
    }

    Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
    ++Index;
  }
  return true;
}

// FIXME: This is similar to code we already have in Compiler.cpp.
// I think it makes sense to instead add the field and base destruction stuff
// to the destructor Function itself. Then destroying a record would really
// _just_ be calling its destructor. That would also help with the diagnostic
// difference when the destructor or a field/base fails.
/// Invoke the (non-trivial) destructor for the record at BasePtr.
/// Destroying the object `this` currently points at is a double-destroy.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}

/// Run the destructors for the object stored in block B; composite arrays
/// are destroyed element-wise, primitives need no destruction.
bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    Pointer RP(const_cast<Block *>(B));
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
}

/// Diagnose a value that is out of range for the given unscoped enum.
/// Signed and unsigned comparisons are chosen based on whether the enum
/// has any negative enumerators.
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;

  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  ED->getValueRange(Max, Min);
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}

/// Call a variadic function: validate the call, push a new frame with the
/// extra VarArgSize bytes of variadic arguments, and interpret the body.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Call a (possibly member) function: validate the `this` pointer and the
/// callee, push a new frame and interpret the body.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Virtual call: resolve the overrider for the object's dynamic type
/// before dispatching. (Definition continues past this chunk.)
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
1136 while (ThisPtr.isBaseClass()) 1137 ThisPtr = ThisPtr.getBase(); 1138 } 1139 } 1140 1141 if (!Call(S, OpPC, Func, VarArgSize)) 1142 return false; 1143 1144 // Covariant return types. The return type of Overrider is a pointer 1145 // or reference to a class type. 1146 if (Overrider != InitialFunction && 1147 Overrider->getReturnType()->isPointerOrReferenceType() && 1148 InitialFunction->getReturnType()->isPointerOrReferenceType()) { 1149 QualType OverriderPointeeType = 1150 Overrider->getReturnType()->getPointeeType(); 1151 QualType InitialPointeeType = 1152 InitialFunction->getReturnType()->getPointeeType(); 1153 // We've called Overrider above, but calling code expects us to return what 1154 // InitialFunction returned. According to the rules for covariant return 1155 // types, what InitialFunction returns needs to be a base class of what 1156 // Overrider returns. So, we need to do an upcast here. 1157 unsigned Offset = S.getContext().collectBaseOffset( 1158 InitialPointeeType->getAsRecordDecl(), 1159 OverriderPointeeType->getAsRecordDecl()); 1160 return GetPtrBasePop(S, OpPC, Offset); 1161 } 1162 1163 return true; 1164 } 1165 1166 bool CallBI(InterpState &S, CodePtr &PC, const Function *Func, 1167 const CallExpr *CE) { 1168 auto NewFrame = std::make_unique<InterpFrame>(S, Func, PC); 1169 1170 InterpFrame *FrameBefore = S.Current; 1171 S.Current = NewFrame.get(); 1172 1173 if (InterpretBuiltin(S, PC, Func, CE)) { 1174 NewFrame.release(); 1175 return true; 1176 } 1177 S.Current = FrameBefore; 1178 return false; 1179 } 1180 1181 bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize, 1182 const CallExpr *CE) { 1183 const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>(); 1184 1185 const Function *F = FuncPtr.getFunction(); 1186 if (!F) { 1187 const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC)); 1188 S.FFDiag(E, diag::note_constexpr_null_callee) 1189 << const_cast<Expr *>(E->getCallee()) << E->getSourceRange(); 1190 return false; 1191 } 1192 1193 
if (!FuncPtr.isValid() || !F->getDecl()) 1194 return Invalid(S, OpPC); 1195 1196 assert(F); 1197 1198 // This happens when the call expression has been cast to 1199 // something else, but we don't support that. 1200 if (S.Ctx.classify(F->getDecl()->getReturnType()) != 1201 S.Ctx.classify(CE->getType())) 1202 return false; 1203 1204 // Check argument nullability state. 1205 if (F->hasNonNullAttr()) { 1206 if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize)) 1207 return false; 1208 } 1209 1210 assert(ArgSize >= F->getWrittenArgSize()); 1211 uint32_t VarArgSize = ArgSize - F->getWrittenArgSize(); 1212 1213 // We need to do this explicitly here since we don't have the necessary 1214 // information to do it automatically. 1215 if (F->isThisPointerExplicit()) 1216 VarArgSize -= align(primSize(PT_Ptr)); 1217 1218 if (F->isVirtual()) 1219 return CallVirt(S, OpPC, F, VarArgSize); 1220 1221 return Call(S, OpPC, F, VarArgSize); 1222 } 1223 1224 bool Interpret(InterpState &S, APValue &Result) { 1225 // The current stack frame when we started Interpret(). 1226 // This is being used by the ops to determine wheter 1227 // to return from this function and thus terminate 1228 // interpretation. 1229 const InterpFrame *StartFrame = S.Current; 1230 assert(!S.Current->isRoot()); 1231 CodePtr PC = S.Current->getPC(); 1232 1233 // Empty program. 1234 if (!PC) 1235 return true; 1236 1237 for (;;) { 1238 auto Op = PC.read<Opcode>(); 1239 CodePtr OpPC = PC; 1240 1241 switch (Op) { 1242 #define GET_INTERP 1243 #include "Opcodes.inc" 1244 #undef GET_INTERP 1245 } 1246 } 1247 } 1248 1249 } // namespace interp 1250 } // namespace clang 1251