//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/ASTDiagnostic.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/ADT/StringExtras.h"
#include <limits>
#include <vector>

using namespace clang;
using namespace clang::interp;

/// Stub handler for returning a value from the interpreter loop. The
/// interpreter is never supposed to take this path, so reaching it is a
/// fatal error rather than a diagnosable condition.
static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

/// Unconditional jump: advances the PC by the given byte offset.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

/// Jump-if-true: pops a bool off the stack and jumps if it is true.
static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Jump-if-false: pops a bool off the stack and jumps if it is false.
static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Diagnoses an access to a variable whose initializer is unknown and
/// notes the variable's declaration location.
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);
/// Emits the most specific diagnostic available for a reference to a
/// declaration whose value is not known at compile time (function
/// parameters, non-const variables, uninitialized const variables).
/// Always returns false so callers can `return diagnoseUnknownDecl(...)`.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  if (isa<ParmVarDecl>(D)) {
    // The C++11 note names the parameter; pre-C++11 only gets the generic
    // "not a constant expression" diagnostic.
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified())
    diagnoseNonConstVariable(S, OpPC, D);
  else if (const auto *VD = dyn_cast<VarDecl>(D);
           VD && !VD->getAnyInitializer())
    diagnoseMissingInitializer(S, OpPC, VD);

  return false;
}

/// Diagnoses a read of a variable that is not usable in a constant
/// expression, choosing among several notes depending on language mode
/// and the variable's type.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(Loc);
    return;
  }

  // A const variable without any initializer gets the "init unknown" note
  // instead of the non-const one.
  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

/// Checks that a pointer into a union designates the currently active
/// member; diagnoses accesses to inactive union members.
static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  // Walk up towards the enclosing union, remembering in C the topmost
  // inactive field we passed through.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

/// Checks accesses to static temporaries: a static temporary is only
/// accessible if its MaterializeTemporaryExpr is usable in constant
/// expressions, or if its block was created during the current evaluation.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

/// Checks that a static global is only modified while evaluating its own
/// declaration; modifying any other global is diagnosed.
static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {
/// Discards one call argument's stack slot, sized by the argument
/// expression's classified primitive type (pointer-sized by default).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Removes the arguments of a (possibly failed) call from the stack.
/// Custom-typechecked builtins and variadic calls need the call-site
/// expression to determine what was actually pushed.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  // Unevaluated builtins never had their arguments pushed.
  if (Func->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // the classified parameter types do not match.
  if (unsigned BID = Func->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls on an object, the object argument is not a
    // written parameter, hence the isa<CXXOperatorCallExpr> adjustment.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Checks accesses to extern declarations: they are only usable if
/// initialized or if they are the declaration currently being evaluated.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}

/// Diagnoses indexing into an array of unknown size.
bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

/// Checks that the pointer refers to live storage: diagnoses null
/// pointers, deleted objects, and objects whose lifetime has ended.
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

/// Checks whether reading from the global variable described by Desc is
/// permitted in a constant expression, mirroring the lvalue-to-rvalue
/// rules of the current interpreter (constexpr vars, const integrals,
/// pointers/references to const, ...).
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  // Reading the variable we're currently initializing is fine.
  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    // Const but not constexpr: allowed, but only with a CCE note.
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

/// Pointer overload of CheckConstant: only block pointers carry a
/// declaration descriptor worth checking.
static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isBlockPointer())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

/// Diagnoses accessing a subobject through a null pointer.
bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

/// Diagnoses an access through a one-past-the-end pointer.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_past_end)
      << AK << S.Current->getRange(OpPC);
  return false;
}

/// Diagnoses designating a subobject of a past-the-end array element.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Diagnoses designating a subobject of a one-past-the-end pointer.
bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Checks that a derived-to-base offset adjustment stays within the
/// object's storage (past its metadata); otherwise the downcast is
/// invalid and diagnosed.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

/// Checks that a store does not modify a const object. The object a
/// constructor/destructor is currently operating on is exempt.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  // Non-block pointers fail the check but get no note here.
  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

/// Checks reads of mutable members; permitted in C++14+ when the member's
/// lifetime began within the current evaluation.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

/// Diagnoses accesses to volatile-qualified objects, which are not
/// permitted in constant expressions.
bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    S.FFDiag(Loc);
  return false;
}

/// Checks that the pointed-to object has been initialized before it is
/// read; globals get variable-specific notes.
bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && VD->hasGlobalStorage()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  // Stay silent when only checking for potential constant expressions;
  // the object might be initialized on the actual evaluation path.
  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

/// Diagnoses a read of a global variable that has not been initialized
/// with a constant value. Always returns false (the value is unusable).
bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

/// Diagnoses reads through weak declarations, whose values are unknown
/// at compile time.
static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isWeak())
    return true;

  const auto *VD = Ptr.getDeclDesc()->asVarDecl();
  assert(VD);
  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(VD->getLocation(), diag::note_declared_at);

  return false;
}

/// Runs the full battery of checks required before loading a value
/// through Ptr with access kind AK.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
607 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 608 if (!CheckLive(S, OpPC, Ptr, AK_Read)) 609 return false; 610 if (!CheckConstant(S, OpPC, Ptr)) 611 return false; 612 613 if (!CheckDummy(S, OpPC, Ptr, AK_Read)) 614 return false; 615 if (!CheckExtern(S, OpPC, Ptr)) 616 return false; 617 if (!CheckRange(S, OpPC, Ptr, AK_Read)) 618 return false; 619 if (!CheckActive(S, OpPC, Ptr, AK_Read)) 620 return false; 621 if (!CheckInitialized(S, OpPC, Ptr, AK_Read)) 622 return false; 623 if (!CheckTemporary(S, OpPC, Ptr, AK_Read)) 624 return false; 625 if (!CheckWeak(S, OpPC, Ptr)) 626 return false; 627 if (!CheckMutable(S, OpPC, Ptr)) 628 return false; 629 return true; 630 } 631 632 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 633 if (!CheckLive(S, OpPC, Ptr, AK_Assign)) 634 return false; 635 if (!CheckDummy(S, OpPC, Ptr, AK_Assign)) 636 return false; 637 if (!CheckExtern(S, OpPC, Ptr)) 638 return false; 639 if (!CheckRange(S, OpPC, Ptr, AK_Assign)) 640 return false; 641 if (!CheckGlobal(S, OpPC, Ptr)) 642 return false; 643 if (!CheckConst(S, OpPC, Ptr)) 644 return false; 645 return true; 646 } 647 648 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 649 if (!CheckLive(S, OpPC, Ptr, AK_MemberCall)) 650 return false; 651 if (!Ptr.isDummy()) { 652 if (!CheckExtern(S, OpPC, Ptr)) 653 return false; 654 if (!CheckRange(S, OpPC, Ptr, AK_MemberCall)) 655 return false; 656 } 657 return true; 658 } 659 660 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 661 if (!CheckLive(S, OpPC, Ptr, AK_Assign)) 662 return false; 663 if (!CheckRange(S, OpPC, Ptr, AK_Assign)) 664 return false; 665 return true; 666 } 667 668 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) { 669 670 if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) { 671 const SourceLocation &Loc = S.Current->getLocation(OpPC); 672 S.CCEDiag(Loc, diag::note_constexpr_virtual_call); 673 return false; 674 } 675 676 if 
(F->isConstexpr() && F->hasBody() && 677 (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>())) 678 return true; 679 680 // Implicitly constexpr. 681 if (F->isLambdaStaticInvoker()) 682 return true; 683 684 const SourceLocation &Loc = S.Current->getLocation(OpPC); 685 if (S.getLangOpts().CPlusPlus11) { 686 const FunctionDecl *DiagDecl = F->getDecl(); 687 688 // Invalid decls have been diagnosed before. 689 if (DiagDecl->isInvalidDecl()) 690 return false; 691 692 // If this function is not constexpr because it is an inherited 693 // non-constexpr constructor, diagnose that directly. 694 const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl); 695 if (CD && CD->isInheritingConstructor()) { 696 const auto *Inherited = CD->getInheritedConstructor().getConstructor(); 697 if (!Inherited->isConstexpr()) 698 DiagDecl = CD = Inherited; 699 } 700 701 // FIXME: If DiagDecl is an implicitly-declared special member function 702 // or an inheriting constructor, we should be much more explicit about why 703 // it's not constexpr. 704 if (CD && CD->isInheritingConstructor()) { 705 S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1) 706 << CD->getInheritedConstructor().getConstructor()->getParent(); 707 S.Note(DiagDecl->getLocation(), diag::note_declared_at); 708 } else { 709 // Don't emit anything if the function isn't defined and we're checking 710 // for a constant expression. It might be defined at the point we're 711 // actually calling it. 712 bool IsExtern = DiagDecl->getStorageClass() == SC_Extern; 713 if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() && 714 S.checkingPotentialConstantExpression()) 715 return false; 716 717 // If the declaration is defined, declared 'constexpr' _and_ has a body, 718 // the below diagnostic doesn't add anything useful. 
719 if (DiagDecl->isDefined() && DiagDecl->isConstexpr() && 720 DiagDecl->hasBody()) 721 return false; 722 723 S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1) 724 << DiagDecl->isConstexpr() << (bool)CD << DiagDecl; 725 726 if (DiagDecl->getDefinition()) 727 S.Note(DiagDecl->getDefinition()->getLocation(), 728 diag::note_declared_at); 729 else 730 S.Note(DiagDecl->getLocation(), diag::note_declared_at); 731 } 732 } else { 733 S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr); 734 } 735 736 return false; 737 } 738 739 bool CheckCallDepth(InterpState &S, CodePtr OpPC) { 740 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) { 741 S.FFDiag(S.Current->getSource(OpPC), 742 diag::note_constexpr_depth_limit_exceeded) 743 << S.getLangOpts().ConstexprCallDepth; 744 return false; 745 } 746 747 return true; 748 } 749 750 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) { 751 if (!This.isZero()) 752 return true; 753 754 const SourceInfo &Loc = S.Current->getSource(OpPC); 755 756 bool IsImplicit = false; 757 if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr())) 758 IsImplicit = E->isImplicit(); 759 760 if (S.getLangOpts().CPlusPlus11) 761 S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit; 762 else 763 S.FFDiag(Loc); 764 765 return false; 766 } 767 768 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) { 769 if (!MD->isPureVirtual()) 770 return true; 771 const SourceInfo &E = S.Current->getSource(OpPC); 772 S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD; 773 S.Note(MD->getLocation(), diag::note_declared_at); 774 return false; 775 } 776 777 bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result, 778 APFloat::opStatus Status, FPOptions FPO) { 779 // [expr.pre]p4: 780 // If during the evaluation of an expression, the result is not 781 // mathematically defined [...], the behavior is undefined. 782 // FIXME: C++ rules require us to not conform to IEEE 754 here. 
783 if (Result.isNan()) { 784 const SourceInfo &E = S.Current->getSource(OpPC); 785 S.CCEDiag(E, diag::note_constexpr_float_arithmetic) 786 << /*NaN=*/true << S.Current->getRange(OpPC); 787 return S.noteUndefinedBehavior(); 788 } 789 790 // In a constant context, assume that any dynamic rounding mode or FP 791 // exception state matches the default floating-point environment. 792 if (S.inConstantContext()) 793 return true; 794 795 if ((Status & APFloat::opInexact) && 796 FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) { 797 // Inexact result means that it depends on rounding mode. If the requested 798 // mode is dynamic, the evaluation cannot be made in compile time. 799 const SourceInfo &E = S.Current->getSource(OpPC); 800 S.FFDiag(E, diag::note_constexpr_dynamic_rounding); 801 return false; 802 } 803 804 if ((Status != APFloat::opOK) && 805 (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic || 806 FPO.getExceptionMode() != LangOptions::FPE_Ignore || 807 FPO.getAllowFEnvAccess())) { 808 const SourceInfo &E = S.Current->getSource(OpPC); 809 S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict); 810 return false; 811 } 812 813 if ((Status & APFloat::opStatus::opInvalidOp) && 814 FPO.getExceptionMode() != LangOptions::FPE_Ignore) { 815 const SourceInfo &E = S.Current->getSource(OpPC); 816 // There is no usefully definable result. 
817 S.FFDiag(E); 818 return false; 819 } 820 821 return true; 822 } 823 824 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) { 825 if (S.getLangOpts().CPlusPlus20) 826 return true; 827 828 const SourceInfo &E = S.Current->getSource(OpPC); 829 S.CCEDiag(E, diag::note_constexpr_new); 830 return true; 831 } 832 833 bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC, 834 DynamicAllocator::Form AllocForm, 835 DynamicAllocator::Form DeleteForm, const Descriptor *D, 836 const Expr *NewExpr) { 837 if (AllocForm == DeleteForm) 838 return true; 839 840 QualType TypeToDiagnose; 841 // We need to shuffle things around a bit here to get a better diagnostic, 842 // because the expression we allocated the block for was of type int*, 843 // but we want to get the array size right. 844 if (D->isArray()) { 845 QualType ElemQT = D->getType()->getPointeeType(); 846 TypeToDiagnose = S.getASTContext().getConstantArrayType( 847 ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false), 848 nullptr, ArraySizeModifier::Normal, 0); 849 } else 850 TypeToDiagnose = D->getType()->getPointeeType(); 851 852 const SourceInfo &E = S.Current->getSource(OpPC); 853 S.FFDiag(E, diag::note_constexpr_new_delete_mismatch) 854 << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm) 855 << TypeToDiagnose; 856 S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here) 857 << NewExpr->getSourceRange(); 858 return false; 859 } 860 861 bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source, 862 const Pointer &Ptr) { 863 // The two sources we currently allow are new expressions and 864 // __builtin_operator_new calls. 865 if (isa_and_nonnull<CXXNewExpr>(Source)) 866 return true; 867 if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source); 868 CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new) 869 return true; 870 871 // Whatever this is, we didn't heap allocate it. 
872 const SourceInfo &Loc = S.Current->getSource(OpPC); 873 S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc) 874 << Ptr.toDiagnosticString(S.getASTContext()); 875 876 if (Ptr.isTemporary()) 877 S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here); 878 else 879 S.Note(Ptr.getDeclLoc(), diag::note_declared_at); 880 return false; 881 } 882 883 /// We aleady know the given DeclRefExpr is invalid for some reason, 884 /// now figure out why and print appropriate diagnostics. 885 bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) { 886 const ValueDecl *D = DR->getDecl(); 887 return diagnoseUnknownDecl(S, OpPC, D); 888 } 889 890 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr, 891 AccessKinds AK) { 892 if (!Ptr.isDummy()) 893 return true; 894 895 const Descriptor *Desc = Ptr.getDeclDesc(); 896 const ValueDecl *D = Desc->asValueDecl(); 897 if (!D) 898 return false; 899 900 if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement) 901 return diagnoseUnknownDecl(S, OpPC, D); 902 903 assert(AK == AK_Assign); 904 if (S.getLangOpts().CPlusPlus14) { 905 const SourceInfo &E = S.Current->getSource(OpPC); 906 S.FFDiag(E, diag::note_constexpr_modify_global); 907 } 908 return false; 909 } 910 911 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F, 912 const CallExpr *CE, unsigned ArgSize) { 913 auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs()); 914 auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args); 915 unsigned Offset = 0; 916 unsigned Index = 0; 917 for (const Expr *Arg : Args) { 918 if (NonNullArgs[Index] && Arg->getType()->isPointerType()) { 919 const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset); 920 if (ArgPtr.isZero()) { 921 const SourceLocation &Loc = S.Current->getLocation(OpPC); 922 S.CCEDiag(Loc, diag::note_non_null_attribute_failed); 923 return false; 924 } 925 } 926 927 Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr))); 928 ++Index; 929 } 930 return 
true; 931 } 932 933 // FIXME: This is similar to code we already have in Compiler.cpp. 934 // I think it makes sense to instead add the field and base destruction stuff 935 // to the destructor Function itself. Then destroying a record would really 936 // _just_ be calling its destructor. That would also help with the diagnostic 937 // difference when the destructor or a field/base fails. 938 static bool runRecordDestructor(InterpState &S, CodePtr OpPC, 939 const Pointer &BasePtr, 940 const Descriptor *Desc) { 941 assert(Desc->isRecord()); 942 const Record *R = Desc->ElemRecord; 943 assert(R); 944 945 if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) { 946 const SourceInfo &Loc = S.Current->getSource(OpPC); 947 S.FFDiag(Loc, diag::note_constexpr_double_destroy); 948 return false; 949 } 950 951 // Destructor of this record. 952 if (const CXXDestructorDecl *Dtor = R->getDestructor(); 953 Dtor && !Dtor->isTrivial()) { 954 const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor); 955 if (!DtorFunc) 956 return false; 957 958 S.Stk.push<Pointer>(BasePtr); 959 if (!Call(S, OpPC, DtorFunc, 0)) 960 return false; 961 } 962 return true; 963 } 964 965 bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) { 966 assert(B); 967 const Descriptor *Desc = B->getDescriptor(); 968 969 if (Desc->isPrimitive() || Desc->isPrimitiveArray()) 970 return true; 971 972 assert(Desc->isRecord() || Desc->isCompositeArray()); 973 974 if (Desc->isCompositeArray()) { 975 const Descriptor *ElemDesc = Desc->ElemDesc; 976 assert(ElemDesc->isRecord()); 977 978 Pointer RP(const_cast<Block *>(B)); 979 for (unsigned I = 0; I != Desc->getNumElems(); ++I) { 980 if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc)) 981 return false; 982 } 983 return true; 984 } 985 986 assert(Desc->isRecord()); 987 return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc); 988 } 989 990 void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED, 
991 const APSInt &Value) { 992 llvm::APInt Min; 993 llvm::APInt Max; 994 995 if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr()) 996 return; 997 998 ED->getValueRange(Max, Min); 999 --Max; 1000 1001 if (ED->getNumNegativeBits() && 1002 (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) { 1003 const SourceLocation &Loc = S.Current->getLocation(OpPC); 1004 S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range) 1005 << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue() 1006 << ED; 1007 } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) { 1008 const SourceLocation &Loc = S.Current->getLocation(OpPC); 1009 S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range) 1010 << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue() 1011 << ED; 1012 } 1013 } 1014 1015 bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) { 1016 assert(T); 1017 assert(!S.getLangOpts().CPlusPlus23); 1018 1019 // C++1y: A constant initializer for an object o [...] may also invoke 1020 // constexpr constructors for o and its subobjects even if those objects 1021 // are of non-literal class types. 1022 // 1023 // C++11 missed this detail for aggregates, so classes like this: 1024 // struct foo_t { union { int i; volatile int j; } u; }; 1025 // are not (obviously) initializable like so: 1026 // __attribute__((__require_constant_initialization__)) 1027 // static const foo_t x = {{0}}; 1028 // because "i" is a subobject with non-literal initialization (due to the 1029 // volatile member of the union). See: 1030 // http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677 1031 // Therefore, we use the C++1y behavior. 

  // A constructor currently running on the evaluating declaration itself is
  // allowed to touch non-literal subobjects (the C++1y rule above).
  if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
  return false;
}

/// Perform a call with variadic arguments. \p VarArgSize is the combined
/// stack size of the variadic arguments already pushed for this call.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // Skip over the RVO return-slot pointer (if present) to locate 'this'.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  // Push and activate the callee frame. On success the frame has already
  // deleted itself, hence the release() below.
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Perform a direct call to \p Func: validate the call, push a new frame,
/// and interpret the callee's body. \p VarArgSize is the combined stack
/// size of any variadic arguments already pushed.
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  // On any failed pre-call check, the arguments already pushed for this
  // call still need to be cleaned off the stack.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // Skip over the RVO return-slot pointer (if present) to locate 'this'.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
    }
  }

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  // Push and activate the callee frame. On success the frame has already
  // deleted itself, hence the release() below.
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Perform a virtual call: determine the dynamic type of the instance on
/// the stack, resolve the overrider of \p Func in that type, and dispatch
/// to it via Call(), adjusting 'this' and handling covariant return types.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  // Skip over the RVO return-slot pointer (if present) to locate 'this'.
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  // Find the dynamic type by walking from the (possibly base-class) pointer
  // up to the most-derived object.
  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  // Resolve the final overrider of the statically-named method in the
  // dynamic class.
  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    // Dispatch to the overrider instead of the statically-named function.
    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        InitialPointeeType->getAsRecordDecl(),
        OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Offset);
  }

  return true;
}

/// Interpret a call to the compiler builtin \p BuiltinID. Builtins run in a
/// transient frame that is only released on success.
bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
            const CallExpr *CE, uint32_t BuiltinID) {
  // Bail out early instead of evaluating the builtin when only checking for
  // a potential constant expression.
  if (S.checkingPotentialConstantExpression())
    return false;
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);

  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
    NewFrame.release();
    return true;
  }
  // Builtin evaluation failed; restore the previous frame.
  S.Current = FrameBefore;
  return false;
}

/// Perform an indirect call through the function pointer on top of the
/// stack. \p ArgSize is the combined size of the already-pushed call
/// arguments.
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();

  const Function *F = FuncPtr.getFunction();
  if (!F) {
    // Calling through a null function pointer.
    const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
    S.FFDiag(E, diag::note_constexpr_null_callee)
        << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
    return false;
  }

  if (!FuncPtr.isValid() || !F->getDecl())
    return Invalid(S, OpPC);

  assert(F);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
      S.Ctx.classify(CE->getType()))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Anything beyond the declared parameters is variadic-argument data.
  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(primSize(PT_Ptr));

  // Virtual calls need dynamic dispatch through CallVirt().
  if (F->isVirtual())
    return CallVirt(S, OpPC, F, VarArgSize);

  return Call(S, OpPC, F, VarArgSize);
}

/// For the placement-new expression \p E writing to the pointer on top of
/// the stack, check that the storage's type is similar to the allocated
/// type and (for array news of \p ArraySize elements) large enough.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  if (!CheckStore(S, OpPC, Ptr))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(E);
  QualType StorageType = Ptr.getType();

  if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr())) {
    // FIXME: Are there other cases where this is a problem?
    StorageType = StorageType->getPointeeType();
  }

  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    // An array new allocates a constant array of *ArraySize elements.
    AllocType = ASTCtx.getConstantArrayType(
        NewExpr->getAllocatedType(),
        APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
        ArraySizeModifier::Normal, 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // Compare element counts; non-array types count as one element.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
                             ASTCtx.getBaseElementType(StorageType))) {
    S.FFDiag(S.Current->getLocation(OpPC),
             diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }
  return true;
}

/// Diagnose why the new/delete expression \p E is not a constant
/// expression. Always returns false.
bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
  assert(E);
  const auto &Loc = S.Current->getSource(OpPC);

  if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
    const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();

    if
(!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
      // Placement arguments in constant evaluation are a C++26 feature.
      S.FFDiag(Loc, diag::note_constexpr_new_placement)
          << /*C++26 feature*/ 1 << E->getSourceRange();
    } else if (NewExpr->getNumPlacementArgs() == 1 &&
               !OperatorNew->isReservedGlobalPlacementOperator()) {
      S.FFDiag(Loc, diag::note_constexpr_new_placement)
          << /*Unsupported*/ 0 << E->getSourceRange();
    } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
      // Only replaceable global allocation functions are permitted.
      S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
    }
  } else {
    const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
    const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
    if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
      S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
    }
  }

  return false;
}

/// Main interpreter loop: fetch and dispatch opcodes for the current frame
/// until one of the generated opcode handlers terminates interpretation.
bool Interpret(InterpState &S, APValue &Result) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    // The opcode handlers are generated from Opcodes.td into Opcodes.inc;
    // each case returns or falls back into this loop.
    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}

} // namespace interp
} // namespace clang