//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/ASTDiagnostic.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/DiagnosticSema.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/ADT/StringExtras.h"
#include <limits>
#include <vector>

using namespace clang;
using namespace clang::interp;

static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  if (isa<ParmVarDecl>(D)) {
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified())
    diagnoseNonConstVariable(S, OpPC, D);
  else if (const auto *VD = dyn_cast<VarDecl>(D);
           VD && !VD->getAnyInitializer())
    diagnoseMissingInitializer(S, OpPC, VD);

  return false;
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(Loc);
    return;
  }

  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
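  // (The current interpreter emits no note for ObjC instance variables, so we
  // stay silent for them here as well.)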
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
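    // The access is fine if the MaterializeTemporaryExpr is usable in constant
    // expressions, or if the temporary was created during the current
    // evaluation; otherwise, diagnose it.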
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (Func->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // the classified parameter types do not match.
  if (unsigned BID = Func->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
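  // Their primitive types are recorded in the Function itself, so no call-site
  // information is needed here.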
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}

bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isBlockPointer())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_past_end)
      << AK << S.Current->getRange(OpPC);
  return false;
}

bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
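  // Walk up the call stack and allow the write if any active frame is a
  // constructor or destructor running on the same block as Ptr.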
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    S.FFDiag(Loc);
  return false;
}

bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && VD->hasGlobalStorage()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isWeak())
    return true;

  const auto *VD = Ptr.getDeclDesc()->asVarDecl();
  assert(VD);
  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(VD->getLocation(), diag::note_declared_at);

  return false;
}

bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;

  if (!CheckDummy(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK_Read))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  return true;
}

bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckGlobal(S, OpPC, Ptr))
    return false;
  if (!CheckConst(S, OpPC, Ptr))
    return false;
  return true;
}

bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_MemberCall))
    return false;
  if (!Ptr.isDummy()) {
    if (!CheckExtern(S, OpPC, Ptr))
      return false;
    if (!CheckRange(S, OpPC, Ptr, AK_MemberCall))
      return false;
  }
  return true;
}

bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!CheckLive(S, OpPC, Ptr, AK_Assign))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK_Assign))
    return false;
  return true;
}

bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) {

  if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_virtual_call);
    return false;
  }

  if (F->isConstexpr() && F->hasBody() &&
      (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  const SourceLocation &Loc = S.Current->getLocation(OpPC);
  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      if (DiagDecl->getDefinition())
        S.Note(DiagDecl->getDefinition()->getLocation(),
               diag::note_declared_at);
      else
        S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    }
  } else {
    S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}

bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
  if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_depth_limit_exceeded)
        << S.getLangOpts().ConstexprCallDepth;
    return false;
  }

  return true;
}

bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
  if (!This.isZero())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);

  bool IsImplicit = false;
  if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
    IsImplicit = E->isImplicit();

  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
  else
    S.FFDiag(Loc);

  return false;
}

bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
  if (!MD->isPureVirtual())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
  S.Note(MD->getLocation(), diag::note_declared_at);
  return false;
}

bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
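  // A NaN result (for instance from evaluating 0.0 / 0.0) is not
  // mathematically defined, so note it as undefined behavior.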
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    // There is no usefully definable result.
    S.FFDiag(E);
    return false;
  }

  return true;
}

bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
  if (S.getLangOpts().CPlusPlus20)
    return true;

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.CCEDiag(E, diag::note_constexpr_new);
  return true;
}

bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose;
  // We need to shuffle things around a bit here to get a better diagnostic,
  // because the expression we allocated the block for was of type int*,
  // but we want to get the array size right.
  if (D->isArray()) {
    QualType ElemQT = D->getType()->getPointeeType();
    TypeToDiagnose = S.getASTContext().getConstantArrayType(
        ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
        nullptr, ArraySizeModifier::Normal, 0);
  } else
    TypeToDiagnose = D->getType()->getPointeeType();

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}

bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // The two sources we currently allow are new expressions and
  // __builtin_operator_new calls.
  if (isa_and_nonnull<CXXNewExpr>(Source))
    return true;
  if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;

  // Whatever this is, we didn't heap allocate it.
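  // (For example, deleting the address of a local or global variable ends up
  // here.)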
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(S.getASTContext());

  if (Ptr.isTemporary())
    S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
  else
    S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
  return false;
}

/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}

bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isDummy())
    return true;

  const Descriptor *Desc = Ptr.getDeclDesc();
  const ValueDecl *D = Desc->asValueDecl();
  if (!D)
    return false;

  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  assert(AK == AK_Assign);
  if (S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_modify_global);
  }
  return false;
}

bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *CE, unsigned ArgSize) {
  auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(OpPC);
        S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
        return false;
      }
    }

    Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
    ++Index;
  }
  return true;
}

// FIXME: This is similar to code we already have in Compiler.cpp.
// I think it makes sense to instead add the field and base destruction stuff
// to the destructor Function itself. Then destroying a record would really
// _just_ be calling its destructor. That would also help with the diagnostic
// difference when the destructor or a field/base fails.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
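  // Push the object pointer and invoke the (non-trivial) destructor like a
  // regular call.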
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}

bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    Pointer RP(const_cast<Block *>(B));
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
}

void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;

  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  ED->getValueRange(Max, Min);
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}

bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.
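  // Accept the non-literal type if we are evaluating a constructor of the
  // object whose initializer is currently being evaluated.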

  if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
  return false;
}

static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
                             const Pointer &ThisPtr) {
  assert(Func->isConstructor());

  const Descriptor *D = ThisPtr.getFieldDesc();

  // FIXME: I think this case is not 100% correct. E.g. a pointer into a
  // subobject of a composite array.
  if (!D->ElemRecord)
    return true;

  if (D->ElemRecord->getNumVirtualBases() == 0)
    return true;

  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
      << Func->getParentDecl();
  return false;
}

bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        InitialPointeeType->getAsRecordDecl(),
        OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Offset);
  }

  return true;
}

bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
            const CallExpr *CE, uint32_t BuiltinID) {
  if (S.checkingPotentialConstantExpression())
    return false;
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);

  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
    NewFrame.release();
    return true;
  }
  S.Current = FrameBefore;
  return false;
}

bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();

  const Function *F = FuncPtr.getFunction();
  if (!F) {
    const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
    S.FFDiag(E, diag::note_constexpr_null_callee)
        << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
    return false;
  }

  if (!FuncPtr.isValid() || !F->getDecl())
    return Invalid(S, OpPC);

  assert(F);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
      S.Ctx.classify(CE->getType()))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(primSize(PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, F, VarArgSize);

  return Call(S, OpPC, F, VarArgSize);
}

bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  if (!CheckStore(S, OpPC, Ptr))
    return false;

  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(E);
  QualType StorageType = Ptr.getType();

  if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
      StorageType->isPointerType()) {
    // FIXME: Are there other cases where this is a problem?
    StorageType = StorageType->getPointeeType();
  }

  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        NewExpr->getAllocatedType(),
        APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
        ArraySizeModifier::Normal, 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
                             ASTCtx.getBaseElementType(StorageType))) {
    S.FFDiag(S.Current->getLocation(OpPC),
             diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }
  return true;
}

bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
  assert(E);
  const auto &Loc = S.Current->getSource(OpPC);

  if (S.getLangOpts().CPlusPlus26)
    return true;

  if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
    const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();

    if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
      // This is allowed pre-C++26, but only in an std function.
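      // (std::construct_at, for example, uses placement new internally.)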
      if (S.Current->isStdFunction())
        return true;
      S.FFDiag(Loc, diag::note_constexpr_new_placement)
          << /*C++26 feature*/ 1 << E->getSourceRange();
    } else if (NewExpr->getNumPlacementArgs() == 1 &&
               !OperatorNew->isReservedGlobalPlacementOperator()) {
      S.FFDiag(Loc, diag::note_constexpr_new_placement)
          << /*Unsupported*/ 0 << E->getSourceRange();
    } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
      S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
    }
  } else {
    const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
    const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
    if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
      S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
    }
  }

  return false;
}

bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
                              const FixedPoint &FP) {
  const Expr *E = S.Current->getExpr(OpPC);
  if (S.checkingForUndefinedBehavior()) {
    S.getASTContext().getDiagnostics().Report(
        E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
        << FP.toDiagnosticString(S.getASTContext()) << E->getType();
  }
  S.CCEDiag(E, diag::note_constexpr_overflow)
      << FP.toDiagnosticString(S.getASTContext()) << E->getType();
  return S.noteUndefinedBehavior();
}

bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc,
           diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
      << Index;
  return false;
}

// https://github.com/llvm/llvm-project/issues/102513
#if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", off)
#endif
bool Interpret(InterpState &S, APValue &Result) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", on)
#endif

} // namespace interp
} // namespace clang