//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/ASTDiagnostic.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/DiagnosticSema.h"
#include "clang/Basic/TargetInfo.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/ADT/StringExtras.h"
#include <limits>
#include <vector>

using namespace clang;
using namespace clang::interp;

// Opcode handler stub; the interpreter never returns values through here.
static bool RetValue(InterpState &S, CodePtr &Pt, APValue &Result) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

/// Unconditional jump: advance PC by the given (possibly negative) offset.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

/// Jump-if-true: pop a bool from the stack and branch when it is true.
static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Jump-if-false: pop a bool from the stack and branch when it is false.
static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Diagnose a variable used in a constant expression that has no
/// initializer, pointing at both the use site and the declaration.
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);

/// Emit the most specific diagnostic for a declaration whose value is not
/// known to the interpreter (parameters, non-const or uninitialized
/// variables). Always returns false so callers can tail-return the failure.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  if (isa<ParmVarDecl>(D)) {
    // Function parameters have no known value when checking a potential
    // constant expression; C++11 and later have a dedicated note for this.
    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, D);
  } else if (const auto *VD = dyn_cast<VarDecl>(D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const and initialized, but the initializer is not a constant.
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    }
  }

  return false;
}

/// Diagnose a read from a variable that is not usable in a constant
/// expression because it is not const/constexpr (or lacks an initializer).
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(Loc);
    return;
  }

  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ?
               diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

/// Check that Ptr designates the active member of any enclosing unions;
/// diagnose reads of inactive union members.
static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  // Walk up through inactive union members, remembering in C the child
  // through which we reached the topmost inactive ancestor U.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  // union U {
  //   struct {
  //     int x;
  //     int y;
  //   } a;
  // }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

/// Check that a static temporary (a lifetime-extended
/// MaterializeTemporaryExpr) may be accessed from the current evaluation.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      // A static temporary created by a different evaluation that is not
      // usable in constant expressions may not be accessed.
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

/// Check that a static global may be modified: only allowed while evaluating
/// the initializer of that same declaration.
static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {
/// Pop (and discard) the stack value for the given call argument.
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Remove a call's arguments from the stack after the callee has returned.
/// Custom-typechecked builtins and variadic calls are popped based on the
/// call site's actual arguments; fixed parameters are popped at the end.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (Func->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // the classified parameter types do not match.
  if (unsigned BID = Func->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls on a member, the object argument is not a written
    // parameter, hence the isa<CXXOperatorCallExpr> adjustment.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Diagnose access to an extern object with no known value, unless it is
/// already initialized or is the declaration currently being evaluated.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}

/// Diagnose indexing into an array of unknown size.
bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

/// Check that Ptr designates a live object (non-null, lifetime not ended)
/// for the given kind of access.
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) <<
          AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

/// Check that a global variable described by Desc may be read in a constant
/// expression. Returns true if the read is allowed (possibly after a
/// constant-context-only CCEDiag), false after a hard diagnostic.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we may
  // only read other constexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    // A non-constant pointer/reference is still readable in C++11 and later
    // if it refers to a const pointee.
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

/// Convenience overload: run the constant check on the pointer's declaration
/// descriptor, but only for static block pointers.
static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isStatic() || !Ptr.isBlockPointer())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

/// Diagnose forming a subobject through a null pointer.
bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

/// Diagnose an access through a one-past-the-end pointer.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_past_end)
      << AK << S.Current->getRange(OpPC);
  return false;
}

/// Diagnose forming a subobject from a past-the-end array element.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Diagnose forming a subobject of a one-past-the-end pointer.
bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc,
           diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Check that a derived-to-most-derived downcast by Offset bytes stays
/// within the object (must not land before the metadata region).
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

/// Check that the pointed-to object may be mutated (is not const), with an
/// exception for `this` inside constructors and destructors.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

/// Check whether reading a mutable member is permitted.
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

/// Diagnose accesses to volatile-qualified objects.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    S.FFDiag(Loc);
  return false;
}

/// Check that the object designated by Ptr has been initialized.
bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && VD->hasGlobalStorage()) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      // Initialized in source, but not to a constant value.
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

/// Diagnose reads from a global that lacks a usable constant initializer.
bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

/// Diagnose reads from weak declarations; their value is unknown.
static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isWeak())
    return true;

  const auto *VD = Ptr.getDeclDesc()->asVarDecl();
  assert(VD);
  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(VD->getLocation(), diag::note_declared_at);

  return false;
}

/// Full set of checks performed before loading a value through Ptr.
/// The order of the checks determines which diagnostic is emitted first.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
622 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 623 if (!CheckLive(S, OpPC, Ptr, AK_Read)) 624 return false; 625 if (!CheckConstant(S, OpPC, Ptr)) 626 return false; 627 628 if (!CheckDummy(S, OpPC, Ptr, AK_Read)) 629 return false; 630 if (!CheckExtern(S, OpPC, Ptr)) 631 return false; 632 if (!CheckRange(S, OpPC, Ptr, AK_Read)) 633 return false; 634 if (!CheckActive(S, OpPC, Ptr, AK_Read)) 635 return false; 636 if (!CheckInitialized(S, OpPC, Ptr, AK_Read)) 637 return false; 638 if (!CheckTemporary(S, OpPC, Ptr, AK_Read)) 639 return false; 640 if (!CheckWeak(S, OpPC, Ptr)) 641 return false; 642 if (!CheckMutable(S, OpPC, Ptr)) 643 return false; 644 return true; 645 } 646 647 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 648 if (!CheckLive(S, OpPC, Ptr, AK_Assign)) 649 return false; 650 if (!CheckDummy(S, OpPC, Ptr, AK_Assign)) 651 return false; 652 if (!CheckExtern(S, OpPC, Ptr)) 653 return false; 654 if (!CheckRange(S, OpPC, Ptr, AK_Assign)) 655 return false; 656 if (!CheckGlobal(S, OpPC, Ptr)) 657 return false; 658 if (!CheckConst(S, OpPC, Ptr)) 659 return false; 660 return true; 661 } 662 663 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 664 if (!CheckLive(S, OpPC, Ptr, AK_MemberCall)) 665 return false; 666 if (!Ptr.isDummy()) { 667 if (!CheckExtern(S, OpPC, Ptr)) 668 return false; 669 if (!CheckRange(S, OpPC, Ptr, AK_MemberCall)) 670 return false; 671 } 672 return true; 673 } 674 675 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 676 if (!CheckLive(S, OpPC, Ptr, AK_Assign)) 677 return false; 678 if (!CheckRange(S, OpPC, Ptr, AK_Assign)) 679 return false; 680 return true; 681 } 682 683 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) { 684 685 if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) { 686 const SourceLocation &Loc = S.Current->getLocation(OpPC); 687 S.CCEDiag(Loc, diag::note_constexpr_virtual_call); 688 return false; 689 } 690 691 if 
      (F->isConstexpr() && F->hasBody() &&
       (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>()))
    return true;

  // Implicitly constexpr.
  if (F->isLambdaStaticInvoker())
    return true;

  const SourceLocation &Loc = S.Current->getLocation(OpPC);
  if (S.getLangOpts().CPlusPlus11) {
    const FunctionDecl *DiagDecl = F->getDecl();

    // Invalid decls have been diagnosed before.
    if (DiagDecl->isInvalidDecl())
      return false;

    // If this function is not constexpr because it is an inherited
    // non-constexpr constructor, diagnose that directly.
    const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl);
    if (CD && CD->isInheritingConstructor()) {
      const auto *Inherited = CD->getInheritedConstructor().getConstructor();
      if (!Inherited->isConstexpr())
        DiagDecl = CD = Inherited;
    }

    // FIXME: If DiagDecl is an implicitly-declared special member function
    // or an inheriting constructor, we should be much more explicit about why
    // it's not constexpr.
    if (CD && CD->isInheritingConstructor()) {
      S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1)
          << CD->getInheritedConstructor().getConstructor()->getParent();
      S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    } else {
      // Don't emit anything if the function isn't defined and we're checking
      // for a constant expression. It might be defined at the point we're
      // actually calling it.
      bool IsExtern = DiagDecl->getStorageClass() == SC_Extern;
      if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() &&
          S.checkingPotentialConstantExpression())
        return false;

      // If the declaration is defined, declared 'constexpr' _and_ has a body,
      // the below diagnostic doesn't add anything useful.
      if (DiagDecl->isDefined() && DiagDecl->isConstexpr() &&
          DiagDecl->hasBody())
        return false;

      S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1)
          << DiagDecl->isConstexpr() << (bool)CD << DiagDecl;

      // Point the note at the definition when one exists, otherwise at the
      // declaration.
      if (DiagDecl->getDefinition())
        S.Note(DiagDecl->getDefinition()->getLocation(),
               diag::note_declared_at);
      else
        S.Note(DiagDecl->getLocation(), diag::note_declared_at);
    }
  } else {
    S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr);
  }

  return false;
}

/// Check that one more call does not exceed the implementation-defined
/// limit on constexpr call depth (-fconstexpr-depth).
bool CheckCallDepth(InterpState &S, CodePtr OpPC) {
  if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) {
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_depth_limit_exceeded)
        << S.getLangOpts().ConstexprCallDepth;
    return false;
  }

  return true;
}

/// Diagnose member accesses through a null `this` pointer.
bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) {
  if (!This.isZero())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);

  bool IsImplicit = false;
  if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr()))
    IsImplicit = E->isImplicit();

  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit;
  else
    S.FFDiag(Loc);

  return false;
}

/// Diagnose calls to pure virtual functions.
bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) {
  if (!MD->isPureVirtual())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD;
  S.Note(MD->getLocation(), diag::note_declared_at);
  return false;
}

/// Validate the status of a floating-point operation against the active
/// floating-point options; diagnose NaN results and strict-FP violations.
bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result,
                      APFloat::opStatus Status, FPOptions FPO) {
  // [expr.pre]p4:
  //   If during the evaluation of an expression, the result is not
  //   mathematically defined [...], the behavior is undefined.
  // FIXME: C++ rules require us to not conform to IEEE 754 here.
  if (Result.isNan()) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.CCEDiag(E, diag::note_constexpr_float_arithmetic)
        << /*NaN=*/true << S.Current->getRange(OpPC);
    return S.noteUndefinedBehavior();
  }

  // In a constant context, assume that any dynamic rounding mode or FP
  // exception state matches the default floating-point environment.
  if (S.inConstantContext())
    return true;

  if ((Status & APFloat::opInexact) &&
      FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) {
    // Inexact result means that it depends on rounding mode. If the requested
    // mode is dynamic, the evaluation cannot be made in compile time.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_dynamic_rounding);
    return false;
  }

  if ((Status != APFloat::opOK) &&
      (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic ||
       FPO.getExceptionMode() != LangOptions::FPE_Ignore ||
       FPO.getAllowFEnvAccess())) {
    // Any non-OK status is unacceptable under strict FP semantics.
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict);
    return false;
  }

  if ((Status & APFloat::opStatus::opInvalidOp) &&
      FPO.getExceptionMode() != LangOptions::FPE_Ignore) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    // There is no usefully definable result.
    S.FFDiag(E);
    return false;
  }

  return true;
}

/// Before C++20, dynamic allocation is not allowed in constant expressions;
/// emit a constant-context diagnostic but allow evaluation to continue.
bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) {
  if (S.getLangOpts().CPlusPlus20)
    return true;

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.CCEDiag(E, diag::note_constexpr_new);
  return true;
}

/// Diagnose a mismatch between the form of `new` used to allocate a block
/// and the form of `delete` used to free it (e.g. new[] vs. delete).
bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC,
                         DynamicAllocator::Form AllocForm,
                         DynamicAllocator::Form DeleteForm, const Descriptor *D,
                         const Expr *NewExpr) {
  if (AllocForm == DeleteForm)
    return true;

  QualType TypeToDiagnose;
  // We need to shuffle things around a bit here to get a better diagnostic,
  // because the expression we allocated the block for was of type int*,
  // but we want to get the array size right.
  if (D->isArray()) {
    QualType ElemQT = D->getType()->getPointeeType();
    TypeToDiagnose = S.getASTContext().getConstantArrayType(
        ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false),
        nullptr, ArraySizeModifier::Normal, 0);
  } else
    TypeToDiagnose = D->getType()->getPointeeType();

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_new_delete_mismatch)
      << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm)
      << TypeToDiagnose;
  S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here)
      << NewExpr->getSourceRange();
  return false;
}

/// Check that the pointer passed to `delete` came from a `new` expression
/// or a __builtin_operator_new call.
bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source,
                       const Pointer &Ptr) {
  // The two sources we currently allow are new expressions and
  // __builtin_operator_new calls.
  if (isa_and_nonnull<CXXNewExpr>(Source))
    return true;
  if (const CallExpr *CE = dyn_cast_if_present<CallExpr>(Source);
      CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new)
    return true;

  // Whatever this is, we didn't heap allocate it.
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc)
      << Ptr.toDiagnosticString(S.getASTContext());

  if (Ptr.isTemporary())
    S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
  else
    S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
  return false;
}

/// We already know the given DeclRefExpr is invalid for some reason,
/// now figure out why and print appropriate diagnostics.
bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) {
  const ValueDecl *D = DR->getDecl();
  return diagnoseUnknownDecl(S, OpPC, D);
}

/// Diagnose accesses through dummy pointers, i.e. placeholders for objects
/// whose value the interpreter does not track.
bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isDummy())
    return true;

  const Descriptor *Desc = Ptr.getDeclDesc();
  const ValueDecl *D = Desc->asValueDecl();
  if (!D)
    return false;

  if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement)
    return diagnoseUnknownDecl(S, OpPC, D);

  assert(AK == AK_Assign);
  if (S.getLangOpts().CPlusPlus14) {
    const SourceInfo &E = S.Current->getSource(OpPC);
    S.FFDiag(E, diag::note_constexpr_modify_global);
  }
  return false;
}

/// Enforce nonnull attributes on call arguments: pointer arguments declared
/// nonnull must not be null.
bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *CE, unsigned ArgSize) {
  auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs());
  auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args);
  unsigned Offset = 0;
  unsigned Index = 0;
  for (const Expr *Arg : Args) {
    if (NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(OpPC);
        S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
        return false;
      }
    }

    // Advance past this argument's slot; slots are aligned primitive sizes.
    Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
    ++Index;
  }
  return true;
}

// FIXME: This is similar to code we already have in Compiler.cpp.
// I think it makes sense to instead add the field and base destruction stuff
// to the destructor Function itself. Then destroying a record would really
// _just_ be calling its destructor. That would also help with the diagnostic
// difference when the destructor or a field/base fails.
/// Run the (non-trivial) destructor of the record at BasePtr.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // Destroying the block whose destructor frame is currently running is a
  // double-destroy.
  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}

/// Run destructors for the contents of the given block — a record or a
/// composite array of records; primitives need no destruction.
static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    Pointer RP(const_cast<Block *>(B));
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
}

/// Returns true if T is a class type whose destructor is virtual.
static bool hasVirtualDestructor(QualType T) {
  if
(const CXXRecordDecl *RD = T->getAsCXXRecordDecl()) 1007 if (const CXXDestructorDecl *DD = RD->getDestructor()) 1008 return DD->isVirtual(); 1009 return false; 1010 } 1011 1012 bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm, 1013 bool IsGlobalDelete) { 1014 if (!CheckDynamicMemoryAllocation(S, OpPC)) 1015 return false; 1016 1017 const Expr *Source = nullptr; 1018 const Block *BlockToDelete = nullptr; 1019 { 1020 // Extra scope for this so the block doesn't have this pointer 1021 // pointing to it when we destroy it. 1022 Pointer Ptr = S.Stk.pop<Pointer>(); 1023 1024 // Deleteing nullptr is always fine. 1025 if (Ptr.isZero()) 1026 return true; 1027 1028 // Remove base casts. 1029 QualType InitialType = Ptr.getType(); 1030 while (Ptr.isBaseClass()) 1031 Ptr = Ptr.getBase(); 1032 1033 // For the non-array case, the types must match if the static type 1034 // does not have a virtual destructor. 1035 if (!DeleteIsArrayForm && Ptr.getType() != InitialType && 1036 !hasVirtualDestructor(InitialType)) { 1037 S.FFDiag(S.Current->getSource(OpPC), 1038 diag::note_constexpr_delete_base_nonvirt_dtor) 1039 << InitialType << Ptr.getType(); 1040 return false; 1041 } 1042 1043 if (!Ptr.isRoot() || Ptr.isOnePastEnd() || Ptr.isArrayElement()) { 1044 const SourceInfo &Loc = S.Current->getSource(OpPC); 1045 S.FFDiag(Loc, diag::note_constexpr_delete_subobject) 1046 << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd(); 1047 return false; 1048 } 1049 1050 Source = Ptr.getDeclDesc()->asExpr(); 1051 BlockToDelete = Ptr.block(); 1052 1053 if (!CheckDeleteSource(S, OpPC, Source, Ptr)) 1054 return false; 1055 1056 // For a class type with a virtual destructor, the selected operator delete 1057 // is the one looked up when building the destructor. 
    QualType AllocType = Ptr.getType();
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      // Find the operator delete selected through the virtual destructor
      // of T, if there is one.
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete->isReplaceableGlobalAllocationFunction()) {
        S.FFDiag(S.Current->getSource(OpPC),
                 diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, BlockToDelete))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();
  const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
  // Remember how this block was allocated before we deallocate it.
  std::optional<DynamicAllocator::Form> AllocForm =
      Allocator.getAllocationForm(Source);

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_delete);
    return false;
  }

  assert(AllocForm);
  // Finally, check that new/new[] is paired with delete/delete[].
  DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
                                          ? DynamicAllocator::Form::Array
                                          : DynamicAllocator::Form::NonArray;
  return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
                             Source);
}

/// Diagnose a value of an unscoped enumeration type that is outside the
/// range of values representable by the enum's bit-field of min/max bits.
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;

  // Only diagnose when evaluating a constexpr entity.
  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  ED->getValueRange(Max, Min);
  // Turn the exclusive upper bound into an inclusive one.
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
    // Signed range check for enums with negative enumerators.
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
    // Unsigned range check otherwise.
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}

/// Check that \p T is a literal type, diagnosing if it is not.
/// Only called pre-C++23, where the literal-type restriction still exists.
bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.
1147 1148 if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() && 1149 S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) { 1150 return true; 1151 } 1152 1153 const Expr *E = S.Current->getExpr(OpPC); 1154 if (S.getLangOpts().CPlusPlus11) 1155 S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType(); 1156 else 1157 S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr); 1158 return false; 1159 } 1160 1161 static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func, 1162 const Pointer &ThisPtr) { 1163 assert(Func->isConstructor()); 1164 1165 const Descriptor *D = ThisPtr.getFieldDesc(); 1166 1167 // FIXME: I think this case is not 100% correct. E.g. a pointer into a 1168 // subobject of a composite array. 1169 if (!D->ElemRecord) 1170 return true; 1171 1172 if (D->ElemRecord->getNumVirtualBases() == 0) 1173 return true; 1174 1175 S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base) 1176 << Func->getParentDecl(); 1177 return false; 1178 } 1179 1180 bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func, 1181 uint32_t VarArgSize) { 1182 if (Func->hasThisPointer()) { 1183 size_t ArgSize = Func->getArgSize() + VarArgSize; 1184 size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0); 1185 const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset); 1186 1187 // If the current function is a lambda static invoker and 1188 // the function we're about to call is a lambda call operator, 1189 // skip the CheckInvoke, since the ThisPtr is a null pointer 1190 // anyway. 
1191 if (!(S.Current->getFunction() && 1192 S.Current->getFunction()->isLambdaStaticInvoker() && 1193 Func->isLambdaCallOperator())) { 1194 if (!CheckInvoke(S, OpPC, ThisPtr)) 1195 return false; 1196 } 1197 1198 if (S.checkingPotentialConstantExpression()) 1199 return false; 1200 } 1201 1202 if (!CheckCallable(S, OpPC, Func)) 1203 return false; 1204 1205 if (!CheckCallDepth(S, OpPC)) 1206 return false; 1207 1208 auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize); 1209 InterpFrame *FrameBefore = S.Current; 1210 S.Current = NewFrame.get(); 1211 1212 APValue CallResult; 1213 // Note that we cannot assert(CallResult.hasValue()) here since 1214 // Ret() above only sets the APValue if the curent frame doesn't 1215 // have a caller set. 1216 if (Interpret(S, CallResult)) { 1217 NewFrame.release(); // Frame was delete'd already. 1218 assert(S.Current == FrameBefore); 1219 return true; 1220 } 1221 1222 // Interpreting the function failed somehow. Reset to 1223 // previous state. 1224 S.Current = FrameBefore; 1225 return false; 1226 } 1227 1228 bool Call(InterpState &S, CodePtr OpPC, const Function *Func, 1229 uint32_t VarArgSize) { 1230 assert(Func); 1231 auto cleanup = [&]() -> bool { 1232 cleanupAfterFunctionCall(S, OpPC, Func); 1233 return false; 1234 }; 1235 1236 if (Func->hasThisPointer()) { 1237 size_t ArgSize = Func->getArgSize() + VarArgSize; 1238 size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0); 1239 1240 const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset); 1241 1242 // If the current function is a lambda static invoker and 1243 // the function we're about to call is a lambda call operator, 1244 // skip the CheckInvoke, since the ThisPtr is a null pointer 1245 // anyway. 
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  // Push a new frame and make it current while we interpret the callee.
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) functions run with the constant context forced on.
  InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
  APValue CallResult;
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S, CallResult)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Perform a virtual call to \p Func: find the overriding function in the
/// dynamic type of the instance pointer and call that instead.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  // Determine the dynamic type by stripping base-class casts off the
  // instance pointer.
  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        InitialPointeeType->getAsRecordDecl(),
        OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Offset);
  }

  return true;
}

/// Call the builtin function \p Func with builtin ID \p BuiltinID
/// for call expression \p CE.
bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
            const CallExpr *CE, uint32_t BuiltinID) {
  // Builtins need fully-known argument values.
  if (S.checkingPotentialConstantExpression())
    return false;
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);

  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
    NewFrame.release();
    return true;
  }
  // Evaluating the builtin failed; restore the previous frame.
  S.Current = FrameBefore;
  return false;
}

/// Call through the function pointer on top of the stack.
/// \p ArgSize is the total stack size of the pushed arguments.
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();

  const Function *F = FuncPtr.getFunction();
  if (!F) {
    // Calling a null function pointer.
    const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
    S.FFDiag(E, diag::note_constexpr_null_callee)
        << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
    return false;
  }

  if (!FuncPtr.isValid() || !F->getDecl())
    return Invalid(S, OpPC);

  assert(F);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
      S.Ctx.classify(CE->getType()))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(primSize(PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, F, VarArgSize);

  return Call(S, OpPC, F, VarArgSize);
}

/// Check that the storage pointed to by the stack-top pointer can hold
/// an object created by new-expression \p E (optionally an array of
/// \p ArraySize elements), diagnosing mismatched placement-new types.
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  if (!CheckStore(S, OpPC, Ptr))
    return false;

  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(E);
  QualType StorageType = Ptr.getType();

  if (isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) &&
      StorageType->isPointerType()) {
    // FIXME: Are there other cases where this is a problem?
1443 StorageType = StorageType->getPointeeType(); 1444 } 1445 1446 const ASTContext &ASTCtx = S.getASTContext(); 1447 QualType AllocType; 1448 if (ArraySize) { 1449 AllocType = ASTCtx.getConstantArrayType( 1450 NewExpr->getAllocatedType(), 1451 APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr, 1452 ArraySizeModifier::Normal, 0); 1453 } else { 1454 AllocType = NewExpr->getAllocatedType(); 1455 } 1456 1457 unsigned StorageSize = 1; 1458 unsigned AllocSize = 1; 1459 if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType)) 1460 AllocSize = CAT->getZExtSize(); 1461 if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType)) 1462 StorageSize = CAT->getZExtSize(); 1463 1464 if (AllocSize > StorageSize || 1465 !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType), 1466 ASTCtx.getBaseElementType(StorageType))) { 1467 S.FFDiag(S.Current->getLocation(OpPC), 1468 diag::note_constexpr_placement_new_wrong_type) 1469 << StorageType << AllocType; 1470 return false; 1471 } 1472 1473 // Can't activate fields in a union, unless the direct base is the union. 1474 if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion()) 1475 return CheckActive(S, OpPC, Ptr, AK_Construct); 1476 1477 return true; 1478 } 1479 1480 bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) { 1481 assert(E); 1482 const auto &Loc = S.Current->getSource(OpPC); 1483 1484 if (S.getLangOpts().CPlusPlus26) 1485 return true; 1486 1487 if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) { 1488 const FunctionDecl *OperatorNew = NewExpr->getOperatorNew(); 1489 1490 if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) { 1491 // This is allowed pre-C++26, but only an std function. 
1492 if (S.Current->isStdFunction()) 1493 return true; 1494 S.FFDiag(Loc, diag::note_constexpr_new_placement) 1495 << /*C++26 feature*/ 1 << E->getSourceRange(); 1496 } else if (NewExpr->getNumPlacementArgs() == 1 && 1497 !OperatorNew->isReservedGlobalPlacementOperator()) { 1498 S.FFDiag(Loc, diag::note_constexpr_new_placement) 1499 << /*Unsupported*/ 0 << E->getSourceRange(); 1500 } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) { 1501 S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable) 1502 << isa<CXXMethodDecl>(OperatorNew) << OperatorNew; 1503 } 1504 } else { 1505 const auto *DeleteExpr = cast<CXXDeleteExpr>(E); 1506 const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete(); 1507 if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) { 1508 S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable) 1509 << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete; 1510 } 1511 } 1512 1513 return false; 1514 } 1515 1516 bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC, 1517 const FixedPoint &FP) { 1518 const Expr *E = S.Current->getExpr(OpPC); 1519 if (S.checkingForUndefinedBehavior()) { 1520 S.getASTContext().getDiagnostics().Report( 1521 E->getExprLoc(), diag::warn_fixedpoint_constant_overflow) 1522 << FP.toDiagnosticString(S.getASTContext()) << E->getType(); 1523 } 1524 S.CCEDiag(E, diag::note_constexpr_overflow) 1525 << FP.toDiagnosticString(S.getASTContext()) << E->getType(); 1526 return S.noteUndefinedBehavior(); 1527 } 1528 1529 bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) { 1530 const SourceInfo &Loc = S.Current->getSource(OpPC); 1531 S.FFDiag(Loc, 1532 diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr) 1533 << Index; 1534 return false; 1535 } 1536 1537 bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC, 1538 const Pointer &Ptr, unsigned BitWidth) { 1539 if (Ptr.isDummy()) 1540 return false; 1541 1542 const SourceInfo &E = S.Current->getSource(OpPC); 1543 
  S.CCEDiag(E, diag::note_constexpr_invalid_cast)
      << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);

  if (Ptr.isBlockPointer() && !Ptr.isZero()) {
    // Only allow based lvalue casts if they are lossless.
    if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
        BitWidth)
      return Invalid(S, OpPC);
  }
  return true;
}

/// Cast the pointer on top of the stack to an unsigned arbitrary-precision
/// integer of \p BitWidth bits and push the result.
bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
    return false;

  S.Stk.push<IntegralAP<false>>(
      IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
  return true;
}

/// Cast the pointer on top of the stack to a signed arbitrary-precision
/// integer of \p BitWidth bits and push the result.
bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
    return false;

  S.Stk.push<IntegralAP<true>>(
      IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
  return true;
}

// https://github.com/llvm/llvm-project/issues/102513
#if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", off)
#endif
/// Main interpreter loop: execute opcodes (dispatched via the generated
/// Opcodes.inc handlers) until the frame this call started in returns.
bool Interpret(InterpState &S, APValue &Result) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_WIN32) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", on)
#endif

} // namespace interp
} // namespace clang