//===------- Interp.cpp - Interpreter for the constexpr VM ------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Interp.h"
#include "Function.h"
#include "InterpFrame.h"
#include "InterpShared.h"
#include "InterpStack.h"
#include "Opcode.h"
#include "PrimType.h"
#include "Program.h"
#include "State.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/Basic/DiagnosticSema.h"
#include "clang/Basic/TargetInfo.h"
#include "llvm/ADT/StringExtras.h"

using namespace clang;
using namespace clang::interp;

/// Opcode handler slot for returning a value from the interpreter loop.
/// This path must never be reached; values are returned via EvalEmitter.
static bool RetValue(InterpState &S, CodePtr &Pt) {
  llvm::report_fatal_error("Interpreter cannot return values");
}

//===----------------------------------------------------------------------===//
// Jmp, Jt, Jf
//===----------------------------------------------------------------------===//

/// Unconditional jump: advance PC by Offset.
static bool Jmp(InterpState &S, CodePtr &PC, int32_t Offset) {
  PC += Offset;
  return true;
}

/// Conditional jump: pop a bool from the stack and jump if it is true.
static bool Jt(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Conditional jump: pop a bool from the stack and jump if it is false.
static bool Jf(InterpState &S, CodePtr &PC, int32_t Offset) {
  if (!S.Stk.pop<bool>()) {
    PC += Offset;
  }
  return true;
}

/// Emit a note that the value of \p VD is unknown because it has no
/// (usable) initializer, plus a note pointing at its declaration.
static void diagnoseMissingInitializer(InterpState &S, CodePtr OpPC,
                                       const ValueDecl *VD) {
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_var_init_unknown, 1) << VD;
  S.Note(VD->getLocation(), diag::note_declared_at) << VD->getSourceRange();
}

static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD);

/// Diagnose why the declaration \p D cannot be read in a constant
/// expression. Handles function parameters, non-const variables, and
/// const variables with missing or non-constant initializers.
/// Always returns false so callers can `return diagnoseUnknownDecl(...)`.
static bool diagnoseUnknownDecl(InterpState &S, CodePtr OpPC,
                                const ValueDecl *D) {
  const SourceInfo &E = S.Current->getSource(OpPC);

  if (isa<ParmVarDecl>(D)) {
    // Reference parameters get no dedicated note here.
    if (D->getType()->isReferenceType())
      return false;

    if (S.getLangOpts().CPlusPlus11) {
      S.FFDiag(E, diag::note_constexpr_function_param_value_unknown) << D;
      S.Note(D->getLocation(), diag::note_declared_at) << D->getSourceRange();
    } else {
      S.FFDiag(E);
    }
    return false;
  }

  if (!D->getType().isConstQualified()) {
    diagnoseNonConstVariable(S, OpPC, D);
  } else if (const auto *VD = dyn_cast<VarDecl>(D)) {
    if (!VD->getAnyInitializer()) {
      diagnoseMissingInitializer(S, OpPC, VD);
    } else {
      // Const-qualified and initialized, but the initializer is not a
      // constant expression.
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    }
  }

  return false;
}

/// Emit the appropriate note for reading a variable that is not usable in
/// a constant expression, picking the diagnostic based on language mode
/// and the variable's type.
static void diagnoseNonConstVariable(InterpState &S, CodePtr OpPC,
                                     const ValueDecl *VD) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (!S.getLangOpts().CPlusPlus) {
    S.FFDiag(Loc);
    return;
  }

  // A const variable without any initializer: report the missing
  // initializer rather than the non-constness.
  if (const auto *VarD = dyn_cast<VarDecl>(VD);
      VarD && VarD->getType().isConstQualified() &&
      !VarD->getAnyInitializer()) {
    diagnoseMissingInitializer(S, OpPC, VD);
    return;
  }

  // Rather random, but this is to match the diagnostic output of the current
  // interpreter.
  if (isa<ObjCIvarDecl>(VD))
    return;

  if (VD->getType()->isIntegralOrEnumerationType()) {
    S.FFDiag(Loc, diag::note_constexpr_ltor_non_const_int, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
    return;
  }

  S.FFDiag(Loc,
           S.getLangOpts().CPlusPlus11 ? diag::note_constexpr_ltor_non_constexpr
                                       : diag::note_constexpr_ltor_non_integral,
           1)
      << VD << VD->getType();
  S.Note(VD->getLocation(), diag::note_declared_at);
}

/// Check that \p Ptr refers to the active member of its enclosing
/// union(s). On failure, diagnose which inactive member was accessed and
/// (if any) which member is currently active.
static bool CheckActive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                        AccessKinds AK) {
  if (Ptr.isActive())
    return true;

  assert(Ptr.inUnion());
  assert(Ptr.isField() && Ptr.getField());

  // Walk up to the innermost union whose member chain contains Ptr,
  // remembering the child (C) through which we reached it.
  Pointer U = Ptr.getBase();
  Pointer C = Ptr;
  while (!U.isRoot() && U.inUnion() && !U.isActive()) {
    if (U.getField())
      C = U;
    U = U.getBase();
  }
  assert(C.isField());

  // Get the inactive field descriptor.
  const FieldDecl *InactiveField = C.getField();
  assert(InactiveField);

  // Consider:
  //   union U {
  //     struct {
  //       int x;
  //       int y;
  //     } a;
  //   }
  //
  // When activating x, we will also activate a. If we now try to read
  // from y, we will get to CheckActive, because y is not active. In that
  // case, our U will be a (not a union). We return here and let later code
  // handle this.
  if (!U.getFieldDesc()->isUnion())
    return true;

  // Find the active field of the union.
  const Record *R = U.getRecord();
  assert(R && R->isUnion() && "Not a union");

  const FieldDecl *ActiveField = nullptr;
  for (const Record::Field &F : R->fields()) {
    const Pointer &Field = U.atField(F.Offset);
    if (Field.isActive()) {
      ActiveField = Field.getField();
      break;
    }
  }

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_access_inactive_union_member)
      << AK << InactiveField << !ActiveField << ActiveField;
  return false;
}

/// Check that a static temporary (a lifetime-extended
/// MaterializeTemporaryExpr) is accessible: it must either be usable in
/// constant expressions or have been created during the current evaluation.
static bool CheckTemporary(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                           AccessKinds AK) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStaticTemporary())
      return true;

    const auto *MTE = dyn_cast_if_present<MaterializeTemporaryExpr>(
        Ptr.getDeclDesc()->asExpr());
    if (!MTE)
      return true;

    // FIXME(perf): Since we do this check on every Load from a static
    // temporary, it might make sense to cache the value of the
    // isUsableInConstantExpressions call.
    if (!MTE->isUsableInConstantExpressions(S.getASTContext()) &&
        Ptr.block()->getEvalID() != S.Ctx.getEvalID()) {
      const SourceInfo &E = S.Current->getSource(OpPC);
      S.FFDiag(E, diag::note_constexpr_access_static_temporary, 1) << AK;
      S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      return false;
    }
  }
  return true;
}

/// Check that a static global is only modified from within the
/// declaration currently being evaluated; modifying any other global is
/// diagnosed.
static bool CheckGlobal(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (auto ID = Ptr.getDeclID()) {
    if (!Ptr.isStatic())
      return true;

    if (S.P.getCurrentDecl() == ID)
      return true;

    S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_modify_global);
    return false;
  }
  return true;
}

namespace clang {
namespace interp {

/// Discard the stack slot for the given call argument, using its
/// classified primitive type (pointers for non-primitives).
static void popArg(InterpState &S, const Expr *Arg) {
  PrimType Ty = S.getContext().classify(Arg).value_or(PT_Ptr);
  TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Pop all arguments of \p Func from the stack after a call: variadic
/// extras (recovered from the call site), then the fixed parameters.
void cleanupAfterFunctionCall(InterpState &S, CodePtr OpPC,
                              const Function *Func) {
  assert(S.Current);
  assert(Func);

  if (Func->isUnevaluatedBuiltin())
    return;

  // Some builtin functions require us to only look at the call site, since
  // the classified parameter types do not match.
  if (unsigned BID = Func->getBuiltinID();
      BID && S.getASTContext().BuiltinInfo.hasCustomTypechecking(BID)) {
    const auto *CE =
        cast<CallExpr>(S.Current->Caller->getExpr(S.Current->getRetPC()));
    for (int32_t I = CE->getNumArgs() - 1; I >= 0; --I) {
      const Expr *A = CE->getArg(I);
      popArg(S, A);
    }
    return;
  }

  if (S.Current->Caller && Func->isVariadic()) {
    // The CallExpr we're looking for is at the return PC of the current
    // function, i.e. in the caller.
    // This code path should be executed very rarely.
    unsigned NumVarArgs;
    const Expr *const *Args = nullptr;
    unsigned NumArgs = 0;
    const Expr *CallSite = S.Current->Caller->getExpr(S.Current->getRetPC());
    if (const auto *CE = dyn_cast<CallExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else if (const auto *CE = dyn_cast<CXXConstructExpr>(CallSite)) {
      Args = CE->getArgs();
      NumArgs = CE->getNumArgs();
    } else
      assert(false && "Can't get arguments from that expression type");

    assert(NumArgs >= Func->getNumWrittenParams());
    // For operator calls, the object argument is written at the call site
    // but is not a variadic argument.
    NumVarArgs = NumArgs - (Func->getNumWrittenParams() +
                            isa<CXXOperatorCallExpr>(CallSite));
    for (unsigned I = 0; I != NumVarArgs; ++I) {
      const Expr *A = Args[NumArgs - 1 - I];
      popArg(S, A);
    }
  }

  // And in any case, remove the fixed parameters (the non-variadic ones)
  // at the end.
  for (PrimType Ty : Func->args_reverse())
    TYPE_SWITCH(Ty, S.Stk.discard<T>());
}

/// Check accessibility of an extern declaration: allowed if initialized
/// or if it is the declaration currently being evaluated; otherwise
/// diagnose it as non-const.
bool CheckExtern(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isExtern())
    return true;

  if (Ptr.isInitialized() ||
      (Ptr.getDeclDesc()->asVarDecl() == S.EvaluatingDecl))
    return true;

  if (!S.checkingPotentialConstantExpression() && S.getLangOpts().CPlusPlus) {
    const auto *VD = Ptr.getDeclDesc()->asValueDecl();
    diagnoseNonConstVariable(S, OpPC, VD);
  }
  return false;
}

/// Check that \p Ptr does not point into an array of unknown size.
bool CheckArray(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isUnknownSizeArray())
    return true;
  const SourceInfo &E = S.Current->getSource(OpPC);
  S.FFDiag(E, diag::note_constexpr_unsized_array_indexed);
  return false;
}

/// Check that \p Ptr is neither null nor points to an object whose
/// lifetime has ended (deleted or destroyed temporary/local).
bool CheckLive(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (Ptr.isZero()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isField())
      S.FFDiag(Src, diag::note_constexpr_null_subobject) << CSK_Field;
    else
      S.FFDiag(Src, diag::note_constexpr_access_null) << AK;

    return false;
  }

  if (!Ptr.isLive()) {
    const auto &Src = S.Current->getSource(OpPC);

    if (Ptr.isDynamic()) {
      S.FFDiag(Src, diag::note_constexpr_access_deleted_object) << AK;
    } else if (!S.checkingPotentialConstantExpression()) {
      bool IsTemp = Ptr.isTemporary();
      S.FFDiag(Src, diag::note_constexpr_lifetime_ended, 1) << AK << !IsTemp;

      if (IsTemp)
        S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here);
      else
        S.Note(Ptr.getDeclLoc(), diag::note_declared_at);
    }

    return false;
  }

  return true;
}

/// Check whether a global variable described by \p Desc may be read in a
/// constant expression, diagnosing reads of non-constexpr globals.
bool CheckConstant(InterpState &S, CodePtr OpPC, const Descriptor *Desc) {
  assert(Desc);

  const auto *D = Desc->asVarDecl();
  if (!D || !D->hasGlobalStorage())
    return true;

  if (D == S.EvaluatingDecl)
    return true;

  if (D->isConstexpr())
    return true;

  // If we're evaluating the initializer for a constexpr variable in C23, we may
  // only read other constexpr variables. Abort here since this one isn't
  // constexpr.
  if (const auto *VD = dyn_cast_if_present<VarDecl>(S.EvaluatingDecl);
      VD && VD->isConstexpr() && S.getLangOpts().C23)
    return Invalid(S, OpPC);

  QualType T = D->getType();
  bool IsConstant = T.isConstant(S.getASTContext());
  if (T->isIntegralOrEnumerationType()) {
    if (!IsConstant) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  if (IsConstant) {
    // Constant but not constexpr: allowed, but only with a CCE diagnostic
    // in C++ modes.
    if (S.getLangOpts().CPlusPlus) {
      S.CCEDiag(S.Current->getLocation(OpPC),
                S.getLangOpts().CPlusPlus11
                    ? diag::note_constexpr_ltor_non_constexpr
                    : diag::note_constexpr_ltor_non_integral,
                1)
          << D << T;
      S.Note(D->getLocation(), diag::note_declared_at);
    } else {
      S.CCEDiag(S.Current->getLocation(OpPC));
    }
    return true;
  }

  if (T->isPointerOrReferenceType()) {
    if (!T->getPointeeType().isConstant(S.getASTContext()) ||
        !S.getLangOpts().CPlusPlus11) {
      diagnoseNonConstVariable(S, OpPC, D);
      return false;
    }
    return true;
  }

  diagnoseNonConstVariable(S, OpPC, D);
  return false;
}

/// Pointer overload of CheckConstant: only static block pointers need the
/// declaration-level check.
static bool CheckConstant(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isStatic() || !Ptr.isBlockPointer())
    return true;
  return CheckConstant(S, OpPC, Ptr.getDeclDesc());
}

/// Check that \p Ptr is not null when accessing a subobject of kind
/// \p CSK.
bool CheckNull(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               CheckSubobjectKind CSK) {
  if (!Ptr.isZero())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_null_subobject)
      << CSK << S.Current->getRange(OpPC);

  return false;
}

/// Check that \p Ptr is not a one-past-the-end pointer for access kind
/// \p AK. Only diagnosed in C++ modes.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                AccessKinds AK) {
  if (!Ptr.isOnePastEnd())
    return true;
  if (S.getLangOpts().CPlusPlus) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_access_past_end)
        << AK << S.Current->getRange(OpPC);
  }
  return false;
}

/// Check that \p Ptr does not point past the end of its element range
/// when forming a subobject of kind \p CSK.
bool CheckRange(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                CheckSubobjectKind CSK) {
  if (!Ptr.isElementPastEnd())
    return true;
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Check that a subobject of kind \p CSK is not formed from a
/// one-past-the-end pointer.
bool CheckSubobject(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                    CheckSubobjectKind CSK) {
  if (!Ptr.isOnePastEnd())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_past_end_subobject)
      << CSK << S.Current->getRange(OpPC);
  return false;
}

/// Check that a downcast by \p Offset bytes stays within the bounds of
/// the most-derived object \p Ptr points into.
bool CheckDowncast(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                   uint32_t Offset) {
  uint32_t MinOffset = Ptr.getDeclDesc()->getMetadataSize();
  uint32_t PtrOffset = Ptr.getByteOffset();

  // We subtract Offset from PtrOffset. The result must be at least
  // MinOffset.
  if (Offset < PtrOffset && (PtrOffset - Offset) >= MinOffset)
    return true;

  const auto *E = cast<CastExpr>(S.Current->getExpr(OpPC));
  QualType TargetQT = E->getType()->getPointeeType();
  QualType MostDerivedQT = Ptr.getDeclPtr().getType();

  S.CCEDiag(E, diag::note_constexpr_invalid_downcast)
      << MostDerivedQT << TargetQT;

  return false;
}

/// Check that a const object is not being modified. The object under
/// construction/destruction (the This of any frame's ctor/dtor) is
/// exempt.
bool CheckConst(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isConst() || Ptr.isMutable())
    return true;

  // The This pointer is writable in constructors and destructors,
  // even if isConst() returns true.
  // TODO(perf): We could be hitting this code path quite a lot in complex
  // constructors. Is there a better way to do this?
  if (S.Current->getFunction()) {
    for (const InterpFrame *Frame = S.Current; Frame; Frame = Frame->Caller) {
      if (const Function *Func = Frame->getFunction();
          Func && (Func->isConstructor() || Func->isDestructor()) &&
          Ptr.block() == Frame->getThis().block()) {
        return true;
      }
    }
  }

  if (!Ptr.isBlockPointer())
    return false;

  const QualType Ty = Ptr.getType();
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc, diag::note_constexpr_modify_const_type) << Ty;
  return false;
}

/// Check that a mutable field is only read if its lifetime began within
/// the current evaluation (permitted since C++14).
bool CheckMutable(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  assert(Ptr.isLive() && "Pointer is not live");
  if (!Ptr.isMutable())
    return true;

  // In C++14 onwards, it is permitted to read a mutable member whose
  // lifetime began within the evaluation.
  if (S.getLangOpts().CPlusPlus14 &&
      Ptr.block()->getEvalID() == S.Ctx.getEvalID())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  const FieldDecl *Field = Ptr.getField();
  S.FFDiag(Loc, diag::note_constexpr_access_mutable, 1) << AK_Read << Field;
  S.Note(Field->getLocation(), diag::note_declared_at);
  return false;
}

/// Check that \p Ptr does not refer to a volatile-qualified object.
static bool CheckVolatile(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                          AccessKinds AK) {
  assert(Ptr.isLive());

  // FIXME: This check here might be kinda expensive. Maybe it would be better
  // to have another field in InlineDescriptor for this?
  if (!Ptr.isBlockPointer())
    return true;

  QualType PtrType = Ptr.getType();
  if (!PtrType.isVolatileQualified())
    return true;

  const SourceInfo &Loc = S.Current->getSource(OpPC);
  if (S.getLangOpts().CPlusPlus)
    S.FFDiag(Loc, diag::note_constexpr_access_volatile_type) << AK << PtrType;
  else
    S.FFDiag(Loc);
  return false;
}

/// Check that the object \p Ptr refers to has been initialized,
/// diagnosing uninitialized reads with a variable-specific note when the
/// declaration is known.
bool CheckInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                      AccessKinds AK) {
  assert(Ptr.isLive());

  if (Ptr.isInitialized())
    return true;

  if (const auto *VD = Ptr.getDeclDesc()->asVarDecl();
      VD && (VD->isConstexpr() || VD->hasGlobalStorage())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    if (VD->getAnyInitializer()) {
      S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
      S.Note(VD->getLocation(), diag::note_declared_at);
    } else {
      diagnoseMissingInitializer(S, OpPC, VD);
    }
    return false;
  }

  if (!S.checkingPotentialConstantExpression()) {
    S.FFDiag(S.Current->getSource(OpPC), diag::note_constexpr_access_uninit)
        << AK << /*uninitialized=*/true << S.Current->getRange(OpPC);
  }
  return false;
}

/// Check that a global has been initialized, diagnosing globals whose
/// initializer is not a constant (or, in OpenCL pre-C++11, not an ICE).
bool CheckGlobalInitialized(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (Ptr.isInitialized())
    return true;

  assert(S.getLangOpts().CPlusPlus);
  const auto *VD = cast<VarDecl>(Ptr.getDeclDesc()->asValueDecl());
  if ((!VD->hasConstantInitialization() &&
       VD->mightBeUsableInConstantExpressions(S.getASTContext())) ||
      (S.getLangOpts().OpenCL && !S.getLangOpts().CPlusPlus11 &&
       !VD->hasICEInitializer(S.getASTContext()))) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_var_init_non_constant, 1) << VD;
    S.Note(VD->getLocation(), diag::note_declared_at);
  }
  return false;
}

/// Check that \p Ptr does not refer to a weak declaration, whose value is
/// unknown at compile time.
static bool CheckWeak(InterpState &S, CodePtr OpPC, const Pointer &Ptr) {
  if (!Ptr.isWeak())
    return true;

  const auto *VD = Ptr.getDeclDesc()->asVarDecl();
  assert(VD);
  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_var_init_weak)
      << VD;
  S.Note(VD->getLocation(), diag::note_declared_at);

  return false;
}

/// Full load check: run every individual access check required before
/// reading a value through \p Ptr. The order of the checks determines
/// which diagnostic wins when several would apply.
bool CheckLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
               AccessKinds AK) {
  if (!CheckLive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckConstant(S, OpPC, Ptr))
    return false;
  if (!CheckDummy(S, OpPC, Ptr, AK))
    return false;
  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, AK))
    return false;
  if (!CheckActive(S, OpPC, Ptr, AK))
    return false;
  if (!CheckInitialized(S, OpPC, Ptr, AK))
    return false;
  if (!CheckTemporary(S, OpPC, Ptr, AK))
    return false;
  if (!CheckWeak(S, OpPC, Ptr))
    return false;
  if (!CheckMutable(S, OpPC, Ptr))
    return false;
  if (!CheckVolatile(S, OpPC, Ptr, AK))
    return false;
  return true;
}

/// This is not used by any of the opcodes directly. It's used by
/// EvalEmitter to do the final lvalue-to-rvalue conversion.
623 bool CheckFinalLoad(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 624 if (!CheckLive(S, OpPC, Ptr, AK_Read)) 625 return false; 626 if (!CheckConstant(S, OpPC, Ptr)) 627 return false; 628 629 if (!CheckDummy(S, OpPC, Ptr, AK_Read)) 630 return false; 631 if (!CheckExtern(S, OpPC, Ptr)) 632 return false; 633 if (!CheckRange(S, OpPC, Ptr, AK_Read)) 634 return false; 635 if (!CheckActive(S, OpPC, Ptr, AK_Read)) 636 return false; 637 if (!CheckInitialized(S, OpPC, Ptr, AK_Read)) 638 return false; 639 if (!CheckTemporary(S, OpPC, Ptr, AK_Read)) 640 return false; 641 if (!CheckWeak(S, OpPC, Ptr)) 642 return false; 643 if (!CheckMutable(S, OpPC, Ptr)) 644 return false; 645 return true; 646 } 647 648 bool CheckStore(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 649 if (!CheckLive(S, OpPC, Ptr, AK_Assign)) 650 return false; 651 if (!CheckDummy(S, OpPC, Ptr, AK_Assign)) 652 return false; 653 if (!CheckExtern(S, OpPC, Ptr)) 654 return false; 655 if (!CheckRange(S, OpPC, Ptr, AK_Assign)) 656 return false; 657 if (!CheckGlobal(S, OpPC, Ptr)) 658 return false; 659 if (!CheckConst(S, OpPC, Ptr)) 660 return false; 661 return true; 662 } 663 664 bool CheckInvoke(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 665 if (!CheckLive(S, OpPC, Ptr, AK_MemberCall)) 666 return false; 667 if (!Ptr.isDummy()) { 668 if (!CheckExtern(S, OpPC, Ptr)) 669 return false; 670 if (!CheckRange(S, OpPC, Ptr, AK_MemberCall)) 671 return false; 672 } 673 return true; 674 } 675 676 bool CheckInit(InterpState &S, CodePtr OpPC, const Pointer &Ptr) { 677 if (!CheckLive(S, OpPC, Ptr, AK_Assign)) 678 return false; 679 if (!CheckRange(S, OpPC, Ptr, AK_Assign)) 680 return false; 681 return true; 682 } 683 684 bool CheckCallable(InterpState &S, CodePtr OpPC, const Function *F) { 685 686 if (F->isVirtual() && !S.getLangOpts().CPlusPlus20) { 687 const SourceLocation &Loc = S.Current->getLocation(OpPC); 688 S.CCEDiag(Loc, diag::note_constexpr_virtual_call); 689 return false; 690 } 691 692 if 
(F->isConstexpr() && F->hasBody() && 693 (F->getDecl()->isConstexpr() || F->getDecl()->hasAttr<MSConstexprAttr>())) 694 return true; 695 696 // Implicitly constexpr. 697 if (F->isLambdaStaticInvoker()) 698 return true; 699 700 const SourceLocation &Loc = S.Current->getLocation(OpPC); 701 if (S.getLangOpts().CPlusPlus11) { 702 const FunctionDecl *DiagDecl = F->getDecl(); 703 704 // Invalid decls have been diagnosed before. 705 if (DiagDecl->isInvalidDecl()) 706 return false; 707 708 // If this function is not constexpr because it is an inherited 709 // non-constexpr constructor, diagnose that directly. 710 const auto *CD = dyn_cast<CXXConstructorDecl>(DiagDecl); 711 if (CD && CD->isInheritingConstructor()) { 712 const auto *Inherited = CD->getInheritedConstructor().getConstructor(); 713 if (!Inherited->isConstexpr()) 714 DiagDecl = CD = Inherited; 715 } 716 717 // FIXME: If DiagDecl is an implicitly-declared special member function 718 // or an inheriting constructor, we should be much more explicit about why 719 // it's not constexpr. 720 if (CD && CD->isInheritingConstructor()) { 721 S.FFDiag(Loc, diag::note_constexpr_invalid_inhctor, 1) 722 << CD->getInheritedConstructor().getConstructor()->getParent(); 723 S.Note(DiagDecl->getLocation(), diag::note_declared_at); 724 } else { 725 // Don't emit anything if the function isn't defined and we're checking 726 // for a constant expression. It might be defined at the point we're 727 // actually calling it. 728 bool IsExtern = DiagDecl->getStorageClass() == SC_Extern; 729 if (!DiagDecl->isDefined() && !IsExtern && DiagDecl->isConstexpr() && 730 S.checkingPotentialConstantExpression()) 731 return false; 732 733 // If the declaration is defined, declared 'constexpr' _and_ has a body, 734 // the below diagnostic doesn't add anything useful. 
735 if (DiagDecl->isDefined() && DiagDecl->isConstexpr() && 736 DiagDecl->hasBody()) 737 return false; 738 739 S.FFDiag(Loc, diag::note_constexpr_invalid_function, 1) 740 << DiagDecl->isConstexpr() << (bool)CD << DiagDecl; 741 742 if (DiagDecl->getDefinition()) 743 S.Note(DiagDecl->getDefinition()->getLocation(), 744 diag::note_declared_at); 745 else 746 S.Note(DiagDecl->getLocation(), diag::note_declared_at); 747 } 748 } else { 749 S.FFDiag(Loc, diag::note_invalid_subexpr_in_const_expr); 750 } 751 752 return false; 753 } 754 755 bool CheckCallDepth(InterpState &S, CodePtr OpPC) { 756 if ((S.Current->getDepth() + 1) > S.getLangOpts().ConstexprCallDepth) { 757 S.FFDiag(S.Current->getSource(OpPC), 758 diag::note_constexpr_depth_limit_exceeded) 759 << S.getLangOpts().ConstexprCallDepth; 760 return false; 761 } 762 763 return true; 764 } 765 766 bool CheckThis(InterpState &S, CodePtr OpPC, const Pointer &This) { 767 if (!This.isZero()) 768 return true; 769 770 const SourceInfo &Loc = S.Current->getSource(OpPC); 771 772 bool IsImplicit = false; 773 if (const auto *E = dyn_cast_if_present<CXXThisExpr>(Loc.asExpr())) 774 IsImplicit = E->isImplicit(); 775 776 if (S.getLangOpts().CPlusPlus11) 777 S.FFDiag(Loc, diag::note_constexpr_this) << IsImplicit; 778 else 779 S.FFDiag(Loc); 780 781 return false; 782 } 783 784 bool CheckPure(InterpState &S, CodePtr OpPC, const CXXMethodDecl *MD) { 785 if (!MD->isPureVirtual()) 786 return true; 787 const SourceInfo &E = S.Current->getSource(OpPC); 788 S.FFDiag(E, diag::note_constexpr_pure_virtual_call, 1) << MD; 789 S.Note(MD->getLocation(), diag::note_declared_at); 790 return false; 791 } 792 793 bool CheckFloatResult(InterpState &S, CodePtr OpPC, const Floating &Result, 794 APFloat::opStatus Status, FPOptions FPO) { 795 // [expr.pre]p4: 796 // If during the evaluation of an expression, the result is not 797 // mathematically defined [...], the behavior is undefined. 798 // FIXME: C++ rules require us to not conform to IEEE 754 here. 
799 if (Result.isNan()) { 800 const SourceInfo &E = S.Current->getSource(OpPC); 801 S.CCEDiag(E, diag::note_constexpr_float_arithmetic) 802 << /*NaN=*/true << S.Current->getRange(OpPC); 803 return S.noteUndefinedBehavior(); 804 } 805 806 // In a constant context, assume that any dynamic rounding mode or FP 807 // exception state matches the default floating-point environment. 808 if (S.inConstantContext()) 809 return true; 810 811 if ((Status & APFloat::opInexact) && 812 FPO.getRoundingMode() == llvm::RoundingMode::Dynamic) { 813 // Inexact result means that it depends on rounding mode. If the requested 814 // mode is dynamic, the evaluation cannot be made in compile time. 815 const SourceInfo &E = S.Current->getSource(OpPC); 816 S.FFDiag(E, diag::note_constexpr_dynamic_rounding); 817 return false; 818 } 819 820 if ((Status != APFloat::opOK) && 821 (FPO.getRoundingMode() == llvm::RoundingMode::Dynamic || 822 FPO.getExceptionMode() != LangOptions::FPE_Ignore || 823 FPO.getAllowFEnvAccess())) { 824 const SourceInfo &E = S.Current->getSource(OpPC); 825 S.FFDiag(E, diag::note_constexpr_float_arithmetic_strict); 826 return false; 827 } 828 829 if ((Status & APFloat::opStatus::opInvalidOp) && 830 FPO.getExceptionMode() != LangOptions::FPE_Ignore) { 831 const SourceInfo &E = S.Current->getSource(OpPC); 832 // There is no usefully definable result. 
833 S.FFDiag(E); 834 return false; 835 } 836 837 return true; 838 } 839 840 bool CheckDynamicMemoryAllocation(InterpState &S, CodePtr OpPC) { 841 if (S.getLangOpts().CPlusPlus20) 842 return true; 843 844 const SourceInfo &E = S.Current->getSource(OpPC); 845 S.CCEDiag(E, diag::note_constexpr_new); 846 return true; 847 } 848 849 bool CheckNewDeleteForms(InterpState &S, CodePtr OpPC, 850 DynamicAllocator::Form AllocForm, 851 DynamicAllocator::Form DeleteForm, const Descriptor *D, 852 const Expr *NewExpr) { 853 if (AllocForm == DeleteForm) 854 return true; 855 856 QualType TypeToDiagnose; 857 // We need to shuffle things around a bit here to get a better diagnostic, 858 // because the expression we allocated the block for was of type int*, 859 // but we want to get the array size right. 860 if (D->isArray()) { 861 QualType ElemQT = D->getType()->getPointeeType(); 862 TypeToDiagnose = S.getASTContext().getConstantArrayType( 863 ElemQT, APInt(64, static_cast<uint64_t>(D->getNumElems()), false), 864 nullptr, ArraySizeModifier::Normal, 0); 865 } else 866 TypeToDiagnose = D->getType()->getPointeeType(); 867 868 const SourceInfo &E = S.Current->getSource(OpPC); 869 S.FFDiag(E, diag::note_constexpr_new_delete_mismatch) 870 << static_cast<int>(DeleteForm) << static_cast<int>(AllocForm) 871 << TypeToDiagnose; 872 S.Note(NewExpr->getExprLoc(), diag::note_constexpr_dynamic_alloc_here) 873 << NewExpr->getSourceRange(); 874 return false; 875 } 876 877 bool CheckDeleteSource(InterpState &S, CodePtr OpPC, const Expr *Source, 878 const Pointer &Ptr) { 879 // Regular new type(...) call. 880 if (isa_and_nonnull<CXXNewExpr>(Source)) 881 return true; 882 // operator new. 
883 if (const auto *CE = dyn_cast_if_present<CallExpr>(Source); 884 CE && CE->getBuiltinCallee() == Builtin::BI__builtin_operator_new) 885 return true; 886 // std::allocator.allocate() call 887 if (const auto *MCE = dyn_cast_if_present<CXXMemberCallExpr>(Source); 888 MCE && MCE->getMethodDecl()->getIdentifier()->isStr("allocate")) 889 return true; 890 891 // Whatever this is, we didn't heap allocate it. 892 const SourceInfo &Loc = S.Current->getSource(OpPC); 893 S.FFDiag(Loc, diag::note_constexpr_delete_not_heap_alloc) 894 << Ptr.toDiagnosticString(S.getASTContext()); 895 896 if (Ptr.isTemporary()) 897 S.Note(Ptr.getDeclLoc(), diag::note_constexpr_temporary_here); 898 else 899 S.Note(Ptr.getDeclLoc(), diag::note_declared_at); 900 return false; 901 } 902 903 /// We aleady know the given DeclRefExpr is invalid for some reason, 904 /// now figure out why and print appropriate diagnostics. 905 bool CheckDeclRef(InterpState &S, CodePtr OpPC, const DeclRefExpr *DR) { 906 const ValueDecl *D = DR->getDecl(); 907 return diagnoseUnknownDecl(S, OpPC, D); 908 } 909 910 bool CheckDummy(InterpState &S, CodePtr OpPC, const Pointer &Ptr, 911 AccessKinds AK) { 912 if (!Ptr.isDummy()) 913 return true; 914 915 const Descriptor *Desc = Ptr.getDeclDesc(); 916 const ValueDecl *D = Desc->asValueDecl(); 917 if (!D) 918 return false; 919 920 if (AK == AK_Read || AK == AK_Increment || AK == AK_Decrement) 921 return diagnoseUnknownDecl(S, OpPC, D); 922 923 assert(AK == AK_Assign); 924 if (S.getLangOpts().CPlusPlus14) { 925 const SourceInfo &E = S.Current->getSource(OpPC); 926 S.FFDiag(E, diag::note_constexpr_modify_global); 927 } 928 return false; 929 } 930 931 bool CheckNonNullArgs(InterpState &S, CodePtr OpPC, const Function *F, 932 const CallExpr *CE, unsigned ArgSize) { 933 auto Args = llvm::ArrayRef(CE->getArgs(), CE->getNumArgs()); 934 auto NonNullArgs = collectNonNullArgs(F->getDecl(), Args); 935 unsigned Offset = 0; 936 unsigned Index = 0; 937 for (const Expr *Arg : Args) { 938 if 
(NonNullArgs[Index] && Arg->getType()->isPointerType()) {
      const Pointer &ArgPtr = S.Stk.peek<Pointer>(ArgSize - Offset);
      if (ArgPtr.isZero()) {
        const SourceLocation &Loc = S.Current->getLocation(OpPC);
        S.CCEDiag(Loc, diag::note_non_null_attribute_failed);
        return false;
      }
    }

    // Advance Offset to the stack slot of the next argument.
    Offset += align(primSize(S.Ctx.classify(Arg).value_or(PT_Ptr)));
    ++Index;
  }
  return true;
}

// FIXME: This is similar to code we already have in Compiler.cpp.
// I think it makes sense to instead add the field and base destruction stuff
// to the destructor Function itself. Then destroying a record would really
// _just_ be calling its destructor. That would also help with the diagnostic
// difference when the destructor or a field/base fails.
/// Run the destructor of the record \p BasePtr points to, if it has a
/// non-trivial one. Returns false (with diagnostics emitted) on failure.
static bool runRecordDestructor(InterpState &S, CodePtr OpPC,
                                const Pointer &BasePtr,
                                const Descriptor *Desc) {
  assert(Desc->isRecord());
  const Record *R = Desc->ElemRecord;
  assert(R);

  // If BasePtr shares a block with the current frame's 'this' pointer,
  // we'd be destroying the object a member function is currently running
  // on; diagnose that as a double-destroy.
  if (Pointer::pointToSameBlock(BasePtr, S.Current->getThis())) {
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_destroy);
    return false;
  }

  // Destructor of this record.
  if (const CXXDestructorDecl *Dtor = R->getDestructor();
      Dtor && !Dtor->isTrivial()) {
    const Function *DtorFunc = S.getContext().getOrCreateFunction(Dtor);
    if (!DtorFunc)
      return false;

    // The destructor takes the object as its (implicit) 'this' argument.
    S.Stk.push<Pointer>(BasePtr);
    if (!Call(S, OpPC, DtorFunc, 0))
      return false;
  }
  return true;
}

/// Run the destructors for the contents of block \p B: nothing for
/// primitives and primitive arrays, element-wise for composite arrays,
/// and directly for a single record.
static bool RunDestructors(InterpState &S, CodePtr OpPC, const Block *B) {
  assert(B);
  const Descriptor *Desc = B->getDescriptor();

  // Primitives have no destructors to run.
  if (Desc->isPrimitive() || Desc->isPrimitiveArray())
    return true;

  assert(Desc->isRecord() || Desc->isCompositeArray());

  if (Desc->isCompositeArray()) {
    const Descriptor *ElemDesc = Desc->ElemDesc;
    assert(ElemDesc->isRecord());

    // Destroy each array element in turn.
    Pointer RP(const_cast<Block *>(B));
    for (unsigned I = 0; I != Desc->getNumElems(); ++I) {
      if (!runRecordDestructor(S, OpPC, RP.atIndex(I).narrow(), ElemDesc))
        return false;
    }
    return true;
  }

  assert(Desc->isRecord());
  return runRecordDestructor(S, OpPC, Pointer(const_cast<Block *>(B)), Desc);
}

/// Returns true if \p T is a class type whose destructor is virtual.
static bool hasVirtualDestructor(QualType T) {
  if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
    if (const CXXDestructorDecl *DD = RD->getDestructor())
      return DD->isVirtual();
  return false;
}

/// Implements a delete expression: pops the pointer operand, validates it,
/// runs destructors and deallocates the underlying block.
/// \p DeleteIsArrayForm is true for 'delete[]', \p IsGlobalDelete for
/// '::delete'.
bool Free(InterpState &S, CodePtr OpPC, bool DeleteIsArrayForm,
          bool IsGlobalDelete) {
  if (!CheckDynamicMemoryAllocation(S, OpPC))
    return false;

  const Expr *Source = nullptr;
  const Block *BlockToDelete = nullptr;
  {
    // Extra scope for this so the block doesn't have this pointer
    // pointing to it when we destroy it.
    Pointer Ptr = S.Stk.pop<Pointer>();

    // Deleting nullptr is always fine.
    if (Ptr.isZero())
      return true;

    // Remove base casts.
    QualType InitialType = Ptr.getType();
    while (Ptr.isBaseClass())
      Ptr = Ptr.getBase();

    // For the non-array case, the types must match if the static type
    // does not have a virtual destructor.
    if (!DeleteIsArrayForm && Ptr.getType() != InitialType &&
        !hasVirtualDestructor(InitialType)) {
      S.FFDiag(S.Current->getSource(OpPC),
               diag::note_constexpr_delete_base_nonvirt_dtor)
          << InitialType << Ptr.getType();
      return false;
    }

    // Only a whole, complete object can be deleted; not a subobject,
    // one-past-the-end pointer, or array element.
    if (!Ptr.isRoot() || Ptr.isOnePastEnd() || Ptr.isArrayElement()) {
      const SourceInfo &Loc = S.Current->getSource(OpPC);
      S.FFDiag(Loc, diag::note_constexpr_delete_subobject)
          << Ptr.toDiagnosticString(S.getASTContext()) << Ptr.isOnePastEnd();
      return false;
    }

    Source = Ptr.getDeclDesc()->asExpr();
    BlockToDelete = Ptr.block();

    if (!CheckDeleteSource(S, OpPC, Source, Ptr))
      return false;

    // For a class type with a virtual destructor, the selected operator delete
    // is the one looked up when building the destructor.
    QualType AllocType = Ptr.getType();
    if (!DeleteIsArrayForm && !IsGlobalDelete) {
      auto getVirtualOperatorDelete = [](QualType T) -> const FunctionDecl * {
        if (const CXXRecordDecl *RD = T->getAsCXXRecordDecl())
          if (const CXXDestructorDecl *DD = RD->getDestructor())
            return DD->isVirtual() ? DD->getOperatorDelete() : nullptr;
        return nullptr;
      };

      if (const FunctionDecl *VirtualDelete =
              getVirtualOperatorDelete(AllocType);
          VirtualDelete &&
          !VirtualDelete->isReplaceableGlobalAllocationFunction()) {
        S.FFDiag(S.Current->getSource(OpPC),
                 diag::note_constexpr_new_non_replaceable)
            << isa<CXXMethodDecl>(VirtualDelete) << VirtualDelete;
        return false;
      }
    }
  }
  assert(Source);
  assert(BlockToDelete);

  // Invoke destructors before deallocating the memory.
  if (!RunDestructors(S, OpPC, BlockToDelete))
    return false;

  DynamicAllocator &Allocator = S.getAllocator();
  const Descriptor *BlockDesc = BlockToDelete->getDescriptor();
  // Record the allocation form (new vs. new[]) before deallocating, so we
  // can still check it against the delete form afterwards.
  std::optional<DynamicAllocator::Form> AllocForm =
      Allocator.getAllocationForm(Source);

  if (!Allocator.deallocate(Source, BlockToDelete, S)) {
    // Nothing has been deallocated, this must be a double-delete.
    const SourceInfo &Loc = S.Current->getSource(OpPC);
    S.FFDiag(Loc, diag::note_constexpr_double_delete);
    return false;
  }

  assert(AllocForm);
  DynamicAllocator::Form DeleteForm = DeleteIsArrayForm
                                          ? DynamicAllocator::Form::Array
                                          : DynamicAllocator::Form::NonArray;
  // Diagnose new/delete[] and new[]/delete mismatches.
  return CheckNewDeleteForms(S, OpPC, *AllocForm, DeleteForm, BlockDesc,
                             Source);
}

/// Emit a CCE diagnostic if \p Value is outside the representable range of
/// the unscoped enum \p ED.
void diagnoseEnumValue(InterpState &S, CodePtr OpPC, const EnumDecl *ED,
                       const APSInt &Value) {
  llvm::APInt Min;
  llvm::APInt Max;

  // Only diagnose while evaluating a constexpr declaration.
  if (S.EvaluatingDecl && !S.EvaluatingDecl->isConstexpr())
    return;

  ED->getValueRange(Max, Min);
  // The decrement suggests getValueRange() hands back an exclusive upper
  // bound; make it inclusive for the comparisons below.
  --Max;

  if (ED->getNumNegativeBits() &&
      (Max.slt(Value.getSExtValue()) || Min.sgt(Value.getSExtValue()))) {
    // Signed range: compare with signed semantics.
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getSExtValue() << Max.getSExtValue()
        << ED;
  } else if (!ED->getNumNegativeBits() && Max.ult(Value.getZExtValue())) {
    // Unsigned range: compare with unsigned semantics.
    const SourceLocation &Loc = S.Current->getLocation(OpPC);
    S.CCEDiag(Loc, diag::note_constexpr_unscoped_enum_out_of_range)
        << llvm::toString(Value, 10) << Min.getZExtValue() << Max.getZExtValue()
        << ED;
  }
}

/// Diagnose use of a non-literal type \p T in a constant expression.
/// Only called pre-C++23 (where non-literal types became allowed).
bool CheckLiteralType(InterpState &S, CodePtr OpPC, const Type *T) {
  assert(T);
  assert(!S.getLangOpts().CPlusPlus23);

  // C++1y: A constant initializer for an object o [...] may also invoke
  // constexpr constructors for o and its subobjects even if those objects
  // are of non-literal class types.
  //
  // C++11 missed this detail for aggregates, so classes like this:
  //   struct foo_t { union { int i; volatile int j; } u; };
  // are not (obviously) initializable like so:
  //   __attribute__((__require_constant_initialization__))
  //   static const foo_t x = {{0}};
  // because "i" is a subobject with non-literal initialization (due to the
  // volatile member of the union). See:
  //   http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html#1677
  // Therefore, we use the C++1y behavior.

  // Per the C++1y rule above: allow a constructor running on the object
  // currently being evaluated, even if its type is non-literal.
  if (S.Current->getFunction() && S.Current->getFunction()->isConstructor() &&
      S.Current->getThis().getDeclDesc()->asDecl() == S.EvaluatingDecl) {
    return true;
  }

  const Expr *E = S.Current->getExpr(OpPC);
  if (S.getLangOpts().CPlusPlus11)
    S.FFDiag(E, diag::note_constexpr_nonliteral) << E->getType();
  else
    S.FFDiag(E, diag::note_invalid_subexpr_in_const_expr);
  return false;
}

/// Shared implementation of GetPtrField/GetPtrFieldPop: validate \p Ptr and
/// push a pointer to the field at byte offset \p Off.
static bool getField(InterpState &S, CodePtr OpPC, const Pointer &Ptr,
                     uint32_t Off) {
  if (S.getLangOpts().CPlusPlus && S.inConstantContext() &&
      !CheckNull(S, OpPC, Ptr, CSK_Field))
    return false;

  if (!CheckExtern(S, OpPC, Ptr))
    return false;
  if (!CheckRange(S, OpPC, Ptr, CSK_Field))
    return false;
  if (!CheckArray(S, OpPC, Ptr))
    return false;
  if (!CheckSubobject(S, OpPC, Ptr, CSK_Field))
    return false;

  // Integral pointers just get the offset applied arithmetically.
  if (Ptr.isIntegralPointer()) {
    S.Stk.push<Pointer>(Ptr.asIntPointer().atOffset(S.getASTContext(), Off));
    return true;
  }

  if (!Ptr.isBlockPointer()) {
    // FIXME: The only time we (seem to) get here is when trying to access a
    // field of a typeid pointer. In that case, we're supposed to diagnose e.g.
    // `typeid(int).name`, but we currently diagnose `&typeid(int)`.
    S.FFDiag(S.Current->getSource(OpPC),
             diag::note_constexpr_access_unreadable_object)
        << AK_Read << Ptr.toDiagnosticString(S.getASTContext());
    return false;
  }

  // Refuse offsets past the end of the underlying block.
  if (Off > Ptr.block()->getSize())
    return false;

  S.Stk.push<Pointer>(Ptr.atField(Off));
  return true;
}

/// Push a pointer to the field at offset \p Off of the pointer on top of
/// the stack, keeping the base pointer on the stack.
bool GetPtrField(InterpState &S, CodePtr OpPC, uint32_t Off) {
  const auto &Ptr = S.Stk.peek<Pointer>();
  return getField(S, OpPC, Ptr, Off);
}

/// Same as GetPtrField, but pops the base pointer off the stack.
bool GetPtrFieldPop(InterpState &S, CodePtr OpPC, uint32_t Off) {
  const auto &Ptr = S.Stk.pop<Pointer>();
  return getField(S, OpPC, Ptr, Off);
}

/// Diagnose constructor calls on objects whose class has virtual bases.
static bool checkConstructor(InterpState &S, CodePtr OpPC, const Function *Func,
                             const Pointer &ThisPtr) {
  assert(Func->isConstructor());

  const Descriptor *D = ThisPtr.getFieldDesc();

  // FIXME: I think this case is not 100% correct. E.g. a pointer into a
  // subobject of a composite array.
  if (!D->ElemRecord)
    return true;

  if (D->ElemRecord->getNumVirtualBases() == 0)
    return true;

  S.FFDiag(S.Current->getLocation(OpPC), diag::note_constexpr_virtual_base)
      << Func->getParentDecl();
  return false;
}

/// Call \p Func with \p VarArgSize bytes of variadic arguments already on
/// the stack. Sets up a new InterpFrame, interprets the body and restores
/// the previous frame on failure.
bool CallVar(InterpState &S, CodePtr OpPC, const Function *Func,
             uint32_t VarArgSize) {
  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    // The 'this' pointer sits below the arguments (and the RVO pointer,
    // if present) on the stack.
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (!(S.Current->getFunction() &&
          S.Current->getFunction()->isLambdaStaticInvoker() &&
          Func->isLambdaCallOperator())) {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return false;
    }

    if (S.checkingPotentialConstantExpression())
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return false;

  if (!CheckCallDepth(S, OpPC))
    return false;

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Call \p Func non-virtually. Arguments (and 'this'/RVO pointers, if any)
/// are already on the stack; on failure, the argument area is cleaned up
/// via cleanupAfterFunctionCall().
bool Call(InterpState &S, CodePtr OpPC, const Function *Func,
          uint32_t VarArgSize) {
  assert(Func);
  // Failure path that also removes the already-pushed arguments.
  auto cleanup = [&]() -> bool {
    cleanupAfterFunctionCall(S, OpPC, Func);
    return false;
  };

  if (Func->hasThisPointer()) {
    size_t ArgSize = Func->getArgSize() + VarArgSize;
    size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);

    const Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

    // C++23 [expr.const]p5.6
    // an invocation of a virtual function ([class.virtual]) for an object whose
    // dynamic type is constexpr-unknown;
    if (ThisPtr.isDummy() && Func->isVirtual())
      return false;

    // If the current function is a lambda static invoker and
    // the function we're about to call is a lambda call operator,
    // skip the CheckInvoke, since the ThisPtr is a null pointer
    // anyway.
    if (S.Current->getFunction() &&
        S.Current->getFunction()->isLambdaStaticInvoker() &&
        Func->isLambdaCallOperator()) {
      assert(ThisPtr.isZero());
    } else {
      if (!CheckInvoke(S, OpPC, ThisPtr))
        return cleanup();
    }

    if (Func->isConstructor() && !checkConstructor(S, OpPC, Func, ThisPtr))
      return false;
  }

  if (!CheckCallable(S, OpPC, Func))
    return cleanup();

  // FIXME: The isConstructor() check here is not always right. The current
  // constant evaluator is somewhat inconsistent in when it allows a function
  // call when checking for a constant expression.
  if (Func->hasThisPointer() && S.checkingPotentialConstantExpression() &&
      !Func->isConstructor())
    return cleanup();

  if (!CheckCallDepth(S, OpPC))
    return cleanup();

  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC, VarArgSize);
  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  // Immediate (consteval) callees force constant-context evaluation for the
  // duration of the call.
  InterpStateCCOverride CCOverride(S, Func->getDecl()->isImmediateFunction());
  // Note that we cannot assert(CallResult.hasValue()) here since
  // Ret() above only sets the APValue if the current frame doesn't
  // have a caller set.
  if (Interpret(S)) {
    NewFrame.release(); // Frame was delete'd already.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Perform a virtual call: determine the dynamic type of the 'this'
/// pointer on the stack, resolve the overriding function, and dispatch
/// to Call() with it. Also handles covariant return types.
bool CallVirt(InterpState &S, CodePtr OpPC, const Function *Func,
              uint32_t VarArgSize) {
  assert(Func->hasThisPointer());
  assert(Func->isVirtual());
  size_t ArgSize = Func->getArgSize() + VarArgSize;
  size_t ThisOffset = ArgSize - (Func->hasRVO() ? primSize(PT_Ptr) : 0);
  Pointer &ThisPtr = S.Stk.peek<Pointer>(ThisOffset);

  // Determine the most-derived record type the pointer refers to by
  // stripping base-class subobject pointers.
  const CXXRecordDecl *DynamicDecl = nullptr;
  {
    Pointer TypePtr = ThisPtr;
    while (TypePtr.isBaseClass())
      TypePtr = TypePtr.getBase();

    QualType DynamicType = TypePtr.getType();
    if (DynamicType->isPointerType() || DynamicType->isReferenceType())
      DynamicDecl = DynamicType->getPointeeCXXRecordDecl();
    else
      DynamicDecl = DynamicType->getAsCXXRecordDecl();
  }
  assert(DynamicDecl);

  const auto *StaticDecl = cast<CXXRecordDecl>(Func->getParentDecl());
  const auto *InitialFunction = cast<CXXMethodDecl>(Func->getDecl());
  const CXXMethodDecl *Overrider = S.getContext().getOverridingFunction(
      DynamicDecl, StaticDecl, InitialFunction);

  if (Overrider != InitialFunction) {
    // DR1872: An instantiated virtual constexpr function can't be called in a
    // constant expression (prior to C++20). We can still constant-fold such a
    // call.
    if (!S.getLangOpts().CPlusPlus20 && Overrider->isVirtual()) {
      const Expr *E = S.Current->getExpr(OpPC);
      S.CCEDiag(E, diag::note_constexpr_virtual_call) << E->getSourceRange();
    }

    Func = S.getContext().getOrCreateFunction(Overrider);

    const CXXRecordDecl *ThisFieldDecl =
        ThisPtr.getFieldDesc()->getType()->getAsCXXRecordDecl();
    if (Func->getParentDecl()->isDerivedFrom(ThisFieldDecl)) {
      // If the function we call is further DOWN the hierarchy than the
      // FieldDesc of our pointer, just go up the hierarchy of this field
      // the furthest we can go.
      while (ThisPtr.isBaseClass())
        ThisPtr = ThisPtr.getBase();
    }
  }

  if (!Call(S, OpPC, Func, VarArgSize))
    return false;

  // Covariant return types. The return type of Overrider is a pointer
  // or reference to a class type.
  if (Overrider != InitialFunction &&
      Overrider->getReturnType()->isPointerOrReferenceType() &&
      InitialFunction->getReturnType()->isPointerOrReferenceType()) {
    QualType OverriderPointeeType =
        Overrider->getReturnType()->getPointeeType();
    QualType InitialPointeeType =
        InitialFunction->getReturnType()->getPointeeType();
    // We've called Overrider above, but calling code expects us to return what
    // InitialFunction returned. According to the rules for covariant return
    // types, what InitialFunction returns needs to be a base class of what
    // Overrider returns. So, we need to do an upcast here.
    unsigned Offset = S.getContext().collectBaseOffset(
        InitialPointeeType->getAsRecordDecl(),
        OverriderPointeeType->getAsRecordDecl());
    return GetPtrBasePop(S, OpPC, Offset);
  }

  return true;
}

/// Call the builtin function \p Func identified by \p BuiltinID for call
/// expression \p CE, using a temporary frame for the duration of the call.
bool CallBI(InterpState &S, CodePtr OpPC, const Function *Func,
            const CallExpr *CE, uint32_t BuiltinID) {
  // A little arbitrary, but the current interpreter allows evaluation
  // of builtin functions in this mode, with some exceptions.
  if (BuiltinID == Builtin::BI__builtin_operator_new &&
      S.checkingPotentialConstantExpression())
    return false;
  auto NewFrame = std::make_unique<InterpFrame>(S, Func, OpPC);

  InterpFrame *FrameBefore = S.Current;
  S.Current = NewFrame.get();

  if (InterpretBuiltin(S, OpPC, Func, CE, BuiltinID)) {
    // Release ownership of NewFrame to prevent it from being deleted.
    NewFrame.release(); // Frame was deleted already.
    // Ensure that S.Current is correctly reset to the previous frame.
    assert(S.Current == FrameBefore);
    return true;
  }

  // Interpreting the function failed somehow. Reset to
  // previous state.
  S.Current = FrameBefore;
  return false;
}

/// Call through a function pointer popped from the stack. \p ArgSize is the
/// total size of the arguments already pushed for the call.
bool CallPtr(InterpState &S, CodePtr OpPC, uint32_t ArgSize,
             const CallExpr *CE) {
  const FunctionPointer &FuncPtr = S.Stk.pop<FunctionPointer>();

  // Calling a null function pointer is not a constant expression.
  const Function *F = FuncPtr.getFunction();
  if (!F) {
    const auto *E = cast<CallExpr>(S.Current->getExpr(OpPC));
    S.FFDiag(E, diag::note_constexpr_null_callee)
        << const_cast<Expr *>(E->getCallee()) << E->getSourceRange();
    return false;
  }

  if (!FuncPtr.isValid() || !F->getDecl())
    return Invalid(S, OpPC);

  assert(F);

  // This happens when the call expression has been cast to
  // something else, but we don't support that.
  if (S.Ctx.classify(F->getDecl()->getReturnType()) !=
      S.Ctx.classify(CE->getType()))
    return false;

  // Check argument nullability state.
  if (F->hasNonNullAttr()) {
    if (!CheckNonNullArgs(S, OpPC, F, CE, ArgSize))
      return false;
  }

  // Anything beyond the declared parameters is variadic-argument storage.
  assert(ArgSize >= F->getWrittenArgSize());
  uint32_t VarArgSize = ArgSize - F->getWrittenArgSize();

  // We need to do this explicitly here since we don't have the necessary
  // information to do it automatically.
  if (F->isThisPointerExplicit())
    VarArgSize -= align(primSize(PT_Ptr));

  if (F->isVirtual())
    return CallVirt(S, OpPC, F, VarArgSize);

  return Call(S, OpPC, F, VarArgSize);
}

/// For placement new: check that the storage pointed to by the pointer on
/// top of the stack is suitable for constructing \p E's allocated type
/// (optionally an array of \p ArraySize elements).
bool CheckNewTypeMismatch(InterpState &S, CodePtr OpPC, const Expr *E,
                          std::optional<uint64_t> ArraySize) {
  const Pointer &Ptr = S.Stk.peek<Pointer>();

  if (!CheckStore(S, OpPC, Ptr))
    return false;

  if (!InvalidNewDeleteExpr(S, OpPC, E))
    return false;

  const auto *NewExpr = cast<CXXNewExpr>(E);
  QualType StorageType = Ptr.getType();

  if ((isa_and_nonnull<CXXNewExpr>(Ptr.getFieldDesc()->asExpr()) ||
       isa_and_nonnull<CXXMemberCallExpr>(Ptr.getFieldDesc()->asExpr())) &&
      StorageType->isPointerType()) {
    // FIXME: Are there other cases where this is a problem?
    StorageType = StorageType->getPointeeType();
  }

  // Compute the type being constructed; for the array form, wrap the
  // allocated type in a constant array of the requested size.
  const ASTContext &ASTCtx = S.getASTContext();
  QualType AllocType;
  if (ArraySize) {
    AllocType = ASTCtx.getConstantArrayType(
        NewExpr->getAllocatedType(),
        APInt(64, static_cast<uint64_t>(*ArraySize), false), nullptr,
        ArraySizeModifier::Normal, 0);
  } else {
    AllocType = NewExpr->getAllocatedType();
  }

  // The allocation must fit into the storage, element-count wise, and the
  // element types must be similar.
  unsigned StorageSize = 1;
  unsigned AllocSize = 1;
  if (const auto *CAT = dyn_cast<ConstantArrayType>(AllocType))
    AllocSize = CAT->getZExtSize();
  if (const auto *CAT = dyn_cast<ConstantArrayType>(StorageType))
    StorageSize = CAT->getZExtSize();

  if (AllocSize > StorageSize ||
      !ASTCtx.hasSimilarType(ASTCtx.getBaseElementType(AllocType),
                             ASTCtx.getBaseElementType(StorageType))) {
    S.FFDiag(S.Current->getLocation(OpPC),
             diag::note_constexpr_placement_new_wrong_type)
        << StorageType << AllocType;
    return false;
  }

  // Can't activate fields in a union, unless the direct base is the union.
  if (Ptr.inUnion() && !Ptr.isActive() && !Ptr.getBase().getRecord()->isUnion())
    return CheckActive(S, OpPC, Ptr, AK_Construct);

  return true;
}

/// Diagnose a new/delete expression that isn't usable in a constant
/// expression. Returns true when the expression is acceptable (C++26, or a
/// placement new inside an std:: function); otherwise emits a diagnostic
/// and returns false.
bool InvalidNewDeleteExpr(InterpState &S, CodePtr OpPC, const Expr *E) {
  assert(E);

  // C++26 allows these.
  if (S.getLangOpts().CPlusPlus26)
    return true;

  const auto &Loc = S.Current->getSource(OpPC);

  if (const auto *NewExpr = dyn_cast<CXXNewExpr>(E)) {
    const FunctionDecl *OperatorNew = NewExpr->getOperatorNew();

    if (!S.getLangOpts().CPlusPlus26 && NewExpr->getNumPlacementArgs() > 0) {
      // This is allowed pre-C++26, but only an std function.
      if (S.Current->isStdFunction())
        return true;
      S.FFDiag(Loc, diag::note_constexpr_new_placement)
          << /*C++26 feature*/ 1 << E->getSourceRange();
    } else if (NewExpr->getNumPlacementArgs() == 1 &&
               !OperatorNew->isReservedGlobalPlacementOperator()) {
      S.FFDiag(Loc, diag::note_constexpr_new_placement)
          << /*Unsupported*/ 0 << E->getSourceRange();
    } else if (!OperatorNew->isReplaceableGlobalAllocationFunction()) {
      S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(OperatorNew) << OperatorNew;
    }
  } else {
    const auto *DeleteExpr = cast<CXXDeleteExpr>(E);
    const FunctionDecl *OperatorDelete = DeleteExpr->getOperatorDelete();
    if (!OperatorDelete->isReplaceableGlobalAllocationFunction()) {
      S.FFDiag(Loc, diag::note_constexpr_new_non_replaceable)
          << isa<CXXMethodDecl>(OperatorDelete) << OperatorDelete;
    }
  }

  return false;
}

/// Diagnose overflow of a fixed-point operation resulting in \p FP.
bool handleFixedPointOverflow(InterpState &S, CodePtr OpPC,
                              const FixedPoint &FP) {
  const Expr *E = S.Current->getExpr(OpPC);
  // When checking for UB (rather than constant-evaluating), report a
  // warning through the normal diagnostics engine as well.
  if (S.checkingForUndefinedBehavior()) {
    S.getASTContext().getDiagnostics().Report(
        E->getExprLoc(), diag::warn_fixedpoint_constant_overflow)
        << FP.toDiagnosticString(S.getASTContext()) << E->getType();
  }

  S.CCEDiag(E, diag::note_constexpr_overflow)
      << FP.toDiagnosticString(S.getASTContext()) << E->getType();
  return S.noteUndefinedBehavior();
}

/// Diagnose a -1 (wildcard) index in a __builtin_shufflevector call during
/// constant evaluation.
bool InvalidShuffleVectorIndex(InterpState &S, CodePtr OpPC, uint32_t Index) {
  const SourceInfo &Loc = S.Current->getSource(OpPC);
  S.FFDiag(Loc,
           diag::err_shufflevector_minus_one_is_undefined_behavior_constexpr)
      << Index;
  return false;
}

/// Check a pointer-to-integral cast of \p Ptr to a type of \p BitWidth bits.
/// Emits a CCE diagnostic; rejects lossy casts of block pointers.
bool CheckPointerToIntegralCast(InterpState &S, CodePtr OpPC,
                                const Pointer &Ptr, unsigned BitWidth) {
  if (Ptr.isDummy())
    return false;

  const SourceInfo &E = S.Current->getSource(OpPC);
  S.CCEDiag(E, diag::note_constexpr_invalid_cast)
      << 2 << S.getLangOpts().CPlusPlus << S.Current->getRange(OpPC);

  if (Ptr.isBlockPointer() && !Ptr.isZero()) {
    // Only allow based lvalue casts if they are lossless.
    if (S.getASTContext().getTargetInfo().getPointerWidth(LangAS::Default) !=
        BitWidth)
      return Invalid(S, OpPC);
  }
  return true;
}

/// Cast the pointer on top of the stack to an unsigned arbitrary-precision
/// integer of \p BitWidth bits.
bool CastPointerIntegralAP(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
    return false;

  S.Stk.push<IntegralAP<false>>(
      IntegralAP<false>::from(Ptr.getIntegerRepresentation(), BitWidth));
  return true;
}

/// Cast the pointer on top of the stack to a signed arbitrary-precision
/// integer of \p BitWidth bits.
bool CastPointerIntegralAPS(InterpState &S, CodePtr OpPC, uint32_t BitWidth) {
  const Pointer &Ptr = S.Stk.pop<Pointer>();

  if (!CheckPointerToIntegralCast(S, OpPC, Ptr, BitWidth))
    return false;

  S.Stk.push<IntegralAP<true>>(
      IntegralAP<true>::from(Ptr.getIntegerRepresentation(), BitWidth));
  return true;
}

/// Check a __builtin_bit_cast result for indeterminate bits; those may only
/// be cast to unsigned char or std::byte.
bool CheckBitCast(InterpState &S, CodePtr OpPC, bool HasIndeterminateBits,
                  bool TargetIsUCharOrByte) {
  // This is always fine.
  if (!HasIndeterminateBits)
    return true;

  // Indeterminate bits can only be bitcast to unsigned char or std::byte.
  if (TargetIsUCharOrByte)
    return true;

  const Expr *E = S.Current->getExpr(OpPC);
  QualType ExprType = E->getType();
  S.FFDiag(E, diag::note_constexpr_bit_cast_indet_dest)
      << ExprType << S.getLangOpts().CharIsSigned << E->getSourceRange();
  return false;
}

/// Push a typeid pointer for the statically-known type \p TypePtr, typed as
/// \p TypeInfoType (i.e. std::type_info).
bool GetTypeid(InterpState &S, CodePtr OpPC, const Type *TypePtr,
               const Type *TypeInfoType) {
  S.Stk.push<Pointer>(TypePtr, TypeInfoType);
  return true;
}

/// Pop a pointer and push a typeid pointer for its type.
bool GetTypeidPtr(InterpState &S, CodePtr OpPC, const Type *TypeInfoType) {
  const auto &P = S.Stk.pop<Pointer>();

  if (!P.isBlockPointer())
    return false;

  S.Stk.push<Pointer>(P.getType().getTypePtr(), TypeInfoType);
  return true;
}

/// Diagnose a typeid() call on a polymorphic operand (CCE diagnostic).
bool DiagTypeid(InterpState &S, CodePtr OpPC) {
  const auto *E = cast<CXXTypeidExpr>(S.Current->getExpr(OpPC));
  S.CCEDiag(E, diag::note_constexpr_typeid_polymorphic)
      << E->getExprOperand()->getType()
      << E->getExprOperand()->getSourceRange();
  return false;
}

// https://github.com/llvm/llvm-project/issues/102513
#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", off)
#endif
/// Main interpreter loop: read and dispatch opcodes starting at the current
/// frame's PC until an opcode handler terminates interpretation.
bool Interpret(InterpState &S) {
  // The current stack frame when we started Interpret().
  // This is being used by the ops to determine whether
  // to return from this function and thus terminate
  // interpretation.
  const InterpFrame *StartFrame = S.Current;
  assert(!S.Current->isRoot());
  CodePtr PC = S.Current->getPC();

  // Empty program.
  if (!PC)
    return true;

  for (;;) {
    auto Op = PC.read<Opcode>();
    CodePtr OpPC = PC;

    // The per-opcode case labels are generated from Opcodes.inc.
    switch (Op) {
#define GET_INTERP
#include "Opcodes.inc"
#undef GET_INTERP
    }
  }
}
// https://github.com/llvm/llvm-project/issues/102513
#if defined(_MSC_VER) && !defined(__clang__) && !defined(NDEBUG)
#pragma optimize("", on)
#endif

} // namespace interp
} // namespace clang