//===-- DataflowEnvironment.cpp ---------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
//  This file defines an Environment class that is used by dataflow analyses
//  that run over Control-Flow Graphs (CFGs) to keep track of the state of the
//  program at given program points.
//
//===----------------------------------------------------------------------===//

#include "clang/Analysis/FlowSensitive/DataflowEnvironment.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"
#include "clang/AST/Type.h"
#include "clang/Analysis/FlowSensitive/DataflowLattice.h"
#include "clang/Analysis/FlowSensitive/Value.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <memory>
#include <utility>

namespace clang {
namespace dataflow {

// FIXME: convert these to parameters of the analysis or environment. Current
// settings have been experimentally validated, but only for a particular
// analysis.
static constexpr int MaxCompositeValueDepth = 3;
static constexpr int MaxCompositeValueSize = 1000;

/// Returns a map consisting of key-value entries that are present in both maps.
template <typename K, typename V>
llvm::DenseMap<K, V> intersectDenseMaps(const llvm::DenseMap<K, V> &Map1,
                                        const llvm::DenseMap<K, V> &Map2) {
  llvm::DenseMap<K, V> Result;
  for (auto &Entry : Map1) {
    auto It = Map2.find(Entry.first);
    if (It != Map2.end() && Entry.second == It->second)
      Result.insert({Entry.first, Entry.second});
  }
  return Result;
}

static bool compareDistinctValues(QualType Type, Value &Val1,
                                  const Environment &Env1, Value &Val2,
                                  const Environment &Env2,
                                  Environment::ValueModel &Model) {
  // Note: Potentially costly, but, for booleans, we could check whether both
  // can be proven equivalent in their respective environments.

  // FIXME: move the reference/pointers logic from `areEquivalentValues` to here
  // and implement separate, join/widen specific handling for
  // reference/pointers.
  switch (Model.compare(Type, Val1, Env1, Val2, Env2)) {
  case ComparisonResult::Same:
    return true;
  case ComparisonResult::Different:
    return false;
  case ComparisonResult::Unknown:
    switch (Val1.getKind()) {
    case Value::Kind::Integer:
    case Value::Kind::Reference:
    case Value::Kind::Pointer:
    case Value::Kind::Struct:
      // FIXME: this choice intentionally introduces unsoundness to allow
      // for convergence. Once we have widening support for the
      // reference/pointer and struct built-in models, this should be
      // `false`.
      return true;
    default:
      return false;
    }
  }
  llvm_unreachable("All cases covered in switch");
}

/// Attempts to merge distinct values `Val1` and `Val2` in `Env1` and `Env2`,
/// respectively, of the same type `Type`. Merging generally produces a single
/// value that (soundly) approximates the two inputs, although the actual
/// meaning depends on `Model`.
static Value *mergeDistinctValues(QualType Type, Value &Val1,
                                  const Environment &Env1, Value &Val2,
                                  const Environment &Env2,
                                  Environment &MergedEnv,
                                  Environment::ValueModel &Model) {
  // Join distinct boolean values preserving information about the constraints
  // in the respective path conditions.
  if (isa<BoolValue>(&Val1) && isa<BoolValue>(&Val2)) {
    // FIXME: Checking both values should be unnecessary, since they should
    // have a consistent shape. However, right now we can end up with
    // BoolValue's in integer-typed variables due to our incorrect handling of
    // boolean-to-integer casts (we just propagate the BoolValue to the result
    // of the cast). So, a join can encounter an integer in one branch but a
    // bool in the other.
    // For example:
    // ```
    // std::optional<bool> o;
    // int x;
    // if (o.has_value())
    //   x = o.value();
    // ```
    auto *Expr1 = cast<BoolValue>(&Val1);
    auto *Expr2 = cast<BoolValue>(&Val2);
    auto &MergedVal = MergedEnv.makeAtomicBoolValue();
    MergedEnv.addToFlowCondition(MergedEnv.makeOr(
        MergedEnv.makeAnd(Env1.getFlowConditionToken(),
                          MergedEnv.makeIff(MergedVal, *Expr1)),
        MergedEnv.makeAnd(Env2.getFlowConditionToken(),
                          MergedEnv.makeIff(MergedVal, *Expr2))));
    return &MergedVal;
  }

  // FIXME: Consider destroying `MergedVal` immediately if `ValueModel::merge`
  // returns false to avoid storing unneeded values in `DACtx`.
  // FIXME: Creating the value based on the type alone creates misshapen values
  // for lvalues, since the type does not reflect the need for `ReferenceValue`.
  if (Value *MergedVal = MergedEnv.createValue(Type))
    if (Model.merge(Type, Val1, Env1, Val2, Env2, *MergedVal, MergedEnv))
      return MergedVal;

  return nullptr;
}

// When widening does not change `Current`, the return value will equal `&Prev`.
static Value &widenDistinctValues(QualType Type, Value &Prev,
                                  const Environment &PrevEnv, Value &Current,
                                  Environment &CurrentEnv,
                                  Environment::ValueModel &Model) {
  // Boolean-model widening.
  if (isa<BoolValue>(&Prev)) {
    assert(isa<BoolValue>(Current));
    // Widen to Top, because we know they are different values. If the previous
    // value was already Top, re-use it to (implicitly) indicate that no change
    // occurred.
    if (isa<TopBoolValue>(Prev))
      return Prev;
    return CurrentEnv.makeTopBoolValue();
  }

  // FIXME: Add other built-in model widening.

  // Custom-model widening.
  if (auto *W = Model.widen(Type, Prev, PrevEnv, Current, CurrentEnv))
    return *W;

  // The default for widening is a no-op: leave the current value unchanged.
  return Current;
}

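// Note: an illustrative sketch of the boolean case in `mergeDistinctValues`
// above (not part of the implementation). Joining the environments of the two
// arms of
// ```
// bool B;
// if (Cond)
//   B = X;
// else
//   B = Y;
// ```
// introduces a fresh atom `MergedVal` for `B` together with the constraint
// `(FC1 and (MergedVal iff X)) or (FC2 and (MergedVal iff Y))`, where `FC1`
// and `FC2` are the flow-condition tokens of the two joined environments. The
// names `B`, `Cond`, `X`, `Y`, `FC1` and `FC2` are hypothetical and appear
// only in this sketch.
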
/// Inserts `D` into `Vars` if it is a variable with global storage.
static void insertIfGlobal(const Decl &D,
                           llvm::DenseSet<const VarDecl *> &Vars) {
  if (auto *V = dyn_cast<VarDecl>(&D))
    if (V->hasGlobalStorage())
      Vars.insert(V);
}

static void insertIfFunction(const Decl &D,
                             llvm::DenseSet<const FunctionDecl *> &Funcs) {
  if (auto *FD = dyn_cast<FunctionDecl>(&D))
    Funcs.insert(FD);
}

/// Inserts into `Fields`, `Vars` and `Funcs` any fields, global variables and
/// functions that are declared by or referenced from `D`.
static void
getFieldsGlobalsAndFuncs(const Decl &D,
                         llvm::DenseSet<const FieldDecl *> &Fields,
                         llvm::DenseSet<const VarDecl *> &Vars,
                         llvm::DenseSet<const FunctionDecl *> &Funcs) {
  insertIfGlobal(D, Vars);
  insertIfFunction(D, Funcs);
  if (const auto *Decomp = dyn_cast<DecompositionDecl>(&D))
    for (const auto *B : Decomp->bindings())
      if (auto *ME = dyn_cast_or_null<MemberExpr>(B->getBinding()))
        // FIXME: should we be using `E->getFoundDecl()`?
        if (const auto *FD = dyn_cast<FieldDecl>(ME->getMemberDecl()))
          Fields.insert(FD);
}

/// Traverses `S` and inserts into `Fields`, `Vars` and `Funcs` any fields,
/// global variables and functions that are declared in or referenced from
/// sub-statements.
static void
getFieldsGlobalsAndFuncs(const Stmt &S,
                         llvm::DenseSet<const FieldDecl *> &Fields,
                         llvm::DenseSet<const VarDecl *> &Vars,
                         llvm::DenseSet<const FunctionDecl *> &Funcs) {
  for (auto *Child : S.children())
    if (Child != nullptr)
      getFieldsGlobalsAndFuncs(*Child, Fields, Vars, Funcs);
  if (const auto *DefaultInit = dyn_cast<CXXDefaultInitExpr>(&S))
    getFieldsGlobalsAndFuncs(*DefaultInit->getExpr(), Fields, Vars, Funcs);

  if (auto *DS = dyn_cast<DeclStmt>(&S)) {
    if (DS->isSingleDecl())
      getFieldsGlobalsAndFuncs(*DS->getSingleDecl(), Fields, Vars, Funcs);
    else
      for (auto *D : DS->getDeclGroup())
        getFieldsGlobalsAndFuncs(*D, Fields, Vars, Funcs);
  } else if (auto *E = dyn_cast<DeclRefExpr>(&S)) {
    insertIfGlobal(*E->getDecl(), Vars);
    insertIfFunction(*E->getDecl(), Funcs);
  } else if (auto *E = dyn_cast<MemberExpr>(&S)) {
    // FIXME: should we be using `E->getFoundDecl()`?
    const ValueDecl *VD = E->getMemberDecl();
    insertIfGlobal(*VD, Vars);
    insertIfFunction(*VD, Funcs);
    if (const auto *FD = dyn_cast<FieldDecl>(VD))
      Fields.insert(FD);
  }
}

// FIXME: Add support for resetting globals after function calls to enable
// the implementation of sound analyses.
void Environment::initFieldsGlobalsAndFuncs(const FunctionDecl *FuncDecl) {
  assert(FuncDecl->getBody() != nullptr);

  llvm::DenseSet<const FieldDecl *> Fields;
  llvm::DenseSet<const VarDecl *> Vars;
  llvm::DenseSet<const FunctionDecl *> Funcs;

  // Look for global variable and field references in the
  // constructor-initializers.
  if (const auto *CtorDecl = dyn_cast<CXXConstructorDecl>(FuncDecl)) {
    for (const auto *Init : CtorDecl->inits()) {
      if (Init->isMemberInitializer()) {
        Fields.insert(Init->getMember());
      } else if (Init->isIndirectMemberInitializer()) {
        for (const auto *I : Init->getIndirectMember()->chain())
          Fields.insert(cast<FieldDecl>(I));
      }
      const Expr *E = Init->getInit();
      assert(E != nullptr);
      getFieldsGlobalsAndFuncs(*E, Fields, Vars, Funcs);
    }
    // Add all fields mentioned in default member initializers.
    for (const FieldDecl *F : CtorDecl->getParent()->fields())
      if (const auto *I = F->getInClassInitializer())
        getFieldsGlobalsAndFuncs(*I, Fields, Vars, Funcs);
  }
  getFieldsGlobalsAndFuncs(*FuncDecl->getBody(), Fields, Vars, Funcs);

  // These have to be added before the lines that follow to ensure that
  // `create*` work correctly for structs.
  DACtx->addModeledFields(Fields);

  for (const VarDecl *D : Vars) {
    if (getStorageLocation(*D) != nullptr)
      continue;
    auto &Loc = createStorageLocation(D->getType().getNonReferenceType());
    setStorageLocation(*D, Loc);
    if (auto *Val = createValue(D->getType().getNonReferenceType()))
      setValue(Loc, *Val);
  }

  for (const FunctionDecl *FD : Funcs) {
    if (getStorageLocation(*FD) != nullptr)
      continue;
    auto &Loc = createStorageLocation(FD->getType());
    setStorageLocation(*FD, Loc);
  }
}

Environment::Environment(DataflowAnalysisContext &DACtx)
    : DACtx(&DACtx),
      FlowConditionToken(&DACtx.arena().makeFlowConditionToken()) {}

Environment Environment::fork() const {
  Environment Copy(*this);
  Copy.FlowConditionToken = &DACtx->forkFlowCondition(*FlowConditionToken);
  return Copy;
}

Environment::Environment(DataflowAnalysisContext &DACtx,
                         const DeclContext &DeclCtx)
    : Environment(DACtx) {
  CallStack.push_back(&DeclCtx);

  if (const auto *FuncDecl = dyn_cast<FunctionDecl>(&DeclCtx)) {
    assert(FuncDecl->getBody() != nullptr);

    initFieldsGlobalsAndFuncs(FuncDecl);

    for (const auto *ParamDecl : FuncDecl->parameters()) {
      assert(ParamDecl != nullptr);
      // References aren't objects, so the reference itself doesn't have a
      // storage location. Instead, the storage location for a reference refers
      // directly to an object of the referenced type -- so strip off any
      // reference from the type.
      auto &ParamLoc =
          createStorageLocation(ParamDecl->getType().getNonReferenceType());
      setStorageLocation(*ParamDecl, ParamLoc);
      if (Value *ParamVal =
              createValue(ParamDecl->getType().getNonReferenceType()))
        setValue(ParamLoc, *ParamVal);
    }
  }

  if (const auto *MethodDecl = dyn_cast<CXXMethodDecl>(&DeclCtx)) {
    auto *Parent = MethodDecl->getParent();
    assert(Parent != nullptr);
    if (Parent->isLambda())
      MethodDecl = dyn_cast<CXXMethodDecl>(Parent->getDeclContext());

    // FIXME: Initialize the ThisPointeeLoc of lambdas too.
    if (MethodDecl && !MethodDecl->isStatic()) {
      QualType ThisPointeeType = MethodDecl->getThisObjectType();
      ThisPointeeLoc = &cast<AggregateStorageLocation>(
          createStorageLocation(ThisPointeeType));
      if (Value *ThisPointeeVal = createValue(ThisPointeeType))
        setValue(*ThisPointeeLoc, *ThisPointeeVal);
    }
  }
}

bool Environment::canDescend(unsigned MaxDepth,
                             const DeclContext *Callee) const {
  return CallStack.size() <= MaxDepth && !llvm::is_contained(CallStack, Callee);
}

Environment Environment::pushCall(const CallExpr *Call) const {
  Environment Env(*this);

  if (const auto *MethodCall = dyn_cast<CXXMemberCallExpr>(Call)) {
    if (const Expr *Arg = MethodCall->getImplicitObjectArgument()) {
      if (!isa<CXXThisExpr>(Arg))
        Env.ThisPointeeLoc = cast<AggregateStorageLocation>(
            getStorageLocation(*Arg, SkipPast::Reference));
      // Otherwise (when the argument is `this`), retain the current
      // environment's `ThisPointeeLoc`.
    }
  }

  Env.pushCallInternal(Call->getDirectCallee(),
                       llvm::ArrayRef(Call->getArgs(), Call->getNumArgs()));

  return Env;
}

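// Illustrative sketch (hypothetical driver code, not part of this file): a
// context-sensitive transfer function typically brackets the analysis of a
// callee with the call-handling API above, e.g.
// ```
// if (Env.canDescend(MaxDepth, Callee)) {
//   Environment CalleeEnv = Env.pushCall(Call);
//   // ... analyze the callee's body starting from `CalleeEnv` ...
//   Env.popCall(Call, CalleeEnv);
// }
// ```
// where `MaxDepth`, `Callee` (the callee's `FunctionDecl`) and `Call` are
// stand-in names.
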
Environment Environment::pushCall(const CXXConstructExpr *Call) const {
  Environment Env(*this);

  Env.ThisPointeeLoc = &cast<AggregateStorageLocation>(
      Env.createStorageLocation(Call->getType()));
  if (Value *Val = Env.createValue(Call->getType()))
    Env.setValue(*Env.ThisPointeeLoc, *Val);

  Env.pushCallInternal(Call->getConstructor(),
                       llvm::ArrayRef(Call->getArgs(), Call->getNumArgs()));

  return Env;
}

void Environment::pushCallInternal(const FunctionDecl *FuncDecl,
                                   ArrayRef<const Expr *> Args) {
  // Canonicalize to the definition of the function. This ensures that we're
  // putting arguments into the same `ParamVarDecl`s that the callee will later
  // be retrieving them from.
  assert(FuncDecl->getDefinition() != nullptr);
  FuncDecl = FuncDecl->getDefinition();

  CallStack.push_back(FuncDecl);

  initFieldsGlobalsAndFuncs(FuncDecl);

  const auto *ParamIt = FuncDecl->param_begin();

  // FIXME: Parameters don't always map to arguments 1:1; examples include
  // overloaded operators implemented as member functions, and parameter packs.
  for (unsigned ArgIndex = 0; ArgIndex < Args.size(); ++ParamIt, ++ArgIndex) {
    assert(ParamIt != FuncDecl->param_end());

    const Expr *Arg = Args[ArgIndex];
    auto *ArgLoc = getStorageLocation(*Arg, SkipPast::Reference);
    if (ArgLoc == nullptr)
      continue;

    const VarDecl *Param = *ParamIt;

    QualType ParamType = Param->getType();
    if (ParamType->isReferenceType()) {
      setStorageLocation(*Param, *ArgLoc);
    } else {
      auto &Loc = createStorageLocation(*Param);
      setStorageLocation(*Param, Loc);

      if (auto *ArgVal = getValue(*ArgLoc)) {
        setValue(Loc, *ArgVal);
      } else if (Value *Val = createValue(ParamType)) {
        setValue(Loc, *Val);
      }
    }
  }
}

void Environment::popCall(const CallExpr *Call, const Environment &CalleeEnv) {
  // We ignore `DACtx` because it's already the same in both. We don't want the
  // callee's `DeclCtx`, `ReturnVal`, `ReturnLoc` or `ThisPointeeLoc`. We don't
  // bring back `DeclToLoc` and `ExprToLoc` because we want to be able to later
  // analyze the same callee in a different context, and `setStorageLocation`
  // requires there to not already be a storage location assigned. Conceptually,
  // these maps capture information from the local scope, so when popping that
  // scope, we do not propagate the maps.
  this->LocToVal = std::move(CalleeEnv.LocToVal);
  this->MemberLocToStruct = std::move(CalleeEnv.MemberLocToStruct);
  this->FlowConditionToken = std::move(CalleeEnv.FlowConditionToken);

  if (Call->isGLValue()) {
    if (CalleeEnv.ReturnLoc != nullptr)
      setStorageLocationStrict(*Call, *CalleeEnv.ReturnLoc);
  } else if (!Call->getType()->isVoidType()) {
    if (CalleeEnv.ReturnVal != nullptr)
      setValueStrict(*Call, *CalleeEnv.ReturnVal);
  }
}

void Environment::popCall(const CXXConstructExpr *Call,
                          const Environment &CalleeEnv) {
  // See also comment in `popCall(const CallExpr *, const Environment &)` above.
  this->LocToVal = std::move(CalleeEnv.LocToVal);
  this->MemberLocToStruct = std::move(CalleeEnv.MemberLocToStruct);
  this->FlowConditionToken = std::move(CalleeEnv.FlowConditionToken);

  if (Value *Val = CalleeEnv.getValue(*CalleeEnv.ThisPointeeLoc)) {
    setValueStrict(*Call, *Val);
  }
}

bool Environment::equivalentTo(const Environment &Other,
                               Environment::ValueModel &Model) const {
  assert(DACtx == Other.DACtx);

  if (ReturnVal != Other.ReturnVal)
    return false;

  if (ReturnLoc != Other.ReturnLoc)
    return false;

  if (ThisPointeeLoc != Other.ThisPointeeLoc)
    return false;

  if (DeclToLoc != Other.DeclToLoc)
    return false;

  if (ExprToLoc != Other.ExprToLoc)
    return false;

  // Compare the contents for the intersection of their domains.
  for (auto &Entry : LocToVal) {
    const StorageLocation *Loc = Entry.first;
    assert(Loc != nullptr);

    Value *Val = Entry.second;
    assert(Val != nullptr);

    auto It = Other.LocToVal.find(Loc);
    if (It == Other.LocToVal.end())
      continue;
    assert(It->second != nullptr);

    if (!areEquivalentValues(*Val, *It->second) &&
        !compareDistinctValues(Loc->getType(), *Val, *this, *It->second, Other,
                               Model))
      return false;
  }

  return true;
}

LatticeJoinEffect Environment::widen(const Environment &PrevEnv,
                                     Environment::ValueModel &Model) {
  assert(DACtx == PrevEnv.DACtx);
  assert(ReturnVal == PrevEnv.ReturnVal);
  assert(ReturnLoc == PrevEnv.ReturnLoc);
  assert(ThisPointeeLoc == PrevEnv.ThisPointeeLoc);
  assert(CallStack == PrevEnv.CallStack);

  auto Effect = LatticeJoinEffect::Unchanged;

  // By the API, `PrevEnv` is a previous version of the environment for the same
  // block, so we have some guarantees about its shape. In particular, it will
  // be the result of a join or widen operation on previous values for this
  // block. For `DeclToLoc` and `ExprToLoc`, join guarantees that these maps are
  // subsets of the maps in `PrevEnv`. So, as long as we maintain this property
  // here, we don't need to change their current values to widen.
  //
  // FIXME: `MemberLocToStruct` does not share the above property, because
  // `join` can cause the map size to increase (when we add fresh data in places
  // of conflict). Once this issue with join is resolved, re-enable the
  // assertion below or replace it with something that captures the desired
  // invariant.
  assert(DeclToLoc.size() <= PrevEnv.DeclToLoc.size());
  assert(ExprToLoc.size() <= PrevEnv.ExprToLoc.size());
  // assert(MemberLocToStruct.size() <= PrevEnv.MemberLocToStruct.size());

  llvm::DenseMap<const StorageLocation *, Value *> WidenedLocToVal;
  for (auto &Entry : LocToVal) {
    const StorageLocation *Loc = Entry.first;
    assert(Loc != nullptr);

    Value *Val = Entry.second;
    assert(Val != nullptr);

    auto PrevIt = PrevEnv.LocToVal.find(Loc);
    if (PrevIt == PrevEnv.LocToVal.end())
      continue;
    assert(PrevIt->second != nullptr);

    if (areEquivalentValues(*Val, *PrevIt->second)) {
      WidenedLocToVal.insert({Loc, Val});
      continue;
    }

    Value &WidenedVal = widenDistinctValues(Loc->getType(), *PrevIt->second,
                                            PrevEnv, *Val, *this, Model);
    WidenedLocToVal.insert({Loc, &WidenedVal});
    if (&WidenedVal != PrevIt->second)
      Effect = LatticeJoinEffect::Changed;
  }
  LocToVal = std::move(WidenedLocToVal);
  // FIXME: update the equivalence calculation for `MemberLocToStruct`, once we
  // have a systematic way of soundly comparing this map.
  if (DeclToLoc.size() != PrevEnv.DeclToLoc.size() ||
      ExprToLoc.size() != PrevEnv.ExprToLoc.size() ||
      LocToVal.size() != PrevEnv.LocToVal.size() ||
      MemberLocToStruct.size() != PrevEnv.MemberLocToStruct.size())
    Effect = LatticeJoinEffect::Changed;

  return Effect;
}

Environment Environment::join(const Environment &EnvA, const Environment &EnvB,
                              Environment::ValueModel &Model) {
  assert(EnvA.DACtx == EnvB.DACtx);
  assert(EnvA.ThisPointeeLoc == EnvB.ThisPointeeLoc);
  assert(EnvA.CallStack == EnvB.CallStack);

  Environment JoinedEnv(*EnvA.DACtx);

  JoinedEnv.CallStack = EnvA.CallStack;
  JoinedEnv.ThisPointeeLoc = EnvA.ThisPointeeLoc;

  if (EnvA.ReturnVal == nullptr || EnvB.ReturnVal == nullptr) {
    // `ReturnVal` might not always get set -- for example if we have a return
    // statement of the form `return some_other_func()` and we decide not to
    // analyze `some_other_func()`.
    // In this case, we can't say anything about the joined return value -- we
    // don't simply want to propagate the return value that we do have, because
    // it might not be the correct one.
    // This occurs for example in the test `ContextSensitiveMutualRecursion`.
    JoinedEnv.ReturnVal = nullptr;
  } else if (areEquivalentValues(*EnvA.ReturnVal, *EnvB.ReturnVal)) {
    JoinedEnv.ReturnVal = EnvA.ReturnVal;
  } else {
    assert(!EnvA.CallStack.empty());
    // FIXME: Make `CallStack` a vector of `FunctionDecl` so we don't need this
    // cast.
    auto *Func = dyn_cast<FunctionDecl>(EnvA.CallStack.back());
    assert(Func != nullptr);
    if (Value *MergedVal =
            mergeDistinctValues(Func->getReturnType(), *EnvA.ReturnVal, EnvA,
                                *EnvB.ReturnVal, EnvB, JoinedEnv, Model))
      JoinedEnv.ReturnVal = MergedVal;
  }

  if (EnvA.ReturnLoc == EnvB.ReturnLoc)
    JoinedEnv.ReturnLoc = EnvA.ReturnLoc;
  else
    JoinedEnv.ReturnLoc = nullptr;

  // FIXME: Once we're able to remove declarations from `DeclToLoc` when their
  // lifetime ends, add an assertion that there aren't any entries in
  // `DeclToLoc` and `Other.DeclToLoc` that map the same declaration to
  // different storage locations.
  JoinedEnv.DeclToLoc = intersectDenseMaps(EnvA.DeclToLoc, EnvB.DeclToLoc);

  JoinedEnv.ExprToLoc = intersectDenseMaps(EnvA.ExprToLoc, EnvB.ExprToLoc);

  JoinedEnv.MemberLocToStruct =
      intersectDenseMaps(EnvA.MemberLocToStruct, EnvB.MemberLocToStruct);

  // FIXME: update join to detect backedges and simplify the flow condition
  // accordingly.
  JoinedEnv.FlowConditionToken = &EnvA.DACtx->joinFlowConditions(
      *EnvA.FlowConditionToken, *EnvB.FlowConditionToken);

  for (auto &Entry : EnvA.LocToVal) {
    const StorageLocation *Loc = Entry.first;
    assert(Loc != nullptr);

    Value *Val = Entry.second;
    assert(Val != nullptr);

    auto It = EnvB.LocToVal.find(Loc);
    if (It == EnvB.LocToVal.end())
      continue;
    assert(It->second != nullptr);

    if (areEquivalentValues(*Val, *It->second)) {
      JoinedEnv.LocToVal.insert({Loc, Val});
      continue;
    }

    if (Value *MergedVal = mergeDistinctValues(
            Loc->getType(), *Val, EnvA, *It->second, EnvB, JoinedEnv, Model)) {
      JoinedEnv.LocToVal.insert({Loc, MergedVal});
    }
  }

  return JoinedEnv;
}

StorageLocation &Environment::createStorageLocation(QualType Type) {
  return DACtx->createStorageLocation(Type);
}

StorageLocation &Environment::createStorageLocation(const VarDecl &D) {
  // Evaluated declarations are always assigned the same storage locations to
  // ensure that the environment stabilizes across loop iterations. Storage
  // locations for evaluated declarations are stored in the analysis context.
  return DACtx->getStableStorageLocation(D);
}

StorageLocation &Environment::createStorageLocation(const Expr &E) {
  // Evaluated expressions are always assigned the same storage locations to
  // ensure that the environment stabilizes across loop iterations. Storage
  // locations for evaluated expressions are stored in the analysis context.
  return DACtx->getStableStorageLocation(E);
}

void Environment::setStorageLocation(const ValueDecl &D, StorageLocation &Loc) {
  assert(!DeclToLoc.contains(&D));
  assert(!isa_and_nonnull<ReferenceValue>(getValue(Loc)));
  DeclToLoc[&D] = &Loc;
}

StorageLocation *Environment::getStorageLocation(const ValueDecl &D) const {
  auto It = DeclToLoc.find(&D);
  if (It == DeclToLoc.end())
    return nullptr;

  StorageLocation *Loc = It->second;

  assert(!isa_and_nonnull<ReferenceValue>(getValue(*Loc)));

  return Loc;
}

void Environment::setStorageLocation(const Expr &E, StorageLocation &Loc) {
  const Expr &CanonE = ignoreCFGOmittedNodes(E);
  assert(!ExprToLoc.contains(&CanonE));
  ExprToLoc[&CanonE] = &Loc;
}

void Environment::setStorageLocationStrict(const Expr &E,
                                           StorageLocation &Loc) {
  // `DeclRefExpr`s to builtin function types aren't glvalues, for some reason,
  // but we still want to be able to associate a `StorageLocation` with them,
  // so allow these as an exception.
  assert(E.isGLValue() ||
         E.getType()->isSpecificBuiltinType(BuiltinType::BuiltinFn));
  setStorageLocation(E, Loc);
}

StorageLocation *Environment::getStorageLocation(const Expr &E,
                                                 SkipPast SP) const {
  // FIXME: Add a test with parens.
  auto It = ExprToLoc.find(&ignoreCFGOmittedNodes(E));
  return It == ExprToLoc.end() ? nullptr : &skip(*It->second, SP);
}

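// Illustrative note on the `SkipPast::Reference` policy used above (the
// variable names below are hypothetical): if the location mapped to an
// expression holds a `ReferenceValue` -- e.g. for a use of `R` in
// ```
// int I = 0;
// int &R = I;
// ```
// -- then `SkipPast::Reference` skips that one level of indirection and
// returns the referent location (here, the location of `I`), whereas
// `SkipPast::None` returns the mapped location unchanged. See
// `Environment::skip` below.
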
StorageLocation *Environment::getStorageLocationStrict(const Expr &E) const {
  // See comment in `setStorageLocationStrict()`.
  assert(E.isGLValue() ||
         E.getType()->isSpecificBuiltinType(BuiltinType::BuiltinFn));
  StorageLocation *Loc = getStorageLocation(E, SkipPast::None);

  if (Loc == nullptr)
    return nullptr;

  if (auto *RefVal = dyn_cast_or_null<ReferenceValue>(getValue(*Loc)))
    return &RefVal->getReferentLoc();

  return Loc;
}

AggregateStorageLocation *Environment::getThisPointeeStorageLocation() const {
  return ThisPointeeLoc;
}

PointerValue &Environment::getOrCreateNullPointerValue(QualType PointeeType) {
  return DACtx->getOrCreateNullPointerValue(PointeeType);
}

void Environment::setValue(const StorageLocation &Loc, Value &Val) {
  LocToVal[&Loc] = &Val;

  if (auto *StructVal = dyn_cast<StructValue>(&Val)) {
    auto &AggregateLoc = *cast<AggregateStorageLocation>(&Loc);

    const QualType Type = AggregateLoc.getType();
    assert(Type->isRecordType());

    for (const FieldDecl *Field : DACtx->getReferencedFields(Type)) {
      assert(Field != nullptr);
      StorageLocation &FieldLoc = AggregateLoc.getChild(*Field);
      MemberLocToStruct[&FieldLoc] = std::make_pair(StructVal, Field);
      if (auto *FieldVal = StructVal->getChild(*Field))
        setValue(FieldLoc, *FieldVal);
    }
  }

  auto It = MemberLocToStruct.find(&Loc);
  if (It != MemberLocToStruct.end()) {
    // `Loc` is the location of a struct member so we need to also update the
    // value of the member in the corresponding `StructValue`.

    assert(It->second.first != nullptr);
    StructValue &StructVal = *It->second.first;

    assert(It->second.second != nullptr);
    const ValueDecl &Member = *It->second.second;

    StructVal.setChild(Member, Val);
  }
}

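// Illustrative note on the struct synchronization performed by `setValue`
// above (hypothetical example): given
// ```
// struct S { int X; };
// ```
// assigning a `StructValue` to the `AggregateStorageLocation` of an `S` object
// records in `MemberLocToStruct` that the child location for the modeled field
// `X` belongs to that `StructValue`; a later `setValue` on that child location
// then also updates the `StructValue`'s child for `X` via the second branch
// above.
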
void Environment::clearValue(const StorageLocation &Loc) {
  LocToVal.erase(&Loc);

  if (auto It = MemberLocToStruct.find(&Loc); It != MemberLocToStruct.end()) {
    // `Loc` is the location of a struct member so we need to also clear the
    // member in the corresponding `StructValue`.

    assert(It->second.first != nullptr);
    StructValue &StructVal = *It->second.first;

    assert(It->second.second != nullptr);
    const ValueDecl &Member = *It->second.second;

    StructVal.clearChild(Member);
  }
}

void Environment::setValueStrict(const Expr &E, Value &Val) {
  assert(E.isPRValue());
  assert(!isa<ReferenceValue>(Val));

  StorageLocation *Loc = getStorageLocation(E, SkipPast::None);
  if (Loc == nullptr) {
    Loc = &createStorageLocation(E);
    setStorageLocation(E, *Loc);
  }
  setValue(*Loc, Val);
}

Value *Environment::getValue(const StorageLocation &Loc) const {
  return LocToVal.lookup(&Loc);
}

Value *Environment::getValue(const ValueDecl &D) const {
  auto *Loc = getStorageLocation(D);
  if (Loc == nullptr)
    return nullptr;
  return getValue(*Loc);
}

Value *Environment::getValue(const Expr &E, SkipPast SP) const {
  auto *Loc = getStorageLocation(E, SP);
  if (Loc == nullptr)
    return nullptr;
  return getValue(*Loc);
}

Value *Environment::getValueStrict(const Expr &E) const {
  assert(E.isPRValue());
  Value *Val = getValue(E, SkipPast::None);

  assert(Val == nullptr || !isa<ReferenceValue>(Val));

  return Val;
}

Value *Environment::createValue(QualType Type) {
  llvm::DenseSet<QualType> Visited;
  int CreatedValuesCount = 0;
  Value *Val = createValueUnlessSelfReferential(Type, Visited, /*Depth=*/0,
                                                CreatedValuesCount);
  if (CreatedValuesCount > MaxCompositeValueSize) {
    llvm::errs() << "Attempting to initialize a huge value of type: " << Type
                 << '\n';
  }
  return Val;
}

Value *Environment::createValueUnlessSelfReferential(
    QualType Type, llvm::DenseSet<QualType> &Visited, int Depth,
    int &CreatedValuesCount) {
  assert(!Type.isNull());

  // Allow unlimited fields at depth 1; only cap at deeper nesting levels.
  if ((Depth > 1 && CreatedValuesCount > MaxCompositeValueSize) ||
      Depth > MaxCompositeValueDepth)
    return nullptr;

  if (Type->isBooleanType()) {
    CreatedValuesCount++;
    return &makeAtomicBoolValue();
  }

  if (Type->isIntegerType()) {
    // FIXME: consider instead `return nullptr`, given that we do nothing useful
    // with integers, and so distinguishing them serves no purpose, but could
    // prevent convergence.
    CreatedValuesCount++;
    return &DACtx->arena().create<IntegerValue>();
  }

  if (Type->isReferenceType() || Type->isPointerType()) {
    CreatedValuesCount++;
    QualType PointeeType = Type->getPointeeType();
    auto &PointeeLoc = createStorageLocation(PointeeType);

    if (Visited.insert(PointeeType.getCanonicalType()).second) {
      Value *PointeeVal = createValueUnlessSelfReferential(
          PointeeType, Visited, Depth, CreatedValuesCount);
      Visited.erase(PointeeType.getCanonicalType());

      if (PointeeVal != nullptr)
        setValue(PointeeLoc, *PointeeVal);
    }

    if (Type->isReferenceType())
      return &DACtx->arena().create<ReferenceValue>(PointeeLoc);
    else
      return &DACtx->arena().create<PointerValue>(PointeeLoc);
  }

  if (Type->isRecordType()) {
    CreatedValuesCount++;
    llvm::DenseMap<const ValueDecl *, Value *> FieldValues;
    for (const FieldDecl *Field : DACtx->getReferencedFields(Type)) {
      assert(Field != nullptr);

      QualType FieldType = Field->getType();
      if (Visited.contains(FieldType.getCanonicalType()))
        continue;

      Visited.insert(FieldType.getCanonicalType());
      if (auto *FieldValue = createValueUnlessSelfReferential(
              FieldType, Visited, Depth + 1, CreatedValuesCount))
        FieldValues.insert({Field, FieldValue});
      Visited.erase(FieldType.getCanonicalType());
    }

    return &DACtx->arena().create<StructValue>(std::move(FieldValues));
  }

  return nullptr;
}

StorageLocation &Environment::skip(StorageLocation &Loc, SkipPast SP) const {
  switch (SP) {
  case SkipPast::None:
    return Loc;
  case SkipPast::Reference:
    // References cannot be chained so we only need to skip past one level of
    // indirection.
    if (auto *Val = dyn_cast_or_null<ReferenceValue>(getValue(Loc)))
      return Val->getReferentLoc();
    return Loc;
  }
  llvm_unreachable("bad SkipPast kind");
}

const StorageLocation &Environment::skip(const StorageLocation &Loc,
                                         SkipPast SP) const {
  return skip(*const_cast<StorageLocation *>(&Loc), SP);
}

void Environment::addToFlowCondition(BoolValue &Val) {
  DACtx->addFlowConditionConstraint(*FlowConditionToken, Val);
}

bool Environment::flowConditionImplies(BoolValue &Val) const {
  return DACtx->flowConditionImplies(*FlowConditionToken, Val);
}

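// Illustrative sketch of how a model or check might use the flow-condition API
// above (hypothetical client code; `Env` is an `Environment`):
// ```
// BoolValue &Known = Env.makeAtomicBoolValue();
// Env.addToFlowCondition(Known);
// if (Env.flowConditionImplies(Known)) {
//   // On this path, `Known` is provably true.
// }
// ```
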
898 OS << "DeclToLoc:\n"; 899 for (auto [D, L] : DeclToLoc) 900 OS << " [" << D->getNameAsString() << ", " << L << "]\n"; 901 902 OS << "ExprToLoc:\n"; 903 for (auto [E, L] : ExprToLoc) 904 OS << " [" << E << ", " << L << "]\n"; 905 906 OS << "LocToVal:\n"; 907 for (auto [L, V] : LocToVal) { 908 OS << " [" << L << ", " << V << ": " << *V << "]\n"; 909 } 910 911 OS << "FlowConditionToken:\n"; 912 DACtx->dumpFlowCondition(*FlowConditionToken, OS); 913 } 914 915 void Environment::dump() const { 916 dump(llvm::dbgs()); 917 } 918 919 AggregateStorageLocation * 920 getImplicitObjectLocation(const CXXMemberCallExpr &MCE, 921 const Environment &Env) { 922 Expr *ImplicitObject = MCE.getImplicitObjectArgument(); 923 if (ImplicitObject == nullptr) 924 return nullptr; 925 StorageLocation *Loc = 926 Env.getStorageLocation(*ImplicitObject, SkipPast::Reference); 927 if (Loc == nullptr) 928 return nullptr; 929 if (ImplicitObject->getType()->isPointerType()) { 930 if (auto *Val = cast_or_null<PointerValue>(Env.getValue(*Loc))) 931 return &cast<AggregateStorageLocation>(Val->getPointeeLoc()); 932 return nullptr; 933 } 934 return cast<AggregateStorageLocation>(Loc); 935 } 936 937 AggregateStorageLocation *getBaseObjectLocation(const MemberExpr &ME, 938 const Environment &Env) { 939 Expr *Base = ME.getBase(); 940 if (Base == nullptr) 941 return nullptr; 942 StorageLocation *Loc = Env.getStorageLocation(*Base, SkipPast::Reference); 943 if (Loc == nullptr) 944 return nullptr; 945 if (ME.isArrow()) { 946 if (auto *Val = cast_or_null<PointerValue>(Env.getValue(*Loc))) 947 return &cast<AggregateStorageLocation>(Val->getPointeeLoc()); 948 return nullptr; 949 } 950 return cast<AggregateStorageLocation>(Loc); 951 } 952 953 } // namespace dataflow 954 } // namespace clang 955