//== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a basic region store model. The model is field-sensitive,
// but it assumes nothing about the shape of the heap, so recursive data
// structures are largely ignored; essentially, this is a 1-limited analysis.
// Parameter pointers are assumed not to alias, and the objects they point to
// are created lazily.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Attr.h"
#include "clang/AST/CharUnits.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Basic/JsonSupport.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/Support/raw_ostream.h"
#include <utility>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Representation of binding keys.
//===----------------------------------------------------------------------===//

namespace {
class BindingKey {
public:
  enum Kind { Default = 0x0, Direct = 0x1 };
private:
  enum { Symbolic = 0x2 };

  llvm::PointerIntPair<const MemRegion *, 2> P;
  uint64_t Data;

  /// Create a key for a binding to region \p r, which has a symbolic offset
  /// from region \p Base.
  explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
      : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
    assert(r && Base && "Must have known regions.");
    assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
  }

  /// Create a key for a binding at \p offset from base region \p r.
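  ///
  /// A minimal usage sketch (illustrative; clients normally construct keys
  /// through BindingKey::Make below, which picks the right constructor):
  /// \code
  ///   BindingKey K = BindingKey::Make(R, BindingKey::Direct);
  ///   if (!K.hasSymbolicOffset())
  ///     (void)K.getOffset(); // bit offset from K.getBaseRegion()
  /// \endcode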
  explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k)
      : P(r, k), Data(offset) {
    assert(r && "Must have known regions.");
    assert(getOffset() == offset && "Failed to store offset");
    assert((r == r->getBaseRegion() ||
            isa<ObjCIvarRegion, CXXDerivedObjectRegion>(r)) &&
           "Not a base");
  }
public:

  bool isDirect() const { return P.getInt() & Direct; }
  bool hasSymbolicOffset() const { return P.getInt() & Symbolic; }

  const MemRegion *getRegion() const { return P.getPointer(); }
  uint64_t getOffset() const {
    assert(!hasSymbolicOffset());
    return Data;
  }

  const SubRegion *getConcreteOffsetRegion() const {
    assert(hasSymbolicOffset());
    return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data));
  }

  const MemRegion *getBaseRegion() const {
    if (hasSymbolicOffset())
      return getConcreteOffsetRegion()->getBaseRegion();
    return getRegion()->getBaseRegion();
  }

  void Profile(llvm::FoldingSetNodeID& ID) const {
    ID.AddPointer(P.getOpaqueValue());
    ID.AddInteger(Data);
  }

  static BindingKey Make(const MemRegion *R, Kind k);

  bool operator<(const BindingKey &X) const {
    if (P.getOpaqueValue() < X.P.getOpaqueValue())
      return true;
    if (P.getOpaqueValue() > X.P.getOpaqueValue())
      return false;
    return Data < X.Data;
  }

  bool operator==(const BindingKey &X) const {
    return P.getOpaqueValue() == X.P.getOpaqueValue() &&
           Data == X.Data;
  }

  LLVM_DUMP_METHOD void dump() const;
};
} // end anonymous namespace

BindingKey BindingKey::Make(const MemRegion *R, Kind k) {
  const RegionOffset &RO = R->getAsOffset();
  if (RO.hasSymbolicOffset())
    return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k);

  return BindingKey(RO.getRegion(), RO.getOffset(), k);
}

namespace llvm {
static inline raw_ostream &operator<<(raw_ostream &Out, BindingKey K) {
  Out << "\"kind\": \"" << (K.isDirect() ? "Direct" : "Default")
      << "\", \"offset\": ";

  if (!K.hasSymbolicOffset())
    Out << K.getOffset();
  else
    Out << "null";

  return Out;
}

} // namespace llvm

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void BindingKey::dump() const { llvm::errs() << *this; }
#endif

//===----------------------------------------------------------------------===//
// Actual Store type.
//===----------------------------------------------------------------------===//

typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings;
typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
typedef std::pair<BindingKey, SVal> BindingPair;

typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
        RegionBindings;

namespace {
class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
                                 ClusterBindings> {
  ClusterBindings::Factory *CBFactory;

  // This flag indicates whether the current bindings are within the analysis
  // that has started from main(). It affects how we perform loads from
  // global variables that have initializers: if we have observed the
  // program execution from the start and we know that these variables
  // have not been overwritten yet, we can be sure that their initializers
  // are still relevant.
  // This flag never gets changed when the bindings are updated, so it could
  // potentially be moved into RegionStoreManager (as if it were the same
  // bindings but a different loading procedure); however, that would make the
  // manager needlessly stateful.
  bool IsMainAnalysis;

public:
  typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
          ParentTy;

  RegionBindingsRef(ClusterBindings::Factory &CBFactory,
                    const RegionBindings::TreeTy *T,
                    RegionBindings::TreeTy::Factory *F,
                    bool IsMainAnalysis)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F),
        CBFactory(&CBFactory), IsMainAnalysis(IsMainAnalysis) {}

  RegionBindingsRef(const ParentTy &P,
                    ClusterBindings::Factory &CBFactory,
                    bool IsMainAnalysis)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P),
        CBFactory(&CBFactory), IsMainAnalysis(IsMainAnalysis) {}

  RegionBindingsRef add(key_type_ref K, data_type_ref D) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D),
                             *CBFactory, IsMainAnalysis);
  }

  RegionBindingsRef remove(key_type_ref K) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K),
                             *CBFactory, IsMainAnalysis);
  }

  RegionBindingsRef addBinding(BindingKey K, SVal V) const;

  RegionBindingsRef addBinding(const MemRegion *R,
                               BindingKey::Kind k, SVal V) const;

  const SVal *lookup(BindingKey K) const;
  const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
  using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;

  RegionBindingsRef removeBinding(BindingKey K);

  RegionBindingsRef removeBinding(const MemRegion *R,
                                  BindingKey::Kind k);

  RegionBindingsRef removeBinding(const MemRegion *R) {
    return removeBinding(R, BindingKey::Direct).
           removeBinding(R, BindingKey::Default);
  }

  Optional<SVal> getDirectBinding(const MemRegion *R) const;

  /// getDefaultBinding - Returns an optional default binding associated with
  /// a region and its subregions.
  Optional<SVal> getDefaultBinding(const MemRegion *R) const;

  /// Return the internal tree as a Store.
  Store asStore() const {
    llvm::PointerIntPair<Store, 1, bool> Ptr = {
        asImmutableMap().getRootWithoutRetain(), IsMainAnalysis};
    return reinterpret_cast<Store>(Ptr.getOpaqueValue());
  }

  bool isMainAnalysis() const {
    return IsMainAnalysis;
  }

  void printJson(raw_ostream &Out, const char *NL = "\n",
                 unsigned int Space = 0, bool IsDot = false) const {
    for (iterator I = begin(); I != end(); ++I) {
      // TODO: We might need a .printJson for I.getKey() as well.
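      // Shape sketch of what the loop below emits for one cluster (exact
      // escaping and pointer values vary):
      //   { "cluster": "<region>", "pointer": "0x...", "items": [
      //     { "kind": "Default", "offset": 0, "value": "..." } ]}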
      Indent(Out, Space, IsDot)
          << "{ \"cluster\": \"" << I.getKey() << "\", \"pointer\": \""
          << (const void *)I.getKey() << "\", \"items\": [" << NL;

      ++Space;
      const ClusterBindings &CB = I.getData();
      for (ClusterBindings::iterator CI = CB.begin(); CI != CB.end(); ++CI) {
        Indent(Out, Space, IsDot) << "{ " << CI.getKey() << ", \"value\": ";
        CI.getData().printJson(Out, /*AddQuotes=*/true);
        Out << " }";
        if (std::next(CI) != CB.end())
          Out << ',';
        Out << NL;
      }

      --Space;
      Indent(Out, Space, IsDot) << "]}";
      if (std::next(I) != end())
        Out << ',';
      Out << NL;
    }
  }

  LLVM_DUMP_METHOD void dump() const { printJson(llvm::errs()); }
};
} // end anonymous namespace

typedef const RegionBindingsRef& RegionBindingsConstRef;

Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const {
  const SVal *V = lookup(R, BindingKey::Direct);
  return V ? Optional<SVal>(*V) : std::nullopt;
}

Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const {
  const SVal *V = lookup(R, BindingKey::Default);
  return V ? Optional<SVal>(*V) : std::nullopt;
}

RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const {
  const MemRegion *Base = K.getBaseRegion();

  const ClusterBindings *ExistingCluster = lookup(Base);
  ClusterBindings Cluster =
      (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap());

  ClusterBindings NewCluster = CBFactory->add(Cluster, K, V);
  return add(Base, NewCluster);
}

RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R,
                                                BindingKey::Kind k,
                                                SVal V) const {
  return addBinding(BindingKey::Make(R, k), V);
}

const SVal *RegionBindingsRef::lookup(BindingKey K) const {
  const ClusterBindings *Cluster = lookup(K.getBaseRegion());
  if (!Cluster)
    return nullptr;
  return Cluster->lookup(K);
}

const SVal *RegionBindingsRef::lookup(const MemRegion *R,
                                      BindingKey::Kind k) const {
  return lookup(BindingKey::Make(R, k));
}

RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) {
  const MemRegion *Base = K.getBaseRegion();
  const ClusterBindings *Cluster = lookup(Base);
  if (!Cluster)
    return *this;

  ClusterBindings NewCluster = CBFactory->remove(*Cluster, K);
  if (NewCluster.isEmpty())
    return remove(Base);
  return add(Base, NewCluster);
}

RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R,
                                                   BindingKey::Kind k) {
  return removeBinding(BindingKey::Make(R, k));
}

//===----------------------------------------------------------------------===//
// Main RegionStore logic.
//===----------------------------------------------------------------------===//

namespace {
class InvalidateRegionsWorker;

class RegionStoreManager : public StoreManager {
public:
  RegionBindings::Factory RBFactory;
  mutable ClusterBindings::Factory CBFactory;

  typedef std::vector<SVal> SValListTy;
private:
  typedef llvm::DenseMap<const LazyCompoundValData *,
                         SValListTy> LazyBindingsMapTy;
  LazyBindingsMapTy LazyBindingsMap;

  /// The largest number of fields a struct can have and still be
  /// considered "small".
  ///
  /// This is currently used to decide whether or not it is worth "forcing" a
  /// LazyCompoundVal on bind.
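  ///
  /// For illustration (the exact limit comes from the option mentioned below):
  /// with a limit of at least 2, a copy of a struct such as
  /// \code
  ///   struct Point { int x, y; };
  /// \endcode
  /// may be bound field-by-field via tryBindSmallStruct(), while bigger
  /// structs keep a single lazy default binding.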
  ///
  /// This is controlled by 'region-store-small-struct-limit' option.
  /// To disable all small-struct-dependent behavior, set the option to "0".
  unsigned SmallStructLimit;

  /// The largest number of elements an array can have and still be
  /// considered "small".
  ///
  /// This is currently used to decide whether or not it is worth "forcing" a
  /// LazyCompoundVal on bind.
  ///
  /// This is controlled by 'region-store-small-array-limit' option.
  /// To disable all small-array-dependent behavior, set the option to "0".
  unsigned SmallArrayLimit;

  /// A helper used to populate the work list with the given set of
  /// regions.
  void populateWorkList(InvalidateRegionsWorker &W,
                        ArrayRef<SVal> Values,
                        InvalidatedRegions *TopLevelRegions);

public:
  RegionStoreManager(ProgramStateManager &mgr)
      : StoreManager(mgr), RBFactory(mgr.getAllocator()),
        CBFactory(mgr.getAllocator()), SmallStructLimit(0), SmallArrayLimit(0) {
    ExprEngine &Eng = StateMgr.getOwningEngine();
    AnalyzerOptions &Options = Eng.getAnalysisManager().options;
    SmallStructLimit = Options.RegionStoreSmallStructLimit;
    SmallArrayLimit = Options.RegionStoreSmallArrayLimit;
  }

  /// setImplicitDefaultValue - Set the default binding for the provided
  /// MemRegion to the value implicitly defined for compound literals when
  /// the value is not specified.
  RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B,
                                            const MemRegion *R, QualType T);

  /// ArrayToPointer - Emulates the "decay" of an array to a pointer
  /// type. 'Array' represents the lvalue of the array being decayed
  /// to a pointer, and the returned SVal represents the decayed
  /// version of that lvalue (i.e., a pointer to the first element of
  /// the array). This is called by ExprEngine when evaluating
  /// casts from arrays to pointers.
  SVal ArrayToPointer(Loc Array, QualType ElementTy) override;

  /// Creates the Store that correctly represents memory contents before
  /// the beginning of the analysis of the given top-level stack frame.
  StoreRef getInitialStore(const LocationContext *InitLoc) override {
    bool IsMainAnalysis = false;
    if (const auto *FD = dyn_cast<FunctionDecl>(InitLoc->getDecl()))
      IsMainAnalysis = FD->isMain() && !Ctx.getLangOpts().CPlusPlus;
    return StoreRef(RegionBindingsRef(
        RegionBindingsRef::ParentTy(RBFactory.getEmptyMap(), RBFactory),
        CBFactory, IsMainAnalysis).asStore(), *this);
  }

  //===-------------------------------------------------------------------===//
  // Binding values to regions.
  //===-------------------------------------------------------------------===//
  RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated);

  StoreRef invalidateRegions(Store store,
                             ArrayRef<SVal> Values,
                             const Expr *E, unsigned Count,
                             const LocationContext *LCtx,
                             const CallEvent *Call,
                             InvalidatedSymbols &IS,
                             RegionAndSymbolInvalidationTraits &ITraits,
                             InvalidatedRegions *Invalidated,
                             InvalidatedRegions *InvalidatedTopLevel) override;

  bool scanReachableSymbols(Store S, const MemRegion *R,
                            ScanReachableSymbols &Callbacks) override;

  RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *R);
  Optional<SVal>
  getConstantValFromConstArrayInitializer(RegionBindingsConstRef B,
                                          const ElementRegion *R);
  Optional<SVal>
  getSValFromInitListExpr(const InitListExpr *ILE,
                          const SmallVector<uint64_t, 2> &ConcreteOffsets,
                          QualType ElemT);
  SVal getSValFromStringLiteral(const StringLiteral *SL, uint64_t Offset,
                                QualType ElemT);

public: // Part of public interface to class.

  StoreRef Bind(Store store, Loc LV, SVal V) override {
    return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this);
  }

  RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V);

  // BindDefaultInitial is only used to initialize a region with
  // a default value.
  StoreRef BindDefaultInitial(Store store, const MemRegion *R,
                              SVal V) override {
    RegionBindingsRef B = getRegionBindings(store);
    // Use other APIs when you have to wipe the region that was initialized
    // earlier.
    assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) &&
           "Double initialization!");
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  // BindDefaultZero is used for zeroing constructors that may accidentally
  // overwrite existing bindings.
  StoreRef BindDefaultZero(Store store, const MemRegion *R) override {
    // FIXME: The offsets of empty bases can be tricky because of the
    // so-called "empty base class optimization". If a base class has been
    // optimized out we should not try to create a binding, otherwise we
    // should. Unfortunately, at the moment ASTRecordLayout doesn't expose
    // the actual sizes of the empty bases, and trying to infer them from
    // offsets/alignments seems to be error-prone and non-trivial because of
    // the trailing padding. As a temporary mitigation we don't create
    // bindings for empty bases.
    if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(R))
      if (BR->getDecl()->isEmpty())
        return StoreRef(store, *this);

    RegionBindingsRef B = getRegionBindings(store);
    SVal V = svalBuilder.makeZeroVal(Ctx.CharTy);
    B = removeSubRegionBindings(B, cast<SubRegion>(R));
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  /// Attempt to extract the fields of \p LCV and bind them to the struct
  /// region \p R.
  ///
  /// This path is used when it seems advantageous to "force" loading the
  /// values within a LazyCompoundVal to bind memberwise to the struct region,
  /// rather than using a Default binding at the base of the entire region.
  /// This is a heuristic attempting to avoid building long chains of
  /// LazyCompoundVals.
  ///
  /// \returns The updated store bindings, or \c std::nullopt if binding
  ///          non-lazily would be too expensive.
  Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B,
                                                 const TypedValueRegion *R,
                                                 const RecordDecl *RD,
                                                 nonloc::LazyCompoundVal LCV);

  /// BindStruct - Bind a compound value to a structure.
  RegionBindingsRef bindStruct(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  /// BindVector - Bind a compound value to a vector.
  RegionBindingsRef bindVector(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  Optional<RegionBindingsRef> tryBindSmallArray(RegionBindingsConstRef B,
                                                const TypedValueRegion *R,
                                                const ArrayType *AT,
                                                nonloc::LazyCompoundVal LCV);

  RegionBindingsRef bindArray(RegionBindingsConstRef B,
                              const TypedValueRegion* R,
                              SVal V);

  /// Clears out all bindings in the given region and assigns a new value
  /// as a Default binding.
  RegionBindingsRef bindAggregate(RegionBindingsConstRef B,
                                  const TypedRegion *R,
                                  SVal DefaultVal);

  /// Create a new store with the specified binding removed.
  /// \param ST the original store that is the basis for the new store.
  /// \param L the location whose binding should be removed.
  StoreRef killBinding(Store ST, Loc L) override;

  void incrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRetain();
  }

  /// If the StoreManager supports it, decrement the reference count of
  /// the specified Store object. If the reference count hits 0, the memory
  /// associated with the object is recycled.
  void decrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRelease();
  }

  bool includedInBindings(Store store, const MemRegion *region) const override;

  /// Return the value bound to specified location in a given state.
  ///
  /// The high level logic for this method is this:
  /// getBinding (L)
  ///   if L has binding
  ///     return L's binding
  ///   else if L is in killset
  ///     return unknown
  ///   else
  ///     if L is on stack or heap
  ///       return undefined
  ///     else
  ///       return symbolic
  SVal getBinding(Store S, Loc L, QualType T) override {
    return getBinding(getRegionBindings(S), L, T);
  }

  Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override {
    RegionBindingsRef B = getRegionBindings(S);
    // Default bindings are always applied over a base region, so look up the
    // base region's default binding; otherwise the lookup would fail when R
    // is at an offset from R->getBaseRegion().
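    // For instance (illustrative), querying a FieldRegion inside a struct
    // returns the default binding recorded against the struct's base region,
    // if one exists.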
    return B.getDefaultBinding(R->getBaseRegion());
  }

  SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType());

  SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R);

  SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R);

  SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R);

  SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R);

  SVal getBindingForLazySymbol(const TypedValueRegion *R);

  SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
                                         const TypedValueRegion *R,
                                         QualType Ty);

  SVal getLazyBinding(const SubRegion *LazyBindingRegion,
                      RegionBindingsRef LazyBinding);

  /// Get bindings for the values in a struct and return a CompoundVal, used
  /// when doing struct copy:
  ///   struct s x, y;
  ///   x = y;
  /// y's value is retrieved by this method.
  SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R);
  SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R);
  NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R);

  /// Used to lazily generate derived symbols for bindings that are defined
  /// implicitly by default bindings in a super region.
  ///
  /// Note that callers may need to specially handle LazyCompoundVals, which
  /// are returned as is in case the caller needs to treat them differently.
  Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
                                                  const MemRegion *superR,
                                                  const TypedValueRegion *R,
                                                  QualType Ty);

  /// Get the state and region whose binding this region \p R corresponds to.
  ///
  /// If there is no lazy binding for \p R, the returned value will have a null
  /// \c second. Note that a null pointer can represent a valid Store.
  std::pair<Store, const SubRegion *>
  findLazyBinding(RegionBindingsConstRef B, const SubRegion *R,
                  const SubRegion *originalRegion);

  /// Returns the cached set of interesting SVals contained within a lazy
  /// binding.
  ///
  /// The precise value of "interesting" is determined for the purposes of
  /// RegionStore's internal analysis. It must always contain all regions and
  /// symbols, but may omit constants and other kinds of SVal.
  ///
  /// Unlike compound values, a LazyCompoundVal is itself added to the
  /// 'interesting values' list in addition to its child interesting values.
  const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV);

  //===------------------------------------------------------------------===//
  // State pruning.
  //===------------------------------------------------------------------===//

  /// removeDeadBindings - Scans the RegionStore of 'state' for dead values.
  /// It returns a new Store with these values removed.
  StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx,
                              SymbolReaper& SymReaper) override;

  //===------------------------------------------------------------------===//
  // Utility methods.
  //===------------------------------------------------------------------===//

  RegionBindingsRef getRegionBindings(Store store) const {
    llvm::PointerIntPair<Store, 1, bool> Ptr;
    Ptr.setFromOpaqueValue(const_cast<void *>(store));
    return RegionBindingsRef(
        CBFactory,
        static_cast<const RegionBindings::TreeTy *>(Ptr.getPointer()),
        RBFactory.getTreeFactory(),
        Ptr.getInt());
  }

  void printJson(raw_ostream &Out, Store S, const char *NL = "\n",
                 unsigned int Space = 0, bool IsDot = false) const override;

  void iterBindings(Store store, BindingsHandler& f) override {
    RegionBindingsRef B = getRegionBindings(store);
    for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) {
      const ClusterBindings &Cluster = I.getData();
      for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
           CI != CE; ++CI) {
        const BindingKey &K = CI.getKey();
        if (!K.isDirect())
          continue;
        if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) {
          // FIXME: Possibly incorporate the offset?
          if (!f.HandleBinding(*this, store, R, CI.getData()))
            return;
        }
      }
    }
  }
};

} // end anonymous namespace

//===----------------------------------------------------------------------===//
// RegionStore creation.
//===----------------------------------------------------------------------===//

std::unique_ptr<StoreManager>
ento::CreateRegionStoreManager(ProgramStateManager &StMgr) {
  return std::make_unique<RegionStoreManager>(StMgr);
}

//===----------------------------------------------------------------------===//
// Region Cluster analysis.
//===----------------------------------------------------------------------===//

namespace {
/// Used to determine which global regions are automatically included in the
/// initial worklist of a ClusterAnalysis.
enum GlobalsFilterKind {
  /// Don't include any global regions.
  GFK_None,
  /// Only include system globals.
  GFK_SystemOnly,
  /// Include all global regions.
  GFK_All
};

template <typename DERIVED>
class ClusterAnalysis {
protected:
  typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap;
  typedef const MemRegion * WorkListElement;
  typedef SmallVector<WorkListElement, 10> WorkList;

  llvm::SmallPtrSet<const ClusterBindings *, 16> Visited;

  WorkList WL;

  RegionStoreManager &RM;
  ASTContext &Ctx;
  SValBuilder &svalBuilder;

  RegionBindingsRef B;

protected:
  const ClusterBindings *getCluster(const MemRegion *R) {
    return B.lookup(R);
  }

  /// Returns true if all clusters in the given memspace should be initially
  /// included in the cluster analysis. Subclasses may provide their
  /// own implementation.
  bool includeEntireMemorySpace(const MemRegion *Base) {
    return false;
  }

public:
  ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr,
                  RegionBindingsRef b)
      : RM(rm), Ctx(StateMgr.getContext()),
        svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {}

  RegionBindingsRef getRegionBindings() const { return B; }

  bool isVisited(const MemRegion *R) {
    return Visited.count(getCluster(R));
  }

  void GenerateClusters() {
    // Scan the entire set of bindings and record the region clusters.
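    // Note: the static_casts below dispatch through the CRTP DERIVED type, so
    // subclasses (e.g. InvalidateRegionsWorker further down) can hook
    // VisitAddedToCluster and includeEntireMemorySpace without virtual calls.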
    for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end();
         RI != RE; ++RI) {
      const MemRegion *Base = RI.getKey();

      const ClusterBindings &Cluster = RI.getData();
      assert(!Cluster.isEmpty() && "Empty clusters should be removed");
      static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster);

      // If the base's memspace should be entirely invalidated, add the cluster
      // to the worklist up front.
      if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base))
        AddToWorkList(WorkListElement(Base), &Cluster);
    }
  }

  bool AddToWorkList(WorkListElement E, const ClusterBindings *C) {
    if (C && !Visited.insert(C).second)
      return false;
    WL.push_back(E);
    return true;
  }

  bool AddToWorkList(const MemRegion *R) {
    return static_cast<DERIVED*>(this)->AddToWorkList(R);
  }

  void RunWorkList() {
    while (!WL.empty()) {
      WorkListElement E = WL.pop_back_val();
      const MemRegion *BaseR = E;

      static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR));
    }
  }

  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {}
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {}

  void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C,
                    bool Flag) {
    static_cast<DERIVED*>(this)->VisitCluster(BaseR, C);
  }
};
}

//===----------------------------------------------------------------------===//
// Binding invalidation.
//===----------------------------------------------------------------------===//

bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R,
                                              ScanReachableSymbols &Callbacks) {
  assert(R == R->getBaseRegion() && "Should only be called for base regions");
  RegionBindingsRef B = getRegionBindings(S);
  const ClusterBindings *Cluster = B.lookup(R);

  if (!Cluster)
    return true;

  for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end();
       RI != RE; ++RI) {
    if (!Callbacks.scan(RI.getData()))
      return false;
  }

  return true;
}

static inline bool isUnionField(const FieldRegion *FR) {
  return FR->getDecl()->getParent()->isUnion();
}

typedef SmallVector<const FieldDecl *, 8> FieldVector;

static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) {
  assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");

  const MemRegion *Base = K.getConcreteOffsetRegion();
  const MemRegion *R = K.getRegion();

  while (R != Base) {
    if (const FieldRegion *FR = dyn_cast<FieldRegion>(R))
      if (!isUnionField(FR))
        Fields.push_back(FR->getDecl());

    R = cast<SubRegion>(R)->getSuperRegion();
  }
}

static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) {
  assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");

  if (Fields.empty())
    return true;

  FieldVector FieldsInBindingKey;
  getSymbolicOffsetFields(K, FieldsInBindingKey);

  ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size();
  if (Delta >= 0)
    return std::equal(FieldsInBindingKey.begin() + Delta,
                      FieldsInBindingKey.end(),
                      Fields.begin());
  else
    return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(),
                      Fields.begin() - Delta);
}

/// Collects all bindings in \p Cluster that may refer to bindings within
/// \p Top.
///
/// Each binding is a pair whose \c first is the key (a BindingKey) and whose
/// \c second is the value (an SVal).
///
/// The \p IncludeAllDefaultBindings parameter specifies whether to include
/// default bindings that may extend beyond \p Top itself, e.g. if \p Top is
/// an aggregate within a larger aggregate with a default binding.
static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, BindingKey TopKey,
                         bool IncludeAllDefaultBindings) {
  FieldVector FieldsInSymbolicSubregions;
  if (TopKey.hasSymbolicOffset()) {
    getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions);
    Top = TopKey.getConcreteOffsetRegion();
    TopKey = BindingKey::Make(Top, BindingKey::Default);
  }

  // Find the length (in bits) of the region being invalidated.
  uint64_t Length = UINT64_MAX;
  SVal Extent = Top->getMemRegionManager().getStaticSize(Top, SVB);
  if (Optional<nonloc::ConcreteInt> ExtentCI =
          Extent.getAs<nonloc::ConcreteInt>()) {
    const llvm::APSInt &ExtentInt = ExtentCI->getValue();
    assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned());
    // Extents are in bytes but region offsets are in bits. Be careful!
    Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth();
  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) {
    if (FR->getDecl()->isBitField())
      Length = FR->getDecl()->getBitWidthValue(SVB.getContext());
  }

  for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end();
       I != E; ++I) {
    BindingKey NextKey = I.getKey();
    if (NextKey.getRegion() == TopKey.getRegion()) {
      // FIXME: This doesn't catch the case where we're really invalidating a
      // region with a symbolic offset. Example:
      //   R: points[i].y
      //   Next: points[0].x

      if (NextKey.getOffset() > TopKey.getOffset() &&
          NextKey.getOffset() - TopKey.getOffset() < Length) {
        // Case 1: The next binding is inside the region we're invalidating.
        // Include it.
        Bindings.push_back(*I);

      } else if (NextKey.getOffset() == TopKey.getOffset()) {
        // Case 2: The next binding is at the same offset as the region we're
        // invalidating. In this case, we need to leave default bindings alone,
        // since they may be providing a default value for regions beyond what
        // we're invalidating.
        // FIXME: This is probably incorrect; consider invalidating an outer
        // struct whose first field is bound to a LazyCompoundVal.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          Bindings.push_back(*I);
      }

    } else if (NextKey.hasSymbolicOffset()) {
      const MemRegion *Base = NextKey.getConcreteOffsetRegion();
      if (Top->isSubRegionOf(Base) && Top != Base) {
        // Case 3: The next key is symbolic and we just changed something
        // within its concrete region. We don't know if the binding is still
        // valid, so we'll be conservative and include it.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) {
        // Case 4: The next key is symbolic, but we changed a known
        // super-region. In this case the binding is certainly included.
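        // (Illustrative: if Top is the whole VarRegion for 's' and this
        //  binding's concrete base is the subregion 's.arr' indexed by a
        //  symbolic 'i', changing 's' affects that binding too.)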
        if (BaseSR->isSubRegionOf(Top))
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      }
    }
  }
}

static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, bool IncludeAllDefaultBindings) {
  collectSubRegionBindings(Bindings, SVB, Cluster, Top,
                           BindingKey::Make(Top, BindingKey::Default),
                           IncludeAllDefaultBindings);
}

RegionBindingsRef
RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *Top) {
  BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default);
  const MemRegion *ClusterHead = TopKey.getBaseRegion();

  if (Top == ClusterHead) {
    // We can remove an entire cluster's bindings all in one go.
    return B.remove(Top);
  }

  const ClusterBindings *Cluster = B.lookup(ClusterHead);
  if (!Cluster) {
    // If we're invalidating a region with a symbolic offset, we need to make
    // sure we don't treat the base region as uninitialized anymore.
    if (TopKey.hasSymbolicOffset()) {
      const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
      return B.addBinding(Concrete, BindingKey::Default, UnknownVal());
    }
    return B;
  }

  SmallVector<BindingPair, 32> Bindings;
  collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey,
                           /*IncludeAllDefaultBindings=*/false);

  ClusterBindingsRef Result(*Cluster, CBFactory);
  for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(),
                                                    E = Bindings.end();
       I != E; ++I)
    Result = Result.remove(I->first);

  // If we're invalidating a region with a symbolic offset, we need to make
  // sure we don't treat the base region as uninitialized anymore.
  // FIXME: This isn't very precise; see the example in
  // collectSubRegionBindings.
  if (TopKey.hasSymbolicOffset()) {
    const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
    Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default),
                        UnknownVal());
  }

  if (Result.isEmpty())
    return B.remove(ClusterHead);
  return B.add(ClusterHead, Result.asImmutableMap());
}

namespace {
class InvalidateRegionsWorker : public ClusterAnalysis<InvalidateRegionsWorker>
{
  const Expr *Ex;
  unsigned Count;
  const LocationContext *LCtx;
  InvalidatedSymbols &IS;
  RegionAndSymbolInvalidationTraits &ITraits;
  StoreManager::InvalidatedRegions *Regions;
  GlobalsFilterKind GlobalsFilter;
public:
  InvalidateRegionsWorker(RegionStoreManager &rm,
                          ProgramStateManager &stateMgr,
                          RegionBindingsRef b,
                          const Expr *ex, unsigned count,
                          const LocationContext *lctx,
                          InvalidatedSymbols &is,
                          RegionAndSymbolInvalidationTraits &ITraitsIn,
                          StoreManager::InvalidatedRegions *r,
                          GlobalsFilterKind GFK)
      : ClusterAnalysis<InvalidateRegionsWorker>(rm, stateMgr, b),
        Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn),
        Regions(r), GlobalsFilter(GFK) {}

  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  void VisitBinding(SVal V);

  using ClusterAnalysis::AddToWorkList;

  bool AddToWorkList(const MemRegion *R);

  /// Returns true if all clusters in the memory space for \p Base should be
  /// invalidated.
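  /// (Per the definition further down, this holds for the initially-included
  /// global regions and for memory spaces marked with the TK_EntireMemSpace
  /// trait.)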
  bool includeEntireMemorySpace(const MemRegion *Base);

  /// Returns true if the memory space of the given region is one of the global
  /// regions specially included at the start of invalidation.
  bool isInitiallyIncludedGlobalRegion(const MemRegion *R);
};
}

bool InvalidateRegionsWorker::AddToWorkList(const MemRegion *R) {
  bool doNotInvalidateSuperRegion = ITraits.hasTrait(
      R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
  const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion();
  return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR));
}

void InvalidateRegionsWorker::VisitBinding(SVal V) {
  // A symbol? Mark it touched by the invalidation.
  if (SymbolRef Sym = V.getAsSymbol())
    IS.insert(Sym);

  if (const MemRegion *R = V.getAsRegion()) {
    AddToWorkList(R);
    return;
  }

  // Is it a LazyCompoundVal? All references get invalidated as well.
  if (Optional<nonloc::LazyCompoundVal> LCS =
          V.getAs<nonloc::LazyCompoundVal>()) {

    // `getInterestingValues()` returns SVals contained within LazyCompoundVals,
    // so there is no need to visit them.
    for (SVal V : RM.getInterestingValues(*LCS))
      if (!isa<nonloc::LazyCompoundVal>(V))
        VisitBinding(V);

    return;
  }
}

void InvalidateRegionsWorker::VisitCluster(const MemRegion *baseR,
                                           const ClusterBindings *C) {

  bool PreserveRegionsContents =
      ITraits.hasTrait(baseR,
                       RegionAndSymbolInvalidationTraits::TK_PreserveContents);

  if (C) {
    for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I)
      VisitBinding(I.getData());

    // Invalidate the region's contents.
    if (!PreserveRegionsContents)
      B = B.remove(baseR);
  }

  if (const auto *TO = dyn_cast<TypedValueRegion>(baseR)) {
    if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) {

      // Lambdas can affect all static local variables without explicitly
      // capturing them.
      // We invalidate all static locals referenced inside the lambda body.
      if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) {
        using namespace ast_matchers;

        const char *DeclBind = "DeclBind";
        StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr(
            to(varDecl(hasStaticStorageDuration()).bind(DeclBind)))));
        auto Matches =
            match(RefToStatic, *RD->getLambdaCallOperator()->getBody(),
                  RD->getASTContext());

        for (BoundNodes &Match : Matches) {
          auto *VD = Match.getNodeAs<VarDecl>(DeclBind);
          const VarRegion *ToInvalidate =
              RM.getRegionManager().getVarRegion(VD, LCtx);
          AddToWorkList(ToInvalidate);
        }
      }
    }
  }

  // BlockDataRegion? If so, invalidate captured variables that are passed
  // by reference.
  if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) {
    for (BlockDataRegion::referenced_vars_iterator
             BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end();
         BI != BE; ++BI) {
      const VarRegion *VR = BI.getCapturedRegion();
      const VarDecl *VD = VR->getDecl();
      if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) {
        AddToWorkList(VR);
      }
      else if (Loc::isLocType(VR->getValueType())) {
        // Map the current bindings to a Store to retrieve the value
        // of the binding. If that binding itself is a region, we should
        // invalidate that region.
        // This is because a block may capture a pointer value, but the thing
        // pointed to by that pointer may get invalidated.
        SVal V = RM.getBinding(B, loc::MemRegionVal(VR));
        if (Optional<Loc> L = V.getAs<Loc>()) {
          if (const MemRegion *LR = L->getAsRegion())
            AddToWorkList(LR);
        }
      }
    }
    return;
  }

  // Symbolic region?
  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR))
    IS.insert(SR->getSymbol());

  // Nothing else should be done in the case when we preserve the region's
  // contents.
  if (PreserveRegionsContents)
    return;

  // Otherwise, we have a normal data region. Record that we touched the region.
  if (Regions)
    Regions->push_back(baseR);

  if (isa<AllocaRegion, SymbolicRegion>(baseR)) {
    // Invalidate the region by setting its default value to a
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (!baseR->isBoundable())
    return;

  const TypedValueRegion *TR = cast<TypedValueRegion>(baseR);
  QualType T = TR->getValueType();

  if (isInitiallyIncludedGlobalRegion(baseR)) {
    // If the region is a global and we are invalidating all globals,
    // erasing the entry is good enough. This causes all globals to be lazily
    // symbolicated from the same base symbol.
    return;
  }

  if (T->isRecordType()) {
    // Invalidate the region by setting its default value to a
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                          Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (const ArrayType *AT = Ctx.getAsArrayType(T)) {
    bool doNotInvalidateSuperRegion = ITraits.hasTrait(
        baseR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);

    if (doNotInvalidateSuperRegion) {
      // We are not doing blank invalidation of the whole array region, so we
      // have to manually invalidate each element.
      Optional<uint64_t> NumElements;

      // Compute lower and upper offsets for region within array.
      if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT))
        NumElements = CAT->getSize().getZExtValue();
      if (!NumElements) // We are not dealing with a constant size array
        goto conjure_default;
      QualType ElementTy = AT->getElementType();
      uint64_t ElemSize = Ctx.getTypeSize(ElementTy);
      const RegionOffset &RO = baseR->getAsOffset();
      const MemRegion *SuperR = baseR->getBaseRegion();
      if (RO.hasSymbolicOffset()) {
        // If the base region has a symbolic offset,
        // we revert to invalidating the super region.
        if (SuperR)
          AddToWorkList(SuperR);
        goto conjure_default;
      }

      uint64_t LowerOffset = RO.getOffset();
      uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize;
      bool UpperOverflow = UpperOffset < LowerOffset;

      // Invalidate regions which are within array boundaries,
      // or have a symbolic offset.
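      // Illustrative example (names hypothetical): invalidating only 'a.buf',
      // where 'buf' is an int[10] member of 'a', removes bindings in a's
      // cluster whose concrete offsets fall in [LowerOffset, UpperOffset),
      // and drops symbolic-offset bindings conservatively.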
      if (!SuperR)
        goto conjure_default;

      const ClusterBindings *C = B.lookup(SuperR);
      if (!C)
        goto conjure_default;

      for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E;
           ++I) {
        const BindingKey &BK = I.getKey();
        Optional<uint64_t> ROffset =
            BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset();

        // Check that the offset is not symbolic and is within the array's
        // boundaries. This handles arrays of 0 elements and of 0-sized
        // elements as well.
        if (!ROffset ||
            ((*ROffset >= LowerOffset && *ROffset < UpperOffset) ||
             (UpperOverflow &&
              (*ROffset >= LowerOffset || *ROffset < UpperOffset)) ||
             (LowerOffset == UpperOffset && *ROffset == LowerOffset))) {
          B = B.removeBinding(I.getKey());
          // Bound symbolic regions need to be invalidated for dead symbol
          // detection.
          SVal V = I.getData();
          const MemRegion *R = V.getAsRegion();
          if (isa_and_nonnull<SymbolicRegion>(R))
            VisitBinding(V);
        }
      }
    }
  conjure_default:
    // Set the default value of the array to a conjured symbol.
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                     AT->getElementType(), Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                        T, Count);
  assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
  B = B.addBinding(baseR, BindingKey::Direct, V);
}

bool InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
    const MemRegion *R) {
  switch (GlobalsFilter) {
  case GFK_None:
    return false;
  case GFK_SystemOnly:
    return isa<GlobalSystemSpaceRegion>(R->getMemorySpace());
  case GFK_All:
    return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace());
  }

  llvm_unreachable("unknown globals filter");
}

bool InvalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
  if (isInitiallyIncludedGlobalRegion(Base))
    return true;

  const MemSpaceRegion *MemSpace = Base->getMemorySpace();
  return ITraits.hasTrait(MemSpace,
                          RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
}

RegionBindingsRef
RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated) {
  // Bind the globals memory space to a new symbol that we will use to derive
  // the bindings for all globals.
  const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
  SVal V = svalBuilder.conjureSymbolVal(/* symbolTag = */ (const void*) GS, Ex, LCtx,
                                        /* type does not matter */ Ctx.IntTy,
                                        Count);

  B = B.removeBinding(GS)
       .addBinding(BindingKey::Make(GS, BindingKey::Default), V);

  // Even if there are no bindings in the global scope, we still need to
  // record that we touched it.
  if (Invalidated)
    Invalidated->push_back(GS);

  return B;
}

void RegionStoreManager::populateWorkList(InvalidateRegionsWorker &W,
                                          ArrayRef<SVal> Values,
                                          InvalidatedRegions *TopLevelRegions) {
  for (ArrayRef<SVal>::iterator I = Values.begin(),
                                E = Values.end(); I != E; ++I) {
    SVal V = *I;
    if (Optional<nonloc::LazyCompoundVal> LCS =
            V.getAs<nonloc::LazyCompoundVal>()) {

      for (SVal S : getInterestingValues(*LCS))
        if (const MemRegion *R = S.getAsRegion())
          W.AddToWorkList(R);

      continue;
    }

    if (const MemRegion *R = V.getAsRegion()) {
      if (TopLevelRegions)
        TopLevelRegions->push_back(R);
      W.AddToWorkList(R);
      continue;
    }
  }
}

StoreRef
RegionStoreManager::invalidateRegions(Store store,
                                      ArrayRef<SVal> Values,
                                      const Expr *Ex, unsigned Count,
                                      const LocationContext *LCtx,
                                      const CallEvent *Call,
                                      InvalidatedSymbols &IS,
                                      RegionAndSymbolInvalidationTraits &ITraits,
                                      InvalidatedRegions *TopLevelRegions,
                                      InvalidatedRegions *Invalidated) {
  GlobalsFilterKind GlobalsFilter;
  if (Call) {
    if (Call->isInSystemHeader())
      GlobalsFilter = GFK_SystemOnly;
    else
      GlobalsFilter = GFK_All;
  } else {
    GlobalsFilter = GFK_None;
  }

  RegionBindingsRef B = getRegionBindings(store);
  InvalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits,
                            Invalidated, GlobalsFilter);

  // Scan the bindings and generate the clusters.
  W.GenerateClusters();

  // Add the regions to the worklist.
  populateWorkList(W, Values, TopLevelRegions);

  W.RunWorkList();

  // Return the new bindings.
  B = W.getRegionBindings();

  // For calls, determine which global regions should be invalidated and
  // invalidate them. (Note that function-static and immutable globals are
  // never invalidated by this.)
  // TODO: This could possibly be more precise with modules.
  switch (GlobalsFilter) {
  case GFK_All:
    B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind,
                               Ex, Count, LCtx, B, Invalidated);
    [[fallthrough]];
  case GFK_SystemOnly:
    B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind,
                               Ex, Count, LCtx, B, Invalidated);
    [[fallthrough]];
  case GFK_None:
    break;
  }

  return StoreRef(B.asStore(), *this);
}

//===----------------------------------------------------------------------===//
// Location and region casting.
//===----------------------------------------------------------------------===//

/// ArrayToPointer - Emulates the "decay" of an array to a pointer
/// type. 'Array' represents the lvalue of the array being decayed
/// to a pointer, and the returned SVal represents the decayed
/// version of that lvalue (i.e., a pointer to the first element of
/// the array). This is called by ExprEngine when evaluating casts
/// from arrays to pointers.
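///
/// A decay sketch (illustrative): for `int a[4];`, the lvalue of 'a' decays
/// to the element region `a[0]` of type 'int', i.e. the address of the first
/// element.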
SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) {
  if (isa<loc::ConcreteInt>(Array))
    return Array;

  if (!isa<loc::MemRegionVal>(Array))
    return UnknownVal();

  const SubRegion *R =
      cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion());
  NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex();
  return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx));
}

//===----------------------------------------------------------------------===//
// Loading values from regions.
//===----------------------------------------------------------------------===//

SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) {
  assert(!isa<UnknownVal>(L) && "location unknown");
  assert(!isa<UndefinedVal>(L) && "location undefined");

  // For access to concrete addresses, return UnknownVal. Checks
  // for null dereferences (and similar errors) are done by checkers, not
  // the Store.
  // FIXME: We can consider lazily symbolicating such memory, but we really
  // should defer this until we can reason easily about symbolicating arrays
  // of bytes.
  if (L.getAs<loc::ConcreteInt>()) {
    return UnknownVal();
  }
  if (!L.getAs<loc::MemRegionVal>()) {
    return UnknownVal();
  }

  const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion();

  if (isa<BlockDataRegion>(MR)) {
    return UnknownVal();
  }

  // Auto-detect the binding type.
  if (T.isNull()) {
    if (const auto *TVR = dyn_cast<TypedValueRegion>(MR))
      T = TVR->getValueType();
    else if (const auto *TR = dyn_cast<TypedRegion>(MR))
      T = TR->getLocationType()->getPointeeType();
    else if (const auto *SR = dyn_cast<SymbolicRegion>(MR))
      T = SR->getPointeeStaticType();
  }
  assert(!T.isNull() && "Unable to auto-detect binding type!");
  assert(!T->isVoidType() && "Attempting to dereference a void pointer!");

  if (!isa<TypedValueRegion>(MR))
    MR = GetElementZeroRegion(cast<SubRegion>(MR), T);

  // FIXME: Perhaps this method should just take a 'const MemRegion*' argument
  // instead of 'Loc', and have the other Loc cases handled at a higher level.
  const TypedValueRegion *R = cast<TypedValueRegion>(MR);
  QualType RTy = R->getValueType();

  // FIXME: we do not yet model the parts of a complex type, so treat the
  // whole thing as "unknown".
  if (RTy->isAnyComplexType())
    return UnknownVal();

  // FIXME: We should eventually handle funny addressing, e.g.:
  //
  //   int x = ...;
  //   int *p = &x;
  //   char *q = (char*) p;
  //   char c = *q;  // returns the first byte of 'x'.
  //
  // Such funny addressing will occur due to layering of regions.
  if (RTy->isStructureOrClassType())
    return getBindingForStruct(B, R);

  // FIXME: Handle unions.
  if (RTy->isUnionType())
    return createLazyBinding(B, R);

  if (RTy->isArrayType()) {
    if (RTy->isConstantArrayType())
      return getBindingForArray(B, R);
    else
      return UnknownVal();
  }

  // FIXME: handle Vector types.
  if (RTy->isVectorType())
    return UnknownVal();

  if (const FieldRegion* FR = dyn_cast<FieldRegion>(R))
    return svalBuilder.evalCast(getBindingForField(B, FR), T, QualType{});

  if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the element type. Eventually we want to compose these values
    // more intelligently. For example, an 'element' can encompass multiple
    // bound regions (e.g., several bound bytes), or could be a subset of
    // a larger value.
    return svalBuilder.evalCast(getBindingForElement(B, ER), T, QualType{});
  }

  if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the ivar type. What we should model is stores to ivars
    // that blow past the extent of the ivar. If the address of the ivar is
    // reinterpreted, it is possible we stored a different value that could
    // fit within the ivar. Either we need to cast these when storing them
    // or reinterpret them lazily (as we do here).
    return svalBuilder.evalCast(getBindingForObjCIvar(B, IVR), T, QualType{});
  }

  if (const VarRegion *VR = dyn_cast<VarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the variable type. What we should model is stores to variables
    // that blow past the extent of the variable. If the address of the
    // variable is reinterpreted, it is possible we stored a different value
    // that could fit within the variable. Either we need to cast these when
    // storing them or reinterpret them lazily (as we do here).
    return svalBuilder.evalCast(getBindingForVar(B, VR), T, QualType{});
  }

  const SVal *V = B.lookup(R, BindingKey::Direct);

  // Check if the region has a binding.
  if (V)
    return *V;

  // The location does not have a bound value. This means that it has
  // the value it had upon its creation and/or entry to the analyzed
  // function/method. These are either symbolic values or 'undefined'.
  if (R->hasStackNonParametersStorage()) {
    // All stack variables are considered to have undefined values
    // upon creation. All heap-allocated blocks are considered to
    // have undefined values as well unless they are explicitly bound
    // to specific values.
    return UndefinedVal();
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(R);
}

static QualType getUnderlyingType(const SubRegion *R) {
  QualType RegionTy;
  if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R))
    RegionTy = TVR->getValueType();

  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R))
    RegionTy = SR->getSymbol()->getType();

  return RegionTy;
}

/// Checks to see if store \p B has a lazy binding for region \p R.
///
/// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected
/// if there are additional bindings within \p R.
///
/// Note that unlike RegionStoreManager::findLazyBinding, this will not search
/// for lazy bindings for super-regions of \p R.
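///
/// As a usage sketch, findLazyBinding() below calls this while walking up
/// from \p R: if a matching whole-region LazyCompoundVal default binding
/// already exists (and, when requested, nothing else is bound inside \p R),
/// that existing value is reused rather than rebuilt.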
1541 static Optional<nonloc::LazyCompoundVal> 1542 getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B, 1543 const SubRegion *R, bool AllowSubregionBindings) { 1544 Optional<SVal> V = B.getDefaultBinding(R); 1545 if (!V) 1546 return std::nullopt; 1547 1548 Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>(); 1549 if (!LCV) 1550 return std::nullopt; 1551 1552 // If the LCV is for a subregion, the types might not match, and we shouldn't 1553 // reuse the binding. 1554 QualType RegionTy = getUnderlyingType(R); 1555 if (!RegionTy.isNull() && 1556 !RegionTy->isVoidPointerType()) { 1557 QualType SourceRegionTy = LCV->getRegion()->getValueType(); 1558 if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy)) 1559 return std::nullopt; 1560 } 1561 1562 if (!AllowSubregionBindings) { 1563 // If there are any other bindings within this region, we shouldn't reuse 1564 // the top-level binding. 1565 SmallVector<BindingPair, 16> Bindings; 1566 collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R, 1567 /*IncludeAllDefaultBindings=*/true); 1568 if (Bindings.size() > 1) 1569 return std::nullopt; 1570 } 1571 1572 return *LCV; 1573 } 1574 1575 1576 std::pair<Store, const SubRegion *> 1577 RegionStoreManager::findLazyBinding(RegionBindingsConstRef B, 1578 const SubRegion *R, 1579 const SubRegion *originalRegion) { 1580 if (originalRegion != R) { 1581 if (Optional<nonloc::LazyCompoundVal> V = 1582 getExistingLazyBinding(svalBuilder, B, R, true)) 1583 return std::make_pair(V->getStore(), V->getRegion()); 1584 } 1585 1586 typedef std::pair<Store, const SubRegion *> StoreRegionPair; 1587 StoreRegionPair Result = StoreRegionPair(); 1588 1589 if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) { 1590 Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()), 1591 originalRegion); 1592 1593 if (Result.second) 1594 Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second); 1595 1596 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) { 1597 Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()), 1598 originalRegion); 1599 1600 if (Result.second) 1601 Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second); 1602 1603 } else if (const CXXBaseObjectRegion *BaseReg = 1604 dyn_cast<CXXBaseObjectRegion>(R)) { 1605 // C++ base object region is another kind of region that we should blast 1606 // through to look for lazy compound value. It is like a field region. 1607 Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()), 1608 originalRegion); 1609 1610 if (Result.second) 1611 Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg, 1612 Result.second); 1613 } 1614 1615 return Result; 1616 } 1617 1618 /// This is a helper function for `getConstantValFromConstArrayInitializer`. 1619 /// 1620 /// Return an array of extents of the declared array type. 1621 /// 1622 /// E.g. for `int x[1][2][3];` returns { 1, 2, 3 }. 1623 static SmallVector<uint64_t, 2> 1624 getConstantArrayExtents(const ConstantArrayType *CAT) { 1625 assert(CAT && "ConstantArrayType should not be null"); 1626 CAT = cast<ConstantArrayType>(CAT->getCanonicalTypeInternal()); 1627 SmallVector<uint64_t, 2> Extents; 1628 do { 1629 Extents.push_back(CAT->getSize().getZExtValue()); 1630 } while ((CAT = dyn_cast<ConstantArrayType>(CAT->getElementType()))); 1631 return Extents; 1632 } 1633 1634 /// This is a helper function for `getConstantValFromConstArrayInitializer`. 
///
/// Return an array of offsets from nested ElementRegions and a root base
/// region. The array is never empty and a base region is never null.
///
/// E.g. for `Element{Element{Element{VarRegion,1},2},3}` returns { 3, 2, 1 }.
/// This represents an access through indirection: `arr[1][2][3];`
///
/// \param ER The given (possibly nested) ElementRegion.
///
/// \note The result array is in the reverse order of the indirection
/// expression: arr[1][2][3] -> { 3, 2, 1 }. This helps to provide complexity
/// O(n), where n is the number of indirections. It may not affect performance
/// in real-life code, though.
static std::pair<SmallVector<SVal, 2>, const MemRegion *>
getElementRegionOffsetsWithBase(const ElementRegion *ER) {
  assert(ER && "ElementRegion should not be null");
  const MemRegion *Base;
  SmallVector<SVal, 2> SValOffsets;
  do {
    SValOffsets.push_back(ER->getIndex());
    Base = ER->getSuperRegion();
    ER = dyn_cast<ElementRegion>(Base);
  } while (ER);
  return {SValOffsets, Base};
}

/// This is a helper function for `getConstantValFromConstArrayInitializer`.
///
/// Convert an array of offsets from `SVal` to `uint64_t`, checking each offset
/// against the respective array extent.
/// \param SrcOffsets [in] The array of offsets of type `SVal` in reversed
///   order (typically received from `getElementRegionOffsetsWithBase`).
/// \param ArrayExtents [in] The array of extents.
/// \param DstOffsets [out] The array of offsets of type `uint64_t`.
/// \returns:
///   - `std::nullopt` for a successful conversion.
///   - `UndefinedVal` or `UnknownVal` otherwise. It's expected that this SVal
///     will be returned as the result of the access operation.
///
/// \example:
///   const int arr[10][20][30] = {}; // ArrayExtents { 10, 20, 30 }
///   int x1 = arr[4][5][6];  // SrcOffsets { NonLoc(6), NonLoc(5), NonLoc(4) }
///                           // DstOffsets { 4, 5, 6 }
///                           // returns std::nullopt
///   int x2 = arr[42][5][-6]; // returns UndefinedVal
///   int x3 = arr[4][5][x2];  // returns UnknownVal
static Optional<SVal>
convertOffsetsFromSvalToUnsigneds(const SmallVector<SVal, 2> &SrcOffsets,
                                  const SmallVector<uint64_t, 2> ArrayExtents,
                                  SmallVector<uint64_t, 2> &DstOffsets) {
  // Check offsets for being out of bounds.
  // C++20 [expr.add] 7.6.6.4 (excerpt):
  //   If P points to an array element i of an array object x with n
  //   elements, where i < 0 or i > n, the behavior is undefined.
  //   Dereferencing is not allowed on the "one past the last
  //   element", when i == n.
  // Example:
  //   const int arr[3][2] = {{1, 2}, {3, 4}};
  //   arr[0][0];  // 1
  //   arr[0][1];  // 2
  //   arr[0][2];  // UB
  //   arr[1][0];  // 3
  //   arr[1][1];  // 4
  //   arr[1][-1]; // UB
  //   arr[2][0];  // 0
  //   arr[2][1];  // 0
  //   arr[-2][0]; // UB
  DstOffsets.resize(SrcOffsets.size());
  auto ExtentIt = ArrayExtents.begin();
  auto OffsetIt = DstOffsets.begin();
  // Reverse `SValOffsets` to make it consistent with `ArrayExtents`.
  for (SVal V : llvm::reverse(SrcOffsets)) {
    if (auto CI = V.getAs<nonloc::ConcreteInt>()) {
      // When the offset is out of the array's bounds, the result is UB.
      const llvm::APSInt &Offset = CI->getValue();
      if (Offset.isNegative() || Offset.uge(*(ExtentIt++)))
        return UndefinedVal();
      // Store the index in reversed order relative to `SrcOffsets`.
      *(OffsetIt++) = Offset.getZExtValue();
      continue;
    }
    // A symbolic index is present. Return an Unknown value.
    // FIXME: We also need to take ElementRegions with symbolic indexes into
    // account.
    return UnknownVal();
  }
  return std::nullopt;
}

Optional<SVal> RegionStoreManager::getConstantValFromConstArrayInitializer(
    RegionBindingsConstRef B, const ElementRegion *R) {
  assert(R && "ElementRegion should not be null");

  // Treat an n-dimensional array.
  SmallVector<SVal, 2> SValOffsets;
  const MemRegion *Base;
  std::tie(SValOffsets, Base) = getElementRegionOffsetsWithBase(R);
  const VarRegion *VR = dyn_cast<VarRegion>(Base);
  if (!VR)
    return std::nullopt;

  assert(!SValOffsets.empty() && "getElementRegionOffsetsWithBase guarantees "
                                 "the offsets vector is not empty.");

  // Check if the containing array has an initialized value that we can trust.
  // We can trust a const value or a value of a global initializer in main().
  const VarDecl *VD = VR->getDecl();
  if (!VD->getType().isConstQualified() &&
      !R->getElementType().isConstQualified() &&
      (!B.isMainAnalysis() || !VD->hasGlobalStorage()))
    return std::nullopt;

  // The array's declaration should have a `ConstantArrayType`, because only
  // this type carries the array extent. However, the type of a particular
  // declaration may be an `IncompleteArrayType`; to get a `ConstantArrayType`,
  // we should find the declaration in the redeclaration chain that has the
  // initialization expression.
  // NOTE: `getAnyInitializer` has an out-parameter, which returns a new `VD`
  // from which an initializer is obtained. We replace the current `VD` with
  // the new `VD`. If the return value of the function is null, then `VD` won't
  // be replaced.
  const Expr *Init = VD->getAnyInitializer(VD);
  // NOTE: If `Init` is non-null, then the new `VD` is non-null for sure. So
  // check `Init` for null only and don't worry about the replaced `VD`.
  if (!Init)
    return std::nullopt;

  // The array's declaration should have a ConstantArrayType, because only this
  // type contains an array extent.
  const ConstantArrayType *CAT = Ctx.getAsConstantArrayType(VD->getType());
  if (!CAT)
    return std::nullopt;

  // Get the array extents.
  SmallVector<uint64_t, 2> Extents = getConstantArrayExtents(CAT);

  // The number of offsets should be equal to the number of extents;
  // otherwise wrong type punning has occurred. For instance:
  //   int arr[1][2][3];
  //   auto ptr = (int(*)[42])arr;
  //   auto x = ptr[4][2]; // UB
  // FIXME: Should return UndefinedVal.
  if (SValOffsets.size() != Extents.size())
    return std::nullopt;

  SmallVector<uint64_t, 2> ConcreteOffsets;
  if (Optional<SVal> V = convertOffsetsFromSvalToUnsigneds(SValOffsets, Extents,
                                                           ConcreteOffsets))
    return *V;

  // Handle InitListExpr.
  // Example:
  //   const char arr[4][2] = { { 1, 2 }, { 3 }, 4, 5 };
  if (const auto *ILE = dyn_cast<InitListExpr>(Init))
    return getSValFromInitListExpr(ILE, ConcreteOffsets, R->getElementType());

  // Handle StringLiteral.
  // Example:
  //   const char arr[] = "abc";
  if (const auto *SL = dyn_cast<StringLiteral>(Init))
    return getSValFromStringLiteral(SL, ConcreteOffsets.front(),
                                    R->getElementType());

  // FIXME: Handle CompoundLiteralExpr.

  return std::nullopt;
}

/// Returns an SVal, if possible, for the specified position of an
/// initialization list.
///
/// \param ILE The given initialization list.
/// \param Offsets The array of unsigned offsets. E.g. for the expression
///   `int x = arr[1][2][3];` the array should be { 1, 2, 3 }.
/// \param ElemT The type of the result SVal expression.
/// \return Optional SVal for the particular position in the initialization
///   list. E.g. for the list `{{1, 2}, {3, 4}, {5, 6}, {}}` offsets:
///   - {1, 1} returns SVal{4}, because it's the second position in the second
///     sublist;
///   - {3, 0} returns SVal{0}, because there's no explicit value at this
///     position in the sublist.
///
/// NOTE: In order to get a valid SVal, the caller shall guarantee valid
/// offsets for the given initialization list. Otherwise the SVal may be
/// equivalent to 0 or trigger an assertion.
Optional<SVal> RegionStoreManager::getSValFromInitListExpr(
    const InitListExpr *ILE, const SmallVector<uint64_t, 2> &Offsets,
    QualType ElemT) {
  assert(ILE && "InitListExpr should not be null");

  for (uint64_t Offset : Offsets) {
    // C++20 [dcl.init.string] 9.4.2.1:
    //   An array of ordinary character type [...] can be initialized by [...]
    //   an appropriately-typed string-literal enclosed in braces.
    // Example:
    //   const char arr[] = { "abc" };
    if (ILE->isStringLiteralInit())
      if (const auto *SL = dyn_cast<StringLiteral>(ILE->getInit(0)))
        return getSValFromStringLiteral(SL, Offset, ElemT);

    // C++20 [expr.add] 9.4.17.5 (excerpt):
    //   i-th array element is value-initialized for each k < i ≤ n,
    //   where k is an expression-list size and n is an array extent.
    if (Offset >= ILE->getNumInits())
      return svalBuilder.makeZeroVal(ElemT);

    const Expr *E = ILE->getInit(Offset);
    const auto *IL = dyn_cast<InitListExpr>(E);
    if (!IL)
      // Return a constant value, if one is present.
      // FIXME: Support other SVals.
      return svalBuilder.getConstantVal(E);

    // Go to the nested initializer list.
    ILE = IL;
  }
  llvm_unreachable(
      "Unhandled InitListExpr sub-expressions or invalid offsets.");
}

/// Returns an SVal, if possible, for the specified position in a string
/// literal.
///
/// \param SL The given string literal.
/// \param Offset The unsigned offset. E.g. for the expression
///   `char x = str[42];` the offset should be 42.
///   E.g. for the string "abc" offset:
///   - 1 returns SVal{b}, because it's the second position in the string.
///   - 42 returns SVal{0}, because there's no explicit value at this
///     position in the string.
/// \param ElemT The type of the result SVal expression.
///
/// NOTE: We return `0` for every offset >= the literal length for array
/// declarations, like:
///   const char str[42] = "123";      // Literal length is 4.
///   char c = str[41];                // Offset is 41.
/// FIXME: Nevertheless, we can't do the same for pointer declarations, like:
///   const char * const str = "123";  // Literal length is 4.
///   char c = str[41];                //
Returns `0`, but Undef 1875 /// // expected. 1876 /// It should be properly handled before reaching this point. 1877 /// The main problem is that we can't distinguish between these declarations, 1878 /// because in case of array we can get the Decl from VarRegion, but in case 1879 /// of pointer the region is a StringRegion, which doesn't contain a Decl. 1880 /// Possible solution could be passing an array extent along with the offset. 1881 SVal RegionStoreManager::getSValFromStringLiteral(const StringLiteral *SL, 1882 uint64_t Offset, 1883 QualType ElemT) { 1884 assert(SL && "StringLiteral should not be null"); 1885 // C++20 [dcl.init.string] 9.4.2.3: 1886 // If there are fewer initializers than there are array elements, each 1887 // element not explicitly initialized shall be zero-initialized [dcl.init]. 1888 uint32_t Code = (Offset >= SL->getLength()) ? 0 : SL->getCodeUnit(Offset); 1889 return svalBuilder.makeIntVal(Code, ElemT); 1890 } 1891 1892 static Optional<SVal> getDerivedSymbolForBinding( 1893 RegionBindingsConstRef B, const TypedValueRegion *BaseRegion, 1894 const TypedValueRegion *SubReg, const ASTContext &Ctx, SValBuilder &SVB) { 1895 assert(BaseRegion); 1896 QualType BaseTy = BaseRegion->getValueType(); 1897 QualType Ty = SubReg->getValueType(); 1898 if (BaseTy->isScalarType() && Ty->isScalarType()) { 1899 if (Ctx.getTypeSizeInChars(BaseTy) >= Ctx.getTypeSizeInChars(Ty)) { 1900 if (const Optional<SVal> &ParentValue = B.getDirectBinding(BaseRegion)) { 1901 if (SymbolRef ParentValueAsSym = ParentValue->getAsSymbol()) 1902 return SVB.getDerivedRegionValueSymbolVal(ParentValueAsSym, SubReg); 1903 1904 if (ParentValue->isUndef()) 1905 return UndefinedVal(); 1906 1907 // Other cases: give up. We are indexing into a larger object 1908 // that has some value, but we don't know how to handle that yet. 1909 return UnknownVal(); 1910 } 1911 } 1912 } 1913 return std::nullopt; 1914 } 1915 1916 SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B, 1917 const ElementRegion* R) { 1918 // Check if the region has a binding. 1919 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1920 return *V; 1921 1922 const MemRegion* superR = R->getSuperRegion(); 1923 1924 // Check if the region is an element region of a string literal. 1925 if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) { 1926 // FIXME: Handle loads from strings where the literal is treated as 1927 // an integer, e.g., *((unsigned int*)"hello"). Such loads are UB according 1928 // to C++20 7.2.1.11 [basic.lval]. 1929 QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType(); 1930 if (!Ctx.hasSameUnqualifiedType(T, R->getElementType())) 1931 return UnknownVal(); 1932 if (const auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) { 1933 const llvm::APSInt &Idx = CI->getValue(); 1934 if (Idx < 0) 1935 return UndefinedVal(); 1936 const StringLiteral *SL = StrR->getStringLiteral(); 1937 return getSValFromStringLiteral(SL, Idx.getZExtValue(), T); 1938 } 1939 } else if (isa<ElementRegion, VarRegion>(superR)) { 1940 if (Optional<SVal> V = getConstantValFromConstArrayInitializer(B, R)) 1941 return *V; 1942 } 1943 1944 // Check for loads from a code text region. For such loads, just give up. 1945 if (isa<CodeTextRegion>(superR)) 1946 return UnknownVal(); 1947 1948 // Handle the case where we are indexing into a larger scalar object. 1949 // For example, this handles: 1950 // int x = ... 
  //    char *y = &x;
  //    return *y;
  // FIXME: This is a hack, and doesn't do anything really intelligent yet.
  const RegionRawOffset &O = R->getAsArrayOffset();

  // If we cannot reason about the offset, return an unknown value.
  if (!O.getRegion())
    return UnknownVal();

  if (const TypedValueRegion *baseR = dyn_cast<TypedValueRegion>(O.getRegion()))
    if (auto V = getDerivedSymbolForBinding(B, baseR, R, Ctx, svalBuilder))
      return *V;

  return getBindingForFieldOrElementCommon(B, R, R->getElementType());
}

SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B,
                                            const FieldRegion* R) {

  // Check if the region has a binding.
  if (const Optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  // If the containing record was initialized, try to get its constant value.
  const FieldDecl *FD = R->getDecl();
  QualType Ty = FD->getType();
  const MemRegion* superR = R->getSuperRegion();
  if (const auto *VR = dyn_cast<VarRegion>(superR)) {
    const VarDecl *VD = VR->getDecl();
    QualType RecordVarTy = VD->getType();
    unsigned Index = FD->getFieldIndex();
    // Either the record variable or the field has an initializer that we can
    // trust. We trust initializers of constants and, additionally, respect
    // initializers of globals when analyzing main().
    if (RecordVarTy.isConstQualified() || Ty.isConstQualified() ||
        (B.isMainAnalysis() && VD->hasGlobalStorage()))
      if (const Expr *Init = VD->getAnyInitializer())
        if (const auto *InitList = dyn_cast<InitListExpr>(Init)) {
          if (Index < InitList->getNumInits()) {
            if (const Expr *FieldInit = InitList->getInit(Index))
              if (Optional<SVal> V = svalBuilder.getConstantVal(FieldInit))
                return *V;
          } else {
            return svalBuilder.makeZeroVal(Ty);
          }
        }
  }

  // Handle the case where we are accessing into a larger scalar object.
  // For example, this handles:
  //   struct header {
  //     unsigned a : 1;
  //     unsigned b : 1;
  //   };
  //   struct parse_t {
  //     unsigned bits0 : 1;
  //     unsigned bits2 : 2; // <-- header
  //     unsigned bits4 : 4;
  //   };
  //   int parse(parse_t *p) {
  //     unsigned copy = p->bits2;
  //     header *bits = (header *)&copy;
  //     return bits->b;  <-- here
  //   }
  if (const auto *Base = dyn_cast<TypedValueRegion>(R->getBaseRegion()))
    if (auto V = getDerivedSymbolForBinding(B, Base, R, Ctx, svalBuilder))
      return *V;

  return getBindingForFieldOrElementCommon(B, R, Ty);
}

Optional<SVal>
RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
                                                     const MemRegion *superR,
                                                     const TypedValueRegion *R,
                                                     QualType Ty) {

  if (const Optional<SVal> &D = B.getDefaultBinding(superR)) {
    const SVal &val = *D;
    if (SymbolRef parentSym = val.getAsSymbol())
      return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);

    if (val.isZeroConstant())
      return svalBuilder.makeZeroVal(Ty);

    if (val.isUnknownOrUndef())
      return val;

    // Lazy bindings are usually handled through getExistingLazyBinding().
    // We should unify these two code paths at some point.
2041 if (isa<nonloc::LazyCompoundVal, nonloc::CompoundVal>(val)) 2042 return val; 2043 2044 llvm_unreachable("Unknown default value"); 2045 } 2046 2047 return std::nullopt; 2048 } 2049 2050 SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion, 2051 RegionBindingsRef LazyBinding) { 2052 SVal Result; 2053 if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion)) 2054 Result = getBindingForElement(LazyBinding, ER); 2055 else 2056 Result = getBindingForField(LazyBinding, 2057 cast<FieldRegion>(LazyBindingRegion)); 2058 2059 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 2060 // default value for /part/ of an aggregate from a default value for the 2061 // /entire/ aggregate. The most common case of this is when struct Outer 2062 // has as its first member a struct Inner, which is copied in from a stack 2063 // variable. In this case, even if the Outer's default value is symbolic, 0, 2064 // or unknown, it gets overridden by the Inner's default value of undefined. 2065 // 2066 // This is a general problem -- if the Inner is zero-initialized, the Outer 2067 // will now look zero-initialized. The proper way to solve this is with a 2068 // new version of RegionStore that tracks the extent of a binding as well 2069 // as the offset. 2070 // 2071 // This hack only takes care of the undefined case because that can very 2072 // quickly result in a warning. 2073 if (Result.isUndef()) 2074 Result = UnknownVal(); 2075 2076 return Result; 2077 } 2078 2079 SVal 2080 RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 2081 const TypedValueRegion *R, 2082 QualType Ty) { 2083 2084 // At this point we have already checked in either getBindingForElement or 2085 // getBindingForField if 'R' has a direct binding. 2086 2087 // Lazy binding? 2088 Store lazyBindingStore = nullptr; 2089 const SubRegion *lazyBindingRegion = nullptr; 2090 std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R); 2091 if (lazyBindingRegion) 2092 return getLazyBinding(lazyBindingRegion, 2093 getRegionBindings(lazyBindingStore)); 2094 2095 // Record whether or not we see a symbolic index. That can completely 2096 // be out of scope of our lookup. 2097 bool hasSymbolicIndex = false; 2098 2099 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 2100 // default value for /part/ of an aggregate from a default value for the 2101 // /entire/ aggregate. The most common case of this is when struct Outer 2102 // has as its first member a struct Inner, which is copied in from a stack 2103 // variable. In this case, even if the Outer's default value is symbolic, 0, 2104 // or unknown, it gets overridden by the Inner's default value of undefined. 2105 // 2106 // This is a general problem -- if the Inner is zero-initialized, the Outer 2107 // will now look zero-initialized. The proper way to solve this is with a 2108 // new version of RegionStore that tracks the extent of a binding as well 2109 // as the offset. 2110 // 2111 // This hack only takes care of the undefined case because that can very 2112 // quickly result in a warning. 
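  // An illustrative sketch of the situation described above (hypothetical
  // types 'Inner'/'Outer'):
  //   struct Inner { int x; };
  //   struct Outer { Inner in; int y; };
  //   void f(Outer *p) {
  //     Outer o = *p;  // 'o' starts as a lazy copy of '*p'
  //     Inner i;       // uninitialized: fields of 'i' are undefined
  //     o.in = i;      // lazy copy of 'i' lands at offset 0 of 'o'
  //     int a = o.y;   // must not come out undefined just because 'o.in'
  //                    // was copied from an uninitialized variable
  //   }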
2113 bool hasPartialLazyBinding = false; 2114 2115 const SubRegion *SR = R; 2116 while (SR) { 2117 const MemRegion *Base = SR->getSuperRegion(); 2118 if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) { 2119 if (D->getAs<nonloc::LazyCompoundVal>()) { 2120 hasPartialLazyBinding = true; 2121 break; 2122 } 2123 2124 return *D; 2125 } 2126 2127 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) { 2128 NonLoc index = ER->getIndex(); 2129 if (!index.isConstant()) 2130 hasSymbolicIndex = true; 2131 } 2132 2133 // If our super region is a field or element itself, walk up the region 2134 // hierarchy to see if there is a default value installed in an ancestor. 2135 SR = dyn_cast<SubRegion>(Base); 2136 } 2137 2138 if (R->hasStackNonParametersStorage()) { 2139 if (isa<ElementRegion>(R)) { 2140 // Currently we don't reason specially about Clang-style vectors. Check 2141 // if superR is a vector and if so return Unknown. 2142 if (const TypedValueRegion *typedSuperR = 2143 dyn_cast<TypedValueRegion>(R->getSuperRegion())) { 2144 if (typedSuperR->getValueType()->isVectorType()) 2145 return UnknownVal(); 2146 } 2147 } 2148 2149 // FIXME: We also need to take ElementRegions with symbolic indexes into 2150 // account. This case handles both directly accessing an ElementRegion 2151 // with a symbolic offset, but also fields within an element with 2152 // a symbolic offset. 2153 if (hasSymbolicIndex) 2154 return UnknownVal(); 2155 2156 // Additionally allow introspection of a block's internal layout. 2157 // Try to get direct binding if all other attempts failed thus far. 2158 // Else, return UndefinedVal() 2159 if (!hasPartialLazyBinding && !isa<BlockDataRegion>(R->getBaseRegion())) { 2160 if (const Optional<SVal> &V = B.getDefaultBinding(R)) 2161 return *V; 2162 return UndefinedVal(); 2163 } 2164 } 2165 2166 // All other values are symbolic. 2167 return svalBuilder.getRegionValueSymbolVal(R); 2168 } 2169 2170 SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B, 2171 const ObjCIvarRegion* R) { 2172 // Check if the region has a binding. 2173 if (const Optional<SVal> &V = B.getDirectBinding(R)) 2174 return *V; 2175 2176 const MemRegion *superR = R->getSuperRegion(); 2177 2178 // Check if the super region has a default binding. 2179 if (const Optional<SVal> &V = B.getDefaultBinding(superR)) { 2180 if (SymbolRef parentSym = V->getAsSymbol()) 2181 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 2182 2183 // Other cases: give up. 2184 return UnknownVal(); 2185 } 2186 2187 return getBindingForLazySymbol(R); 2188 } 2189 2190 SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B, 2191 const VarRegion *R) { 2192 2193 // Check if the region has a binding. 2194 if (Optional<SVal> V = B.getDirectBinding(R)) 2195 return *V; 2196 2197 if (Optional<SVal> V = B.getDefaultBinding(R)) 2198 return *V; 2199 2200 // Lazily derive a value for the VarRegion. 2201 const VarDecl *VD = R->getDecl(); 2202 const MemSpaceRegion *MS = R->getMemorySpace(); 2203 2204 // Arguments are always symbolic. 2205 if (isa<StackArgumentsSpaceRegion>(MS)) 2206 return svalBuilder.getRegionValueSymbolVal(R); 2207 2208 // Is 'VD' declared constant? If so, retrieve the constant value. 
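  // For instance (illustrative):
  //   const int N = 42;
  //   int x = N;   // with no binding for 'N', the load below folds to 42 via
  //                // the initializer; if the initializer cannot be evaluated
  //                // to a constant, the load is treated as Unknown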
2209 if (VD->getType().isConstQualified()) { 2210 if (const Expr *Init = VD->getAnyInitializer()) { 2211 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 2212 return *V; 2213 2214 // If the variable is const qualified and has an initializer but 2215 // we couldn't evaluate initializer to a value, treat the value as 2216 // unknown. 2217 return UnknownVal(); 2218 } 2219 } 2220 2221 // This must come after the check for constants because closure-captured 2222 // constant variables may appear in UnknownSpaceRegion. 2223 if (isa<UnknownSpaceRegion>(MS)) 2224 return svalBuilder.getRegionValueSymbolVal(R); 2225 2226 if (isa<GlobalsSpaceRegion>(MS)) { 2227 QualType T = VD->getType(); 2228 2229 // If we're in main(), then global initializers have not become stale yet. 2230 if (B.isMainAnalysis()) 2231 if (const Expr *Init = VD->getAnyInitializer()) 2232 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 2233 return *V; 2234 2235 // Function-scoped static variables are default-initialized to 0; if they 2236 // have an initializer, it would have been processed by now. 2237 // FIXME: This is only true when we're starting analysis from main(). 2238 // We're losing a lot of coverage here. 2239 if (isa<StaticGlobalSpaceRegion>(MS)) 2240 return svalBuilder.makeZeroVal(T); 2241 2242 if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) { 2243 assert(!V->getAs<nonloc::LazyCompoundVal>()); 2244 return *V; 2245 } 2246 2247 return svalBuilder.getRegionValueSymbolVal(R); 2248 } 2249 2250 return UndefinedVal(); 2251 } 2252 2253 SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) { 2254 // All other values are symbolic. 2255 return svalBuilder.getRegionValueSymbolVal(R); 2256 } 2257 2258 const RegionStoreManager::SValListTy & 2259 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) { 2260 // First, check the cache. 2261 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData()); 2262 if (I != LazyBindingsMap.end()) 2263 return I->second; 2264 2265 // If we don't have a list of values cached, start constructing it. 2266 SValListTy List; 2267 2268 const SubRegion *LazyR = LCV.getRegion(); 2269 RegionBindingsRef B = getRegionBindings(LCV.getStore()); 2270 2271 // If this region had /no/ bindings at the time, there are no interesting 2272 // values to return. 
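  // An illustrative sketch (hypothetical type 'S'):
  //   struct S { int *p; int n; };
  //   void f() {
  //     S s;
  //     s.p = malloc(4);  // 's.p' now holds a symbolic heap pointer
  //     S t = s;          // 't' is bound to a lazy copy of 's'
  //   }
  // The symbolic pointer stored for 's.p' is an "interesting" value of that
  // lazy copy: it is collected below and kept alive while the copy is
  // reachable, whereas constants and unknown/undefined values are skipped.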
2273 const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion()); 2274 if (!Cluster) 2275 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2276 2277 SmallVector<BindingPair, 32> Bindings; 2278 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR, 2279 /*IncludeAllDefaultBindings=*/true); 2280 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 2281 E = Bindings.end(); 2282 I != E; ++I) { 2283 SVal V = I->second; 2284 if (V.isUnknownOrUndef() || V.isConstant()) 2285 continue; 2286 2287 if (auto InnerLCV = V.getAs<nonloc::LazyCompoundVal>()) { 2288 const SValListTy &InnerList = getInterestingValues(*InnerLCV); 2289 List.insert(List.end(), InnerList.begin(), InnerList.end()); 2290 } 2291 2292 List.push_back(V); 2293 } 2294 2295 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2296 } 2297 2298 NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B, 2299 const TypedValueRegion *R) { 2300 if (Optional<nonloc::LazyCompoundVal> V = 2301 getExistingLazyBinding(svalBuilder, B, R, false)) 2302 return *V; 2303 2304 return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R); 2305 } 2306 2307 static bool isRecordEmpty(const RecordDecl *RD) { 2308 if (!RD->field_empty()) 2309 return false; 2310 if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) 2311 return CRD->getNumBases() == 0; 2312 return true; 2313 } 2314 2315 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B, 2316 const TypedValueRegion *R) { 2317 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl(); 2318 if (!RD->getDefinition() || isRecordEmpty(RD)) 2319 return UnknownVal(); 2320 2321 return createLazyBinding(B, R); 2322 } 2323 2324 SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B, 2325 const TypedValueRegion *R) { 2326 assert(Ctx.getAsConstantArrayType(R->getValueType()) && 2327 "Only constant array types can have compound bindings."); 2328 2329 return createLazyBinding(B, R); 2330 } 2331 2332 bool RegionStoreManager::includedInBindings(Store store, 2333 const MemRegion *region) const { 2334 RegionBindingsRef B = getRegionBindings(store); 2335 region = region->getBaseRegion(); 2336 2337 // Quick path: if the base is the head of a cluster, the region is live. 2338 if (B.lookup(region)) 2339 return true; 2340 2341 // Slow path: if the region is the VALUE of any binding, it is live. 2342 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) { 2343 const ClusterBindings &Cluster = RI.getData(); 2344 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2345 CI != CE; ++CI) { 2346 const SVal &D = CI.getData(); 2347 if (const MemRegion *R = D.getAsRegion()) 2348 if (R->getBaseRegion() == region) 2349 return true; 2350 } 2351 } 2352 2353 return false; 2354 } 2355 2356 //===----------------------------------------------------------------------===// 2357 // Binding values to regions. 
2358 //===----------------------------------------------------------------------===// 2359 2360 StoreRef RegionStoreManager::killBinding(Store ST, Loc L) { 2361 if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>()) 2362 if (const MemRegion* R = LV->getRegion()) 2363 return StoreRef(getRegionBindings(ST).removeBinding(R) 2364 .asImmutableMap() 2365 .getRootWithoutRetain(), 2366 *this); 2367 2368 return StoreRef(ST, *this); 2369 } 2370 2371 RegionBindingsRef 2372 RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) { 2373 if (L.getAs<loc::ConcreteInt>()) 2374 return B; 2375 2376 // If we get here, the location should be a region. 2377 const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion(); 2378 2379 // Check if the region is a struct region. 2380 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) { 2381 QualType Ty = TR->getValueType(); 2382 if (Ty->isArrayType()) 2383 return bindArray(B, TR, V); 2384 if (Ty->isStructureOrClassType()) 2385 return bindStruct(B, TR, V); 2386 if (Ty->isVectorType()) 2387 return bindVector(B, TR, V); 2388 if (Ty->isUnionType()) 2389 return bindAggregate(B, TR, V); 2390 } 2391 2392 // Binding directly to a symbolic region should be treated as binding 2393 // to element 0. 2394 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) 2395 R = GetElementZeroRegion(SR, SR->getPointeeStaticType()); 2396 2397 assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) && 2398 "'this' pointer is not an l-value and is not assignable"); 2399 2400 // Clear out bindings that may overlap with this binding. 2401 RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R)); 2402 2403 // LazyCompoundVals should be always bound as 'default' bindings. 2404 auto KeyKind = isa<nonloc::LazyCompoundVal>(V) ? BindingKey::Default 2405 : BindingKey::Direct; 2406 return NewB.addBinding(BindingKey::Make(R, KeyKind), V); 2407 } 2408 2409 RegionBindingsRef 2410 RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B, 2411 const MemRegion *R, 2412 QualType T) { 2413 SVal V; 2414 2415 if (Loc::isLocType(T)) 2416 V = svalBuilder.makeNullWithType(T); 2417 else if (T->isIntegralOrEnumerationType()) 2418 V = svalBuilder.makeZeroVal(T); 2419 else if (T->isStructureOrClassType() || T->isArrayType()) { 2420 // Set the default value to a zero constant when it is a structure 2421 // or array. The type doesn't really matter. 2422 V = svalBuilder.makeZeroVal(Ctx.IntTy); 2423 } 2424 else { 2425 // We can't represent values of this type, but we still need to set a value 2426 // to record that the region has been initialized. 2427 // If this assertion ever fires, a new case should be added above -- we 2428 // should know how to default-initialize any value we can symbolicate. 2429 assert(!SymbolManager::canSymbolicate(T) && "This type is representable"); 2430 V = UnknownVal(); 2431 } 2432 2433 return B.addBinding(R, BindingKey::Default, V); 2434 } 2435 2436 Optional<RegionBindingsRef> RegionStoreManager::tryBindSmallArray( 2437 RegionBindingsConstRef B, const TypedValueRegion *R, const ArrayType *AT, 2438 nonloc::LazyCompoundVal LCV) { 2439 2440 auto CAT = dyn_cast<ConstantArrayType>(AT); 2441 2442 // If we don't know the size, create a lazyCompoundVal instead. 2443 if (!CAT) 2444 return std::nullopt; 2445 2446 QualType Ty = CAT->getElementType(); 2447 if (!(Ty->isScalarType() || Ty->isReferenceType())) 2448 return std::nullopt; 2449 2450 // If the array is too big, create a LCV instead. 
2451 uint64_t ArrSize = CAT->getSize().getLimitedValue(); 2452 if (ArrSize > SmallArrayLimit) 2453 return std::nullopt; 2454 2455 RegionBindingsRef NewB = B; 2456 2457 for (uint64_t i = 0; i < ArrSize; ++i) { 2458 auto Idx = svalBuilder.makeArrayIndex(i); 2459 const ElementRegion *SrcER = 2460 MRMgr.getElementRegion(Ty, Idx, LCV.getRegion(), Ctx); 2461 SVal V = getBindingForElement(getRegionBindings(LCV.getStore()), SrcER); 2462 2463 const ElementRegion *DstER = MRMgr.getElementRegion(Ty, Idx, R, Ctx); 2464 NewB = bind(NewB, loc::MemRegionVal(DstER), V); 2465 } 2466 2467 return NewB; 2468 } 2469 2470 RegionBindingsRef 2471 RegionStoreManager::bindArray(RegionBindingsConstRef B, 2472 const TypedValueRegion* R, 2473 SVal Init) { 2474 2475 const ArrayType *AT =cast<ArrayType>(Ctx.getCanonicalType(R->getValueType())); 2476 QualType ElementTy = AT->getElementType(); 2477 Optional<uint64_t> Size; 2478 2479 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT)) 2480 Size = CAT->getSize().getZExtValue(); 2481 2482 // Check if the init expr is a literal. If so, bind the rvalue instead. 2483 // FIXME: It's not responsibility of the Store to transform this lvalue 2484 // to rvalue. ExprEngine or maybe even CFG should do this before binding. 2485 if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) { 2486 SVal V = getBinding(B.asStore(), *MRV, R->getValueType()); 2487 return bindAggregate(B, R, V); 2488 } 2489 2490 // Handle lazy compound values. 2491 if (Optional<nonloc::LazyCompoundVal> LCV = 2492 Init.getAs<nonloc::LazyCompoundVal>()) { 2493 if (Optional<RegionBindingsRef> NewB = tryBindSmallArray(B, R, AT, *LCV)) 2494 return *NewB; 2495 2496 return bindAggregate(B, R, Init); 2497 } 2498 2499 if (Init.isUnknown()) 2500 return bindAggregate(B, R, UnknownVal()); 2501 2502 // Remaining case: explicit compound values. 2503 const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>(); 2504 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end(); 2505 uint64_t i = 0; 2506 2507 RegionBindingsRef NewB(B); 2508 2509 for (; Size ? i < *Size : true; ++i, ++VI) { 2510 // The init list might be shorter than the array length. 2511 if (VI == VE) 2512 break; 2513 2514 const NonLoc &Idx = svalBuilder.makeArrayIndex(i); 2515 const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx); 2516 2517 if (ElementTy->isStructureOrClassType()) 2518 NewB = bindStruct(NewB, ER, *VI); 2519 else if (ElementTy->isArrayType()) 2520 NewB = bindArray(NewB, ER, *VI); 2521 else 2522 NewB = bind(NewB, loc::MemRegionVal(ER), *VI); 2523 } 2524 2525 // If the init list is shorter than the array length (or the array has 2526 // variable length), set the array default value. Values that are already set 2527 // are not overwritten. 2528 if (!Size || i < *Size) 2529 NewB = setImplicitDefaultValue(NewB, R, ElementTy); 2530 2531 return NewB; 2532 } 2533 2534 RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B, 2535 const TypedValueRegion* R, 2536 SVal V) { 2537 QualType T = R->getValueType(); 2538 const VectorType *VT = T->castAs<VectorType>(); // Use castAs for typedefs. 2539 2540 // Handle lazy compound values and symbolic values. 2541 if (isa<nonloc::LazyCompoundVal, nonloc::SymbolVal>(V)) 2542 return bindAggregate(B, R, V); 2543 2544 // We may get non-CompoundVal accidentally due to imprecise cast logic or 2545 // that we are binding symbolic struct value. Kill the field values, and if 2546 // the value is symbolic go and bind it as a "default" binding. 
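  // For instance (illustrative, 'v4' is a hypothetical typedef):
  //   typedef int v4 __attribute__((vector_size(16)));
  //   v4 a = {1, 2, 3, 4};  // typically arrives here as a CompoundVal:
  //                         // elements 0..3 are bound one by one in the
  //                         // loop below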
2547 if (!isa<nonloc::CompoundVal>(V)) { 2548 return bindAggregate(B, R, UnknownVal()); 2549 } 2550 2551 QualType ElemType = VT->getElementType(); 2552 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>(); 2553 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end(); 2554 unsigned index = 0, numElements = VT->getNumElements(); 2555 RegionBindingsRef NewB(B); 2556 2557 for ( ; index != numElements ; ++index) { 2558 if (VI == VE) 2559 break; 2560 2561 NonLoc Idx = svalBuilder.makeArrayIndex(index); 2562 const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx); 2563 2564 if (ElemType->isArrayType()) 2565 NewB = bindArray(NewB, ER, *VI); 2566 else if (ElemType->isStructureOrClassType()) 2567 NewB = bindStruct(NewB, ER, *VI); 2568 else 2569 NewB = bind(NewB, loc::MemRegionVal(ER), *VI); 2570 } 2571 return NewB; 2572 } 2573 2574 Optional<RegionBindingsRef> 2575 RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B, 2576 const TypedValueRegion *R, 2577 const RecordDecl *RD, 2578 nonloc::LazyCompoundVal LCV) { 2579 FieldVector Fields; 2580 2581 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD)) 2582 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0) 2583 return std::nullopt; 2584 2585 for (const auto *FD : RD->fields()) { 2586 if (FD->isUnnamedBitfield()) 2587 continue; 2588 2589 // If there are too many fields, or if any of the fields are aggregates, 2590 // just use the LCV as a default binding. 2591 if (Fields.size() == SmallStructLimit) 2592 return std::nullopt; 2593 2594 QualType Ty = FD->getType(); 2595 2596 // Zero length arrays are basically no-ops, so we also ignore them here. 2597 if (Ty->isConstantArrayType() && 2598 Ctx.getConstantArrayElementCount(Ctx.getAsConstantArrayType(Ty)) == 0) 2599 continue; 2600 2601 if (!(Ty->isScalarType() || Ty->isReferenceType())) 2602 return std::nullopt; 2603 2604 Fields.push_back(FD); 2605 } 2606 2607 RegionBindingsRef NewB = B; 2608 2609 for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I){ 2610 const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion()); 2611 SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR); 2612 2613 const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R); 2614 NewB = bind(NewB, loc::MemRegionVal(DestFR), V); 2615 } 2616 2617 return NewB; 2618 } 2619 2620 RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B, 2621 const TypedValueRegion *R, 2622 SVal V) { 2623 QualType T = R->getValueType(); 2624 assert(T->isStructureOrClassType()); 2625 2626 const RecordType* RT = T->castAs<RecordType>(); 2627 const RecordDecl *RD = RT->getDecl(); 2628 2629 if (!RD->isCompleteDefinition()) 2630 return B; 2631 2632 // Handle lazy compound values and symbolic values. 2633 if (Optional<nonloc::LazyCompoundVal> LCV = 2634 V.getAs<nonloc::LazyCompoundVal>()) { 2635 if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV)) 2636 return *NewB; 2637 return bindAggregate(B, R, V); 2638 } 2639 if (isa<nonloc::SymbolVal>(V)) 2640 return bindAggregate(B, R, V); 2641 2642 // We may get non-CompoundVal accidentally due to imprecise cast logic or 2643 // that we are binding symbolic struct value. Kill the field values, and if 2644 // the value is symbolic go and bind it as a "default" binding. 2645 if (V.isUnknown() || !isa<nonloc::CompoundVal>(V)) 2646 return bindAggregate(B, R, UnknownVal()); 2647 2648 // The raw CompoundVal is essentially a symbolic InitListExpr: an (immutable) 2649 // list of other values. 
It appears pretty much only when there's an actual 2650 // initializer list expression in the program, and the analyzer tries to 2651 // unwrap it as soon as possible. 2652 // This code is where such unwrap happens: when the compound value is put into 2653 // the object that it was supposed to initialize (it's an *initializer* list, 2654 // after all), instead of binding the whole value to the whole object, we bind 2655 // sub-values to sub-objects. Sub-values may themselves be compound values, 2656 // and in this case the procedure becomes recursive. 2657 // FIXME: The annoying part about compound values is that they don't carry 2658 // any sort of information about which value corresponds to which sub-object. 2659 // It's simply a list of values in the middle of nowhere; we expect to match 2660 // them to sub-objects, essentially, "by index": first value binds to 2661 // the first field, second value binds to the second field, etc. 2662 // It would have been much safer to organize non-lazy compound values as 2663 // a mapping from fields/bases to values. 2664 const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>(); 2665 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end(); 2666 2667 RegionBindingsRef NewB(B); 2668 2669 // In C++17 aggregates may have base classes, handle those as well. 2670 // They appear before fields in the initializer list / compound value. 2671 if (const auto *CRD = dyn_cast<CXXRecordDecl>(RD)) { 2672 // If the object was constructed with a constructor, its value is a 2673 // LazyCompoundVal. If it's a raw CompoundVal, it means that we're 2674 // performing aggregate initialization. The only exception from this 2675 // rule is sending an Objective-C++ message that returns a C++ object 2676 // to a nil receiver; in this case the semantics is to return a 2677 // zero-initialized object even if it's a C++ object that doesn't have 2678 // this sort of constructor; the CompoundVal is empty in this case. 2679 assert((CRD->isAggregate() || (Ctx.getLangOpts().ObjC && VI == VE)) && 2680 "Non-aggregates are constructed with a constructor!"); 2681 2682 for (const auto &B : CRD->bases()) { 2683 // (Multiple inheritance is fine though.) 2684 assert(!B.isVirtual() && "Aggregates cannot have virtual base classes!"); 2685 2686 if (VI == VE) 2687 break; 2688 2689 QualType BTy = B.getType(); 2690 assert(BTy->isStructureOrClassType() && "Base classes must be classes!"); 2691 2692 const CXXRecordDecl *BRD = BTy->getAsCXXRecordDecl(); 2693 assert(BRD && "Base classes must be C++ classes!"); 2694 2695 const CXXBaseObjectRegion *BR = 2696 MRMgr.getCXXBaseObjectRegion(BRD, R, /*IsVirtual=*/false); 2697 2698 NewB = bindStruct(NewB, BR, *VI); 2699 2700 ++VI; 2701 } 2702 } 2703 2704 RecordDecl::field_iterator FI, FE; 2705 2706 for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) { 2707 2708 if (VI == VE) 2709 break; 2710 2711 // Skip any unnamed bitfields to stay in sync with the initializers. 2712 if (FI->isUnnamedBitfield()) 2713 continue; 2714 2715 QualType FTy = FI->getType(); 2716 const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R); 2717 2718 if (FTy->isArrayType()) 2719 NewB = bindArray(NewB, FR, *VI); 2720 else if (FTy->isStructureOrClassType()) 2721 NewB = bindStruct(NewB, FR, *VI); 2722 else 2723 NewB = bind(NewB, loc::MemRegionVal(FR), *VI); 2724 ++VI; 2725 } 2726 2727 // There may be fewer values in the initialize list than the fields of struct. 
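  // For instance (illustrative):
  //   struct S { int a, b, c; };
  //   S s = {1, 2};  // 'a' and 'b' receive direct bindings; the branch below
  //                  // adds a default binding of 0 that covers 'c'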
2728 if (FI != FE) { 2729 NewB = NewB.addBinding(R, BindingKey::Default, 2730 svalBuilder.makeIntVal(0, false)); 2731 } 2732 2733 return NewB; 2734 } 2735 2736 RegionBindingsRef 2737 RegionStoreManager::bindAggregate(RegionBindingsConstRef B, 2738 const TypedRegion *R, 2739 SVal Val) { 2740 // Remove the old bindings, using 'R' as the root of all regions 2741 // we will invalidate. Then add the new binding. 2742 return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val); 2743 } 2744 2745 //===----------------------------------------------------------------------===// 2746 // State pruning. 2747 //===----------------------------------------------------------------------===// 2748 2749 namespace { 2750 class RemoveDeadBindingsWorker 2751 : public ClusterAnalysis<RemoveDeadBindingsWorker> { 2752 SmallVector<const SymbolicRegion *, 12> Postponed; 2753 SymbolReaper &SymReaper; 2754 const StackFrameContext *CurrentLCtx; 2755 2756 public: 2757 RemoveDeadBindingsWorker(RegionStoreManager &rm, 2758 ProgramStateManager &stateMgr, 2759 RegionBindingsRef b, SymbolReaper &symReaper, 2760 const StackFrameContext *LCtx) 2761 : ClusterAnalysis<RemoveDeadBindingsWorker>(rm, stateMgr, b), 2762 SymReaper(symReaper), CurrentLCtx(LCtx) {} 2763 2764 // Called by ClusterAnalysis. 2765 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C); 2766 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 2767 using ClusterAnalysis<RemoveDeadBindingsWorker>::VisitCluster; 2768 2769 using ClusterAnalysis::AddToWorkList; 2770 2771 bool AddToWorkList(const MemRegion *R); 2772 2773 bool UpdatePostponed(); 2774 void VisitBinding(SVal V); 2775 }; 2776 } 2777 2778 bool RemoveDeadBindingsWorker::AddToWorkList(const MemRegion *R) { 2779 const MemRegion *BaseR = R->getBaseRegion(); 2780 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 2781 } 2782 2783 void RemoveDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR, 2784 const ClusterBindings &C) { 2785 2786 if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) { 2787 if (SymReaper.isLive(VR)) 2788 AddToWorkList(baseR, &C); 2789 2790 return; 2791 } 2792 2793 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) { 2794 if (SymReaper.isLive(SR->getSymbol())) 2795 AddToWorkList(SR, &C); 2796 else 2797 Postponed.push_back(SR); 2798 2799 return; 2800 } 2801 2802 if (isa<NonStaticGlobalSpaceRegion>(baseR)) { 2803 AddToWorkList(baseR, &C); 2804 return; 2805 } 2806 2807 // CXXThisRegion in the current or parent location context is live. 2808 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) { 2809 const auto *StackReg = 2810 cast<StackArgumentsSpaceRegion>(TR->getSuperRegion()); 2811 const StackFrameContext *RegCtx = StackReg->getStackFrame(); 2812 if (CurrentLCtx && 2813 (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx))) 2814 AddToWorkList(TR, &C); 2815 } 2816 } 2817 2818 void RemoveDeadBindingsWorker::VisitCluster(const MemRegion *baseR, 2819 const ClusterBindings *C) { 2820 if (!C) 2821 return; 2822 2823 // Mark the symbol for any SymbolicRegion with live bindings as live itself. 2824 // This means we should continue to track that symbol. 2825 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR)) 2826 SymReaper.markLive(SymR->getSymbol()); 2827 2828 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) { 2829 // Element index of a binding key is live. 
2830 SymReaper.markElementIndicesLive(I.getKey().getRegion()); 2831 2832 VisitBinding(I.getData()); 2833 } 2834 } 2835 2836 void RemoveDeadBindingsWorker::VisitBinding(SVal V) { 2837 // Is it a LazyCompoundVal? All referenced regions are live as well. 2838 // The LazyCompoundVal itself is not live but should be readable. 2839 if (auto LCS = V.getAs<nonloc::LazyCompoundVal>()) { 2840 SymReaper.markLazilyCopied(LCS->getRegion()); 2841 2842 for (SVal V : RM.getInterestingValues(*LCS)) { 2843 if (auto DepLCS = V.getAs<nonloc::LazyCompoundVal>()) 2844 SymReaper.markLazilyCopied(DepLCS->getRegion()); 2845 else 2846 VisitBinding(V); 2847 } 2848 2849 return; 2850 } 2851 2852 // If V is a region, then add it to the worklist. 2853 if (const MemRegion *R = V.getAsRegion()) { 2854 AddToWorkList(R); 2855 SymReaper.markLive(R); 2856 2857 // All regions captured by a block are also live. 2858 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) { 2859 BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(), 2860 E = BR->referenced_vars_end(); 2861 for ( ; I != E; ++I) 2862 AddToWorkList(I.getCapturedRegion()); 2863 } 2864 } 2865 2866 2867 // Update the set of live symbols. 2868 for (auto SI = V.symbol_begin(), SE = V.symbol_end(); SI!=SE; ++SI) 2869 SymReaper.markLive(*SI); 2870 } 2871 2872 bool RemoveDeadBindingsWorker::UpdatePostponed() { 2873 // See if any postponed SymbolicRegions are actually live now, after 2874 // having done a scan. 2875 bool Changed = false; 2876 2877 for (auto I = Postponed.begin(), E = Postponed.end(); I != E; ++I) { 2878 if (const SymbolicRegion *SR = *I) { 2879 if (SymReaper.isLive(SR->getSymbol())) { 2880 Changed |= AddToWorkList(SR); 2881 *I = nullptr; 2882 } 2883 } 2884 } 2885 2886 return Changed; 2887 } 2888 2889 StoreRef RegionStoreManager::removeDeadBindings(Store store, 2890 const StackFrameContext *LCtx, 2891 SymbolReaper& SymReaper) { 2892 RegionBindingsRef B = getRegionBindings(store); 2893 RemoveDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx); 2894 W.GenerateClusters(); 2895 2896 // Enqueue the region roots onto the worklist. 2897 for (SymbolReaper::region_iterator I = SymReaper.region_begin(), 2898 E = SymReaper.region_end(); I != E; ++I) { 2899 W.AddToWorkList(*I); 2900 } 2901 2902 do W.RunWorkList(); while (W.UpdatePostponed()); 2903 2904 // We have now scanned the store, marking reachable regions and symbols 2905 // as live. We now remove all the regions that are dead from the store 2906 // as well as update DSymbols with the set symbols that are now dead. 2907 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 2908 const MemRegion *Base = I.getKey(); 2909 2910 // If the cluster has been visited, we know the region has been marked. 2911 // Otherwise, remove the dead entry. 2912 if (!W.isVisited(Base)) 2913 B = B.remove(Base); 2914 } 2915 2916 return StoreRef(B.asStore(), *this); 2917 } 2918 2919 //===----------------------------------------------------------------------===// 2920 // Utility methods. 
2921 //===----------------------------------------------------------------------===// 2922 2923 void RegionStoreManager::printJson(raw_ostream &Out, Store S, const char *NL, 2924 unsigned int Space, bool IsDot) const { 2925 RegionBindingsRef Bindings = getRegionBindings(S); 2926 2927 Indent(Out, Space, IsDot) << "\"store\": "; 2928 2929 if (Bindings.isEmpty()) { 2930 Out << "null," << NL; 2931 return; 2932 } 2933 2934 Out << "{ \"pointer\": \"" << Bindings.asStore() << "\", \"items\": [" << NL; 2935 Bindings.printJson(Out, NL, Space + 1, IsDot); 2936 Indent(Out, Space, IsDot) << "]}," << NL; 2937 } 2938
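// For a non-empty store the emitted JSON is shaped roughly like (illustrative,
// with addresses and per-cluster items elided):
//   "store": { "pointer": "0x...", "items": [
//     ...bindings printed by RegionBindingsRef::printJson...
//   ]},
// and it is printed as "store": null, when there are no bindings.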