//== ArrayBoundCheckerV2.cpp ------------------------------------*- C++ -*--==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines ArrayBoundCheckerV2, a path-sensitive checker that looks
// for out-of-bounds array element accesses.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/CharUnits.h"
#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/StaticAnalyzer/Checkers/Taint.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/APSIntType.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/DynamicExtent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/raw_ostream.h"
#include <optional>

using namespace clang;
using namespace ento;
using namespace taint;

namespace {
class ArrayBoundCheckerV2 : public Checker<check::Location> {
  mutable std::unique_ptr<BuiltinBug> BT;
  mutable std::unique_ptr<BugType> TaintBT;

  enum OOB_Kind { OOB_Precedes, OOB_Exceeds };

  void reportOOB(CheckerContext &C, ProgramStateRef errorState,
                 OOB_Kind kind) const;
  void reportTaintOOB(CheckerContext &C, ProgramStateRef errorState,
                      SVal TaintedSVal) const;

public:
  void checkLocation(SVal l, bool isLoad, const Stmt *S,
                     CheckerContext &C) const;
};

// FIXME: Eventually replace RegionRawOffset with this class.
class RegionRawOffsetV2 {
private:
  const SubRegion *baseRegion;
  SVal byteOffset;

  RegionRawOffsetV2() : baseRegion(nullptr), byteOffset(UnknownVal()) {}

public:
  RegionRawOffsetV2(const SubRegion *base, SVal offset)
      : baseRegion(base), byteOffset(offset) {}

  NonLoc getByteOffset() const { return byteOffset.castAs<NonLoc>(); }
  const SubRegion *getRegion() const { return baseRegion; }

  static RegionRawOffsetV2 computeOffset(ProgramStateRef state,
                                         SValBuilder &svalBuilder,
                                         SVal location);

  void dump() const;
  void dumpToStream(raw_ostream &os) const;
};
} // namespace

static SVal computeExtentBegin(SValBuilder &svalBuilder,
                               const MemRegion *region) {
  const MemSpaceRegion *SR = region->getMemorySpace();
  if (SR->getKind() == MemRegion::UnknownSpaceRegionKind)
    return UnknownVal();
  else
    return svalBuilder.makeZeroArrayIndex();
}

// TODO: Once the constraint manager is smart enough to handle non-simplified
// symbolic expressions, remove this function. Note that this cannot be used
// in the constraint manager as-is, since it does not handle overflows. It is
// safe to assume, however, that memory offsets will not overflow.
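// As a worked example (with hypothetical values): simplifying the symbolic
// offset "(x + 1) * 4" against a concrete extent of 20 first divides out the
// BO_Mul scale, leaving "x + 1" versus 5, and then subtracts the BO_Add
// constant, leaving "x" versus 4, a form the constraint manager can reason
// about directly.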
static std::pair<NonLoc, nonloc::ConcreteInt>
getSimplifiedOffsets(NonLoc offset, nonloc::ConcreteInt extent,
                     SValBuilder &svalBuilder) {
  std::optional<nonloc::SymbolVal> SymVal = offset.getAs<nonloc::SymbolVal>();
  if (SymVal && SymVal->isExpression()) {
    if (const SymIntExpr *SIE = dyn_cast<SymIntExpr>(SymVal->getSymbol())) {
      llvm::APSInt constant =
          APSIntType(extent.getValue()).convert(SIE->getRHS());
      switch (SIE->getOpcode()) {
      case BO_Mul:
        // The constant should never be 0 here, since it is the result of
        // scaling by the size of a type, which is never 0.
        if ((extent.getValue() % constant) != 0)
          return std::pair<NonLoc, nonloc::ConcreteInt>(offset, extent);
        else
          return getSimplifiedOffsets(
              nonloc::SymbolVal(SIE->getLHS()),
              svalBuilder.makeIntVal(extent.getValue() / constant),
              svalBuilder);
      case BO_Add:
        return getSimplifiedOffsets(
            nonloc::SymbolVal(SIE->getLHS()),
            svalBuilder.makeIntVal(extent.getValue() - constant), svalBuilder);
      default:
        break;
      }
    }
  }

  return std::pair<NonLoc, nonloc::ConcreteInt>(offset, extent);
}

void ArrayBoundCheckerV2::checkLocation(SVal location, bool isLoad,
                                        const Stmt *LoadS,
                                        CheckerContext &checkerContext) const {

  // NOTE: Instead of using ProgramState::assumeInBound(), we are prototyping
  // some new logic here that reasons directly about memory region extents.
  // Once that logic is more mature, we can bring it back to assumeInBound()
  // for all clients to use.
  //
  // The algorithm used here for bounds checking is to see if the memory
  // access is within the extent of the base region. Since we have some
  // flexibility in defining the base region, we can achieve various levels
  // of conservatism in our buffer overflow checking.
  ProgramStateRef state = checkerContext.getState();

  SValBuilder &svalBuilder = checkerContext.getSValBuilder();
  const RegionRawOffsetV2 &rawOffset =
      RegionRawOffsetV2::computeOffset(state, svalBuilder, location);

  if (!rawOffset.getRegion())
    return;

  NonLoc rawOffsetVal = rawOffset.getByteOffset();

  // CHECK LOWER BOUND: Is byteOffset < extent begin?
  // If so, we are doing a load/store before the first valid offset in the
  // memory region.

  SVal extentBegin = computeExtentBegin(svalBuilder, rawOffset.getRegion());

  if (std::optional<NonLoc> NV = extentBegin.getAs<NonLoc>()) {
    if (auto ConcreteNV = NV->getAs<nonloc::ConcreteInt>()) {
      std::pair<NonLoc, nonloc::ConcreteInt> simplifiedOffsets =
          getSimplifiedOffsets(rawOffset.getByteOffset(), *ConcreteNV,
                               svalBuilder);
      rawOffsetVal = simplifiedOffsets.first;
      *NV = simplifiedOffsets.second;
    }

    SVal lowerBound = svalBuilder.evalBinOpNN(state, BO_LT, rawOffsetVal, *NV,
                                              svalBuilder.getConditionType());

    std::optional<NonLoc> lowerBoundToCheck = lowerBound.getAs<NonLoc>();
    if (!lowerBoundToCheck)
      return;

    ProgramStateRef state_precedesLowerBound, state_withinLowerBound;
    std::tie(state_precedesLowerBound, state_withinLowerBound) =
        state->assume(*lowerBoundToCheck);

    // Are we constrained enough to definitely precede the lower bound?
    if (state_precedesLowerBound && !state_withinLowerBound) {
      reportOOB(checkerContext, state_precedesLowerBound, OOB_Precedes);
      return;
    }

    // Otherwise, assume the constraint of the lower bound.
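    // (For example, with "int buf[4];" and an access "buf[i]" where "i" is
    // known to be negative, only state_precedesLowerBound is feasible and we
    // report above; where "i" is known to be non-negative, only
    // state_withinLowerBound survives the assume() and checking continues.)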
    assert(state_withinLowerBound);
    state = state_withinLowerBound;
  }

  do {
    // CHECK UPPER BOUND: Is byteOffset >= size(baseRegion)? If so, we are
    // doing a load/store after the last valid offset.
    const MemRegion *MR = rawOffset.getRegion();
    DefinedOrUnknownSVal Size = getDynamicExtent(state, MR, svalBuilder);
    if (!isa<NonLoc>(Size))
      break;

    if (auto ConcreteSize = Size.getAs<nonloc::ConcreteInt>()) {
      std::pair<NonLoc, nonloc::ConcreteInt> simplifiedOffsets =
          getSimplifiedOffsets(rawOffset.getByteOffset(), *ConcreteSize,
                               svalBuilder);
      rawOffsetVal = simplifiedOffsets.first;
      Size = simplifiedOffsets.second;
    }

    SVal upperbound = svalBuilder.evalBinOpNN(state, BO_GE, rawOffsetVal,
                                              Size.castAs<NonLoc>(),
                                              svalBuilder.getConditionType());

    std::optional<NonLoc> upperboundToCheck = upperbound.getAs<NonLoc>();
    if (!upperboundToCheck)
      break;

    ProgramStateRef state_exceedsUpperBound, state_withinUpperBound;
    std::tie(state_exceedsUpperBound, state_withinUpperBound) =
        state->assume(*upperboundToCheck);

    // If we are under-constrained and the index variables are tainted, report.
    if (state_exceedsUpperBound && state_withinUpperBound) {
      SVal ByteOffset = rawOffset.getByteOffset();
      if (isTainted(state, ByteOffset)) {
        reportTaintOOB(checkerContext, state_exceedsUpperBound, ByteOffset);
        return;
      }
    } else if (state_exceedsUpperBound) {
      // If we are constrained enough to definitely exceed the upper bound,
      // report.
      assert(!state_withinUpperBound);
      reportOOB(checkerContext, state_exceedsUpperBound, OOB_Exceeds);
      return;
    }

    assert(state_withinUpperBound);
    state = state_withinUpperBound;
  } while (false);

  checkerContext.addTransition(state);
}

void ArrayBoundCheckerV2::reportTaintOOB(CheckerContext &checkerContext,
                                         ProgramStateRef errorState,
                                         SVal TaintedSVal) const {
  ExplodedNode *errorNode = checkerContext.generateErrorNode(errorState);
  if (!errorNode)
    return;

  if (!TaintBT)
    TaintBT.reset(
        new BugType(this, "Out-of-bound access", categories::TaintedData));

  SmallString<256> buf;
  llvm::raw_svector_ostream os(buf);
  os << "Out of bound memory access (index is tainted)";
  auto BR =
      std::make_unique<PathSensitiveBugReport>(*TaintBT, os.str(), errorNode);

  // Track back the propagation of taintedness.
  for (SymbolRef Sym : getTaintedSymbols(errorState, TaintedSVal)) {
    BR->markInteresting(Sym);
  }

  checkerContext.emitReport(std::move(BR));
}

void ArrayBoundCheckerV2::reportOOB(CheckerContext &checkerContext,
                                    ProgramStateRef errorState,
                                    OOB_Kind kind) const {

  ExplodedNode *errorNode = checkerContext.generateErrorNode(errorState);
  if (!errorNode)
    return;

  if (!BT)
    BT.reset(new BuiltinBug(this, "Out-of-bound access"));

  // FIXME: These diagnostics are preliminary. We should get far better
  // diagnostics for explaining buffer overruns.
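  // For now the message only records the direction of the violation, e.g. an
  // underflow such as "buf[-1]" yields "Out of bound memory access (accessed
  // memory precedes memory block)".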

  SmallString<256> buf;
  llvm::raw_svector_ostream os(buf);
  os << "Out of bound memory access ";
  switch (kind) {
  case OOB_Precedes:
    os << "(accessed memory precedes memory block)";
    break;
  case OOB_Exceeds:
    os << "(access exceeds upper limit of memory block)";
    break;
  }
  auto BR = std::make_unique<PathSensitiveBugReport>(*BT, os.str(), errorNode);
  checkerContext.emitReport(std::move(BR));
}

#ifndef NDEBUG
LLVM_DUMP_METHOD void RegionRawOffsetV2::dump() const {
  dumpToStream(llvm::errs());
}

void RegionRawOffsetV2::dumpToStream(raw_ostream &os) const {
  os << "raw_offset_v2{" << getRegion() << ',' << getByteOffset() << '}';
}
#endif

// Returns a value to be used by 'computeOffset'. If 'val' is undefined, we
// substitute '0'; otherwise we return 'val' unchanged.
static inline SVal getValue(SVal val, SValBuilder &svalBuilder) {
  return val.isUndef() ? svalBuilder.makeZeroArrayIndex() : val;
}

// Scale a base value by a scaling factor, and return the scaled value as an
// SVal. Used by 'computeOffset'.
static inline SVal scaleValue(ProgramStateRef state,
                              NonLoc baseVal, CharUnits scaling,
                              SValBuilder &sb) {
  return sb.evalBinOpNN(state, BO_Mul, baseVal,
                        sb.makeArrayIndex(scaling.getQuantity()),
                        sb.getArrayIndexType());
}

// Add an SVal to another, treating unknown and undefined values as summing
// to UnknownVal. Used by 'computeOffset'.
static SVal addValue(ProgramStateRef state, SVal x, SVal y,
                     SValBuilder &svalBuilder) {
  // We treat UnknownVals and UndefinedVals the same here because we only
  // care about computing offsets.
  if (x.isUnknownOrUndef() || y.isUnknownOrUndef())
    return UnknownVal();

  return svalBuilder.evalBinOpNN(state, BO_Add, x.castAs<NonLoc>(),
                                 y.castAs<NonLoc>(),
                                 svalBuilder.getArrayIndexType());
}

/// Compute a raw byte offset from a base region. Used for array bounds
/// checking.
RegionRawOffsetV2 RegionRawOffsetV2::computeOffset(ProgramStateRef state,
                                                   SValBuilder &svalBuilder,
                                                   SVal location) {
  const MemRegion *region = location.getAsRegion();
  SVal offset = UndefinedVal();

  while (region) {
    switch (region->getKind()) {
      default: {
        if (const SubRegion *subReg = dyn_cast<SubRegion>(region)) {
          offset = getValue(offset, svalBuilder);
          if (!offset.isUnknownOrUndef())
            return RegionRawOffsetV2(subReg, offset);
        }
        return RegionRawOffsetV2();
      }
      case MemRegion::ElementRegionKind: {
        const ElementRegion *elemReg = cast<ElementRegion>(region);
        SVal index = elemReg->getIndex();
        if (!isa<NonLoc>(index))
          return RegionRawOffsetV2();
        QualType elemType = elemReg->getElementType();
        // If the element is an incomplete type, go no further.
        ASTContext &astContext = svalBuilder.getContext();
        if (elemType->isIncompleteType())
          return RegionRawOffsetV2();

        // Update the offset.
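        // That is, offset += index * sizeof(elemType), computed symbolically;
        // any unknown or undefined operand makes addValue() collapse the sum
        // to UnknownVal, which ends the walk below.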
        offset = addValue(state,
                          getValue(offset, svalBuilder),
                          scaleValue(state,
                                     index.castAs<NonLoc>(),
                                     astContext.getTypeSizeInChars(elemType),
                                     svalBuilder),
                          svalBuilder);

        if (offset.isUnknownOrUndef())
          return RegionRawOffsetV2();

        region = elemReg->getSuperRegion();
        continue;
      }
    }
  }
  return RegionRawOffsetV2();
}

void ento::registerArrayBoundCheckerV2(CheckerManager &mgr) {
  mgr.registerChecker<ArrayBoundCheckerV2>();
}

bool ento::shouldRegisterArrayBoundCheckerV2(const CheckerManager &mgr) {
  return true;
}