//=======- PaddingChecker.cpp ------------------------------------*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a checker that checks for padding that could be
// removed by re-ordering members.
//
//===----------------------------------------------------------------------===//

#include "ClangSACheckers.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugReporter.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <numeric>

using namespace clang;
using namespace ento;

namespace {
/// Reports records (and arrays of records) whose current field order wastes
/// more than "AllowedPad" bytes of padding compared to an optimal reordering
/// of the same fields.
class PaddingChecker : public Checker<check::ASTDecl<TranslationUnitDecl>> {
private:
  // Lazily created in reportRecord; mutable because the checker callbacks
  // are const.
  mutable std::unique_ptr<BugType> PaddingBug;
  // Minimum number of excess padding bytes (after the array-size multiplier)
  // required before a report is emitted.  Read from the "AllowedPad"
  // analyzer option (default 24) in checkASTDecl.
  mutable int64_t AllowedPad;
  // BugReporter for the translation unit currently being checked; cached in
  // checkASTDecl so the visit* helpers and reportRecord can use it.
  mutable BugReporter *BR;

public:
  /// Entry point: traverses the whole translation unit with a local
  /// RecursiveASTVisitor so that template instantiations and implicit code
  /// (which the default checkAST* dispatch skips) are visited too.
  void checkASTDecl(const TranslationUnitDecl *TUD, AnalysisManager &MGR,
                    BugReporter &BRArg) const {
    BR = &BRArg;
    AllowedPad =
        MGR.getAnalyzerOptions().getOptionAsInteger("AllowedPad", 24, this);
    assert(AllowedPad >= 0 && "AllowedPad option should be non-negative");

    // The calls to checkAST* from AnalysisConsumer don't
    // visit template instantiations or lambda classes. We
    // want to visit those, so we make our own RecursiveASTVisitor.
    struct LocalVisitor : public RecursiveASTVisitor<LocalVisitor> {
      const PaddingChecker *Checker;
      bool shouldVisitTemplateInstantiations() const { return true; }
      bool shouldVisitImplicitCode() const { return true; }
      explicit LocalVisitor(const PaddingChecker *Checker) : Checker(Checker) {}
      bool VisitRecordDecl(const RecordDecl *RD) {
        Checker->visitRecord(RD);
        return true;
      }
      bool VisitVarDecl(const VarDecl *VD) {
        Checker->visitVariable(VD);
        return true;
      }
      // TODO: Visit array new and mallocs for arrays.
    };

    LocalVisitor visitor(this);
    visitor.TraverseDecl(const_cast<TranslationUnitDecl *>(TUD));
  }

  /// Look for records of overly padded types. If padding *
  /// PadMultiplier exceeds AllowedPad, then generate a report.
  /// PadMultiplier is used to share code with the array padding
  /// checker.
  void visitRecord(const RecordDecl *RD, uint64_t PadMultiplier = 1) const {
    if (shouldSkipDecl(RD))
      return;

    // TODO: Figure out why we are going through declarations and not only
    // definitions.
    if (!(RD = RD->getDefinition()))
      return;

    // This is the simplest correct case: a class with no fields and one base
    // class. Other cases are more complicated because of how the base classes
    // & fields might interact, so we don't bother dealing with them.
    // TODO: Support other combinations of base classes and fields.
    if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD))
      if (CXXRD->field_empty() && CXXRD->getNumBases() == 1)
        return visitRecord(CXXRD->bases().begin()->getType()->getAsRecordDecl(),
                           PadMultiplier);

    auto &ASTContext = RD->getASTContext();
    const ASTRecordLayout &RL = ASTContext.getASTRecordLayout(RD);
    assert(llvm::isPowerOf2_64(RL.getAlignment().getQuantity()));

    CharUnits BaselinePad = calculateBaselinePad(RD, ASTContext, RL);
    if (BaselinePad.isZero())
      return;

    CharUnits OptimalPad;
    SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
    std::tie(OptimalPad, OptimalFieldsOrder) =
        calculateOptimalPad(RD, ASTContext, RL);

    CharUnits DiffPad = PadMultiplier * (BaselinePad - OptimalPad);
    if (DiffPad.getQuantity() <= AllowedPad) {
      // The optimal layout can never pad more than the actual one, so a
      // negative difference would indicate a bug in one of the two
      // calculations.
      assert(!DiffPad.isNegative() && "DiffPad should not be negative");
      // There is not enough excess padding to trigger a warning.
      return;
    }
    reportRecord(RD, BaselinePad, OptimalPad, OptimalFieldsOrder);
  }

  /// Look for arrays of overly padded types. If the padding of the
  /// array type exceeds AllowedPad, then generate a report.
  void visitVariable(const VarDecl *VD) const {
    const ArrayType *ArrTy = VD->getType()->getAsArrayTypeUnsafe();
    if (ArrTy == nullptr)
      return;
    uint64_t Elts = 0;
    if (const ConstantArrayType *CArrTy = dyn_cast<ConstantArrayType>(ArrTy))
      Elts = CArrTy->getSize().getZExtValue();
    if (Elts == 0)
      return;
    const RecordType *RT = ArrTy->getElementType()->getAs<RecordType>();
    if (RT == nullptr)
      return;

    // TODO: Recurse into the fields to see if they have excess padding.
    // The element count scales the per-element padding when deciding
    // whether to warn.
    visitRecord(RT->getDecl(), Elts);
  }

  /// Returns true for records this checker cannot (or does not want to)
  /// analyze: declarations without a visible definition, constructs from
  /// system headers, unions, layouts complicated by base classes or virtual
  /// bases, dependent types, and records containing bit-fields or flexible
  /// array members.
  bool shouldSkipDecl(const RecordDecl *RD) const {
    // TODO: Figure out why we are going through declarations and not only
    // definitions.
    if (!(RD = RD->getDefinition()))
      return true;
    auto Location = RD->getLocation();
    // If the construct doesn't have a source file, then it's not something
    // we want to diagnose.
    if (!Location.isValid())
      return true;
    SrcMgr::CharacteristicKind Kind =
        BR->getSourceManager().getFileCharacteristic(Location);
    // Throw out all records that come from system headers.
    if (Kind != SrcMgr::C_User)
      return true;

    // Not going to attempt to optimize unions.
    if (RD->isUnion())
      return true;
    if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
      // Tail padding with base classes ends up being very complicated.
      // We will skip objects with base classes for now, unless they do not
      // have fields.
      // TODO: Handle more base class scenarios.
      if (!CXXRD->field_empty() && CXXRD->getNumBases() != 0)
        return true;
      // The field-less single-base case is handled by recursing into the
      // base in visitRecord; any other field-less shape is skipped here.
      if (CXXRD->field_empty() && CXXRD->getNumBases() != 1)
        return true;
      // Virtual bases are complicated, skipping those for now.
      if (CXXRD->getNumVBases() != 0)
        return true;
      // Can't layout a template, so skip it. We do still layout the
      // instantiations though.
      if (CXXRD->getTypeForDecl()->isDependentType())
        return true;
      if (CXXRD->getTypeForDecl()->isInstantiationDependentType())
        return true;
    }
    // How do you reorder fields if you haven't got any?
    else if (RD->field_empty())
      return true;

    auto IsTrickyField = [](const FieldDecl *FD) -> bool {
      // Bitfield layout is hard.
      if (FD->isBitField())
        return true;

      // Variable length arrays are tricky too.
      QualType Ty = FD->getType();
      if (Ty->isIncompleteArrayType())
        return true;
      return false;
    };

    if (std::any_of(RD->field_begin(), RD->field_end(), IsTrickyField))
      return true;
    return false;
  }

  /// Computes the total padding of \p RD as currently laid out: the gaps
  /// between consecutive fields plus the tail padding up to the record's
  /// size.  Starting the running offset at field 0's offset (rather than 0)
  /// excludes space used by vptrs / base subobjects from the total.
  static CharUnits calculateBaselinePad(const RecordDecl *RD,
                                        const ASTContext &ASTContext,
                                        const ASTRecordLayout &RL) {
    CharUnits PaddingSum;
    CharUnits Offset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
    for (const FieldDecl *FD : RD->fields()) {
      // This checker only cares about the padded size of the
      // field, and not the data size. If the field is a record
      // with tail padding, then we won't put that number in our
      // total because reordering fields won't fix that problem.
      CharUnits FieldSize = ASTContext.getTypeSizeInChars(FD->getType());
      auto FieldOffsetBits = RL.getFieldOffset(FD->getFieldIndex());
      CharUnits FieldOffset = ASTContext.toCharUnitsFromBits(FieldOffsetBits);
      PaddingSum += (FieldOffset - Offset);
      Offset = FieldOffset + FieldSize;
    }
    PaddingSum += RL.getSize() - Offset;
    return PaddingSum;
  }

  /// Optimal padding overview:
  /// 1. Find a close approximation to where we can place our first field.
  ///    This will usually be at offset 0.
  /// 2. Try to find the best field that can legally be placed at the current
  ///    offset.
  ///   a. "Best" is the largest alignment that is legal, but smallest size.
  ///      This is to account for overly aligned types.
  /// 3. If no fields can fit, pad by rounding the current offset up to the
  ///    smallest alignment requirement of our fields. Measure and track the
  ///    amount of padding added. Go back to 2.
  /// 4. Increment the current offset by the size of the chosen field.
  /// 5. Remove the chosen field from the set of future possibilities.
  /// 6. Go back to 2 if there are still unplaced fields.
  /// 7. Add tail padding by rounding the current offset up to the structure
  ///    alignment. Track the amount of padding added.
  /// Returns the optimal padding total and the field order that achieves it.
  static std::pair<CharUnits, SmallVector<const FieldDecl *, 20>>
  calculateOptimalPad(const RecordDecl *RD, const ASTContext &ASTContext,
                      const ASTRecordLayout &RL) {
    struct FieldInfo {
      CharUnits Align;
      CharUnits Size;
      const FieldDecl *Field;
      bool operator<(const FieldInfo &RHS) const {
        // Order from small alignments to large alignments,
        // then large sizes to small sizes.
        // then large field indices to small field indices
        return std::make_tuple(Align, -Size,
                               Field ? -static_cast<int>(Field->getFieldIndex())
                                     : 0) <
               std::make_tuple(
                   RHS.Align, -RHS.Size,
                   RHS.Field ? -static_cast<int>(RHS.Field->getFieldIndex())
                             : 0);
      }
    };
    SmallVector<FieldInfo, 20> Fields;
    auto GatherSizesAndAlignments = [](const FieldDecl *FD) {
      FieldInfo RetVal;
      RetVal.Field = FD;
      auto &Ctx = FD->getASTContext();
      std::tie(RetVal.Size, RetVal.Align) =
          Ctx.getTypeInfoInChars(FD->getType());
      assert(llvm::isPowerOf2_64(RetVal.Align.getQuantity()));
      // An alignas / aligned attribute on the field can raise the alignment
      // above that of the field's type.
      if (auto Max = FD->getMaxAlignment())
        RetVal.Align = std::max(Ctx.toCharUnitsFromBits(Max), RetVal.Align);
      return RetVal;
    };
    std::transform(RD->field_begin(), RD->field_end(),
                   std::back_inserter(Fields), GatherSizesAndAlignments);
    llvm::sort(Fields);
    // This lets us skip over vptrs and non-virtual bases,
    // so that we can just worry about the fields in our object.
    // Note that this does cause us to miss some cases where we
    // could pack more bytes in to a base class's tail padding.
    CharUnits NewOffset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
    CharUnits NewPad;
    SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
    while (!Fields.empty()) {
      // The largest alignment a field placed at NewOffset could legally
      // have is the largest power of two dividing NewOffset.
      unsigned TrailingZeros =
          llvm::countTrailingZeros((unsigned long long)NewOffset.getQuantity());
      // If NewOffset is zero, then countTrailingZeros will be 64. Shifting
      // 64 will overflow our unsigned long long. Shifting 63 will turn
      // our long long (and CharUnits internal type) negative. So shift 62.
      long long CurAlignmentBits = 1ull << (std::min)(TrailingZeros, 62u);
      CharUnits CurAlignment = CharUnits::fromQuantity(CurAlignmentBits);
      FieldInfo InsertPoint = {CurAlignment, CharUnits::Zero(), nullptr};
      auto CurBegin = Fields.begin();
      auto CurEnd = Fields.end();

      // In the typical case, this will find the last element
      // of the vector. We won't find a middle element unless
      // we started on a poorly aligned address or have an overly
      // aligned field.
      auto Iter = std::upper_bound(CurBegin, CurEnd, InsertPoint);
      if (Iter != CurBegin) {
        // We found a field that we can layout with the current alignment.
        --Iter;
        NewOffset += Iter->Size;
        OptimalFieldsOrder.push_back(Iter->Field);
        Fields.erase(Iter);
      } else {
        // We are poorly aligned, and we need to pad in order to layout another
        // field. Round up to at least the smallest field alignment that we
        // currently have.  (Fields is sorted by ascending alignment, so
        // Fields[0] has the smallest.)
        CharUnits NextOffset = NewOffset.alignTo(Fields[0].Align);
        NewPad += NextOffset - NewOffset;
        NewOffset = NextOffset;
      }
    }
    // Calculate tail padding.
    CharUnits NewSize = NewOffset.alignTo(RL.getAlignment());
    NewPad += NewSize - NewOffset;
    return {NewPad, std::move(OptimalFieldsOrder)};
  }

  /// Emits the "Excessive Padding" diagnostic for \p RD, including the
  /// current vs. optimal padding byte counts and the field order that
  /// achieves the optimum.
  void reportRecord(
      const RecordDecl *RD, CharUnits BaselinePad, CharUnits OptimalPad,
      const SmallVector<const FieldDecl *, 20> &OptimalFieldsOrder) const {
    if (!PaddingBug)
      PaddingBug =
          llvm::make_unique<BugType>(this, "Excessive Padding", "Performance");

    SmallString<100> Buf;
    llvm::raw_svector_ostream Os(Buf);
    Os << "Excessive padding in '";
    Os << QualType::getAsString(RD->getTypeForDecl(), Qualifiers(),
                                LangOptions())
       << "'";

    if (auto *TSD = dyn_cast<ClassTemplateSpecializationDecl>(RD)) {
      // TODO: make this show up better in the console output and in
      // the HTML. Maybe just make it show up in HTML like the path
      // diagnostics show.
      SourceLocation ILoc = TSD->getPointOfInstantiation();
      if (ILoc.isValid())
        Os << " instantiated here: "
           << ILoc.printToString(BR->getSourceManager());
    }

    Os << " (" << BaselinePad.getQuantity() << " padding bytes, where "
       << OptimalPad.getQuantity() << " is optimal). \n"
       << "Optimal fields order: \n";
    for (const auto *FD : OptimalFieldsOrder)
      Os << FD->getName() << ", \n";
    Os << "consider reordering the fields or adding explicit padding "
          "members.";

    PathDiagnosticLocation CELoc =
        PathDiagnosticLocation::create(RD, BR->getSourceManager());
    auto Report = llvm::make_unique<BugReport>(*PaddingBug, Os.str(), CELoc);
    Report->setDeclWithIssue(RD);
    Report->addRange(RD->getSourceRange());
    BR->emitReport(std::move(Report));
  }
};
} // namespace

void ento::registerPaddingChecker(CheckerManager &Mgr) {
  Mgr.registerChecker<PaddingChecker>();
}