//=======- PaddingChecker.cpp ------------------------------------*- C++ -*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a checker that checks for padding that could be
//  removed by re-ordering members.
//
//===----------------------------------------------------------------------===//

#include "ClangSACheckers.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/RecursiveASTVisitor.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugReporter.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <numeric>

using namespace clang;
using namespace ento;

namespace {
class PaddingChecker : public Checker<check::ASTDecl<TranslationUnitDecl>> {
private:
  mutable std::unique_ptr<BugType> PaddingBug;
  mutable int64_t AllowedPad;
  mutable BugReporter *BR;

public:
  void checkASTDecl(const TranslationUnitDecl *TUD, AnalysisManager &MGR,
                    BugReporter &BRArg) const {
    BR = &BRArg;
    AllowedPad =
        MGR.getAnalyzerOptions().getOptionAsInteger("AllowedPad", 24, this);
    assert(AllowedPad >= 0 && "AllowedPad option should be non-negative");

    // The calls to checkAST* from AnalysisConsumer don't
    // visit template instantiations or lambda classes. We
    // want to visit those, so we make our own RecursiveASTVisitor.
    struct LocalVisitor : public RecursiveASTVisitor<LocalVisitor> {
      const PaddingChecker *Checker;
      bool shouldVisitTemplateInstantiations() const { return true; }
      bool shouldVisitImplicitCode() const { return true; }
      explicit LocalVisitor(const PaddingChecker *Checker) : Checker(Checker) {}
      bool VisitRecordDecl(const RecordDecl *RD) {
        Checker->visitRecord(RD);
        return true;
      }
      bool VisitVarDecl(const VarDecl *VD) {
        Checker->visitVariable(VD);
        return true;
      }
      // TODO: Visit array new and mallocs for arrays.
    };

    LocalVisitor visitor(this);
    visitor.TraverseDecl(const_cast<TranslationUnitDecl *>(TUD));
  }

  /// Look for records of overly padded types. If padding *
  /// PadMultiplier exceeds AllowedPad, then generate a report.
  /// PadMultiplier is used to share code with the array padding
  /// checker.
  void visitRecord(const RecordDecl *RD, uint64_t PadMultiplier = 1) const {
    if (shouldSkipDecl(RD))
      return;

    auto &ASTContext = RD->getASTContext();
    const ASTRecordLayout &RL = ASTContext.getASTRecordLayout(RD);
    assert(llvm::isPowerOf2_64(RL.getAlignment().getQuantity()));

    CharUnits BaselinePad = calculateBaselinePad(RD, ASTContext, RL);
    if (BaselinePad.isZero())
      return;

    CharUnits OptimalPad;
    SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
    std::tie(OptimalPad, OptimalFieldsOrder) =
        calculateOptimalPad(RD, ASTContext, RL);

    CharUnits DiffPad = PadMultiplier * (BaselinePad - OptimalPad);
    if (DiffPad.getQuantity() <= AllowedPad) {
      assert(!DiffPad.isNegative() && "DiffPad should not be negative");
      // There is not enough excess padding to trigger a warning.
      return;
    }
    reportRecord(RD, BaselinePad, OptimalPad, OptimalFieldsOrder);
  }

  /// Look for arrays of overly padded types. If the padding of the
  /// array type exceeds AllowedPad, then generate a report.
  void visitVariable(const VarDecl *VD) const {
    const ArrayType *ArrTy = VD->getType()->getAsArrayTypeUnsafe();
    if (ArrTy == nullptr)
      return;
    uint64_t Elts = 0;
    if (const ConstantArrayType *CArrTy = dyn_cast<ConstantArrayType>(ArrTy))
      Elts = CArrTy->getSize().getZExtValue();
    if (Elts == 0)
      return;
    const RecordType *RT = ArrTy->getElementType()->getAs<RecordType>();
    if (RT == nullptr)
      return;

    // TODO: Recurse into the fields and base classes to see if any
    // of those have excess padding.
    visitRecord(RT->getDecl(), Elts);
  }

  bool shouldSkipDecl(const RecordDecl *RD) const {
    auto Location = RD->getLocation();
    // If the construct doesn't have a source file, then it's not something
    // we want to diagnose.
    if (!Location.isValid())
      return true;
    SrcMgr::CharacteristicKind Kind =
        BR->getSourceManager().getFileCharacteristic(Location);
    // Throw out all records that come from system headers.
    if (Kind != SrcMgr::C_User)
      return true;

    // Not going to attempt to optimize unions.
    if (RD->isUnion())
      return true;
    // How do you reorder fields if you haven't got any?
    if (RD->field_empty())
      return true;
    if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
      // Tail padding with base classes ends up being very complicated.
      // We will skip objects with base classes for now.
      if (CXXRD->getNumBases() != 0)
        return true;
      // Virtual bases are complicated, skipping those for now.
      if (CXXRD->getNumVBases() != 0)
        return true;
      // Can't layout a template, so skip it. We do still layout the
      // instantiations though.
      if (CXXRD->getTypeForDecl()->isDependentType())
        return true;
      if (CXXRD->getTypeForDecl()->isInstantiationDependentType())
        return true;
    }
    auto IsTrickyField = [](const FieldDecl *FD) -> bool {
      // Bitfield layout is hard.
      if (FD->isBitField())
        return true;

      // Variable length arrays are tricky too.
      QualType Ty = FD->getType();
      if (Ty->isIncompleteArrayType())
        return true;
      return false;
    };

    if (std::any_of(RD->field_begin(), RD->field_end(), IsTrickyField))
      return true;
    return false;
  }

  static CharUnits calculateBaselinePad(const RecordDecl *RD,
                                        const ASTContext &ASTContext,
                                        const ASTRecordLayout &RL) {
    CharUnits PaddingSum;
    CharUnits Offset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
    for (const FieldDecl *FD : RD->fields()) {
      // This checker only cares about the padded size of the
      // field, and not the data size. If the field is a record
      // with tail padding, then we won't put that number in our
      // total because reordering fields won't fix that problem.
      CharUnits FieldSize = ASTContext.getTypeSizeInChars(FD->getType());
      auto FieldOffsetBits = RL.getFieldOffset(FD->getFieldIndex());
      CharUnits FieldOffset = ASTContext.toCharUnitsFromBits(FieldOffsetBits);
      PaddingSum += (FieldOffset - Offset);
      Offset = FieldOffset + FieldSize;
    }
    PaddingSum += RL.getSize() - Offset;
    return PaddingSum;
  }

  /// Optimal padding overview:
  /// 1. Find a close approximation to where we can place our first field.
  ///    This will usually be at offset 0.
  /// 2. Try to find the best field that can legally be placed at the current
  ///    offset.
  ///    a. "Best" is the largest alignment that is legal, but smallest size.
  ///       This is to account for overly aligned types.
  /// 3. If no fields can fit, pad by rounding the current offset up to the
  ///    smallest alignment requirement of our fields. Measure and track the
  ///    amount of padding added. Go back to 2.
  /// 4. Increment the current offset by the size of the chosen field.
  /// 5. Remove the chosen field from the set of future possibilities.
  /// 6. Go back to 2 if there are still unplaced fields.
  /// 7. Add tail padding by rounding the current offset up to the structure
  ///    alignment. Track the amount of padding added.

  static std::pair<CharUnits, SmallVector<const FieldDecl *, 20>>
  calculateOptimalPad(const RecordDecl *RD, const ASTContext &ASTContext,
                      const ASTRecordLayout &RL) {
    struct FieldInfo {
      CharUnits Align;
      CharUnits Size;
      const FieldDecl *Field;
      bool operator<(const FieldInfo &RHS) const {
        // Order from small alignments to large alignments,
        // then large sizes to small sizes,
        // then large field indices to small field indices.
        return std::make_tuple(Align, -Size,
                               Field ? -static_cast<int>(Field->getFieldIndex())
                                     : 0) <
               std::make_tuple(
                   RHS.Align, -RHS.Size,
                   RHS.Field ? -static_cast<int>(RHS.Field->getFieldIndex())
                             : 0);
      }
    };
    SmallVector<FieldInfo, 20> Fields;
    auto GatherSizesAndAlignments = [](const FieldDecl *FD) {
      FieldInfo RetVal;
      RetVal.Field = FD;
      auto &Ctx = FD->getASTContext();
      std::tie(RetVal.Size, RetVal.Align) =
          Ctx.getTypeInfoInChars(FD->getType());
      assert(llvm::isPowerOf2_64(RetVal.Align.getQuantity()));
      if (auto Max = FD->getMaxAlignment())
        RetVal.Align = std::max(Ctx.toCharUnitsFromBits(Max), RetVal.Align);
      return RetVal;
    };
    std::transform(RD->field_begin(), RD->field_end(),
                   std::back_inserter(Fields), GatherSizesAndAlignments);
    llvm::sort(Fields);
    // This lets us skip over vptrs and non-virtual bases,
    // so that we can just worry about the fields in our object.
    // Note that this does cause us to miss some cases where we
    // could pack more bytes in to a base class's tail padding.
    CharUnits NewOffset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
    CharUnits NewPad;
    SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
    while (!Fields.empty()) {
      unsigned TrailingZeros =
          llvm::countTrailingZeros((unsigned long long)NewOffset.getQuantity());
      // If NewOffset is zero, then countTrailingZeros will be 64. Shifting
      // 64 will overflow our unsigned long long. Shifting 63 will turn
      // our long long (and CharUnits internal type) negative. So shift 62.
      long long CurAlignmentBits = 1ull << (std::min)(TrailingZeros, 62u);
      CharUnits CurAlignment = CharUnits::fromQuantity(CurAlignmentBits);
      FieldInfo InsertPoint = {CurAlignment, CharUnits::Zero(), nullptr};
      auto CurBegin = Fields.begin();
      auto CurEnd = Fields.end();

      // In the typical case, this will find the last element
      // of the vector. We won't find a middle element unless
      // we started on a poorly aligned address or have an overly
      // aligned field.
      auto Iter = std::upper_bound(CurBegin, CurEnd, InsertPoint);
      if (Iter != CurBegin) {
        // We found a field that we can layout with the current alignment.
        --Iter;
        NewOffset += Iter->Size;
        OptimalFieldsOrder.push_back(Iter->Field);
        Fields.erase(Iter);
      } else {
        // We are poorly aligned, and we need to pad in order to layout another
        // field. Round up to at least the smallest field alignment that we
        // currently have.
        CharUnits NextOffset = NewOffset.alignTo(Fields[0].Align);
        NewPad += NextOffset - NewOffset;
        NewOffset = NextOffset;
      }
    }
    // Calculate tail padding.
    CharUnits NewSize = NewOffset.alignTo(RL.getAlignment());
    NewPad += NewSize - NewOffset;
    return {NewPad, std::move(OptimalFieldsOrder)};
  }

  void reportRecord(
      const RecordDecl *RD, CharUnits BaselinePad, CharUnits OptimalPad,
      const SmallVector<const FieldDecl *, 20> &OptimalFieldsOrder) const {
    if (!PaddingBug)
      PaddingBug =
          llvm::make_unique<BugType>(this, "Excessive Padding", "Performance");

    SmallString<100> Buf;
    llvm::raw_svector_ostream Os(Buf);
    Os << "Excessive padding in '";
    Os << QualType::getAsString(RD->getTypeForDecl(), Qualifiers(),
                                LangOptions())
       << "'";

    if (auto *TSD = dyn_cast<ClassTemplateSpecializationDecl>(RD)) {
      // TODO: make this show up better in the console output and in
      // the HTML. Maybe just make it show up in HTML like the path
      // diagnostics show.
      SourceLocation ILoc = TSD->getPointOfInstantiation();
      if (ILoc.isValid())
        Os << " instantiated here: "
           << ILoc.printToString(BR->getSourceManager());
    }

    Os << " (" << BaselinePad.getQuantity() << " padding bytes, where "
       << OptimalPad.getQuantity() << " is optimal). \n"
       << "Optimal fields order: \n";
    for (const auto *FD : OptimalFieldsOrder)
      Os << FD->getName() << ", \n";
    Os << "consider reordering the fields or adding explicit padding "
          "members.";

    PathDiagnosticLocation CELoc =
        PathDiagnosticLocation::create(RD, BR->getSourceManager());
    auto Report = llvm::make_unique<BugReport>(*PaddingBug, Os.str(), CELoc);
    Report->setDeclWithIssue(RD);
    Report->addRange(RD->getSourceRange());
    BR->emitReport(std::move(Report));
  }
};
}

void ento::registerPaddingChecker(CheckerManager &Mgr) {
  Mgr.registerChecker<PaddingChecker>();
}