//=======- PaddingChecker.cpp ------------------------------------*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines a checker that checks for padding that could be
// removed by re-ordering members.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/DynamicRecursiveASTVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Driver/DriverDiagnostic.h"
#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugReporter.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <numeric>

using namespace clang;
using namespace ento;

namespace {
/// Reports records (and arrays of records) whose declared field order wastes
/// more than AllowedPad bytes of padding compared to an optimal reordering.
class PaddingChecker : public Checker<check::ASTDecl<TranslationUnitDecl>> {
private:
  const BugType PaddingBug{this, "Excessive Padding", "Performance"};
  // Stashed from checkASTDecl so visitRecord/reportRecord can emit reports;
  // mutable because the checker callback is const.
  mutable BugReporter *BR;

public:
  // Maximum tolerated excess padding (in bytes) before a report is emitted.
  // Set from the "AllowedPad" checker option in registerPaddingChecker below.
  int64_t AllowedPad;

  /// Checker entry point: traverses the whole translation unit, forwarding
  /// every RecordDecl and VarDecl to the visit* helpers.
  void checkASTDecl(const TranslationUnitDecl *TUD, AnalysisManager &MGR,
                    BugReporter &BRArg) const {
    BR = &BRArg;

    // The calls to checkAST* from AnalysisConsumer don't
    // visit template instantiations or lambda classes. We
    // want to visit those, so we make our own RecursiveASTVisitor.
    struct LocalVisitor : DynamicRecursiveASTVisitor {
      const PaddingChecker *Checker;
      explicit LocalVisitor(const PaddingChecker *Checker) : Checker(Checker) {
        ShouldVisitTemplateInstantiations = true;
        ShouldVisitImplicitCode = true;
      }
      bool VisitRecordDecl(RecordDecl *RD) override {
        Checker->visitRecord(RD);
        return true;
      }
      bool VisitVarDecl(VarDecl *VD) override {
        Checker->visitVariable(VD);
        return true;
      }
      // TODO: Visit array new and mallocs for arrays.
    };

    LocalVisitor visitor(this);
    visitor.TraverseDecl(const_cast<TranslationUnitDecl *>(TUD));
  }

  /// Look for records of overly padded types. If padding *
  /// PadMultiplier exceeds AllowedPad, then generate a report.
  /// PadMultiplier is used to share code with the array padding
  /// checker.
  void visitRecord(const RecordDecl *RD, uint64_t PadMultiplier = 1) const {
    if (shouldSkipDecl(RD))
      return;

    // TODO: Figure out why we are going through declarations and not only
    // definitions.
    if (!(RD = RD->getDefinition()))
      return;

    // This is the simplest correct case: a class with no fields and one base
    // class. Other cases are more complicated because of how the base classes
    // & fields might interact, so we don't bother dealing with them.
    // TODO: Support other combinations of base classes and fields.
    if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD))
      if (CXXRD->field_empty() && CXXRD->getNumBases() == 1)
        return visitRecord(CXXRD->bases().begin()->getType()->getAsRecordDecl(),
                           PadMultiplier);

    auto &ASTContext = RD->getASTContext();
    const ASTRecordLayout &RL = ASTContext.getASTRecordLayout(RD);
    assert(llvm::isPowerOf2_64(RL.getAlignment().getQuantity()));

    CharUnits BaselinePad = calculateBaselinePad(RD, ASTContext, RL);
    if (BaselinePad.isZero())
      return;

    CharUnits OptimalPad;
    SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
    std::tie(OptimalPad, OptimalFieldsOrder) =
        calculateOptimalPad(RD, ASTContext, RL);

    CharUnits DiffPad = PadMultiplier * (BaselinePad - OptimalPad);
    if (DiffPad.getQuantity() <= AllowedPad) {
      assert(!DiffPad.isNegative() && "DiffPad should not be negative");
      // There is not enough excess padding to trigger a warning.
      return;
    }
    reportRecord(RD, BaselinePad, OptimalPad, OptimalFieldsOrder);
  }

  /// Look for arrays of overly padded types. If the padding of the
  /// array type exceeds AllowedPad, then generate a report.
  /// The per-element padding is scaled by the (constant) element count, so a
  /// small waste per element can still exceed AllowedPad in aggregate.
  void visitVariable(const VarDecl *VD) const {
    const ArrayType *ArrTy = VD->getType()->getAsArrayTypeUnsafe();
    if (ArrTy == nullptr)
      return;
    uint64_t Elts = 0;
    if (const ConstantArrayType *CArrTy = dyn_cast<ConstantArrayType>(ArrTy))
      Elts = CArrTy->getZExtSize();
    // Non-constant or zero-length arrays are skipped (Elts stays 0).
    if (Elts == 0)
      return;
    const RecordType *RT = ArrTy->getElementType()->getAs<RecordType>();
    if (RT == nullptr)
      return;

    // TODO: Recurse into the fields to see if they have excess padding.
    visitRecord(RT->getDecl(), Elts);
  }

  /// Returns true for records this checker cannot (or should not) analyze:
  /// declarations without a definition or valid location, system-header
  /// types, unions, most base-class configurations, dependent types, and
  /// records containing bitfields or incomplete (flexible) arrays.
  bool shouldSkipDecl(const RecordDecl *RD) const {
    // TODO: Figure out why we are going through declarations and not only
    // definitions.
    if (!(RD = RD->getDefinition()))
      return true;
    auto Location = RD->getLocation();
    // If the construct doesn't have a source file, then it's not something
    // we want to diagnose.
    if (!Location.isValid())
      return true;
    SrcMgr::CharacteristicKind Kind =
        BR->getSourceManager().getFileCharacteristic(Location);
    // Throw out all records that come from system headers.
    if (Kind != SrcMgr::C_User)
      return true;

    // Not going to attempt to optimize unions.
    if (RD->isUnion())
      return true;
    if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RD)) {
      // Tail padding with base classes ends up being very complicated.
      // We will skip objects with base classes for now, unless they do not
      // have fields.
      // TODO: Handle more base class scenarios.
      if (!CXXRD->field_empty() && CXXRD->getNumBases() != 0)
        return true;
      if (CXXRD->field_empty() && CXXRD->getNumBases() != 1)
        return true;
      // Virtual bases are complicated, skipping those for now.
      if (CXXRD->getNumVBases() != 0)
        return true;
      // Can't layout a template, so skip it. We do still layout the
      // instantiations though.
      if (CXXRD->getTypeForDecl()->isDependentType())
        return true;
      if (CXXRD->getTypeForDecl()->isInstantiationDependentType())
        return true;
    }
    // How do you reorder fields if you haven't got any?
    else if (RD->field_empty())
      return true;

    auto IsTrickyField = [](const FieldDecl *FD) -> bool {
      // Bitfield layout is hard.
      if (FD->isBitField())
        return true;

      // Variable length arrays are tricky too.
      QualType Ty = FD->getType();
      if (Ty->isIncompleteArrayType())
        return true;
      return false;
    };

    if (llvm::any_of(RD->fields(), IsTrickyField))
      return true;
    return false;
  }

  /// Computes the total padding (inter-field gaps plus tail padding) of RD's
  /// layout as the compiler actually laid it out.
  static CharUnits calculateBaselinePad(const RecordDecl *RD,
                                        const ASTContext &ASTContext,
                                        const ASTRecordLayout &RL) {
    CharUnits PaddingSum;
    // Start at the first field's offset so vptrs / base subobjects before the
    // fields are not counted as padding.
    CharUnits Offset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
    for (const FieldDecl *FD : RD->fields()) {
      // Skip field that is a subobject of zero size, marked with
      // [[no_unique_address]] or an empty bitfield, because its address can be
      // set the same as the other fields addresses.
      if (FD->isZeroSize(ASTContext))
        continue;
      // This checker only cares about the padded size of the
      // field, and not the data size. If the field is a record
      // with tail padding, then we won't put that number in our
      // total because reordering fields won't fix that problem.
      CharUnits FieldSize = ASTContext.getTypeSizeInChars(FD->getType());
      auto FieldOffsetBits = RL.getFieldOffset(FD->getFieldIndex());
      CharUnits FieldOffset = ASTContext.toCharUnitsFromBits(FieldOffsetBits);
      // Gap between the end of the previous field and the start of this one.
      PaddingSum += (FieldOffset - Offset);
      Offset = FieldOffset + FieldSize;
    }
    // Tail padding: from the end of the last field to the record's full size.
    PaddingSum += RL.getSize() - Offset;
    return PaddingSum;
  }

  /// Optimal padding overview:
  /// 1. Find a close approximation to where we can place our first field.
  ///    This will usually be at offset 0.
  /// 2. Try to find the best field that can legally be placed at the current
  ///    offset.
  ///   a. "Best" is the largest alignment that is legal, but smallest size.
  ///      This is to account for overly aligned types.
  /// 3. If no fields can fit, pad by rounding the current offset up to the
  ///    smallest alignment requirement of our fields. Measure and track the
  ///    amount of padding added. Go back to 2.
  /// 4. Increment the current offset by the size of the chosen field.
  /// 5. Remove the chosen field from the set of future possibilities.
  /// 6. Go back to 2 if there are still unplaced fields.
  /// 7. Add tail padding by rounding the current offset up to the structure
  ///    alignment. Track the amount of padding added.

  static std::pair<CharUnits, SmallVector<const FieldDecl *, 20>>
  calculateOptimalPad(const RecordDecl *RD, const ASTContext &ASTContext,
                      const ASTRecordLayout &RL) {
    struct FieldInfo {
      CharUnits Align;
      CharUnits Size;
      const FieldDecl *Field;
      bool operator<(const FieldInfo &RHS) const {
        // Order from small alignments to large alignments,
        // then large sizes to small sizes.
        // then large field indices to small field indices
        // (negating Size and the index reverses their direction inside the
        // lexicographic tuple comparison).
        return std::make_tuple(Align, -Size,
                               Field ? -static_cast<int>(Field->getFieldIndex())
                                     : 0) <
               std::make_tuple(
                   RHS.Align, -RHS.Size,
                   RHS.Field ? -static_cast<int>(RHS.Field->getFieldIndex())
                             : 0);
      }
    };
    SmallVector<FieldInfo, 20> Fields;
    auto GatherSizesAndAlignments = [](const FieldDecl *FD) {
      FieldInfo RetVal;
      RetVal.Field = FD;
      auto &Ctx = FD->getASTContext();
      auto Info = Ctx.getTypeInfoInChars(FD->getType());
      // Zero-size subobjects ([[no_unique_address]], empty bitfields) occupy
      // no storage, mirroring calculateBaselinePad's treatment.
      RetVal.Size = FD->isZeroSize(Ctx) ? CharUnits::Zero() : Info.Width;
      RetVal.Align = Info.Align;
      assert(llvm::isPowerOf2_64(RetVal.Align.getQuantity()));
      // Respect an explicit alignas/aligned attribute if it is stricter.
      if (auto Max = FD->getMaxAlignment())
        RetVal.Align = std::max(Ctx.toCharUnitsFromBits(Max), RetVal.Align);
      return RetVal;
    };
    std::transform(RD->field_begin(), RD->field_end(),
                   std::back_inserter(Fields), GatherSizesAndAlignments);
    llvm::sort(Fields);
    // This lets us skip over vptrs and non-virtual bases,
    // so that we can just worry about the fields in our object.
    // Note that this does cause us to miss some cases where we
    // could pack more bytes in to a base class's tail padding.
    CharUnits NewOffset = ASTContext.toCharUnitsFromBits(RL.getFieldOffset(0));
    CharUnits NewPad;
    SmallVector<const FieldDecl *, 20> OptimalFieldsOrder;
    while (!Fields.empty()) {
      unsigned TrailingZeros =
          llvm::countr_zero((unsigned long long)NewOffset.getQuantity());
      // If NewOffset is zero, then countTrailingZeros will be 64. Shifting
      // 64 will overflow our unsigned long long. Shifting 63 will turn
      // our long long (and CharUnits internal type) negative. So shift 62.
      long long CurAlignmentBits = 1ull << (std::min)(TrailingZeros, 62u);
      CharUnits CurAlignment = CharUnits::fromQuantity(CurAlignmentBits);
      FieldInfo InsertPoint = {CurAlignment, CharUnits::Zero(), nullptr};

      // In the typical case, this will find the last element
      // of the vector. We won't find a middle element unless
      // we started on a poorly aligned address or have an overly
      // aligned field.
      auto Iter = llvm::upper_bound(Fields, InsertPoint);
      if (Iter != Fields.begin()) {
        // We found a field that we can layout with the current alignment.
        --Iter;
        NewOffset += Iter->Size;
        OptimalFieldsOrder.push_back(Iter->Field);
        Fields.erase(Iter);
      } else {
        // We are poorly aligned, and we need to pad in order to layout another
        // field. Round up to at least the smallest field alignment that we
        // currently have.
        CharUnits NextOffset = NewOffset.alignTo(Fields[0].Align);
        NewPad += NextOffset - NewOffset;
        NewOffset = NextOffset;
      }
    }
    // Calculate tail padding.
    CharUnits NewSize = NewOffset.alignTo(RL.getAlignment());
    NewPad += NewSize - NewOffset;
    return {NewPad, std::move(OptimalFieldsOrder)};
  }

  /// Emits the "Excessive padding" diagnostic for RD, naming the baseline and
  /// optimal padding byte counts and listing the suggested field order.
  void reportRecord(
      const RecordDecl *RD, CharUnits BaselinePad, CharUnits OptimalPad,
      const SmallVector<const FieldDecl *, 20> &OptimalFieldsOrder) const {
    SmallString<100> Buf;
    llvm::raw_svector_ostream Os(Buf);
    Os << "Excessive padding in '";
    Os << QualType::getAsString(RD->getTypeForDecl(), Qualifiers(),
                                LangOptions())
       << "'";

    if (auto *TSD = dyn_cast<ClassTemplateSpecializationDecl>(RD)) {
      // TODO: make this show up better in the console output and in
      // the HTML. Maybe just make it show up in HTML like the path
      // diagnostics show.
      SourceLocation ILoc = TSD->getPointOfInstantiation();
      if (ILoc.isValid())
        Os << " instantiated here: "
           << ILoc.printToString(BR->getSourceManager());
    }

    Os << " (" << BaselinePad.getQuantity() << " padding bytes, where "
       << OptimalPad.getQuantity() << " is optimal). "
       << "Optimal fields order: ";
    for (const auto *FD : OptimalFieldsOrder)
      Os << FD->getName() << ", ";
    Os << "consider reordering the fields or adding explicit padding "
          "members.";

    PathDiagnosticLocation CELoc =
        PathDiagnosticLocation::create(RD, BR->getSourceManager());
    auto Report = std::make_unique<BasicBugReport>(PaddingBug, Os.str(), CELoc);
    Report->setDeclWithIssue(RD);
    Report->addRange(RD->getSourceRange());
    BR->emitReport(std::move(Report));
  }
};
} // namespace

void ento::registerPaddingChecker(CheckerManager &Mgr) {
  auto *Checker = Mgr.registerChecker<PaddingChecker>();
  // "AllowedPad" is the user-configurable threshold; negative values are
  // rejected as invalid option values.
  Checker->AllowedPad = Mgr.getAnalyzerOptions()
      .getCheckerIntegerOption(Checker, "AllowedPad");
  if (Checker->AllowedPad < 0)
    Mgr.reportInvalidCheckerOptionValue(
        Checker, "AllowedPad", "a non-negative value");
}

bool ento::shouldRegisterPaddingChecker(const CheckerManager &mgr) {
  return true;
}