xref: /llvm-project/clang/lib/AST/Randstruct.cpp (revision cfe26358e3051755961fb1f3b46328dc2c326895)
17aa8c38aSConnor Kuehl //===--- Randstruct.cpp ---------------------------------------------------===//
27aa8c38aSConnor Kuehl //
37aa8c38aSConnor Kuehl // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
47aa8c38aSConnor Kuehl // See https://llvm.org/LICENSE.txt for license information.
57aa8c38aSConnor Kuehl // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
67aa8c38aSConnor Kuehl //
77aa8c38aSConnor Kuehl //===----------------------------------------------------------------------===//
87aa8c38aSConnor Kuehl //
97aa8c38aSConnor Kuehl // This file contains the implementation for Clang's structure field layout
107aa8c38aSConnor Kuehl // randomization.
117aa8c38aSConnor Kuehl //
127aa8c38aSConnor Kuehl //===----------------------------------------------------------------------===//
137aa8c38aSConnor Kuehl 
147aa8c38aSConnor Kuehl #include "clang/AST/Randstruct.h"
157aa8c38aSConnor Kuehl #include "clang/AST/ASTContext.h"
167aa8c38aSConnor Kuehl #include "clang/AST/Attr.h"
17463790bfSBill Wendling #include "clang/AST/Decl.h"
18463790bfSBill Wendling #include "clang/AST/DeclCXX.h" // For StaticAssertDecl
197aa8c38aSConnor Kuehl #include "clang/Basic/Diagnostic.h"
207aa8c38aSConnor Kuehl #include "llvm/ADT/SmallVector.h"
217aa8c38aSConnor Kuehl 
227aa8c38aSConnor Kuehl #include <algorithm>
237aa8c38aSConnor Kuehl #include <random>
247aa8c38aSConnor Kuehl #include <set>
257aa8c38aSConnor Kuehl #include <string>
267aa8c38aSConnor Kuehl 
277aa8c38aSConnor Kuehl using clang::ASTContext;
287aa8c38aSConnor Kuehl using clang::FieldDecl;
297aa8c38aSConnor Kuehl using llvm::SmallVector;
307aa8c38aSConnor Kuehl 
317aa8c38aSConnor Kuehl namespace {
327aa8c38aSConnor Kuehl 
// Size of a cache line, used as the target capacity of each Bucket.
// FIXME: Replace this with some discovery once that mechanism exists.
// Modernized from an unnamed `enum { ... }` constant to `constexpr`,
// which is the idiomatic C++ spelling for an integral constant.
constexpr int CACHE_LINE = 64;
357aa8c38aSConnor Kuehl 
367aa8c38aSConnor Kuehl // The Bucket class holds the struct fields we're trying to fill to a
377aa8c38aSConnor Kuehl // cache-line.
// The Bucket class holds the struct fields we're trying to fill to a
// cache-line. BitfieldRunBucket derives from it to hold runs of adjacent
// bitfields, hence the virtual hooks below.
class Bucket {
  // Fields placed in this bucket, in insertion order.
  SmallVector<FieldDecl *, 64> Fields;
  // Running total of the FieldSize values passed to addField().
  // NOTE(review): callers pass getTypeInfo().Width (bits) for regular
  // fields and 1 for bitfields, while CACHE_LINE is 64 — confirm the
  // intended unit of this capacity (bits vs. bytes).
  int Size = 0;

public:
  virtual ~Bucket() = default;

  // Direct (mutable) access to the placed fields, used for the final
  // per-bucket shuffle and emission.
  SmallVector<FieldDecl *, 64> &fields() { return Fields; }
  // Append Field and charge FieldSize units against this bucket's capacity.
  void addField(FieldDecl *Field, int FieldSize);
  // True if FieldSize more units fit without exceeding CACHE_LINE.
  // Overridden by BitfieldRunBucket to always accept.
  virtual bool canFit(int FieldSize) const {
    return Size + FieldSize <= CACHE_LINE;
  }
  // Identifies buckets that hold a bitfield run (those are not shuffled
  // internally, to keep the run adjacent).
  virtual bool isBitfieldRun() const { return false; }
  // True once the bucket has reached or exceeded capacity.
  bool full() const { return Size >= CACHE_LINE; }
};
537aa8c38aSConnor Kuehl 
547aa8c38aSConnor Kuehl void Bucket::addField(FieldDecl *Field, int FieldSize) {
557aa8c38aSConnor Kuehl   Size += FieldSize;
567aa8c38aSConnor Kuehl   Fields.push_back(Field);
577aa8c38aSConnor Kuehl }
587aa8c38aSConnor Kuehl 
597aa8c38aSConnor Kuehl struct BitfieldRunBucket : public Bucket {
607aa8c38aSConnor Kuehl   bool canFit(int FieldSize) const override { return true; }
617aa8c38aSConnor Kuehl   bool isBitfieldRun() const override { return true; }
627aa8c38aSConnor Kuehl };
637aa8c38aSConnor Kuehl 
// Core bucket-and-shuffle pass. Packs the fields in FieldsOut into
// cache-line-sized Buckets (runs of adjacent non-zero-length bitfields go
// into a dedicated run bucket; over-sized fields get a bucket of their
// own), then shuffles the bucket order and the fields within each
// non-bitfield-run bucket using RNG. On return, FieldsOut holds the same
// fields in the new, randomized order.
void randomizeStructureLayoutImpl(const ASTContext &Context,
                                  llvm::SmallVectorImpl<FieldDecl *> &FieldsOut,
                                  std::mt19937 &RNG) {
  // All of the Buckets produced by best-effort cache-line algorithm.
  // Buckets are moved here once "tied off" and are not revisited.
  SmallVector<std::unique_ptr<Bucket>, 16> Buckets;

  // The current bucket of fields that we are trying to fill to a cache-line.
  std::unique_ptr<Bucket> CurrentBucket;

  // The current bucket containing the run of adjacent bitfields to ensure they
  // remain adjacent.
  std::unique_ptr<BitfieldRunBucket> CurrentBitfieldRun;

  // Tracks the number of fields that we failed to fit to the current bucket,
  // and thus still need to be added later. Reset to zero whenever a bucket
  // is tied off.
  size_t Skipped = 0;

  while (!FieldsOut.empty()) {
    // If we've Skipped more fields than we have remaining to place, that means
    // that they can't fit in our current bucket, and we need to start a new
    // one. (Skipped only becomes nonzero via the canFit-failure branch
    // below, which guarantees CurrentBucket is non-null here.)
    if (Skipped >= FieldsOut.size()) {
      Skipped = 0;
      Buckets.push_back(std::move(CurrentBucket));
    }

    // Take the first field that needs to be put in a bucket.
    auto FieldIter = FieldsOut.begin();
    FieldDecl *FD = *FieldIter;

    if (FD->isBitField() && !FD->isZeroLengthBitField()) {
      // Start a bitfield run if this is the first bitfield we have found.
      if (!CurrentBitfieldRun)
        CurrentBitfieldRun = std::make_unique<BitfieldRunBucket>();

      // We've placed the field, and can remove it from the "awaiting Buckets"
      // vector called "Fields." The size argument is irrelevant because a
      // BitfieldRunBucket never reports itself as unable to fit more.
      CurrentBitfieldRun->addField(FD, /*FieldSize is irrelevant here*/ 1);
      FieldsOut.erase(FieldIter);
      continue;
    }

    // Else, current field is not a bitfield. If we were previously in a
    // bitfield run, end it.
    if (CurrentBitfieldRun)
      Buckets.push_back(std::move(CurrentBitfieldRun));

    // If we don't have a bucket, make one.
    if (!CurrentBucket)
      CurrentBucket = std::make_unique<Bucket>();

    // NOTE(review): getTypeInfo reports Width in bits, but CACHE_LINE is
    // 64 — so the "cache line" packed here is effectively 64 bits, not 64
    // bytes. Confirm the intended unit before changing either side.
    uint64_t Width = Context.getTypeInfo(FD->getType()).Width;
    if (Width >= CACHE_LINE) {
      // A field at least a full "cache line" wide gets a bucket to itself,
      // tied off immediately; the current bucket stays open.
      std::unique_ptr<Bucket> OverSized = std::make_unique<Bucket>();
      OverSized->addField(FD, Width);
      FieldsOut.erase(FieldIter);
      Buckets.push_back(std::move(OverSized));
      continue;
    }

    // If it fits, add it.
    if (CurrentBucket->canFit(Width)) {
      CurrentBucket->addField(FD, Width);
      FieldsOut.erase(FieldIter);

      // If it's now full, tie off the bucket.
      if (CurrentBucket->full()) {
        Skipped = 0;
        Buckets.push_back(std::move(CurrentBucket));
      }
    } else {
      // We can't fit it in our current bucket. Move to the end for processing
      // later.
      ++Skipped; // Mark it skipped.
      FieldsOut.push_back(FD);
      FieldsOut.erase(FieldIter);
    }
  }

  // Done processing the fields awaiting a bucket.

  // If we were filling a bucket, tie it off.
  if (CurrentBucket)
    Buckets.push_back(std::move(CurrentBucket));

  // If we were processing a bitfield run bucket, tie it off.
  if (CurrentBitfieldRun)
    Buckets.push_back(std::move(CurrentBitfieldRun));

  // Randomize the order of the buckets themselves.
  std::shuffle(std::begin(Buckets), std::end(Buckets), RNG);

  // Produce the new ordering of the elements from the Buckets. Bitfield-run
  // buckets keep their internal order so the run stays adjacent.
  SmallVector<FieldDecl *, 16> FinalOrder;
  for (const std::unique_ptr<Bucket> &B : Buckets) {
    llvm::SmallVectorImpl<FieldDecl *> &RandFields = B->fields();
    if (!B->isBitfieldRun())
      std::shuffle(std::begin(RandFields), std::end(RandFields), RNG);

    FinalOrder.insert(FinalOrder.end(), RandFields.begin(), RandFields.end());
  }

  FieldsOut = FinalOrder;
}
1677aa8c38aSConnor Kuehl 
1687aa8c38aSConnor Kuehl } // anonymous namespace
1697aa8c38aSConnor Kuehl 
1707aa8c38aSConnor Kuehl namespace clang {
1717aa8c38aSConnor Kuehl namespace randstruct {
1727aa8c38aSConnor Kuehl 
// Computes a randomized ordering of RD's member declarations into
// FinalOrdering. Only FieldDecls are shuffled; StaticAssertDecls and
// IndirectFieldDecls are appended after the randomized fields, all other
// decls are emitted first in their original order, and a trailing
// flexible-array-like member is pinned to the very end. The RNG is seeded
// from the -frandomize-layout seed plus the record name, so the layout is
// deterministic for a given seed and record.
// Returns true if a randomized ordering was produced, false if RD has no
// fields to randomize (FinalOrdering may still have received non-field
// decls in that case).
bool randomizeStructureLayout(const ASTContext &Context, RecordDecl *RD,
                              SmallVectorImpl<Decl *> &FinalOrdering) {
  SmallVector<FieldDecl *, 64> RandomizedFields;
  SmallVector<Decl *, 8> PostRandomizedFields;

  // Partition the record's decls; TotalNumFields counts everything so the
  // assert below can verify nothing was dropped or duplicated.
  unsigned TotalNumFields = 0;
  for (Decl *D : RD->decls()) {
    ++TotalNumFields;
    if (auto *FD = dyn_cast<FieldDecl>(D))
      RandomizedFields.push_back(FD);
    else if (isa<StaticAssertDecl>(D) || isa<IndirectFieldDecl>(D))
      PostRandomizedFields.push_back(D);
    else
      FinalOrdering.push_back(D);
  }

  if (RandomizedFields.empty())
    return false;

  // Struct might end with a flexible array member or an array of small
  // constant size, in which case we don't want to randomize it away from
  // the end. NOTE(review): sle(2) keeps trailing arrays of size 0, 1, AND
  // 2 in place, though the original comment mentioned only "0 or 1" —
  // confirm the intended threshold.
  FieldDecl *FlexibleArray =
      RD->hasFlexibleArrayMember() ? RandomizedFields.pop_back_val() : nullptr;
  if (!FlexibleArray) {
    if (const auto *CA =
            dyn_cast<ConstantArrayType>(RandomizedFields.back()->getType()))
      if (CA->getSize().sle(2))
        FlexibleArray = RandomizedFields.pop_back_val();
  }

  // Seed the RNG with the user-provided seed plus the record name, giving
  // a deterministic per-record shuffle.
  std::string Seed =
      Context.getLangOpts().RandstructSeed + RD->getNameAsString();
  std::seed_seq SeedSeq(Seed.begin(), Seed.end());
  std::mt19937 RNG(SeedSeq);

  randomizeStructureLayoutImpl(Context, RandomizedFields, RNG);

  // Insert the randomized field decls into the final ordering.
  FinalOrdering.insert(FinalOrdering.end(), RandomizedFields.begin(),
                       RandomizedFields.end());

  // Add fields that belong towards the end of the RecordDecl.
  FinalOrdering.insert(FinalOrdering.end(), PostRandomizedFields.begin(),
                       PostRandomizedFields.end());

  // Add back the flexible array.
  if (FlexibleArray)
    FinalOrdering.push_back(FlexibleArray);

  assert(TotalNumFields == FinalOrdering.size() &&
         "Decl count has been altered after Randstruct randomization!");
  (void)TotalNumFields;
  return true;
}
2277aa8c38aSConnor Kuehl 
2287aa8c38aSConnor Kuehl } // end namespace randstruct
2297aa8c38aSConnor Kuehl } // end namespace clang
230