xref: /openbsd-src/gnu/llvm/clang/lib/AST/Randstruct.cpp (revision 12c855180aad702bbcca06e0398d774beeafb155)
1*12c85518Srobert //===--- Randstruct.cpp ---------------------------------------------------===//
2*12c85518Srobert //
3*12c85518Srobert // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4*12c85518Srobert // See https://llvm.org/LICENSE.txt for license information.
5*12c85518Srobert // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6*12c85518Srobert //
7*12c85518Srobert //===----------------------------------------------------------------------===//
8*12c85518Srobert //
9*12c85518Srobert // This file contains the implementation for Clang's structure field layout
10*12c85518Srobert // randomization.
11*12c85518Srobert //
12*12c85518Srobert //===----------------------------------------------------------------------===//
13*12c85518Srobert 
14*12c85518Srobert #include "clang/AST/Randstruct.h"
15*12c85518Srobert #include "clang/AST/ASTContext.h"
16*12c85518Srobert #include "clang/AST/ASTDiagnostic.h"
17*12c85518Srobert #include "clang/AST/Attr.h"
18*12c85518Srobert #include "clang/AST/Decl.h"
19*12c85518Srobert #include "clang/AST/DeclCXX.h" // For StaticAssertDecl
20*12c85518Srobert #include "clang/Basic/Diagnostic.h"
21*12c85518Srobert #include "llvm/ADT/SmallVector.h"
22*12c85518Srobert 
23*12c85518Srobert #include <algorithm>
24*12c85518Srobert #include <random>
25*12c85518Srobert #include <set>
26*12c85518Srobert #include <sstream>
27*12c85518Srobert #include <string>
28*12c85518Srobert 
29*12c85518Srobert using clang::ASTContext;
30*12c85518Srobert using clang::FieldDecl;
31*12c85518Srobert using llvm::SmallVector;
32*12c85518Srobert 
33*12c85518Srobert namespace {
34*12c85518Srobert 
// FIXME: Replace this with some discovery once that mechanism exists.
// Target cache-line size used as the bucket capacity by the layout
// algorithm below. NOTE(review): callers compare this against a type's
// bit width from getTypeInfo, so as written any field of 64+ bits counts
// as a full line — confirm the intended unit (bits vs. bytes).
constexpr int CACHE_LINE = 64;
37*12c85518Srobert 
38*12c85518Srobert // The Bucket class holds the struct fields we're trying to fill to a
39*12c85518Srobert // cache-line.
40*12c85518Srobert class Bucket {
41*12c85518Srobert   SmallVector<FieldDecl *, 64> Fields;
42*12c85518Srobert   int Size = 0;
43*12c85518Srobert 
44*12c85518Srobert public:
45*12c85518Srobert   virtual ~Bucket() = default;
46*12c85518Srobert 
fields()47*12c85518Srobert   SmallVector<FieldDecl *, 64> &fields() { return Fields; }
48*12c85518Srobert   void addField(FieldDecl *Field, int FieldSize);
canFit(int FieldSize) const49*12c85518Srobert   virtual bool canFit(int FieldSize) const {
50*12c85518Srobert     return Size + FieldSize <= CACHE_LINE;
51*12c85518Srobert   }
isBitfieldRun() const52*12c85518Srobert   virtual bool isBitfieldRun() const { return false; }
full() const53*12c85518Srobert   bool full() const { return Size >= CACHE_LINE; }
54*12c85518Srobert };
55*12c85518Srobert 
addField(FieldDecl * Field,int FieldSize)56*12c85518Srobert void Bucket::addField(FieldDecl *Field, int FieldSize) {
57*12c85518Srobert   Size += FieldSize;
58*12c85518Srobert   Fields.push_back(Field);
59*12c85518Srobert }
60*12c85518Srobert 
61*12c85518Srobert struct BitfieldRunBucket : public Bucket {
canFit__anonb1a997f20111::BitfieldRunBucket62*12c85518Srobert   bool canFit(int FieldSize) const override { return true; }
isBitfieldRun__anonb1a997f20111::BitfieldRunBucket63*12c85518Srobert   bool isBitfieldRun() const override { return true; }
64*12c85518Srobert };
65*12c85518Srobert 
// Core randomization pass. Greedily partitions FieldsOut into cache-line
// sized Buckets, then shuffles the buckets (and the fields within each
// non-bitfield-run bucket) with the caller-provided RNG, writing the
// resulting order back into FieldsOut.
//
// Invariants kept by the loop below:
//  - Runs of adjacent non-zero-length bitfields stay together in a single
//    BitfieldRunBucket and are never shuffled internally.
//  - A field whose Width is >= CACHE_LINE gets a bucket of its own.
void randomizeStructureLayoutImpl(const ASTContext &Context,
                                  llvm::SmallVectorImpl<FieldDecl *> &FieldsOut,
                                  std::mt19937 &RNG) {
  // All of the Buckets produced by best-effort cache-line algorithm.
  SmallVector<std::unique_ptr<Bucket>, 16> Buckets;

  // The current bucket of fields that we are trying to fill to a cache-line.
  std::unique_ptr<Bucket> CurrentBucket;

  // The current bucket containing the run of adjacent bitfields to ensure they
  // remain adjacent.
  std::unique_ptr<BitfieldRunBucket> CurrentBitfieldRun;

  // Tracks the number of fields that we failed to fit to the current bucket,
  // and thus still need to be added later.
  size_t Skipped = 0;

  while (!FieldsOut.empty()) {
    // If we've Skipped more fields than we have remaining to place, that means
    // that they can't fit in our current bucket, and we need to start a new
    // one. (Skipped only becomes non-zero after a CurrentBucket has been
    // created in the canFit branch below, so the pointer pushed here is
    // never null.)
    if (Skipped >= FieldsOut.size()) {
      Skipped = 0;
      Buckets.push_back(std::move(CurrentBucket));
    }

    // Take the first field that needs to be put in a bucket.
    auto FieldIter = FieldsOut.begin();
    FieldDecl *FD = *FieldIter;

    if (FD->isBitField() && !FD->isZeroLengthBitField(Context)) {
      // Start a bitfield run if this is the first bitfield we have found.
      if (!CurrentBitfieldRun)
        CurrentBitfieldRun = std::make_unique<BitfieldRunBucket>();

      // We've placed the field, and can remove it from the "awaiting Buckets"
      // vector called "Fields." The size argument is irrelevant because
      // BitfieldRunBucket::canFit accepts any field unconditionally.
      CurrentBitfieldRun->addField(FD, /*FieldSize is irrelevant here*/ 1);
      FieldsOut.erase(FieldIter);
      continue;
    }

    // Else, current field is not a bitfield (or is a zero-length bitfield,
    // which deliberately falls through here and acts as a run separator).
    // If we were previously in a bitfield run, end it.
    if (CurrentBitfieldRun)
      Buckets.push_back(std::move(CurrentBitfieldRun));

    // If we don't have a bucket, make one.
    if (!CurrentBucket)
      CurrentBucket = std::make_unique<Bucket>();

    // NOTE(review): getTypeInfo reports Width in *bits*, while CACHE_LINE
    // (64) reads like a byte count; as written any field of 64 or more bits
    // is treated as oversized — confirm the intended unit.
    uint64_t Width = Context.getTypeInfo(FD->getType()).Width;
    if (Width >= CACHE_LINE) {
      // Oversized fields get a dedicated bucket of their own.
      std::unique_ptr<Bucket> OverSized = std::make_unique<Bucket>();
      OverSized->addField(FD, Width);
      FieldsOut.erase(FieldIter);
      Buckets.push_back(std::move(OverSized));
      continue;
    }

    // If it fits, add it.
    if (CurrentBucket->canFit(Width)) {
      CurrentBucket->addField(FD, Width);
      FieldsOut.erase(FieldIter);

      // If it's now full, tie off the bucket.
      if (CurrentBucket->full()) {
        Skipped = 0;
        Buckets.push_back(std::move(CurrentBucket));
      }
    } else {
      // We can't fit it in our current bucket. Move to the end for processing
      // later.
      ++Skipped; // Mark it skipped.
      FieldsOut.push_back(FD);
      FieldsOut.erase(FieldIter);
    }
  }

  // Done processing the fields awaiting a bucket.

  // If we were filling a bucket, tie it off.
  if (CurrentBucket)
    Buckets.push_back(std::move(CurrentBucket));

  // If we were processing a bitfield run bucket, tie it off.
  if (CurrentBitfieldRun)
    Buckets.push_back(std::move(CurrentBitfieldRun));

  // Randomize the order of the buckets themselves...
  std::shuffle(std::begin(Buckets), std::end(Buckets), RNG);

  // Produce the new ordering of the elements from the Buckets.
  SmallVector<FieldDecl *, 16> FinalOrder;
  for (const std::unique_ptr<Bucket> &B : Buckets) {
    llvm::SmallVectorImpl<FieldDecl *> &RandFields = B->fields();
    // ...and of the fields within each bucket, except for bitfield runs,
    // whose fields must keep their relative order.
    if (!B->isBitfieldRun())
      std::shuffle(std::begin(RandFields), std::end(RandFields), RNG);

    FinalOrder.insert(FinalOrder.end(), RandFields.begin(), RandFields.end());
  }

  FieldsOut = FinalOrder;
}
169*12c85518Srobert 
170*12c85518Srobert } // anonymous namespace
171*12c85518Srobert 
172*12c85518Srobert namespace clang {
173*12c85518Srobert namespace randstruct {
174*12c85518Srobert 
randomizeStructureLayout(const ASTContext & Context,RecordDecl * RD,SmallVectorImpl<Decl * > & FinalOrdering)175*12c85518Srobert bool randomizeStructureLayout(const ASTContext &Context, RecordDecl *RD,
176*12c85518Srobert                               SmallVectorImpl<Decl *> &FinalOrdering) {
177*12c85518Srobert   SmallVector<FieldDecl *, 64> RandomizedFields;
178*12c85518Srobert   SmallVector<Decl *, 8> PostRandomizedFields;
179*12c85518Srobert 
180*12c85518Srobert   unsigned TotalNumFields = 0;
181*12c85518Srobert   for (Decl *D : RD->decls()) {
182*12c85518Srobert     ++TotalNumFields;
183*12c85518Srobert     if (auto *FD = dyn_cast<FieldDecl>(D))
184*12c85518Srobert       RandomizedFields.push_back(FD);
185*12c85518Srobert     else if (isa<StaticAssertDecl>(D) || isa<IndirectFieldDecl>(D))
186*12c85518Srobert       PostRandomizedFields.push_back(D);
187*12c85518Srobert     else
188*12c85518Srobert       FinalOrdering.push_back(D);
189*12c85518Srobert   }
190*12c85518Srobert 
191*12c85518Srobert   if (RandomizedFields.empty())
192*12c85518Srobert     return false;
193*12c85518Srobert 
194*12c85518Srobert   // Struct might end with a flexible array or an array of size 0 or 1,
195*12c85518Srobert   // in which case we don't want to randomize it.
196*12c85518Srobert   FieldDecl *FlexibleArray =
197*12c85518Srobert       RD->hasFlexibleArrayMember() ? RandomizedFields.pop_back_val() : nullptr;
198*12c85518Srobert   if (!FlexibleArray) {
199*12c85518Srobert     if (const auto *CA =
200*12c85518Srobert             dyn_cast<ConstantArrayType>(RandomizedFields.back()->getType()))
201*12c85518Srobert       if (CA->getSize().sle(2))
202*12c85518Srobert         FlexibleArray = RandomizedFields.pop_back_val();
203*12c85518Srobert   }
204*12c85518Srobert 
205*12c85518Srobert   std::string Seed =
206*12c85518Srobert       Context.getLangOpts().RandstructSeed + RD->getNameAsString();
207*12c85518Srobert   std::seed_seq SeedSeq(Seed.begin(), Seed.end());
208*12c85518Srobert   std::mt19937 RNG(SeedSeq);
209*12c85518Srobert 
210*12c85518Srobert   randomizeStructureLayoutImpl(Context, RandomizedFields, RNG);
211*12c85518Srobert 
212*12c85518Srobert   // Plorp the randomized decls into the final ordering.
213*12c85518Srobert   FinalOrdering.insert(FinalOrdering.end(), RandomizedFields.begin(),
214*12c85518Srobert                        RandomizedFields.end());
215*12c85518Srobert 
216*12c85518Srobert   // Add fields that belong towards the end of the RecordDecl.
217*12c85518Srobert   FinalOrdering.insert(FinalOrdering.end(), PostRandomizedFields.begin(),
218*12c85518Srobert                        PostRandomizedFields.end());
219*12c85518Srobert 
220*12c85518Srobert   // Add back the flexible array.
221*12c85518Srobert   if (FlexibleArray)
222*12c85518Srobert     FinalOrdering.push_back(FlexibleArray);
223*12c85518Srobert 
224*12c85518Srobert   assert(TotalNumFields == FinalOrdering.size() &&
225*12c85518Srobert          "Decl count has been altered after Randstruct randomization!");
226*12c85518Srobert   (void)TotalNumFields;
227*12c85518Srobert   return true;
228*12c85518Srobert }
229*12c85518Srobert 
230*12c85518Srobert } // end namespace randstruct
231*12c85518Srobert } // end namespace clang
232