xref: /freebsd-src/contrib/llvm-project/compiler-rt/lib/xray/xray_buffer_queue.cpp (revision 68d75eff68281c1b445e3010bb975eae07aac225)
//===-- xray_buffer_queue.cpp ----------------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of XRay, a dynamic runtime instrumentation system.
//
// Defines the interface for a buffer queue implementation.
//
//===----------------------------------------------------------------------===//
#include "xray_buffer_queue.h"
#include "sanitizer_common/sanitizer_atomic.h"
#include "sanitizer_common/sanitizer_common.h"
#include "sanitizer_common/sanitizer_libc.h"
#if !SANITIZER_FUCHSIA
#include "sanitizer_common/sanitizer_posix.h"
#endif
#include "xray_allocator.h"
#include "xray_defs.h"
#include <memory>
#include <sys/mman.h>

using namespace __xray;

namespace {

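// Allocate a ControlBlock followed by `Size * Count` bytes of buffer data.
// The ControlBlock is assumed to end in a one-byte `Data` member (a
// flexible-array-style tail), so we subtract that byte from its size before
// adding the payload.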
BufferQueue::ControlBlock *allocControlBlock(size_t Size, size_t Count) {
  auto B =
      allocateBuffer((sizeof(BufferQueue::ControlBlock) - 1) + (Size * Count));
  return B == nullptr ? nullptr
                      : reinterpret_cast<BufferQueue::ControlBlock *>(B);
}

void deallocControlBlock(BufferQueue::ControlBlock *C, size_t Size,
                         size_t Count) {
  deallocateBuffer(reinterpret_cast<unsigned char *>(C),
                   (sizeof(BufferQueue::ControlBlock) - 1) + (Size * Count));
}

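// Reference counting for the shared control blocks. The thread that drops the
// last reference frees the block; acq_rel ordering on the decrement makes all
// prior writes to the block visible to that freeing thread.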
void decRefCount(BufferQueue::ControlBlock *C, size_t Size, size_t Count) {
  if (C == nullptr)
    return;
  if (atomic_fetch_sub(&C->RefCount, 1, memory_order_acq_rel) == 1)
    deallocControlBlock(C, Size, Count);
}

void incRefCount(BufferQueue::ControlBlock *C) {
  if (C == nullptr)
    return;
  atomic_fetch_add(&C->RefCount, 1, memory_order_acq_rel);
}

// We use a struct to ensure that we are allocating one atomic_uint64_t per
// cache line. This allows us to not worry about false sharing among atomic
// objects being updated (constantly) by different threads.
struct ExtentsPadded {
  union {
    atomic_uint64_t Extents;
    unsigned char Storage[kCacheLineSize];
  };
};

constexpr size_t kExtentsSize = sizeof(ExtentsPadded);
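
// A sanity check one could add here (illustrative; not part of the original
// code): the union's Storage member pads the struct out to a full cache line,
// so
//
//   static_assert(sizeof(ExtentsPadded) == kCacheLineSize,
//                 "ExtentsPadded must occupy exactly one cache line");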

} // namespace

BufferQueue::ErrorCode BufferQueue::init(size_t BS, size_t BC) {
  SpinMutexLock Guard(&Mutex);

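  // A queue can only be (re)initialized while it is in the finalizing state;
  // note that the constructor starts Finalizing at 1, so a freshly constructed
  // queue passes this check.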
  if (!finalizing())
    return BufferQueue::ErrorCode::AlreadyInitialized;

  cleanupBuffers();

  bool Success = false;
  BufferSize = BS;
  BufferCount = BC;

  BackingStore = allocControlBlock(BufferSize, BufferCount);
  if (BackingStore == nullptr)
    return BufferQueue::ErrorCode::NotEnoughMemory;

  auto CleanupBackingStore = at_scope_exit([&, this] {
    if (Success)
      return;
    deallocControlBlock(BackingStore, BufferSize, BufferCount);
    BackingStore = nullptr;
  });

  // Initialize enough atomic_uint64_t instances, each padded out to a cache
  // line, to hold the extents of every buffer.
  ExtentsBackingStore = allocControlBlock(kExtentsSize, BufferCount);
  if (ExtentsBackingStore == nullptr)
    return BufferQueue::ErrorCode::NotEnoughMemory;

  auto CleanupExtentsBackingStore = at_scope_exit([&, this] {
    if (Success)
      return;
    deallocControlBlock(ExtentsBackingStore, kExtentsSize, BufferCount);
    ExtentsBackingStore = nullptr;
  });

  Buffers = initArray<BufferRep>(BufferCount);
  if (Buffers == nullptr)
    return BufferQueue::ErrorCode::NotEnoughMemory;

  // At this point we increment the generation number to associate the buffers
  // with the new generation.
  atomic_fetch_add(&Generation, 1, memory_order_acq_rel);

  // First, we initialize the refcount in the ControlBlock, which we treat as
  // being at the start of the BackingStore pointer.
  atomic_store(&BackingStore->RefCount, 1, memory_order_release);
  atomic_store(&ExtentsBackingStore->RefCount, 1, memory_order_release);

  // Then we initialize the individual buffers that sub-divide the whole
  // backing store. Each buffer's data starts at an offset from the `Data`
  // member of its ControlBlock.
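  // For example (illustrative numbers): with BufferSize = 4096 and
  // BufferCount = 4, buffer 2's data starts at &BackingStore->Data + 8192, and
  // its extents counter lives at &ExtentsBackingStore->Data + 2 * kExtentsSize.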
  for (size_t i = 0; i < BufferCount; ++i) {
    auto &T = Buffers[i];
    auto &Buf = T.Buff;
    auto *E = reinterpret_cast<ExtentsPadded *>(&ExtentsBackingStore->Data +
                                                (kExtentsSize * i));
    Buf.Extents = &E->Extents;
    atomic_store(Buf.Extents, 0, memory_order_release);
    Buf.Generation = generation();
    Buf.Data = &BackingStore->Data + (BufferSize * i);
    Buf.Size = BufferSize;
    Buf.BackingStore = BackingStore;
    Buf.ExtentsBackingStore = ExtentsBackingStore;
    Buf.Count = BufferCount;
    T.Used = false;
  }

  Next = Buffers;
  First = Buffers;
  LiveBuffers = 0;
  atomic_store(&Finalizing, 0, memory_order_release);
  Success = true;
  return BufferQueue::ErrorCode::Ok;
}

BufferQueue::BufferQueue(size_t B, size_t N,
                         bool &Success) XRAY_NEVER_INSTRUMENT
    : BufferSize(B),
      BufferCount(N),
      Mutex(),
      Finalizing{1},
      BackingStore(nullptr),
      ExtentsBackingStore(nullptr),
      Buffers(nullptr),
      Next(Buffers),
      First(Buffers),
      LiveBuffers(0),
      Generation{0} {
  Success = init(B, N) == BufferQueue::ErrorCode::Ok;
}

BufferQueue::ErrorCode BufferQueue::getBuffer(Buffer &Buf) {
  if (atomic_load(&Finalizing, memory_order_acquire))
    return ErrorCode::QueueFinalizing;

  BufferRep *B = nullptr;
  {
    SpinMutexLock Guard(&Mutex);
    if (LiveBuffers == BufferCount)
      return ErrorCode::NotEnoughMemory;
    B = Next++;
    if (Next == (Buffers + BufferCount))
      Next = Buffers;
    ++LiveBuffers;
  }

  incRefCount(BackingStore);
  incRefCount(ExtentsBackingStore);
  Buf = B->Buff;
  Buf.Generation = generation();
  B->Used = true;
  return ErrorCode::Ok;
}
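
// Typical caller pattern (an illustrative sketch; `BQ` is a hypothetical
// BufferQueue instance):
//
//   BufferQueue::Buffer Buf;
//   if (BQ.getBuffer(Buf) == BufferQueue::ErrorCode::Ok) {
//     // ... write records into Buf.Data, publishing the number of bytes
//     // written through *Buf.Extents ...
//     BQ.releaseBuffer(Buf);
//   }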

BufferQueue::ErrorCode BufferQueue::releaseBuffer(Buffer &Buf) {
  // Check whether the buffer being referred to is within the bounds of the
  // backing store's range.
  BufferRep *B = nullptr;
  {
    SpinMutexLock Guard(&Mutex);
    if (Buf.Generation != generation() || LiveBuffers == 0) {
      // Drop the stale buffer's references before clearing it; clearing first
      // would turn these decRefCount calls into no-ops on null pointers and
      // leak the old generation's backing stores.
      decRefCount(Buf.BackingStore, Buf.Size, Buf.Count);
      decRefCount(Buf.ExtentsBackingStore, kExtentsSize, Buf.Count);
      Buf = {};
      return BufferQueue::ErrorCode::Ok;
    }

    if (Buf.Data < &BackingStore->Data ||
        Buf.Data > &BackingStore->Data + (BufferCount * BufferSize))
      return BufferQueue::ErrorCode::UnrecognizedBuffer;

    --LiveBuffers;
    B = First++;
    if (First == (Buffers + BufferCount))
      First = Buffers;
  }

  // Now that the buffer has been released, copy it back into its slot and mark
  // the slot as used.
  B->Buff = Buf;
  B->Used = true;
  decRefCount(Buf.BackingStore, Buf.Size, Buf.Count);
  decRefCount(Buf.ExtentsBackingStore, kExtentsSize, Buf.Count);
  atomic_store(B->Buff.Extents, atomic_load(Buf.Extents, memory_order_acquire),
               memory_order_release);
  Buf = {};
  return ErrorCode::Ok;
}
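// Only the first caller of finalize() flips the flag: atomic_exchange returns
// the previous value, so any later caller observes 1 and gets
// ErrorCode::QueueFinalizing back.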
BufferQueue::ErrorCode BufferQueue::finalize() {
  if (atomic_exchange(&Finalizing, 1, memory_order_acq_rel))
    return ErrorCode::QueueFinalizing;
  return ErrorCode::Ok;
}

void BufferQueue::cleanupBuffers() {
  for (auto B = Buffers, E = Buffers + BufferCount; B != E; ++B)
    B->~BufferRep();
  deallocateBuffer(Buffers, BufferCount);
  decRefCount(BackingStore, BufferSize, BufferCount);
  decRefCount(ExtentsBackingStore, kExtentsSize, BufferCount);
  BackingStore = nullptr;
  ExtentsBackingStore = nullptr;
  Buffers = nullptr;
  BufferCount = 0;
  BufferSize = 0;
}

BufferQueue::~BufferQueue() { cleanupBuffers(); }