namespace {
-void decRefCount(unsigned char *ControlBlock, size_t Size, size_t Count) {
- if (ControlBlock == nullptr)
+// Allocate one contiguous region holding a ControlBlock header followed by
+// (Size * Count) bytes of buffer storage. The `- 1` discounts the one-byte
+// Data[1] placeholder that sizeof(BufferQueue::ControlBlock) already counts.
+// Returns nullptr when the underlying allocateBuffer fails.
+BufferQueue::ControlBlock *allocControlBlock(size_t Size, size_t Count) {
+ auto B =
+ allocateBuffer((sizeof(BufferQueue::ControlBlock) - 1) + (Size * Count));
+ return B == nullptr ? nullptr
+ : reinterpret_cast<BufferQueue::ControlBlock *>(B);
+}
+
+// Free a region obtained from allocControlBlock. Size and Count must match
+// the original allocation so the computed length is identical.
+void deallocControlBlock(BufferQueue::ControlBlock *C, size_t Size,
+ size_t Count) {
+ deallocateBuffer(reinterpret_cast<unsigned char *>(C),
+ (sizeof(BufferQueue::ControlBlock) - 1) + (Size * Count));
+}
+
+// Drop one reference to C; whoever brings the count to zero frees the whole
+// collocated block (header + buffer storage). nullptr is a no-op.
+void decRefCount(BufferQueue::ControlBlock *C, size_t Size, size_t Count) {
+ if (C == nullptr)
 return;
- auto *RefCount = reinterpret_cast<atomic_uint64_t *>(ControlBlock);
- if (atomic_fetch_sub(RefCount, 1, memory_order_acq_rel) == 1)
- deallocateBuffer(ControlBlock, (Size * Count) + kCacheLineSize);
+ if (atomic_fetch_sub(&C->RefCount, 1, memory_order_acq_rel) == 1)
+ deallocControlBlock(C, Size, Count)
}
-void incRefCount(unsigned char *ControlBlock) {
- if (ControlBlock == nullptr)
+// Add one reference to C. nullptr is a no-op.
+void incRefCount(BufferQueue::ControlBlock *C) {
+ if (C == nullptr)
 return;
- auto *RefCount = reinterpret_cast<atomic_uint64_t *>(ControlBlock);
- atomic_fetch_add(RefCount, 1, memory_order_acq_rel);
+ atomic_fetch_add(&C->RefCount, 1, memory_order_acq_rel);
}
} // namespace
bool Success = false;
BufferSize = BS;
BufferCount = BC;
- BackingStore = allocateBuffer((BufferSize * BufferCount) + kCacheLineSize);
+
+ BackingStore = allocControlBlock(BufferSize, BufferCount);
if (BackingStore == nullptr)
return BufferQueue::ErrorCode::NotEnoughMemory;
auto CleanupBackingStore = __sanitizer::at_scope_exit([&, this] {
if (Success)
return;
- deallocateBuffer(BackingStore, (BufferSize * BufferCount) + kCacheLineSize);
+ deallocControlBlock(BackingStore, BufferSize, BufferCount);
BackingStore = nullptr;
});
// to the new generation.
atomic_fetch_add(&Generation, 1, memory_order_acq_rel);
- Success = true;
-
- // First, we initialize the refcount in the RefCountedBackingStore, which we
- // treat as being at the start of the BackingStore pointer.
- auto ControlBlock = reinterpret_cast<atomic_uint64_t *>(BackingStore);
- atomic_store(ControlBlock, 1, memory_order_release);
+ // First, we initialize the refcount in the ControlBlock, which we treat as
+ // being at the start of the BackingStore pointer.
+ atomic_store(&BackingStore->RefCount, 1, memory_order_release);
+ // Then we initialise the individual buffers that sub-divide the whole backing
+ // store. Each buffer will start at the `Data` member of the ControlBlock, and
+ // will be offsets from these locations.
for (size_t i = 0; i < BufferCount; ++i) {
auto &T = Buffers[i];
auto &Buf = T.Buff;
atomic_store(&Buf.Extents, 0, memory_order_release);
Buf.Generation = generation();
- Buf.Data = BackingStore + kCacheLineSize + (BufferSize * i);
+ Buf.Data = &BackingStore->Data + (BufferSize * i);
Buf.Size = BufferSize;
Buf.BackingStore = BackingStore;
Buf.Count = BufferCount;
First = Buffers;
LiveBuffers = 0;
atomic_store(&Finalizing, 0, memory_order_release);
+ Success = true;
return BufferQueue::ErrorCode::Ok;
}
}
incRefCount(BackingStore);
- Buf.Data = B->Buff.Data;
+ Buf = B->Buff;
Buf.Generation = generation();
- Buf.Size = B->Buff.Size;
- Buf.BackingStore = BackingStore;
- Buf.Count = BufferCount;
B->Used = true;
return ErrorCode::Ok;
}
BufferRep *B = nullptr;
{
SpinMutexLock Guard(&Mutex);
- if (Buf.Data < BackingStore ||
- Buf.Data > reinterpret_cast<char *>(BackingStore) +
- (BufferCount * BufferSize)) {
- if (Buf.Generation != generation()) {
- decRefCount(Buf.BackingStore, Buf.Size, Buf.Count);
- Buf.Data = nullptr;
- Buf.Size = 0;
- Buf.Generation = 0;
- Buf.Count = 0;
- Buf.BackingStore = nullptr;
- return BufferQueue::ErrorCode::Ok;
- }
- return BufferQueue::ErrorCode::UnrecognizedBuffer;
- }
-
-  if (LiveBuffers == 0) {
+  if (Buf.Generation != generation() || LiveBuffers == 0) {
+    // Drop this handle's reference *before* clearing Buf: decRefCount reads
+    // Buf.BackingStore/Buf.Size/Buf.Count, so clearing first would hand it
+    // nullptr/0 (a no-op) and leak the old generation's backing store.
     decRefCount(Buf.BackingStore, Buf.Size, Buf.Count);
-    Buf.Data = nullptr;
-    Buf.Size = Buf.Size;
-    Buf.Generation = 0;
-    Buf.BackingStore = nullptr;
-    Buf.Count = 0;
-    return ErrorCode::Ok;
+    Buf = {};
+    return BufferQueue::ErrorCode::Ok;
   }
+ if (Buf.Data < &BackingStore->Data ||
+ Buf.Data > &BackingStore->Data + (BufferCount * BufferSize))
+ return BufferQueue::ErrorCode::UnrecognizedBuffer;
+
--LiveBuffers;
B = First++;
if (First == (Buffers + BufferCount))
}
// Now that the buffer has been released, we mark it as "used".
- B->Buff.Data = Buf.Data;
- B->Buff.Size = Buf.Size;
- B->Buff.Generation = Buf.Generation;
- B->Buff.BackingStore = Buf.BackingStore;
- B->Buff.Count = Buf.Count;
+ B->Buff = Buf;
B->Used = true;
decRefCount(Buf.BackingStore, Buf.Size, Buf.Count);
atomic_store(&B->Buff.Extents,
atomic_load(&Buf.Extents, memory_order_acquire),
memory_order_release);
- Buf.Data = nullptr;
- Buf.Size = 0;
- Buf.Generation = 0;
- Buf.BackingStore = nullptr;
- Buf.Count = 0;
+ Buf = {};
return ErrorCode::Ok;
}
/// trace collection.
class BufferQueue {
public:
+ /// ControlBlock represents the memory layout of how we interpret the backing
+ /// store for all buffers managed by a BufferQueue instance. The ControlBlock
+ /// has the reference count as the first member, sized according to
+ /// platform-specific cache-line size. We never use the Buffer member of the
+ /// union, which is only there for compiler-supported alignment and sizing.
+ ///
+ /// This ensures that the `Data` member will be placed at least kCacheLineSize
+ /// bytes from the beginning of the structure.
+ struct ControlBlock {
+ union {
+ atomic_uint64_t RefCount;
+ char Buffer[kCacheLineSize];
+ };
+
+ /// We need to make this size 1, to conform to the C++ rules for array data
+ /// members. Typically, we want to subtract this 1 byte for sizing
+ /// information.
+ char Data[1];
+ };
+
struct Buffer {
atomic_uint64_t Extents{0};
uint64_t Generation{0};
private:
friend class BufferQueue;
- unsigned char *BackingStore = nullptr;
+ ControlBlock *BackingStore = nullptr;
size_t Count = 0;
};
SpinMutex Mutex;
atomic_uint8_t Finalizing;
- // A pointer to a contiguous block of memory to serve as the backing store for
- // all the individual buffers handed out.
- uint8_t *BackingStore;
+ // The collocated ControlBlock and buffer storage.
+ ControlBlock *BackingStore;
// A dynamically allocated array of BufferRep instances.
BufferRep *Buffers;