    Buffer() : bufferPtr(nullptr), capacity(0) {}

    Buffer(uint64_t newCapacity) : capacity(newCapacity) {
        this->bufferPtr = static_cast<Byte*>(
            Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
    }

    Buffer(const Buffer& other) : bufferPtr(nullptr), capacity(other.capacity) {
        bufferPtr = static_cast<Byte*>(
            Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
        memcpy(bufferPtr, other.bufferPtr, capacity);
    }

    Buffer(Buffer&& other) noexcept : bufferPtr(other.bufferPtr), capacity(other.capacity) {
        other.bufferPtr = nullptr;
        other.capacity = 0;
    }

    virtual ~Buffer() {
        if (bufferPtr != nullptr) {
            Grindstone::Memory::AllocatorCore::Free(bufferPtr);
            bufferPtr = nullptr;
        }
    }

    void ZeroInitialize() {
        if (bufferPtr != nullptr) {
            memset(bufferPtr, 0, capacity);
        }
    }

    [[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) {
        Byte* targetPtr = bufferPtr + segmentOffset;
        if (targetPtr < bufferPtr) {
            GS_ASSERT_ENGINE("Start of span is before start of buffer.");
            return Grindstone::Containers::BufferSpan();
        }

        if (targetPtr + segmentSize > bufferPtr + capacity) {
            GS_ASSERT_ENGINE("End of span is after end of buffer.");
            return Grindstone::Containers::BufferSpan();
        }

        return { targetPtr, segmentSize };
    }

    [[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan() {
        return { bufferPtr, capacity };
    }

    // The declaration of this templated overload is not included in this excerpt;
    // a C++20 std::span<T> return type is assumed below as a stand-in to match the body.
    template<typename T>
    [[nodiscard]] std::span<T> GetSpan(uint64_t offset, uint64_t count) {
        Byte* bytePtr = bufferPtr + offset;
        T* targetPtr = reinterpret_cast<T*>(bytePtr);
        if (bytePtr < bufferPtr) {
            GS_ASSERT_ENGINE("Start of span is before start of buffer.");
            return {};
        }

        if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
            GS_ASSERT_ENGINE("End of span is after end of buffer.");
            return {};
        }

        return { targetPtr, count };
    }

    template<typename T>
    [[nodiscard]] T* Get(uint64_t offset) {
        Byte* bytePtr = bufferPtr + offset;
        T* targetPtr = reinterpret_cast<T*>(bytePtr);
        if (bytePtr < bufferPtr) {
            GS_ASSERT_ENGINE("Start of span is before start of buffer.");
            return nullptr;
        }

        if (bytePtr + sizeof(T) > bufferPtr + capacity) {
            GS_ASSERT_ENGINE("End of span is after end of buffer.");
            return nullptr;
        }

        return targetPtr;
    }

    template<typename T>
    [[nodiscard]] const T* Get(uint64_t offset) const {
        const Byte* bytePtr = bufferPtr + offset;
        const T* targetPtr = reinterpret_cast<const T*>(bytePtr);
        if (bytePtr < bufferPtr) {
            GS_ASSERT_ENGINE("Start of span is before start of buffer.");
            return nullptr;
        }

        if (bytePtr + sizeof(T) > bufferPtr + capacity) {
            GS_ASSERT_ENGINE("End of span is after end of buffer.");
            return nullptr;
        }

        return targetPtr;
    }

    [[nodiscard]] Byte* Get(uint64_t offset) {
        Byte* bytePtr = bufferPtr + offset;
        if (bytePtr < bufferPtr) {
            GS_ASSERT_ENGINE("Offset is before start of buffer.");
            return nullptr;
        }

        if (bytePtr >= bufferPtr + capacity) {
            GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
            return nullptr;
        }

        return bytePtr;
    }

    [[nodiscard]] const Byte* Get(uint64_t offset) const {
        const Byte* bytePtr = bufferPtr + offset;
        if (bytePtr < bufferPtr) {
            GS_ASSERT_ENGINE("Offset is before start of buffer.");
            return nullptr;
        }

        if (bytePtr >= bufferPtr + capacity) {
            GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
            return nullptr;
        }

        return bytePtr;
    }

    Buffer& operator=(const Buffer& other) {
        if (this == &other) {
            return *this;
        }

        // Release any existing allocation before taking the copy.
        if (bufferPtr != nullptr) {
            Grindstone::Memory::AllocatorCore::Free(bufferPtr);
        }

        capacity = other.capacity;
        bufferPtr = static_cast<Byte*>(
            Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
        memcpy(bufferPtr, other.bufferPtr, capacity);
        return *this;
    }

    Buffer& operator=(Buffer&& other) noexcept {
        if (this == &other) {
            return *this;
        }

        if (bufferPtr != nullptr) {
            Grindstone::Memory::AllocatorCore::Free(bufferPtr);
        }

        bufferPtr = other.bufferPtr;
        capacity = other.capacity;

        other.bufferPtr = nullptr;
        other.capacity = 0;
        return *this;
    }

    [[nodiscard]] Byte& operator[](int index) {
        return bufferPtr[index];
    }

    [[nodiscard]] Byte operator[](int index) const {
        return bufferPtr[index];
    }

    explicit operator bool() const {
        return bufferPtr != nullptr;
    }

    // The name of this member is not shown in this excerpt; a release-style "Clear" method is assumed.
    void Clear() {
        if (bufferPtr != nullptr) {
            Grindstone::Memory::AllocatorCore::Free(bufferPtr);
            bufferPtr = nullptr;
            capacity = 0;
        }
    }

    [[nodiscard]] Byte* Get() {
        return bufferPtr;
    }

    [[nodiscard]] const Byte* Get() const {
        return bufferPtr;
    }

    [[nodiscard]] uint64_t GetCapacity() const {
        return capacity;
    }

    [[nodiscard]] static Buffer MakeCopiedBuffer(void* srcBufferPtr, const uint64_t capacity) {
        Byte* bufferPtr = static_cast<Byte*>(
            Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
        memcpy(bufferPtr, srcBufferPtr, capacity);
        return Buffer(bufferPtr, capacity);
    }

    [[nodiscard]] static Buffer MakeMovedBuffer(void* srcBufferPtr, const uint64_t capacity) {
        Byte* bufferPtr = static_cast<Byte*>(srcBufferPtr);
        return Buffer(bufferPtr, capacity);
    }

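    // Ownership note (follows from the destructor above, which releases bufferPtr through
    // AllocatorCore::Free): MakeCopiedBuffer copies srcBufferPtr into a fresh allocation and
    // leaves the source untouched, while MakeMovedBuffer adopts srcBufferPtr directly, so a
    // pointer handed to MakeMovedBuffer should itself have come from AllocatorCore. A minimal
    // sketch, assuming the allocator is already initialized (the "Example" tag is illustrative):
    //
    //     void* raw = Grindstone::Memory::AllocatorCore::AllocateRaw(128, alignof(Buffer), "Example");
    //     Buffer adopted = Buffer::MakeMovedBuffer(raw, 128);  // adopted now owns raw and frees it
    //     Buffer copied = Buffer::MakeCopiedBuffer(raw, 128);  // independent copy; raw is untouched
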
protected:
    Buffer(void* bufferPtr, const uint64_t capacity) :
        bufferPtr(static_cast<Byte*>(bufferPtr)), capacity(capacity) {}

    Byte* bufferPtr = nullptr;
    uint64_t capacity = 0;
};
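
// Usage sketch (illustrative, not part of the original header), assuming the engine's
// AllocatorCore is initialized: a Buffer owns a raw allocation of `capacity` bytes, offers
// bounds-checked access through Get/Get<T>/GetSpan, and transfers ownership on move.
//
//     Buffer vertexData(256);                        // allocates 256 bytes via AllocatorCore
//     vertexData.ZeroInitialize();
//     float* firstFloat = vertexData.Get<float>(0);  // typed, bounds-checked access
//     Grindstone::Containers::BufferSpan view = vertexData.GetSpan(64, 32);
//     Buffer stolen = std::move(vertexData);         // vertexData is left empty afterwards
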
class ResizableBuffer : public Buffer {
public:
    ResizableBuffer() : Buffer(), currentPtr(nullptr), size(0) {}

    ResizableBuffer(uint64_t capacity) : Buffer() {
        bufferPtr = static_cast<Byte*>(
            Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
        currentPtr = bufferPtr;
        this->capacity = capacity;
    }

    virtual ~ResizableBuffer() {
        currentPtr = nullptr;
    }

    ResizableBuffer(const ResizableBuffer& other) {
        capacity = other.capacity;
        size = other.size;

        bufferPtr = static_cast<Byte*>(
            Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
        // Point the write cursor into this buffer's own allocation, not into other's.
        currentPtr = bufferPtr + size;

        memcpy(bufferPtr, other.bufferPtr, size);
    }

    ResizableBuffer(ResizableBuffer&& other) noexcept {
        bufferPtr = other.bufferPtr;
        currentPtr = other.currentPtr;
        capacity = other.capacity;
        size = other.size;

        other.bufferPtr = nullptr;
        other.currentPtr = nullptr;
        other.capacity = 0;
        other.size = 0;
    }

    virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) override {
        Byte* targetPtr = bufferPtr + segmentOffset;
        if (targetPtr < bufferPtr) {
            GS_ASSERT_ENGINE("Start of span is before start of buffer.");
            return Grindstone::Containers::BufferSpan();
        }

        if (targetPtr + segmentSize > bufferPtr + size) {
            GS_ASSERT_ENGINE("End of span is after end of used buffer.");
            return Grindstone::Containers::BufferSpan();
        }

        return Grindstone::Containers::BufferSpan(targetPtr, segmentSize);
    }

    // As above, a C++20 std::span<T> return type is assumed for this templated overload (declaration not in this excerpt).
    template<typename T>
    [[nodiscard]] std::span<T> GetSpan(uint64_t offset, uint64_t count) {
        Byte* bytePtr = bufferPtr + offset;
        T* targetPtr = reinterpret_cast<T*>(bytePtr);
        if (bytePtr < bufferPtr) {
            GS_ASSERT_ENGINE("Start of span is before start of buffer.");
            return {};
        }

        if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
            GS_ASSERT_ENGINE("End of span is after end of buffer.");
            return {};
        }

        return { targetPtr, count };
    }

    void* AddToBuffer(const void* srcPtr, uint64_t srcSize) {
        if (srcPtr == nullptr) {
            GS_ASSERT_ENGINE("Source memory is nullptr.");
            return nullptr;
        }

        uint64_t spaceLeft = GetSpaceLeft();
        if (srcSize > spaceLeft) {
            GS_ASSERT_ENGINE("Not enough space left in the buffer to fit the source memory.");
            return nullptr;
        }

        memcpy(currentPtr, srcPtr, srcSize);
        Byte* prevPtr = currentPtr;
        currentPtr += srcSize;
        size += srcSize;
        return prevPtr;
    }

    uint64_t GetSpaceLeft() const {
        return capacity - size;
    }

    uint64_t GetUsedSize() const {
        return size;
    }

private:
    Byte* currentPtr = nullptr;
    uint64_t size = 0;
};
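
// Usage sketch (illustrative): ResizableBuffer layers a write cursor (currentPtr) and a
// used-byte count (size) on top of Buffer's fixed capacity; AddToBuffer appends a copy of
// the source bytes and returns the address at which they were written.
//
//     ResizableBuffer scratch(1024);                  // capacity 1024 bytes, 0 bytes used
//     uint32_t magic = 0x47535442;
//     void* writtenAt = scratch.AddToBuffer(&magic, sizeof(magic));
//     uint64_t remaining = scratch.GetSpaceLeft();    // 1024 - sizeof(magic)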