	Buffer() : bufferPtr(nullptr), capacity(0) {}

	Buffer(uint64_t newCapacity) : capacity(newCapacity) {
		this->bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
	}

	Buffer(const Buffer& other) : bufferPtr(nullptr), capacity(other.capacity) {
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
		memcpy(bufferPtr, other.bufferPtr, capacity);
	}

	Buffer(Buffer&& other) noexcept : bufferPtr(other.bufferPtr), capacity(other.capacity) {
		other.bufferPtr = nullptr;
		other.capacity = 0;
	}

	virtual ~Buffer() {
		if (bufferPtr != nullptr) {
			Grindstone::Memory::AllocatorCore::Free(bufferPtr);
		}
	}

	void ZeroInitialize() {
		if (bufferPtr != nullptr) {
			memset(bufferPtr, 0, capacity);
		}
	}

	[[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) {
		Byte* targetPtr = bufferPtr + segmentOffset;
		if (targetPtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of span is before start of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		if (targetPtr + segmentSize > bufferPtr + capacity) {
			GS_ASSERT_ENGINE("End of span is after end of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		return { targetPtr, segmentSize };
	}

	// The declaration of this templated overload is not preserved; the signature below
	// (return type and parameter names) is an assumption reconstructed from the body.
	template<typename T>
	[[nodiscard]] Grindstone::Containers::BufferSpan GetSpan(uint64_t offset, uint64_t count) {
		Byte* bytePtr = bufferPtr + offset;
		T* targetPtr = reinterpret_cast<T*>(bytePtr);
		if (bytePtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of span is before start of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
			GS_ASSERT_ENGINE("End of span is after end of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		return { targetPtr, count };
	}

	template<typename T>
	[[nodiscard]] T* Get(uint64_t offset) {
		Byte* bytePtr = bufferPtr + offset;
		T* targetPtr = reinterpret_cast<T*>(bytePtr);
		if (bytePtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of span is before start of buffer.");
			return nullptr;
		}

		if (bytePtr + sizeof(T) > bufferPtr + capacity) {
			GS_ASSERT_ENGINE("End of span is after end of buffer.");
			return nullptr;
		}

		return targetPtr;
	}

	template<typename T>
	[[nodiscard]] const T* Get(uint64_t offset) const {
		Byte* bytePtr = bufferPtr + offset;
		T* targetPtr = reinterpret_cast<T*>(bytePtr);
		if (bytePtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of span is before start of buffer.");
			return nullptr;
		}

		if (bytePtr + sizeof(T) > bufferPtr + capacity) {
			GS_ASSERT_ENGINE("End of span is after end of buffer.");
			return nullptr;
		}

		return targetPtr;
	}

	[[nodiscard]] Byte* Get(uint64_t offset) {
		Byte* bytePtr = bufferPtr + offset;
		if (bytePtr < bufferPtr) {
			GS_ASSERT_ENGINE("Offset is before start of buffer.");
			return nullptr;
		}

		if (bytePtr >= bufferPtr + capacity) {
			GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
			return nullptr;
		}

		return bytePtr;
	}

	[[nodiscard]] Byte* Get(uint64_t offset) const {
		Byte* bytePtr = bufferPtr + offset;
		if (bytePtr < bufferPtr) {
			GS_ASSERT_ENGINE("Offset is before start of buffer.");
			return nullptr;
		}

		if (bytePtr >= bufferPtr + capacity) {
			GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
			return nullptr;
		}

		return bytePtr;
	}

	Buffer& operator=(const Buffer& other) {
		if (this == &other) {
			return *this;
		}

		if (bufferPtr != nullptr) {
			Grindstone::Memory::AllocatorCore::Free(bufferPtr);
		}

		capacity = other.capacity;
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
		memcpy(bufferPtr, other.bufferPtr, capacity);
		return *this;
	}

	Buffer& operator=(Buffer&& other) noexcept {
		if (this == &other) {
			return *this;
		}

		if (bufferPtr != nullptr) {
			Grindstone::Memory::AllocatorCore::Free(bufferPtr);
		}

		bufferPtr = other.bufferPtr;
		capacity = other.capacity;

		other.bufferPtr = nullptr;
		other.capacity = 0;
		return *this;
	}

	[[nodiscard]] Byte& operator[](int index) {
		return bufferPtr[index];
	}

	[[nodiscard]] Byte operator[](int index) const {
		return bufferPtr[index];
	}

	explicit operator bool() const {
		return bufferPtr != nullptr;
	}

	// Name assumed: the declaring line for this member is not preserved. Releases the
	// owned allocation and resets the buffer to an empty state.
	void Clear() {
		if (bufferPtr != nullptr) {
			Grindstone::Memory::AllocatorCore::Free(bufferPtr);
			bufferPtr = nullptr;
		}

		capacity = 0;
	}

	[[nodiscard]] Byte* Get() {
		return bufferPtr;
	}

	[[nodiscard]] const Byte* Get() const {
		return bufferPtr;
	}

	[[nodiscard]] uint64_t GetCapacity() const {
		return capacity;
	}
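
	// Both factory helpers hand ownership of the resulting allocation to the returned
	// Buffer: MakeCopiedBuffer duplicates the source bytes into a fresh allocation,
	// while MakeMovedBuffer adopts srcBufferPtr directly, so that pointer should come
	// from AllocatorCore, since ~Buffer releases it through AllocatorCore::Free.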
	[[nodiscard]] static Buffer MakeCopiedBuffer(void* srcBufferPtr, const uint64_t capacity) {
		Byte* bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
		memcpy(bufferPtr, srcBufferPtr, capacity);
		return Buffer(bufferPtr, capacity);
	}

	[[nodiscard]] static Buffer MakeMovedBuffer(void* srcBufferPtr, const uint64_t capacity) {
		Byte* bufferPtr = static_cast<Byte*>(srcBufferPtr);
		return Buffer(bufferPtr, capacity);
	}

protected:
	// Access level assumed; ResizableBuffer below writes bufferPtr and capacity directly.
	Buffer(void* bufferPtr, const uint64_t capacity) : bufferPtr(static_cast<Byte*>(bufferPtr)), capacity(capacity) {}

	Byte* bufferPtr = nullptr;
	uint64_t capacity = 0;
};
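
// ResizableBuffer keeps the fixed storage of Buffer and adds a write cursor (currentPtr)
// and a count of the bytes written so far (size), so data can be appended incrementally
// with AddToBuffer and the remaining room queried with GetSpaceLeft.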
class ResizableBuffer : public Buffer {
public:
	ResizableBuffer() : Buffer(), currentPtr(nullptr), size(0) {}

	ResizableBuffer(uint64_t capacity) : Buffer() {
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
		currentPtr = bufferPtr;
		this->capacity = capacity;
	}

	virtual ~ResizableBuffer() {
		currentPtr = nullptr;
		size = 0;
	}

	ResizableBuffer(const ResizableBuffer& other) {
		capacity = other.capacity;
		size = other.size;
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
		memcpy(bufferPtr, other.bufferPtr, size);
		// Point the write cursor into this object's own allocation, not the source's.
		currentPtr = bufferPtr + size;
	}

	ResizableBuffer(ResizableBuffer&& other) noexcept {
		bufferPtr = other.bufferPtr;
		currentPtr = other.currentPtr;
		capacity = other.capacity;
		size = other.size;

		other.bufferPtr = nullptr;
		other.currentPtr = nullptr;
		other.capacity = 0;
		other.size = 0;
	}

	virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) override {
		Byte* targetPtr = bufferPtr + segmentOffset;
		if (targetPtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of span is before start of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		if (targetPtr + segmentSize > bufferPtr + size) {
			GS_ASSERT_ENGINE("End of span is after end of used buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		return Grindstone::Containers::BufferSpan(targetPtr, segmentSize);
	}

	// The declaration of this templated overload is not preserved; the signature below
	// (return type and parameter names) is an assumption reconstructed from the body.
	template<typename T>
	[[nodiscard]] Grindstone::Containers::BufferSpan GetSpan(uint64_t offset, uint64_t count) {
		Byte* bytePtr = bufferPtr + offset;
		T* targetPtr = reinterpret_cast<T*>(bytePtr);
		if (bytePtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of span is before start of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
			GS_ASSERT_ENGINE("End of span is after end of buffer.");
			return Grindstone::Containers::BufferSpan();
		}

		return { targetPtr, count };
	}

	void* AddToBuffer(const void* srcPtr, uint64_t srcSize) {
		if (srcPtr == nullptr) {
			GS_ASSERT_ENGINE("Source memory is nullptr.");
			return nullptr;
		}

		uint64_t spaceLeft = GetSpaceLeft();
		if (srcSize > spaceLeft) {
			GS_ASSERT_ENGINE("Not enough space left in the buffer to fit the source memory.");
			return nullptr;
		}

		memcpy(currentPtr, srcPtr, srcSize);
		Byte* prevPtr = currentPtr;
		currentPtr += srcSize;
		size += srcSize;
		return prevPtr;
	}
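
	// Illustrative usage (assumes the allocator has been initialized before any buffer
	// is created):
	//
	//     ResizableBuffer scratch(256);
	//     uint32_t header = 0xAABBCCDDu;
	//     float payload[3] = { 1.0f, 2.0f, 3.0f };
	//     void* headerPtr = scratch.AddToBuffer(&header, sizeof(header));
	//     void* payloadPtr = scratch.AddToBuffer(payload, sizeof(payload));
	//     // scratch.GetUsedSize() == sizeof(header) + sizeof(payload)
	//     // scratch.GetSpaceLeft() == 256 - scratch.GetUsedSize()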

	uint64_t GetSpaceLeft() const {
		return capacity - size;
	}

	uint64_t GetUsedSize() const {
		return size;
	}

private:
	Byte* currentPtr = nullptr;
	uint64_t size = 0;