Grindstone Game Engine v0.2.0
An open source game engine and toolkit.
Buffer.hpp
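Defines Grindstone::Buffer, an owning fixed-capacity byte buffer with range-checked accessors, and Grindstone::ResizableBuffer, which appends data into a preallocated block while tracking the used size.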
#pragma once

#include <cstdint>
#include <cstring>

#include <EngineCore/Utils/MemoryAllocator.hpp>

#include "Assert.hpp"
#include "Containers/Span.hpp"

namespace Grindstone {
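    // Owning, fixed-capacity byte buffer allocated through the engine's
    // AllocatorCore. Accessors assert and return an empty result when a
    // requested range falls outside the allocation.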
    class Buffer {
    public:
        Buffer() : bufferPtr(nullptr), capacity(0) {}

        explicit Buffer(uint64_t newCapacity) : capacity(newCapacity) {
            this->bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
        }

        // Copy-constructor
        Buffer(const Buffer& other) : bufferPtr(nullptr), capacity(other.capacity) {
            if (other.bufferPtr != nullptr) {
                bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
                memcpy(bufferPtr, other.bufferPtr, capacity);
            }
        }

        // Move-constructor
        Buffer(Buffer&& other) noexcept : bufferPtr(other.bufferPtr), capacity(other.capacity) {
            other.bufferPtr = nullptr;
            other.capacity = 0;
        }

        // Virtual so a derived buffer can be destroyed through a Buffer pointer.
        virtual ~Buffer() {
            if (bufferPtr != nullptr) {
                Grindstone::Memory::AllocatorCore::Free(bufferPtr);
                bufferPtr = nullptr;
            }
            capacity = 0;
        }

        void ZeroInitialize() {
            if (bufferPtr) {
                memset(bufferPtr, 0, capacity);
            }
        }

        [[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) {
            Byte* targetPtr = bufferPtr + segmentOffset;
            if (targetPtr < bufferPtr) {
                GS_ASSERT_ENGINE("Start of span is before start of buffer.");
                return {};
            }

            if (targetPtr + segmentSize > bufferPtr + capacity) {
                GS_ASSERT_ENGINE("End of span is after end of buffer.");
                return {};
            }

            return { targetPtr, segmentSize };
        }

        template<typename T>
        [[nodiscard]] Grindstone::Containers::Span<T> GetSpan(uint64_t offset, uint64_t count) {
            Byte* bytePtr = bufferPtr + offset;
            T* targetPtr = reinterpret_cast<T*>(bytePtr);
            if (bytePtr < bufferPtr) {
                GS_ASSERT_ENGINE("Start of span is before start of buffer.");
                return {};
            }

            if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
                GS_ASSERT_ENGINE("End of span is after end of buffer.");
                return {};
            }

            return { targetPtr, count };
        }

        template<typename T>
        [[nodiscard]] T* Get(uint64_t offset) {
            Byte* bytePtr = bufferPtr + offset;
            T* targetPtr = reinterpret_cast<T*>(bytePtr);
            if (bytePtr < bufferPtr) {
                GS_ASSERT_ENGINE("Start of span is before start of buffer.");
                return nullptr;
            }

            if (bytePtr + sizeof(T) > bufferPtr + capacity) {
                GS_ASSERT_ENGINE("End of span is after end of buffer.");
                return nullptr;
            }

            return targetPtr;
        }

        template<typename T>
        [[nodiscard]] const T* Get(uint64_t offset) const {
            const Byte* bytePtr = bufferPtr + offset;
            const T* targetPtr = reinterpret_cast<const T*>(bytePtr);
            if (bytePtr < bufferPtr) {
                GS_ASSERT_ENGINE("Start of span is before start of buffer.");
                return nullptr;
            }

            if (bytePtr + sizeof(T) > bufferPtr + capacity) {
                GS_ASSERT_ENGINE("End of span is after end of buffer.");
                return nullptr;
            }

            return targetPtr;
        }

        [[nodiscard]] Byte* Get(uint64_t offset) {
            Byte* bytePtr = bufferPtr + offset;
            if (bytePtr < bufferPtr) {
                GS_ASSERT_ENGINE("Offset is before start of buffer.");
                return nullptr;
            }

            if (bytePtr >= bufferPtr + capacity) {
                GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
                return nullptr;
            }

            return bytePtr;
        }

        [[nodiscard]] const Byte* Get(uint64_t offset) const {
            const Byte* bytePtr = bufferPtr + offset;
            if (bytePtr < bufferPtr) {
                GS_ASSERT_ENGINE("Offset is before start of buffer.");
                return nullptr;
            }

            if (bytePtr >= bufferPtr + capacity) {
                GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
                return nullptr;
            }

            return bytePtr;
        }

        Buffer& operator=(const Buffer& other) {
            if (this == &other) {
                return *this;
            }

            // Free any existing allocation first so the old block is not leaked.
            if (bufferPtr != nullptr) {
                Grindstone::Memory::AllocatorCore::Free(bufferPtr);
                bufferPtr = nullptr;
            }

            capacity = other.capacity;
            if (other.bufferPtr != nullptr) {
                bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
                memcpy(bufferPtr, other.bufferPtr, capacity);
            }
            return *this;
        }

        Buffer& operator=(Buffer&& other) noexcept {
            if (this == &other) {
                return *this;
            }

            if (bufferPtr) {
                Grindstone::Memory::AllocatorCore::Free(bufferPtr);
            }

            bufferPtr = other.bufferPtr;
            capacity = other.capacity;

            other.bufferPtr = nullptr;
            other.capacity = 0;

            return *this;
        }

        [[nodiscard]] Byte& operator[](uint64_t index) {
            return bufferPtr[index];
        }

        [[nodiscard]] Byte operator[](uint64_t index) const {
            return bufferPtr[index];
        }

        explicit operator bool() const {
            return bufferPtr != nullptr;
        }

        void Clear() {
            if (bufferPtr != nullptr) {
                Grindstone::Memory::AllocatorCore::Free(bufferPtr);
                bufferPtr = nullptr;
            }

            capacity = 0;
        }

        [[nodiscard]] Byte* Get() {
            return bufferPtr;
        }

        [[nodiscard]] const Byte* Get() const {
            return bufferPtr;
        }

        [[nodiscard]] uint64_t GetCapacity() const {
            return capacity;
        }

        [[nodiscard]] static Buffer MakeCopiedBuffer(const void* srcBufferPtr, const uint64_t capacity) {
            Byte* bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
            memcpy(bufferPtr, srcBufferPtr, capacity);
            return Buffer(bufferPtr, capacity);
        }

        // Takes ownership of srcBufferPtr, which must have been allocated through AllocatorCore.
        [[nodiscard]] static Buffer MakeMovedBuffer(void* srcBufferPtr, const uint64_t capacity) {
            Byte* bufferPtr = static_cast<Byte*>(srcBufferPtr);
            return Buffer(bufferPtr, capacity);
        }

    protected:
        Buffer(void* bufferPtr, const uint64_t capacity) : bufferPtr(static_cast<Byte*>(bufferPtr)), capacity(capacity) {}

        Byte* bufferPtr = nullptr;
        uint64_t capacity = 0;
    };

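    // Buffer that is filled incrementally: the capacity is fixed at
    // construction, and AddToBuffer appends data while tracking the used size.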
    class ResizableBuffer : public Buffer {
    public:
        ResizableBuffer() : Buffer(), currentPtr(nullptr), size(0) {}

        explicit ResizableBuffer(uint64_t capacity) : Buffer() {
            bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
            currentPtr = bufferPtr;
            this->capacity = capacity;
        }

        virtual ~ResizableBuffer() {
            currentPtr = nullptr;
            size = 0;
        }

        // Copy-constructor
        ResizableBuffer(const ResizableBuffer& other) {
            capacity = other.capacity;
            size = other.size;

            bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
            // Point at the same write offset within this allocation, not into the source buffer.
            currentPtr = bufferPtr + size;

            memcpy(bufferPtr, other.bufferPtr, size);
        }

        // Move-constructor
        ResizableBuffer(ResizableBuffer&& other) noexcept {
            bufferPtr = other.bufferPtr;
            currentPtr = other.currentPtr;
            capacity = other.capacity;
            size = other.size;

            other.bufferPtr = nullptr;
            other.currentPtr = nullptr;
            other.capacity = 0;
            other.size = 0;
        }

        [[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) override {
            Byte* targetPtr = bufferPtr + segmentOffset;
            if (targetPtr < bufferPtr) {
                GS_ASSERT_ENGINE("Start of span is before start of buffer.");
                return Grindstone::Containers::BufferSpan();
            }

            if (targetPtr + segmentSize > bufferPtr + size) {
                GS_ASSERT_ENGINE("End of span is after end of used buffer.");
                return Grindstone::Containers::BufferSpan();
            }

            return Grindstone::Containers::BufferSpan(targetPtr, segmentSize);
        }

        template<typename T>
        [[nodiscard]] Grindstone::Containers::Span<T> GetSpan(uint64_t offset, uint64_t count) {
            // Do the range checks in bytes; comparing T* against Byte* would not compile,
            // and count is in elements, not bytes.
            Byte* bytePtr = bufferPtr + offset;
            T* targetPtr = reinterpret_cast<T*>(bytePtr);
            if (bytePtr < bufferPtr) {
                GS_ASSERT_ENGINE("Start of span is before start of buffer.");
                return {};
            }

            if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
                GS_ASSERT_ENGINE("End of span is after end of buffer.");
                return {};
            }

            return { targetPtr, count };
        }

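        // Appends srcSize bytes at the current write position and returns the
        // address they were written to, or nullptr if they do not fit.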
        void* AddToBuffer(const void* srcPtr, uint64_t srcSize) {
            if (srcPtr == nullptr) {
                GS_ASSERT_ENGINE("Source memory is nullptr.");
                return nullptr;
            }

            uint64_t spaceLeft = GetSpaceLeft();
            if (srcSize > spaceLeft) {
                GS_ASSERT_ENGINE("Source memory is too large to fit in the remaining space.");
                return nullptr;
            }

            memcpy(currentPtr, srcPtr, srcSize);
            Byte* prevPtr = currentPtr;
            currentPtr += srcSize;
            size += srcSize;
            return prevPtr;
        }

        uint64_t GetSpaceLeft() const {
            return capacity - size;
        }

        uint64_t GetUsedSize() const {
            return size;
        }

    protected:
        Byte* currentPtr = nullptr;
        uint64_t size = 0;
    };
}
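For orientation, a minimal usage sketch follows (it is not part of the header). It assumes the engine's memory allocator has already been initialized, since both classes allocate through AllocatorCore; the function name, sizes, and values are illustrative only.

    #include <cstdint>

    #include "Buffer.hpp"

    void BufferUsageExample() {
        using namespace Grindstone;

        // Fixed-capacity buffer: allocate 256 bytes, zero them, then take
        // range-checked typed views into the storage.
        Buffer staging(256);
        staging.ZeroInitialize();
        uint32_t* firstWord = staging.Get<uint32_t>(0);
        Containers::Span<uint32_t> words = staging.GetSpan<uint32_t>(0, 8);

        // ResizableBuffer: append into a preallocated capacity and track usage.
        ResizableBuffer log(256);
        const uint32_t record = 42;
        void* writtenAt = log.AddToBuffer(&record, sizeof(record));
        uint64_t used = log.GetUsedSize();   // 4 bytes written so far
        uint64_t left = log.GetSpaceLeft();  // 252 bytes remaining
    }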