Grindstone Game Engine v0.2.0
An open source game engine and toolkit.
Buffer.hpp
#pragma once

#include <cstdint>
#include <cstring>

#include <EngineCore/Utils/MemoryAllocator.hpp>

#include "Assert.hpp"
#include "Containers/Span.hpp"

namespace Grindstone {
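	// Fixed-capacity block of raw bytes owned through the engine's AllocatorCore.
	// Provides bounds-checked typed accessors and span views over the allocation.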
	class Buffer {
	public:
		Buffer() : bufferPtr(nullptr), capacity(0) {}
		Buffer(uint64_t newCapacity) : capacity(newCapacity) {
			this->bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
		}

		// Copy-Constructor
		Buffer(const Buffer& other) : bufferPtr(nullptr), capacity(other.capacity) {
			if (other.bufferPtr != nullptr) {
				bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
				memcpy(bufferPtr, other.bufferPtr, capacity);
			}
		}

		// Move-Constructor
		Buffer(Buffer&& other) noexcept : bufferPtr(other.bufferPtr), capacity(other.capacity) {
			other.bufferPtr = nullptr;
			other.capacity = 0;
		}

		// Virtual so derived buffers (e.g. ResizableBuffer) destruct correctly through a Buffer pointer.
		virtual ~Buffer() {
			if (bufferPtr != nullptr) {
				Grindstone::Memory::AllocatorCore::Free(bufferPtr);
				bufferPtr = nullptr;
			}
			capacity = 0;
		}

		void ZeroInitialize() {
			if (bufferPtr) {
				memset(bufferPtr, 0, capacity);
			}
		}

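		// Bounds-checked span accessors. If the requested range falls outside the
		// allocation, an engine assert fires and an empty span is returned.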
		[[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) {
			Byte* targetPtr = bufferPtr + segmentOffset;
			if (targetPtr < bufferPtr) {
				GS_ASSERT_ENGINE("Start of span is before start of buffer.");
				return {};
			}

			if (targetPtr + segmentSize > bufferPtr + capacity) {
				GS_ASSERT_ENGINE("End of span is after end of buffer.");
				return {};
			}

			return { targetPtr, segmentSize };
		}

		[[nodiscard]] virtual Grindstone::Containers::BufferSpan GetSpan() {
			return { bufferPtr, capacity };
		}

		template<typename T>
		[[nodiscard]] Grindstone::Containers::Span<T> GetSpan(uint64_t offset, uint64_t count) {
			Byte* bytePtr = bufferPtr + offset;
			T* targetPtr = reinterpret_cast<T*>(bytePtr);
			if (bytePtr < bufferPtr) {
				GS_ASSERT_ENGINE("Start of span is before start of buffer.");
				return {};
			}

			if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
				GS_ASSERT_ENGINE("End of span is after end of buffer.");
				return {};
			}

			return { targetPtr, count };
		}

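		// Typed accessors: reinterpret the bytes at `offset` as a T. Return nullptr
		// when the object would not fit inside the allocation.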
		template<typename T>
		[[nodiscard]] T* Get(uint64_t offset) {
			Byte* bytePtr = bufferPtr + offset;
			T* targetPtr = reinterpret_cast<T*>(bytePtr);
			if (bytePtr < bufferPtr) {
				GS_ASSERT_ENGINE("Start of span is before start of buffer.");
				return nullptr;
			}

			if (bytePtr + sizeof(T) > bufferPtr + capacity) {
				GS_ASSERT_ENGINE("End of span is after end of buffer.");
				return nullptr;
			}

			return targetPtr;
		}

		template<typename T>
		[[nodiscard]] const T* Get(uint64_t offset) const {
			Byte* bytePtr = bufferPtr + offset;
			const T* targetPtr = reinterpret_cast<const T*>(bytePtr);
			if (bytePtr < bufferPtr) {
				GS_ASSERT_ENGINE("Start of span is before start of buffer.");
				return nullptr;
			}

			if (bytePtr + sizeof(T) > bufferPtr + capacity) {
				GS_ASSERT_ENGINE("End of span is after end of buffer.");
				return nullptr;
			}

			return targetPtr;
		}

		[[nodiscard]] Byte* Get(uint64_t offset) {
			Byte* bytePtr = bufferPtr + offset;
			if (bytePtr < bufferPtr) {
				GS_ASSERT_ENGINE("Offset is before start of buffer.");
				return nullptr;
			}

			if (bytePtr >= bufferPtr + capacity) {
				GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
				return nullptr;
			}

			return bytePtr;
		}

		[[nodiscard]] const Byte* Get(uint64_t offset) const {
			const Byte* bytePtr = bufferPtr + offset;
			if (bytePtr < bufferPtr) {
				GS_ASSERT_ENGINE("Offset is before start of buffer.");
				return nullptr;
			}

			if (bytePtr >= bufferPtr + capacity) {
				GS_ASSERT_ENGINE("Offset is at or after end of buffer.");
				return nullptr;
			}

			return bytePtr;
		}

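		// Copy assignment releases the existing allocation before duplicating the
		// other buffer's bytes.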
		Buffer& operator=(const Buffer& other) {
			if (this == &other) {
				return *this;
			}

			// Free the previous allocation so it is not leaked by the copy.
			if (bufferPtr != nullptr) {
				Grindstone::Memory::AllocatorCore::Free(bufferPtr);
				bufferPtr = nullptr;
			}

			capacity = other.capacity;
			if (other.bufferPtr != nullptr) {
				bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
				memcpy(bufferPtr, other.bufferPtr, capacity);
			}
			return *this;
		}

		Buffer& operator=(Buffer&& other) noexcept {
			if (this == &other) {
				return *this;
			}

			if (bufferPtr) {
				Grindstone::Memory::AllocatorCore::Free(bufferPtr);
			}

			bufferPtr = other.bufferPtr;
			capacity = other.capacity;

			other.bufferPtr = nullptr;
			other.capacity = 0;

			return *this;
		}

		[[nodiscard]] Byte& operator[](int index) {
			return bufferPtr[index];
		}

		[[nodiscard]] Byte operator[](int index) const {
			return bufferPtr[index];
		}

		explicit operator bool() const {
			return bufferPtr != nullptr;
		}

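		// Releases the allocation and resets the capacity to zero.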
		void Clear() {
			if (bufferPtr != nullptr) {
				Grindstone::Memory::AllocatorCore::Free(bufferPtr);
				bufferPtr = nullptr;
			}

			capacity = 0;
		}

		[[nodiscard]] Byte* Get() {
			return bufferPtr;
		}

		[[nodiscard]] const Byte* Get() const {
			return bufferPtr;
		}

		[[nodiscard]] uint64_t GetCapacity() const {
			return capacity;
		}

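		// Allocates a new buffer and copies `capacity` bytes from srcBufferPtr into it.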
		[[nodiscard]] static Buffer MakeCopiedBuffer(void* srcBufferPtr, const uint64_t capacity) {
			Byte* bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
			memcpy(bufferPtr, srcBufferPtr, capacity);
			return Buffer(bufferPtr, capacity);
		}

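		// Takes ownership of srcBufferPtr without copying. The memory is expected to
		// have been allocated through AllocatorCore so the destructor can free it.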
		[[nodiscard]] static Buffer MakeMovedBuffer(void* srcBufferPtr, const uint64_t capacity) {
			Byte* bufferPtr = static_cast<Byte*>(srcBufferPtr);
			return Buffer(bufferPtr, capacity);
		}

	protected:
		Buffer(void* bufferPtr, const uint64_t capacity) : bufferPtr(static_cast<Byte*>(bufferPtr)), capacity(capacity) {}

		Byte* bufferPtr = nullptr;
		uint64_t capacity = 0;
	};

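	// Buffer that also tracks a write cursor and the number of bytes in use, so
	// data can be appended with AddToBuffer() up to the fixed capacity.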
	class ResizableBuffer : public Buffer {
	public:
		ResizableBuffer() : Buffer(), currentPtr(nullptr), size(0) {}

		ResizableBuffer(uint64_t capacity) : Buffer() {
			bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
			currentPtr = bufferPtr;
			this->capacity = capacity;
		}

		virtual ~ResizableBuffer() {
			currentPtr = nullptr;
			size = 0;
		}

		// Copy-Constructor
		ResizableBuffer(const ResizableBuffer& other) {
			capacity = other.capacity;
			size = other.size;

			bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
			// Point the write cursor into this buffer's own allocation, not the source's.
			currentPtr = bufferPtr + size;

			memcpy(bufferPtr, other.bufferPtr, size);
		}

		// Move-Constructor
		ResizableBuffer(ResizableBuffer&& other) noexcept {
			bufferPtr = other.bufferPtr;
			currentPtr = other.currentPtr;
			capacity = other.capacity;
			size = other.size;

			other.bufferPtr = nullptr;
			other.currentPtr = nullptr;
			other.capacity = 0;
			other.size = 0;
		}

		virtual Grindstone::Containers::BufferSpan GetSpan(uint64_t segmentOffset, uint64_t segmentSize) override {
			Byte* targetPtr = bufferPtr + segmentOffset;
			if (targetPtr < bufferPtr) {
				GS_ASSERT_ENGINE("Start of span is before start of buffer.");
				return Grindstone::Containers::BufferSpan();
			}

			if (targetPtr + segmentSize > bufferPtr + size) {
				GS_ASSERT_ENGINE("End of span is after end of used buffer.");
				return Grindstone::Containers::BufferSpan();
			}

			return Grindstone::Containers::BufferSpan(targetPtr, segmentSize);
		}

		template<typename T>
		Grindstone::Containers::Span<T> GetSpan(uint64_t offset, uint64_t count) {
			// Do the bounds checks on byte pointers so the comparisons are against the raw allocation.
			Byte* bytePtr = bufferPtr + offset;
			T* targetPtr = reinterpret_cast<T*>(bytePtr);
			if (bytePtr < bufferPtr) {
				GS_ASSERT_ENGINE("Start of span is before start of buffer.");
				return {};
			}

			if (bytePtr + (count * sizeof(T)) > bufferPtr + capacity) {
				GS_ASSERT_ENGINE("End of span is after end of buffer.");
				return {};
			}

			return { targetPtr, count };
		}

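		// Copies srcSize bytes to the end of the used region, advances the write
		// cursor, and returns the destination address, or nullptr if it does not fit.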
		void* AddToBuffer(const void* srcPtr, uint64_t srcSize) {
			if (srcPtr == nullptr) {
				GS_ASSERT_ENGINE("Source memory is nullptr.");
				return nullptr;
			}

			uint64_t spaceLeft = GetSpaceLeft();
			if (srcSize > spaceLeft) {
				GS_ASSERT_ENGINE("Not enough space left in the buffer to fit the source memory.");
				return nullptr;
			}

			memcpy(currentPtr, srcPtr, srcSize);
			Byte* prevPtr = currentPtr;
			currentPtr += srcSize;
			size += srcSize;
			return prevPtr;
		}

		uint64_t GetSpaceLeft() const {
			return capacity - size;
		}

		uint64_t GetUsedSize() const {
			return size;
		}

	protected:
		Byte* currentPtr = nullptr;
		uint64_t size = 0;
	};
}