	Buffer() : bufferPtr(nullptr), capacity(0) {}

	Buffer(uint64_t capacity) : capacity(capacity) {
		this->bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
	}

	Buffer(void* bufferPtr, const uint64_t capacity) : bufferPtr(static_cast<Byte*>(bufferPtr)), capacity(capacity) {}
	// Copy constructor: allocates a fresh block and copies the other buffer's contents.
	Buffer(const Buffer& other) : capacity(other.capacity) {
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
		memcpy(bufferPtr, other.bufferPtr, capacity);
	}
	// Move constructor: steals the allocation and leaves the source buffer empty.
	Buffer(Buffer&& other) noexcept : bufferPtr(other.bufferPtr), capacity(other.capacity) {
		other.bufferPtr = nullptr;
		other.capacity = 0;
	}

	~Buffer() {
		Grindstone::Memory::AllocatorCore::Free(bufferPtr);
	}
	void ZeroInitialize() {
		if (bufferPtr != nullptr) {
			memset(bufferPtr, 0, capacity);
		}
	}
	virtual BufferView GetBufferView(uint64_t segmentOffset, uint64_t segmentSize) {
		Byte* targetPtr = bufferPtr + segmentOffset;
		if (targetPtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of view is before start of buffer.")
			return {};
		}

		if (targetPtr + segmentSize > bufferPtr + capacity) {
			GS_ASSERT_ENGINE("End of view is after end of buffer.")
			return {};
		}

		return {targetPtr, segmentSize};
	}
	// Copy assignment: releases the current allocation, then allocates and copies from other.
	Buffer& operator=(const Buffer& other) {
		if (this == &other) {
			return *this;
		}

		Grindstone::Memory::AllocatorCore::Free(bufferPtr);
		capacity = other.capacity;
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(Buffer), "Buffer"));
		memcpy(bufferPtr, other.bufferPtr, capacity);
		return *this;
	}
	// Move assignment: frees the current allocation and takes ownership of other's.
	Buffer& operator=(Buffer&& other) noexcept {
		if (this == &other) {
			return *this;
		}

		Grindstone::Memory::AllocatorCore::Free(bufferPtr);

		bufferPtr = other.bufferPtr;
		capacity = other.capacity;

		other.bufferPtr = nullptr;
		other.capacity = 0;
		return *this;
	}
	Byte& operator[](int index) {
		return bufferPtr[index];
	}

	Byte operator[](int index) const {
		return bufferPtr[index];
	}

	explicit operator bool() const {
		return bufferPtr != nullptr;
	}
	// Releases the owned allocation and resets the buffer to an empty state.
	void Clear() {
		if (bufferPtr != nullptr) {
			Grindstone::Memory::AllocatorCore::Free(bufferPtr);
			bufferPtr = nullptr;
			capacity = 0;
		}
	}

	Byte* Get() {
		return bufferPtr;
	}
	const Byte* Get() const {
		return bufferPtr;
	}

	uint64_t GetCapacity() const {
		return capacity;
	}

protected:
	Byte* bufferPtr = nullptr;
	uint64_t capacity = 0;
};
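
// Illustrative usage sketch (not part of the class): a Buffer owns a fixed-capacity raw
// allocation, and BufferView exposes a non-owning window into it. The names and sizes
// below are invented for the example.
//
//     Buffer indexData(256);
//     indexData.ZeroInitialize();
//     if (indexData) {
//         BufferView firstHalf = indexData.GetBufferView(0, 128);
//     }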

class ResizableBuffer : public Buffer {
public:
	ResizableBuffer(uint64_t capacity) {
		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
		currentPtr = bufferPtr;
		this->capacity = capacity;
	}
	~ResizableBuffer() {
		currentPtr = nullptr;
	}
	// Copy constructor: copies the used region and points the write cursor into this copy,
	// not into the source buffer.
	ResizableBuffer(const ResizableBuffer& other) {
		capacity = other.capacity;
		size = other.size;

		bufferPtr = static_cast<Byte*>(Grindstone::Memory::AllocatorCore::AllocateRaw(capacity, alignof(ResizableBuffer), "ResizableBuffer"));
		currentPtr = bufferPtr + size;

		memcpy(bufferPtr, other.bufferPtr, size);
	}
	// Move constructor: steals the allocation and leaves the source empty.
	ResizableBuffer(ResizableBuffer&& other) noexcept {
		bufferPtr = other.bufferPtr;
		currentPtr = other.currentPtr;
		capacity = other.capacity;
		size = other.size;

		other.bufferPtr = nullptr;
		other.currentPtr = nullptr;
		other.capacity = 0;
		other.size = 0;
	}
	virtual BufferView GetBufferView(uint64_t segmentOffset, uint64_t segmentSize) override {
		Byte* targetPtr = bufferPtr + segmentOffset;
		if (targetPtr < bufferPtr) {
			GS_ASSERT_ENGINE("Start of view is before start of buffer.")
			return {};
		}

		if (targetPtr + segmentSize > bufferPtr + size) {
			GS_ASSERT_ENGINE("End of view is after end of used buffer.")
			return {};
		}

		return {targetPtr, segmentSize};
	}
	// Appends srcSize bytes at the current write cursor and returns where they were written.
	void* AddToBuffer(const void* srcPtr, uint64_t srcSize) {
		if (srcPtr == nullptr) {
			GS_ASSERT_ENGINE("Source memory is nullptr.")
			return nullptr;
		}

		uint64_t spaceLeft = GetSpaceLeft();
		if (srcSize > spaceLeft) {
			GS_ASSERT_ENGINE("Source memory is too big to fit in the remaining space.")
			return nullptr;
		}

		memcpy(currentPtr, srcPtr, srcSize);
		Byte* prevPtr = currentPtr;
		currentPtr += srcSize;
		size += srcSize;
		return prevPtr;
	}
	uint64_t GetSpaceLeft() const {
		return capacity - size;
	}

	uint64_t GetUsedSize() const {
		return size;
	}

private:
	Byte* currentPtr = nullptr;
	uint64_t size = 0;
};
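
// Illustrative usage sketch (not part of the class): a ResizableBuffer is filled
// incrementally with AddToBuffer, which advances an internal write cursor, and
// GetBufferView validates only against the bytes written so far. The names and
// sizes below are invented for the example.
//
//     ResizableBuffer scratch(1024);
//     float positions[3] = { 0.0f, 1.0f, 2.0f };
//     void* writtenAt = scratch.AddToBuffer(positions, sizeof(positions));
//     BufferView view = scratch.GetBufferView(0, scratch.GetUsedSize());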