// 2026-01-28 01:07:41 +01:00 (stray VCS timestamp; commented out so the file compiles)
/*
Crafter®.Graphics
Copyright (C) 2026 Catcrafts®
catcrafts.net
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License version 3.0 as published by the Free Software Foundation;
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
module;
#ifdef CRAFTER_GRAPHICS_VULKAN
#include <vulkan/vulkan.h>
#endif
export module Crafter.Graphics:VulkanBuffer;
#ifdef CRAFTER_GRAPHICS_VULKAN
import std;
import :VulkanDevice;
namespace Crafter {
// 2026-01-28 18:51:11 +01:00 (stray VCS timestamp; commented out so the file compiles)
export class VulkanBufferBase {
public:
2026-01-28 01:07:41 +01:00
VkDescriptorBufferInfo descriptor;
VkBuffer buffer = VK_NULL_HANDLE;
2026-01-28 18:51:11 +01:00
VkDeviceMemory memory;
};
export class VulkanBufferAdressable {
public:
2026-01-28 01:07:41 +01:00
VkDeviceAddress address;
2026-01-28 18:51:11 +01:00
};
/// Empty-base alternative for non-addressable buffers (zero size via EBO).
export class VulkanBufferAdressableEmpty {};

/// Selector: contributes an `address` member only when Adressable is true.
template<bool Adressable>
using VulkanBufferAdressableConditional = std::conditional_t<Adressable, VulkanBufferAdressable, VulkanBufferAdressableEmpty>;
/// Mixin base selected for persistently-mapped buffers; holds the typed
/// host-visible pointer produced by vkMapMemory in VulkanBuffer::Create.
export template<typename T>
class VulkanBufferMapped {
public:
    // BUG FIX: was left uninitialized; null until Create() maps the memory.
    T* value = nullptr;
};
/// Empty-base alternative for unmapped buffers (zero size via EBO).
export class VulkanBufferMappedEmpty {};

/// Selector: contributes the mapped-pointer member only when Mapped is true.
template<typename T, bool Mapped>
using VulkanBufferMappedConditional = std::conditional_t<Mapped, VulkanBufferMapped<T>, VulkanBufferMappedEmpty>;
// Primary declaration of the buffer template. The constraint forbids a buffer
// that is both Mapped and Staged (it is equivalent to !(Mapped && Staged));
// every other combination of the three flags is allowed.
export template <typename T, bool Mapped, bool Adressable, bool Staged> requires ((Mapped && !Staged) || (!Mapped && Staged) || (!Mapped && !Staged))
class VulkanBuffer;
/// Mixin base selected for staged (device-local) buffers; owns a host-visible,
/// persistently-mapped staging buffer used as the transfer source/target.
export template <typename T>
class VulkanBufferStaged {
public:
    // BUG FIX: this member was private (class default access), yet VulkanBuffer
    // accesses it from a derived class — it must be public. Also initialize it;
    // VulkanBuffer::Create allocates it, Clear() deletes it.
    VulkanBuffer<T, true, false, false>* stagingBuffer = nullptr;
};
/// Empty-base alternative for non-staged buffers (zero size via EBO).
export class VulkanBufferStagedEmpty {};

/// Selector: contributes the staging-buffer member only when Staged is true.
template<typename T, bool Staged>
using VulkanBufferStagedConditional = std::conditional_t<Staged, VulkanBufferStaged<T>, VulkanBufferStagedEmpty>;
/// RAII wrapper around a VkBuffer holding `count` elements of T.
///   Mapped     — memory is host-visible and persistently mapped (`value`).
///   Adressable — memory is allocated with device-address support (`address`).
///   Staged     — a host-visible staging buffer backs a device-local buffer
///                (`stagingBuffer`); Mapped and Staged are mutually exclusive.
/// Move-only; the destructor releases all Vulkan resources.
export template <typename T, bool Mapped, bool Adressable, bool Staged> requires ((Mapped && !Staged) || (!Mapped && Staged) || (!Mapped && !Staged))
class VulkanBuffer : public VulkanBufferBase, public VulkanBufferMappedConditional<T, Mapped>, public VulkanBufferAdressableConditional<Adressable>, public VulkanBufferStagedConditional<T, Staged> {
public:
    /// Size in bytes of the buffer contents (sizeof(T) * count), recorded by
    /// Create() so Copy() can pass a valid VkBufferCopy::size — the Vulkan spec
    /// does not allow VK_WHOLE_SIZE there.
    VkDeviceSize bufferSize = 0;

    VulkanBuffer() = default;

    /// Creates the VkBuffer, allocates and binds its memory, and fills in the
    /// descriptor. Staged: allocates the host-visible staging buffer first.
    /// Adressable: allocates with device-address support and queries `address`.
    /// Mapped: persistently maps the memory into `value`.
    void Create(VkBufferUsageFlags usageFlags, VkMemoryPropertyFlags memoryPropertyFlags, std::uint32_t count) {
        bufferSize = sizeof(T) * static_cast<VkDeviceSize>(count);
        if constexpr(Staged) {
            // BUG FIX: the original placement-new'd a whole VulkanBuffer into the
            // storage of the pointer member itself (UB: object larger than the
            // pointer) and named it through the wrong base class
            // (VulkanBufferMappedConditional). Heap-allocate the staging buffer.
            this->stagingBuffer = new VulkanBuffer<T, true, false, false>();
            this->stagingBuffer->Create(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, count);
        }
        VkBufferCreateInfo bufferCreateInfo {};
        bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        bufferCreateInfo.usage = usageFlags;
        bufferCreateInfo.size = bufferSize;
        VulkanDevice::CheckVkResult(vkCreateBuffer(VulkanDevice::device, &bufferCreateInfo, nullptr, &buffer));
        VkMemoryRequirements memReqs;
        vkGetBufferMemoryRequirements(VulkanDevice::device, buffer, &memReqs);
        VkMemoryAllocateInfo memAlloc {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
            .allocationSize = memReqs.size,
            .memoryTypeIndex = VulkanDevice::GetMemoryType(memReqs.memoryTypeBits, memoryPropertyFlags)
        };
        // Declared outside the branch so it outlives the vkAllocateMemory call
        // that reads it through pNext; this also removes the duplicated
        // vkAllocateMemory of the original's if/else.
        VkMemoryAllocateFlagsInfoKHR allocFlagsInfo {
            .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
            .flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR,
        };
        if constexpr(Adressable) {
            memAlloc.pNext = &allocFlagsInfo;
        }
        VulkanDevice::CheckVkResult(vkAllocateMemory(VulkanDevice::device, &memAlloc, nullptr, &memory));
        descriptor.offset = 0;
        descriptor.buffer = buffer;
        descriptor.range = VK_WHOLE_SIZE;
        VulkanDevice::CheckVkResult(vkBindBufferMemory(VulkanDevice::device, buffer, memory, 0));
        if constexpr(Adressable) {
            VkBufferDeviceAddressInfo addressInfo = {
                .sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
                .buffer = buffer
            };
            this->address = vkGetBufferDeviceAddress(VulkanDevice::device, &addressInfo);
        }
        if constexpr(Mapped) {
            VulkanDevice::CheckVkResult(vkMapMemory(VulkanDevice::device, memory, 0, memReqs.size, 0, reinterpret_cast<void**>(&(this->value))));
        }
    }

    /// Unmaps (if Mapped), destroys the buffer, frees the memory and releases
    /// the staging buffer (if Staged). Create() may be called again afterwards.
    void Clear() {
        if constexpr(Mapped) {
            vkUnmapMemory(VulkanDevice::device, memory);
        }
        vkDestroyBuffer(VulkanDevice::device, buffer, nullptr);
        vkFreeMemory(VulkanDevice::device, memory, nullptr);
        buffer = VK_NULL_HANDLE;
        memory = VK_NULL_HANDLE;
        if constexpr(Staged) {
            // BUG FIX: the original deleted through the wrong base-class
            // qualifier and left the pointer dangling.
            delete this->stagingBuffer;
            this->stagingBuffer = nullptr;
        }
    }

    /// Destroys the current buffer (if any) and recreates it with `count` elements.
    void Resize(VkBufferUsageFlags usageFlags, VkMemoryPropertyFlags memoryPropertyFlags, std::uint32_t count) {
        if(buffer != VK_NULL_HANDLE) {
            Clear();
        }
        Create(usageFlags, memoryPropertyFlags, count);
    }

    /// Records a full copy of this buffer into dst. Taking VulkanBufferBase&
    /// (backward compatible) allows copies between differently-configured
    /// instantiations, e.g. staging -> device-local.
    void Copy(VkCommandBuffer cmd, VulkanBufferBase& dst) {
        // BUG FIX: the original used descriptor.range (VK_WHOLE_SIZE) as the
        // copy size, which is not a valid VkBufferCopy::size, and the
        // address-of operator had been mojibake'd into '©Region'.
        VkBufferCopy copyRegion = {
            .srcOffset = 0,
            .dstOffset = 0,
            .size = bufferSize
        };
        vkCmdCopyBuffer(
            cmd,
            buffer,
            dst.buffer,
            1,
            &copyRegion
        );
    }

    /// Copy followed by a buffer memory barrier so the given destination
    /// stages/accesses observe the transfer write.
    void Copy(VkCommandBuffer cmd, VulkanBufferBase& dst, VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) {
        Copy(cmd, dst);
        VkBufferMemoryBarrier barrier = {
            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
            .srcAccessMask = srcAccessMask,
            .dstAccessMask = dstAccessMask,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            // BUG FIX: the barrier must cover the buffer that was written (dst),
            // not the copy source.
            .buffer = dst.buffer,
            .offset = 0,
            .size = VK_WHOLE_SIZE
        };
        vkCmdPipelineBarrier(
            cmd,
            srcStageMask,
            dstStageMask,
            0,
            0, NULL,
            1, &barrier,
            0, NULL
        );
    }

    /// Flushes host writes to the mapped range so the device can see them
    /// (needed when the memory is not HOST_COHERENT).
    void FlushDevice() requires(Mapped) {
        VkMappedMemoryRange range = {
            .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = memory,
            .offset = 0,
            .size = VK_WHOLE_SIZE
        };
        vkFlushMappedMemoryRanges(VulkanDevice::device, 1, &range);
    }

    /// FlushDevice() plus a HOST -> dstStageMask barrier making the host writes
    /// visible to the given access/stage on the device timeline.
    void FlushDevice(VkCommandBuffer cmd, VkAccessFlags dstAccessMask, VkPipelineStageFlags dstStageMask) requires(Mapped) {
        FlushDevice();
        VkBufferMemoryBarrier barrier = {
            .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
            .srcAccessMask = VK_ACCESS_HOST_WRITE_BIT,
            .dstAccessMask = dstAccessMask,
            .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
            .buffer = buffer,
            .offset = 0,
            .size = VK_WHOLE_SIZE
        };
        vkCmdPipelineBarrier(
            cmd,
            VK_PIPELINE_STAGE_HOST_BIT,
            dstStageMask,
            0,
            0, NULL,
            1, &barrier,
            0, NULL
        );
    }

    /// Invalidates the mapped range so the host sees device writes
    /// (needed when the memory is not HOST_COHERENT).
    void FlushHost() requires(Mapped) {
        VkMappedMemoryRange range = {
            .sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            .memory = memory,
            .offset = 0,
            .size = VK_WHOLE_SIZE
        };
        vkInvalidateMappedMemoryRanges(VulkanDevice::device, 1, &range);
    }

    /// Staged upload: flushes the staging buffer's host writes and records the
    /// staging -> device copy.
    void FlushDevice(VkCommandBuffer cmd) requires(Staged) {
        // BUG FIX: the original used '.' on the pointer member, called a
        // non-existent two-argument FlushDevice overload (missing cmd), and
        // passed `this` where a reference is required.
        this->stagingBuffer->FlushDevice(cmd, VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
        this->stagingBuffer->Copy(cmd, *this);
    }

    /// Staged upload with an additional barrier so dstAccessMask/dstStageMask
    /// observe the transfer write on the device-local buffer.
    void FlushDevice(VkCommandBuffer cmd, VkAccessFlags dstAccessMask, VkPipelineStageFlags dstStageMask) requires(Staged) {
        this->stagingBuffer->FlushDevice(cmd, VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
        this->stagingBuffer->Copy(cmd, *this, VK_ACCESS_TRANSFER_WRITE_BIT, dstAccessMask, VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask);
    }

    /// Staged readback: records the device -> staging copy and invalidates the
    /// staging mapping. NOTE(review): the invalidate takes effect immediately,
    /// before the command buffer executes — callers must wait for the submission
    /// before reading the staging memory.
    void FlushHost(VkCommandBuffer cmd) requires(Staged) {
        // BUG FIX: the original passed the pointer where a reference is required
        // and used '.' on the pointer member.
        Copy(cmd, *this->stagingBuffer);
        this->stagingBuffer->FlushHost();
    }

    /// Move constructor: steals all handles and neuters the source so its
    /// destructor does not release them.
    VulkanBuffer(VulkanBuffer&& other) noexcept {
        descriptor = other.descriptor;
        buffer = other.buffer;
        memory = other.memory;
        bufferSize = other.bufferSize;
        other.buffer = VK_NULL_HANDLE;
        other.memory = VK_NULL_HANDLE;
        if constexpr(Adressable) {
            this->address = other.address;
        }
        if constexpr(Mapped) {
            this->value = other.value;
            other.value = nullptr;
        }
        if constexpr(Staged) {
            this->stagingBuffer = other.stagingBuffer;
            // BUG FIX: null the source pointer so a later Clear()/reuse of the
            // moved-from object cannot double-delete the staging buffer.
            other.stagingBuffer = nullptr;
        }
    }

    ~VulkanBuffer() {
        if(buffer != VK_NULL_HANDLE) {
            Clear();
        }
    }

    VulkanBuffer(VulkanBuffer&) = delete;
    VulkanBuffer& operator=(const VulkanBuffer&) = delete;
};
}
#endif