Improved VulkanBuffer
This commit is contained in:
parent
819517d150
commit
2e11ac6484
11 changed files with 396 additions and 762 deletions
|
|
@ -29,72 +29,255 @@ import std;
|
|||
import :VulkanDevice;
|
||||
|
||||
namespace Crafter {
|
||||
export template <typename T>
|
||||
class VulkanBuffer {
|
||||
public:
|
||||
T* value;
|
||||
export class VulkanBufferBase {
|
||||
public:
|
||||
VkDescriptorBufferInfo descriptor;
|
||||
VkDeviceSize alignment = 0;
|
||||
VkMemoryPropertyFlags memoryPropertyFlags;
|
||||
VkBufferUsageFlags usageFlags;
|
||||
VkBuffer buffer = VK_NULL_HANDLE;
|
||||
VkDeviceMemory memory = VK_NULL_HANDLE;
|
||||
VkDeviceMemory memory;
|
||||
};
|
||||
|
||||
export class VulkanBufferAdressable {
|
||||
public:
|
||||
VkDeviceAddress address;
|
||||
};
|
||||
export class VulkanBufferAdressableEmpty {};
|
||||
template<bool Adressable>
|
||||
using VulkanBufferAdressableConditional =
|
||||
std::conditional_t<
|
||||
Adressable,
|
||||
VulkanBufferAdressable,
|
||||
VulkanBufferAdressableEmpty
|
||||
>;
|
||||
|
||||
export template<typename T>
|
||||
class VulkanBufferMapped {
|
||||
public:
|
||||
T* value;
|
||||
};
|
||||
export class VulkanBufferMappedEmpty {};
|
||||
template<typename T, bool Mapped>
|
||||
using VulkanBufferMappedConditional =
|
||||
std::conditional_t<
|
||||
Mapped,
|
||||
VulkanBufferMapped<T>,
|
||||
VulkanBufferMappedEmpty
|
||||
>;
|
||||
|
||||
|
||||
export template <typename T, bool Mapped, bool Adressable, bool Staged> requires ((Mapped && !Staged) || (!Mapped && Staged) || (!Mapped && !Staged))
|
||||
class VulkanBuffer;
|
||||
|
||||
export template <typename T>
|
||||
class VulkanBufferStaged {
|
||||
VulkanBuffer<T, true, false, false>* stagingBuffer;
|
||||
};
|
||||
export class VulkanBufferStagedEmpty {};
|
||||
template<typename T, bool Staged>
|
||||
using VulkanBufferStagedConditional =
|
||||
std::conditional_t<
|
||||
Staged,
|
||||
VulkanBufferStaged<T>,
|
||||
VulkanBufferStagedEmpty
|
||||
>;
|
||||
|
||||
|
||||
export template <typename T, bool Mapped, bool Adressable, bool Staged> requires ((Mapped && !Staged) || (!Mapped && Staged) || (!Mapped && !Staged))
|
||||
class VulkanBuffer : public VulkanBufferBase, public VulkanBufferMappedConditional<T, Mapped>, public VulkanBufferAdressableConditional<Adressable>, public VulkanBufferStagedConditional<T, Staged> {
|
||||
public:
|
||||
VulkanBuffer() : value(nullptr) {};
|
||||
VulkanBuffer(VkBufferUsageFlags usageFlags, VkMemoryPropertyFlags memoryPropertyFlags, std::uint32_t count = 1) {
|
||||
VulkanBuffer() = default;
|
||||
void Create(VkBufferUsageFlags usageFlags, VkMemoryPropertyFlags memoryPropertyFlags, std::uint32_t count) {
|
||||
if constexpr(Staged) {
|
||||
new (&VulkanBufferMappedConditional<T, true>::stagingBuffer) VulkanBuffer<T, true, false, false>();
|
||||
VulkanBufferMappedConditional<T, true>::stagingBuffer->Create(VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, count);
|
||||
}
|
||||
|
||||
VkBufferCreateInfo bufferCreateInfo {};
|
||||
bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
|
||||
bufferCreateInfo.usage = usageFlags;
|
||||
bufferCreateInfo.size = sizeof(T)*count;
|
||||
VulkanDevice::CheckVkResult(vkCreateBuffer(VulkanDevice::device, &bufferCreateInfo, nullptr, &buffer));
|
||||
|
||||
// Create the memory backing up the buffer handle
|
||||
VkMemoryRequirements memReqs;
|
||||
VkMemoryAllocateInfo memAlloc {};
|
||||
memAlloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
|
||||
vkGetBufferMemoryRequirements(VulkanDevice::device, buffer, &memReqs);
|
||||
memAlloc.allocationSize = memReqs.size;
|
||||
// Find a memory type index that fits the properties of the buffer
|
||||
memAlloc.memoryTypeIndex = VulkanDevice::GetMemoryType(memReqs.memoryTypeBits, memoryPropertyFlags);
|
||||
// If the buffer has VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT set we also need to enable the appropriate flag during allocation
|
||||
VkMemoryAllocateFlagsInfoKHR allocFlagsInfo{};
|
||||
if (usageFlags & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT) {
|
||||
allocFlagsInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR;
|
||||
allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
|
||||
VkMemoryAllocateInfo memAlloc {
|
||||
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
|
||||
.allocationSize = memReqs.size,
|
||||
.memoryTypeIndex = VulkanDevice::GetMemoryType(memReqs.memoryTypeBits, memoryPropertyFlags)
|
||||
};
|
||||
if constexpr(Adressable) {
|
||||
VkMemoryAllocateFlagsInfoKHR allocFlagsInfo {
|
||||
.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
|
||||
.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR,
|
||||
};
|
||||
memAlloc.pNext = &allocFlagsInfo;
|
||||
VulkanDevice::CheckVkResult(vkAllocateMemory(VulkanDevice::device, &memAlloc, nullptr, &memory));
|
||||
} else {
|
||||
VulkanDevice::CheckVkResult(vkAllocateMemory(VulkanDevice::device, &memAlloc, nullptr, &memory));
|
||||
}
|
||||
VulkanDevice::CheckVkResult(vkAllocateMemory(VulkanDevice::device, &memAlloc, nullptr, &memory));
|
||||
|
||||
alignment = memReqs.alignment;
|
||||
usageFlags = usageFlags;
|
||||
memoryPropertyFlags = memoryPropertyFlags;
|
||||
|
||||
descriptor.offset = 0;
|
||||
descriptor.buffer = buffer;
|
||||
descriptor.range = sizeof(T)*count;
|
||||
descriptor.range = memReqs.size;
|
||||
|
||||
VulkanDevice::CheckVkResult(vkBindBufferMemory(VulkanDevice::device, buffer, memory, 0));
|
||||
if(memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
|
||||
VulkanDevice::CheckVkResult(vkMapMemory(VulkanDevice::device, memory, 0, sizeof(T)*count, 0, reinterpret_cast<void**>(&value)));
|
||||
|
||||
if constexpr(Adressable) {
|
||||
VkBufferDeviceAddressInfo addressInfo = {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
|
||||
.buffer = buffer
|
||||
};
|
||||
VulkanBufferAdressableConditional<true>::address = vkGetBufferDeviceAddress(VulkanDevice::device, &addressInfo);
|
||||
}
|
||||
|
||||
VkBufferDeviceAddressInfo addressInfo = {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
|
||||
.buffer = buffer
|
||||
if constexpr(Mapped) {
|
||||
VulkanDevice::CheckVkResult(vkMapMemory(VulkanDevice::device, memory, 0, memReqs.size, 0, reinterpret_cast<void**>(&(VulkanBufferMappedConditional<T, true>::value))));
|
||||
}
|
||||
}
|
||||
|
||||
void Clear() {
|
||||
if constexpr(Mapped) {
|
||||
vkUnmapMemory(VulkanDevice::device, memory);
|
||||
}
|
||||
vkDestroyBuffer(VulkanDevice::device, buffer, nullptr);
|
||||
vkFreeMemory(VulkanDevice::device, memory, nullptr);
|
||||
buffer = VK_NULL_HANDLE;
|
||||
if constexpr(Staged) {
|
||||
delete VulkanBufferMappedConditional<T, true>::stagingBuffer;
|
||||
}
|
||||
}
|
||||
|
||||
void Resize(VkBufferUsageFlags usageFlags, VkMemoryPropertyFlags memoryPropertyFlags, std::uint32_t count) {
|
||||
if(buffer != VK_NULL_HANDLE) {
|
||||
Clear();
|
||||
}
|
||||
Create(usageFlags, memoryPropertyFlags, count);
|
||||
}
|
||||
|
||||
void Copy(VkCommandBuffer cmd, VulkanBuffer& dst) {
|
||||
VkBufferCopy copyRegion = {
|
||||
.srcOffset = 0,
|
||||
.dstOffset = 0,
|
||||
.size = descriptor.range
|
||||
};
|
||||
|
||||
address = vkGetBufferDeviceAddress(VulkanDevice::device, &addressInfo);
|
||||
vkCmdCopyBuffer(
|
||||
cmd,
|
||||
buffer,
|
||||
dst.buffer,
|
||||
1,
|
||||
©Region
|
||||
);
|
||||
}
|
||||
|
||||
void Copy(VkCommandBuffer cmd, VulkanBuffer& dst, VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask) {
|
||||
Copy(cmd, dst);
|
||||
|
||||
VkBufferMemoryBarrier barrier = {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
|
||||
.srcAccessMask = srcAccessMask,
|
||||
.dstAccessMask = dstAccessMask,
|
||||
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.buffer = buffer,
|
||||
.offset = 0,
|
||||
.size = VK_WHOLE_SIZE
|
||||
};
|
||||
|
||||
vkCmdPipelineBarrier(
|
||||
cmd,
|
||||
srcStageMask,
|
||||
dstStageMask,
|
||||
0,
|
||||
0, NULL,
|
||||
1, &barrier,
|
||||
0, NULL
|
||||
);
|
||||
}
|
||||
|
||||
void FlushDevice() requires(Mapped) {
|
||||
VkMappedMemoryRange range = {
|
||||
.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
|
||||
.memory = memory,
|
||||
.offset = 0,
|
||||
.size = VK_WHOLE_SIZE
|
||||
};
|
||||
vkFlushMappedMemoryRanges(VulkanDevice::device, 1, &range);
|
||||
}
|
||||
|
||||
void FlushDevice(VkCommandBuffer cmd, VkAccessFlags dstAccessMask, VkPipelineStageFlags dstStageMask) requires(Mapped) {
|
||||
FlushDevice();
|
||||
VkBufferMemoryBarrier barrier = {
|
||||
.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
|
||||
.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT,
|
||||
.dstAccessMask = dstAccessMask,
|
||||
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
|
||||
.buffer = buffer,
|
||||
.offset = 0,
|
||||
.size = VK_WHOLE_SIZE
|
||||
};
|
||||
|
||||
vkCmdPipelineBarrier(
|
||||
cmd,
|
||||
VK_PIPELINE_STAGE_HOST_BIT,
|
||||
dstStageMask,
|
||||
0,
|
||||
0, NULL,
|
||||
1, &barrier,
|
||||
0, NULL
|
||||
);
|
||||
}
|
||||
|
||||
void FlushHost() requires(Mapped) {
|
||||
VkMappedMemoryRange range = {
|
||||
.sType = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
|
||||
.memory = memory,
|
||||
.offset = 0,
|
||||
.size = VK_WHOLE_SIZE
|
||||
};
|
||||
vkInvalidateMappedMemoryRanges(VulkanDevice::device, 1, &range);
|
||||
}
|
||||
|
||||
void FlushDevice(VkCommandBuffer cmd) requires(Staged) {
|
||||
VulkanBufferStagedConditional<T, true>::stagingBuffer.FlushDevice(VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
|
||||
VulkanBufferStagedConditional<T, true>::stagingBuffer.Copy(cmd, this);
|
||||
}
|
||||
|
||||
void FlushDevice(VkCommandBuffer cmd, VkAccessFlags dstAccessMask, VkPipelineStageFlags dstStageMask) requires(Staged) {
|
||||
VulkanBufferStagedConditional<T, true>::stagingBuffer.FlushDevice(VK_ACCESS_TRANSFER_READ_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
|
||||
VulkanBufferStagedConditional<T, true>::stagingBuffer.Copy(cmd, this, VK_ACCESS_TRANSFER_WRITE_BIT, dstAccessMask, VK_PIPELINE_STAGE_TRANSFER_BIT, dstStageMask);
|
||||
}
|
||||
|
||||
void FlushHost(VkCommandBuffer cmd) requires(Staged) {
|
||||
Copy(cmd, VulkanBufferStagedConditional<T, true>::stagingBuffer);
|
||||
VulkanBufferStagedConditional<T, true>::stagingBuffer.FlushHost();
|
||||
}
|
||||
|
||||
|
||||
VulkanBuffer(VulkanBuffer&& other) {
|
||||
descriptor = other.descriptor;
|
||||
buffer = other.buffer;
|
||||
memory = other.memory;
|
||||
other.buffer = VK_NULL_HANDLE;
|
||||
if constexpr(Adressable) {
|
||||
VulkanBufferAdressableConditional<true>::address = other.VulkanBufferAdressableConditional<true>::address;
|
||||
}
|
||||
if constexpr(Mapped) {
|
||||
VulkanBufferMappedConditional<T, true>::value = other.VulkanBufferMappedConditional<T, true>::value;
|
||||
}
|
||||
if constexpr(Staged) {
|
||||
VulkanBufferStagedConditional<T, true>::stagingBuffer = other.VulkanBufferStagedConditional<T, true>::stagingBuffer;
|
||||
}
|
||||
};
|
||||
|
||||
~VulkanBuffer() {
|
||||
if(value != nullptr) {
|
||||
if(memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
|
||||
vkUnmapMemory(VulkanDevice::device, memory);
|
||||
}
|
||||
vkDestroyBuffer(VulkanDevice::device, buffer, nullptr);
|
||||
vkFreeMemory(VulkanDevice::device, memory, nullptr);
|
||||
if(buffer != VK_NULL_HANDLE) {
|
||||
Clear();
|
||||
}
|
||||
}
|
||||
|
||||
VulkanBuffer(VulkanBuffer&) = delete;
|
||||
VulkanBuffer& operator=(const VulkanBuffer&) = delete;
|
||||
};
|
||||
}
|
||||
#endif
|
||||
Loading…
Add table
Add a link
Reference in a new issue