#cxx_warnings_to_preserve = \
# -Wno-weak-vtables
# Warnings that cause issues with vulkan.hpp. Double check when we upgrade.
- ADD_COMPILE_OPTIONS(-Wno-switch -Wno-switch-enum -Wno-error=switch -Wno-error=switch-enum)
ADD_COMPILE_OPTIONS(-Wno-init-list-lifetime)
+ ADD_COMPILE_OPTIONS(-Werror)
INCLUDE(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG(-Wno-class-memaccess HAVE_NO_CLASS_MEMACCESS)
IF (HAVE_NO_CLASS_MEMACCESS)
${adaptor_graphics_dir}/vulkan-impl/vulkan-spirv.cpp
${adaptor_graphics_dir}/vulkan-impl/vulkan-surface-impl.cpp
${adaptor_graphics_dir}/vulkan-impl/vulkan-swapchain-impl.cpp
+ ${adaptor_graphics_dir}/vulkan-impl/vulkan-sampler.cpp
+ ${adaptor_graphics_dir}/vulkan-impl/vulkan-sampler-impl.cpp
+ ${adaptor_graphics_dir}/vulkan-impl/vulkan-texture.cpp
)
# module: graphics, backend: vulkan/x11
#include <cstdint>
+#if defined(DEBUG_ENABLED)
extern Debug::Filter* gVulkanFilter;
+#endif
-namespace
+namespace Dali::Graphics::Vulkan
{
-const uint32_t INVALID_MEMORY_INDEX = -1u;
-
-/**
- * Helper function which returns GPU heap index that can be used to allocate
- * particular type of resource
- */
-uint32_t GetMemoryIndex(const vk::PhysicalDeviceMemoryProperties& memoryProperties,
- uint32_t memoryTypeBits,
- vk::MemoryPropertyFlags properties)
+BufferImpl* BufferImpl::New(Device& device, size_t size, vk::BufferUsageFlags usageFlags)
{
- for(uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i)
- {
- if((memoryTypeBits & (1u << i)) &&
- ((memoryProperties.memoryTypes[i].propertyFlags & properties) == properties))
- {
- return i;
- }
- }
- return INVALID_MEMORY_INDEX;
+ return New(device, size, vk::SharingMode(vk::SharingMode::eExclusive), usageFlags, vk::MemoryPropertyFlags(vk::MemoryPropertyFlagBits::eHostVisible));
}
-} // namespace
-namespace Dali::Graphics::Vulkan
-{
-BufferImpl* BufferImpl::New(Device& device, size_t size, VkBufferUsageFlags usageFlags)
+BufferImpl* BufferImpl::New(Device& device, size_t size, vk::SharingMode sharingMode, vk::BufferUsageFlags usageFlags, vk::MemoryPropertyFlags memoryProperties)
{
auto info = vk::BufferCreateInfo{};
- info.setSharingMode(vk::SharingMode::eExclusive);
+ info.setSharingMode(sharingMode);
info.setSize(size);
- info.setUsage(static_cast<vk::BufferUsageFlags>(usageFlags));
+ info.setUsage(usageFlags);
auto bufferImpl = new BufferImpl(device, info);
VkAssert(device.GetLogicalDevice().createBuffer(&info, &device.GetAllocator(), &bufferImpl->mBuffer));
- bufferImpl->Initialize(vk::MemoryPropertyFlagBits::eHostVisible);
+ bufferImpl->Initialize(memoryProperties);
return bufferImpl;
}
{
// Allocate
auto requirements = mDevice.GetLogicalDevice().getBufferMemoryRequirements(mBuffer);
- auto memoryTypeIndex = GetMemoryIndex(mDevice.GetMemoryProperties(),
- requirements.memoryTypeBits,
- memoryProperties);
+ auto memoryTypeIndex = Device::GetMemoryIndex(mDevice.GetMemoryProperties(),
+ requirements.memoryTypeBits,
+ memoryProperties);
mMemory = std::make_unique<MemoryImpl>(mDevice, size_t(requirements.size), size_t(requirements.alignment), ((memoryProperties & vk::MemoryPropertyFlagBits::eHostVisible) == vk::MemoryPropertyFlagBits::eHostVisible));
+#pragma once
/*
* Copyright (c) 2024 Samsung Electronics Co., Ltd.
*
namespace Dali::Graphics::Vulkan
{
-
class BufferImpl // : public VkManaged
{
public:
- static BufferImpl* New(Vulkan::Device& device, size_t size, VkBufferUsageFlags usageFlags);
+ static BufferImpl* New(Vulkan::Device& device, size_t size, vk::BufferUsageFlags usageFlags);
+
+ static BufferImpl* New(Vulkan::Device& device, size_t size, vk::SharingMode sharingMode, vk::BufferUsageFlags usageFlags, vk::MemoryPropertyFlags memoryProperties);
/**
* Returns buffer usage flags
// buffer in this implementation
mTransient = true;
}
-
- //@todo Decide if we want to use creation queues
- // controller.AddBuffer(*this);
- InitializeResource();
}
bool Buffer::InitializeResource()
{
if(!mBufferImpl)
{
- auto vkUsageFlags = static_cast<VkBufferUsageFlags>(mCreateInfo.usage);
+ auto vkUsageFlags = static_cast<vk::BufferUsageFlags>(mCreateInfo.usage);
mBufferImpl = BufferImpl::New(mController.GetGraphicsDevice(), mCreateInfo.size, vkUsageFlags);
}
}
#include <dali/internal/graphics/vulkan-impl/vulkan-command-buffer-impl.h>
// INTERNAL INCLUDES
+#include <dali/internal/graphics/vulkan-impl/vulkan-buffer-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-buffer.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-command-pool-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-framebuffer-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-swapchain-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
#include <dali/internal/graphics/vulkan/vulkan-device.h>
mCommandBuffer.endRenderPass();
}
+// Records a pipeline barrier into this command buffer. Stage masks,
+// dependency flags and the three barrier lists are forwarded directly to
+// vk::CommandBuffer::pipelineBarrier.
+// NOTE(review): the barrier vectors are taken by value (copied on every
+// call); confirm whether passing by const& was intended, as is done for
+// the region lists in CopyBufferToImage/CopyImage.
+void CommandBufferImpl::PipelineBarrier(
+ vk::PipelineStageFlags srcStageMask,
+ vk::PipelineStageFlags dstStageMask,
+ vk::DependencyFlags dependencyFlags,
+ std::vector<vk::MemoryBarrier> memoryBarriers,
+ std::vector<vk::BufferMemoryBarrier> bufferBarriers,
+ std::vector<vk::ImageMemoryBarrier> imageBarriers)
+{
+ mCommandBuffer.pipelineBarrier(srcStageMask,
+ dstStageMask,
+ dependencyFlags,
+ memoryBarriers,
+ bufferBarriers,
+ imageBarriers);
+}
+
+// Records a buffer-to-image copy command (vkCmdCopyBufferToImage).
+// The destination image must already be in dstLayout when the command
+// executes on the GPU (callers in this patch use eTransferDstOptimal).
+void CommandBufferImpl::CopyBufferToImage(
+ Vulkan::BufferImpl* srcBuffer, Vulkan::Image* dstImage, vk::ImageLayout dstLayout, const std::vector<vk::BufferImageCopy>& regions)
+{
+ mCommandBuffer.copyBufferToImage(srcBuffer->GetVkHandle(),
+ dstImage->GetVkHandle(),
+ dstLayout,
+ regions);
+}
+
+// Records an image-to-image copy command (vkCmdCopyImage) between the given
+// layouts; the copy regions are forwarded verbatim.
+void CommandBufferImpl::CopyImage(
+ Vulkan::Image* srcImage, vk::ImageLayout srcLayout, Vulkan::Image* dstImage, vk::ImageLayout dstLayout, const std::vector<vk::ImageCopy>& regions)
+{
+ mCommandBuffer.copyImage(srcImage->GetVkHandle(), srcLayout, dstImage->GetVkHandle(), dstLayout, regions);
+}
+
uint32_t CommandBufferImpl::GetPoolAllocationIndex() const
{
return mPoolAllocationIndex;
*/
// INTERNAL INCLUDES
-#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
#include <dali/graphics-api/graphics-types.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-buffer-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
namespace Dali::Graphics::Vulkan
{
+class Buffer;
class Device;
+class CommandPool;
class CommandBufferImpl : public VkManaged
{
~CommandBufferImpl() override;
/** Begin recording */
- void Begin( vk::CommandBufferUsageFlags usageFlags, vk::CommandBufferInheritanceInfo* inheritanceInfo );
+ void Begin(vk::CommandBufferUsageFlags usageFlags, vk::CommandBufferInheritanceInfo* inheritanceInfo);
/** Finish recording */
void End();
* @param renderPassBeginInfo
* @param subpassContents
*/
- void BeginRenderPass( vk::RenderPassBeginInfo renderPassBeginInfo, vk::SubpassContents subpassContents );
+ void BeginRenderPass(vk::RenderPassBeginInfo renderPassBeginInfo, vk::SubpassContents subpassContents);
/**
* Ends current render pass
*/
void EndRenderPass();
+ void PipelineBarrier(vk::PipelineStageFlags srcStageMask,
+ vk::PipelineStageFlags dstStageMask,
+ vk::DependencyFlags dependencyFlags,
+ std::vector<vk::MemoryBarrier> memoryBarriers,
+ std::vector<vk::BufferMemoryBarrier> bufferBarriers,
+ std::vector<vk::ImageMemoryBarrier> imageBarriers);
+
+ void CopyBufferToImage(Vulkan::BufferImpl* srcBuffer, Vulkan::Image* dstImage, vk::ImageLayout dstLayout, const std::vector<vk::BufferImageCopy>& regions);
+
+ void CopyImage(Vulkan::Image* srcImage, vk::ImageLayout srcLayout, Image* dstImage, vk::ImageLayout dstLayout, const std::vector<vk::ImageCopy>& regions);
+
/**
* Implements VkManaged::OnDestroy
* @return
bool OnDestroy() override;
private:
-
/**
* Returns allocation index
* @return
[[nodiscard]] uint32_t GetPoolAllocationIndex() const;
private:
-
// Constructor called by the CommandPool only
- CommandBufferImpl( CommandPool& commandPool,
- uint32_t poolIndex,
- const vk::CommandBufferAllocateInfo& allocateInfo,
- vk::CommandBuffer vulkanHandle );
+ CommandBufferImpl(
+ CommandPool& commandPool,
+ uint32_t poolIndex,
+ const vk::CommandBufferAllocateInfo& allocateInfo,
+ vk::CommandBuffer vulkanHandle);
private:
-
- CommandPool* mOwnerCommandPool;
- Device* mGraphicsDevice;
- uint32_t mPoolAllocationIndex;
+ CommandPool* mOwnerCommandPool;
+ Device* mGraphicsDevice;
+ uint32_t mPoolAllocationIndex;
vk::CommandBufferAllocateInfo mAllocateInfo{};
vk::CommandBuffer mCommandBuffer{};
- bool mRecording{ false };
+ bool mRecording{false};
};
} // namespace Dali::Graphics::Vulkan
namespace Dali::Graphics::Vulkan
{
+
CommandBuffer::CommandBuffer(const Graphics::CommandBufferCreateInfo& createInfo, VulkanGraphicsController& controller)
: CommandBufferResource(createInfo, controller),
mCommandBufferImpl(nullptr)
namespace Dali::Graphics::Vulkan
{
class Device;
+class CommandBufferImpl;
class CommandPool : public VkManaged
{
* @param createInfo
* @return
*/
- static CommandPool* New( Device& graphics, const vk::CommandPoolCreateInfo& createInfo );
+ static CommandPool* New(Device& graphics, const vk::CommandPoolCreateInfo& createInfo);
/**
*
* @param graphics
* @return
*/
- static CommandPool* New( Device& graphics );
+ static CommandPool* New(Device& graphics);
CommandPool() = delete;
- CommandPool( Device& graphicsDevice, const vk::CommandPoolCreateInfo& createInfo );
+ CommandPool(Device& graphicsDevice, const vk::CommandPoolCreateInfo& createInfo);
~CommandPool() override;
bool Initialize();
- bool OnDestroy() override; //TODO: Queue deleter for destruction
+ bool OnDestroy() override; // TODO: Queue deleter for destruction
/**
* Resets command pool
*/
- void Reset( bool releaseResources );
+ void Reset(bool releaseResources);
public: // API
/**
* @param allocateInfo
* @return
*/
- CommandBufferImpl* NewCommandBuffer( const vk::CommandBufferAllocateInfo& allocateInfo );
+ CommandBufferImpl* NewCommandBuffer(const vk::CommandBufferAllocateInfo& allocateInfo);
/**
*
* @param isPrimary
* @return
*/
- CommandBufferImpl* NewCommandBuffer( bool isPrimary = true );
+ CommandBufferImpl* NewCommandBuffer(bool isPrimary = true);
/**
* Releases command buffer
* @param buffer
* @return
*/
- bool ReleaseCommandBuffer(CommandBufferImpl& buffer );
+ bool ReleaseCommandBuffer(CommandBufferImpl& buffer);
/**
* Returns current pool capacity ( 0 if nothing allocated )
* @param level
* @return
*/
- uint32_t GetAllocationCount( vk::CommandBufferLevel level ) const;
+ uint32_t GetAllocationCount(vk::CommandBufferLevel level) const;
Device* GetGraphicsDevice()
{
return mGraphicsDevice;
}
-private: //Internal structs
+private: // Internal structs
/**
- * CommandBufferPool contains preallocated command buffers that are
- * reusable.
- */
+ * CommandBufferPool contains preallocated command buffers that are
+ * reusable.
+ */
struct InternalPool
{
- static constexpr uint32_t INVALID_NODE_INDEX{ 0xffffffffu };
+ static constexpr uint32_t INVALID_NODE_INDEX{0xffffffffu};
struct Node
{
- Node( uint32_t _nextFreeIndex, CommandBufferImpl* _commandBuffer );
+ Node(uint32_t _nextFreeIndex, CommandBufferImpl* _commandBuffer);
- uint32_t nextFreeIndex;
+ uint32_t nextFreeIndex;
CommandBufferImpl* commandBuffer;
};
- InternalPool( CommandPool& owner, Device* graphics, uint32_t initialCapacity, bool isPrimary );
+ InternalPool(CommandPool& owner, Device* graphics, uint32_t initialCapacity, bool isPrimary);
~InternalPool();
* @param allocateInfo
* @return
*/
- std::vector< vk::CommandBuffer > AllocateVkCommandBuffers( vk::CommandBufferAllocateInfo allocateInfo );
+ std::vector<vk::CommandBuffer> AllocateVkCommandBuffers(vk::CommandBufferAllocateInfo allocateInfo);
/**
* Resizes command pool to the new capacity. Pool may only grow
* @param newCapacity
*/
- void Resize( uint32_t newCapacity );
+ void Resize(uint32_t newCapacity);
/**
* Allocates new command buffer
* @return
*/
- CommandBufferImpl* AllocateCommandBuffer( bool reset );
+ CommandBufferImpl* AllocateCommandBuffer(bool reset);
/**
* Releases command buffer back to the pool
* @param reset if true, Resets command buffer
* @param ref
*/
- void ReleaseCommandBuffer(CommandBufferImpl& buffer, bool reset = false );
+ void ReleaseCommandBuffer(CommandBufferImpl& buffer, bool reset = false);
uint32_t GetCapacity() const;
uint32_t GetAllocationCount() const;
- CommandPool& mOwner;
- Device* mGraphicsDevice;
- std::vector< Node > mPoolData;
- uint32_t mFirstFree;
- uint32_t mCapacity;
- uint32_t mAllocationCount;
- bool mIsPrimary;
+ CommandPool& mOwner;
+ Device* mGraphicsDevice;
+ std::vector<Node> mPoolData;
+ uint32_t mFirstFree;
+ uint32_t mCapacity;
+ uint32_t mAllocationCount;
+ bool mIsPrimary;
};
private: // Data members
- Device* mGraphicsDevice;
+ Device* mGraphicsDevice;
vk::CommandPoolCreateInfo mCreateInfo;
- vk::CommandPool mCommandPool;
+ vk::CommandPool mCommandPool;
// Pools are lazily allocated, depends on the requested command buffers
- std::unique_ptr< InternalPool > mInternalPoolPrimary;
- std::unique_ptr< InternalPool > mInternalPoolSecondary;
-
+ std::unique_ptr<InternalPool> mInternalPoolPrimary;
+ std::unique_ptr<InternalPool> mInternalPoolSecondary;
};
} // namespace Dali::Graphics::Vulkan
-
-#endif //DALI_GRAPHICS_VULKAN_COMMAND_POOL_IMPL_H
+#endif // DALI_GRAPHICS_VULKAN_COMMAND_POOL_IMPL_H
// INTERNAL INCLUDES
#include <dali/internal/graphics/vulkan-impl/vulkan-fence-impl.h>
-#include <dali/internal/graphics/vulkan/vulkan-device.h>
#include <dali/integration-api/debug.h>
+#include <dali/internal/graphics/vulkan/vulkan-device.h>
#if defined(DEBUG_ENABLED)
extern Debug::Filter* gVulkanFilter;
namespace Dali::Graphics::Vulkan
{
-Fence::Fence( Device& graphicsDevice, vk::Fence handle )
-: mGraphicsDevice( &graphicsDevice ),
- mFence(handle)
+FenceImpl* FenceImpl::New(Device& graphicsDevice, const vk::FenceCreateInfo& fenceCreateInfo)
{
+ auto fence = new FenceImpl(graphicsDevice);
+ fence->Initialize(fenceCreateInfo);
+ return fence;
}
-const Fence& Fence::ConstRef() const
+FenceImpl::FenceImpl(Device& graphicsDevice)
+: mGraphicsDevice(&graphicsDevice)
{
- return *this;
}
-Fence& Fence::Ref()
+FenceImpl::~FenceImpl()
{
- return *this;
+ Destroy();
}
-Fence::~Fence() = default;
-
-vk::Fence Fence::GetVkHandle() const
+vk::Fence FenceImpl::GetVkHandle() const
{
return mFence;
}
-bool Fence::OnDestroy()
+void FenceImpl::Initialize(const vk::FenceCreateInfo& fenceCreateInfo)
{
- // Copy the Vulkan handles and pointers here.
- // Cannot capture the "this" pointer in the lambda.
- // When the lambda is invoked "this" is already destroyed.
- // This method is only deferring execution to the end of the frame.
auto device = mGraphicsDevice->GetLogicalDevice();
- auto fence = mFence;
- auto allocator = &mGraphicsDevice->GetAllocator();
-
- // capture copies of the pointers and handles
- mGraphicsDevice->DiscardResource( [ device, fence, allocator ]() {
- DALI_LOG_INFO( gVulkanFilter, Debug::General, "Invoking deleter function: fence->%p\n",
- static_cast< VkFence >(fence) )
- device.destroyFence( fence, allocator );
- } );
+ VkAssert(device.createFence(&fenceCreateInfo, &mGraphicsDevice->GetAllocator(), &mFence));
+}
- return false;
+void FenceImpl::Destroy()
+{
+ mGraphicsDevice->GetLogicalDevice().destroyFence(mFence, &mGraphicsDevice->GetAllocator());
}
-void Fence::Reset()
+void FenceImpl::Reset()
{
auto device = mGraphicsDevice->GetLogicalDevice();
VkAssert(device.resetFences(1, &mFence));
}
-void Fence::Wait(uint32_t timeout)
+void FenceImpl::Wait(uint32_t timeout)
{
auto device = mGraphicsDevice->GetLogicalDevice();
VkAssert(device.waitForFences(1, &mFence, VK_TRUE, timeout));
}
-vk::Result Fence::GetStatus()
+vk::Result FenceImpl::GetStatus()
{
- auto device = mGraphicsDevice->GetLogicalDevice();
+ auto device = mGraphicsDevice->GetLogicalDevice();
vk::Result result = device.getFenceStatus(mFence);
return result;
}
{
class Device;
-
/**
- * CPU sync
+ * Synchronization primitive
*/
-class Fence : public VkManaged
+class FenceImpl
{
public:
- Fence(Device& graphicsDevice, vk::Fence handle);
+ static FenceImpl* New(Device& graphicsDevice, const vk::FenceCreateInfo& fenceCreateInfo);
- ~Fence() override;
+ FenceImpl(Device& graphicsDevice);
- const Fence& ConstRef() const;
+ void Initialize(const vk::FenceCreateInfo& fenceCreateInfo);
- Fence& Ref();
+ ~FenceImpl();
vk::Fence GetVkHandle() const;
- bool OnDestroy() override;
-
void Reset();
- void Wait(uint32_t timeout=std::numeric_limits< uint32_t >::max());
+ void Wait(uint32_t timeout = std::numeric_limits<uint32_t>::max());
vk::Result GetStatus();
private:
+ void Destroy();
- Device* mGraphicsDevice;
+ Device* mGraphicsDevice;
vk::Fence mFence;
};
: Resource(createInfo, controller),
mFramebufferImpl{nullptr}
{
- // mController.AddFramebuffer(*this)
}
Framebuffer::~Framebuffer() = default;
#include <dali/internal/graphics/vulkan-impl/vulkan-graphics-controller.h>
// INTERNAL INCLUDES
+#include <dali/integration-api/pixel-data-integ.h>
+
#include <dali/internal/graphics/vulkan/vulkan-device.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-buffer-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-buffer.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-command-buffer-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-command-buffer.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-fence-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-framebuffer-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-memory.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-program.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-render-pass.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-render-target.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-sampler.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-shader.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-texture.h>
#include <dali/internal/window-system/common/window-render-surface.h>
+#include <queue>
+#include <unordered_map>
+
#if defined(DEBUG_ENABLED)
extern Debug::Filter* gVulkanFilter;
#endif
namespace Dali::Graphics::Vulkan
{
+// Returns true when the 2D copy areas of two transfer requests overlap.
+// Computes the bounding box of both rectangles: they intersect iff the box
+// is narrower than the summed widths AND shorter than the summed heights.
+// NOTE(review): reads bufferToImageInfo unconditionally, so both requests
+// are assumed to be of type BUFFER_TO_IMAGE — confirm callers guarantee this.
+static bool TestCopyRectIntersection(const ResourceTransferRequest* srcRequest, const ResourceTransferRequest* currentRequest)
+{
+ auto srcOffset = srcRequest->bufferToImageInfo.copyInfo.imageOffset;
+ auto srcExtent = srcRequest->bufferToImageInfo.copyInfo.imageExtent;
+
+ auto curOffset = currentRequest->bufferToImageInfo.copyInfo.imageOffset;
+ auto curExtent = currentRequest->bufferToImageInfo.copyInfo.imageExtent;
+
+ // Corners of the bounding box enclosing both rectangles.
+ auto offsetX0 = std::min(srcOffset.x, curOffset.x);
+ auto offsetY0 = std::min(srcOffset.y, curOffset.y);
+ auto offsetX1 = std::max(srcOffset.x + int32_t(srcExtent.width), curOffset.x + int32_t(curExtent.width));
+ auto offsetY1 = std::max(srcOffset.y + int32_t(srcExtent.height), curOffset.y + int32_t(curExtent.height));
+
+ return ((offsetX1 - offsetX0) < (int32_t(srcExtent.width) + int32_t(curExtent.width)) &&
+ (offsetY1 - offsetY0) < (int32_t(srcExtent.height) + int32_t(curExtent.height)));
+}
+
/**
* @brief Custom deleter for all Graphics objects created
* with use of the Controller.
}
// Create brand new object
- return UPtr(new VKType(info, controller), VKDeleter<VKType>());
+ UPtr gfxObject(new VKType(info, controller), VKDeleter<VKType>());
+ static_cast<VKType*>(gfxObject.get())->InitializeResource(); // @todo Consider using create queues?
+ return gfxObject;
}
}
{
mGraphicsDevice = &device;
- // Create factories.
- // Create pipeline cache
- // Initialize thread pool
+ // @todo Create pipeline cache & descriptor set allocator here
+
+ mThreadPool.Initialize();
return true;
}
}
}
+ /**
+ * Mapping the staging buffer may take some time, so we can delegate it to a worker thread
+ * if necessary.
+ */
+ // Ensures the texture staging buffer exists and holds at least 'size'
+ // bytes, (re)creating and mapping it when too small or absent.
+ // @param size            minimum required staging buffer size, in bytes
+ // @param useWorkerThread when true, the allocation+map runs on the thread
+ //                        pool and a future is returned; when false it runs
+ //                        synchronously and an empty future is returned
+ // NOTE(review): the worker lambda captures surrounding state by reference
+ // ([&, size]); confirm this object outlives any submitted task.
+ Dali::SharedFuture InitializeTextureStagingBuffer(uint32_t size, bool useWorkerThread)
+ {
+ // Check if we can reuse existing staging buffer for that frame
+ if(!mTextureStagingBuffer ||
+ mTextureStagingBuffer->GetImpl()->GetSize() < size)
+ {
+ auto workerFunc = [&, size](auto workerIndex)
+ {
+ Graphics::BufferCreateInfo createInfo{};
+ createInfo.SetSize(size)
+ .SetUsage(0u | Dali::Graphics::BufferUsage::TRANSFER_SRC);
+ mTextureStagingBuffer.reset(static_cast<Vulkan::Buffer*>(mGraphicsController.CreateBuffer(createInfo, nullptr).release()));
+ MapTextureStagingBuffer();
+ };
+
+ if(useWorkerThread)
+ {
+ return mThreadPool.SubmitTask(0u, workerFunc);
+ }
+ else
+ {
+ workerFunc(0);
+ }
+ }
+ return {};
+ }
+
+ // Maps the whole texture staging buffer for CPU writes (no-op when it is
+ // already mapped) and caches the locked region's base address in
+ // mTextureStagingBufferMappedPtr.
+ void MapTextureStagingBuffer()
+ {
+ // Write into memory in parallel
+ if(!mTextureStagingBufferMappedMemory)
+ {
+ auto size = mTextureStagingBuffer->GetImpl()->GetSize();
+ MapBufferInfo mapInfo{mTextureStagingBuffer.get(), 0 | Graphics::MemoryUsageFlagBits::WRITE, 0, size};
+ mTextureStagingBufferMappedMemory = mGraphicsController.MapBufferRange(mapInfo);
+ mTextureStagingBufferMappedPtr = mTextureStagingBufferMappedMemory->LockRegion(0, size);
+ }
+ }
+
+ // Releases the staging buffer mapping and clears the cached write pointer.
+ void UnmapTextureStagingBuffer()
+ {
+ // Unmap memory
+ mTextureStagingBufferMappedPtr = nullptr;
+ mTextureStagingBufferMappedMemory.reset();
+ }
+
+ // Flushes all queued resource transfer requests to the GPU transfer queue.
+ // Requests are grouped per destination image; copies whose areas intersect
+ // are split into sequential batches, each recorded into a command buffer,
+ // submitted, and synchronized with a fence before the next batch starts.
+ // Staging resources are destroyed afterwards and the queue is cleared.
+ // NOTE(review): 'immediateOnly' is not referenced anywhere in this body —
+ // confirm whether the parameter is still needed.
+ void ProcessResourceTransferRequests(bool immediateOnly = false)
+ {
+ std::lock_guard<std::recursive_mutex> lock(mResourceTransferMutex);
+ if(!mResourceTransferRequests.empty())
+ {
+ using ResourceTransferRequestList = std::vector<const ResourceTransferRequest*>;
+
+ /**
+ * Structure associating unique images and lists of transfer requests for which
+ * the key image is a destination. It contains separate lists of requests per image.
+ * Each list of requests groups non-intersecting copy operations into smaller batches.
+ */
+ struct ResourceTransferRequestPair
+ {
+ ResourceTransferRequestPair(Vulkan::Image& key)
+ : image(key),
+ requestList{{}}
+ {
+ }
+
+ Vulkan::Image& image;
+ std::vector<ResourceTransferRequestList> requestList;
+ };
+
+ // Map of all the requests where 'image' is a key.
+ std::vector<ResourceTransferRequestPair> requestMap;
+
+ auto highestBatchIndex = 1u;
+
+ // Collect all unique destination images and all transfer requests associated with them
+ for(const auto& req : mResourceTransferRequests)
+ {
+ // Pick the image this request targets, depending on the request type.
+ Vulkan::Image* image{nullptr};
+ if(req.requestType == TransferRequestType::BUFFER_TO_IMAGE)
+ {
+ image = req.bufferToImageInfo.dstImage;
+ }
+ else if(req.requestType == TransferRequestType::IMAGE_TO_IMAGE)
+ {
+ image = req.imageToImageInfo.dstImage;
+ }
+ else if(req.requestType == TransferRequestType::USE_TBM_SURFACE)
+ {
+ image = req.useTBMSurfaceInfo.srcImage;
+ }
+ else if(req.requestType == TransferRequestType::LAYOUT_TRANSITION_ONLY)
+ {
+ image = req.imageLayoutTransitionInfo.image;
+ }
+ assert(image);
+
+ auto predicate = [&](auto& item) -> bool
+ {
+ return image->GetVkHandle() == item.image.GetVkHandle();
+ };
+ auto it = std::find_if(requestMap.begin(), requestMap.end(), predicate);
+
+ if(it == requestMap.end())
+ {
+ // initialise new array
+ requestMap.emplace_back(*image);
+ it = requestMap.end() - 1;
+ }
+
+ auto& transfers = it->requestList;
+
+ // Compare with current transfer list whether there are any intersections
+ // with current image copy area. If intersection occurs, start new list
+ auto& currentList = transfers.back();
+
+ bool intersects(false);
+ for(auto& item : currentList)
+ {
+ // if area intersects create new list
+ if((intersects = TestCopyRectIntersection(item, &req)))
+ {
+ transfers.push_back({});
+ highestBatchIndex = std::max(highestBatchIndex, uint32_t(transfers.size()));
+ break;
+ }
+ }
+
+ // push request to the most recently created list
+ transfers.back().push_back(&req);
+ }
+
+ // For all unique images prepare layout transition barriers as all of them must be
+ // in eTransferDstOptimal layout
+ std::vector<vk::ImageMemoryBarrier> preLayoutBarriers;
+ std::vector<vk::ImageMemoryBarrier> postLayoutBarriers;
+ for(auto& item : requestMap)
+ {
+ auto& image = item.image;
+ // add barrier
+ preLayoutBarriers.push_back(image.CreateMemoryBarrier(vk::ImageLayout::eTransferDstOptimal));
+ postLayoutBarriers.push_back(image.CreateMemoryBarrier(vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal));
+ image.SetImageLayout(vk::ImageLayout::eShaderReadOnlyOptimal);
+ }
+
+ // Build command buffer for each image until reaching next sync point
+ Graphics::CommandBufferCreateInfo createInfo{};
+ createInfo.SetLevel(Graphics::CommandBufferLevel::PRIMARY);
+ auto gfxCommandBuffer = mGraphicsController.CreateCommandBuffer(createInfo, nullptr);
+ auto commandBuffer = static_cast<Vulkan::CommandBuffer*>(gfxCommandBuffer.get());
+
+ // Fence between submissions
+ // NOTE(review): FenceImpl::New returns a raw pointer that is never
+ // deleted in this scope — confirm ownership / potential leak.
+ auto fence = FenceImpl::New(*mGraphicsDevice, {});
+
+ /**
+ * The loop iterates through requests for each unique image. It parallelizes
+ * transfers to images until end of data in the batch.
+ * After submitting copy commands the loop waits for the fence to be signalled
+ * and repeats recording for the next batch of transfer requests.
+ */
+ for(auto i = 0u; i < highestBatchIndex; ++i)
+ {
+ Graphics::CommandBufferBeginInfo beginInfo{0 | CommandBufferUsageFlagBits::ONE_TIME_SUBMIT};
+ commandBuffer->Begin(beginInfo);
+
+ // change image layouts only once
+ if(i == 0)
+ {
+ commandBuffer->GetImpl()->PipelineBarrier(vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer, {}, {}, {}, preLayoutBarriers);
+ }
+
+ for(auto& item : requestMap)
+ {
+ auto& batchItem = item.requestList;
+ if(batchItem.size() <= i)
+ {
+ continue;
+ }
+
+ auto& requestList = batchItem[i];
+
+ // record all copy commands for this batch
+ for(auto& req : requestList)
+ {
+ if(req->requestType == TransferRequestType::BUFFER_TO_IMAGE)
+ {
+ commandBuffer->GetImpl()->CopyBufferToImage(req->bufferToImageInfo.srcBuffer,
+ req->bufferToImageInfo.dstImage,
+ vk::ImageLayout::eTransferDstOptimal,
+ {req->bufferToImageInfo.copyInfo});
+ }
+ else if(req->requestType == TransferRequestType::IMAGE_TO_IMAGE)
+ {
+ commandBuffer->GetImpl()->CopyImage(req->imageToImageInfo.srcImage,
+ vk::ImageLayout::eTransferSrcOptimal,
+ req->imageToImageInfo.dstImage,
+ vk::ImageLayout::eTransferDstOptimal,
+ {req->imageToImageInfo.copyInfo});
+ }
+ }
+ }
+
+ // if this is the last batch restore original layouts
+ if(i == highestBatchIndex - 1)
+ {
+ commandBuffer->GetImpl()->PipelineBarrier(vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader, {}, {}, {}, postLayoutBarriers);
+ }
+ commandBuffer->End();
+
+ // submit to the queue
+ mGraphicsDevice->Submit(mGraphicsDevice->GetTransferQueue(0u), {Vulkan::SubmissionData{{}, {}, {commandBuffer->GetImpl()}, {}}}, fence);
+ fence->Wait();
+ fence->Reset();
+ }
+
+ // Destroy staging resources immediately
+ for(auto& request : mResourceTransferRequests)
+ {
+ if(request.requestType == TransferRequestType::BUFFER_TO_IMAGE)
+ {
+ auto& buffer = request.bufferToImageInfo.srcBuffer;
+ // Do not destroy the shared texture staging buffer; it is reused
+ if(buffer != mTextureStagingBuffer->GetImpl())
+ {
+ buffer->DestroyNow();
+ }
+ }
+ else if(request.requestType == TransferRequestType::IMAGE_TO_IMAGE)
+ {
+ auto& image = request.imageToImageInfo.srcImage;
+ if(image->GetVkHandle())
+ {
+ image->DestroyNow();
+ }
+ }
+ }
+
+ // Clear transfer queue
+ mResourceTransferRequests.clear();
+ }
+ }
+
VulkanGraphicsController& mGraphicsController;
Vulkan::Device* mGraphicsDevice{nullptr};
- std::size_t mCapacity{0u}; ///< Memory Usage (of command buffers)
+
+ // used for texture<->buffer<->memory transfers
+ std::vector<ResourceTransferRequest> mResourceTransferRequests;
+ std::recursive_mutex mResourceTransferMutex{};
+
+ std::unique_ptr<Vulkan::Buffer> mTextureStagingBuffer{};
+ Dali::SharedFuture mTextureStagingBufferFuture{};
+ Graphics::UniquePtr<Graphics::Memory> mTextureStagingBufferMappedMemory{nullptr};
+ void* mTextureStagingBufferMappedPtr{nullptr};
+
+ ThreadPool mThreadPool;
+
+ std::unordered_map<uint32_t, Graphics::UniquePtr<Graphics::Texture>> mExternalTextureResources; ///< Used for ResourceId.
+ std::queue<const Vulkan::Texture*> mTextureMipmapGenerationRequests; ///< Queue for texture mipmap generation requests
+
+ std::size_t mCapacity{0u}; ///< Memory Usage (of command buffers)
};
VulkanGraphicsController::VulkanGraphicsController()
{
}
-void VulkanGraphicsController::UpdateTextures(const std::vector<TextureUpdateInfo>& updateInfoList,
- const std::vector<TextureUpdateSourceInfo>& sourceList)
+void VulkanGraphicsController::UpdateTextures(
+ const std::vector<Dali::Graphics::TextureUpdateInfo>& updateInfoList,
+ const std::vector<Dali::Graphics::TextureUpdateSourceInfo>& sourceList)
{
+ using MemoryUpdateAndOffset = std::pair<const Dali::Graphics::TextureUpdateInfo*, uint32_t>;
+ std::vector<MemoryUpdateAndOffset> relevantUpdates{};
+
+ std::vector<Task> copyTasks{};
+
+ relevantUpdates.reserve(updateInfoList.size());
+ copyTasks.reserve(updateInfoList.size());
+
+ uint32_t totalStagingBufferSize{0u};
+
+ void* stagingBufferMappedPtr = nullptr;
+
+ std::vector<uint8_t*> memoryDiscardQ;
+ std::vector<Dali::PixelData> pixelDataDiscardQ;
+
+ /**
+ * If a texture appears more than once we need to process it preserving the order
+ * of updates. It's necessary to make sure that all updates will run on
+ * the same thread.
+ */
+ struct TextureTask
+ {
+ TextureTask(const Dali::Graphics::TextureUpdateInfo* i, const Dali::Task& t)
+ : pInfo(i),
+ copyTask(t)
+ {
+ }
+ const Dali::Graphics::TextureUpdateInfo* pInfo;
+ Dali::Task copyTask;
+ };
+
+ std::map<Dali::Graphics::Texture*, std::vector<TextureTask>> updateMap;
+ for(auto& info : updateInfoList)
+ {
+ updateMap[info.dstTexture].emplace_back(&info, nullptr);
+ }
+
+ // make a copy of update info lists by storing additional information
+ for(auto& aTextureInfo : updateMap)
+ {
+ auto gfxTexture = aTextureInfo.first;
+ auto texture = static_cast<Vulkan::Texture*>(gfxTexture);
+
+ for(auto& textureTask : aTextureInfo.second)
+ {
+ auto& info = *textureTask.pInfo;
+ const auto& source = sourceList[info.srcReference];
+ if(source.sourceType == Dali::Graphics::TextureUpdateSourceInfo::Type::MEMORY ||
+ source.sourceType == Dali::Graphics::TextureUpdateSourceInfo::Type::PIXEL_DATA)
+ {
+ uint8_t* sourcePtr = nullptr;
+ if(source.sourceType == Graphics::TextureUpdateSourceInfo::Type::MEMORY)
+ {
+ sourcePtr = reinterpret_cast<uint8_t*>(source.memorySource.memory);
+ memoryDiscardQ.push_back(sourcePtr);
+ }
+ else
+ {
+ auto pixelBufferData = Dali::Integration::GetPixelDataBuffer(source.pixelDataSource.pixelData);
+
+ sourcePtr = pixelBufferData.buffer + info.srcOffset;
+ if(Dali::Integration::IsPixelDataReleaseAfterUpload(source.pixelDataSource.pixelData) &&
+ info.srcOffset == 0u)
+ {
+ pixelDataDiscardQ.push_back(source.pixelDataSource.pixelData);
+ }
+ }
+
+ auto sourceInfoPtr = &source;
+ auto pInfo = textureTask.pInfo;
+
+ // If the destination texture supports direct write access, then we can
+ // schedule direct copy task and skip the GPU upload. The update
+ // should be fully complete.
+ auto destTexture = static_cast<Vulkan::Texture*>(info.dstTexture);
+
+ if(destTexture->GetProperties().directWriteAccessEnabled)
+ {
+ auto taskLambda = [pInfo, sourcePtr, sourceInfoPtr, texture](auto workerIndex)
+ {
+ const auto& properties = texture->GetProperties();
+
+ if(properties.emulated)
+ {
+ std::vector<char> data;
+ auto memoryRequirements = texture->GetMemoryRequirements();
+ data.resize(memoryRequirements.size);
+ texture->TryConvertPixelData(sourcePtr, pInfo->srcSize, pInfo->srcExtent2D.width, pInfo->srcExtent2D.height, &data[0]);
+
+ // substitute temporary source
+ Graphics::TextureUpdateSourceInfo newSource{};
+ newSource.sourceType = Graphics::TextureUpdateSourceInfo::Type::MEMORY;
+ newSource.memorySource.memory = data.data();
+ texture->CopyMemoryDirect(*pInfo, newSource, false);
+ }
+ else
+ {
+ texture->CopyMemoryDirect(*pInfo, *sourceInfoPtr, false);
+ }
+ };
+ textureTask.copyTask = taskLambda;
+ }
+ else
+ {
+ const auto size = destTexture->GetMemoryRequirements().size;
+ auto currentOffset = totalStagingBufferSize;
+
+ relevantUpdates.emplace_back(&info, currentOffset);
+ totalStagingBufferSize += uint32_t(size);
+ auto ppStagingMemory = &stagingBufferMappedPtr; // this pointer will be set later!
+
+ // The staging buffer is not allocated yet. The task knows pointer to the pointer which will point
+ // at staging buffer right before executing tasks. The function will either perform direct copy
+ // or will do suitable conversion if source format isn't supported and emulation is available.
+ auto taskLambda = [ppStagingMemory, currentOffset, pInfo, sourcePtr, texture](auto workerThread)
+ {
+ char* pStagingMemory = reinterpret_cast<char*>(*ppStagingMemory);
+
+      // Try to initialise texture resources explicitly if they are not yet initialised
+ texture->InitializeImageView();
+
+ // If texture is 'emulated' convert pixel data otherwise do direct copy
+ const auto& properties = texture->GetProperties();
+
+ if(properties.emulated)
+ {
+ texture->TryConvertPixelData(sourcePtr, pInfo->srcSize, pInfo->srcExtent2D.width, pInfo->srcExtent2D.height, &pStagingMemory[currentOffset]);
+ }
+ else
+ {
+ std::copy(sourcePtr, sourcePtr + pInfo->srcSize, &pStagingMemory[currentOffset]);
+ }
+ };
+
+          // Add task. NOTE(review): 'relevantUpdates' already received an entry for this
+          // info above (when the staging offset was reserved); the second emplace_back
+          // below looks like a duplicate that would trigger a second CopyBuffer — verify.
+ textureTask.copyTask = taskLambda;
+ relevantUpdates.emplace_back(&info, currentOffset);
+ }
+ }
+ else
+ {
+ // for other source types offset within staging buffer doesn't matter
+ relevantUpdates.emplace_back(&info, 1u);
+ }
+ }
+ }
+
+ // Prepare one task per each texture to make sure sequential order of updates
+ // for the same texture.
+  // @todo: this step can probably be avoided when using optimal tiling!
+ for(auto& item : updateMap)
+ {
+ auto pUpdates = &item.second;
+ auto task = [pUpdates](auto workerIndex)
+ {
+ for(auto& update : *pUpdates)
+ {
+ update.copyTask(workerIndex);
+ }
+ };
+ copyTasks.emplace_back(task);
+ }
+
+ // Allocate staging buffer for all updates using CPU memory
+ // as source. The staging buffer exists only for a time of 1 frame.
+ auto& threadPool = mImpl->mThreadPool;
+
+ // Make sure the Initialise() function is not busy with creating first staging buffer
+ if(mImpl->mTextureStagingBufferFuture)
+ {
+ mImpl->mTextureStagingBufferFuture->Wait();
+ mImpl->mTextureStagingBufferFuture.reset();
+ }
+
+ // Check whether we need staging buffer and if we can reuse existing staging buffer for that frame.
+ if(totalStagingBufferSize)
+ {
+ if(!mImpl->mTextureStagingBuffer ||
+ mImpl->mTextureStagingBuffer->GetImpl()->GetSize() < totalStagingBufferSize)
+ {
+ // Initialise new staging buffer. Since caller function is parallelized, initialisation
+ // stays on the caller thread.
+ mImpl->InitializeTextureStagingBuffer(totalStagingBufferSize, false);
+ }
+ mImpl->MapTextureStagingBuffer();
+ }
+
+ // Submit tasks
+ auto futures = threadPool.SubmitTasks(copyTasks, 100u);
+ futures->Wait();
+
+ mImpl->UnmapTextureStagingBuffer();
+
+ for(auto& pair : relevantUpdates)
+ {
+ auto& info = *pair.first;
+ const auto& source = sourceList[info.srcReference];
+ auto destTexture = static_cast<Vulkan::Texture*>(info.dstTexture);
+
+ switch(source.sourceType)
+ {
+ // directly copy buffer
+ case Dali::Graphics::TextureUpdateSourceInfo::Type::BUFFER:
+ {
+ destTexture->CopyBuffer(*source.bufferSource.buffer,
+ info.srcOffset,
+ info.srcExtent2D,
+ info.dstOffset2D,
+ info.layer, // layer
+ info.level, // mipmap
+ {}); // update mode, deprecated
+ break;
+ }
+      // for MEMORY and PIXEL_DATA sources, use the staging buffer
+      // (the PIXEL_DATA case intentionally falls through to MEMORY)
+ case Dali::Graphics::TextureUpdateSourceInfo::Type::PIXEL_DATA:
+ {
+ }
+ case Dali::Graphics::TextureUpdateSourceInfo::Type::MEMORY:
+ {
+ auto memoryBufferOffset = pair.second;
+ destTexture->CopyBuffer(*mImpl->mTextureStagingBuffer,
+ memoryBufferOffset,
+ info.srcExtent2D,
+ info.dstOffset2D,
+ info.layer, // layer
+ info.level, // mipmap
+ {}); // update mode, deprecated
+ break;
+ }
+
+ case Dali::Graphics::TextureUpdateSourceInfo::Type::TEXTURE:
+ // Unsupported
+ break;
+ }
+ }
+
+ // Free source data
+ for(uint8_t* ptr : memoryDiscardQ)
+ {
+ free(reinterpret_cast<void*>(ptr));
+ }
+ for(PixelData pixelData : pixelDataDiscardQ)
+ {
+ Dali::Integration::ReleasePixelDataBuffer(pixelData);
+ }
+}
+
+void VulkanGraphicsController::ScheduleResourceTransfer(Vulkan::ResourceTransferRequest&& transferRequest)
+{
+  // Recursive mutex: presumably ProcessResourceTransferRequests() below can re-enter
+  // scheduling paths while this thread still holds the lock — TODO confirm.
+  std::lock_guard<std::recursive_mutex> lock(mImpl->mResourceTransferMutex);
+  mImpl->mResourceTransferRequests.emplace_back(std::move(transferRequest));
+
+  // if we requested immediate upload then request will be processed instantly with skipping
+  // all the deferred update requests
+  if(!mImpl->mResourceTransferRequests.back().deferredTransferMode)
+  {
+    mImpl->ProcessResourceTransferRequests(true);
+  }
}
void VulkanGraphicsController::GenerateTextureMipmaps(const Graphics::Texture& texture)
return NewObject<Vulkan::Buffer>(bufferCreateInfo, *this, std::move(oldBuffer));
}
-UniquePtr<Graphics::Texture> VulkanGraphicsController::CreateTexture(const TextureCreateInfo& textureCreateInfo, UniquePtr<Graphics::Texture>&& oldTexture)
+UniquePtr<Graphics::Texture> VulkanGraphicsController::CreateTexture(const Graphics::TextureCreateInfo& textureCreateInfo, UniquePtr<Graphics::Texture>&& oldTexture)
{
- return UniquePtr<Graphics::Texture>{};
+ return NewObject<Vulkan::Texture>(textureCreateInfo, *this, std::move(oldTexture));
}
UniquePtr<Graphics::Framebuffer> VulkanGraphicsController::CreateFramebuffer(const Graphics::FramebufferCreateInfo& framebufferCreateInfo, UniquePtr<Graphics::Framebuffer>&& oldFramebuffer)
UniquePtr<Graphics::Program> VulkanGraphicsController::CreateProgram(const Graphics::ProgramCreateInfo& programCreateInfo, UniquePtr<Graphics::Program>&& oldProgram)
{
- return NewObject<Vulkan::Program>(programCreateInfo, *this, std::move(oldProgram));
+ return UniquePtr<Graphics::Program>(new Vulkan::Program(programCreateInfo, *this));
}
UniquePtr<Graphics::Shader> VulkanGraphicsController::CreateShader(const Graphics::ShaderCreateInfo& shaderCreateInfo, UniquePtr<Graphics::Shader>&& oldShader)
{
- return NewObject<Vulkan::Shader>(shaderCreateInfo, *this, std::move(oldShader));
+ return UniquePtr<Graphics::Shader>(new Vulkan::Shader(shaderCreateInfo, *this));
}
UniquePtr<Graphics::Sampler> VulkanGraphicsController::CreateSampler(const Graphics::SamplerCreateInfo& samplerCreateInfo, UniquePtr<Graphics::Sampler>&& oldSampler)
{
- return UniquePtr<Graphics::Sampler>{};
+ return NewObject<Vulkan::Sampler>(samplerCreateInfo, *this, std::move(oldSampler));
}
UniquePtr<Graphics::SyncObject> VulkanGraphicsController::CreateSyncObject(const Graphics::SyncObjectCreateInfo& syncObjectCreateInfo,
UniquePtr<Graphics::Memory> VulkanGraphicsController::MapTextureRange(const MapTextureInfo& mapInfo)
{
-  return UniquePtr<Memory>{nullptr};
+  // Not implemented. (@todo Remove from Graphics API?)
+  return nullptr;
}
void VulkanGraphicsController::UnmapMemory(UniquePtr<Graphics::Memory> memory)
return bufferImpl->GetMemoryRequirements();
}
-MemoryRequirements VulkanGraphicsController::GetTextureMemoryRequirements(Graphics::Texture& texture) const
+MemoryRequirements VulkanGraphicsController::GetTextureMemoryRequirements(Graphics::Texture& gfxTexture) const
{
- return MemoryRequirements{};
+ const Vulkan::Texture* texture = static_cast<const Vulkan::Texture*>(&gfxTexture);
+ return texture->GetMemoryRequirements();
}
-TextureProperties VulkanGraphicsController::GetTextureProperties(const Graphics::Texture& texture)
+TextureProperties VulkanGraphicsController::GetTextureProperties(const Graphics::Texture& gfxTexture)
{
-  return TextureProperties{};
+  // NOTE(review): const_cast is required because Texture::GetProperties() is non-const;
+  // presumably it lazily builds/caches the properties — confirm it has no other side effects.
+  Vulkan::Texture* texture = const_cast<Vulkan::Texture*>(static_cast<const Vulkan::Texture*>(&gfxTexture));
+  return texture->GetProperties();
}
const Graphics::Reflection& VulkanGraphicsController::GetProgramReflection(const Graphics::Program& program)
// @todo Add discard queues
}
+void VulkanGraphicsController::DiscardResource(Vulkan::Sampler* sampler)
+{
+  // @todo Not yet implemented: the sampler should be queued for deferred destruction (discard queue).
+}
+
+void VulkanGraphicsController::DiscardResource(Vulkan::Texture* texture)
+{
+  // @todo Not yet implemented: the texture should be queued for deferred destruction (discard queue).
+}
+
Vulkan::Device& VulkanGraphicsController::GetGraphicsDevice()
{
return *mImpl->mGraphicsDevice;
Graphics::Texture* VulkanGraphicsController::CreateTextureByResourceId(uint32_t resourceId, const Graphics::TextureCreateInfo& createInfo)
{
- Graphics::Texture* ret = nullptr;
- /*
Graphics::UniquePtr<Graphics::Texture> texture;
- auto iter = mExternalTextureResources.find(resourceId);
- DALI_ASSERT_ALWAYS(iter == mExternalTextureResources.end());
+ // Check that this resource id hasn't been used previously
+ auto iter = mImpl->mExternalTextureResources.find(resourceId);
+ DALI_ASSERT_ALWAYS(iter == mImpl->mExternalTextureResources.end());
+
texture = CreateTexture(createInfo, std::move(texture));
- ret = texture.get();
- mExternalTextureResources.insert(std::make_pair(resourceId, std::move(texture)));
- */
- return ret;
+
+ auto gfxTexture = texture.get();
+ mImpl->mExternalTextureResources.insert(std::make_pair(resourceId, std::move(texture)));
+
+ return gfxTexture;
}
void VulkanGraphicsController::DiscardTextureFromResourceId(uint32_t resourceId)
{
- /*
- auto iter = mExternalTextureResources.find(resourceId);
- if(iter != mExternalTextureResources.end())
+ auto iter = mImpl->mExternalTextureResources.find(resourceId);
+ if(iter != mImpl->mExternalTextureResources.end())
{
- mExternalTextureResources.erase(iter);
- }*/
+ mImpl->mExternalTextureResources.erase(iter);
+ }
}
Graphics::Texture* VulkanGraphicsController::GetTextureFromResourceId(uint32_t resourceId)
{
- Graphics::Texture* ret = nullptr;
- /*
- auto iter = mExternalTextureResources.find(resourceId);
- if(iter != mExternalTextureResources.end())
+ Graphics::Texture* gfxTexture = nullptr;
+
+ auto iter = mImpl->mExternalTextureResources.find(resourceId);
+ if(iter != mImpl->mExternalTextureResources.end())
{
- ret = iter->second.get();
+ gfxTexture = iter->second.get();
}
- */
- return ret;
+
+ return gfxTexture;
}
Graphics::UniquePtr<Graphics::Texture> VulkanGraphicsController::ReleaseTextureFromResourceId(uint32_t resourceId)
{
- Graphics::UniquePtr<Graphics::Texture> texture;
- /*
- auto iter = mExternalTextureResources.find(resourceId);
- if(iter != mExternalTextureResources.end())
+ Graphics::UniquePtr<Graphics::Texture> gfxTexture;
+
+ auto iter = mImpl->mExternalTextureResources.find(resourceId);
+ if(iter != mImpl->mExternalTextureResources.end())
{
- texture = std::move(iter->second);
- mExternalTextureResources.erase(iter);
+ gfxTexture = std::move(iter->second);
+ mImpl->mExternalTextureResources.erase(iter);
}
- */
- return texture;
+
+ return gfxTexture;
}
std::size_t VulkanGraphicsController::GetCapacity() const
#include <dali/graphics-api/graphics-controller.h>
+#include <dali/devel-api/threading/thread-pool.h>
#include <dali/integration-api/debug.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-framebuffer.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-program.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-render-target.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-resource-transfer-request.h>
namespace Dali
{
class Device;
class Surface;
class Buffer;
+class Sampler;
+class Texture;
class VulkanGraphicsController : public Graphics::Controller, public Integration::GraphicsConfig
{
void UpdateTextures(const std::vector<TextureUpdateInfo>& updateInfoList,
const std::vector<TextureUpdateSourceInfo>& sourceList) override;
+ /**
+ * Schedule (deferred: on worker thread / immediate: on this thread)
+ * a resource transfer.
+ * @param[in] transferRequest The requested resource transfer
+ */
+ void ScheduleResourceTransfer(ResourceTransferRequest&& transferRequest);
+
/**
* Auto generates mipmaps for the texture
* @param[in] texture The texture
void DiscardResource(Vulkan::RenderTarget* renderTarget);
void DiscardResource(Vulkan::Buffer* buffer);
void DiscardResource(Vulkan::Program* renderProgram);
+ void DiscardResource(Vulkan::Sampler* sampler);
+ void DiscardResource(Vulkan::Texture* texture);
public: // Integration::GraphicsConfig
bool IsBlendEquationSupported(DevelBlendEquation::Type blendEquation) override;
*
*/
-#include <dali/internal/graphics/vulkan/vulkan-device.h>
+#include <dali/integration-api/debug.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-memory-impl.h>
-#include <dali/integration-api/debug.h>
+#include <dali/internal/graphics/vulkan/vulkan-device.h>
#if defined(DEBUG_ENABLED)
extern Debug::Filter* gVulkanFilter;
#endif
-
namespace Dali
{
namespace Graphics
namespace Vulkan
{
-Image::Image( Device& graphicsDevice, const vk::ImageCreateInfo& createInfo, vk::Image externalImage )
-: mGraphicsDevice( &graphicsDevice ),
- mCreateInfo( createInfo ),
- mImage( externalImage ),
- mImageLayout( mCreateInfo.initialLayout ),
- mIsExternal( static_cast<bool>(externalImage) )
-{
- auto depthStencilFormats = std::vector< vk::Format >{
- vk::Format::eD32Sfloat,
- vk::Format::eD16Unorm,
- vk::Format::eD32SfloatS8Uint,
- vk::Format::eD24UnormS8Uint,
- vk::Format::eD16UnormS8Uint,
- vk::Format::eS8Uint,
+Image* Image::New(Device& graphicsDevice, const vk::ImageCreateInfo& createInfo)
+{
+ auto image = new Image(graphicsDevice, createInfo, nullptr);
+ image->Initialize();
+ return image;
+}
+
+Image::Image(Device& graphicsDevice, const vk::ImageCreateInfo& createInfo, vk::Image externalImage)
+: mDevice(graphicsDevice),
+ mCreateInfo(createInfo),
+ mImage(externalImage),
+ mImageLayout(mCreateInfo.initialLayout),
+ mIsExternal(static_cast<bool>(externalImage))
+{
+ auto depthStencilFormats = std::vector<vk::Format>{
+ vk::Format::eD32Sfloat,
+ vk::Format::eD16Unorm,
+ vk::Format::eD32SfloatS8Uint,
+ vk::Format::eD24UnormS8Uint,
+ vk::Format::eD16UnormS8Uint,
+ vk::Format::eS8Uint,
};
- auto hasDepth = std::find( depthStencilFormats.begin(), depthStencilFormats.end(), createInfo.format );
+ auto hasDepth = std::find(depthStencilFormats.begin(), depthStencilFormats.end(), createInfo.format);
- if( hasDepth != depthStencilFormats.end() )
+ if(hasDepth != depthStencilFormats.end())
{
auto format = *hasDepth;
- if( format == vk::Format::eD32Sfloat || format == vk::Format::eD16Unorm )
+ if(format == vk::Format::eD32Sfloat || format == vk::Format::eD16Unorm)
{
mAspectFlags = vk::ImageAspectFlagBits::eDepth;
}
- else if( format == vk::Format::eS8Uint )
+ else if(format == vk::Format::eS8Uint)
{
mAspectFlags = vk::ImageAspectFlagBits::eStencil;
}
}
}
+void Image::Initialize()
+{
+  // Creates the VkImage only; device memory is allocated and bound separately in AllocateAndBind().
+  VkAssert(mDevice.GetLogicalDevice().createImage(&mCreateInfo, &mDevice.GetAllocator("IMAGE"), &mImage));
+}
+
+void Image::AllocateAndBind(vk::MemoryPropertyFlags memoryProperties)
+{
+  // Query size/alignment/type-bits for this image, then pick a compatible heap index.
+  auto requirements = mDevice.GetLogicalDevice().getImageMemoryRequirements(mImage);
+  auto memoryTypeIndex = Device::GetMemoryIndex(mDevice.GetMemoryProperties(),
+                                                requirements.memoryTypeBits,
+                                                memoryProperties);
+
+  // Last ctor argument records whether the memory is host-visible (mappable).
+  mMemory = std::make_unique<MemoryImpl>(mDevice,
+                                         size_t(requirements.size),
+                                         size_t(requirements.alignment),
+                                         (memoryProperties & vk::MemoryPropertyFlagBits::eHostVisible) == vk::MemoryPropertyFlagBits::eHostVisible);
+
+  auto allocateInfo = vk::MemoryAllocateInfo{}
+                        .setMemoryTypeIndex(memoryTypeIndex)
+                        .setAllocationSize(requirements.size);
+
+  // allocate memory for the image
+  auto result = mMemory->Allocate(allocateInfo, mDevice.GetAllocator("DEVICEMEMORY"));
+  if(result != vk::Result::eSuccess)
+  {
+    // NOTE(review): failure is only logged; the image is left without bound memory and
+    // mMemory (from make_unique) is always non-null so the else-if guard never filters — verify.
+    DALI_LOG_INFO(gVulkanFilter, Debug::General, "Unable to allocate memory for the image of size %d!", int(requirements.size));
+  }
+  else if(mMemory) // bind the allocated memory to the image
+  {
+    VkAssert(mDevice.GetLogicalDevice().bindImageMemory(mImage, mMemory->GetVkHandle(), 0));
+  }
+}
+
vk::Image Image::GetVkHandle() const
{
return mImage;
return mImageLayout;
}
+void Image::SetImageLayout(vk::ImageLayout imageLayout)
+{
+  // Updates only the CPU-side record of the layout; no GPU transition is issued here.
+  mImageLayout = imageLayout;
+}
+
+vk::ImageMemoryBarrier Image::CreateMemoryBarrier(vk::ImageLayout newLayout) const
+{
+  // Convenience overload: transitions from the currently tracked layout.
+  return CreateMemoryBarrier(mImageLayout, newLayout);
+}
+
+vk::ImageMemoryBarrier Image::CreateMemoryBarrier(vk::ImageLayout oldLayout, vk::ImageLayout newLayout) const
+{
+  // This function assumes that all images have 1 mip level and 1 layer
+  // Should expand to handle any level/layer
+  auto barrier = vk::ImageMemoryBarrier{}
+                   .setOldLayout(oldLayout)
+                   .setNewLayout(newLayout)
+                   .setSrcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
+                   .setDstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
+                   .setImage(GetVkHandle())
+                   .setSubresourceRange(vk::ImageSubresourceRange{}.setBaseMipLevel(0).setLevelCount(1).setBaseArrayLayer(0).setLayerCount(1));
+
+  barrier.subresourceRange.aspectMask = GetAspectFlags();
+
+  // The srcAccessMask of the image memory barrier shows which operation
+  // must be completed using the old layout, before the transition to the
+  // new one happens.
+  switch(oldLayout)
+  {
+    case vk::ImageLayout::eUndefined:
+      barrier.srcAccessMask = vk::AccessFlags{};
+      break;
+    case vk::ImageLayout::ePreinitialized:
+      barrier.srcAccessMask = vk::AccessFlagBits::eHostWrite;
+      break;
+    case vk::ImageLayout::eColorAttachmentOptimal:
+      barrier.srcAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
+      break;
+    case vk::ImageLayout::eDepthStencilAttachmentOptimal:
+      barrier.srcAccessMask = vk::AccessFlagBits::eDepthStencilAttachmentWrite;
+      break;
+    case vk::ImageLayout::eTransferSrcOptimal:
+      barrier.srcAccessMask = vk::AccessFlagBits::eTransferRead;
+      break;
+    case vk::ImageLayout::eTransferDstOptimal:
+      barrier.srcAccessMask = vk::AccessFlagBits::eTransferWrite;
+      break;
+    case vk::ImageLayout::eShaderReadOnlyOptimal:
+      barrier.srcAccessMask = vk::AccessFlagBits::eShaderRead;
+      break;
+    default:
+      assert(false && "Image layout transition failed: Initial layout not supported.");
+  }
+
+  // Destination access mask controls the dependency for the new image layout
+  switch(newLayout)
+  {
+    case vk::ImageLayout::eTransferDstOptimal:
+      barrier.dstAccessMask = vk::AccessFlagBits::eTransferWrite;
+      break;
+    case vk::ImageLayout::eTransferSrcOptimal:
+      barrier.dstAccessMask = vk::AccessFlagBits::eTransferRead;
+      break;
+    case vk::ImageLayout::eColorAttachmentOptimal:
+      barrier.dstAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
+      break;
+    case vk::ImageLayout::eDepthStencilAttachmentOptimal:
+      barrier.dstAccessMask = vk::AccessFlagBits::eDepthStencilAttachmentWrite;
+      break;
+    case vk::ImageLayout::eShaderReadOnlyOptimal:
+      // If no source access was set (e.g. transitioning from eUndefined), ensure any
+      // host writes and transfer writes complete before shader reads begin.
+      if(barrier.srcAccessMask == vk::AccessFlags{})
+      {
+        barrier.srcAccessMask = vk::AccessFlagBits::eHostWrite | vk::AccessFlagBits::eTransferWrite;
+      }
+
+      barrier.dstAccessMask = vk::AccessFlagBits::eShaderRead;
+      break;
+    default:
+      assert(false && "Image layout transition failed: Target layout not supported.");
+  }
+
+  return barrier;
+}
+
uint32_t Image::GetWidth() const
{
return mCreateInfo.extent.width;
return mAspectFlags;
}
-void Image::SetImageLayout( vk::ImageLayout imageLayout )
-{
- mImageLayout = imageLayout;
-}
-
-const Image& Image::ConstRef()
-{
- return *this;
-}
-
-Image& Image::Ref()
-{
- return *this;
-}
-
vk::ImageUsageFlags Image::GetUsageFlags() const
{
return mCreateInfo.usage;
void Image::DestroyNow()
{
- DestroyVulkanResources(mGraphicsDevice->GetLogicalDevice(), mImage, mDeviceMemory->ReleaseVkObject(),
- &mGraphicsDevice->GetAllocator() );
- mImage = nullptr;
- mDeviceMemory = nullptr;
+ DestroyVulkanResources(mDevice.GetLogicalDevice(), mImage, mMemory->ReleaseVkObject(), &mDevice.GetAllocator());
+ mImage = nullptr;
+ mMemory = nullptr;
}
bool Image::OnDestroy()
{
- if( !mIsExternal )
+ if(!mIsExternal)
{
- if( mImage )
+ if(mImage)
{
- auto device = mGraphicsDevice->GetLogicalDevice();
- auto image = mImage;
- auto allocator = &mGraphicsDevice->GetAllocator();
- auto memory = mDeviceMemory->ReleaseVkObject();
+ auto device = mDevice.GetLogicalDevice();
+ auto image = mImage;
+ auto allocator = &mDevice.GetAllocator();
+ auto memory = mMemory->ReleaseVkObject();
- mGraphicsDevice->DiscardResource( [ device, image, memory, allocator ]() {
- DestroyVulkanResources( device, image, memory, allocator );
- }
- );
+ mDevice.DiscardResource([device, image, memory, allocator]()
+ { DestroyVulkanResources(device, image, memory, allocator); });
}
}
return false;
}
-void Image::DestroyVulkanResources( vk::Device device, vk::Image image, vk::DeviceMemory memory, const vk::AllocationCallbacks* allocator )
+void Image::DestroyVulkanResources(vk::Device device, vk::Image image, vk::DeviceMemory memory, const vk::AllocationCallbacks* allocator)
{
- DALI_LOG_INFO( gVulkanFilter, Debug::General, "Invoking deleter function: image->%p\n",
- static_cast< VkImage >(image) )
- device.destroyImage( image, allocator );
-
- device.freeMemory( memory, allocator );
+ DALI_LOG_INFO(gVulkanFilter, Debug::General, "Invoking deleter function: image->%p\n", static_cast<VkImage>(image))
+ device.destroyImage(image, allocator);
+ device.freeMemory(memory, allocator);
}
-
} // namespace Vulkan
} // namespace Graphics
namespace Dali::Graphics::Vulkan
{
-
class Device;
class Memory;
-class Image : public VkManaged
-{
- friend class Device;
+// @todo use ImageImpl to make naming convention consistent
+/**
+ * Wrapper class for vk::Image
+ */
+class Image
+{
public:
+ static Image* New(Device& graphicsDevice, const vk::ImageCreateInfo& createInfo);
+
+ /**
+ * Create the wrapper object, either for the given vkImage, or as a new image
+ * that will get allocated.
+ *
+ * @param graphicsDevice The graphics Device
+ * @param createInfo The creation structure
+ * @param[in] externalImage External image, or nullptr if not external
+ */
+ Image(Device& graphicsDevice, const vk::ImageCreateInfo& createInfo, vk::Image externalImage = nullptr);
+
+ /**
+ * Second stage initialization:
+ * Creates new VkImage with given specification, it doesn't
+ * bind the memory.
+ */
+ void Initialize();
+
+ /**
+ * Allocate memory for the image and bind it.
+   * Kept separate from Initialize(). @todo Document why allocation/binding is a separate stage.
+ *
+ * @param[in] memoryProperties The properties flags for the memory.
+ */
+ void AllocateAndBind(vk::MemoryPropertyFlags memoryProperties);
+
/**
* Returns underlying Vulkan object
* @return
*/
[[nodiscard]] vk::ImageLayout GetImageLayout() const;
+ /**
+ * Set the image layout locally (Not in GPU)
+ * @param[in] imageLayout The image layout.
+ */
+ void SetImageLayout(vk::ImageLayout imageLayout);
+
+ /**
+ * Create a memory barrier to transition from current layout to the new layout
+ */
+ vk::ImageMemoryBarrier CreateMemoryBarrier(vk::ImageLayout newLayout) const;
+
+ /**
+ * Create a memory barrier for (a future) transition from the given layout to the new layout
+ * @param[in] layout
+ * @param[in] newLayout
+ * @return the memory barrier
+ */
+ vk::ImageMemoryBarrier CreateMemoryBarrier(vk::ImageLayout oldLayout, vk::ImageLayout newLayout) const;
+
/**
* Returns width in pixels
* @return
[[nodiscard]] vk::SampleCountFlagBits GetSampleCount() const;
- void SetImageLayout(vk::ImageLayout imageLayout);
-
- const Image& ConstRef();
-
- Image& Ref();
-
- bool OnDestroy() override;
-
[[nodiscard]] MemoryImpl* GetMemory() const
{
- return mDeviceMemory.get();
+ return mMemory.get();
}
+ bool OnDestroy();
+
/**
* Destroys underlying Vulkan resources on the caller thread.
*
*/
void DestroyNow();
-private:
- /**
- * Creates new VkImage with given specification, it doesn't
- * bind the memory.
- * @param graphics
- * @param createInfo
- */
- Image(Device& graphicsDevice, const vk::ImageCreateInfo& createInfo, vk::Image externalImage = nullptr);
-
/**
* Destroys used Vulkan resource objects
* @param device Vulkan device
static void DestroyVulkanResources(vk::Device device, vk::Image image, vk::DeviceMemory memory, const vk::AllocationCallbacks* allocator);
private:
- Device* mGraphicsDevice;
+ Device& mDevice;
vk::ImageCreateInfo mCreateInfo;
vk::Image mImage;
vk::ImageLayout mImageLayout;
vk::ImageAspectFlags mAspectFlags;
- std::unique_ptr<MemoryImpl> mDeviceMemory;
+ std::unique_ptr<MemoryImpl> mMemory;
bool mIsExternal;
};
#include <dali/internal/graphics/vulkan-impl/vulkan-image-view-impl.h>
-#include <dali/internal/graphics/vulkan/vulkan-device.h>
-#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
#include <dali/integration-api/debug.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
+#include <dali/internal/graphics/vulkan/vulkan-device.h>
#include <utility>
{
namespace Vulkan
{
-
-ImageView::ImageView( Device& graphicsDevice, const Image* image, vk::ImageViewCreateInfo createInfo )
-: mGraphicsDevice( &graphicsDevice ),
- mImage( image ),
- mCreateInfo( std::move( createInfo ) ),
- mImageView( nullptr )
+ImageView* ImageView::NewFromImage(Device& device, const Image& image)
{
+  // Identity RGBA swizzle (no component remapping).
+  vk::ComponentMapping componentsMapping = {vk::ComponentSwizzle::eR,
+                                            vk::ComponentSwizzle::eG,
+                                            vk::ComponentSwizzle::eB,
+                                            vk::ComponentSwizzle::eA};
+
+  return NewFromImage(device, image, componentsMapping);
}
-ImageView::~ImageView() = default;
+ImageView* ImageView::NewFromImage(
+  Device& device,
+  const Image& image,
+  const vk::ComponentMapping& componentMapping)
+{
+  // View covers all mip levels and array layers of the image.
+  auto subresourceRange = vk::ImageSubresourceRange{}
+                            .setAspectMask(image.GetAspectFlags())
+                            .setBaseArrayLayer(0)
+                            .setBaseMipLevel(0)
+                            .setLevelCount(image.GetMipLevelCount())
+                            .setLayerCount(image.GetLayerCount());
+
+  // NOTE(review): view type is hard-coded to e2D — cube/array/3D images would need
+  // a different view type; confirm callers only use 2D images here.
+  auto imageView = New(device,
+                       image,
+                       {},
+                       vk::ImageViewType::e2D,
+                       image.GetFormat(),
+                       componentMapping,
+                       subresourceRange,
+                       nullptr);
+
+  return imageView;
+}
-vk::ImageView ImageView::GetVkHandle() const
+ImageView* ImageView::New(Device& device,
+                          const Image& image,
+                          const vk::ImageViewCreateFlags& flags,
+                          vk::ImageViewType viewType,
+                          vk::Format format,
+                          vk::ComponentMapping components,
+                          vk::ImageSubresourceRange subresourceRange,
+                          void* pNext)
{
-  return mImageView;
+  // Build the create-info structure, then delegate to the create-info overload.
+  auto imageViewCreateInfo = vk::ImageViewCreateInfo{}
+                               .setPNext(pNext)
+                               .setFlags(flags)
+                               .setImage(image.GetVkHandle())
+                               .setViewType(viewType)
+                               .setFormat(format)
+                               .setComponents(components)
+                               .setSubresourceRange(std::move(subresourceRange));
+
+  return New(device, image, imageViewCreateInfo);
}
-const Image* ImageView::GetImage() const
+ImageView* ImageView::New(Device& device, const Image& image, const vk::ImageViewCreateInfo& createInfo)
{
-  return mImage;
+  // Caller takes ownership of the returned raw pointer; the destructor
+  // destroys the underlying vkImageView.
+  auto imageView = new ImageView(device, image, createInfo);
+
+  VkAssert(device.GetLogicalDevice().createImageView(&createInfo, &device.GetAllocator("IMAGEVIEW"), &imageView->mImageView));
+
+  return imageView;
}
-uint32_t ImageView::GetLayerCount() const
+ImageView::ImageView(Device& graphicsDevice, const Image& image, vk::ImageViewCreateInfo createInfo)
+: mDevice(graphicsDevice),
+ mImage(image),
+ mCreateInfo(std::move(createInfo)),
+ mImageView(nullptr)
{
- return mImage->GetLayerCount();
}
-uint32_t ImageView::GetMipLevelCount() const
+ImageView::~ImageView()
{
-  return mImage->GetMipLevelCount();
+  // Destroys the underlying vkImageView immediately on the calling thread.
+  Destroy();
}
-vk::ImageAspectFlags ImageView::GetImageAspectMask() const
+vk::ImageView ImageView::GetVkHandle() const
{
- return vk::ImageAspectFlags();
+ return mImageView;
}
-const ImageView& ImageView::ConstRef()
+const Image* ImageView::GetImage() const
{
- return *this;
+ return &mImage;
}
-ImageView& ImageView::Ref()
+uint32_t ImageView::GetLayerCount() const
{
- return *this;
+ return mImage.GetLayerCount();
}
-bool ImageView::OnDestroy()
+uint32_t ImageView::GetMipLevelCount() const
{
- auto device = mGraphicsDevice->GetLogicalDevice();
- auto imageView = mImageView;
- auto allocator = &mGraphicsDevice->GetAllocator();
+ return mImage.GetMipLevelCount();
+}
- mGraphicsDevice->DiscardResource( [ device, imageView, allocator ]() {
- DALI_LOG_INFO( gVulkanFilter, Debug::General, "Invoking deleter function: image view->%p\n",
- static_cast< VkImageView >(imageView) )
- device.destroyImageView( imageView, allocator );
- } );
+vk::ImageAspectFlags ImageView::GetImageAspectMask() const
+{
+  // NOTE(review): always returns empty flags (the original did too); should this
+  // forward to mImage.GetAspectFlags()? Confirm intent.
+  return vk::ImageAspectFlags();
+}
- return VkManaged::OnDestroy();
+void ImageView::Destroy()
+{
+  // NOTE(review): destruction is now immediate; the previous implementation deferred
+  // via the device's DiscardResource queue — confirm no in-flight GPU usage of the view.
+  auto device = mDevice.GetLogicalDevice();
+  device.destroyImageView(mImageView, mDevice.GetAllocator());
}
} // namespace Vulkan
namespace Dali::Graphics::Vulkan
{
-
class Device;
/*
- * ImageView
+ * Wrapper for vkImageView
*/
-class ImageView : public VkManaged
+class ImageView
{
- friend class Device;
-
public:
+ static ImageView* NewFromImage(Device& device, const Image& image);
+
+ static ImageView* NewFromImage(Device& device, const Image& image, const vk::ComponentMapping& componentMapping);
+
+ static ImageView* New(Device& device, const Image& image, const vk::ImageViewCreateInfo& createInfo);
- ~ImageView() override;
+ static ImageView* New(
+ Device& device,
+ const Image& image,
+ const vk::ImageViewCreateFlags& flags,
+ vk::ImageViewType viewType,
+ vk::Format format,
+ vk::ComponentMapping components,
+ vk::ImageSubresourceRange subresourceRange,
+ void* pNext);
+
+ ImageView(Device& device,
+ const Image& image,
+ vk::ImageViewCreateInfo createInfo);
+
+ ~ImageView();
/**
*
[[nodiscard]] const Image* GetImage() const;
/**
- *
* @return
*/
[[nodiscard]] uint32_t GetLayerCount() const;
/**
- *
* @return
*/
[[nodiscard]] uint32_t GetMipLevelCount() const;
/**
- *
* @return
*/
[[nodiscard]] vk::ImageAspectFlags GetImageAspectMask() const;
- const ImageView& ConstRef();
-
- ImageView& Ref();
-
- bool OnDestroy() override;
+ void Destroy();
private:
- ImageView(Device& graphicsDevice,
- const Image* image,
- vk::ImageViewCreateInfo createInfo );
-
-private:
- Device* mGraphicsDevice;
- const Image* mImage;
+ Device& mDevice;
+ const Image& mImage;
vk::ImageViewCreateInfo mCreateInfo;
- vk::ImageView mImageView;
+ vk::ImageView mImageView;
};
} // namespace Dali::Graphics::Vulkan
-
-
#endif // DALI_INTERNAL_GRAPHICS_VULKAN_IMAGE_VIEW_IMPL_H
*/
// INTERNAL INCLUDES
-#include <dali/internal/graphics/vulkan-impl/vulkan-queue-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-fence-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-queue-impl.h>
namespace Dali::Graphics::Vulkan
{
// submission
-SubmissionData::SubmissionData( const std::vector<vk::Semaphore>& waitSemaphores_,
- vk::PipelineStageFlags waitDestinationStageMask_,
- const std::vector<CommandBufferImpl*>& commandBuffers_,
- const std::vector<vk::Semaphore>& signalSemaphores_ )
- : waitSemaphores( waitSemaphores_ ),
- waitDestinationStageMask( waitDestinationStageMask_ ),
- commandBuffers( commandBuffers_ ),
- signalSemaphores( signalSemaphores_ )
+SubmissionData::SubmissionData(const std::vector<vk::Semaphore>& waitSemaphores_,
+ vk::PipelineStageFlags waitDestinationStageMask_,
+ const std::vector<CommandBufferImpl*>& commandBuffers_,
+ const std::vector<vk::Semaphore>& signalSemaphores_)
+: waitSemaphores(waitSemaphores_),
+ waitDestinationStageMask(waitDestinationStageMask_),
+ commandBuffers(commandBuffers_),
+ signalSemaphores(signalSemaphores_)
{
}
-SubmissionData& SubmissionData::SetWaitSemaphores( const std::vector< vk::Semaphore >& semaphores )
+SubmissionData& SubmissionData::SetWaitSemaphores(const std::vector<vk::Semaphore>& semaphores)
{
waitSemaphores = semaphores;
return *this;
}
-SubmissionData& SubmissionData::SetWaitDestinationStageMask( vk::PipelineStageFlags dstStageMask )
+SubmissionData& SubmissionData::SetWaitDestinationStageMask(vk::PipelineStageFlags dstStageMask)
{
waitDestinationStageMask = dstStageMask;
return *this;
}
-SubmissionData& SubmissionData::SetCommandBuffers( const std::vector<CommandBufferImpl* >& cmdBuffers )
+SubmissionData& SubmissionData::SetCommandBuffers(const std::vector<CommandBufferImpl*>& cmdBuffers)
{
commandBuffers = cmdBuffers;
return *this;
}
-SubmissionData& SubmissionData::SetSignalSemaphores( const std::vector< vk::Semaphore >& semaphores )
+SubmissionData& SubmissionData::SetSignalSemaphores(const std::vector<vk::Semaphore>& semaphores)
{
signalSemaphores = semaphores;
return *this;
}
// queue
-Queue::Queue( vk::Queue queue,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
- vk::QueueFlags queueFlags )
- : mQueue( queue ),
- mFlags( queueFlags ),
- mQueueFamilyIndex( queueFamilyIndex ),
- mQueueIndex( queueIndex ),
- mMutex()
+Queue::Queue(vk::Queue queue,
+ uint32_t queueFamilyIndex,
+ uint32_t queueIndex,
+ vk::QueueFlags queueFlags)
+: mQueue(queue),
+ mFlags(queueFlags),
+ mQueueFamilyIndex(queueFamilyIndex),
+ mQueueIndex(queueIndex),
+ mMutex()
{
}
std::unique_ptr<std::lock_guard<std::recursive_mutex>> Queue::Lock()
{
- return std::unique_ptr<std::lock_guard<std::recursive_mutex>>( new std::lock_guard<std::recursive_mutex>( mMutex ) );
+ return std::unique_ptr<std::lock_guard<std::recursive_mutex>>(new std::lock_guard<std::recursive_mutex>(mMutex));
}
vk::Result Queue::WaitIdle()
return mQueue.presentKHR(&presentInfo);
}
-vk::Result Queue::Submit(std::vector<vk::SubmitInfo>& info, Fence* fence)
+vk::Result Queue::Submit(std::vector<vk::SubmitInfo>& info, FenceImpl* fence)
{
- return VkAssert(mQueue.submit(info, fence?fence->GetVkHandle():nullptr));
+ return VkAssert(mQueue.submit(info, fence ? fence->GetVkHandle() : nullptr));
}
} // namespace Dali::Graphics::Vulkan
namespace Dali::Graphics::Vulkan
{
class CommandBufferImpl;
-class Fence;
+class FenceImpl;
+class Device;
struct SubmissionData
{
SubmissionData() = default;
- explicit SubmissionData( const std::vector< vk::Semaphore >& waitSemaphores_,
- vk::PipelineStageFlags waitDestinationStageMask_,
- const std::vector<CommandBufferImpl* >& commandBuffers_,
- const std::vector< vk::Semaphore >& signalSemaphores_ );
+ explicit SubmissionData(const std::vector<vk::Semaphore>& waitSemaphores_,
+ vk::PipelineStageFlags waitDestinationStageMask_,
+ const std::vector<CommandBufferImpl*>& commandBuffers_,
+ const std::vector<vk::Semaphore>& signalSemaphores_);
- SubmissionData& SetWaitSemaphores( const std::vector< vk::Semaphore >& semaphores );
+ SubmissionData& SetWaitSemaphores(const std::vector<vk::Semaphore>& semaphores);
- SubmissionData& SetWaitDestinationStageMask( vk::PipelineStageFlags dstStageMask );
+ SubmissionData& SetWaitDestinationStageMask(vk::PipelineStageFlags dstStageMask);
- SubmissionData& SetCommandBuffers( const std::vector<CommandBufferImpl* >& cmdBuffers );
+ SubmissionData& SetCommandBuffers(const std::vector<CommandBufferImpl*>& cmdBuffers);
- SubmissionData& SetSignalSemaphores( const std::vector< vk::Semaphore >& semaphores );
+ SubmissionData& SetSignalSemaphores(const std::vector<vk::Semaphore>& semaphores);
- std::vector< vk::Semaphore > waitSemaphores;
- vk::PipelineStageFlags waitDestinationStageMask;
- std::vector<CommandBufferImpl* > commandBuffers;
- std::vector< vk::Semaphore > signalSemaphores;
+ std::vector<vk::Semaphore> waitSemaphores;
+ vk::PipelineStageFlags waitDestinationStageMask;
+ std::vector<CommandBufferImpl*> commandBuffers;
+ std::vector<vk::Semaphore> signalSemaphores;
};
-class Device;
-
class Queue
{
public:
- Queue(vk::Queue queue,
- uint32_t queueFamilyIndex,
- uint32_t queueIndex,
+ Queue(vk::Queue queue,
+ uint32_t queueFamilyIndex,
+ uint32_t queueIndex,
vk::QueueFlags queueFlags);
~Queue(); // queues are non-destructible
vk::Result Present(vk::PresentInfoKHR& presentInfo);
- vk::Result Submit(std::vector<vk::SubmitInfo>& info, Fence* fence);
+ vk::Result Submit(std::vector<vk::SubmitInfo>& info, FenceImpl* fence);
private:
- vk::Queue mQueue;
+ vk::Queue mQueue;
vk::QueueFlags mFlags;
- uint32_t mQueueFamilyIndex;
- uint32_t mQueueIndex;
+ uint32_t mQueueFamilyIndex;
+ uint32_t mQueueIndex;
std::recursive_mutex mMutex;
};
} // namespace Dali::Graphics::Vulkan
-
#endif // DALI_INTERNAL_GRAPHICS_VULKAN_QUEUE_IMPL_H
--- /dev/null
+#pragma once
+
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
+#include <dali/internal/graphics/vulkan/vulkan-hpp-wrapper.h>
+
+namespace Dali::Graphics::Vulkan
+{
+class BufferImpl;
+class Image;
+
+enum class TransferRequestType
+{
+ BUFFER_TO_IMAGE,
+ IMAGE_TO_IMAGE,
+ BUFFER_TO_BUFFER,
+ IMAGE_TO_BUFFER,
+ USE_TBM_SURFACE,
+ LAYOUT_TRANSITION_ONLY,
+ UNDEFINED
+};
+
+/**
+ * Structure describing blitting request Buffer to Image
+ */
+struct ResourceTransferRequest
+{
+  ResourceTransferRequest(TransferRequestType type)
+  : requestType(type)
+  {
+  }
+
+  TransferRequestType requestType;
+
+  struct
+  {
+    BufferImpl*         srcBuffer{nullptr}; /// Source buffer
+    Image*              dstImage{nullptr};  /// Destination image
+    vk::BufferImageCopy copyInfo{};         /// Vulkan specific copy info
+  } bufferToImageInfo;
+
+  struct
+  {
+    Image*        srcImage{nullptr}; /// Source image
+    Image*        dstImage{nullptr}; /// Destination image
+    vk::ImageCopy copyInfo{};        /// Vulkan specific copy info
+  } imageToImageInfo;
+
+  struct
+  {
+    Image*          image{nullptr};                         /// Image whose layout is transitioned
+    vk::ImageLayout srcLayout{vk::ImageLayout::eUndefined}; /// Layout before the transition
+    vk::ImageLayout dstLayout{vk::ImageLayout::eUndefined}; /// Layout after the transition
+  } imageLayoutTransitionInfo;
+
+  struct
+  {
+    Image* srcImage{nullptr}; /// Source image
+  } useTBMSurfaceInfo;
+
+  bool deferredTransferMode{true}; // Vulkan implementation prefers deferred mode
+
+  // delete copy
+  ResourceTransferRequest(const ResourceTransferRequest&) = delete;
+  ResourceTransferRequest& operator=(const ResourceTransferRequest&) = delete;
+  ResourceTransferRequest& operator=(ResourceTransferRequest&& obj) = delete;
+
+  // Move constructor transfers only the payload relevant to the request type.
+  // noexcept so containers (e.g. std::vector) move rather than copy on growth.
+  ResourceTransferRequest(ResourceTransferRequest&& obj) noexcept
+  : requestType(obj.requestType),
+    deferredTransferMode(obj.deferredTransferMode)
+  {
+    switch(requestType)
+    {
+      case TransferRequestType::BUFFER_TO_IMAGE:
+      {
+        bufferToImageInfo = obj.bufferToImageInfo;
+        break;
+      }
+      case TransferRequestType::IMAGE_TO_IMAGE:
+      {
+        imageToImageInfo = obj.imageToImageInfo;
+        break;
+      }
+      case TransferRequestType::LAYOUT_TRANSITION_ONLY:
+      {
+        imageLayoutTransitionInfo = obj.imageLayoutTransitionInfo;
+        break;
+      }
+      case TransferRequestType::USE_TBM_SURFACE:
+      {
+        useTBMSurfaceInfo = obj.useTBMSurfaceInfo;
+        break;
+      }
+      default:
+      {
+        // BUFFER_TO_BUFFER, IMAGE_TO_BUFFER and UNDEFINED carry no payload here
+        break;
+      }
+    }
+  }
+};
+
+} // namespace Dali::Graphics::Vulkan
--- /dev/null
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dali/integration-api/debug.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-sampler-impl.h>
+#include <dali/internal/graphics/vulkan/vulkan-device.h>
+
+#if defined(DEBUG_ENABLED)
+extern Debug::Filter* gVulkanFilter;
+#endif
+
+namespace Dali::Graphics::Vulkan
+{
+SamplerImpl* SamplerImpl::New(Device& device, const vk::SamplerCreateInfo& samplerCreateInfo)
+{
+  // Allocate the wrapper, then create the underlying vk::Sampler before
+  // handing it back to the caller (who takes ownership of the raw pointer).
+  auto* impl = new SamplerImpl(device, samplerCreateInfo);
+  impl->Initialize();
+  return impl;
+}
+
+// Captures the device and the full create info; no Vulkan object exists
+// until Initialize() is called.
+SamplerImpl::SamplerImpl(Device& device, const vk::SamplerCreateInfo& samplerCreateInfo)
+: mDevice(device),
+  mCreateInfo(samplerCreateInfo)
+{
+}
+
+// NOTE(review): the vk::Sampler created in Initialize() is not destroyed
+// here — confirm its destruction is handled elsewhere (e.g. a device
+// discard/garbage queue), otherwise this leaks the Vulkan handle.
+SamplerImpl::~SamplerImpl() = default;
+
+void SamplerImpl::Initialize()
+{
+  // Create the sampler on the logical device using the stored create info
+  // and the named allocator; VkAssert aborts on any non-success result.
+  VkAssert(mDevice.GetLogicalDevice().createSampler(&mCreateInfo, &mDevice.GetAllocator("SAMPLER"), &mSampler));
+}
+
+// The accessors below simply expose fields of the vk::SamplerCreateInfo
+// captured at construction time; nothing is re-queried from Vulkan.
+vk::Sampler SamplerImpl::GetVkHandle() const
+{
+  return mSampler;
+}
+
+vk::SamplerCreateFlags SamplerImpl::GetCreateFlags() const
+{
+  return mCreateInfo.flags;
+}
+
+vk::Filter SamplerImpl::GetMinFilter() const
+{
+  return mCreateInfo.minFilter;
+}
+
+vk::Filter SamplerImpl::GetMagFilter() const
+{
+  return mCreateInfo.magFilter;
+}
+
+vk::SamplerMipmapMode SamplerImpl::GetMipMapMode() const
+{
+  return mCreateInfo.mipmapMode;
+}
+
+vk::SamplerAddressMode SamplerImpl::GetAddressModeU() const
+{
+  return mCreateInfo.addressModeU;
+}
+
+vk::SamplerAddressMode SamplerImpl::GetAddressModeV() const
+{
+  return mCreateInfo.addressModeV;
+}
+
+vk::SamplerAddressMode SamplerImpl::GetAddressModeW() const
+{
+  return mCreateInfo.addressModeW;
+}
+
+float SamplerImpl::GetMipLodBias() const
+{
+  return mCreateInfo.mipLodBias;
+}
+
+vk::Bool32 SamplerImpl::AnisotropyEnabled() const
+{
+  return mCreateInfo.anisotropyEnable;
+}
+
+float SamplerImpl::GetMaxAnisotropy() const
+{
+  return mCreateInfo.maxAnisotropy;
+}
+
+vk::Bool32 SamplerImpl::CompareEnabled() const
+{
+  return mCreateInfo.compareEnable;
+}
+
+vk::CompareOp SamplerImpl::GetCompareOperation() const
+{
+  return mCreateInfo.compareOp;
+}
+
+float SamplerImpl::GetMinLod() const
+{
+  return mCreateInfo.minLod;
+}
+
+float SamplerImpl::GetMaxLod() const
+{
+  return mCreateInfo.maxLod;
+}
+
+vk::BorderColor SamplerImpl::GetBorderColor() const
+{
+  return mCreateInfo.borderColor;
+}
+
+vk::Bool32 SamplerImpl::UsesUnnormalizedCoordinates() const
+{
+  return mCreateInfo.unnormalizedCoordinates;
+}
+
+} // namespace Dali::Graphics::Vulkan
--- /dev/null
+#pragma once
+
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
+#include <dali/internal/graphics/vulkan/vulkan-hpp-wrapper.h>
+
+namespace Dali::Graphics::Vulkan
+{
+class Device;
+
+/**
+ * Wrapper for vk::Sampler
+ */
+class SamplerImpl
+{
+public:
+ static SamplerImpl* New(Device& device, const vk::SamplerCreateInfo& createInfo);
+ SamplerImpl(Device& device, const vk::SamplerCreateInfo& createInfo);
+
+ ~SamplerImpl();
+
+ void Initialize();
+
+ /**
+ * Returns VkSampler object
+ * @return
+ */
+ vk::Sampler GetVkHandle() const;
+
+ vk::SamplerCreateFlags GetCreateFlags() const;
+
+ vk::Filter GetMinFilter() const;
+
+ vk::Filter GetMagFilter() const;
+
+ vk::SamplerMipmapMode GetMipMapMode() const;
+
+ vk::SamplerAddressMode GetAddressModeU() const;
+
+ vk::SamplerAddressMode GetAddressModeV() const;
+
+ vk::SamplerAddressMode GetAddressModeW() const;
+
+ float GetMipLodBias() const;
+
+ vk::Bool32 AnisotropyEnabled() const;
+
+ float GetMaxAnisotropy() const;
+
+ vk::Bool32 CompareEnabled() const;
+
+ vk::CompareOp GetCompareOperation() const;
+
+ float GetMinLod() const;
+
+ float GetMaxLod() const;
+
+ vk::BorderColor GetBorderColor() const;
+
+ vk::Bool32 UsesUnnormalizedCoordinates() const;
+
+private:
+ Device& mDevice;
+ vk::SamplerCreateInfo mCreateInfo;
+ vk::Sampler mSampler;
+};
+
+} // namespace Dali::Graphics::Vulkan
--- /dev/null
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// CLASS HEADER
+#include <dali/internal/graphics/vulkan-impl/vulkan-sampler.h>
+
+// INTERNAL INCLUDES
+#include <dali/internal/graphics/vulkan-impl/vulkan-graphics-controller.h>
+
+namespace Dali::Graphics::Vulkan
+{
+// Client-facing sampler resource; forwards create info and controller to the
+// shared Resource base.
+Sampler::Sampler(const Graphics::SamplerCreateInfo& createInfo, VulkanGraphicsController& controller)
+: SamplerResource(createInfo, controller)
+{
+}
+
+Sampler::~Sampler() = default;
+
+void Sampler::DestroyResource()
+{
+  // For now, no GPU resources are initialized so nothing to destroy
+}
+
+bool Sampler::InitializeResource()
+{
+  // No GPU-side work is performed yet — presumably the vk::Sampler
+  // (SamplerImpl) is created lazily elsewhere; TODO confirm.
+  return true;
+}
+
+void Sampler::DiscardResource()
+{
+  // Hand this object to the controller's discard queue when the
+  // client-side UniquePtr releases it.
+  GetController().DiscardResource(this);
+}
+
+} // namespace Dali::Graphics::Vulkan
--- /dev/null
+#ifndef DALI_GRAPHICS_VULKAN_SAMPLER_H
+#define DALI_GRAPHICS_VULKAN_SAMPLER_H
+
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+// EXTERNAL INCLUDES
+#include <dali/graphics-api/graphics-sampler-create-info.h>
+#include <dali/graphics-api/graphics-sampler.h>
+
+// INTERNAL INCLUDES
+#include <dali/internal/graphics/vulkan-impl/vulkan-graphics-resource.h>
+
+namespace Dali::Graphics::Vulkan
+{
+using SamplerResource = Resource<Graphics::Sampler, Graphics::SamplerCreateInfo>;
+
+class Sampler : public SamplerResource
+{
+public:
+  /**
+   * @brief Constructor
+   * @param[in] createInfo Valid createInfo structure
+   * @param[in] controller Reference to the controller
+   */
+  Sampler(const Graphics::SamplerCreateInfo& createInfo, VulkanGraphicsController& controller);
+
+  /**
+   * @brief Destructor
+   */
+  ~Sampler() override;
+
+  /**
+   * @brief Called when GPU resources are destroyed
+   */
+  // NOTE(review): InitializeResource/DiscardResource are marked override but
+  // this one is not — if Resource declares DestroyResource as virtual, add
+  // `override` here for consistency; confirm against the base template.
+  void DestroyResource();
+
+  /**
+   * @brief Called when initializing the resource
+   *
+   * @return True on success
+   */
+  bool InitializeResource() override;
+
+  /**
+   * @brief Called when UniquePtr<> on client-side dies
+   */
+  void DiscardResource() override;
+};
+
+} // namespace Dali::Graphics::Vulkan
+
+#endif
// INTERNAL INCLUDES
#include <dali/internal/graphics/vulkan-impl/vulkan-fence-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-framebuffer-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-view-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-surface-impl.h>
#include <dali/internal/graphics/vulkan-impl/vulkan-swapchain-impl.h>
#include <dali/internal/graphics/vulkan/vulkan-device.h>
*/
vk::Semaphore submitSemaphore;
- Fence* endOfFrameFence;
+ std::unique_ptr<FenceImpl> endOfFrameFence;
Device& graphicsDevice;
};
acquireNextImageSemaphore = graphicsDevice.GetLogicalDevice().createSemaphore({}, graphicsDevice.GetAllocator()).value;
submitSemaphore = graphicsDevice.GetLogicalDevice().createSemaphore({}, graphicsDevice.GetAllocator()).value;
- endOfFrameFence = graphicsDevice.CreateFence({});
+ endOfFrameFence.reset(FenceImpl::New(graphicsDevice, {}));
}
SwapchainBuffer::~SwapchainBuffer()
mSwapchainCreateInfoKHR.imageFormat,
mSwapchainCreateInfoKHR.imageExtent);
- auto colorImageView = mGraphicsDevice.CreateImageView(colorImage);
+ auto colorImageView = ImageView::NewFromImage(mGraphicsDevice, *colorImage);
// A new color attachment for each framebuffer
auto colorAttachment = FramebufferAttachment::NewColorAttachment(colorImageView,
swapchainBuffer->endOfFrameFence->Reset();
+ // @todo Should we allow multiple submits per swapchain per frame?
+ // If so, should change the fence, or at least wait for the fence
+ // prior to the reset above.
mGraphicsDevice.Submit(*mQueue,
{Vulkan::SubmissionData{
{swapchainBuffer->acquireNextImageSemaphore},
{},
{commandBuffer},
{swapchainBuffer->submitSemaphore}}},
- swapchainBuffer->endOfFrameFence); // @todo should only be endOfFrameFence on the last submit, but that's now hard to figure out!.
+ swapchainBuffer->endOfFrameFence.get());
}
void Swapchain::Present()
mGraphics->BindImageMemory( dsRefCountedImage, std::move(memory), 0 );
// create the depth stencil ImageView to be used within framebuffer
- auto depthStencilImageView = mGraphics->CreateImageView( dsRefCountedImage );
+ auto depthStencilImageView = ImageView::New(dsRefCountedImage);
auto depthClearValue = vk::ClearDepthStencilValue{}.setDepth( 0.0 )
.setStencil( STENCIL_DEFAULT_CLEAR_VALUE );
for( auto&& image : images )
{
-
- auto colorImageView = mGraphics->CreateImageView( mGraphics->CreateImageFromExternal( image, mSwapchainCreateInfoKHR.imageFormat, swapchainExtent ) );
+ // @todo When do we kill this auto image & imageView? MemLeak.
+ auto colorImageView = ImageView::New( mGraphics->CreateImageFromExternal( image, mSwapchainCreateInfoKHR.imageFormat, swapchainExtent ) );
// A new color attachment for each framebuffer
- auto colorAttachment = FramebufferAttachment::NewColorAttachment( colorImageView,
- clearColor,
- true );//presentable
-
- framebuffers.push_back( mGraphics->CreateFramebuffer( { colorAttachment },
- depthAttachment,
- swapchainExtent.width,
- swapchainExtent.height ) );
+ auto colorAttachment = FramebufferAttachment::NewColorAttachment(colorImageView,
+ clearColor,
+ true);//presentable
+
+ framebuffers.push_back( mGraphics->CreateFramebuffer({colorAttachment},
+ depthAttachment,
+ swapchainExtent.width,
+ swapchainExtent.height ) );
}
// Before replacing framebuffers in the swapchain, wait until all is done
namespace Dali::Graphics::Vulkan
{
+class CommandBufferImpl;
class Device;
+class FenceImpl;
class SurfaceImpl;
class Queue;
class SwapchainBuffer;
{
public:
static Swapchain* NewSwapchain(
- Device& device,
- Queue& presentationQueue,
- vk::SwapchainKHR oldSwapchain,
- SurfaceImpl* surface,
- vk::Format requestedFormat,
+ Device& device,
+ Queue& presentationQueue,
+ vk::SwapchainKHR oldSwapchain,
+ SurfaceImpl* surface,
+ vk::Format requestedFormat,
vk::PresentModeKHR presentMode,
- uint32_t bufferCount);
+ uint32_t bufferCount);
Swapchain(Device& graphicsDevice, Queue& presentationQueue);
~Swapchain() override;
- Swapchain( const Swapchain& ) = delete;
- Swapchain& operator=( const Swapchain& ) = delete;
+ Swapchain(const Swapchain&) = delete;
+ Swapchain& operator=(const Swapchain&) = delete;
/**
* Automatically create framebuffers (generating compatible render passes)
* @param index
* @return
*/
- [[nodiscard]] FramebufferImpl* GetFramebuffer( uint32_t index ) const;
+ [[nodiscard]] FramebufferImpl* GetFramebuffer(uint32_t index) const;
/**
* This function acquires next framebuffer
* @todo we should rather use round robin method
* @return
*/
- FramebufferImpl* AcquireNextFramebuffer( bool shouldCollectGarbageNow = true );
+ FramebufferImpl* AcquireNextFramebuffer(bool shouldCollectGarbageNow = true);
/**
* Submits the given command buffer to the swapchain queue
*
* @param[in] depthStencilFormat valid depth/stencil pixel format
*/
- void SetDepthStencil( vk::Format depthStencilFormat );
+ void SetDepthStencil(vk::Format depthStencilFormat);
/**
* Returns number of allocated swapchain images
private:
void CreateVkSwapchain(
- vk::SwapchainKHR oldSwapchain,
- SurfaceImpl* surface,
- vk::Format requestedFormat,
+ vk::SwapchainKHR oldSwapchain,
+ SurfaceImpl* surface,
+ vk::Format requestedFormat,
vk::PresentModeKHR presentMode,
- uint32_t bufferCount);
+ uint32_t bufferCount);
private:
- Device& mGraphicsDevice;
- Queue* mQueue;
+ Device& mGraphicsDevice;
+ Queue* mQueue;
SurfaceImpl* mSurface{};
uint32_t mSwapchainImageIndex{}; ///< Swapchain image index returned by vkAcquireNextImageKHR
- vk::SwapchainKHR mSwapchainKHR;
+ vk::SwapchainKHR mSwapchainKHR;
vk::SwapchainCreateInfoKHR mSwapchainCreateInfoKHR{};
/**
*/
std::vector<std::unique_ptr<SwapchainBuffer>> mSwapchainBuffers;
- Fence* mBetweenRenderPassFence{};
-
- uint32_t mFrameCounter { 0u }; ///< Current frame number
+ FenceImpl* mBetweenRenderPassFence{};
+ uint32_t mFrameCounter{0u}; ///< Current frame number
bool mIsValid; // indicates whether the swapchain is still valid or requires to be recreated
};
} // namespace Dali::Graphics::Vulkan
-#endif //DALI_INTERNAL_GRAPHICS_VULKAN_SWAPCHAIN_IMPL_H
+#endif // DALI_INTERNAL_GRAPHICS_VULKAN_SWAPCHAIN_IMPL_H
--- /dev/null
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// CLASS HEADER
+#include <dali/internal/graphics/vulkan-impl/vulkan-texture.h>
+
+// INTERNAL HEADERS
+#include <dali/integration-api/pixel-data-integ.h>
+
+#include <dali/internal/graphics/vulkan-impl/vulkan-buffer-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-buffer.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-graphics-controller.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-image-view-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-resource-transfer-request.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-sampler-impl.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-utils.h>
+#include <dali/internal/graphics/vulkan/vulkan-device.h>
+
+namespace Dali::Graphics::Vulkan
+{
+/**
+ * Remaps components
+ *
+ * Luminance formats are backed by red-channel Vulkan formats (see the
+ * format conversion below), so the view swizzle replicates R into R/G/B:
+ * L8 forces alpha to one, while L8A8 sources alpha from the G channel.
+ * Every other format keeps the identity mapping.
+ */
+inline vk::ComponentMapping GetVkComponentMappingInlined(Dali::Graphics::Format format)
+{
+  switch(format)
+  {
+    case Dali::Graphics::Format::L8:
+    {
+      return vk::ComponentMapping{
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eOne};
+    }
+    case Dali::Graphics::Format::L8A8:
+    {
+      return vk::ComponentMapping{
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eG,
+      };
+    }
+    default:
+    {
+      // Identity mapping for all natively-representable formats.
+      return vk::ComponentMapping{
+        vk::ComponentSwizzle::eR,
+        vk::ComponentSwizzle::eG,
+        vk::ComponentSwizzle::eB,
+        vk::ComponentSwizzle::eA};
+    }
+  }
+}
+
+/**
+ * Converts API pixel format to Vulkan
+ */
+constexpr vk::Format ConvertApiToVkConst(Dali::Graphics::Format format)
+{
+ switch(format)
+ {
+ case Dali::Graphics::Format::L8:
+ {
+ return vk::Format::eR8Unorm;
+ }
+ case Dali::Graphics::Format::L8A8:
+ {
+ return vk::Format::eR8G8Unorm;
+ }
+ case Dali::Graphics::Format::UNDEFINED:
+ {
+ return vk::Format::eUndefined;
+ }
+ case Dali::Graphics::Format::R4G4_UNORM_PACK8:
+ {
+ return vk::Format::eR4G4UnormPack8;
+ }
+ case Dali::Graphics::Format::R4G4B4A4_UNORM_PACK16:
+ {
+ return vk::Format::eR4G4B4A4UnormPack16;
+ }
+ case Dali::Graphics::Format::B4G4R4A4_UNORM_PACK16:
+ {
+ return vk::Format::eB4G4R4A4UnormPack16;
+ }
+ case Dali::Graphics::Format::R5G6B5_UNORM_PACK16:
+ {
+ return vk::Format::eR5G6B5UnormPack16;
+ }
+ case Dali::Graphics::Format::B5G6R5_UNORM_PACK16:
+ {
+ return vk::Format::eB5G6R5UnormPack16;
+ }
+ case Dali::Graphics::Format::R5G5B5A1_UNORM_PACK16:
+ {
+ return vk::Format::eR5G5B5A1UnormPack16;
+ }
+ case Dali::Graphics::Format::B5G5R5A1_UNORM_PACK16:
+ {
+ return vk::Format::eB5G5R5A1UnormPack16;
+ }
+ case Dali::Graphics::Format::A1R5G5B5_UNORM_PACK16:
+ {
+ return vk::Format::eA1R5G5B5UnormPack16;
+ }
+ case Dali::Graphics::Format::R8_UNORM:
+ {
+ return vk::Format::eR8Unorm;
+ }
+ case Dali::Graphics::Format::R8_SNORM:
+ {
+ return vk::Format::eR8Snorm;
+ }
+ case Dali::Graphics::Format::R8_USCALED:
+ {
+ return vk::Format::eR8Uscaled;
+ }
+ case Dali::Graphics::Format::R8_SSCALED:
+ {
+ return vk::Format::eR8Sscaled;
+ }
+ case Dali::Graphics::Format::R8_UINT:
+ {
+ return vk::Format::eR8Uint;
+ }
+ case Dali::Graphics::Format::R8_SINT:
+ {
+ return vk::Format::eR8Sint;
+ }
+ case Dali::Graphics::Format::R8_SRGB:
+ {
+ return vk::Format::eR8Srgb;
+ }
+ case Dali::Graphics::Format::R8G8_UNORM:
+ {
+ return vk::Format::eR8G8Unorm;
+ }
+ case Dali::Graphics::Format::R8G8_SNORM:
+ {
+ return vk::Format::eR8G8Snorm;
+ }
+ case Dali::Graphics::Format::R8G8_USCALED:
+ {
+ return vk::Format::eR8G8Uscaled;
+ }
+ case Dali::Graphics::Format::R8G8_SSCALED:
+ {
+ return vk::Format::eR8G8Sscaled;
+ }
+ case Dali::Graphics::Format::R8G8_UINT:
+ {
+ return vk::Format::eR8G8Uint;
+ }
+ case Dali::Graphics::Format::R8G8_SINT:
+ {
+ return vk::Format::eR8G8Sint;
+ }
+ case Dali::Graphics::Format::R8G8_SRGB:
+ {
+ return vk::Format::eR8G8Srgb;
+ }
+ case Dali::Graphics::Format::R8G8B8_UNORM:
+ {
+ return vk::Format::eR8G8B8Unorm;
+ }
+ case Dali::Graphics::Format::R8G8B8_SNORM:
+ {
+ return vk::Format::eR8G8B8Snorm;
+ }
+ case Dali::Graphics::Format::R8G8B8_USCALED:
+ {
+ return vk::Format::eR8G8B8Uscaled;
+ }
+ case Dali::Graphics::Format::R8G8B8_SSCALED:
+ {
+ return vk::Format::eR8G8B8Sscaled;
+ }
+ case Dali::Graphics::Format::R8G8B8_UINT:
+ {
+ return vk::Format::eR8G8B8Uint;
+ }
+ case Dali::Graphics::Format::R8G8B8_SINT:
+ {
+ return vk::Format::eR8G8B8Sint;
+ }
+ case Dali::Graphics::Format::R8G8B8_SRGB:
+ {
+ return vk::Format::eR8G8B8Srgb;
+ }
+ case Dali::Graphics::Format::B8G8R8_UNORM:
+ {
+ return vk::Format::eB8G8R8Unorm;
+ }
+ case Dali::Graphics::Format::B8G8R8_SNORM:
+ {
+ return vk::Format::eB8G8R8Snorm;
+ }
+ case Dali::Graphics::Format::B8G8R8_USCALED:
+ {
+ return vk::Format::eB8G8R8Uscaled;
+ }
+ case Dali::Graphics::Format::B8G8R8_SSCALED:
+ {
+ return vk::Format::eB8G8R8Sscaled;
+ }
+ case Dali::Graphics::Format::B8G8R8_UINT:
+ {
+ return vk::Format::eB8G8R8Uint;
+ }
+ case Dali::Graphics::Format::B8G8R8_SINT:
+ {
+ return vk::Format::eB8G8R8Sint;
+ }
+ case Dali::Graphics::Format::B8G8R8_SRGB:
+ {
+ return vk::Format::eB8G8R8Srgb;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_UNORM:
+ {
+ return vk::Format::eR8G8B8A8Unorm;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_SNORM:
+ {
+ return vk::Format::eR8G8B8A8Snorm;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_USCALED:
+ {
+ return vk::Format::eR8G8B8A8Uscaled;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_SSCALED:
+ {
+ return vk::Format::eR8G8B8A8Sscaled;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_UINT:
+ {
+ return vk::Format::eR8G8B8A8Uint;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_SINT:
+ {
+ return vk::Format::eR8G8B8A8Sint;
+ }
+ case Dali::Graphics::Format::R8G8B8A8_SRGB:
+ {
+ return vk::Format::eR8G8B8A8Srgb;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_UNORM:
+ {
+ return vk::Format::eB8G8R8A8Unorm;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_SNORM:
+ {
+ return vk::Format::eB8G8R8A8Snorm;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_USCALED:
+ {
+ return vk::Format::eB8G8R8A8Uscaled;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_SSCALED:
+ {
+ return vk::Format::eB8G8R8A8Sscaled;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_UINT:
+ {
+ return vk::Format::eB8G8R8A8Uint;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_SINT:
+ {
+ return vk::Format::eB8G8R8A8Sint;
+ }
+ case Dali::Graphics::Format::B8G8R8A8_SRGB:
+ {
+ return vk::Format::eB8G8R8A8Srgb;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_UNORM_PACK32:
+ {
+ return vk::Format::eA8B8G8R8UnormPack32;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_SNORM_PACK32:
+ {
+ return vk::Format::eA8B8G8R8SnormPack32;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_USCALED_PACK32:
+ {
+ return vk::Format::eA8B8G8R8UscaledPack32;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_SSCALED_PACK32:
+ {
+ return vk::Format::eA8B8G8R8SscaledPack32;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_UINT_PACK32:
+ {
+ return vk::Format::eA8B8G8R8UintPack32;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_SINT_PACK32:
+ {
+ return vk::Format::eA8B8G8R8SintPack32;
+ }
+ case Dali::Graphics::Format::A8B8G8R8_SRGB_PACK32:
+ {
+ return vk::Format::eA8B8G8R8SrgbPack32;
+ }
+ case Dali::Graphics::Format::A2R10G10B10_UNORM_PACK32:
+ {
+ return vk::Format::eA2R10G10B10UnormPack32;
+ }
+ case Dali::Graphics::Format::A2R10G10B10_SNORM_PACK32:
+ {
+ return vk::Format::eA2R10G10B10SnormPack32;
+ }
+ case Dali::Graphics::Format::A2R10G10B10_USCALED_PACK32:
+ {
+ return vk::Format::eA2R10G10B10UscaledPack32;
+ }
+ case Dali::Graphics::Format::A2R10G10B10_SSCALED_PACK32:
+ {
+ return vk::Format::eA2R10G10B10SscaledPack32;
+ }
+ case Dali::Graphics::Format::A2R10G10B10_UINT_PACK32:
+ {
+ return vk::Format::eA2R10G10B10UintPack32;
+ }
+ case Dali::Graphics::Format::A2R10G10B10_SINT_PACK32:
+ {
+ return vk::Format::eA2R10G10B10SintPack32;
+ }
+ case Dali::Graphics::Format::A2B10G10R10_UNORM_PACK32:
+ {
+ return vk::Format::eA2B10G10R10UnormPack32;
+ }
+ case Dali::Graphics::Format::A2B10G10R10_SNORM_PACK32:
+ {
+ return vk::Format::eA2B10G10R10SnormPack32;
+ }
+ case Dali::Graphics::Format::A2B10G10R10_USCALED_PACK32:
+ {
+ return vk::Format::eA2B10G10R10UscaledPack32;
+ }
+ case Dali::Graphics::Format::A2B10G10R10_SSCALED_PACK32:
+ {
+ return vk::Format::eA2B10G10R10SscaledPack32;
+ }
+ case Dali::Graphics::Format::A2B10G10R10_UINT_PACK32:
+ {
+ return vk::Format::eA2B10G10R10UintPack32;
+ }
+ case Dali::Graphics::Format::A2B10G10R10_SINT_PACK32:
+ {
+ return vk::Format::eA2B10G10R10SintPack32;
+ }
+ case Dali::Graphics::Format::R16_UNORM:
+ {
+ return vk::Format::eR16Unorm;
+ }
+ case Dali::Graphics::Format::R16_SNORM:
+ {
+ return vk::Format::eR16Snorm;
+ }
+ case Dali::Graphics::Format::R16_USCALED:
+ {
+ return vk::Format::eR16Uscaled;
+ }
+ case Dali::Graphics::Format::R16_SSCALED:
+ {
+ return vk::Format::eR16Sscaled;
+ }
+ case Dali::Graphics::Format::R16_UINT:
+ {
+ return vk::Format::eR16Uint;
+ }
+ case Dali::Graphics::Format::R16_SINT:
+ {
+ return vk::Format::eR16Sint;
+ }
+ case Dali::Graphics::Format::R16_SFLOAT:
+ {
+ return vk::Format::eR16Sfloat;
+ }
+ case Dali::Graphics::Format::R16G16_UNORM:
+ {
+ return vk::Format::eR16G16Unorm;
+ }
+ case Dali::Graphics::Format::R16G16_SNORM:
+ {
+ return vk::Format::eR16G16Snorm;
+ }
+ case Dali::Graphics::Format::R16G16_USCALED:
+ {
+ return vk::Format::eR16G16Uscaled;
+ }
+ case Dali::Graphics::Format::R16G16_SSCALED:
+ {
+ return vk::Format::eR16G16Sscaled;
+ }
+ case Dali::Graphics::Format::R16G16_UINT:
+ {
+ return vk::Format::eR16G16Uint;
+ }
+ case Dali::Graphics::Format::R16G16_SINT:
+ {
+ return vk::Format::eR16G16Sint;
+ }
+ case Dali::Graphics::Format::R16G16_SFLOAT:
+ {
+ return vk::Format::eR16G16Sfloat;
+ }
+ case Dali::Graphics::Format::R16G16B16_UNORM:
+ {
+ return vk::Format::eR16G16B16Unorm;
+ }
+ case Dali::Graphics::Format::R16G16B16_SNORM:
+ {
+ return vk::Format::eR16G16B16Snorm;
+ }
+ case Dali::Graphics::Format::R16G16B16_USCALED:
+ {
+ return vk::Format::eR16G16B16Uscaled;
+ }
+ case Dali::Graphics::Format::R16G16B16_SSCALED:
+ {
+ return vk::Format::eR16G16B16Sscaled;
+ }
+ case Dali::Graphics::Format::R16G16B16_UINT:
+ {
+ return vk::Format::eR16G16B16Uint;
+ }
+ case Dali::Graphics::Format::R16G16B16_SINT:
+ {
+ return vk::Format::eR16G16B16Sint;
+ }
+ case Dali::Graphics::Format::R16G16B16_SFLOAT:
+ {
+ return vk::Format::eR16G16B16Sfloat;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_UNORM:
+ {
+ return vk::Format::eR16G16B16A16Unorm;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_SNORM:
+ {
+ return vk::Format::eR16G16B16A16Snorm;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_USCALED:
+ {
+ return vk::Format::eR16G16B16A16Uscaled;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_SSCALED:
+ {
+ return vk::Format::eR16G16B16A16Sscaled;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_UINT:
+ {
+ return vk::Format::eR16G16B16A16Uint;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_SINT:
+ {
+ return vk::Format::eR16G16B16A16Sint;
+ }
+ case Dali::Graphics::Format::R16G16B16A16_SFLOAT:
+ {
+ return vk::Format::eR16G16B16A16Sfloat;
+ }
+ case Dali::Graphics::Format::R32_UINT:
+ {
+ return vk::Format::eR32Uint;
+ }
+ case Dali::Graphics::Format::R32_SINT:
+ {
+ return vk::Format::eR32Sint;
+ }
+ case Dali::Graphics::Format::R32_SFLOAT:
+ {
+ return vk::Format::eR32Sfloat;
+ }
+ case Dali::Graphics::Format::R32G32_UINT:
+ {
+ return vk::Format::eR32G32Uint;
+ }
+ case Dali::Graphics::Format::R32G32_SINT:
+ {
+ return vk::Format::eR32G32Sint;
+ }
+ case Dali::Graphics::Format::R32G32_SFLOAT:
+ {
+ return vk::Format::eR32G32Sfloat;
+ }
+ case Dali::Graphics::Format::R32G32B32_UINT:
+ {
+ return vk::Format::eR32G32B32Uint;
+ }
+ case Dali::Graphics::Format::R32G32B32_SINT:
+ {
+ return vk::Format::eR32G32B32Sint;
+ }
+ case Dali::Graphics::Format::R32G32B32_SFLOAT:
+ {
+ return vk::Format::eR32G32B32Sfloat;
+ }
+ case Dali::Graphics::Format::R32G32B32A32_UINT:
+ {
+ return vk::Format::eR32G32B32A32Uint;
+ }
+ case Dali::Graphics::Format::R32G32B32A32_SINT:
+ {
+ return vk::Format::eR32G32B32A32Sint;
+ }
+ case Dali::Graphics::Format::R32G32B32A32_SFLOAT:
+ {
+ return vk::Format::eR32G32B32A32Sfloat;
+ }
+ case Dali::Graphics::Format::R64_UINT:
+ {
+ return vk::Format::eR64Uint;
+ }
+ case Dali::Graphics::Format::R64_SINT:
+ {
+ return vk::Format::eR64Sint;
+ }
+ case Dali::Graphics::Format::R64_SFLOAT:
+ {
+ return vk::Format::eR64Sfloat;
+ }
+ case Dali::Graphics::Format::R64G64_UINT:
+ {
+ return vk::Format::eR64G64Uint;
+ }
+ case Dali::Graphics::Format::R64G64_SINT:
+ {
+ return vk::Format::eR64G64Sint;
+ }
+ case Dali::Graphics::Format::R64G64_SFLOAT:
+ {
+ return vk::Format::eR64G64Sfloat;
+ }
+ case Dali::Graphics::Format::R64G64B64_UINT:
+ {
+ return vk::Format::eR64G64B64Uint;
+ }
+ case Dali::Graphics::Format::R64G64B64_SINT:
+ {
+ return vk::Format::eR64G64B64Sint;
+ }
+ case Dali::Graphics::Format::R64G64B64_SFLOAT:
+ {
+ return vk::Format::eR64G64B64Sfloat;
+ }
+ case Dali::Graphics::Format::R64G64B64A64_UINT:
+ {
+ return vk::Format::eR64G64B64A64Uint;
+ }
+ case Dali::Graphics::Format::R64G64B64A64_SINT:
+ {
+ return vk::Format::eR64G64B64A64Sint;
+ }
+ case Dali::Graphics::Format::R64G64B64A64_SFLOAT:
+ {
+ return vk::Format::eR64G64B64A64Sfloat;
+ }
+ case Dali::Graphics::Format::B10G11R11_UFLOAT_PACK32:
+ {
+ return vk::Format::eB10G11R11UfloatPack32;
+ }
+ case Dali::Graphics::Format::R11G11B10_UFLOAT_PACK32:
+ {
+ return vk::Format::eUndefined;
+ }
+ case Dali::Graphics::Format::E5B9G9R9_UFLOAT_PACK32:
+ {
+ return vk::Format::eE5B9G9R9UfloatPack32;
+ }
+ case Dali::Graphics::Format::D16_UNORM:
+ {
+ return vk::Format::eD16Unorm;
+ }
+ case Dali::Graphics::Format::X8_D24_UNORM_PACK32:
+ {
+ return vk::Format::eX8D24UnormPack32;
+ }
+ case Dali::Graphics::Format::D32_SFLOAT:
+ {
+ return vk::Format::eD32Sfloat;
+ }
+ case Dali::Graphics::Format::S8_UINT:
+ {
+ return vk::Format::eS8Uint;
+ }
+ case Dali::Graphics::Format::D16_UNORM_S8_UINT:
+ {
+ return vk::Format::eD16UnormS8Uint;
+ }
+ case Dali::Graphics::Format::D24_UNORM_S8_UINT:
+ {
+ return vk::Format::eD24UnormS8Uint;
+ }
+ case Dali::Graphics::Format::D32_SFLOAT_S8_UINT:
+ {
+ return vk::Format::eD32SfloatS8Uint;
+ }
+ case Dali::Graphics::Format::BC1_RGB_UNORM_BLOCK:
+ {
+ return vk::Format::eBc1RgbUnormBlock;
+ }
+ case Dali::Graphics::Format::BC1_RGB_SRGB_BLOCK:
+ {
+ return vk::Format::eBc1RgbSrgbBlock;
+ }
+ case Dali::Graphics::Format::BC1_RGBA_UNORM_BLOCK:
+ {
+ return vk::Format::eBc1RgbaUnormBlock;
+ }
+ case Dali::Graphics::Format::BC1_RGBA_SRGB_BLOCK:
+ {
+ return vk::Format::eBc1RgbaSrgbBlock;
+ }
+ case Dali::Graphics::Format::BC2_UNORM_BLOCK:
+ {
+ return vk::Format::eBc2UnormBlock;
+ }
+ case Dali::Graphics::Format::BC2_SRGB_BLOCK:
+ {
+ return vk::Format::eBc2SrgbBlock;
+ }
+ case Dali::Graphics::Format::BC3_UNORM_BLOCK:
+ {
+ return vk::Format::eBc3UnormBlock;
+ }
+ case Dali::Graphics::Format::BC3_SRGB_BLOCK:
+ {
+ return vk::Format::eBc3SrgbBlock;
+ }
+ case Dali::Graphics::Format::BC4_UNORM_BLOCK:
+ {
+ return vk::Format::eBc4UnormBlock;
+ }
+ case Dali::Graphics::Format::BC4_SNORM_BLOCK:
+ {
+ return vk::Format::eBc4SnormBlock;
+ }
+ case Dali::Graphics::Format::BC5_UNORM_BLOCK:
+ {
+ return vk::Format::eBc5UnormBlock;
+ }
+ case Dali::Graphics::Format::BC5_SNORM_BLOCK:
+ {
+ return vk::Format::eBc5SnormBlock;
+ }
+ case Dali::Graphics::Format::BC6H_UFLOAT_BLOCK:
+ {
+ return vk::Format::eBc6HUfloatBlock;
+ }
+ case Dali::Graphics::Format::BC6H_SFLOAT_BLOCK:
+ {
+ return vk::Format::eBc6HSfloatBlock;
+ }
+ case Dali::Graphics::Format::BC7_UNORM_BLOCK:
+ {
+ return vk::Format::eBc7UnormBlock;
+ }
+ case Dali::Graphics::Format::BC7_SRGB_BLOCK:
+ {
+ return vk::Format::eBc7SrgbBlock;
+ }
+ case Dali::Graphics::Format::ETC2_R8G8B8_UNORM_BLOCK:
+ {
+ return vk::Format::eEtc2R8G8B8UnormBlock;
+ }
+ case Dali::Graphics::Format::ETC2_R8G8B8_SRGB_BLOCK:
+ {
+ return vk::Format::eEtc2R8G8B8SrgbBlock;
+ }
+ case Dali::Graphics::Format::ETC2_R8G8B8A1_UNORM_BLOCK:
+ {
+ return vk::Format::eEtc2R8G8B8A1UnormBlock;
+ }
+ case Dali::Graphics::Format::ETC2_R8G8B8A1_SRGB_BLOCK:
+ {
+ return vk::Format::eEtc2R8G8B8A1SrgbBlock;
+ }
+ case Dali::Graphics::Format::ETC2_R8G8B8A8_UNORM_BLOCK:
+ {
+ return vk::Format::eEtc2R8G8B8A8UnormBlock;
+ }
+ case Dali::Graphics::Format::ETC2_R8G8B8A8_SRGB_BLOCK:
+ {
+ return vk::Format::eEtc2R8G8B8A8SrgbBlock;
+ }
+ case Dali::Graphics::Format::EAC_R11_UNORM_BLOCK:
+ {
+ return vk::Format::eEacR11UnormBlock;
+ }
+ case Dali::Graphics::Format::EAC_R11_SNORM_BLOCK:
+ {
+ return vk::Format::eEacR11SnormBlock;
+ }
+ case Dali::Graphics::Format::EAC_R11G11_UNORM_BLOCK:
+ {
+ return vk::Format::eEacR11G11UnormBlock;
+ }
+ case Dali::Graphics::Format::EAC_R11G11_SNORM_BLOCK:
+ {
+ return vk::Format::eEacR11G11SnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_4x4_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc4x4UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_4x4_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc4x4SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_5x4_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc5x4UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_5x4_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc5x4SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_5x5_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc5x5UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_5x5_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc5x5SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_6x5_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc6x5UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_6x5_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc6x5SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_6x6_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc6x6UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_6x6_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc6x6SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_8x5_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc8x5UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_8x5_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc8x5SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_8x6_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc8x6UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_8x6_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc8x6SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_8x8_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc8x8UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_8x8_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc8x8SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x5_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc10x5UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x5_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc10x5SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x6_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc10x6UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x6_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc10x6SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x8_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc10x8UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x8_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc10x8SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x10_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc10x10UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_10x10_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc10x10SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_12x10_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc12x10UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_12x10_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc12x10SrgbBlock;
+ }
+ case Dali::Graphics::Format::ASTC_12x12_UNORM_BLOCK:
+ {
+ return vk::Format::eAstc12x12UnormBlock;
+ }
+ case Dali::Graphics::Format::ASTC_12x12_SRGB_BLOCK:
+ {
+ return vk::Format::eAstc12x12SrgbBlock;
+ }
+ case Dali::Graphics::Format::PVRTC1_2BPP_UNORM_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc12BppUnormBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC1_4BPP_UNORM_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc14BppUnormBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC2_2BPP_UNORM_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc22BppUnormBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC2_4BPP_UNORM_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc24BppUnormBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC1_2BPP_SRGB_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc12BppSrgbBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC1_4BPP_SRGB_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc14BppSrgbBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC2_2BPP_SRGB_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc22BppSrgbBlockIMG;
+ }
+ case Dali::Graphics::Format::PVRTC2_4BPP_SRGB_BLOCK_IMG:
+ {
+ return vk::Format::ePvrtc24BppSrgbBlockIMG;
+ }
+ }
+ return {};
+}
+
+/**
+ * Describes one supported pixel-format conversion: the unsupported source
+ * format, the supported destination format, and the two conversion entry
+ * points (an allocating variant and a write-into-caller-memory variant).
+ */
+struct ColorConversion
+{
+ vk::Format oldFormat; ///< Format requested by the client but unsupported by the driver
+ vk::Format newFormat; ///< Supported format the pixel data is converted to
+ std::vector<uint8_t> (*pConversionFunc)(const void*, uint32_t, uint32_t, uint32_t, uint32_t); ///< Returns a newly allocated converted buffer
+ void (*pConversionWriteFunc)(const void*, uint32_t, uint32_t, uint32_t, uint32_t, void*); ///< Writes converted data into caller-provided memory
+};
+
+/**
+ * Converts tightly-packed RGB888 pixel data to RGBA8888, setting alpha to 0xff.
+ *
+ * @param pData source RGB pixel data
+ * @param sizeInBytes size of the source data in bytes (expected: width * height * 3)
+ * @param width width in pixels
+ * @param height height in pixels
+ * @param rowStride source row stride in bytes (currently unused; rows assumed tightly packed)
+ * @return buffer of width * height RGBA pixels
+ */
+inline std::vector<uint8_t> ConvertRGB32ToRGBA32(const void* pData, uint32_t sizeInBytes, uint32_t width, uint32_t height, uint32_t rowStride)
+{
+ //@todo: use stride if non-zero
+ std::vector<uint8_t> rgbaBuffer{};
+
+ auto inData = reinterpret_cast<const uint8_t*>(pData);
+
+ rgbaBuffer.resize(width * height * 4);
+ auto outData = rgbaBuffer.data();
+ auto outIdx = 0u;
+ // Bound the loop on both sides: stop before reading a truncated final RGB
+ // triplet (previously read inData[i+1]/inData[i+2] past the end when
+ // sizeInBytes wasn't a multiple of 3) and before writing past the output
+ // buffer when sizeInBytes exceeds width * height * 3.
+ const auto maxOutBytes = uint32_t(rgbaBuffer.size());
+ for(auto i = 0u; i + 2u < sizeInBytes && outIdx + 3u < maxOutBytes; i += 3)
+ {
+ outData[outIdx] = inData[i];
+ outData[outIdx + 1] = inData[i + 1];
+ outData[outIdx + 2] = inData[i + 2];
+ outData[outIdx + 3] = 0xff;
+ outIdx += 4;
+ }
+ return rgbaBuffer;
+}
+
+/**
+ * Converts tightly-packed RGB888 pixel data to RGBA8888 (alpha = 0xff),
+ * writing into caller-provided memory which must hold width * height * 4 bytes.
+ *
+ * @param pData source RGB pixel data
+ * @param sizeInBytes size of the source data in bytes (expected: width * height * 3)
+ * @param width width in pixels
+ * @param height height in pixels
+ * @param rowStride source row stride in bytes (currently unused; rows assumed tightly packed)
+ * @param pOutput destination buffer (width * height * 4 bytes)
+ */
+inline void WriteRGB32ToRGBA32(const void* pData, uint32_t sizeInBytes, uint32_t width, uint32_t height, uint32_t rowStride, void* pOutput)
+{
+ auto inData = reinterpret_cast<const uint8_t*>(pData);
+ auto outData = reinterpret_cast<uint8_t*>(pOutput);
+ auto outIdx = 0u;
+ // Bound the loop on both sides: stop before reading a truncated final RGB
+ // triplet and before overrunning the caller-provided output buffer.
+ const auto maxOutBytes = width * height * 4u;
+ for(auto i = 0u; i + 2u < sizeInBytes && outIdx + 3u < maxOutBytes; i += 3)
+ {
+ outData[outIdx] = inData[i];
+ outData[outIdx + 1] = inData[i + 1];
+ outData[outIdx + 2] = inData[i + 2];
+ outData[outIdx + 3] = 0xff;
+ outIdx += 4;
+ }
+}
+
+/**
+ * Format conversion table
+ * Maps each unsupported source format to a supported target format plus the
+ * conversion functions used to re-encode pixel data on upload.
+ * Currently only RGB888 -> RGBA8888 is implemented.
+ */
+static const std::vector<ColorConversion> COLOR_CONVERSION_TABLE =
+ {
+ {vk::Format::eR8G8B8Unorm, vk::Format::eR8G8B8A8Unorm, ConvertRGB32ToRGBA32, WriteRGB32ToRGBA32}};
+
+/**
+ * This function tests whether format is supported by the driver. If possible it applies
+ * format conversion to suitable supported pixel format, returning the converted
+ * data in @p outputBuffer.
+ * @return True if conversion was performed and produced data, false otherwise
+ */
+bool Texture::TryConvertPixelData(const void* pData, uint32_t sizeInBytes, uint32_t width, uint32_t height, std::vector<uint8_t>& outputBuffer)
+{
+ // No need to convert
+ if(mConvertFromFormat == vk::Format::eUndefined)
+ {
+ return false;
+ }
+
+ auto it = std::find_if(COLOR_CONVERSION_TABLE.begin(), COLOR_CONVERSION_TABLE.end(), [&](auto& item)
+ { return item.oldFormat == mConvertFromFormat; });
+
+ // No suitable format, return empty array
+ if(it == COLOR_CONVERSION_TABLE.end())
+ {
+ return false;
+ }
+
+ auto begin = reinterpret_cast<const uint8_t*>(pData);
+
+ // The conversion function already returns an rvalue; wrapping it in
+ // std::move() was redundant and can inhibit copy elision
+ // (clang-tidy: performance-move-const-arg).
+ outputBuffer = it->pConversionFunc(begin, sizeInBytes, width, height, 0u);
+ return !outputBuffer.empty();
+}
+
+/**
+ * Tries to convert pixel data to a driver-supported format, writing the
+ * result into caller-provided memory (see the allocating overload above).
+ * @p pOutputBuffer must be large enough for the converted data.
+ * @return True if a conversion was found and applied, false otherwise
+ */
+bool Texture::TryConvertPixelData(const void* pData, uint32_t sizeInBytes, uint32_t width, uint32_t height, void* pOutputBuffer)
+{
+ // No need to convert
+ if(mConvertFromFormat == vk::Format::eUndefined)
+ {
+ return false;
+ }
+
+ auto it = std::find_if(COLOR_CONVERSION_TABLE.begin(), COLOR_CONVERSION_TABLE.end(), [&](auto& item)
+ { return item.oldFormat == mConvertFromFormat; });
+
+ // No suitable format, return empty array
+ if(it == COLOR_CONVERSION_TABLE.end())
+ {
+ return false;
+ }
+
+ auto begin = reinterpret_cast<const uint8_t*>(pData);
+
+ it->pConversionWriteFunc(begin, sizeInBytes, width, height, 0u, pOutputBuffer);
+
+ return true;
+}
+
+/**
+ * Schedules a deferred buffer-to-image copy for the given mip level and layer.
+ * The transfer request is queued on the controller and executed later,
+ * not inside this call.
+ * @note @p flags is currently unused.
+ */
+void Texture::CopyBuffer(const Dali::Graphics::Buffer& gfxBuffer,
+ uint32_t bufferOffset,
+ Dali::Graphics::Extent2D extent2D,
+ Dali::Graphics::Offset2D textureOffset2D,
+ uint32_t layer,
+ uint32_t level,
+ Dali::Graphics::TextureUpdateFlags flags)
+{
+ // Lazily create memory, view and sampler on first use
+ if(!mImageView)
+ {
+ InitializeImageView();
+ }
+
+ ResourceTransferRequest transferRequest(TransferRequestType::BUFFER_TO_IMAGE);
+
+ // Row length/image height of 0/extent mean the buffer data is tightly packed.
+ transferRequest.bufferToImageInfo.copyInfo
+ .setImageSubresource(vk::ImageSubresourceLayers{}
+ .setBaseArrayLayer(layer)
+ .setLayerCount(1)
+ .setAspectMask(vk::ImageAspectFlagBits::eColor)
+ .setMipLevel(level))
+ .setImageOffset({textureOffset2D.x, textureOffset2D.y, 0})
+ .setImageExtent({extent2D.width, extent2D.height, 1})
+ .setBufferRowLength({0u})
+ .setBufferOffset({bufferOffset})
+ .setBufferImageHeight({extent2D.height});
+
+ // const_cast only to reach the non-const GetImpl(); the buffer is used read-only here.
+ auto& buffer = const_cast<Vulkan::Buffer&>(static_cast<const Vulkan::Buffer&>(gfxBuffer));
+ transferRequest.bufferToImageInfo.dstImage = mImage;
+ transferRequest.bufferToImageInfo.srcBuffer = buffer.GetImpl();
+ transferRequest.deferredTransferMode = true;
+
+ // schedule transfer
+ mController.ScheduleResourceTransfer(std::move(transferRequest));
+}
+
+/**
+ * Copies pixel data directly into the mapped image memory. Only valid for
+ * LINEAR-tiled textures (host-writable memory); no-op otherwise. Performs
+ * no validation and no format conversion of the source data.
+ * @param info update info (mip level, layer, extents, offsets)
+ * @param sourceInfo raw memory or PixelData source
+ * @param keepMapped if true, the memory stays mapped after the call
+ */
+void Texture::CopyMemoryDirect(
+ const Dali::Graphics::TextureUpdateInfo& info,
+ const Dali::Graphics::TextureUpdateSourceInfo& sourceInfo,
+ bool keepMapped)
+{
+ /**
+ * Early return if the texture doesn't use linear tiling and
+ * the memory isn't host writable.
+ */
+ if(mTiling != Dali::Graphics::TextureTiling::LINEAR)
+ {
+ return;
+ }
+
+ // try to initialise resource
+ InitializeImageView();
+
+ auto memory = mImage->GetMemory();
+
+ /**
+ * @todo: The texture modified frequently could stay mapped for longer
+ */
+ auto ptr = memory->MapTyped<char>();
+
+ /**
+ * Get subresource layout to find out the rowPitch size
+ */
+ auto subresourceLayout = mDevice.GetLogicalDevice().getImageSubresourceLayout(
+ mImage->GetVkHandle(),
+ vk::ImageSubresource{}
+ .setAspectMask(vk::ImageAspectFlagBits::eColor)
+ .setMipLevel(info.level)
+ .setArrayLayer(info.layer));
+
+ auto formatInfo = Vulkan::GetFormatInfo(mImage->GetFormat());
+ int sizeInBytes = int(formatInfo.blockSizeInBits / 8); // bytes per pixel (uncompressed formats)
+ auto dstRowLength = subresourceLayout.rowPitch;
+ auto dstPtr = ptr + int(dstRowLength) * info.dstOffset2D.y + sizeInBytes * info.dstOffset2D.x;
+
+ // NOTE(review): srcPtr stays nullptr for other source types — confirm callers
+ // only pass MEMORY or PIXEL_DATA sources here.
+ uint8_t* srcPtr = nullptr;
+ if(sourceInfo.sourceType == Dali::Graphics::TextureUpdateSourceInfo::Type::MEMORY)
+ {
+ srcPtr = reinterpret_cast<uint8_t*>(sourceInfo.memorySource.memory);
+ }
+ else if(sourceInfo.sourceType == Dali::Graphics::TextureUpdateSourceInfo::Type::PIXEL_DATA)
+ {
+ auto pixelBufferData = Dali::Integration::GetPixelDataBuffer(sourceInfo.pixelDataSource.pixelData);
+ srcPtr = pixelBufferData.buffer + info.srcOffset;
+ }
+
+ auto srcRowLength = int(info.srcExtent2D.width) * sizeInBytes;
+
+ if(formatInfo.compressed)
+ {
+ // NOTE(review): compressed data is copied to `ptr` (the mapped base), not
+ // `dstPtr`, so dstOffset2D is ignored — confirm compressed updates are
+ // always full-subresource.
+ std::copy(reinterpret_cast<const char*>(srcPtr), reinterpret_cast<const char*>(srcPtr) + info.srcSize, ptr);
+ }
+ else
+ {
+ /**
+ * Copy content line by line
+ */
+ for(auto i = 0u; i < info.srcExtent2D.height; ++i)
+ {
+ std::copy(srcPtr, srcPtr + int(info.srcExtent2D.width) * sizeInBytes, dstPtr);
+ dstPtr += dstRowLength;
+ srcPtr += srcRowLength;
+ }
+ }
+
+ if(!keepMapped)
+ {
+ // Unmap
+ memory->Unmap();
+
+ // ...and flush
+ // NOTE(review): vkFlushMappedMemoryRanges must operate on *mapped* memory;
+ // flushing after Unmap() looks inverted — verify Memory::Flush() tolerates this.
+ memory->Flush();
+ }
+}
+
+/**
+ * Checks whether the driver supports @p sourceFormat for the tiling mode in
+ * use; if not, consults the conversion table for a substitute.
+ * @return the original format if supported, a convertible substitute format,
+ *         or vk::Format::eUndefined if neither applies
+ */
+vk::Format Texture::ValidateFormat(vk::Format sourceFormat)
+{
+ auto formatProperties = mDevice.GetPhysicalDevice().getFormatProperties(sourceFormat);
+ // Linear-tiling features matter when staging buffers are disabled (direct host writes).
+ vk::FormatFeatureFlags formatFlags = (mDisableStagingBuffer ? formatProperties.linearTilingFeatures : formatProperties.optimalTilingFeatures);
+
+ auto retval = vk::Format::eUndefined;
+
+ // if format isn't supported, see whether suitable conversion is implemented
+ if(!formatFlags)
+ {
+ auto it = std::find_if(COLOR_CONVERSION_TABLE.begin(), COLOR_CONVERSION_TABLE.end(), [&](auto& item)
+ { return item.oldFormat == sourceFormat; });
+
+ // No suitable format, return empty array
+ if(it != COLOR_CONVERSION_TABLE.end())
+ {
+ retval = it->newFormat;
+ }
+ }
+ else
+ {
+ retval = sourceFormat;
+ }
+
+ return retval;
+}
+
+/**
+ * Constructs the texture resource wrapper. Vulkan objects are created later
+ * in InitializeResource(); members such as mWidth/mHeight/mFormat/mUsage are
+ * only valid once SetFormatAndUsage() has run.
+ */
+Texture::Texture(const Dali::Graphics::TextureCreateInfo& createInfo, VulkanGraphicsController& controller)
+: Resource(createInfo, controller),
+ mDevice(controller.GetGraphicsDevice()),
+ mImage(),
+ mImageView(),
+ mSampler(),
+ mTiling(TextureTiling::LINEAR)
+{
+ // Check env variable in order to disable staging buffers
+ // (any value other than a leading '0' enables the override)
+ auto var = getenv("DALI_DISABLE_TEXTURE_STAGING_BUFFERS");
+ if(var && var[0] != '0')
+ {
+ mDisableStagingBuffer = true;
+ }
+}
+
+Texture::~Texture()
+{
+ // NOTE(review): mSampler is not deleted here — confirm SamplerImpl instances
+ // are owned and destroyed by the device/cache, otherwise this leaks.
+ delete mImageView;
+ delete mImage;
+}
+
+/**
+ * Creates the Vulkan image for this texture (deferred initialization).
+ * @return True on success; false if the format is unsupported or creation failed
+ */
+bool Texture::InitializeResource()
+{
+ SetFormatAndUsage();
+
+ if(mFormat == vk::Format::eUndefined)
+ {
+ // not supported!
+ return false;
+ }
+
+ if(InitializeTexture())
+ {
+ // force generating properties
+ GetProperties();
+
+ return true;
+ }
+
+ return false;
+}
+
+// Intentionally empty — TODO(review): confirm Vulkan object destruction is
+// handled by ~Texture() rather than here.
+void Texture::DestroyResource()
+{
+}
+
+// Intentionally empty — TODO(review): confirm deferred discard/GC is not yet
+// required for textures in this backend.
+void Texture::DiscardResource()
+{
+}
+
+/**
+ * Derives the Vulkan format, usage flags and tiling from the create info.
+ * When the requested format is unsupported, mConvertFromFormat records the
+ * original format so pixel data can be converted on upload (see TryConvertPixelData).
+ */
+void Texture::SetFormatAndUsage()
+{
+ auto size = mCreateInfo.size;
+ mWidth = uint32_t(size.width);
+ mHeight = uint32_t(size.height);
+ mLayout = vk::ImageLayout::eUndefined;
+
+ // The `(0 | bit)` form converts a single flag bit into a usage mask;
+ // presumably required by the flag-bit operator overloads — TODO confirm.
+ if(mCreateInfo.usageFlags & (0 | TextureUsageFlagBits::COLOR_ATTACHMENT))
+ {
+ mUsage = vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled;
+ mTiling = TextureTiling::OPTIMAL; // force always OPTIMAL tiling
+ }
+ else if(mCreateInfo.usageFlags & (0 | TextureUsageFlagBits::DEPTH_STENCIL_ATTACHMENT))
+ {
+ mUsage = vk::ImageUsageFlagBits::eDepthStencilAttachment | vk::ImageUsageFlagBits::eSampled;
+ mTiling = TextureTiling::OPTIMAL; // force always OPTIMAL tiling
+ }
+ else if(mCreateInfo.usageFlags & (0 | TextureUsageFlagBits::SAMPLE))
+ {
+ mUsage = vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst;
+ }
+
+ auto format = ConvertApiToVk(mCreateInfo.format);
+
+ mFormat = ValidateFormat(format);
+ mConvertFromFormat = vk::Format::eUndefined;
+
+ // Remember the original format when conversion will be required on upload
+ if(format != mFormat)
+ {
+ mConvertFromFormat = format;
+ }
+ mComponentMapping = GetVkComponentMapping(mCreateInfo.format);
+}
+
+// creates image with pre-allocated memory and default sampler, no data
+// uploaded at this point
+/**
+ * @return True on success; false if an image already exists or creation failed
+ */
+bool Texture::InitializeTexture()
+{
+ if(mImage)
+ {
+ return false;
+ }
+
+ // create image
+ auto imageCreateInfo = vk::ImageCreateInfo{}
+ .setFormat(mFormat)
+ .setInitialLayout(mLayout)
+ .setSamples(vk::SampleCountFlagBits::e1)
+ .setSharingMode(vk::SharingMode::eExclusive)
+ .setUsage(mUsage)
+ .setExtent({mWidth, mHeight, 1})
+ .setArrayLayers(1)
+ .setImageType(vk::ImageType::e2D)
+ .setTiling(mDisableStagingBuffer || mTiling == Dali::Graphics::TextureTiling::LINEAR ? vk::ImageTiling::eLinear : vk::ImageTiling::eOptimal)
+ .setMipLevels(1);
+
+ // Create the image handle
+ mImage = Image::New(mDevice, imageCreateInfo);
+
+ if(!mImage)
+ {
+ return false;
+ }
+
+ // Non sampled image will be lazily initialised
+ // (sampled/transfer-destination images wait for the first upload instead)
+ if(!(mUsage & vk::ImageUsageFlagBits::eTransferDst))
+ {
+ InitializeImageView();
+ }
+ return true;
+}
+
+/**
+ * Lazily allocates and binds the image memory, then creates the image view
+ * and a default clamp-to-edge linear sampler. Safe to call repeatedly;
+ * does nothing once the view exists.
+ */
+void Texture::InitializeImageView()
+{
+ if(!mImageView)
+ {
+ vk::MemoryPropertyFlags memoryProperties{};
+ if(mDisableStagingBuffer)
+ {
+ memoryProperties |= vk::MemoryPropertyFlagBits::eDeviceLocal;
+ }
+ if(mTiling == Dali::Graphics::TextureTiling::LINEAR)
+ memoryProperties |= vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent;
+ else
+ memoryProperties |= vk::MemoryPropertyFlagBits::eDeviceLocal;
+
+ mImage->AllocateAndBind(memoryProperties);
+
+ // Create image view
+ mImageView = ImageView::NewFromImage(mDevice, *mImage, mComponentMapping);
+
+ // create basic sampler
+ // Note: the border color was previously set twice in a row
+ // (eFloatOpaqueBlack, then overwritten with eFloatTransparentBlack);
+ // the dead store has been removed and only the final value is set.
+ auto samplerCreateInfo = vk::SamplerCreateInfo()
+ .setAddressModeU(vk::SamplerAddressMode::eClampToEdge)
+ .setAddressModeV(vk::SamplerAddressMode::eClampToEdge)
+ .setAddressModeW(vk::SamplerAddressMode::eClampToEdge)
+ .setBorderColor(vk::BorderColor::eFloatTransparentBlack)
+ .setCompareOp(vk::CompareOp::eNever)
+ .setMinFilter(vk::Filter::eLinear)
+ .setMagFilter(vk::Filter::eLinear)
+ .setMipmapMode(vk::SamplerMipmapMode::eLinear)
+ .setMaxAnisotropy(1.0f); // must be 1.0f when anisotropy feature isn't enabled
+
+ mSampler = SamplerImpl::New(mDevice, samplerCreateInfo);
+ }
+}
+
+/// @return the underlying image, or nullptr before InitializeResource()
+Vulkan::Image* Texture::GetImage() const
+{
+ return mImage;
+}
+
+/// @return the image view, or nullptr until lazily created by InitializeImageView()
+Vulkan::ImageView* Texture::GetImageView() const
+{
+ return mImageView;
+}
+
+/// @return the default sampler, or nullptr until lazily created by InitializeImageView()
+Vulkan::SamplerImpl* Texture::GetSampler() const
+{
+ return mSampler;
+}
+
+/// Maps a Graphics API format to the corresponding vk::Format (delegates to the constexpr table).
+vk::Format Texture::ConvertApiToVk(Dali::Graphics::Format format)
+{
+ return ConvertApiToVkConst(format);
+}
+
+/// Returns the component swizzle used for the image view of this format (delegates to the inlined helper).
+vk::ComponentMapping Texture::GetVkComponentMapping(Dali::Graphics::Format format)
+{
+ return GetVkComponentMappingInlined(format);
+}
+
+/// Currently always false — this backend does not use immutable samplers.
+bool Texture::IsSamplerImmutable() const
+{
+ return false;
+}
+
+/**
+ * Queries size/alignment requirements of the underlying image.
+ * @note dereferences mImage — only valid after InitializeResource() succeeded.
+ */
+MemoryRequirements Texture::GetMemoryRequirements() const
+{
+ auto requirements = mDevice.GetLogicalDevice().getImageMemoryRequirements(mImage->GetVkHandle());
+ return MemoryRequirements{size_t(requirements.size), size_t(requirements.alignment)};
+}
+
+/**
+ * Returns (and lazily builds on first call) the immutable texture properties.
+ * @return reference to the cached TextureProperties object
+ */
+const TextureProperties& Texture::GetProperties()
+{
+ if(!mProperties)
+ {
+ // std::make_unique already yields an rvalue; the previous std::move()
+ // wrapper was redundant (clang-tidy: performance-move-const-arg).
+ mProperties = std::make_unique<Dali::Graphics::TextureProperties>();
+
+ auto formatInfo = GetFormatInfo(mFormat);
+ mProperties->compressed = formatInfo.compressed;
+ mProperties->packed = formatInfo.packed;
+ mProperties->emulated = mConvertFromFormat != vk::Format::eUndefined;
+ mProperties->format = mCreateInfo.format;
+ mProperties->format1 = mCreateInfo.format;
+ mProperties->extent2D = {mWidth, mHeight};
+ mProperties->directWriteAccessEnabled = (mTiling == Dali::Graphics::TextureTiling::LINEAR);
+ mProperties->nativeHandle = 0; //@todo change to Dali::Any, and pass vkImage handle
+ }
+ return *mProperties;
+}
+
+} // namespace Dali::Graphics::Vulkan
--- /dev/null
+#ifndef DALI_GRAPHICS_VULKAN_TEXTURE_IMPL_H
+#define DALI_GRAPHICS_VULKAN_TEXTURE_IMPL_H
+
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <dali/graphics-api/graphics-texture-create-info.h>
+#include <dali/graphics-api/graphics-texture.h>
+#include <dali/graphics-api/graphics-types.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-graphics-resource.h>
+#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
+#include <dali/internal/graphics/vulkan/vulkan-hpp-wrapper.h>
+
+namespace Dali::Graphics::Vulkan
+{
+class Device;
+class Memory;
+class Image;
+class ImageView;
+class SamplerImpl;
+
+using TextureResource = Resource<Graphics::Texture, Graphics::TextureCreateInfo>;
+
+/**
+ * Vulkan implementation of a Graphics::Texture resource. Owns the image and
+ * its view (deleted in the destructor) plus a default sampler, and handles
+ * pixel uploads including format conversion for formats the driver does not
+ * support directly.
+ */
+class Texture : public TextureResource
+{
+public:
+ /// Factory method — TODO(review): no matching definition visible in the .cpp; confirm it exists/is used.
+ static Texture* New(Device& graphicsDevice);
+
+ /// Constructs the wrapper; Vulkan objects are created later in InitializeResource().
+ Texture(const Graphics::TextureCreateInfo& createInfo, VulkanGraphicsController& controller);
+ ~Texture();
+
+ /// Creates the Vulkan image. @return false if the requested format is unsupported.
+ bool InitializeResource() override;
+
+ void DestroyResource() override;
+
+ void DiscardResource() override;
+
+ /// @return true if the sampler cannot change after creation (currently always false)
+ bool IsSamplerImmutable() const;
+
+ /// @return the image, or nullptr before initialization
+ Image* GetImage() const;
+
+ /// @return the image view, or nullptr until lazily created
+ ImageView* GetImageView() const;
+
+ /// @return the default sampler, or nullptr until lazily created
+ SamplerImpl* GetSampler() const;
+
+ // @todo Temporary. We need to use the newer APIs
+ enum UpdateMode
+ {
+ UNDEFINED,
+ DEFERRED,
+ IMMEDIATE
+ };
+
+ /// Derives Vulkan format, usage flags and tiling from the create info.
+ void SetFormatAndUsage();
+
+ /// @return size/alignment requirements of the image (image must already exist)
+ Dali::Graphics::MemoryRequirements GetMemoryRequirements() const;
+
+ /// Creates the vk::Image (no data uploaded). @return false if an image already exists.
+ bool InitializeTexture();
+
+ /**
+ * Returns structure with texture properties
+ * @return The reference to immutable TextureProperties object
+ */
+ const Dali::Graphics::TextureProperties& GetProperties();
+
+ /**
+ * Initialises resources like memory, image view and samplers for previously
+ * initialised image object. Used when lazy allocation is needed.
+ */
+ void InitializeImageView();
+
+ /**
+ * Tries to convert pixel data to the compatible format. As result it returns new buffer.
+ * @param pData source data
+ * @param sizeInBytes size of source data in bytes
+ * @param width width in pixels
+ * @param height height in pixels
+ * @param outputBuffer reference to an output buffer
+ * @return True if conversion was successful
+ */
+ bool TryConvertPixelData(const void* pData, uint32_t sizeInBytes, uint32_t width, uint32_t height, std::vector<uint8_t>& outputBuffer);
+
+ /**
+ * Tries to convert pixel data to the compatible format. The result is written into the specified memory area.
+ * The memory must be allocated and large enough to accomodate output data.
+ * @param pData source data
+ * @param sizeInBytes size of source data in bytes
+ * @param width width in pixels
+ * @param height height in pixels
+ * @param pOutputBuffer pointer to a valid output buffer
+ * @return True if conversion was successful
+ */
+ bool TryConvertPixelData(const void* pData, uint32_t sizeInBytes, uint32_t width, uint32_t height, void* pOutputBuffer);
+
+ /**
+ * Schedules a deferred buffer-to-image copy on the controller.
+ * @param[in] buffer The buffer to copy from
+ * @param[in] bufferOffset The start of the texture within the buffer
+ * @param[in] extent2D The size of the data
+ * @param[in] textureOffset2D Destination offset within the texture
+ * @param[in] layer Destination array layer
+ * @param[in] level Destination mip level
+ * @param[in] flags Update flags (currently unused)
+ */
+ void CopyBuffer(
+ const Dali::Graphics::Buffer& buffer,
+ uint32_t bufferOffset,
+ Dali::Graphics::Extent2D extent2D,
+ Dali::Graphics::Offset2D textureOffset2D,
+ uint32_t layer,
+ uint32_t level,
+ Dali::Graphics::TextureUpdateFlags flags);
+
+ /**
+ * Direct copy memory to memory, used when linear tiling is enabled. This function
+ * doesn't check if data is valid and doesn't perform format conversion.
+ * @param info
+ * @param sourceInfo
+ * @param keepMapped if true, the memory stays mapped after the call
+ */
+ void CopyMemoryDirect(
+ const Dali::Graphics::TextureUpdateInfo& info,
+ const Dali::Graphics::TextureUpdateSourceInfo& sourceInfo,
+ bool keepMapped);
+
+private:
+ /**
+ * Validates initial format
+ * @return if valid, returns existing format
+ * if possible conversion, returns new converted format
+ * if not supported returns vk::Format::eUndefined
+ */
+ vk::Format ValidateFormat(vk::Format sourceFormat);
+
+ /// Maps a Graphics API format to the corresponding vk::Format.
+ vk::Format ConvertApiToVk(Dali::Graphics::Format format);
+
+ /// Returns the component swizzle used when creating the image view.
+ vk::ComponentMapping GetVkComponentMapping(Dali::Graphics::Format format);
+
+private:
+ Vulkan::Device& mDevice;
+
+ Image* mImage; ///< Owned; deleted in the destructor
+ ImageView* mImageView; ///< Owned; deleted in the destructor
+ SamplerImpl* mSampler; ///< NOTE(review): not deleted in the destructor — confirm ownership
+
+ uint32_t mWidth; ///< Valid after SetFormatAndUsage()
+ uint32_t mHeight; ///< Valid after SetFormatAndUsage()
+ vk::Format mFormat; ///< Valid after SetFormatAndUsage()
+ vk::Format mConvertFromFormat{vk::Format::eUndefined}; ///< Original format when conversion on upload is needed
+ vk::ImageUsageFlags mUsage; ///< Valid after SetFormatAndUsage()
+ vk::ImageLayout mLayout; ///< Valid after SetFormatAndUsage()
+ vk::ComponentMapping mComponentMapping{};
+
+ bool mDisableStagingBuffer{false}; ///< Set from the DALI_DISABLE_TEXTURE_STAGING_BUFFERS env var
+ Dali::Graphics::TextureTiling mTiling{Dali::Graphics::TextureTiling::OPTIMAL};
+
+ std::unique_ptr<Dali::Graphics::TextureProperties> mProperties; ///< Lazily built by GetProperties()
+};
+
+} // namespace Dali::Graphics::Vulkan
+
+#endif // DALI_GRAPHICS_VULKAN_TEXTURE_IMPL_H
class Device;
class Queue;
-class Buffer;
-class CommandBufferImpl;
-class CommandPool;
class DescriptorPool;
class DescriptorSet;
-class Fence;
class FramebufferImpl;
class FramebufferAttachment;
class GpuMemoryBlock;
WAYLAND,
};
+/**
+ * Describes the block layout of a vk::Format, used for size calculations
+ * when uploading texture data (see GetFormatInfo).
+ */
+struct FormatInfo
+{
+ bool packed{false}; ///< True for packed formats (e.g. *_PACK16/_PACK32)
+ bool compressed{false}; ///< True for block-compressed formats (BC/ETC/ASTC/PVRTC)
+ unsigned int paletteSizeInBits{0u}; ///< Palette size for paletted formats, 0 otherwise
+ unsigned int blockSizeInBits{0u}; ///< Bits per block (bits per pixel for uncompressed formats)
+ unsigned int blockWidth{0u}; ///< Block width in texels
+ unsigned int blockHeight{0u}; ///< Block height in texels
+ unsigned int blockDepth{0u}; ///< Block depth in texels
+};
+
struct VkLoadOpType
{
constexpr explicit VkLoadOpType(Graphics::AttachmentLoadOp op)
--- /dev/null
+#ifndef DALI_GRAPHICS_VULKAN_UTILS
+#define DALI_GRAPHICS_VULKAN_UTILS
+
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#include <dali/internal/graphics/vulkan-impl/vulkan-types.h>
+
+namespace Dali
+{
+
+namespace Graphics
+{
+
+namespace Vulkan
+{
+
+/**
+ * Returns Vulkan vk::Format details.
+ *
+ * For uncompressed formats the reported "block" is a single texel; for the
+ * block-compressed families (BC, ETC2/EAC, ASTC) it is the compression block.
+ *
+ * @param[in] format The Vulkan format to query
+ * @return A FormatInfo for the format; blockSizeInBits is 0 if the format is
+ *         not handled by this helper (see the default case)
+ */
+static inline constexpr FormatInfo GetFormatInfo(const vk::Format format)
+{
+  auto formatSize = FormatInfo{};
+  auto vkFormat = static_cast<VkFormat>(format);
+  switch(vkFormat)
+  {
+    // 8-bit packed format
+    case VK_FORMAT_R4G4_UNORM_PACK8:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 1 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // 16-bit packed formats
+    case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+    case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+    case VK_FORMAT_R5G6B5_UNORM_PACK16:
+    case VK_FORMAT_B5G6R5_UNORM_PACK16:
+    case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+    case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
+    case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 2 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Single-channel 8-bit formats
+    case VK_FORMAT_R8_UNORM:
+    case VK_FORMAT_R8_SNORM:
+    case VK_FORMAT_R8_USCALED:
+    case VK_FORMAT_R8_SSCALED:
+    case VK_FORMAT_R8_UINT:
+    case VK_FORMAT_R8_SINT:
+    case VK_FORMAT_R8_SRGB:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 1 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Two-channel 8-bit formats
+    case VK_FORMAT_R8G8_UNORM:
+    case VK_FORMAT_R8G8_SNORM:
+    case VK_FORMAT_R8G8_USCALED:
+    case VK_FORMAT_R8G8_SSCALED:
+    case VK_FORMAT_R8G8_UINT:
+    case VK_FORMAT_R8G8_SINT:
+    case VK_FORMAT_R8G8_SRGB:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 2 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Three-channel 8-bit formats (24 bits per texel)
+    case VK_FORMAT_R8G8B8_UNORM:
+    case VK_FORMAT_R8G8B8_SNORM:
+    case VK_FORMAT_R8G8B8_USCALED:
+    case VK_FORMAT_R8G8B8_SSCALED:
+    case VK_FORMAT_R8G8B8_UINT:
+    case VK_FORMAT_R8G8B8_SINT:
+    case VK_FORMAT_R8G8B8_SRGB:
+    case VK_FORMAT_B8G8R8_UNORM:
+    case VK_FORMAT_B8G8R8_SNORM:
+    case VK_FORMAT_B8G8R8_USCALED:
+    case VK_FORMAT_B8G8R8_SSCALED:
+    case VK_FORMAT_B8G8R8_UINT:
+    case VK_FORMAT_B8G8R8_SINT:
+    case VK_FORMAT_B8G8R8_SRGB:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 3 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Four-channel 8-bit formats (32 bits per texel)
+    case VK_FORMAT_R8G8B8A8_UNORM:
+    case VK_FORMAT_R8G8B8A8_SNORM:
+    case VK_FORMAT_R8G8B8A8_USCALED:
+    case VK_FORMAT_R8G8B8A8_SSCALED:
+    case VK_FORMAT_R8G8B8A8_UINT:
+    case VK_FORMAT_R8G8B8A8_SINT:
+    case VK_FORMAT_R8G8B8A8_SRGB:
+    case VK_FORMAT_B8G8R8A8_UNORM:
+    case VK_FORMAT_B8G8R8A8_SNORM:
+    case VK_FORMAT_B8G8R8A8_USCALED:
+    case VK_FORMAT_B8G8R8A8_SSCALED:
+    case VK_FORMAT_B8G8R8A8_UINT:
+    case VK_FORMAT_B8G8R8A8_SINT:
+    case VK_FORMAT_B8G8R8A8_SRGB:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // 32-bit packed four-channel formats
+    case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+    case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+    case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+    case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+    case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+    case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+    case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // 32-bit packed 10/10/10/2 formats
+    case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+    case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+    case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+    case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+    case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+    case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+    case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+    case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+    case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+    case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+    case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+    case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // 16-bit component formats; block size = 2 bytes * channel count
+    case VK_FORMAT_R16_UNORM:
+    case VK_FORMAT_R16_SNORM:
+    case VK_FORMAT_R16_USCALED:
+    case VK_FORMAT_R16_SSCALED:
+    case VK_FORMAT_R16_UINT:
+    case VK_FORMAT_R16_SINT:
+    case VK_FORMAT_R16_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 2 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R16G16_UNORM:
+    case VK_FORMAT_R16G16_SNORM:
+    case VK_FORMAT_R16G16_USCALED:
+    case VK_FORMAT_R16G16_SSCALED:
+    case VK_FORMAT_R16G16_UINT:
+    case VK_FORMAT_R16G16_SINT:
+    case VK_FORMAT_R16G16_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R16G16B16_UNORM:
+    case VK_FORMAT_R16G16B16_SNORM:
+    case VK_FORMAT_R16G16B16_USCALED:
+    case VK_FORMAT_R16G16B16_SSCALED:
+    case VK_FORMAT_R16G16B16_UINT:
+    case VK_FORMAT_R16G16B16_SINT:
+    case VK_FORMAT_R16G16B16_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 6 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R16G16B16A16_UNORM:
+    case VK_FORMAT_R16G16B16A16_SNORM:
+    case VK_FORMAT_R16G16B16A16_USCALED:
+    case VK_FORMAT_R16G16B16A16_SSCALED:
+    case VK_FORMAT_R16G16B16A16_UINT:
+    case VK_FORMAT_R16G16B16A16_SINT:
+    case VK_FORMAT_R16G16B16A16_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 8 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // 32-bit component formats; block size = 4 bytes * channel count
+    case VK_FORMAT_R32_UINT:
+    case VK_FORMAT_R32_SINT:
+    case VK_FORMAT_R32_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R32G32_UINT:
+    case VK_FORMAT_R32G32_SINT:
+    case VK_FORMAT_R32G32_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 8 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R32G32B32_UINT:
+    case VK_FORMAT_R32G32B32_SINT:
+    case VK_FORMAT_R32G32B32_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 12 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R32G32B32A32_UINT:
+    case VK_FORMAT_R32G32B32A32_SINT:
+    case VK_FORMAT_R32G32B32A32_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // 64-bit component formats; block size = 8 bytes * channel count
+    case VK_FORMAT_R64_UINT:
+    case VK_FORMAT_R64_SINT:
+    case VK_FORMAT_R64_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 8 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R64G64_UINT:
+    case VK_FORMAT_R64G64_SINT:
+    case VK_FORMAT_R64G64_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R64G64B64_UINT:
+    case VK_FORMAT_R64G64B64_SINT:
+    case VK_FORMAT_R64G64B64_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 24 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_R64G64B64A64_UINT:
+    case VK_FORMAT_R64G64B64A64_SINT:
+    case VK_FORMAT_R64G64B64A64_SFLOAT:
+    {
+      formatSize.packed = false;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 32 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Packed shared-exponent / small-float formats
+    case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+    case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Depth/stencil formats
+    case VK_FORMAT_D16_UNORM:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 2 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_X8_D24_UNORM_PACK32:
+    {
+      formatSize.packed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_D32_SFLOAT:
+    {
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_S8_UINT:
+    {
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 1 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_D16_UNORM_S8_UINT:
+    {
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 3 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_D24_UNORM_S8_UINT:
+    {
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 4 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_D32_SFLOAT_S8_UINT:
+    {
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 8 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // BC (S3TC/DXT) block-compressed formats; 4x4 texel blocks
+    case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+    case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+    case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+    case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 8 * 8;
+      formatSize.blockWidth = 4;
+      formatSize.blockHeight = 4;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_BC2_UNORM_BLOCK:
+    case VK_FORMAT_BC2_SRGB_BLOCK:
+    case VK_FORMAT_BC3_UNORM_BLOCK:
+    case VK_FORMAT_BC3_SRGB_BLOCK:
+    case VK_FORMAT_BC4_UNORM_BLOCK:
+    case VK_FORMAT_BC4_SNORM_BLOCK:
+    case VK_FORMAT_BC5_UNORM_BLOCK:
+    case VK_FORMAT_BC5_SNORM_BLOCK:
+    case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+    case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+    case VK_FORMAT_BC7_UNORM_BLOCK:
+    case VK_FORMAT_BC7_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 4;
+      formatSize.blockHeight = 4;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // ETC2/EAC block-compressed formats; 4x4 texel blocks
+    case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+    case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+    case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+    case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 8 * 8;
+      formatSize.blockWidth = 4;
+      formatSize.blockHeight = 4;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+    case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+    case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+    case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+    case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+    case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 4;
+      formatSize.blockHeight = 4;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // ASTC block-compressed formats; 128-bit blocks, dimensions are in the format name
+    case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 4;
+      formatSize.blockHeight = 4;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 5;
+      formatSize.blockHeight = 4;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 5;
+      formatSize.blockHeight = 5;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 6;
+      formatSize.blockHeight = 5;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 6;
+      formatSize.blockHeight = 6;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 8;
+      formatSize.blockHeight = 5;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 8;
+      formatSize.blockHeight = 6;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 8;
+      formatSize.blockHeight = 8;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 10;
+      formatSize.blockHeight = 5;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 10;
+      formatSize.blockHeight = 6;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 10;
+      formatSize.blockHeight = 8;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 10;
+      formatSize.blockHeight = 10;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 12;
+      formatSize.blockHeight = 10;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+    case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+    {
+      formatSize.compressed = true;
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 16 * 8;
+      formatSize.blockWidth = 12;
+      formatSize.blockHeight = 12;
+      formatSize.blockDepth = 1;
+      break;
+    }
+    // Unknown/unhandled format: report a zero-sized block so callers can detect it
+    default:
+    {
+      formatSize.paletteSizeInBits = 0;
+      formatSize.blockSizeInBits = 0 * 8;
+      formatSize.blockWidth = 1;
+      formatSize.blockHeight = 1;
+      formatSize.blockDepth = 1;
+      break;
+    }
+  }
+
+  return formatSize;
+}
+
+} // namespace Vulkan
+
+} // namespace Graphics
+
+} // namespace Dali
+
+#endif // DALI_GRAPHICS_VULKAN_UTILS
#if defined(DEBUG_ENABLED)
Debug::Filter* gVulkanFilter = Debug::Filter::New(Debug::Concise, false, "LOG_VULKAN");
#endif
+namespace
+{
+// Sentinel returned by Device::GetMemoryIndex when no memory type matches
+// the requested type bits and property flags.
+const uint32_t INVALID_MEMORY_INDEX = -1u;
+} // Anonymous namespace
namespace Dali::Graphics::Vulkan
{
// mDiscardQueue[mCurrentBufferIndex].push_back( std::move( deleter ) );
}
-Fence* Device::CreateFence(const vk::FenceCreateInfo& fenceCreateInfo)
-{
- vk::Fence vkFence;
- VkAssert(mLogicalDevice.createFence(&fenceCreateInfo, mAllocator.get(), &vkFence));
-
- return new Fence(*this, vkFence);
-}
-
Image* Device::CreateImageFromExternal(vk::Image externalImage, vk::Format imageFormat, vk::Extent2D extent)
{
auto imageCreateInfo = vk::ImageCreateInfo{}
return nullptr;
}
-ImageView* Device::CreateImageView(const vk::ImageViewCreateFlags& flags,
- const Image& image,
- vk::ImageViewType viewType,
- vk::Format format,
- vk::ComponentMapping components,
- vk::ImageSubresourceRange subresourceRange,
- void* pNext)
-{
- auto imageViewCreateInfo = vk::ImageViewCreateInfo{}
- .setPNext(pNext)
- .setFlags(flags)
- .setImage(image.GetVkHandle())
- .setViewType(viewType)
- .setFormat(format)
- .setComponents(components)
- .setSubresourceRange(std::move(subresourceRange));
-
- auto imageView = new ImageView(*this, &image, imageViewCreateInfo);
-
- VkAssert(mLogicalDevice.createImageView(&imageViewCreateInfo, &GetAllocator("IMAGEVIEW"), &imageView->mImageView));
-
- return imageView;
-}
-
-ImageView* Device::CreateImageView(Image* image)
-{
- vk::ComponentMapping componentsMapping = {vk::ComponentSwizzle::eR,
- vk::ComponentSwizzle::eG,
- vk::ComponentSwizzle::eB,
- vk::ComponentSwizzle::eA};
-
- auto subresourceRange = vk::ImageSubresourceRange{}
- .setAspectMask(image->GetAspectFlags())
- .setBaseArrayLayer(0)
- .setBaseMipLevel(0)
- .setLevelCount(image->GetMipLevelCount())
- .setLayerCount(image->GetLayerCount());
-
- auto imageView = CreateImageView({},
- *image,
- vk::ImageViewType::e2D,
- image->GetFormat(),
- componentsMapping,
- subresourceRange);
-
- return imageView;
-}
-
-vk::Result Device::WaitForFence(Fence* fence, uint32_t timeout)
-{
- auto f = fence->GetVkHandle();
- return mLogicalDevice.waitForFences(1, &f, VK_TRUE, timeout);
-}
-
// -------------------------------------------------------------------------------------------------------
// Getters------------------------------------------------------------------------------------------------
SurfaceImpl* Device::GetSurface(Graphics::SurfaceId surfaceId)
return extensions;
}
-vk::Result Device::Submit(Queue& queue, const std::vector<SubmissionData>& submissionData, Fence* fence)
+vk::Result Device::Submit(Queue& queue, const std::vector<SubmissionData>& submissionData, FenceImpl* fence)
{
auto lock(queue.Lock());
std::transform(subData.commandBuffers.cbegin(),
subData.commandBuffers.cend(),
std::back_inserter(commandBufferHandles),
- [&](CommandBufferImpl* entry) {
+ [&](CommandBufferImpl* entry)
+ {
return entry->GetVkHandle();
});
return VkAssert(queue.Submit(submitInfos, fence));
}
+/**
+ * Finds the index of a GPU memory type suitable for allocating a particular
+ * kind of resource.
+ *
+ * @param[in] memoryProperties The physical device's memory properties
+ * @param[in] memoryTypeBits   Bitmask of memory types acceptable to the resource
+ * @param[in] properties       Property flags the chosen memory type must include
+ * @return Index of the first matching memory type, or INVALID_MEMORY_INDEX if none matches
+ */
+uint32_t Device::GetMemoryIndex(
+  const vk::PhysicalDeviceMemoryProperties& memoryProperties,
+  uint32_t memoryTypeBits,
+  vk::MemoryPropertyFlags properties)
+{
+  uint32_t index = 0u;
+  while(index < memoryProperties.memoryTypeCount)
+  {
+    const bool typeAllowed = (memoryTypeBits & (1u << index)) != 0u;
+    const bool hasAllProperties = (memoryProperties.memoryTypes[index].propertyFlags & properties) == properties;
+    if(typeAllowed && hasAllProperties)
+    {
+      return index;
+    }
+    ++index;
+  }
+  return INVALID_MEMORY_INDEX;
+}
+
} // namespace Dali::Graphics::Vulkan
namespace Dali::Graphics::Vulkan
{
class RenderPassImpl;
-
+class CommandPool;
using CommandPoolMap = std::unordered_map<std::thread::id, CommandPool*>;
struct SwapchainSurfacePair
Swapchain* CreateSwapchain(SurfaceImpl* surface, vk::Format requestedFormat, vk::PresentModeKHR presentMode, uint32_t bufferCount, Swapchain*&& oldSwapchain);
- vk::Result Submit(Queue& queue, const std::vector<SubmissionData>& submissionData, Fence* fence = nullptr);
+ vk::Result Submit(Queue& queue, const std::vector<SubmissionData>& submissionData, FenceImpl* fence = nullptr);
vk::Result Present(Queue& queue, vk::PresentInfoKHR presentInfo);
vk::Result QueueWaitIdle(Queue& queue);
vk::Result DeviceWaitIdle();
void DiscardResource(std::function<void()> deleter);
- Fence* CreateFence(const vk::FenceCreateInfo& fenceCreateInfo);
-
FramebufferImpl* CreateFramebuffer(const std::vector<FramebufferAttachment*>& colorAttachments,
FramebufferAttachment* depthAttachment,
uint32_t width,
Image* CreateImageFromExternal(vk::Image externalImage, vk::Format imageFormat, vk::Extent2D extent);
- ImageView* CreateImageView(const vk::ImageViewCreateFlags& flags,
- const Image& image,
- vk::ImageViewType viewType,
- vk::Format format,
- vk::ComponentMapping components,
- vk::ImageSubresourceRange subresourceRange,
- void* pNext = nullptr);
-
- ImageView* CreateImageView(Image* image);
-
- vk::Result WaitForFence(Fence* fence, uint32_t timeout = std::numeric_limits<uint32_t>::max());
-
uint32_t GetCurrentBufferIndex() const;
uint32_t SwapBuffers();
return mPhysicalDeviceMemoryProperties;
}
+ static uint32_t GetMemoryIndex(
+ const vk::PhysicalDeviceMemoryProperties& memoryProperties,
+ uint32_t memoryTypeBits,
+ vk::MemoryPropertyFlags properties);
+
private: // Methods
void CreateInstance(const std::vector<const char*>& extensions,
const std::vector<const char*>& validationLayers);
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wfloat-equal"
#pragma GCC diagnostic ignored "-Wswitch-enum"
+#pragma GCC diagnostic ignored "-Wswitch"
#include <vulkan/vulkan.hpp>
#pragma GCC diagnostic pop