From a642cc1ba028fc89a2e8f5c925ad2260866b28bb Mon Sep 17 00:00:00 2001 From: Gary Sweet Date: Mon, 22 Oct 2018 12:39:09 +0100 Subject: [PATCH] Fix unaligned host accesses in UBO and SSBO tests Prevent uniform blocks from starting unaligned. Previously these tests would end up casting non-4-byte aligned addresses to float*. On 32-bit ARM CPUs this can cause unaligned access faults. Components: Vulkan Affects: dEQP-VK.ubo.* dEQP-VK.ssbo.* VK-GL-CTS issue: 1434 Change-Id: Ied437b150b61f11630b5c1511ccedd410308b004 --- .../vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp | 13 +++++++++---- .../vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp | 11 +++++++++-- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp index 42ca654..17a8228 100644 --- a/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp +++ b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp @@ -1966,11 +1966,15 @@ void initRefDataStorage (const ShaderInterface& interface, const BufferLayout& l { DE_ASSERT(storage.data.empty() && storage.pointers.empty()); - const vector bufferSizes = computeBufferSizes(interface, layout); - int totalSize = 0; + const vector bufferSizes = computeBufferSizes(interface, layout); + int totalSize = 0; + const int vec4Alignment = (int)sizeof(deUint32)*4; for (vector::const_iterator sizeIter = bufferSizes.begin(); sizeIter != bufferSizes.end(); ++sizeIter) - totalSize += *sizeIter; + { + // Include enough space for alignment of individual blocks + totalSize += deRoundUp32(*sizeIter, vec4Alignment); + } storage.data.resize(totalSize); @@ -1991,7 +1995,8 @@ void initRefDataStorage (const ShaderInterface& interface, const BufferLayout& l storage.pointers[blockNdx] = getBlockDataPtr(layout, blockLayout, basePtr + curOffset, bufferSize); - curOffset += bufferSize; + // Ensure each new block starts fully aligned to avoid unaligned host accesses + curOffset += deRoundUp32(bufferSize, vec4Alignment); } } } diff --git a/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp index f0dda16..9fee748 100644 --- a/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp +++ b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp @@ -2176,6 +2176,8 @@ TestInstance* UniformBlockCase::createInstance (Context& context) const void UniformBlockCase::init (void) { + const int vec4Alignment = (int)sizeof(deUint32)*4; + // Compute reference layout. computeStd140Layout(m_uniformLayout, m_interface); @@ -2183,7 +2185,10 @@ void UniformBlockCase::init (void) { int totalSize = 0; for (std::vector::const_iterator blockIter = m_uniformLayout.blocks.begin(); blockIter != m_uniformLayout.blocks.end(); blockIter++) - totalSize += blockIter->size; + { + // Include enough space for alignment of individual blocks + totalSize += deRoundUp32(blockIter->size, vec4Alignment); + } m_data.resize(totalSize); // Pointers for each block. @@ -2191,7 +2196,9 @@ void UniformBlockCase::init (void) for (int blockNdx = 0; blockNdx < (int)m_uniformLayout.blocks.size(); blockNdx++) { m_blockPointers[blockNdx] = &m_data[0] + curOffset; - curOffset += m_uniformLayout.blocks[blockNdx].size; + + // Ensure each new block starts fully aligned to avoid unaligned host accesses + curOffset += deRoundUp32(m_uniformLayout.blocks[blockNdx].size, vec4Alignment); } } -- 2.7.4