Fix unaligned host accesses in UBO and SSBO tests
author: Gary Sweet <gsweet@broadcom.com>
Mon, 22 Oct 2018 11:39:09 +0000 (12:39 +0100)
committer: Alexander Galazin <Alexander.Galazin@arm.com>
Thu, 25 Oct 2018 10:14:17 +0000 (06:14 -0400)
Prevent uniform blocks from starting unaligned.
Previously these tests would end up casting
non-4-byte aligned addresses to float*. On 32-bit
ARM CPUs this can cause unaligned access faults.

Components: Vulkan

Affects:
dEQP-VK.ubo.*
dEQP-VK.ssbo.*

VK-GL-CTS issue: 1434

Change-Id: Ied437b150b61f11630b5c1511ccedd410308b004

external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp
external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp

index 42ca654..17a8228 100644 (file)
@@ -1966,11 +1966,15 @@ void initRefDataStorage (const ShaderInterface& interface, const BufferLayout& l
 {
        DE_ASSERT(storage.data.empty() && storage.pointers.empty());
 
-       const vector<int>       bufferSizes = computeBufferSizes(interface, layout);
-       int                                     totalSize       = 0;
+       const vector<int>       bufferSizes             = computeBufferSizes(interface, layout);
+       int                                     totalSize               = 0;
+       const int                       vec4Alignment   = (int)sizeof(deUint32)*4;
 
        for (vector<int>::const_iterator sizeIter = bufferSizes.begin(); sizeIter != bufferSizes.end(); ++sizeIter)
-               totalSize += *sizeIter;
+       {
+               // Include enough space for alignment of individual blocks
+               totalSize += deRoundUp32(*sizeIter, vec4Alignment);
+       }
 
        storage.data.resize(totalSize);
 
@@ -1991,7 +1995,8 @@ void initRefDataStorage (const ShaderInterface& interface, const BufferLayout& l
 
                        storage.pointers[blockNdx] = getBlockDataPtr(layout, blockLayout, basePtr + curOffset, bufferSize);
 
-                       curOffset += bufferSize;
+                       // Ensure each new block starts fully aligned to avoid unaligned host accesses
+                       curOffset += deRoundUp32(bufferSize, vec4Alignment);
                }
        }
 }
index f0dda16..9fee748 100644 (file)
@@ -2176,6 +2176,8 @@ TestInstance* UniformBlockCase::createInstance (Context& context) const
 
 void UniformBlockCase::init (void)
 {
+       const int vec4Alignment = (int)sizeof(deUint32)*4;
+
        // Compute reference layout.
        computeStd140Layout(m_uniformLayout, m_interface);
 
@@ -2183,7 +2185,10 @@ void UniformBlockCase::init (void)
        {
                int totalSize = 0;
                for (std::vector<BlockLayoutEntry>::const_iterator blockIter = m_uniformLayout.blocks.begin(); blockIter != m_uniformLayout.blocks.end(); blockIter++)
-                       totalSize += blockIter->size;
+               {
+                       // Include enough space for alignment of individual blocks
+                       totalSize += deRoundUp32(blockIter->size, vec4Alignment);
+               }
                m_data.resize(totalSize);
 
                // Pointers for each block.
@@ -2191,7 +2196,9 @@ void UniformBlockCase::init (void)
                for (int blockNdx = 0; blockNdx < (int)m_uniformLayout.blocks.size(); blockNdx++)
                {
                        m_blockPointers[blockNdx] = &m_data[0] + curOffset;
-                       curOffset += m_uniformLayout.blocks[blockNdx].size;
+
+                       // Ensure each new block starts fully aligned to avoid unaligned host accesses
+                       curOffset += deRoundUp32(m_uniformLayout.blocks[blockNdx].size, vec4Alignment);
                }
        }