radeonsi: save the enable_nir option in the shader cache correctly
author: Marek Olšák <marek.olsak@amd.com>
Fri, 12 Jul 2019 19:42:44 +0000 (15:42 -0400)
committer: Marek Olšák <marek.olsak@amd.com>
Sat, 20 Jul 2019 00:16:35 +0000 (20:16 -0400)
Reviewed-by: Pierre-Eric Pelloux-Prayer <pierre-eric.pelloux-prayer@amd.com>
Acked-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
src/gallium/drivers/radeonsi/si_pipe.c

index 8bd9828..d13b008 100644 (file)
@@ -849,8 +849,12 @@ static void si_disk_cache_create(struct si_screen *sscreen)
                           DBG(SI_SCHED) |                      \
                           DBG(GISEL) |                         \
                           DBG(UNSAFE_MATH))
-       uint64_t shader_debug_flags = sscreen->debug_flags &
-               ALL_FLAGS;
+       uint64_t shader_debug_flags = sscreen->debug_flags & ALL_FLAGS;
+
+       if (sscreen->options.enable_nir) {
+               STATIC_ASSERT((ALL_FLAGS & (1u << 31)) == 0);
+               shader_debug_flags |= 1u << 31;
+       }
 
        /* Add the high bits of 32-bit addresses, which affects
         * how 32-bit addresses are expanded to 64 bits.
@@ -859,9 +863,6 @@ static void si_disk_cache_create(struct si_screen *sscreen)
        assert((int16_t)sscreen->info.address32_hi == (int32_t)sscreen->info.address32_hi);
        shader_debug_flags |= (uint64_t)(sscreen->info.address32_hi & 0xffff) << 32;
 
-       if (sscreen->options.enable_nir)
-               shader_debug_flags |= 1ull << 48;
-
        sscreen->disk_shader_cache =
                disk_cache_create(sscreen->info.name,
                                  cache_id,