layers/loader: Fix string validation error
author Mark Lobodzinski <mark@lunarg.com>
Fri, 12 Feb 2016 18:30:14 +0000 (11:30 -0700)
committer Mark Lobodzinski <mark@lunarg.com>
Fri, 12 Feb 2016 18:32:35 +0000 (11:32 -0700)
layers/vk_layer_utils.cpp
loader/loader.c

index f849aa4..22fb52f 100644 (file)
@@ -616,7 +616,7 @@ VkStringErrorFlags vk_string_validate(const int max_length, const char *utf8)
     {
         if (utf8[i] == 0) {
             break;
-        } else if ((utf8[i] > 0x20) && (utf8[i] < 0x7f)) {
+        } else if ((utf8[i] >= 0x20) && (utf8[i] < 0x7f)) {
             num_char_bytes = 0;
         } else if ((utf8[i] & UTF8_ONE_BYTE_MASK)   == UTF8_ONE_BYTE_CODE) {
             num_char_bytes = 1;
index f4db822..060a05a 100644 (file)
@@ -4211,7 +4211,7 @@ VkStringErrorFlags vk_string_validate(const int max_length, const char *utf8)
     {
         if (utf8[i] == 0) {
             break;
-        } else if ((utf8[i] > 0x20) && (utf8[i] < 0x7f)) {
+        } else if ((utf8[i] >= 0x20) && (utf8[i] < 0x7f)) {
             num_char_bytes = 0;
         } else if ((utf8[i] & UTF8_ONE_BYTE_MASK)   == UTF8_ONE_BYTE_CODE) {
             num_char_bytes = 1;