Verify tensor buffer and property values
author Inki Dae <inki.dae@samsung.com>
Thu, 2 Apr 2020 06:59:15 +0000 (15:59 +0900)
committer Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
This patch verifies that the given tensor buffer vector and
property values contain valid data.

Change-Id: I01a05f5530e879cc320d1d3ef38d67e342e3ac87
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_common_impl.h
src/inference_engine_common_impl.cpp
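
For illustration, here is a hedged caller-side sketch (not part of this commit) of a tensor buffer that passes the new checks. Field and enum names follow the hunks below; the header name and TENSOR_DATA_TYPE_FLOAT32 (assumed to lie inside the accepted FLOAT16..UINT32 enum range) are assumptions.

    #include <vector>
    #include "inference_engine_type.h"  // assumed header defining the types below

    // Hypothetical helper; only the fields checked by CheckTensorBuffers() matter here.
    static void prepare_input(std::vector<inference_engine_tensor_buffer> &buffers,
                              void *data, size_t size)
    {
        inference_engine_tensor_buffer buffer = {};
        buffer.buffer = data;                         // must not be nullptr
        buffer.size = size;                           // must not be 0
        buffer.data_type = TENSOR_DATA_TYPE_FLOAT32;  // assumed in-range data type
        buffers.push_back(buffer);
    }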

index 2a43f06..ccd9cb3 100755 (executable)
@@ -246,6 +246,9 @@ public:
     int SetBackendEngine(inference_backend_type_e backend);
 
 private:
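+       // Internal sanity checks for tensor buffers and layer properties
+       // exchanged between the caller and the backend engine.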
+       int CheckTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers);
+       int CheckLayerProperty(inference_engine_layer_property &property);
+
     std::string mBackendLibName;
     inference_backend_type_e mSelectedBackendEngine;
 
index 2b0d936..d29ba67 100755 (executable)
@@ -38,6 +38,7 @@ extern "C" {
 namespace fs = std::experimental::filesystem;
 namespace InferenceEngineInterface {
 namespace Common {
+
 InferenceEngineCommon::InferenceEngineCommon() :
     mSelectedBackendEngine(INFERENCE_BACKEND_NONE),
     mBackendModule(nullptr),
@@ -98,6 +99,62 @@ InferenceEngineCommon::~InferenceEngineCommon()
     LOGW("LEAVE");
 }
 
+int InferenceEngineCommon::CheckTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
+{
+       if (buffers.size() == 0) {
+               LOGE("tensor buffer vector is empty.");
+               return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+       }
+
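+       // Every buffer must carry a non-null pointer, a non-zero size and
+       // a data type within the supported range.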
+       for (std::vector<inference_engine_tensor_buffer>::const_iterator iter = buffers.begin(); iter != buffers.end(); ++iter) {
+               const inference_engine_tensor_buffer &tensor_buffer = *iter;
+               if (tensor_buffer.buffer == nullptr || tensor_buffer.size == 0) {
+                       LOGE("tensor buffer pointer is null or tensor buffer size is 0.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
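+               // Assumes TENSOR_DATA_TYPE_FLOAT16 and TENSOR_DATA_TYPE_UINT32 are the
+               // lowest and highest valid values of the tensor data type enum.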
+               if (tensor_buffer.data_type < TENSOR_DATA_TYPE_FLOAT16 || tensor_buffer.data_type > TENSOR_DATA_TYPE_UINT32) {
+                       LOGE("tensor data type is invalid.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+       }
+
+       return INFERENCE_ENGINE_ERROR_NONE;
+}
+
+int InferenceEngineCommon::CheckLayerProperty(inference_engine_layer_property &property)
+{
+       // Verify tensor info values.
+       std::vector<inference_engine_tensor_info>::const_iterator info_iter;
+       for (info_iter = property.tensor_infos.begin(); info_iter != property.tensor_infos.end(); ++info_iter) {
+               const inference_engine_tensor_info &tensor_info = *info_iter;
+               if (tensor_info.shape.size() == 0 || tensor_info.size == 0) {
+                       LOGE("shape size of tensor info or size of it is 0.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               if (tensor_info.data_type < TENSOR_DATA_TYPE_FLOAT16 || tensor_info.data_type > TENSOR_DATA_TYPE_UINT32) {
+                       LOGE("tensor data type is invalid.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+
+               // TODO. we may need to check shape type also.
+       }
+
+       // Verify layer names.
+       std::vector<std::string>::const_iterator name_iter;
+       for (name_iter = property.layer_names.begin(); name_iter != property.layer_names.end(); ++name_iter) {
+               const std::string &name = *name_iter;
+
+               if (name.length() == 0) {
+                       LOGE("layer name is invalid.");
+                       return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+               }
+       }
+
+       return INFERENCE_ENGINE_ERROR_NONE;
+}
+
 int InferenceEngineCommon::EnableProfiler(bool enable)
 {
        if (enable != true && enable != false) {
@@ -235,31 +292,93 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_
 
 int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
-    return mBackendHandle->GetInputTensorBuffers(buffers);
+       int ret = mBackendHandle->GetInputTensorBuffers(buffers);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Failed to get input tensor buffers.");
+               return ret;
+       }
+
+       // If the backend engine doesn't provide tensor buffers then just return.
+       // In this case, the InferenceEngineCommon framework will allocate the tensor buffers.
+       if (buffers.size() == 0) {
+               return ret;
+       }
+
+       return CheckTensorBuffers(buffers);
 }
 
 int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
-    return mBackendHandle->GetOutputTensorBuffers(buffers);
+       int ret = mBackendHandle->GetOutputTensorBuffers(buffers);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Failed to get output tensor buffers.");
+               return ret;
+       }
+
+       // If the backend engine doesn't provide tensor buffers then just return.
+       // In this case, the InferenceEngineCommon framework will allocate the tensor buffers.
+       if (buffers.size() == 0) {
+               return ret;
+       }
+
+       return CheckTensorBuffers(buffers);
 }
 
 int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
 {
-    return mBackendHandle->GetInputLayerProperty(property);
+       int ret = mBackendHandle->GetInputLayerProperty(property);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Failed to get input layer property.");
+               return ret;
+       }
+
+       // If the backend engine doesn't provide input layer property information
+       // then return an error. In this case, the user has to provide it manually.
+       if (property.layer_names.size() == 0 && property.tensor_infos.size() == 0) {
+               LOGI("backend doesn't provide input layer property.");
+               return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+       }
+
+       return CheckLayerProperty(property);
 }
 
 int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
 {
-    return mBackendHandle->GetOutputLayerProperty(property);
+       int ret = mBackendHandle->GetOutputLayerProperty(property);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Failed to get output layer property.");
+               return ret;
+       }
+
+       // If the backend engine doesn't provide output layer property information
+       // then return an error. In this case, the user has to provide it manually.
+       if (property.layer_names.size() == 0 && property.tensor_infos.size() == 0) {
+               LOGI("backend doesn't provide output layer property.");
+               return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+       }
+
+       return CheckLayerProperty(property);
 }
 
 int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
 {
+       int ret = CheckLayerProperty(property);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Given input layer property is invalid.");
+               return ret;
+       }
+
     return mBackendHandle->SetInputLayerProperty(property);
 }
 
 int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
 {
+       int ret = CheckLayerProperty(property);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Given output layer property is invalid.");
+               return ret;
+       }
+
     return mBackendHandle->SetOutputLayerProperty(property);
 }
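
For context, a hedged usage sketch (not part of the patch) of how the new validation surfaces to a caller. The class, field, and error-code names follow the hunks above; the helper function and the assumption that the header can be included directly are illustrative.

    #include "inference_engine_common_impl.h"

    // Hypothetical caller; "engine" is a caller-owned InferenceEngineCommon instance.
    int set_property(InferenceEngineInterface::Common::InferenceEngineCommon *engine)
    {
        inference_engine_layer_property property;
        property.layer_names.push_back("");  // empty name -> CheckLayerProperty() fails

        int ret = engine->SetInputLayerProperty(property);
        // ret is INFERENCE_ENGINE_ERROR_INVALID_PARAMETER; the backend setter is
        // never reached, so an invalid property can't propagate into the backend.
        return ret;
    }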