Revert "mv_inference: Get layer property only in required case"
author     Inki Dae <inki.dae@samsung.com>
Tue, 31 Mar 2020 08:14:50 +0000 (17:14 +0900)
committer  Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:19 +0000 (09:42 +0900)
This reverts commit 756a6d9c297b7c756a1090ded0287795b5f12665.
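For context, the change below restores the unconditional layer-property lookup in Inference::PrepareTenosrBuffers(): the property is fetched from the backend first, and tensor buffers are allocated in this function only when the backend did not allocate them internally. A minimal sketch of the resulting input-side flow, reconstructed from the hunks below; the member names (mBackend, mInputLayerProperty, mInputTensorBuffers) come from the diff, and the surrounding function body is assumed:

    // Sketch, not the verbatim source: the input layer property is always
    // queried from the backend engine.
    int ret = mBackend->GetInputLayerProperty(mInputLayerProperty);
    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
        LOGE("Fail to get input layer property from backend engine.");
        return ConvertEngineErrorToVisionError(ret);
    }

    // Buffers are allocated here only if the backend engine did not
    // allocate the input tensor buffers internally.
    if (mInputTensorBuffers.empty()) {
        for (size_t i = 0; i < mInputLayerProperty.tensor_infos.size(); ++i) {
            inference_engine_tensor_info tensor_info = mInputLayerProperty.tensor_infos[i];
            inference_engine_tensor_buffer tensor_buffer;
            // ... allocate tensor_buffer according to tensor_info (elided) ...
        }
    }

The output side of the same function mirrors this flow; see the sketch after the diff.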

mv_inference/inference/src/Inference.cpp

index a650964faa5102cdae9b8e93c14b90aa0b95d68e..057da62ad097866eb490d6ffff2b0e667ef6477c 100755 (executable)
@@ -559,16 +559,15 @@ int Inference::PrepareTenosrBuffers(void)
                return ConvertEngineErrorToVisionError(ret);
        }
 
+       ret = mBackend->GetInputLayerProperty(mInputLayerProperty);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Fail to get input layer property from backend engine.");
+               return ConvertEngineErrorToVisionError(ret);
+       }
+
        // If the backend engine isn't able to allocate input tensor buffers internally,
        // then allocate the buffers at here.
        if (mInputTensorBuffers.empty()) {
-               // Get input tensor information from backend to allocate input tensor buffers.
-               ret = mBackend->GetInputLayerProperty(mInputLayerProperty);
-               if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-                       LOGE("Fail to get input layer property from backend engine.");
-                       return ConvertEngineErrorToVisionError(ret);
-               }
-
                for (int i = 0; i < mInputLayerProperty.tensor_infos.size(); ++i) {
                        inference_engine_tensor_info tensor_info = mInputLayerProperty.tensor_infos[i];
                        inference_engine_tensor_buffer tensor_buffer;
@@ -604,16 +603,15 @@ int Inference::PrepareTenosrBuffers(void)
                return ConvertEngineErrorToVisionError(ret);
        }
 
+       ret = mBackend->GetOutputLayerProperty(mOutputLayerProperty);
+       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+               LOGE("Fail to get output layer property from backend engine.");
+               return ConvertEngineErrorToVisionError(ret);
+       }
+
        // If the backend engine isn't able to allocate output tensor buffers internally,
        // then allocate the buffers at here.
        if (mOutputTensorBuffers.empty()) {
-               // Get output tensor information from backend to allocate output tensor buffers.
-               ret = mBackend->GetOutputLayerProperty(mOutputLayerProperty);
-               if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-                       LOGE("Fail to get output layer property from backend engine.");
-                       return ConvertEngineErrorToVisionError(ret);
-               }
-
                for (int i = 0; i < mOutputLayerProperty.tensor_infos.size(); ++i) {
                        inference_engine_tensor_info tensor_info = mOutputLayerProperty.tensor_infos[i];
                        inference_engine_tensor_buffer tensor_buffer;
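
The output path takes the same shape after the revert; a minimal sketch, again using only the members visible in the hunk above (mBackend, mOutputLayerProperty, mOutputTensorBuffers) and eliding the actual buffer allocation:

    // Sketch of the post-revert output path: the output layer property is
    // fetched unconditionally ...
    int ret = mBackend->GetOutputLayerProperty(mOutputLayerProperty);
    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
        LOGE("Fail to get output layer property from backend engine.");
        return ConvertEngineErrorToVisionError(ret);
    }

    // ... while allocation still happens only when the backend did not
    // provide the output tensor buffers itself.
    if (mOutputTensorBuffers.empty()) {
        for (size_t i = 0; i < mOutputLayerProperty.tensor_infos.size(); ++i) {
            inference_engine_tensor_info tensor_info = mOutputLayerProperty.tensor_infos[i];
            inference_engine_tensor_buffer tensor_buffer;
            // ... allocate tensor_buffer according to tensor_info (elided) ...
        }
    }

The likely motivation, not stated in the commit message and therefore an assumption, is that later stages read mInputLayerProperty and mOutputLayerProperty even when the backend allocates the tensor buffers itself, so the properties must be populated in every case.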