From: Inki Dae
Date: Tue, 31 Mar 2020 08:14:50 +0000 (+0900)
Subject: Revert "mv_inference: Get layer propery only in required case"
X-Git-Tag: submit/tizen/20200423.063253~11
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=2d574330bcad788c019a76ba514fc86d8bc5d22f;p=platform%2Fcore%2Fapi%2Fmediavision.git

Revert "mv_inference: Get layer propery only in required case"

This reverts commit 756a6d9c297b7c756a1090ded0287795b5f12665.
---

diff --git a/mv_inference/inference/src/Inference.cpp b/mv_inference/inference/src/Inference.cpp
index a650964f..057da62a 100755
--- a/mv_inference/inference/src/Inference.cpp
+++ b/mv_inference/inference/src/Inference.cpp
@@ -559,16 +559,15 @@ int Inference::PrepareTenosrBuffers(void)
 		return ConvertEngineErrorToVisionError(ret);
 	}
 
+	ret = mBackend->GetInputLayerProperty(mInputLayerProperty);
+	if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+		LOGE("Fail to get input layer property from backend engine.");
+		return ConvertEngineErrorToVisionError(ret);
+	}
+
 	// If the backend engine isn't able to allocate input tensor buffers internally,
 	// then allocate the buffers at here.
 	if (mInputTensorBuffers.empty()) {
-		// Get input tensor information from backend to allocate input tensor buffers.
-		ret = mBackend->GetInputLayerProperty(mInputLayerProperty);
-		if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-			LOGE("Fail to get input layer property from backend engine.");
-			return ConvertEngineErrorToVisionError(ret);
-		}
-
 		for (int i = 0; i < mInputLayerProperty.tensor_infos.size(); ++i) {
 			inference_engine_tensor_info tensor_info = mInputLayerProperty.tensor_infos[i];
 			inference_engine_tensor_buffer tensor_buffer;
@@ -604,16 +603,15 @@ int Inference::PrepareTenosrBuffers(void)
 		return ConvertEngineErrorToVisionError(ret);
 	}
 
+	ret = mBackend->GetOutputLayerProperty(mOutputLayerProperty);
+	if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+		LOGE("Fail to get output layer property from backend engine.");
+		return ConvertEngineErrorToVisionError(ret);
+	}
+
 	// If the backend engine isn't able to allocate output tensor buffers internally,
 	// then allocate the buffers at here.
 	if (mOutputTensorBuffers.empty()) {
-		// Get output tensor information from backend to allocate output tensor buffers.
-		ret = mBackend->GetOutputLayerProperty(mOutputLayerProperty);
-		if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-			LOGE("Fail to get output layer property from backend engine.");
-			return ConvertEngineErrorToVisionError(ret);
-		}
-
 		for (int i = 0; i < mOutputLayerProperty.tensor_infos.size(); ++i) {
 			inference_engine_tensor_info tensor_info = mOutputLayerProperty.tensor_infos[i];
 			inference_engine_tensor_buffer tensor_buffer;
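
The net effect of the revert, as the diff shows, is that the input and output layer properties are queried from the backend unconditionally in PrepareTenosrBuffers(), instead of only when the corresponding tensor-buffer vector is empty; buffer allocation itself remains conditional. Below is a minimal, self-contained sketch of that restored flow for the input side. The stub types (TensorInfo, LayerProperty, TensorBuffer, MockBackend) and the function PrepareInputBuffers are hypothetical stand-ins used only to illustrate the control flow; they are not part of the mediavision or inference-engine API.

#include <cstdio>
#include <vector>

// Hypothetical stand-ins for the inference_engine_* types used in the diff.
struct TensorInfo { size_t size; };
struct LayerProperty { std::vector<TensorInfo> tensor_infos; };
struct TensorBuffer { std::vector<char> data; };

enum { INFERENCE_ENGINE_ERROR_NONE = 0 };

struct MockBackend {
	// Pretends the backend reports one input tensor of a fixed size.
	int GetInputLayerProperty(LayerProperty &prop) {
		prop.tensor_infos = { { 224 * 224 * 3 } };
		return INFERENCE_ENGINE_ERROR_NONE;
	}
};

int PrepareInputBuffers(MockBackend &backend, LayerProperty &property,
			std::vector<TensorBuffer> &buffers)
{
	// After the revert, the layer property is always fetched here ...
	int ret = backend.GetInputLayerProperty(property);
	if (ret != INFERENCE_ENGINE_ERROR_NONE) {
		std::fprintf(stderr, "Fail to get input layer property from backend engine.\n");
		return ret;
	}

	// ... and buffers are allocated only if the backend did not provide them.
	if (buffers.empty()) {
		for (const TensorInfo &info : property.tensor_infos)
			buffers.push_back(TensorBuffer{ std::vector<char>(info.size) });
	}

	return INFERENCE_ENGINE_ERROR_NONE;
}

int main()
{
	MockBackend backend;
	LayerProperty property;
	std::vector<TensorBuffer> buffers;

	if (PrepareInputBuffers(backend, property, buffers) == INFERENCE_ENGINE_ERROR_NONE)
		std::printf("allocated %zu input buffer(s)\n", buffers.size());
	return 0;
}

With the property fetched up front, the property member is populated even on the code path where the backend allocated its own buffers, which is the behavior the reverted commit had removed.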