The GetInferenceResult function isn't used anymore because the upper
framework (the Inference class in the case of Tizen) is already aware
of the output tensor buffers, so drop it.
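
With this change the caller consumes inference results directly from
the output tensor buffers it passes to Run(), instead of fetching them
afterwards. A minimal sketch of the new flow (the 'buffer' member
access below is an assumption for illustration; the exact
inference_engine_tensor_buffer fields may differ):

    std::vector<inference_engine_tensor_buffer> input_buffers;
    std::vector<inference_engine_tensor_buffer> output_buffers;

    // The caller allocates and registers the tensor buffers up front
    // (setup elided), then runs inference on them in place.
    int ret = engine->Run(input_buffers, output_buffers);
    if (ret != INFERENCE_ENGINE_ERROR_NONE)
        return ret;

    // Results are read straight from the caller-owned output buffers,
    // so no separate GetInferenceResult() call is needed.
    // NOTE: treating 'buffer' as the raw data pointer is an assumption.
    float *scores = static_cast<float *>(output_buffers[0].buffer);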
Change-Id: I16a65f4cb1c63cafe3dedf252481210978bb1baf
Signed-off-by: Inki Dae <inki.dae@samsung.com>
return engine->Run(input_buffers, output_buffers);
}
-int InferenceEngineCommon::GetInferenceResult(tensor_t& results)
-{
- LOGE("ENTER");
- int ret = engine->GetInferenceResult(results);
-
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to GetInferenceResult");
-
- LOGE("LEAVE");
- return ret;
-}
-
int InferenceEngineCommon::SetLibraryPath(std::string path)
{
LOGE("ENTER");
*/
virtual int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
std::vector<inference_engine_tensor_buffer> &output_buffers) = 0;
-
- /**
- * @brief Get inference results. Deprecated.
- *
- * @since_tizen 5.5
- */
- virtual int GetInferenceResult(tensor_t& results) = 0;
};
typedef void destroy_t(IInferenceEngineCommon*);
*
* @since_tizen 5.5
*/
- int GetInferenceResult(tensor_t& results);
int SetLibraryPath(std::string path);
int SetBackendEngine(inference_backend_type_e backend);