Drop GetInferenceResult function (branch: sandbox/inki.dae/working)
author: Inki Dae <inki.dae@samsung.com>
Mon, 24 Feb 2020 05:47:07 +0000 (14:47 +0900)
committer: Inki Dae <inki.dae@samsung.com>
Mon, 24 Feb 2020 05:47:07 +0000 (14:47 +0900)
Change-Id: Iaecaebe2eb1219a4903c06ff06dde3eb7e69e683
Signed-off-by: Inki Dae <inki.dae@samsung.com>
src/inference_engine_armnn.cpp
src/inference_engine_armnn_private.h

index 0c950e7e6d717219fcaaae0eb2f97849e750793f..a5c78024b35b4795d70da89ef45ae7aea9551134 100644 (file)
@@ -474,15 +474,6 @@ int InferenceARMNN::Run(std::vector<inference_engine_tensor_buffer> &input_buffe
     return INFERENCE_ENGINE_ERROR_NONE;
 }
 
-int InferenceARMNN::GetInferenceResult(tensor_t& results)
-{
-    LOGI("ENTER");
-
-    LOGI("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NONE;
-}
-
 extern "C"
 {
 class IInferenceEngineCommon* EngineCommonInit(void)
index 03e7295f4c87a4d6e43cab8ea6a219a2ad75ac5d..1ee4d6997d2638a4f0abe8584d5dfcfaba1cc302 100644 (file)
@@ -68,8 +68,6 @@ public:
     int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
             std::vector<inference_engine_tensor_buffer> &output_buffers) override;
 
-    int GetInferenceResult(tensor_t& results) override;
-
 private:
     int CreateTfLiteNetwork(std::string model_path);
     int CreateNetwork(std::vector<std::string> model_paths, inference_model_format_e model_format);