Drop SetInputTensorParam/SetOutputTensorParam callbacks
author    Inki Dae <inki.dae@samsung.com>
Wed, 5 Feb 2020 09:31:21 +0000 (18:31 +0900)
committer Inki Dae <inki.dae@samsung.com>
Wed, 5 Feb 2020 09:31:21 +0000 (18:31 +0900)
Change-Id: I1ee785034aa06b19fdba260c87f9081a7d312952
Signed-off-by: Inki Dae <inki.dae@samsung.com>
src/inference_engine_armnn.cpp
src/inference_engine_armnn_private.h

src/inference_engine_armnn.cpp
index eb594c59371193c41243d197688a82f3c8808e6d..956c1e071f184b86e39e916380d643ec44cf34fd 100644
@@ -102,13 +102,6 @@ void InferenceARMNN::ReleaseTensorBuffer(armnn::DataType type, void *tensor_buff
     };
 }
 
-int InferenceARMNN::SetInputTensorParam()
-{
-    LOGI("ENTER");
-    LOGI("LEAVE");
-    return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
-}
-
 int InferenceARMNN::SetInputTensorParamNode(std::string node)
 {
     LOGI("ENTER");
@@ -120,14 +113,6 @@ int InferenceARMNN::SetInputTensorParamNode(std::string node)
     return INFERENCE_ENGINE_ERROR_NONE;
 }
 
-int InferenceARMNN::SetOutputTensorParam()
-{
-    LOGI("ENTER");
-    LOGI("LEAVE");
-
-    return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
-}
-
 int InferenceARMNN::SetOutputTensorParamNodes(std::vector<std::string> nodes)
 {
     LOGI("ENTER");
src/inference_engine_armnn_private.h
index 4c3ab14561cd699616275ace4f4bea3069b89957..e0286ed8025d24bd66df6d48558318aa3cdecc67 100644
@@ -48,13 +48,9 @@ public:
     ~InferenceARMNN();
 
     // InputTensor
-    int SetInputTensorParam() override;
-
     int SetInputTensorParamNode(std::string node = "input") override;
 
     // Output Tensor Params
-    int SetOutputTensorParam() override;
-
     int SetOutputTensorParamNodes(std::vector<std::string> nodes) override;
 
     int SetTargetDevice(inference_target_type_e type) override;
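
After this change, model input and output tensors are described by node name only; the removed SetInputTensorParam()/SetOutputTensorParam() entry points were stubs that always returned INFERENCE_ENGINE_ERROR_NOT_SUPPORTED, so no caller could have relied on them. Below is a minimal usage sketch, not part of the commit: it assumes the backend class is used directly through inference_engine_armnn_private.h, the ConfigureBackend helper and the "output" node name are hypothetical, INFERENCE_TARGET_CPU is an assumed inference_target_type_e enumerator, and namespace qualification is omitted.

    #include <string>
    #include <vector>

    #include "inference_engine_armnn_private.h"

    // Hypothetical helper showing the setters that remain after this commit.
    int ConfigureBackend(InferenceARMNN &engine)
    {
        // Input and output tensors are now selected by node name only.
        int ret = engine.SetInputTensorParamNode("input");
        if (ret != INFERENCE_ENGINE_ERROR_NONE)
            return ret;

        // "output" is a placeholder node name for illustration.
        ret = engine.SetOutputTensorParamNodes({ "output" });
        if (ret != INFERENCE_ENGINE_ERROR_NONE)
            return ret;

        // INFERENCE_TARGET_CPU is assumed here; any valid
        // inference_target_type_e value would do.
        return engine.SetTargetDevice(INFERENCE_TARGET_CPU);
    }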