mv_inference: drop deprecated internal API 69/232869/1
authorInki Dae <inki.dae@samsung.com>
Fri, 8 May 2020 05:02:11 +0000 (14:02 +0900)
committerInki Dae <inki.dae@samsung.com>
Fri, 8 May 2020 05:02:11 +0000 (14:02 +0900)
This patch drops the deprecated internal APIs below:
 - mv_inference_configure_input_node_name_open
 - mv_inference_configure_output_node_names_open
 - and the relevant callback functions.

There are no remaining users of these APIs.

Change-Id: Id77b44c8ce781ee3b8fed2e44c8c6311180de2db
Signed-off-by: Inki Dae <inki.dae@samsung.com>
mv_inference/inference/include/Inference.h
mv_inference/inference/include/mv_inference_open.h
mv_inference/inference/src/Inference.cpp
mv_inference/inference/src/mv_inference_open.cpp

index 16e84b005b58c3887951a09e1f15d63fd4e2fcc4..f96184499cb2cfe58a8b4c047296e4e3cc0ee731 100755 (executable)
@@ -198,22 +198,6 @@ public:
         */
        void ConfigureThreshold(const double threshold);
 
-       /**
-        * @brief   Configure the input node name
-        *
-        * @since_tizen 5.5
-        * @remarks deprecated Replayced by ConfigureInputInfo
-        */
-       void ConfigureInputNodeName(const std::string nodeName);
-
-       /**
-        * @brief   Configure the output node names
-        *
-        * @since_tizen 5.5
-        * @remarks deprecated Replaced by ConfigureOutputInfo
-        */
-       void ConfigureOutputNodeNames(const std::vector<std::string> nodeNames);
-
        /**
         * @brief   Bind a backend engine
         * @details Use this function to bind a backend engine for the inference.
index 7284c92a6e8de4fbdb6c936b926b46ceb20ab51c..5477b3fbc825856ff9539a4522caef986d771333 100755 (executable)
@@ -228,28 +228,6 @@ int mv_inference_configure_confidence_threshold_open(mv_inference_h infer, mv_en
  */
 int mv_inference_configure_post_process_info_open(mv_inference_h infer, mv_engine_config_h engine_config);
 
-/**
- * @brief Configure the input node name to the inference handle
- *
- * @since_tizen 5.5
- * @remarks deprecated Replaced by mv_inference_configure_input_info_open
- *
- * @param [in] infer         The handle to the inference
- * @param [in] engine_config The handle to the configuration of
- *                           engine.
- *
- * @return @c 0 on success, otherwise a negative error value
- * @retval #MEDIA_VISION_ERROR_NONE Successful
- * @retval #MEDIA_VISION_ERROR_INVALID_PARAMETER Invalid parameter
- *                                               in @a engine_config
- * @retval #MEDIA_VISION_ERROR_INVALID_PATH Invalid path of model data
- *                                          in @a engine_config
- * @retval #MEDIA_VISION_ERROR_INVALID_DATA Invalid model data
- * @retval #MEDIA_VISION_ERROR_OUT_OF_MEMORY Out of memory
- * @retval #MEDIA_VISION_ERROR_NOT_SUPPORTED Not supported
- */
-int mv_inference_configure_input_node_name_open(mv_inference_h infer, mv_engine_config_h engine_config);
-
 /**
  * @brief Configure the set of output node names to the inference handle
  *
index 8820bbd5d8e41d66f36f0be672fa79ccbaf0e783..6af0ac9d799867093473a9e7de0787d41855b223 100755 (executable)
@@ -484,32 +484,6 @@ void Inference::ConfigureThreshold(const double threshold)
                                                                         MV_INFERENCE_CONFIDENCE_THRESHOLD_MIN);
 }
 
-void Inference::ConfigureInputNodeName(const std::string nodeName)
-{
-       mConfig.mInputLayerNames.push_back(nodeName);
-
-       inference_engine_layer_property property;
-
-       property.layer_names.push_back(nodeName);
-       int ret = mBackend->SetInputLayerProperty(property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               LOGE("Fail to set input layer property");
-       }
-}
-
-void Inference::ConfigureOutputNodeNames(const std::vector<std::string> nodeNames)
-{
-       mConfig.mOutputLayerNames = nodeNames;
-
-       inference_engine_layer_property property;
-
-       property.layer_names = nodeNames;
-       int ret = mBackend->SetOutputLayerProperty(property);
-       if (ret != INFERENCE_ENGINE_ERROR_NONE) {
-               LOGE("Fail to set output layer property");
-       }
-}
-
 void Inference::CleanupTensorBuffers(void)
 {
        LOGI("ENTER");
index 17c49be72582e2dec32cf8e192293595d805325f..45679e327643cb6c7373b48176964d16db6c924c 100755 (executable)
@@ -513,78 +513,6 @@ _ERROR_:
        return ret;
 }
 
-int mv_inference_configure_input_node_name_open(mv_inference_h infer, mv_engine_config_h engine_config)
-{
-       LOGI("ENTER");
-
-    Inference *pInfer = static_cast<Inference *>(infer);
-
-    int ret = MEDIA_VISION_ERROR_NONE;
-
-       char *node_name = NULL;
-
-    ret = mv_engine_config_get_string_attribute(engine_config,
-                                                                                         MV_INFERENCE_INPUT_NODE_NAME,
-                                                                                         &node_name);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               LOGE("Fail to get tensor width");
-               goto _ERROR_;
-       }
-
-       pInfer->ConfigureInputNodeName(std::string(node_name));
-
-_ERROR_:
-
-       if (node_name) {
-               free(node_name);
-               node_name = NULL;
-       }
-
-       LOGI("LEAVE");
-
-       return ret;
-}
-
-int mv_inference_configure_output_node_names_open(mv_inference_h infer, mv_engine_config_h engine_config)
-{
-       LOGI("ENTER");
-
-    Inference *pInfer = static_cast<Inference *>(infer);
-
-    int ret = MEDIA_VISION_ERROR_NONE;
-       int idx = 0;
-       char **node_names = NULL;
-       int size = 0;
-       std::vector<std::string> names;
-    ret = mv_engine_config_get_array_string_attribute(engine_config,
-                                                                                         MV_INFERENCE_OUTPUT_NODE_NAMES,
-                                                                                         &node_names,
-                                                                                         &size);
-       if (ret != MEDIA_VISION_ERROR_NONE) {
-               LOGE("Fail to get _output_node_names");
-               goto _ERROR_;
-       }
-
-       for (idx = 0 ; idx < size; ++idx)
-               names.push_back(std::string(node_names[idx]));
-
-       pInfer->ConfigureOutputNodeNames(names);
-
-_ERROR_:
-
-       if (node_names) {
-               for (idx = 0; idx < size; ++idx) {
-                       free(node_names[idx]);
-               }
-               free(node_names);
-               node_names = NULL;
-       }
-
-       LOGI("LEAVE");
-
-       return ret;
-}
-
 int mv_inference_configure_output_info_open(mv_inference_h infer, mv_engine_config_h engine_config)
 {
        LOGI("ENTER");