Consider more than one inference target device
author	Inki Dae <inki.dae@samsung.com>
Fri, 7 Feb 2020 06:42:03 +0000 (15:42 +0900)
committer	Inki Dae <inki.dae@samsung.com>
Tue, 14 Apr 2020 00:42:53 +0000 (09:42 +0900)
Change-Id: I3183273117f73a36fe87d8dd6ca00c5d9720a02a
Signed-off-by: Inki Dae <inki.dae@samsung.com>
common/inference_engine_common_impl.cpp [changed mode: 0644->0755]
include/inference_engine_common.h [changed mode: 0644->0755]
include/inference_engine_common_impl.h [changed mode: 0644->0755]
include/inference_engine_type.h
include/inference_engine_vision_impl.h [changed mode: 0644->0755]
vision/inference_engine_vision_impl.cpp [changed mode: 0644->0755]
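
The change below replaces SetTargetDevice(inference_target_type_e type) with SetTargetDevices(int types), so a caller can request more than one inference device at once by OR'ing inference_target_type_e flags into a single int. A minimal caller sketch follows; it is not part of this commit, it assumes the enum values are usable as bit flags and that an INFERENCE_TARGET_GPU value exists alongside the INFERENCE_TARGET_CPU value mentioned in the removed comment, and namespace qualification is omitted for brevity.

    // Hypothetical caller sketch (not part of this commit): combine several
    // inference_target_type_e flags into the int accepted by SetTargetDevices().
    #include "inference_engine_common_impl.h"

    int ConfigureTargets(InferenceEngineCommon &engine)
    {
        // INFERENCE_TARGET_GPU is assumed; only INFERENCE_TARGET_CPU appears in this diff.
        int types = INFERENCE_TARGET_CPU | INFERENCE_TARGET_GPU;

        int ret = engine.SetTargetDevices(types);
        if (ret != INFERENCE_ENGINE_ERROR_NONE)
            return ret;  // same error-code convention as the library

        return INFERENCE_ENGINE_ERROR_NONE;
    }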

old mode 100644 (file)
new mode 100755 (executable)
index 4b98b5e..7286efb
@@ -153,9 +153,9 @@ int InferenceEngineCommon::SetOutputTensorParamNodes(std::vector<std::string> no
     return ret;
 }
 
-int InferenceEngineCommon::SetTargetDevice(inference_target_type_e type)
+int InferenceEngineCommon::SetTargetDevices(int types)
 {
-    int ret = engine->SetTargetDevice(type);
+    int ret = engine->SetTargetDevices(types);
     if (ret != INFERENCE_ENGINE_ERROR_NONE)
         LOGE("Fail to SetTargetDevice");
 
old mode 100644 (file)
new mode 100755 (executable)
index d39e57f..e446b4f
@@ -45,12 +45,12 @@ public:
     virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
 
     /**
-     * @brief Set target device.
+     * @brief Set target devices.
      * @details See #inference_target_type_e
      *
      * @since_tizen 5.5
      */
-    virtual int SetTargetDevice(inference_target_type_e type) = 0;
+    virtual int SetTargetDevices(int types) = 0;
 
     /**
      * @brief Load model data with user-given model information.
old mode 100644 (file)
new mode 100755 (executable)
index c6910d8..cbd0fb0
@@ -60,7 +60,7 @@ public:
     void UnbindBackend(void);
 
     /**
-     * @brief Set an input node name. Deprecated.
+     * @brief Set input node name. Deprecated.
      *
      * @since_tizen 5.5
      */
@@ -74,12 +74,12 @@ public:
     int SetOutputTensorParamNodes(std::vector<std::string> nodes);
 
     /**
-     * @brief Set target device.
+     * @brief Set target devices.
      * @details See #inference_target_type_e
      *
      * @since_tizen 5.5
      */
-    int SetTargetDevice(inference_target_type_e type);
+    int SetTargetDevices(int types);
 
     /**
      * @brief Load model data with user-given model information.
index 2ea35386182c76dc8ff7bf18e527720f48f9d6f9..c2af1c70cd419ede422cc2d75be1a6657276c372 100644 (file)
@@ -110,13 +110,11 @@ typedef struct _tensor_t {
  * This structure should be configured before InferenceEngineCommon object is
  * created and then passed to InferenceEngineCommon's constructor.
  *
- * If target_devices member is defined by user then INFERENCE_TARGET_CPU will be set as a default device.
- *
  * @since_tizen 6.0
  */
 typedef struct _inference_engine_config {
     std::string backend_name; /**< a backend name which could be one among supported backends(tflite, opencv, armnn, dldt) */
-    inference_target_type_e target_devices; /**< which device or devices to be targeted for inference. */
+    int target_devices; /**< which device or devices to be targeted for inference. */
     // TODO.
 } inference_engine_config;
 
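Since target_devices in inference_engine_config is now a plain int, a combined device mask can also be carried through the config that is passed to InferenceEngineCommon's constructor. A minimal sketch, again assuming an INFERENCE_TARGET_GPU flag exists alongside INFERENCE_TARGET_CPU:

    // Hypothetical config sketch: several target flags OR'd into target_devices.
    inference_engine_config config;
    config.backend_name = "armnn";  // one of the supported backends (tflite, opencv, armnn, dldt)
    config.target_devices = INFERENCE_TARGET_CPU | INFERENCE_TARGET_GPU;  // GPU flag assumed
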
old mode 100644 (file)
new mode 100755 (executable)
index 8f52180..f231fe4
@@ -69,7 +69,7 @@ public:
     ~InferenceEngineVision();
 
     /**
-     * @brief Set an input node name. Deprecated.
+     * @brief Set input node name. Deprecated.
      *
      * @since_tizen 5.5
      */
@@ -139,12 +139,12 @@ public:
     int SetOutputTensorProperty(inference_engine_layer_property &property);
 
     /**
-     * @brief Set target device.
+     * @brief Set target devices.
      * @details See #inference_target_type_e
      *
      * @since_tizen 5.5
      */
-    int SetTargetDevice(inference_target_type_e type);
+    int SetTargetDevices(int types);
 
     /**
      * @brief Load model data with user-given model information.
old mode 100644 (file)
new mode 100755 (executable)
index b1ba62a..04b75fc
@@ -213,9 +213,9 @@ int InferenceEngineVision::SetOutputTensorProperty(inference_engine_layer_proper
     return ret;
 }
 
-int InferenceEngineVision::SetTargetDevice(inference_target_type_e type)
+int InferenceEngineVision::SetTargetDevices(int types)
 {
-    int ret = mCommonEngine->SetTargetDevice(type);
+    int ret = mCommonEngine->SetTargetDevices(types);
     if (ret != INFERENCE_ENGINE_ERROR_NONE)
         LOGE("Fail to SetTargetDevice");