Change the parameter of SetOutputTensorParamNodes() from std::string to std::vector...
author Tae-Young Chung <ty83.chung@samsung.com>
Wed, 24 Jul 2019 07:55:31 +0000 (16:55 +0900)
committer Tae-Young Chung <ty83.chung@samsung.com>
Wed, 24 Jul 2019 07:59:40 +0000 (16:59 +0900)
Multiple output tensors are now supported.
You can set the output tensor names to retrieve multiple output tensors.
Note that batching is not supported by the current inference, so the input tensor node should remain a single string.

Change-Id: I01a3a8fa7f15ce329fbab187f6d72f583f116c2a
Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
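
For reference, a minimal caller-side sketch of the new API. The node names
are hypothetical and the surrounding setup is illustrative; only
SetOutputTensorParamNodes() itself comes from this commit, and
InferenceEngineVision is the class extended in
vision/inference_engine_vision_impl.cpp.

    #include <string>
    #include <vector>

    #include "inference_engine_vision_impl.h"
    // NOTE: a using-declaration for the library's namespace may be needed.

    int ConfigureOutputNodes(InferenceEngineVision &engine)
    {
        // One entry per desired output tensor. Batching is still
        // unsupported, so the input node (set via SetInputTensorParamNode)
        // remains a single string.
        std::vector<std::string> nodes = {
            "detection_boxes",   // hypothetical output node name
            "detection_scores",  // hypothetical output node name
        };

        return engine.SetOutputTensorParamNodes(nodes);
    }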
common/inference_engine_common_impl.cpp
include/inference_engine_common.h
include/inference_engine_common_impl.h
include/inference_engine_vision.h
include/inference_engine_vision_impl.h
packaging/inference-engine-interface.spec
vision/inference_engine_vision_impl.cpp

index 758bd5a3414f1555bf6dcb13b1b1c51adc44e947..ccd43351cadc184b47d7066db883ea54b46b513e 100644 (file)
@@ -109,9 +109,9 @@ int InferenceEngineCommon::SetOutputTensorParam()
     return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
 }
 
-int InferenceEngineCommon::SetOutputTensorParamNode(std::string node)
+int InferenceEngineCommon::SetOutputTensorParamNodes(std::vector<std::string> nodes)
 {
-    int ret = engine->SetOutPutTensorParamNodes(node);
+    int ret = engine->SetOutputTensorParamNodes(nodes);
     if (ret != INFERENCE_ENGINE_ERROR_NONE)
         LOGE("Fail to SetOutputTensorParamNodes");
 
index 6dc02ff9b2987b27ce16af2901c86d0f4486930b..985e8d96f434cd8ffa3f61e4b679b9055dcd6c13 100644 (file)
@@ -39,7 +39,7 @@ public:
     // OutputTensor
     virtual int SetOutputTensorParam() = 0;
 
-    virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+    virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
 
     virtual int SetTargetDevice(inference_target_type_e type) = 0;
 
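Because the pure virtual in inference_engine_common.h is renamed (fixing the
"OutPut" typo) and retyped, every backend plugin has to update its override.
A minimal standalone sketch of what a conforming backend now provides; the
stand-in interface and the MyBackendEngine class are illustrative, not part
of this commit:

    #include <string>
    #include <utility>
    #include <vector>

    // Stand-in for the abstract interface in inference_engine_common.h;
    // only the member touched by this commit is reproduced here.
    class IInferenceEngineCommon {
    public:
        virtual ~IInferenceEngineCommon() = default;
        virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
    };

    constexpr int INFERENCE_ENGINE_ERROR_NONE = 0;  // stand-in error code

    // Hypothetical backend showing the override a plugin must now supply.
    class MyBackendEngine : public IInferenceEngineCommon {
    public:
        int SetOutputTensorParamNodes(std::vector<std::string> nodes) override
        {
            mOutputNodeNames = std::move(nodes);  // keep every requested node
            return INFERENCE_ENGINE_ERROR_NONE;
        }

    private:
        std::vector<std::string> mOutputNodeNames;
    };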
index f94596ef3a3dffe0ac47fb0082afe873445cfa6e..6b37b1555d179cad84c61c67dce697450871c5c9 100644 (file)
@@ -47,7 +47,7 @@ public:
     // OutputTensor
     int SetOutputTensorParam();
 
-    int SetOutputTensorParamNode(std::string node);
+    int SetOutputTensorParamNodes(std::vector<std::string> nodes);
 
     int SetTargetDevice(inference_target_type_e type);
 
@@ -74,4 +74,4 @@ private:
 } /* Common */
 } /* InferenceEngineInterface */
 
-#endif /* __INFERENCE_ENGINE_COMMON_IMPL_H__ */
\ No newline at end of file
+#endif /* __INFERENCE_ENGINE_COMMON_IMPL_H__ */
index ab6bd625b3e8860e2231e7cb4a128c824ce36796..6b143baaf44508eac4952c815a3ceb9299cef6fa 100644 (file)
@@ -50,7 +50,7 @@ public:
 
     virtual int SetOutputTensorParamType(int type) = 0;
 
-    virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+    virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
 
     virtual int SetTargetDevice(inference_target_type_e type) = 0;
 
index 0ba444fc6cf1053895d0624a08ae4b2b506b177b..12304f9b8427adb1738b6ad228eefd8e8a934b12 100644 (file)
@@ -53,7 +53,7 @@ public:
 
     int SetOutputTensorParamType(int type);
 
-    int SetOutPutTensorParamNodes(std::string node);
+    int SetOutputTensorParamNodes(std::vector<std::string> nodes);
 
     // Set target device
     int SetTargetDevice(inference_target_type_e device);
index 9a0da028ce5c144c364f35e767ad2ce2888323cc..d4ce9f31a43fe898c369d6afb93cc227189e5dfa 100644 (file)
@@ -1,7 +1,7 @@
 Name:        inference-engine-interface
 Summary:     Interface of inference engines
 Version:     0.0.1
-Release:     2
+Release:     3
 Group:       Multimedia/Framework
 License:     Apache-2.0
 Source0:     %{name}-%{version}.tar.gz
index 47c980ad76bf1025568a1ebb871aaedafef41f99..cb29c60f82725087306c4f2f8c5e53dbfa397c0e 100644 (file)
@@ -119,6 +119,15 @@ int InferenceEngineVision::SetInputTensorParamNode(std::string node)
     return ret;
 }
 
+int InferenceEngineVision::SetOutputTensorParamNodes(std::vector<std::string> nodes)
+{
+    int ret = engine->SetOutputTensorParamNodes(nodes);
+    if (ret != INFERENCE_ENGINE_ERROR_NONE)
+        LOGE("Fail to SetOutputTensorParamNodes");
+
+    return ret;
+}
+
 int InferenceEngineVision::SetOutputTensorParamThresHold(double threshold)
 {
     int ret = engine->SetOutputTensorParamThresHold(threshold);