Multiple output tensors are now supported.
Set the output tensors' names to receive multiple output tensors.
Note that batching is not supported by the current inference path, so an input tensor should be a single string.
Change-Id: I01a3a8fa7f15ce329fbab187f6d72f583f116c2a
Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
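
A minimal usage sketch of the new API, assuming an already-created InferenceEngineVision instance with a model loaded; the output node names below are hypothetical placeholders, since the real names depend on the model:

    #include <string>
    #include <vector>

    // `engine` is assumed to be an InferenceEngineVision instance that has
    // already loaded a model. The node names are illustrative only.
    std::vector<std::string> outputNodes = {
        "detection_boxes",   // hypothetical output tensor name
        "detection_scores"   // hypothetical output tensor name
    };
    int ret = engine.SetOutputTensorParamNodes(outputNodes);
    if (ret != INFERENCE_ENGINE_ERROR_NONE) {
        // handle the error; INFERENCE_ENGINE_ERROR_* codes are returned
    }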
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
}
-int InferenceEngineCommon::SetOutputTensorParamNode(std::string node)
+int InferenceEngineCommon::SetOutputTensorParamNodes(std::vector<std::string> nodes)
{
- int ret = engine->SetOutPutTensorParamNodes(node);
+ int ret = engine->SetOutputTensorParamNodes(nodes);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to SetOutputTensorParamNodes");
// OutputTensor
virtual int SetOutputTensorParam() = 0;
- virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+ virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
virtual int SetTargetDevice(inference_target_type_e type) = 0;
// OutputTensor
int SetOutputTensorParam();
- int SetOutputTensorParamNode(std::string node);
+ int SetOutputTensorParamNodes(std::vector<std::string> nodes);
int SetTargetDevice(inference_target_type_e type);
} /* Common */
} /* InferenceEngineInterface */
-#endif /* __INFERENCE_ENGINE_COMMON_IMPL_H__ */
\ No newline at end of file
+#endif /* __INFERENCE_ENGINE_COMMON_IMPL_H__ */
virtual int SetOutputTensorParamType(int type) = 0;
- virtual int SetOutPutTensorParamNodes(std::string node) = 0;
+ virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
virtual int SetTargetDevice(inference_target_type_e type) = 0;
int SetOutputTensorParamType(int type);
- int SetOutPutTensorParamNodes(std::string node);
+ int SetOutputTensorParamNodes(std::vector<std::string> nodes);
// Set target device
int SetTargetDevice(inference_target_type_e device);
Name: inference-engine-interface
Summary: Interface of inference engines
Version: 0.0.1
-Release: 2
+Release: 3
Group: Multimedia/Framework
License: Apache-2.0
Source0: %{name}-%{version}.tar.gz
return ret;
}
+int InferenceEngineVision::SetOutputTensorParamNodes(std::vector<std::string> nodes)
+{
+ int ret = engine->SetOutputTensorParamNodes(nodes);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to SetOutputTensorParamNodes");
+
+ return ret;
+}
+
int InferenceEngineVision::SetOutputTensorParamThresHold(double threshold)
{
int ret = engine->SetOutputTensorParamThresHold(threshold);
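
Since SetOutputTensorParamNodes is now a pure virtual on the common interface, each backend must override it. A hedged sketch of what such an override might look like, using a hypothetical backend class InferenceTFLite and a hypothetical member mOutputLayers (neither appears in this change):

    // Hypothetical backend override: store all requested output node names
    // so the backend can look up multiple output tensors after inference.
    int InferenceTFLite::SetOutputTensorParamNodes(std::vector<std::string> nodes)
    {
        mOutputLayers = nodes;  // hypothetical member: std::vector<std::string>
        return INFERENCE_ENGINE_ERROR_NONE;
    }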