The IInferenceEngineCommon class should be the parent of the IInferenceEngineVision class,
so I implemented an inheritance relationship between the two classes.
Change-Id: I9a4e572dbba9d4e8fd118ec73305f46a912015fa
Signed-off-by: Hyunsoo Park <hance.park@samsung.com>
* limitations under the License.
*/
-#include <inference_engine_error.h>
-#include <inference_engine_common_impl.h>
+#include "inference_engine_error.h"
+#include "inference_engine_common_impl.h"
#include <fstream>
#include <iostream>
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
}
-int InferenceEngineCommon::SetInputTensorParamNode(std::string node)
-{
- int ret = engine->SetInputTensorParamNode(node);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to SetInputTensorParamNode");
-
- return ret;
-}
-
int InferenceEngineCommon::SetOutputTensorParam()
{
return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
int InferenceEngineCommon::SetOutputTensorParamNodes(std::vector<std::string> nodes)
{
+ LOGI("ENTER");
int ret = engine->SetOutputTensorParamNodes(nodes);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to SetOutputTensorParamNodes");
-
+ LOGI("LEAVE");
return ret;
}
return ret;
}
-int InferenceEngineCommon::Load()
+int InferenceEngineCommon::Load(inference_input_type_e type)
{
int ret = engine->Load();
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to load CreateInputLayerPassage");
- ret = engine->PrepareInputLayerPassage();
+ ret = engine->PrepareInputLayerPassage(type);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to load PrepareInputLayerPassage");
return ret;
}
+int InferenceEngineCommon::SetInputTensorParamNode(std::string node)
+{
+ LOGE("ENTER");
+ int ret = engine->SetInputTensorParamNode(node);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE)
+ LOGE("Fail to SetInputTensorParamNode");
+ LOGE("LEAVE");
+ return ret;
+}
+
int InferenceEngineCommon::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results)
{
+ LOGE("ENTER");
int ret = engine->GetInferenceResult(dimInfo, results);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to GetInferenceResult");
-
+ LOGE("LEAVE");
return ret;
}
} /* Common */
virtual int SetInputTensorParamNode(std::string node) = 0;
-
// OutputTensor
virtual int SetOutputTensorParam() = 0;
virtual int CreateInputLayerPassage() = 0;
- virtual int PrepareInputLayerPassage() = 0;
-
+ virtual int PrepareInputLayerPassage(inference_input_type_e type) = 0;
virtual int Run(std::vector<float> tensor) = 0;
#include <vector>
#include <string>
-#include <inference_engine_common.h>
-#include <inference_engine_type.h>
+#include "inference_engine_common.h"
+#include "inference_engine_type.h"
namespace InferenceEngineInterface {
namespace Common {
int SetInputTensorParamNode(std::string node);
-
// OutputTensor
int SetOutputTensorParam();
int SetTargetDevice(inference_target_type_e type);
// Load and Run
- int Load();
-
- int CreateInputLayerPassage();
-
- int PrepareInputLayerPassage();
-
+ int Load(inference_input_type_e type);
int Run(std::vector<float> tensor);
int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results);
private:
- void *handle;
- IInferenceEngineCommon *engine;
std::string mBackendLibName;
std::vector<std::string> mUserListName;
+protected:
+ void *handle;
+ IInferenceEngineCommon *engine;
};
#include <string>
#include "inference_engine_type.h"
+#include "inference_engine_common.h"
#include <opencv2/core.hpp>
namespace InferenceEngineInterface {
namespace Vision {
-class IInferenceEngineVision {
+class IInferenceEngineVision : public virtual Common::IInferenceEngineCommon {
public:
-
+ using Common::IInferenceEngineCommon::GetInferenceResult;
virtual ~IInferenceEngineVision() {};
// InputTensor
- virtual int SetInputTensorParam() = 0;
virtual int SetInputTensorParamInput(int width, int height, int dim, int ch) = 0;
virtual int SetInputTensorParamNorm(double deviation, double mean) = 0;
- virtual int SetInputTensorParamNode(std::string node) = 0;
-
-
// OutputTensor
- virtual int SetOutputTensorParam() = 0;
virtual int SetOutputTensorParamThresHold(double threshold) = 0;
virtual int SetOutputTensorParamType(int type) = 0;
- virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
-
- virtual int SetTargetDevice(inference_target_type_e type) = 0;
-
// Load and Run
- virtual int Load() = 0;
-
- virtual int CreateInputLayerPassage() = 0;
-
- virtual int PrepareInputLayerPassage(inference_input_type_e type) = 0;
virtual int Run(cv::Mat tensor) = 0;
- virtual int Run(std::vector<float> tensor) = 0;
-
virtual int GetInferenceResult(ImageClassificationResults& results) = 0;
virtual int GetInferenceResult(ObjectDetectionResults& results) = 0;
virtual int GetInferenceResult(FacialLandMarkDetectionResults& results) = 0;
- virtual int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results) = 0;
-
virtual int GetNumberOfOutputs() = 0;
virtual void SetUserListName(std::string userlist) = 0;
#include <vector>
#include <string>
-
-#include <inference_engine_vision.h>
-#include <inference_engine_type.h>
+#include "inference_engine_common_impl.h"
+#include "inference_engine_vision.h"
+#include "inference_engine_type.h"
#include <opencv2/core.hpp>
namespace InferenceEngineInterface {
namespace Vision {
-class InferenceEngineVision {
+class InferenceEngineVision : public Common::InferenceEngineCommon {
public:
+ using Common::InferenceEngineCommon::GetInferenceResult;
InferenceEngineVision(std::string backend);
~InferenceEngineVision();
int SetInputTensorParamNorm(double deviation, double mean);
- int SetInputTensorParamNode(std::string node);
-
// Output Tensor parameters
int SetOutputTensorParamThresHold(double threshold);
int SetOutputTensorParamType(int type);
- int SetOutputTensorParamNodes(std::vector<std::string> nodes);
-
- // Set target device
- int SetTargetDevice(inference_target_type_e device);
-
- int Load();
-
int Run(cv::Mat tensor);
int GetInferenceResult(ImageClassificationResults& results);
int GetInferenceResult(FacialLandMarkDetectionResults& results);
- int GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results);
-
int GetNumberOfOutputs();
void SetUserListName(std::string userlist);
private:
- void *handle;
- IInferenceEngineVision *engine;
std::string mBackendLibName;
std::vector<std::string> mUserListName;
-
+protected:
+ void *handle;
+ IInferenceEngineVision *engine;
};
} /* Vision */
Name: inference-engine-interface
Summary: Interface of inference engines
Version: 0.0.1
-Release: 3
+Release: 4
Group: Multimedia/Framework
License: Apache-2.0
Source0: %{name}-%{version}.tar.gz
* limitations under the License.
*/
-#include <inference_engine_error.h>
-#include <inference_engine_vision_impl.h>
+#include "inference_engine_error.h"
+#include "inference_engine_vision_impl.h"
#include <fstream>
namespace InferenceEngineInterface {
namespace Vision {
-
-InferenceEngineVision::InferenceEngineVision(std::string backend)
+InferenceEngineVision::InferenceEngineVision(std::string backend) : Common::InferenceEngineCommon(backend)
{
LOGE("ENTER");
mBackendLibName = "libinference-engine-" + backend + ".so";
LOGW("ENTER");
char *error = NULL;
handle = dlopen(mBackendLibName.c_str(), RTLD_LAZY);
- LOGE("dlopen %s", mBackendLibName.c_str());
+ LOGI("HANDLE : [%p]", handle);
+
if (!handle) {
LOGE("Fail to dlopen %s", mBackendLibName.c_str());
LOGE("Error: %s\n", dlerror());
}
engine = EngineInit(configFile, weightFile, userFile);
+ LOGI("dlopen %s", mBackendLibName.c_str());
+
if (engine == NULL) {
LOGE("Fail to EngineInit");
dlclose(handle);
return INFERENCE_ENGINE_ERROR_INTERNAL;
}
+ Common::InferenceEngineCommon::handle = handle;
+
+ Common::InferenceEngineCommon::engine = engine;
+
LOGW("LEAVE");
return INFERENCE_ENGINE_ERROR_NONE;
}
return ret;
}
-int InferenceEngineVision::SetInputTensorParamNode(std::string node)
-{
- int ret = engine->SetInputTensorParamNode(node);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to SetInputTensorParamNode");
-
- return ret;
-}
-
-int InferenceEngineVision::SetOutputTensorParamNodes(std::vector<std::string> nodes)
-{
- int ret = engine->SetOutputTensorParamNodes(nodes);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to SetOutputTensorParamNodes");
-
- return ret;
-}
-
int InferenceEngineVision::SetOutputTensorParamThresHold(double threshold)
{
int ret = engine->SetOutputTensorParamThresHold(threshold);
return ret;
}
-int InferenceEngineVision::SetTargetDevice(inference_target_type_e type)
-{
- int ret = engine->SetTargetDevice(type);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to SetTargetDevice");
-
- return ret;
-}
-
-int InferenceEngineVision::Load()
-{
- int ret = engine->Load();
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to load InferenceEngineVision");
-
- ret = engine->CreateInputLayerPassage();
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to load CreateInputLayerPassage");
-
- ret = engine->PrepareInputLayerPassage(INFERENCE_INPUT_IMAGE);
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to load PrepareInputLayerPassage");
-
- return ret;
-}
-
int InferenceEngineVision::Run(cv::Mat tensor)
{
int ret = engine->Run(tensor);
return ret;
}
-int InferenceEngineVision::GetInferenceResult(std::vector<std::vector<int>>& dimInfo, std::vector<float*>& results)
-{
- int ret = engine->GetInferenceResult(dimInfo, results);
-
- if (ret != INFERENCE_ENGINE_ERROR_NONE)
- LOGE("Fail to GetInferenceResult");
-
- return ret;
-}
-
int InferenceEngineVision::GetNumberOfOutputs()
{
return engine->GetNumberOfOutputs();
{
;
}
-
} /* Vision */
} /* InferenceEngineInterface */