#define LOG_TAG "INFERENCE_ENGINE_COMMON"
}
// Guard macro: logs and bails out of the calling function with
// INFERENCE_ENGINE_ERROR_INVALID_OPERATION when the backend handle
// has not been created yet (i.e. the engine was used before Load/Bind).
//
// Wrapped in do { ... } while (0) so the macro expands to exactly one
// statement: without it, a naked `if { }` body makes
// `if (cond) CHECK_ENGINE_INSTANCE(h); else ...` bind the `else` to the
// macro's own `if` (dangling-else), silently changing control flow.
// The parameter is parenthesized so expression arguments parse correctly.
#define CHECK_ENGINE_INSTANCE(object)                          \
	do {                                                       \
		if ((object) == nullptr) {                             \
			LOGE("Inference engine handle is null.");          \
			return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;   \
		}                                                      \
	} while (0)
+
namespace fs = std::experimental::filesystem;
namespace InferenceEngineInterface {
namespace Common {
int InferenceEngineCommon::SetTargetDevices(int types)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = mBackendHandle->SetTargetDevices(types);
if (ret != INFERENCE_ENGINE_ERROR_NONE)
LOGE("Fail to SetTargetDevice");
{
LOGI("ENTER");
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
if (mUseProfiler == true) {
mProfiler->AddModelName(model_paths[0]);
mProfiler->PushEnv();
int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = mBackendHandle->GetInputTensorBuffers(buffers);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Failed to get input tensor buffers.");
int InferenceEngineCommon::GetOutputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = mBackendHandle->GetOutputTensorBuffers(buffers);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Failed to get output tensor buffers.");
int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = mBackendHandle->GetInputLayerProperty(property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Failed to get input layer property.");
int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = mBackendHandle->GetOutputLayerProperty(property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Failed to get output layer property.");
int InferenceEngineCommon::SetInputLayerProperty(inference_engine_layer_property &property)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = CheckLayerProperty(property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Given input layer property is invalid.");
int InferenceEngineCommon::SetOutputLayerProperty(inference_engine_layer_property &property)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
int ret = CheckLayerProperty(property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
LOGE("Given output layer property is invalid.");
int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
if (capacity == nullptr) {
LOGE("Given inference_engine_capacity object is invalid.");
return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
std::vector<inference_engine_tensor_buffer> &output_buffers)
{
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
if (mUseProfiler == true) {
mProfiler->Start(IE_PROFILER_LATENCY);
}