.target_devices = target_devices
};
- InferenceEngineCommon *engine = new InferenceEngineCommon();
+ auto engine = std::make_unique<InferenceEngineCommon>();
if (engine == nullptr) {
ASSERT_TRUE(engine);
return;
int ret = engine->EnableProfiler(true);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_tflite_model.txt");
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
if (model_type == -1) {
- delete engine;
ASSERT_NE(model_type, -1);
return;
}
ret = engine->SetInputLayerProperty(input_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->SetOutputLayerProperty(output_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->Load(models, (inference_model_format_e)model_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector<inference_engine_tensor_buffer> inputs, outputs;
- ret = PrepareTensorBuffers(engine, inputs, outputs);
+ ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
}
tensor_t result;
- FillOutputResult(engine, outputs, result);
+ FillOutputResult(engine.get(), outputs, result);
switch (test_type) {
case TEST_IMAGE_CLASSIFICATION:
engine->UnbindBackend();
models.clear();
-
- delete engine;
}
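
Every hunk above follows the same shape: the raw new/delete pair becomes a std::unique_ptr, each early-return error path drops its explicit delete engine;, and helpers that still take a raw pointer receive a non-owning view via engine.get(). A minimal sketch of the resulting ownership pattern, assuming the InferenceEngineCommon class, config struct, and error constants from the inference-engine-interface headers these tests already include (the RunBindOnce helper name is illustrative, not part of the test file):

#include <memory>

static int RunBindOnce(inference_engine_config &config)
{
	// make_unique throws std::bad_alloc on allocation failure instead of
	// returning nullptr, so no null check is strictly required here.
	auto engine = std::make_unique<InferenceEngineCommon>();

	int ret = engine->BindBackend(&config);
	if (ret != INFERENCE_ENGINE_ERROR_NONE)
		return ret;	// engine is released automatically; no delete needed

	// Helpers that expect a raw pointer would get a non-owning view:
	// PrepareTensorBuffers(engine.get(), inputs, outputs);

	engine->UnbindBackend();
	return INFERENCE_ENGINE_ERROR_NONE;
}	// engine is released here on the success path as well
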
TEST_P(InferenceEngineCaffeTest, Inference)
.target_devices = target_devices
};
- InferenceEngineCommon *engine = new InferenceEngineCommon();
+ auto engine = std::make_unique<InferenceEngineCommon>();
if (engine == nullptr) {
ASSERT_TRUE(engine);
return;
int ret = engine->EnableProfiler(true);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_caffe_model.txt");
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
if (model_type == -1) {
- delete engine;
ASSERT_NE(model_type, -1);
return;
}
ret = engine->SetInputLayerProperty(input_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->SetOutputLayerProperty(output_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->Load(models, (inference_model_format_e)model_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector<inference_engine_tensor_buffer> inputs, outputs;
- ret = PrepareTensorBuffers(engine, inputs, outputs);
+ ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
}
tensor_t result;
- FillOutputResult(engine, outputs, result);
+ FillOutputResult(engine.get(), outputs, result);
switch (test_type) {
case TEST_IMAGE_CLASSIFICATION:
engine->UnbindBackend();
models.clear();
-
- delete engine;
}
TEST_P(InferenceEngineDldtTest, Inference)
.target_devices = target_devices
};
- InferenceEngineCommon *engine = new InferenceEngineCommon();
+ auto engine = std::make_unique<InferenceEngineCommon>();
if (engine == nullptr) {
ASSERT_TRUE(engine);
return;
int ret = engine->EnableProfiler(true);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->DumpProfileToFile("profile_data_" + backend_name + "_dldt_model.txt");
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
if (model_type == -1) {
- delete engine;
ASSERT_NE(model_type, -1);
return;
}
ret = engine->SetInputLayerProperty(input_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->SetOutputLayerProperty(output_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->Load(models, (inference_model_format_e)model_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector<inference_engine_tensor_buffer> inputs, outputs;
- ret = PrepareTensorBuffers(engine, inputs, outputs);
+ ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
}
tensor_t result;
- FillOutputResult(engine, outputs, result);
+ FillOutputResult(engine.get(), outputs, result);
switch (test_type) {
case TEST_IMAGE_CLASSIFICATION:
engine->UnbindBackend();
models.clear();
-
- delete engine;
}
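
These suites are value-parameterized (TEST_P), so the INSTANTIATE_TEST_CASE_P lines below and elsewhere in the file expand one test run per backend/model combination supplied by the parameter generator. A generic, self-contained illustration of that gtest mechanism, using a hypothetical parameter tuple rather than the actual parameter pack these suites define:

#include <gtest/gtest.h>
#include <string>
#include <tuple>

// Hypothetical suite: parameterized on (backend name, target device flag).
class ExampleParamTest
		: public testing::TestWithParam<std::tuple<std::string, int> >
{};

TEST_P(ExampleParamTest, Inference)
{
	std::string backend_name;
	int target_devices;
	std::tie(backend_name, target_devices) = GetParam();

	// A real body would build a config from the parameters and run the engine.
	EXPECT_FALSE(backend_name.empty());
}

INSTANTIATE_TEST_CASE_P(Prefix, ExampleParamTest,
		testing::Values(std::make_tuple(std::string("tflite"), 1),
						std::make_tuple(std::string("caffe"), 2)));
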
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTfliteTest,
.target_devices = target_devices
};
- InferenceEngineCommon *engine = new InferenceEngineCommon();
+ auto engine = std::make_unique<InferenceEngineCommon>();
ASSERT_TRUE(engine);
int ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
engine->UnbindBackend();
-
- delete engine;
}
TEST_P(InferenceEngineTestCase_G2, Load_P)
.target_devices = target_devices
};
- InferenceEngineCommon *engine = new InferenceEngineCommon();
+ auto engine = std::make_unique<InferenceEngineCommon>();
ASSERT_TRUE(engine);
int ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
if (model_type == -1) {
- delete engine;
ASSERT_NE(model_type, -1);
return;
}
EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
engine->UnbindBackend();
-
- delete engine;
}
TEST_P(InferenceEngineTestCase_G3, Inference)
.target_devices = target_devices
};
- InferenceEngineCommon *engine = new InferenceEngineCommon();
+ auto engine = std::make_unique<InferenceEngineCommon>();
if (engine == nullptr) {
ASSERT_TRUE(engine);
return;
int ret = engine->BindBackend(&config);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
if (model_type == -1) {
- delete engine;
ASSERT_NE(model_type, -1);
return;
}
ret = engine->SetInputLayerProperty(input_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->SetOutputLayerProperty(output_property);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
ret = engine->Load(models, (inference_model_format_e)model_type);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
std::vector<inference_engine_tensor_buffer> inputs, outputs;
- ret = PrepareTensorBuffers(engine, inputs, outputs);
+ ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- delete engine;
ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
return;
}
}
tensor_t result;
- FillOutputResult(engine, outputs, result);
+ FillOutputResult(engine.get(), outputs, result);
ret = VerifyImageClassificationResults(result, answers[0]);
EXPECT_EQ(ret, 1);
engine->UnbindBackend();
models.clear();
-
- delete engine;
}
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G1,