ASSERT_TRUE(engine);
int ret = engine->BindBackend(&config);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
inference_engine_capacity capacity;
ret = engine->GetBackendCapacity(&capacity);
/**
 * @brief Read up to @p size bytes from @p file_name into @p buffer.buffer.
 *
 * Test helper: on open/read failure it records a gtest assertion failure
 * and returns early, closing the descriptor first so the test process does
 * not leak fds across repeated test cases.
 *
 * @param file_name Path of the input file (e.g. an image tensor dump).
 * @param buffer    Destination tensor buffer; buffer.buffer must already
 *                  point at storage of at least @p size bytes.
 * @param size      Maximum number of bytes to copy.
 */
void CopyFileToMemory(const char *file_name, inference_engine_tensor_buffer &buffer, unsigned int size)
{
	int fd = open(file_name, O_RDONLY);
	if (fd == -1) {
		// Record the failure for gtest, then bail out: ASSERT_* only
		// returns from this function, so cleanup must be explicit.
		ASSERT_NE(fd, -1);
		return;
	}

	// read(2) returns ssize_t; using int here would narrow on LP64.
	ssize_t num = read(fd, buffer.buffer, size);
	if (num == -1) {
		// Close before asserting so the fd is not leaked when the
		// assertion aborts this function.
		close(fd);
		ASSERT_NE(num, -1);
		return;
	}

	// NOTE(review): a short read (num < size) is currently accepted and
	// leaves the tail of buffer.buffer untouched — confirm callers always
	// pass size == file length, or add an ASSERT_EQ(num, (ssize_t)size).
	close(fd);
}
ASSERT_TRUE(engine);
int ret = engine->BindBackend(&config);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
inference_engine_capacity capacity;
ret = engine->GetBackendCapacity(&capacity);
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
- ASSERT_NE(model_type, -1);
+ if (model_type == -1) {
+ delete engine;
+ ASSERT_NE(model_type, -1);
+ return;
+ }
ret = engine->Load(models, (inference_model_format_e)model_type);
EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
};
InferenceEngineCommon *engine = new InferenceEngineCommon(&config);
- ASSERT_TRUE(engine);
+ if (engine == nullptr) {
+ ASSERT_TRUE(engine);
+ return;
+ }
int ret = engine->BindBackend(&config);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
inference_engine_capacity capacity;
ret = engine->GetBackendCapacity(&capacity);
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
- ASSERT_NE(model_type, -1);
+ if (model_type == -1) {
+ delete engine;
+ ASSERT_NE(model_type, -1);
+ return;
+ }
inference_engine_layer_property input_property;
std::vector<std::string>::iterator iter;
}
ret = engine->SetInputLayerProperty(input_property);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
inference_engine_layer_property output_property;
}
ret = engine->SetOutputLayerProperty(output_property);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
ret = engine->Load(models, (inference_model_format_e)model_type);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
std::vector<inference_engine_tensor_buffer> inputs, outputs;
ret = PrepareTensorBuffers(engine, inputs, outputs);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+ delete engine;
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ return;
+ }
// Copy input image tensor data from a given file to input tensor buffer.
for (int i = 0; i < (int)image_paths.size(); ++i) {
CleanupTensorBuffers(inputs, outputs);
engine->UnbindBackend();
+ models.clear();
delete engine;
}