123, 99, 287, 381, 451, 287, 381, 475 }
using namespace testing;
+using namespace std;
typedef std::tuple<std::string, int, int, int, int, std::vector<std::string>,
int, int, int, std::vector<std::string>,
models.clear();
}
+// End-to-end smoke test for the HailoRT backend: load the engine config,
+// bind the backend, load a compiled HEF model, run one inference pass on
+// random input data, then tear everything down. Each pipeline step must
+// report INFERENCE_ENGINE_ERROR_NONE; output values are not validated.
+TEST(HAILORT, Inference)
+{
+	const std::string backend_name = "hailort";
+	const int target_devices = INFERENCE_TARGET_CUSTOM;
+	// NOTE(review): absolute device path — assumes the yolov10s HEF is
+	// pre-installed on the target; the test cannot run without it.
+	const std::string model_path = "/opt/usr/globalapps/mediavision.object.detection/models/hailo8l/yolov10s.hef";
+
+	// backend_type -1 lets the engine resolve the backend from its name.
+	inference_engine_config config = { .backend_name = backend_name,
+									   .backend_type = -1,
+									   .target_devices = target_devices };
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = engine->LoadConfigFile();
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	ret = engine->EnableProfiler(true);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	ret = engine->DumpProfileToFile("profile_data_" + backend_name +
+									"_" + Target_Formats[target_devices] +
+									"_hailo_model.txt");
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	ret = engine->BindBackend(&config);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	ret = engine->SetTargetDevices(target_devices);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	std::vector<std::string> models;
+	models.push_back(model_path);
+
+	ret = engine->Load(models, INFERENCE_MODEL_HAILORT);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	IETensorBuffer inputs, outputs;
+	ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	// Fill every input tensor with random bytes; this test only checks
+	// that inference executes, not the semantics of the result.
+	for (auto &input : inputs) {
+		// Bind the descriptor by reference — copying the struct per
+		// iteration (as before) was needless.
+		inference_engine_tensor_buffer &buffer = input.second;
+		uint8_t *buf = static_cast<uint8_t *>(buffer.buffer);
+
+		for (size_t idx = 0; idx < buffer.size; idx++)
+			buf[idx] = static_cast<uint8_t>(rand() % 256);
+	}
+
+	ret = engine->Run(inputs, outputs);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	CleanupTensorBuffers(inputs, outputs);
+
+	engine->UnbindBackend();
+}
+
INSTANTIATE_TEST_CASE_P(
Opensource, InferenceEngineTfliteTest,
testing::Values(