test: add test case for Hailo NPU 59/319059/1
author Inki Dae <inki.dae@samsung.com>
Tue, 14 Jan 2025 06:00:09 +0000 (15:00 +0900)
committer Inki Dae <inki.dae@samsung.com>
Tue, 4 Feb 2025 01:45:53 +0000 (10:45 +0900)
Add a HAILORT test case which binds the hailort backend, loads a
yolov10s HEF model and runs a single inference on the Hailo NPU.

Change-Id: I6a6c6b1822274fe8dcd2f9edbd55f1f9325b8ed7
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_profiler.cpp

index ea6a899fa44dfb02f33afd14eae4ec642d150c8a..d3c7c41b7f3d2fc081c53cf37dd3bc4e95180083 100644 (file)
@@ -90,6 +90,7 @@
                          123, 99,  287, 381, 451, 287, 381, 475 }
 
 using namespace testing;
+using namespace std;
 
 typedef std::tuple<std::string, int, int, int, int, std::vector<std::string>,
                                   int, int, int, std::vector<std::string>,
@@ -292,6 +293,74 @@ TEST_P(InferenceEngineTfliteTest, Inference)
        models.clear();
 }
 
+TEST(HAILORT, Inference)
+{
+       std::string backend_name = "hailort";
+       int target_devices = INFERENCE_TARGET_CUSTOM;
+       std::vector<std::string> input_layers;
+       std::vector<std::string> output_layers;
+       std::string model_path = "/opt/usr/globalapps/mediavision.object.detection/models/hailo8l/yolov10s.hef";
+
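+       // Configure the engine to use the hailort backend on the custom (NPU) target.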
+       inference_engine_config config = { .backend_name = backend_name,
+                                                                          .backend_type = -1,
+                                                                          .target_devices = target_devices };
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
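+       // Load the engine configuration before binding a backend.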
+       int ret = engine->LoadConfigFile();
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       ret = engine->EnableProfiler(true);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
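+       // Profiling results are dumped to a file named after the backend and target.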
+       ret = engine->DumpProfileToFile("profile_data_" + backend_name +
+                                                                       "_" + Target_Formats[target_devices] +
+                                                                       "_hailo_model.txt");
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
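+       // Bind the hailort backend library to this engine instance.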
+       ret = engine->BindBackend(&config);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       ret = engine->SetTargetDevices(target_devices);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       std::vector<std::string> models;
+       models.push_back(model_path);
+
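+       // Load the HEF model file in the HAILORT model format.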
+       ret = engine->Load(models, INFERENCE_MODEL_HAILORT);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
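+       // Allocate input and output tensor buffers sized for the loaded model.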
+       IETensorBuffer inputs, outputs;
+       ret = PrepareTensorBuffers(engine.get(), inputs, outputs);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
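+       // Fill each input tensor with random bytes; the test only checks that inference runs.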
+       for (auto &input : inputs) {
+               inference_engine_tensor_buffer &buffer = input.second;
+               uint8_t *buf = static_cast<uint8_t *>(buffer.buffer);
+
+               for (size_t idx = 0; idx < buffer.size; idx++) {
+                       buf[idx] = static_cast<uint8_t>(rand() % 256);
+               }
+       }
+
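+       // Run a single inference on the Hailo NPU.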
+       ret = engine->Run(inputs, outputs);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       CleanupTensorBuffers(inputs, outputs);
+
+       engine->UnbindBackend();
+       models.clear();
+}
+
 INSTANTIATE_TEST_CASE_P(
                Opensource, InferenceEngineTfliteTest,
                testing::Values(