 * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
17 #ifndef __OBJECT_DETECTION_H__
18 #define __OBJECT_DETECTION_H__
25 #include <mv_common.h>
26 #include <mv_inference_type.h>
27 #include "mv_private.h"
29 #include "EngineConfig.h"
30 #include "inference_engine_common_impl.h"
31 #include "Inference.h"
32 #include "object_detection_type.h"
33 #include "MetaParser.h"
34 #include "ObjectDetectionParser.h"
35 #include "machine_learning_config.h"
36 #include "machine_learning_preprocess.h"
37 #include "iobject_detection.h"
38 #include "async_manager.h"
42 namespace machine_learning
// Abstract base class for object-detection style tasks. It implements the
// IObjectDetection interface (engine/device selection, configure/prepare,
// sync perform and async performAsync) and leaves model-specific output
// decoding to subclasses via the pure-virtual result().
//
// NOTE(review): this chunk is a fragment — the leading numerals on each line
// are extraction artifacts (original file line numbers), and the access
// specifiers, braces, the closing `};` of the class, and a
// `template<typename T>` line that presumably preceded preprocess() are not
// visible here. Code below is intentionally left byte-identical.
44 class ObjectDetection : public IObjectDetection
// Which detection task this instance performs; defaults to NONE until set
// by the constructor.
47 ObjectDetectionTaskType _task_type { ObjectDetectionTaskType::OBJECT_DETECTION_TASK_NONE };
// Owns the async-inference pipeline used by performAsync(); created lazily,
// presumably — TODO confirm against the .cpp.
48 std::unique_ptr<AsyncManager<ObjectDetectionResult> > _async_manager;
// Most recent result; presumably what getOutputCache() returns — verify.
49 ObjectDetectionResult _current_result;
// Populates the device list for the given inference engine; presumably
// fills _valid_devices — TODO confirm in the implementation.
53 void getDeviceList(const char *engine_type);
// Converts an mv_source_h frame into the flat input tensor `inputVector`
// according to `metaInfo`. Uses a template parameter T for the element
// type; its `template<typename T>` line is missing from this fragment.
55 void preprocess(mv_source_h &mv_src, std::shared_ptr<MetaInfo> metaInfo, std::vector<T> &inputVector);
// Returns the meta information describing the model's input tensor.
56 std::shared_ptr<MetaInfo> getInputMetaInfo();
// Typed synchronous pipeline: preprocess + inference for element type T.
57 template<typename T> void perform(mv_source_h &mv_src, std::shared_ptr<MetaInfo> metaInfo);
// Typed asynchronous pipeline entry; hands work to _async_manager,
// presumably — confirm against the .cpp.
58 template<typename T> void performAsync(ObjectDetectionInput &input, std::shared_ptr<MetaInfo> metaInfo);
// Backend inference engine wrapper (mediavision inference implementation).
61 std::unique_ptr<mediavision::inference::Inference> _inference;
// Shared task configuration (model/meta/label paths, engine settings).
62 std::shared_ptr<MachineLearningConfig> _config;
// Class labels loaded for the model.
63 std::vector<std::string> _labels;
// Engine backends / devices reported as usable on this platform.
64 std::vector<std::string> _valid_backends;
65 std::vector<std::string> _valid_devices;
// Input preprocessing helper shared with subclasses.
66 Preprocess _preprocess;
// Collects the model's output tensor names into `names`.
68 void getOutputNames(std::vector<std::string> &names);
// Copies the raw float output tensor named `target_name` into `tensor`.
69 void getOutputTensor(std::string target_name, std::vector<float> &tensor);
// Runs the backend inference on already-preprocessed input vectors.
70 template<typename T> void inference(std::vector<std::vector<T> > &inputVectors);
// Decodes the model-specific raw output into an ObjectDetectionResult;
// must be implemented by each concrete task subclass.
71 virtual ObjectDetectionResult &result() = 0;
// Constructor is explicit; takes the task type and the shared config.
// Virtual destructor is required: this class is deleted through the
// IObjectDetection base.
74 explicit ObjectDetection(ObjectDetectionTaskType task_type, std::shared_ptr<MachineLearningConfig> config);
75 virtual ~ObjectDetection() = default;
// --- IObjectDetection interface (overrides) -----------------------------
// Tear-down hook invoked before destruction (e.g. to stop async work,
// presumably — confirm in the .cpp).
77 void preDestroy() override;
78 ObjectDetectionTaskType getTaskType() override;
// Points the task at user-supplied model/meta/label files (not part of the
// override set — no `override` keyword on this one).
79 void setUserModel(std::string model_file, std::string meta_file, std::string label_file);
80 void setEngineInfo(std::string engine_type_name, std::string device_type_name) override;
// Engine/device enumeration for the public C API; out-params follow the
// mv_* C-style convention (counts via pointer, names via char**).
81 void getNumberOfEngines(unsigned int *number_of_engines) override;
82 void getEngineType(unsigned int engine_index, char **engine_type) override;
83 void getNumberOfDevices(const char *engine_type, unsigned int *number_of_devices) override;
84 void getDeviceType(const char *engine_type, const unsigned int device_index, char **device_type) override;
// Lifecycle: configure() then prepare() before perform()/performAsync().
85 void configure() override;
86 void prepare() override;
// Synchronous single-frame inference.
87 void perform(mv_source_h &mv_src) override;
// Asynchronous inference; results retrieved via getOutput()/getOutputCache().
88 void performAsync(ObjectDetectionInput &input) override;
// getOutput() presumably blocks on/fetches the latest async result, while
// getOutputCache() returns the cached _current_result — TODO confirm.
89 ObjectDetectionResult &getOutput() override;
90 ObjectDetectionResult &getOutputCache() override;