/*
 * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef __OBJECT_DETECTION_H__
#define __OBJECT_DETECTION_H__

#include <condition_variable>
#include <memory>
#include <string>
#include <vector>

#include <mv_common.h>
#include <mv_inference_type.h>
#include "mv_private.h"

#include "EngineConfig.h"
#include "inference_engine_common_impl.h"
#include "Inference.h"
#include "object_detection_type.h"
#include "ObjectDetectionParser.h"
#include "machine_learning_preprocess.h"
#include "iobject_detection.h"
#include "async_manager.h"
41 namespace machine_learning
43 class ObjectDetection : public IObjectDetection
46 ObjectDetectionTaskType _task_type;
47 std::unique_ptr<AsyncManager<ObjectDetectionResult> > _async_manager;
48 ObjectDetectionResult _current_result {};
52 void getDeviceList(const char *engine_type);
54 void preprocess(mv_source_h &mv_src, std::shared_ptr<MetaInfo> metaInfo, std::vector<T> &inputVector);
55 std::shared_ptr<MetaInfo> getInputMetaInfo();
56 template<typename T> void perform(mv_source_h &mv_src, std::shared_ptr<MetaInfo> metaInfo);
57 template<typename T> void performAsync(ObjectDetectionInput &input, std::shared_ptr<MetaInfo> metaInfo);
60 std::unique_ptr<mediavision::inference::Inference> _inference;
61 std::unique_ptr<MediaVision::Common::EngineConfig> _config;
62 std::unique_ptr<MetaParser> _parser;
63 std::vector<std::string> _labels;
64 std::vector<std::string> _valid_backends;
65 std::vector<std::string> _valid_devices;
66 Preprocess _preprocess;
67 std::string _modelFilePath;
68 std::string _modelMetaFilePath;
69 std::string _modelDefaultPath;
70 std::string _modelLabelFilePath;
72 int _targetDeviceType;
74 void getOutputNames(std::vector<std::string> &names);
75 void getOutputTensor(std::string target_name, std::vector<float> &tensor);
76 void parseMetaFile(std::string meta_file_name);
77 template<typename T> void inference(std::vector<std::vector<T> > &inputVectors);
78 virtual ObjectDetectionResult &result() = 0;
81 ObjectDetection(ObjectDetectionTaskType task_type);
82 virtual ~ObjectDetection() = default;
83 void preDestroy() override;
84 ObjectDetectionTaskType getTaskType() override;
85 void setUserModel(std::string model_file, std::string meta_file, std::string label_file) override;
86 void setEngineInfo(std::string engine_type, std::string device_type) override;
87 void getNumberOfEngines(unsigned int *number_of_engines) override;
88 void getEngineType(unsigned int engine_index, char **engine_type) override;
89 void getNumberOfDevices(const char *engine_type, unsigned int *number_of_devices) override;
90 void getDeviceType(const char *engine_type, const unsigned int device_index, char **device_type) override;
91 void configure(std::string configFile) override;
92 void prepare() override;
93 void perform(mv_source_h &mv_src) override;
94 void performAsync(ObjectDetectionInput &input) override;
95 ObjectDetectionResult &getOutput() override;
96 ObjectDetectionResult &getOutputCache() override;