/*
 * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 #ifndef __OBJECT_DETECTION_H__
18 #define __OBJECT_DETECTION_H__
25 #include <mv_common.h>
26 #include <mv_inference_type.h>
27 #include "mv_private.h"
29 #include "EngineConfig.h"
30 #include "inference_engine_common_impl.h"
31 #include "Inference.h"
32 #include "object_detection_type.h"
33 #include "MetaParser.h"
34 #include "ObjectDetectionParser.h"
35 #include "machine_learning_config.h"
36 #include "machine_learning_preprocess.h"
37 #include "iobject_detection.h"
38 #include "async_manager.h"
42 namespace machine_learning
// Common base for all object-detection style tasks (object detection, face
// detection, ...). A concrete task derives from this class and implements
// result() to decode the raw model output. T is the element type of the
// model's input tensor (e.g. float or unsigned char).
//
// NOTE(review): the class's opening brace and its private/protected/public
// access-specifier lines fall on lines elided from this excerpt; only the
// visible declarations are annotated here.
template<typename T> class ObjectDetection : public IObjectDetection
	// Task flavor bound at construction time; stays NONE until a concrete
	// task type is supplied.
	ObjectDetectionTaskType _task_type { ObjectDetectionTaskType::OBJECT_DETECTION_TASK_NONE };
	// Async request/result queue manager backing performAsync().
	// Presumably created lazily on the first async request — null while the
	// task is only used synchronously; TODO confirm against the .cpp.
	std::unique_ptr<AsyncManager<ObjectDetectionResult> > _async_manager;
	// Most recently produced result; handed out by reference from
	// getOutput()/getOutputCache(), so it must outlive the caller's use.
	ObjectDetectionResult _current_result;

	// NOTE(review): presumably populates _valid_devices with the devices
	// supported by the given engine — confirm against the definition.
	void getDeviceList(const char *engine_type);
	// Converts the media source into a flat input tensor of element type T,
	// following the model's input meta information (size, normalization, ...).
	void preprocess(mv_source_h &mv_src, std::shared_ptr<MetaInfo> metaInfo, std::vector<T> &inputVector);
	// Returns the meta information describing the model's input tensor.
	std::shared_ptr<MetaInfo> getInputMetaInfo();

	// Inference-engine wrapper that owns the loaded model and runs it.
	std::unique_ptr<mediavision::inference::Inference> _inference;
	// Engine/model/meta configuration shared with the layer that created us.
	std::shared_ptr<MachineLearningConfig> _config;
	// Class labels loaded from the label file.
	std::vector<std::string> _labels;
	// Names of inference backends (engines) available to this task.
	std::vector<std::string> _valid_backends;
	// Names of devices valid for the selected backend.
	std::vector<std::string> _valid_devices;
	// Image-to-tensor preprocessing helper used by preprocess().
	Preprocess _preprocess;

	// Collects the model's output tensor names into 'names'.
	void getOutputNames(std::vector<std::string> &names);
	// Copies the output tensor called 'target_name' into 'tensor' as floats.
	void getOutputTensor(std::string target_name, std::vector<float> &tensor);
	// Runs one inference pass over the given input tensors (one vector per
	// input layer).
	void inference(std::vector<std::vector<T> > &inputVectors);
	// Decodes the raw model output into an ObjectDetectionResult; each
	// concrete task (derived class) provides its own decoding.
	virtual ObjectDetectionResult &result() = 0;

	// Binds the task type and configuration. NOTE(review): model loading
	// appears to happen later in configure()/prepare(), not here — confirm.
	explicit ObjectDetection(ObjectDetectionTaskType task_type, std::shared_ptr<MachineLearningConfig> config);
	virtual ~ObjectDetection() = default;

	// Shuts down the async manager (if one was started) before destruction.
	void preDestroy() override;
	// Returns the task type passed at construction.
	ObjectDetectionTaskType getTaskType() override;
	// Overrides the model/meta/label file paths from the default ones.
	// Not part of IObjectDetection (no 'override').
	void setUserModel(std::string model_file, std::string meta_file, std::string label_file);
	// Selects the inference backend and target device by name.
	void setEngineInfo(std::string engine_type_name, std::string device_type_name) override;
	// Reports how many inference engines are available.
	void getNumberOfEngines(unsigned int *number_of_engines) override;
	// Returns the name of the engine at 'engine_index'.
	// NOTE(review): ownership of the returned string is not visible here —
	// document whether the caller must free '*engine_type'.
	void getEngineType(unsigned int engine_index, char **engine_type) override;
	// Reports how many devices the given engine supports.
	void getNumberOfDevices(const char *engine_type, unsigned int *number_of_devices) override;
	// Returns the name of the device at 'device_index' for the given engine.
	void getDeviceType(const char *engine_type, const unsigned int device_index, char **device_type) override;
	// Applies _config to the inference engine (backend/device/model setup).
	void configure() override;
	// Finalizes engine setup so perform()/performAsync() can run.
	void prepare() override;
	// Synchronous one-shot inference on the given media source.
	void perform(mv_source_h &mv_src) override;
	// Queues an asynchronous inference request; results are fetched later
	// via getOutput().
	void performAsync(ObjectDetectionInput &input) override;
	// Returns the (possibly newly produced) detection result.
	ObjectDetectionResult &getOutput() override;
	// Returns the cached result without running inference again.
	// NOTE(review): exact sync/async cache semantics vs getOutput() are not
	// visible in this excerpt — confirm against the definitions.
	ObjectDetectionResult &getOutputCache() override;