/**
 * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __INFERENCE_ENGINE_INPUTMETADATA_H__
#define __INFERENCE_ENGINE_INPUTMETADATA_H__

#include <string>
#include <vector>
#include <map>

#include <dlog.h>
#include "inference_engine_private_type.h"
#include "inference_engine_type.h"
#include "inference_engine_error.h"

#include <json-glib/json-glib.h>

/**
 * @file InputMetadata.h
 * @brief This file contains the InputMetadata class definition, which
 *        provides the input metadata of a model.
 */

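/*
 * Illustrative sketch, not part of the original header: one possible layout of
 * the input metadata JSON this class consumes. Every key name below is an
 * assumption inferred from the parser hooks in this header (GetTensorInfo(),
 * GetPreProcess()) and from the Options/LayerInfo fields, not a confirmed
 * schema.
 *
 *   {
 *     "tensor_info" : [
 *       {
 *         "name" : "input", "shape_type" : "NHWC", "shape_dims" : [ 1, 224, 224, 3 ],
 *         "data_type" : "FLOAT32", "color_space" : "RGB888"
 *       }
 *     ],
 *     "preprocess" : [
 *       {
 *         "normalization" : { "mean" : [ 127.5, 127.5, 127.5 ], "std" : [ 127.5, 127.5, 127.5 ] },
 *         "quantization" : { "scale" : [ 1.0 ], "zeropoint" : [ 0.0 ] }
 *       }
 *     ]
 *   }
 */
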
namespace InferenceEngineInterface
{
namespace Cltuner
{
	class Options
	{
	public:
		class Normalization
		{
		public:
			bool use;
			std::vector<double> mean;
			std::vector<double> std;

			Normalization() : use(false) {}
			~Normalization() = default;
		};

		class Quantization
		{
		public:
			bool use;
			std::vector<double> scale;
			std::vector<double> zeropoint;

			Quantization() : use(false) {}
			~Quantization() = default;
		};

		Normalization normalization;
		Quantization  quantization;

		Options() = default;
		~Options() = default;
	};

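	/*
	 * Illustrative sketch, not part of the original header: how an Options
	 * instance might be applied to a single input value, assuming the
	 * conventional per-channel formulas. The helper name is hypothetical, and
	 * the quantization direction actually used by the engine may differ.
	 *
	 *   static float PreProcessValue(const Options &opt, float value, size_t ch)
	 *   {
	 *       if (opt.normalization.use)
	 *           value = (value - opt.normalization.mean[ch]) / opt.normalization.std[ch];
	 *
	 *       // shown as affine quantization: q = v / scale + zeropoint
	 *       if (opt.quantization.use)
	 *           value = value / opt.quantization.scale[ch] + opt.quantization.zeropoint[ch];
	 *
	 *       return value;
	 *   }
	 */
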
	class LayerInfo
	{
	public:
		std::string name;
		std::vector<int> dims;
		inference_colorspace_e colorSpace;
		inference_tensor_data_type_e dataType;
		inference_tensor_shape_type_e shapeType; // TODO: define mv_inference_shape_type_e

		LayerInfo() = default;
		~LayerInfo() = default;

		int GetWidth() const;
		int GetHeight() const;
		int GetChannel() const;
	};

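	/*
	 * Illustrative sketch, not part of the original header: how the getters
	 * above might interpret dims, assuming 4-D { N, C, H, W } (NCHW) or
	 * { N, H, W, C } (NHWC) layouts and assuming the shape-type enumerators
	 * are named INFERENCE_TENSOR_SHAPE_NCHW / _NHWC. The real definitions
	 * live in the corresponding source file and may differ.
	 *
	 *   int LayerInfo::GetWidth() const
	 *   {
	 *       return (shapeType == INFERENCE_TENSOR_SHAPE_NCHW) ? dims[3] : dims[2];
	 *   }
	 *
	 *   int LayerInfo::GetHeight() const
	 *   {
	 *       return (shapeType == INFERENCE_TENSOR_SHAPE_NCHW) ? dims[2] : dims[1];
	 *   }
	 *
	 *   int LayerInfo::GetChannel() const
	 *   {
	 *       return (shapeType == INFERENCE_TENSOR_SHAPE_NCHW) ? dims[1] : dims[3];
	 *   }
	 */
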
	class InputMetadata
	{
	public:
		bool parsed;
		std::map<std::string, LayerInfo> layer;
		std::map<std::string, Options> option;

		/**
		 * @brief   Creates an InputMetadata class instance.
		 *
		 * @since_tizen 6.5
		 */
		InputMetadata();

		/**
		 * @brief   Destroys an InputMetadata class instance including
		 *          all its resources.
		 *
		 * @since_tizen 6.5
		 */
		~InputMetadata() = default;

		/**
		 * @brief   Parses input metadata from the given JSON object.
		 *
		 * @since_tizen 6.5
		 */
		int Parse(JsonObject *root);

	private:
		std::map<std::string, inference_tensor_shape_type_e> mSupportedShapeType;
		std::map<std::string, inference_tensor_data_type_e> mSupportedDataType;
		std::map<std::string, inference_colorspace_e> mSupportedColorSpace;

		template <typename T>
		static T GetSupportedType(JsonObject* root, std::string typeName,
								  std::map<std::string, T>& supportedTypes);
		int GetTensorInfo(JsonObject* root);
		int GetPreProcess(JsonObject* root);
	};

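	/*
	 * Illustrative usage sketch, not part of the original header: loading a
	 * metadata file with json-glib and handing its root object to Parse().
	 * The file path is hypothetical, and whether Parse() expects the whole
	 * document root or a nested sub-object is an assumption here.
	 *
	 *   InputMetadata metadata;
	 *   GError *error = nullptr;
	 *   JsonParser *parser = json_parser_new();
	 *
	 *   if (json_parser_load_from_file(parser, "/path/to/input_meta.json", &error)) {
	 *       JsonObject *root = json_node_get_object(json_parser_get_root(parser));
	 *       int ret = metadata.Parse(root);
	 *       // on success, metadata.layer and metadata.option are populated
	 *   } else {
	 *       g_error_free(error);
	 *   }
	 *
	 *   g_object_unref(parser);
	 */
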
} /* Cltuner */
} /* InferenceEngineInterface */

#endif /* __INFERENCE_ENGINE_INPUTMETADATA_H__ */