Fix bugs
[platform/core/multimedia/inference-engine-interface.git] / tools / include / InputMetadata.h
1 /**
2  * Copyright (c) 2021 Samsung Electronics Co., Ltd All Rights Reserved
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16
17 #ifndef __INFERENCE_ENGINE_INPUTMETADATA_H__
18 #define __INFERENCE_ENGINE_INPUTMETADATA_H__
19
20 #include <string>
21 #include <vector>
22 #include <map>
23
24 #include <dlog.h>
25 #include "inference_engine_private_type.h"
26 #include "inference_engine_type.h"
27 #include "inference_engine_error.h"
28
29 #include <json-glib/json-glib.h>
30
31 /**
32  * @file InputMetadata.h
33  * @brief This file contains the metadata class definition which
34  *        provides metadata of a model.
35  */
36
37 namespace InferenceEngineInterface
38 {
39 namespace Cltuner
40 {
41         class Options
42         {
43         public:
44                 class Normalization
45                 {
46                 public:
47                         bool use;
48                         std::vector<double> mean;
49                         std::vector<double> std;
50
51                         Normalization() : use(false) {}
52                         ~Normalization() = default;
53                 };
54
55                 class Quantization
56                 {
57                 public:
58                         bool use;
59                         std::vector<double> scale;
60                         std::vector<double> zeropoint;
61
62                         Quantization() : use(false) {};
63                         ~Quantization() = default;
64                 };
65
66                 Normalization normalization;
67                 Quantization  quantization;
68
69                 Options() = default;
70                 ~Options() = default;
71         };
72
73         class LayerInfo
74         {
75         public:
76
77                 std::string name;
78                 std::vector<int> dims;
79                 inference_colorspace_e colorSpace;
80                 inference_tensor_data_type_e dataType;
81                 inference_tensor_shape_type_e shapeType; // TODO: define mv_inference_shape_type_e
82
83                 LayerInfo() :
84                         name(),
85                         dims(),
86                         colorSpace(INFERENCE_COLORSPACE_INVALID),
87                         dataType(INFERENCE_TENSOR_DATA_TYPE_NONE),
88                         shapeType(INFERENCE_TENSOR_SHAPE_NCHW) {};
89                 ~LayerInfo() = default;
90
91                 int GetWidth() const;
92                 int GetHeight() const;
93                 int GetChannel() const;
94         };
95
	/**
	 * @brief Parsed input-side metadata of a model: per-layer tensor
	 *        information and pre-processing options, filled in by Parse().
	 */
	class InputMetadata
	{
	public:
		// True once Parse() has successfully consumed the metadata;
		// presumably set by Parse() — TODO confirm against the .cpp.
		bool parsed;
		// Per-layer tensor information, keyed by layer name.
		std::map<std::string, LayerInfo> layer;
		// Per-layer pre-processing options, keyed by layer name.
		std::map<std::string, Options> option;

		/**
		 * @brief   Creates an InputMetadata class instance.
		 *
		 * @since_tizen 6.5
		 */
		InputMetadata();

		/**
		 * @brief   Destroys an InputMetadata class instance including
		 *          its all resources.
		 *
		 * @since_tizen 6.5
		 */
		~InputMetadata() = default;

		/**
		 * @brief Parses an InputMetadata
		 *
		 * @since_tizen 6.5
		 */
		int Parse(JsonObject *root);

	private:
		// Lookup tables mapping metadata string values to engine enums;
		// presumably populated by the constructor (defined out-of-line).
		std::map<std::string, inference_tensor_shape_type_e> mSupportedShapeType;
		std::map<std::string, inference_tensor_data_type_e> mSupportedDataType;
		std::map<std::string, inference_colorspace_e> mSupportedColorSpace;

		// Resolves the JSON member named typeName of root against the
		// given lookup table and returns the matching enum value.
		template <typename T>
		static T GetSupportedType(JsonObject* root, std::string typeName,
										std::map<std::string, T>& supportedTypes);
		// Parses tensor shape/type information (fills `layer`).
		int GetTensorInfo(JsonObject* root);
		// Parses normalization/quantization options (fills `option`).
		int GetPreProcess(JsonObject* root);

	};
137
} /* Cltuner */
} /* InferenceEngineInterface */
140
141 #endif /* __INFERENCE_ENGINE_INPUTMETADATA_H__ */