inference-engine/src/cldnn_engine/cldnn_graph.h
// Copyright (C) 2018 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include <vector>
#include <map>
#include <set>
#include <memory>
#include <string>
#include "ie_blob.h"
#include "ie_plugin.hpp"
#include "cpp/ie_cnn_network.h"
#include "debug_options.h"
#include "inference_engine.hpp"
#include <CPP/network.hpp>
#include <CPP/memory.hpp>
#include <CPP/primitive.hpp>
#include <CPP/topology.hpp>
#include <CPP/pooling.hpp>
#include <CPP/eltwise.hpp>
#include <CPP/concatenation.hpp>
#include <CPP/detection_output.hpp>
#include <CPP/softmax.hpp>
#include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>
#include <CPP/upsampling.hpp>
#include "cldnn_custom_layer.h"

namespace CLDNNPlugin {

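// Shared state of a compiled clDNN network: the clDNN engine, the compiled
// network(s), mappings from IR layer names to clDNN primitive IDs, profiling
// and debug data, and the per-batch-size networks used for dynamic batching.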
struct InferenceEnv {
    std::shared_ptr<const cldnn::engine> engine;
    std::shared_ptr<cldnn::network> network;
    std::map<std::string, cldnn::primitive_id> primitiveIDs;
    std::map<std::string, std::vector<cldnn::primitive_id>> prevPrimitiveIDs;
    std::map<std::string, InferenceEngine::InferenceEngineProfileInfo> perfMap;
    std::set<cldnn::primitive_id> profilingIDs;

    DebugOptions debugOptions;

    std::map<std::string, InferenceEngine::SizeVector> outputDims;
    std::map<std::string, cldnn::layout> inputLayouts;

    std::vector<std::shared_ptr<cldnn::network>> batchNetworks;
    int m_max_batch;
    int m_bv_sz;
};

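// Executable network of the clDNN (GPU) plugin: builds a cldnn::topology from an
// InferenceEngine::ICNNNetwork, compiles it, and creates infer requests for it.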
class CLDNNGraph : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
public:
    typedef std::shared_ptr<CLDNNGraph> Ptr;
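    // Plugin configuration assembled from the key/value options passed to the
    // plugin (see LoadFromMap): profiling, custom kernel dumping, dynamic batch,
    // memory pool usage, queue priority/throttling, custom layers, and tuning.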
    struct Config {
        Config() : useProfiling(false), dumpCustomKernels(false), exclusiveAsyncRequests(false),
            memory_pool_on(true),
            enableDynamicBatch(false),
            queuePriority(cldnn::priority_mode_types::disabled),
            queueThrottle(cldnn::throttle_mode_types::disabled) {}

        void LoadFromMap(const std::map<std::string, std::string>& configMap);

        bool enableDynamicBatch;
        bool useProfiling;
        bool dumpCustomKernels;
        bool exclusiveAsyncRequests;
        bool memory_pool_on;
        cldnn::priority_mode_types queuePriority;
        cldnn::throttle_mode_types queueThrottle;
        CLDNNCustomLayerMap customLayers;
        cldnn::tuning_config_options tuningConfig;
        std::string graph_dumps_dir;
        std::string sources_dumps_dir;
    };
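    // Builds the clDNN topology from the given ICNNNetwork and compiles it;
    // max_batch is used when dynamic batching is enabled.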
    explicit CLDNNGraph(InferenceEngine::ICNNNetwork &network, const Config& config = {}, int max_batch = -1);

    InferenceEngine::InferRequestInternal::Ptr
    CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs, InferenceEngine::OutputsDataMap networkOutputs) override;

    static bool IsLayerSupported(const std::string &type) {
        return LayerTypeFromStr(type) != NO_TYPE;
    }

protected:
    // graph members
    std::shared_ptr<cldnn::topology> m_topology;
    InferenceEnv m_env;
    Config m_config;

    InferenceEngine::InputsDataMap*  p_currentInputs;
    InferenceEngine::OutputsDataMap* p_currentOutputs;
    int m_curBatch;
    static const cldnn::primitive_id m_preProcessTag;
    static const cldnn::primitive_id m_weightsTag;
    static const cldnn::primitive_id m_biasesTag;
    static const cldnn::primitive_id m_meanValuesTag;
    static const cldnn::primitive_id m_postProcessTag;
    static const cldnn::primitive_id m_scalesTag;
    static const cldnn::primitive_id m_workaroundTag;
    static const cldnn::primitive_id m_preCustomLayerTag;
    static const cldnn::primitive_id m_postCustomLayerTag;

    // internal types
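    // Layer types recognized by the plugin; LayerTypeFromStr() maps IR layer
    // type strings to these values, with NO_TYPE marking unsupported layers.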
    enum LayerType {
        Convolution,
        ReLU,
        ReLU6,
        Sigmoid,
        TanH,
        ELU,
        Activation,
        LRN,
        Pooling,
        FullyConnected,
        SoftMax,
        Power,
        Split,
        Concatenate,
        Eltwise,
        SimplerNMS,
        ROIPooling,
        Crop,
        Deconvolution,
        PriorBox,
        DetectionOutput,
        Normalize,
        Reshape,
        Permute,
        Flatten,
        BatchNormalization,
        PReLU,
        ScaleShift,
        Proposal,
        PSROIPooling,
        Clamp,
        Copy,
        Upsampling,
        Resample,
        RegionYolo,
        ReorgYolo,
        ConstantBlob,
        ArgMax,
        MVN,
        Unpooling,
        Tile,
        Pad,
        LSTMCell,
        RNN,
        NO_TYPE
    };

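    // How a weight blob is rearranged while being uploaded into a clDNN memory
    // primitive (see CreatePrimitiveFromBlob).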
    enum WeightRearrangeType {
        BroadcastFeatures,
        FlipDeconvDims,
        NO_REARRANGE
    };

    cldnn::format m_defaultFormat;
    cldnn::data_types m_networkPrecision;
    void InitFormat(InferenceEngine::ICNNNetwork &network);

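    // Converters from Inference Engine types and strings to their clDNN counterparts.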
    static cldnn::data_types DataTypeFromPrecision(InferenceEngine::Precision p);
    static cldnn::format     FormatFromLayout(InferenceEngine::Layout l);
    static cldnn::upsampling_sample_type UpsamplingTypeFromString(const std::string& str);

    void Load(InferenceEngine::ICNNNetwork &network);
    static LayerType LayerTypeFromStr(const std::string& str);
    static cldnn::pooling_mode PoolingModeFromIEPooling(InferenceEngine::PoolingLayer::PoolType pt, bool excludePadding = false);
    static cldnn::eltwise_mode EltwiseModeFromIEEltwise(InferenceEngine::EltwiseLayer::eOperation op);
    static cldnn::concatenation::concatenation_axis ConcatAxisFromIEAxis(unsigned axis);
    static cldnn::prior_box_code_type PriorBoxCodeFromString(const std::string& str);
    static cldnn::softmax::dimension_t SoftmaxDimensionFromIEAxis(const InferenceEngine::SoftMaxLayer* softmaxLayer, bool isPrevFC = false);
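    // Helpers that turn IR blobs (weights, biases, batch-norm statistics) into
    // clDNN data primitives and create input/output/pre-process primitives.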
    void CreatePrimitiveFromBlob(cldnn::primitive_id primID,
                                 const InferenceEngine::Blob::Ptr pBlob,
                                 cldnn::layout blobLayout,
                                 size_t blobByteOffset = 0,
                                 WeightRearrangeType rearrange = NO_REARRANGE);
    void CreateWeightAndBiasPrimitives(const InferenceEngine::CNNLayerPtr& layer,
                                       std::vector<cldnn::primitive_id>& weightsPrimID,
                                       std::vector<cldnn::primitive_id>& biasesPrimID);
    void CreateScaleWeightsAndBiasesFromBN(const InferenceEngine::BatchNormalizationLayer* bnLayer,
                                           cldnn::primitive_id weightsPrimID,
                                           cldnn::primitive_id biasesPrimID);
    void AddPreProcessPrimitive(InferenceEngine::InputInfo::Ptr inputInfo);
    void AddInputPrimitive(InferenceEngine::InputInfo::Ptr inputInfo);
    void AddOutputPrimitive(std::string outputName, const InferenceEngine::DataPtr outputData,
                            InferenceEngine::Precision outputPrecision = InferenceEngine::Precision::UNSPECIFIED);
    void CreateSingleLayerPrimitive(InferenceEngine::CNNLayerPtr& layer);
    bool IsValidSplitConvMerge(const InferenceEngine::SplitLayer* splitLayer) const;
    bool CanProcessDynBatch(InferenceEngine::ICNNNetwork &network) const;
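    // IR graph traversal helpers.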
    static std::vector<InferenceEngine::CNNLayerPtr> GetNextLayers(const InferenceEngine::DataPtr data);
    static std::vector<InferenceEngine::CNNLayerPtr> GetNextLayers(const InferenceEngine::CNNLayerPtr layer);
    static InferenceEngine::CNNLayerPtr GetNextSingleLayer(const InferenceEngine::DataPtr data);
    static InferenceEngine::CNNLayerPtr GetNextSingleLayer(const InferenceEngine::CNNLayerPtr layer);
    std::vector<cldnn::primitive_id> GetPrevLayersPrimitives(const InferenceEngine::CNNLayerPtr layer) const;
    void AddSingleValuePrimitive(cldnn::primitive_id valPrimID, cldnn::data_types dataType, float value);

    void CreateGenericLayerBlobPrimitives(const InferenceEngine::GenericLayer* layer);
    static void ValidateGenericLayerBlobs(const InferenceEngine::GenericLayer* layer, const std::vector<std::string>& blobNames);
    static cldnn::tensor CldnnTensorFromIEDims(const InferenceEngine::SizeVector& dims);
    static bool HasParam(const std::map<std::string, std::string>& layerParams, std::string paramName) {
        auto p = layerParams.find(paramName);
        return p != layerParams.end();
    }

    void InitProfileInfo(const std::string& layerName,
                         const std::string& layerType,
                         const std::string& execType,
                         InferenceEngine::InferenceEngineProfileInfo::LayerStatus status);
    void changeInputBatch(size_t batch);
    void CompileNetwork();

    // Layer Primitive Creators
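    // Each creator translates one IR layer into clDNN primitives and adds them
    // to m_topology; CreateSingleLayerPrimitive() dispatches to them by LayerType.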
    void CreatePReLUPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateBatchNormalizationPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateFlattenPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreatePermutePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateReshapePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateNormalizePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateDetectionOutputPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreatePriorBoxPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateDeconvolutionPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateCropPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateROIPoolingPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateSimplerNMSPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateEltwisePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateConcatenatePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateSplitPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateFusedSplitConvMergePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreatePowerPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateSoftMaxPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateFullyConnectedPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreatePoolingPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateLRNPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateActivationPrimitive(InferenceEngine::CNNLayerPtr &layer, const LayerType type);
    void CreateConvolutionPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateScaleShiftPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateProposalPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreatePSROIPoolingPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateCopyPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateUpsamplingPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateResamplePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateYOLO2RegionPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateYOLO2ReorgPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateArgMaxPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateMaxUnpoolingPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateMVNPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateTilePrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreatePadPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateRNNPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void CreateLSTMCellPrimitive(InferenceEngine::CNNLayerPtr &layer);
    void AddConstantBlobInput(InferenceEngine::CNNLayerPtr &layer);
    void CreateCustomLayerPrimitive(InferenceEngine::CNNLayerPtr &layer, CLDNNCustomLayerPtr customLayer);
};

}  // namespace CLDNNPlugin