1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
#include <algorithm>
#include <map>
#include <memory>
#include <string>
#include <utility>

#include <blob_factory.hpp>
#include <ie_input_info.hpp>
#include <ie_icnn_network.hpp>
#include "cpp_interfaces/interface/ie_iinfer_request_internal.hpp"
#include "cpp_interfaces/exception2status.hpp"
#include "ie_preprocess_data.hpp"
#include "ie_memcpy.h"
#include "ie_compound_blob.h"
21 namespace InferenceEngine {
23 class ExecutableNetworkInternal;
25 typedef std::shared_ptr<ExecutableNetworkInternal> ExecutableNetworkInternalPtr;
28 * @brief optional implementation of IInferRequestInternal to avoid duplication in all plugins
30 class InferRequestInternal : virtual public IInferRequestInternal {
32 typedef std::shared_ptr<InferRequestInternal> Ptr;
34 InferRequestInternal(const InputsDataMap &networkInputs, const OutputsDataMap &networkOutputs)
36 // We should copy maps in order to avoid modifications in the future.
37 for (const auto &it : networkInputs) {
38 InputInfo::Ptr newPtr;
40 newPtr.reset(new InputInfo());
41 DataPtr newData(new Data(*it.second->getInputData()));
42 newPtr->getPreProcess() = it.second->getPreProcess();
43 if (newPtr->getPreProcess().getMeanVariant() == MEAN_IMAGE) {
44 for (size_t i = 0; i < newPtr->getPreProcess().getNumberOfChannels(); i++) {
45 auto blob = newPtr->getPreProcess()[i]->meanData;
46 newPtr->getPreProcess()[i]->meanData =
47 make_blob_with_precision(newPtr->getPreProcess()[i]->meanData->getTensorDesc());
48 newPtr->getPreProcess()[i]->meanData->allocate();
49 ie_memcpy(newPtr->getPreProcess()[i]->meanData->buffer(), newPtr->getPreProcess()[i]->meanData->byteSize(),
50 blob->cbuffer(), blob->byteSize());
53 newData->getInputTo().clear();
54 newPtr->setInputData(newData);
56 _networkInputs[it.first] = newPtr;
59 for (const auto &it : networkOutputs) {
62 newData.reset(new Data(*it.second));
63 newData->getInputTo().clear();
65 _networkOutputs[it.first] = newData;
70 * @brief The minimal infer function to be implemented by plugins. It infers specified input(s) in synchronous mode
71 * @note blocks all method of IInferRequest while request is ongoing (running or waiting in queue)
73 virtual void InferImpl() = 0;
76 * @brief Default common implementation for all plugins with checking input and output blobs before inference
78 void Infer() override {
84 * @brief Given optional implementation of setting blob to avoid need for it to be implemented by plugin
85 * @param name - a name of input or output blob.
86 * @param data - a reference to input or output blob. The type of Blob must correspond to the network input precision and size.
88 void SetBlob(const char *name, const Blob::Ptr &data) override {
89 IE_PROFILING_AUTO_SCOPE(SetBlob)
90 if (name == nullptr) {
91 THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name";
94 THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'";
95 const bool compoundBlobPassed = data->is<CompoundBlob>();
96 if (!compoundBlobPassed && data->buffer() == nullptr)
97 THROW_IE_EXCEPTION << "Input data was not allocated. Input name: \'" << name << "\'";
98 if (data->size() == 0) {
99 THROW_IE_EXCEPTION << "Input data is empty. Input name: \'" << name << "\'";
102 InputInfo::Ptr foundInput;
104 size_t dataSize = data->size();
105 if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
106 if (foundInput->getPrecision() != data->getTensorDesc().getPrecision()) {
107 THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str
108 << "Failed to set Blob with precision not corresponding to user input precision";
111 const bool preProcRequired = preProcessingRequired(foundInput, data);
112 if (compoundBlobPassed && !preProcRequired) {
113 THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
114 << "cannot set compound blob: supported only for input pre-processing";
117 if (preProcRequired) {
118 _preProcData[name] = CreatePreprocDataHelper();
119 _preProcData[name]->isApplicable(data, _inputs[name]);
120 // Stores the given blob as ROI blob. It will be used to fill in network input
121 // during pre-processing
122 _preProcData[name]->setRoiBlob(data);
124 size_t inputSize = details::product(foundInput->getTensorDesc().getDims());
125 if (dataSize != inputSize) {
126 THROW_IE_EXCEPTION << "Input blob size is not equal network input size ("
127 << dataSize << "!=" << inputSize << ").";
129 _inputs[name] = data;
132 if (compoundBlobPassed) {
133 THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str
134 << "cannot set compound blob: supported only for input pre-processing";
136 size_t outputSize = details::product(foundOutput->getDims());
137 if (dataSize != outputSize) {
138 THROW_IE_EXCEPTION << "Output blob size is not equal network output size ("
139 << dataSize << "!=" << outputSize << ").";
141 if (foundOutput->getPrecision() != data->getTensorDesc().getPrecision()) {
142 THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str
143 << "Failed to set Blob with precision not corresponding to user output precision";
145 _outputs[name] = data;
150 * @brief Given optional implementation of getting blob to avoid need for it to be implemented by plugin
151 * @param name - a name of input or output blob.
152 * @param data - a reference to input or output blob. The type of Blob must correspond to the network input precision and size.
153 * @note if ROI blob was previously set it is returned (without dimensions checks) instead of default blob.
155 void GetBlob(const char *name, Blob::Ptr &data) override {
156 IE_PROFILING_AUTO_SCOPE(GetBlob)
157 InputInfo::Ptr foundInput;
159 if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
160 // ROI blob is returned only if it was set previously. Otherwise default blob is returned.
161 auto it = _preProcData.find(name);
162 if (it != _preProcData.end()) {
163 data = it->second->getRoiBlob();
165 data = _inputs[name];
166 checkBlob(data, name, true, foundInput->getTensorDesc().getDims());
169 data = _outputs[name];
170 checkBlob(data, name, false, foundOutput->getTensorDesc().getDims());
174 void setPointerToExecutableNetworkInternal(ExecutableNetworkInternalPtr exeNetwork) {
175 _exeNetwork = exeNetwork;
178 void checkBlobs() const {
179 for (auto const &input : _inputs) {
180 checkBlob(input.second, input.first, true);
182 for (auto const &output : _outputs) {
183 checkBlob(output.second, output.first, false);
187 void SetBatch(int batch) override {
188 THROW_IE_EXCEPTION << "Dynamic batch is not supported";
192 * @brief Checks and executes input data pre-processing if needed.
194 void execDataPreprocessing(InferenceEngine::BlobMap& inputs, bool serial = false) {
195 for (auto &input : inputs) {
196 // If there is a pre-process entry for an input then it must be pre-processed
197 // using preconfigured resize algorithm.
198 auto it = _preProcData.find(input.first);
199 if (it != _preProcData.end()) {
200 _preProcData[input.first]->execute(input.second,
201 _networkInputs[input.first]->getPreProcess(),
209 InferenceEngine::InputsDataMap _networkInputs;
210 InferenceEngine::OutputsDataMap _networkOutputs;
211 InferenceEngine::BlobMap _inputs;
212 InferenceEngine::BlobMap _outputs;
213 ExecutableNetworkInternalPtr _exeNetwork;
214 std::map<std::string, PreProcessDataPtr> _preProcData; // pre-process data per input
215 int m_curBatch; // current batch value used in dynamic batching
219 * @brief helper to find input or output blob by name
220 * @param name - a name of input or output blob.
221 * @return true - if loaded network has input with provided name,
222 * false - if loaded network has output with provided name
223 * @throws [parameter_mismatch] exception if input and output has the same name
224 * @throws [not_found] exception if there is no input and output layers with given name
226 bool findInputAndOutputBlobByName(const char *name, InputInfo::Ptr &foundInput, DataPtr &foundOutput) const {
227 foundInput = nullptr;
228 foundOutput = nullptr;
229 if (_networkInputs.empty() || _networkOutputs.empty()) {
230 THROW_IE_EXCEPTION << "Internal error: network inputs and outputs is not set";
232 auto foundInputPair = std::find_if(std::begin(_networkInputs),
233 std::end(_networkInputs),
234 [&](const std::pair<std::string, InputInfo::Ptr> &pair) {
235 return pair.first == name;
237 auto foundOutputPair = std::find_if(std::begin(_networkOutputs),
238 std::end(_networkOutputs),
239 [&](const std::pair<std::string, DataPtr> &pair) {
240 return pair.first == name;
242 if (foundOutputPair == std::end(_networkOutputs) && (foundInputPair == std::end(_networkInputs))) {
243 THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input or output with name: \'" << name << "\'";
245 if (foundInputPair != std::end(_networkInputs)) {
246 foundInput = foundInputPair->second;
249 foundOutput = foundOutputPair->second;
254 void checkBlob(const Blob::Ptr &blob, const std::string &name, bool isInput, const SizeVector& refDims = {}) const {
255 std::string bType = isInput ? "Input" : "Output";
256 std::string sType = isInput ? "input" : "output";
257 std::string strNotAllocated(bType + " data was not allocated.");
258 std::string strNotMatched("The " + sType + " blob size is not equal to the network " + sType + " size");
260 if (!blob) THROW_IE_EXCEPTION << strNotAllocated;
262 if (refDims.empty()) {
265 auto foundInputPair = std::find_if(std::begin(_networkInputs),
266 std::end(_networkInputs),
267 [&](const std::pair<std::string, InputInfo::Ptr>& pair) {
268 return pair.first == name;
270 if (foundInputPair == std::end(_networkInputs)) {
271 THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input with name: \'" << name << "\'";
273 dims = foundInputPair->second->getTensorDesc().getDims();
275 auto foundOutputPair = std::find_if(std::begin(_networkOutputs),
276 std::end(_networkOutputs),
277 [&](const std::pair<std::string, DataPtr>& pair) {
278 return pair.first == name;
280 if (foundOutputPair == std::end(_networkOutputs)) {
281 THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find output with name: \'" << name << "\'";
283 dims = foundOutputPair->second->getTensorDesc().getDims();
285 refSize = details::product(dims);
287 refSize = details::product(refDims);
290 if (refSize != blob->size()) {
291 THROW_IE_EXCEPTION << strNotMatched + ": got " << blob->size() << " expecting " << refSize;
293 if (blob->buffer() == nullptr) THROW_IE_EXCEPTION << strNotAllocated;
297 * @brief helper to decide whether pre-processing is required
298 * @param info InputInfo corresponding to input blob
299 * @param blob input Blob object corresponding to input info
300 * @return true if pre-processing is required, false otherwise
302 bool preProcessingRequired(const InputInfo::Ptr& info, const Blob::Ptr& blob) {
303 // pre-processing is required if:
304 // 1. resize algorithm is specified (resize required)
305 // 2. color format specified:
306 // 2.a. color format is not equal to network's expected (color conversion required)
307 // 2.b. network's layout != blob's layout (reorder required)
308 const auto& preProcessInfo = info->getPreProcess();
309 const auto inputColorFormat = preProcessInfo.getColorFormat();
310 // FIXME: support other network's input formats once the API is ready. Assuming input is in
311 // the BGR format by default
312 const auto networkColorFormat = ColorFormat::BGR;
314 const bool colorFormatSpecified = inputColorFormat != ColorFormat::RAW;
315 return preProcessInfo.getResizeAlgorithm() != ResizeAlgorithm::NO_RESIZE
316 || (colorFormatSpecified && inputColorFormat != networkColorFormat)
317 || (colorFormatSpecified && info->getLayout() != blob->getTensorDesc().getLayout());
321 } // namespace InferenceEngine