1 // Copyright (C) 2018-2019 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
11 #include <blob_factory.hpp>
12 #include <ie_input_info.hpp>
13 #include <ie_icnn_network.hpp>
14 #include "cpp_interfaces/interface/ie_iinfer_request_internal.hpp"
16 #include "cpp_interfaces/exception2status.hpp"
17 #include "ie_preprocess_data.hpp"
18 #include "ie_memcpy.h"
20 namespace InferenceEngine {
class ExecutableNetworkInternal;

/**
 * @brief Shared-pointer alias to the executable network implementation owning requests.
 * @note modernized from `typedef` to a C++11 alias declaration (the file already uses C++11).
 */
using ExecutableNetworkInternalPtr = std::shared_ptr<ExecutableNetworkInternal>;
27 * @brief optional implementation of IInferRequestInternal to avoid duplication in all plugins
class InferRequestInternal : virtual public IInferRequestInternal {
    // NOTE(review): an access specifier (presumably `public:`) is sampled out of this
    // chunk before the members below — confirm against the full file.
    // Convenience shared-pointer alias for this request type.
    typedef std::shared_ptr<InferRequestInternal> Ptr;
33 InferRequestInternal(InputsDataMap networkInputs, OutputsDataMap networkOutputs)
35 // We should copy maps in order to avoid modifications in the future.
36 for (const auto &it : networkInputs) {
37 InputInfo::Ptr newPtr;
39 newPtr.reset(new InputInfo());
40 DataPtr newData(new Data(*it.second->getInputData()));
41 newPtr->getPreProcess() = it.second->getPreProcess();
42 if (newPtr->getPreProcess().getMeanVariant() == MEAN_IMAGE) {
43 for (size_t i = 0; i < newPtr->getPreProcess().getNumberOfChannels(); i++) {
44 auto blob = newPtr->getPreProcess()[i]->meanData;
45 newPtr->getPreProcess()[i]->meanData =
46 make_blob_with_precision(newPtr->getPreProcess()[i]->meanData->getTensorDesc());
47 newPtr->getPreProcess()[i]->meanData->allocate();
48 ie_memcpy(newPtr->getPreProcess()[i]->meanData->buffer(), newPtr->getPreProcess()[i]->meanData->byteSize(),
49 blob->cbuffer(), blob->byteSize());
52 newData->inputTo.clear();
53 newPtr->setInputData(newData);
55 _networkInputs[it.first] = newPtr;
58 for (const auto &it : networkOutputs) {
61 newData.reset(new Data(*it.second));
62 newData->inputTo.clear();
64 _networkOutputs[it.first] = newData;
    /**
     * @brief The minimal infer function to be implemented by plugins. It infers specified input(s) in synchronous mode
     * @note blocks all method of IInferRequest while request is ongoing (running or waiting in queue)
     */
    virtual void InferImpl() = 0;
    /**
     * @brief Default common implementation for all plugins with checking input and output blobs before inference
     */
    // NOTE(review): the body of Infer() is sampled out of this chunk; presumably it
    // validates the blobs (e.g. via checkBlobs()) and delegates to InferImpl() —
    // confirm against the full file.
    void Infer() override {
83 * @brief Given optional implementation of setting blob to avoid need for it to be implemented by plugin
84 * @param name - a name of input or output blob.
85 * @param data - a reference to input or output blob. The type of Blob must correspond to the network input precision and size.
87 void SetBlob(const char *name, const Blob::Ptr &data) override {
89 THROW_IE_EXCEPTION << NOT_ALLOCATED_str << "Failed to set empty blob with name: \'" << name << "\'";
90 if (data->buffer() == nullptr)
91 THROW_IE_EXCEPTION << "Input data was not allocated. Input name: \'" << name << "\'";
92 if (name == nullptr) {
93 THROW_IE_EXCEPTION << NOT_FOUND_str + "Failed to set blob with empty name";
95 InputInfo::Ptr foundInput;
97 size_t dataSize = data->size();
98 if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
99 if (foundInput->getInputPrecision() != data->precision()) {
100 THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str
101 << "Failed to set Blob with precision not corresponding to user input precision";
104 if (foundInput->getPreProcess().getResizeAlgorithm() != ResizeAlgorithm::NO_RESIZE) {
105 PreProcessData::isApplicable(data, _inputs[name]);
106 // Stores the given blob as ROI blob. It will be used to fill in network input during pre-processing.
107 _preProcData[name].setRoiBlob(data);
109 size_t inputSize = details::product(foundInput->getDims());
110 if (dataSize != inputSize) {
111 THROW_IE_EXCEPTION << "Input blob size is not equal network input size ("
112 << dataSize << "!=" << inputSize << ").";
114 _inputs[name] = data;
117 size_t outputSize = details::product(foundOutput->getDims());
118 if (dataSize != outputSize) {
119 THROW_IE_EXCEPTION << "Output blob size is not equal network output size ("
120 << dataSize << "!=" << outputSize << ").";
122 if (foundOutput->getPrecision() != data->precision()) {
123 THROW_IE_EXCEPTION << PARAMETER_MISMATCH_str
124 << "Failed to set Blob with precision not corresponding to user output precision";
126 _outputs[name] = data;
131 * @brief Given optional implementation of getting blob to avoid need for it to be implemented by plugin
132 * @param name - a name of input or output blob.
133 * @param data - a reference to input or output blob. The type of Blob must correspond to the network input precision and size.
134 * @note if ROI blob was previously set it is returned (without dimensions checks) instead of default blob.
136 void GetBlob(const char *name, Blob::Ptr &data) override {
137 InputInfo::Ptr foundInput;
139 if (findInputAndOutputBlobByName(name, foundInput, foundOutput)) {
140 // ROI blob is returned only if it was set previously. Otherwise default blob is returned.
141 auto it = _preProcData.find(name);
142 if (it != _preProcData.end()) {
143 data = it->second.getRoiBlob();
145 data = _inputs[name];
146 checkBlob(data, name, true, foundInput->getDims());
149 data = _outputs[name];
150 checkBlob(data, name, false, foundOutput->getDims());
154 void setPointerToExecutableNetworkInternal(ExecutableNetworkInternalPtr exeNetwork) {
155 _exeNetwork = exeNetwork;
158 void checkBlobs() const {
159 for (auto const &input : _inputs) {
160 checkBlob(input.second, input.first, true);
162 for (auto const &output : _outputs) {
163 checkBlob(output.second, output.first, false);
167 void SetBatch(int batch) override {
168 THROW_IE_EXCEPTION << "Dynamic batch is not supported";
    /**
     * @brief Checks and executes input data pre-processing if needed.
     */
    // NOTE(review): the tail of the execute(...) argument list is sampled out of this
    // chunk (it presumably forwards `serial` and the current batch) — confirm in full file.
    void execDataPreprocessing(InferenceEngine::BlobMap& inputs, bool serial = false) {
        for (auto &input : inputs) {
            // If there is a pre-process entry for an input then it must be pre-processed
            // using preconfigured resize algorithm.
            auto it = _preProcData.find(input.first);
            if (it != _preProcData.end()) {
                // NOTE(review): `_preProcData[input.first]` re-looks-up the entry that `it`
                // already refers to; `it->second` would avoid the second map lookup.
                _preProcData[input.first].execute(input.second,
                                                  _networkInputs[input.first]->getPreProcess().getResizeAlgorithm(),
    // Deep copies of the network's I/O descriptions, made by the constructor.
    InferenceEngine::InputsDataMap _networkInputs;
    InferenceEngine::OutputsDataMap _networkOutputs;
    // User-visible blobs keyed by input/output name (populated via SetBlob / read by GetBlob).
    InferenceEngine::BlobMap _inputs;
    InferenceEngine::BlobMap _outputs;
    // Keeps the owning executable network alive while this request exists.
    ExecutableNetworkInternalPtr _exeNetwork;
    std::map<std::string, PreProcessData> _preProcData; // pre-process data per input
    int m_curBatch; // current batch value used in dynamic batching
199 * @brief helper to find input or output blob by name
200 * @param name - a name of input or output blob.
201 * @return true - if loaded network has input with provided name,
202 * false - if loaded network has output with provided name
203 * @throws [parameter_mismatch] exception if input and output has the same name
204 * @throws [not_found] exception if there is no input and output layers with given name
206 bool findInputAndOutputBlobByName(const char *name, InputInfo::Ptr &foundInput, DataPtr &foundOutput) const {
207 foundInput = nullptr;
208 foundOutput = nullptr;
209 if (_networkInputs.empty() || _networkOutputs.empty()) {
210 THROW_IE_EXCEPTION << "Internal error: network inputs and outputs is not set";
212 auto foundInputPair = std::find_if(std::begin(_networkInputs),
213 std::end(_networkInputs),
214 [&](const std::pair<std::string, InputInfo::Ptr> &pair) {
215 return pair.first == name;
217 auto foundOutputPair = std::find_if(std::begin(_networkOutputs),
218 std::end(_networkOutputs),
219 [&](const std::pair<std::string, DataPtr> &pair) {
220 return pair.first == name;
222 if (foundOutputPair == std::end(_networkOutputs) && (foundInputPair == std::end(_networkInputs))) {
223 THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input or output with name: \'" << name << "\'";
225 if (foundInputPair != std::end(_networkInputs)) {
226 foundInput = foundInputPair->second;
229 foundOutput = foundOutputPair->second;
234 void checkBlob(const Blob::Ptr &blob, const std::string &name, bool isInput, const SizeVector& refDims = {}) const {
235 std::string bType = isInput ? "Input" : "Output";
236 std::string sType = isInput ? "input" : "output";
237 std::string strNotAllocated(bType + " data was not allocated.");
238 std::string strNotMatched("The " + sType + " blob size is not equal to the network " + sType + " size");
240 if (!blob) THROW_IE_EXCEPTION << strNotAllocated;
242 if (refDims.empty()) {
245 auto foundInputPair = std::find_if(std::begin(_networkInputs),
246 std::end(_networkInputs),
247 [&](const std::pair<std::string, InputInfo::Ptr>& pair) {
248 return pair.first == name;
250 if (foundInputPair == std::end(_networkInputs)) {
251 THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find input with name: \'" << name << "\'";
253 dims = foundInputPair->second->getDims();
255 auto foundOutputPair = std::find_if(std::begin(_networkOutputs),
256 std::end(_networkOutputs),
257 [&](const std::pair<std::string, DataPtr>& pair) {
258 return pair.first == name;
260 if (foundOutputPair == std::end(_networkOutputs)) {
261 THROW_IE_EXCEPTION << NOT_FOUND_str << "Failed to find output with name: \'" << name << "\'";
263 dims = foundOutputPair->second->getDims();
265 refSize = details::product(dims);
267 refSize = details::product(refDims);
270 if (refSize != blob->size()) {
271 THROW_IE_EXCEPTION << strNotMatched + ": got " << blob->size() << " expecting " << refSize;
273 if (blob->buffer() == nullptr) THROW_IE_EXCEPTION << strNotAllocated;
277 } // namespace InferenceEngine