1 // Copyright (C) 2018-2020 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
 * @brief A header file for the IExecutableNetwork interface
8 * @file ie_iexecutable_network.hpp
17 #include "ie_common.h"
18 #include "ie_icnn_network.hpp"
19 #include "ie_iinfer_request.hpp"
20 #include "ie_imemory_state.hpp"
21 #include "ie_input_info.hpp"
22 #include "ie_parameter.hpp"
23 #include "ie_primitive_info.hpp"
24 #include "ie_remote_context.hpp"
26 namespace InferenceEngine {
 * @brief A collection that contains a string as key and a const Data smart pointer as value
31 using ConstOutputsDataMap = std::map<std::string, CDataPtr>;
34 * @brief This is an interface of an executable network
class IExecutableNetwork : public details::IRelease {
public:
    /**
     * @brief A smart pointer to the current IExecutableNetwork object
     */
    using Ptr = std::shared_ptr<IExecutableNetwork>;

    /**
     * @brief Gets the Executable network output Data node information.
     *
     * The received info is stored in the given ::ConstOutputsDataMap node.
     * This method needs to be called to find output names for using them later
     * during filling of a map of blobs passed to InferenceEngine::IInferencePlugin::Infer()
     *
     * @param out Reference to the ::ConstOutputsDataMap object
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode GetOutputsInfo(ConstOutputsDataMap& out, ResponseDesc* resp) const noexcept = 0;

    /**
     * @brief Gets the executable network input Data node information.
     *
     * The received info is stored in the given ::ConstInputsDataMap object.
     * This method needs to be called to find out input names for using them later
     * during filling of a map of blobs passed to InferenceEngine::IInferencePlugin::Infer()
     *
     * @param inputs Reference to ::ConstInputsDataMap object.
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode GetInputsInfo(ConstInputsDataMap& inputs, ResponseDesc* resp) const noexcept = 0;

    /**
     * @brief Creates an inference request object used to infer the network.
     *
     * The created request has allocated input and output blobs (that can be changed later).
     *
     * @param req Shared pointer to the created request object
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode CreateInferRequest(IInferRequest::Ptr& req, ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Exports the current executable network to a file.
     *
     * @see Core::ImportNetwork
     * @see IInferencePlugin::ImportNetwork
     *
     * @param modelFileName Full path to the location of the exported file
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode Export(const std::string& modelFileName, ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Exports the current executable network to an output stream.
     *
     * @see Core::ImportNetwork
     * @see IInferencePlugin::ImportNetwork
     *
     * @param networkModel Network model output stream
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode Export(std::ostream& networkModel, ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Gets the mapping of IR layer names to implemented kernels
     *
     * @param deployedTopology Map of PrimitiveInfo objects that represent the deployed topology
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode GetMappedTopology(std::map<std::string, std::vector<PrimitiveInfo::Ptr>>& deployedTopology,
                                         ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Gets executable graph information from a device
     *
     * @param graphPtr Network pointer to store the executable graph information
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success
     */
    virtual StatusCode GetExecGraphInfo(ICNNNetwork::Ptr& graphPtr, ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Gets the state control interface for the given executable network.
     *
     * State control is essential for recurrent networks.
     *
     * @param pState Reference to a pointer that receives internal states
     * @param idx Requested index for receiving a memory state
     * @param resp Optional: pointer to an already allocated object to contain information in case of failure
     * @return Status code of the operation: InferenceEngine::OK (0) for success, OUT_OF_BOUNDS (-6) if no memory
     *         state exists for the given index
     */
    virtual StatusCode QueryState(IMemoryState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Sets configuration for the current executable network
     *
     * @param config Map of pairs: (config parameter name, config parameter value)
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* resp) noexcept = 0;

    /**
     * @brief Gets configuration for the current executable network.
     *
     * The method is responsible for extracting information
     * that affects executable network execution. The list of supported configuration values can be extracted via
     * ExecutableNetwork::GetMetric with the SUPPORTED_CONFIG_KEYS key, but some of these keys cannot be changed
     * dynamically, e.g. DEVICE_ID cannot be changed if an executable network has already been compiled for a
     * particular device.
     *
     * @param name Config key, can be found in ie_plugin_config.hpp
     * @param result Value of config corresponding to the config key
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode GetConfig(const std::string& name, Parameter& result, ResponseDesc* resp) const noexcept = 0;

    /**
     * @brief Gets a general runtime metric for an executable network.
     *
     * It can be the network name, the actual device ID on
     * which the executable network is running, or any other property that cannot be changed dynamically.
     *
     * @param name Metric name to request
     * @param result Metric value corresponding to the metric key
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode GetMetric(const std::string& name, Parameter& result, ResponseDesc* resp) const noexcept = 0;

    /**
     * @brief Gets the shared context used to create an executable network.
     *
     * @param pContext Reference to a pointer that will receive the resulting shared context object pointer
     * @param resp Pointer to the response message that holds a description of an error if any occurred
     * @return Status code of the operation. InferenceEngine::OK if succeeded
     */
    virtual StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept = 0;
184 } // namespace InferenceEngine