1 // Copyright (C) 2018-2020 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
/**
 * @brief This is a header file for the Inference Engine Core class C++ API
 */
17 #include "cpp/ie_plugin_cpp.hpp"
18 #include "details/os/os_filesystem.hpp"
19 #include "ie_extension.h"
20 #include "ie_remote_context.hpp"
22 namespace InferenceEngine {
25 * @brief This class represents Inference Engine Core entity.
27 * It can throw exceptions safely for the application, where it is properly handled.
29 class INFERENCE_ENGINE_API_CLASS(Core) {
31 std::shared_ptr<Impl> _impl;
/** @brief Constructs Inference Engine Core instance using XML configuration file with
 * plugins description.
 *
 * See RegisterPlugins for more details.
 *
 * @param xmlConfigFile A path to .xml file with plugins to load from. If XML configuration file is not specified,
 * then default Inference Engine plugins are loaded from the default plugin.xml file.
 */
explicit Core(const std::string& xmlConfigFile = std::string());
/**
 * @brief Returns plugins version information
 *
 * @param deviceName Device name to identify plugin
 * @return A vector of versions
 */
std::map<std::string, Version> GetVersions(const std::string& deviceName) const;
/**
 * @brief Sets logging callback
 *
 * Logging is used to track what is going on inside the plugins, Inference Engine library
 *
 * @param listener Logging sink
 */
void SetLogCallback(IErrorListener& listener) const;
#ifdef ENABLE_UNICODE_PATH_SUPPORT
/**
 * @brief Reads IR xml and bin files (wide-character path overload)
 * @param modelPath path to IR file
 * @param binPath path to bin file, if path is empty, will try to read bin file with the same name as xml and
 * if bin file with the same name was not found, will load IR without weights.
 * @return CNNNetwork
 */
CNNNetwork ReadNetwork(const std::wstring& modelPath, const std::wstring& binPath = std::wstring()) const {
    // Convert wide-character paths to multi-byte strings and delegate to the narrow-string overload.
    return ReadNetwork(details::wStringtoMBCSstringChar(modelPath), details::wStringtoMBCSstringChar(binPath));
}
#endif  // ENABLE_UNICODE_PATH_SUPPORT
/**
 * @brief Reads IR xml and bin files
 * @param modelPath path to IR file
 * @param binPath path to bin file, if path is empty, will try to read bin file with the same name as xml and
 * if bin file with the same name was not found, will load IR without weights.
 * @return CNNNetwork
 */
CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath = "") const;
/**
 * @brief Reads IR xml and bin (with the same name) files
 * @param model string with IR
 * @param weights shared pointer to constant blob with weights
 * @return CNNNetwork
 */
CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const;
/**
 * @brief Creates an executable network from a network object.
 *
 * Users can create as many networks as they need and use
 * them simultaneously (up to the limitation of the hardware resources)
 *
 * @param network CNNNetwork object acquired from CNNNetReader
 * @param deviceName Name of device to load network to
 * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
 * operation
 * @return An executable network reference
 */
ExecutableNetwork LoadNetwork(
    CNNNetwork network, const std::string& deviceName,
    const std::map<std::string, std::string>& config = std::map<std::string, std::string>());
/**
 * @brief Registers extension
 * @param extension Pointer to already loaded extension
 */
void AddExtension(const IExtensionPtr& extension);
/**
 * @brief Creates an executable network from a network object within a specified remote context.
 * @param network CNNNetwork object acquired from CNNNetReader
 * @param context Pointer to RemoteContext object
 * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
 * operation
 * @return An executable network reference
 */
// NOTE(review): the previous doc also described a 'deviceName' parameter, but this overload
// takes no such argument — presumably the target device comes from the context; verify.
ExecutableNetwork LoadNetwork(
    CNNNetwork network, RemoteContext::Ptr context,
    const std::map<std::string, std::string>& config = std::map<std::string, std::string>());
/**
 * @brief Registers extension for the specified plugin
 *
 * @param extension Pointer to already loaded extension
 * @param deviceName Device name to identify plugin to add an executable extension
 */
void AddExtension(IExtensionPtr extension, const std::string& deviceName);
/**
 * @brief Creates an executable network from a previously exported network
 *
 * @param modelFileName Path to the location of the exported file
 * @param deviceName Name of device to load executable network on
 * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
 * operation
 * @return An executable network reference
 */
ExecutableNetwork ImportNetwork(
    const std::string& modelFileName, const std::string& deviceName,
    const std::map<std::string, std::string>& config = std::map<std::string, std::string>());
/**
 * @brief Creates an executable network from a previously exported network
 *
 * @param networkModel network model stream
 * @param deviceName Name of device to load executable network on
 * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
 * operation
 * @return An executable network reference
 */
ExecutableNetwork ImportNetwork(std::istream& networkModel, const std::string& deviceName = {},
                                const std::map<std::string, std::string>& config = {});
/**
 * @brief Query device if it supports specified network with specified configuration
 *
 * @param network Network object to query
 * @param deviceName A name of a device to query
 * @param config Optional map of pairs: (config parameter name, config parameter value)
 * @return Query result object (returned by value, not by pointer) that holds a description of an error if any occurred
 */
QueryNetworkResult QueryNetwork(
    const ICNNNetwork& network, const std::string& deviceName,
    const std::map<std::string, std::string>& config = std::map<std::string, std::string>()) const;
/**
 * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
 *
 * @param config Map of pairs: (config parameter name, config parameter value)
 * @param deviceName An optional name of a device. If device name is not specified, the config is set for all the
 * registered devices.
 */
void SetConfig(const std::map<std::string, std::string>& config, const std::string& deviceName = std::string());
/**
 * @brief Gets configuration dedicated to device behaviour.
 *
 * The method is targeted to extract information which can be set via SetConfig method.
 *
 * @param deviceName - A name of a device to get a configuration value for.
 * @param name - config key name to request.
 * @return Value of config corresponding to config key.
 */
Parameter GetConfig(const std::string& deviceName, const std::string& name) const;
/**
 * @brief Gets general runtime metric for dedicated hardware.
 *
 * The method is needed to request common device properties
 * which are executable network agnostic. It can be device name, temperature, other devices-specific values.
 *
 * @param deviceName - A name of a device to get a metric value.
 * @param name - metric name to request.
 * @return Metric value corresponding to metric key.
 */
Parameter GetMetric(const std::string& deviceName, const std::string& name) const;
/**
 * @brief Returns devices available for neural networks inference
 *
 * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }.
 * If there is more than one device of a specific type, they are enumerated with the .# suffix.
 */
std::vector<std::string> GetAvailableDevices() const;
/**
 * @brief Register new device and plugin which implement this device inside Inference Engine.
 *
 * @param pluginName A name of plugin. Depending on platform pluginName is wrapped with shared library suffix and
 * prefix to identify library full name
 *
 * @param deviceName A device name to register plugin for. If device name is not specified, then it's taken from
 * plugin using InferenceEnginePluginPtr::GetName function
 */
void RegisterPlugin(const std::string& pluginName, const std::string& deviceName);
/**
 * @brief Unloads previously loaded plugin with a specified name from Inference Engine
 *
 * The method is needed to remove plugin instance and free its resources. If plugin for a
 * specified device has not been created before, the method throws an exception.
 *
 * @param deviceName Device name identifying plugin to remove from Inference Engine
 */
void UnregisterPlugin(const std::string& deviceName);
/** @brief Registers plugin to Inference Engine Core instance using XML configuration file with
 * plugins description.
 *
 * XML file has the following structure:
 *
 * ```xml
 * <ie>
 *     <plugins>
 *         <plugin name="" location="">
 *             <extensions>
 *                 <extension location=""/>
 *             </extensions>
 *             <properties>
 *                 <property key="" value=""/>
 *             </properties>
 *         </plugin>
 *     </plugins>
 * </ie>
 * ```
 *
 * - `name` identifies name of device enabled by plugin
 * - `location` specifies absolute path to dynamic library with plugin. A path can also be relative to inference
 * engine shared library. It allows to have common config for different systems with different configurations.
 * - Properties are set to plugin via the `SetConfig` method.
 * - Extensions are set to plugin via the `AddExtension` method.
 *
 * @param xmlConfigFile A path to .xml file with plugins to register.
 */
void RegisterPlugins(const std::string& xmlConfigFile);
/**
 * @brief Create a new shared context object on specified accelerator device
 * using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
 * @param deviceName Name of a device to create new shared context on.
 * @param params Map of device-specific shared context parameters.
 * @return A shared pointer to the created remote context.
 */
RemoteContext::Ptr CreateContext(const std::string& deviceName, const ParamMap& params);
269 * @brief Get a pointer to default(plugin-supplied) shared context object for specified accelerator device.
270 * @param deviceName - A name of a device to get create shared context from.
272 RemoteContext::Ptr GetDefaultContext(const std::string& deviceName);
274 } // namespace InferenceEngine