// Copyright (C) 2018 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
 * @brief This is a header file for the Inference Engine Extension Interface
 * @file ie_iextension.h
 */
#pragma once

#include "ie_device.hpp"
#include "ie_layers.h"
#include "ie_error.hpp"
#include "ie_version.hpp"

#include <functional>
#include <map>
#include <memory>
#include <string>
#include <vector>

#include "details/ie_no_copy.hpp"
/**
 * @brief Linkage/visibility macro for extension entry points.
 *
 * When the extension library itself is being built on Windows
 * (IMPLEMENT_INFERENCE_EXTENSION_API defined), entry points are exported
 * with C linkage; in every other configuration the regular Inference
 * Engine import/export macro is used.
 */
#if defined(_WIN32) && defined(IMPLEMENT_INFERENCE_EXTENSION_API)
#define INFERENCE_EXTENSION_API(TYPE) extern "C" __declspec(dllexport) TYPE
#else
#define INFERENCE_EXTENSION_API(TYPE) INFERENCE_ENGINE_API(TYPE)
#endif
33 namespace InferenceEngine {
37 * @brief This structure describes data configuration
41 * @brief Format of memory descriptor
45 * @brief Index of in-place memory. If -1 memory cannot be in-place
49 * @brief Flag for determination of the constant memory. If layer contains all constant memory we can calculate it on the load stage.
51 bool constant = false;
56 * @brief This structure describes Layer configuration
60 * @brief Supported dynamic batch. If false, dynamic batch is not supported
62 bool dynBatchSupport = false;
64 * @brief Vector of input data configs
66 std::vector<DataConfig> inConfs;
68 * @brief Vector of output data configs
70 std::vector<DataConfig> outConfs;
74 * @brief This class provides interface for extension implementations
78 using Ptr = std::shared_ptr<ILayerImpl>;
83 virtual ~ILayerImpl() = default;
86 * @brief Gets all supported configurations for the current layer
87 * @param conf Vector with supported configurations
88 * @param resp Response descriptor
91 virtual StatusCode getSupportedConfigurations(std::vector<LayerConfig>& conf, ResponseDesc* resp) noexcept = 0;
94 * @brief Initializes the implementation
95 * @param config Selected supported configuration
96 * @param resp Response descriptor
99 virtual StatusCode init(LayerConfig& config, ResponseDesc* resp) noexcept = 0;
103 * @brief This class provides interface for the implementation with the custom execution code
105 class ILayerExecImpl : public ILayerImpl {
108 * @brief Execute method
109 * @param inputs Vector of blobs with input memory
110 * @param outputs Vector of blobs with output memory
111 * @param resp Response descriptor
112 * @return Status code
114 virtual StatusCode execute(std::vector<Blob::Ptr>& inputs,
115 std::vector<Blob::Ptr>& outputs, ResponseDesc* resp) noexcept = 0;
119 * @brief This class provides interface for extension factories
121 class ILayerImplFactory {
123 using Ptr = std::shared_ptr<ILayerImplFactory>;
124 using ImplCreator = std::function<ILayerImpl*()>;
129 virtual ~ILayerImplFactory() = default;
133 * @brief Sets output shapes by input shapes.
134 * @param inShapes Shapes of all inputs coming in this layer
135 * @param outShapes Generated shapes coming from this layer given the input
136 * @param resp Response descriptor
137 * @return Status code
139 virtual StatusCode getShapes(const std::vector<TensorDesc>& inShapes, std::vector<TensorDesc>& outShapes,
140 ResponseDesc* resp) noexcept = 0;
143 * @brief Gets all possible implementations for the given cnn Layer
144 * @param impls the vector with implementations which is ordered by priority
145 * @param resp response descriptor
146 * @return status code
148 virtual StatusCode getImplementations(std::vector<ILayerImpl::Ptr>& impls, ResponseDesc* resp) noexcept = 0;
152 * @class IShapeInferImpl
153 * @brief This class provides interface for the implementation with the custom execution code
155 class IShapeInferImpl {
157 using Ptr = std::shared_ptr<IShapeInferImpl>;
160 * @brief check that reshape can be applied, that parameters and shapes are valid
162 virtual StatusCode inferShapes(const std::vector<SizeVector>& inShapes,
163 const std::map<std::string, std::string>& params,
164 const std::map<std::string, Blob::Ptr>& blobs,
165 std::vector<SizeVector>& outShapes,
166 ResponseDesc* resp) noexcept = 0;
170 * @class IShapeInferExtension
171 * @brief This class is the reader extension interface to provide implementation for shape propagation
173 class IShapeInferExtension : public InferenceEngine::details::IRelease {
176 * @brief Sets logging callback.
177 * Logging is used to track what is going on inside.
178 * @param listener Logging sink
180 virtual void SetLogCallback(InferenceEngine::IErrorListener& listener) noexcept = 0;
183 * @brief Gets extension version information and stores in versionInfo
184 * @param versionInfo Pointer to version info, will be set by plugin
186 virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
189 * @brief Cleans resources up
191 virtual void Unload() noexcept = 0;
194 * @brief Gets the array with types of layers which are included in the extension
195 * @param types Array to store the layer types
196 * @param size Size of the layer types array
197 * @param resp Response descriptor
198 * @return Status code
200 virtual StatusCode getPrimitiveTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept = 0;
203 * @brief Gets shape propagation implementation for the given string-type of cnn Layer
204 * @param impl the vector with implementations which is ordered by priority
205 * @param resp response descriptor
206 * @return status code
208 virtual StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl,
210 ResponseDesc* resp) noexcept = 0;
214 * @brief This class is the main extension interface
216 class IExtension : public IShapeInferExtension {
218 virtual StatusCode getFactoryFor(ILayerImplFactory*& factory, const CNNLayer* cnnLayer,
219 ResponseDesc* resp) noexcept = 0;
221 StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl,
223 ResponseDesc* resp) noexcept override {
224 return NOT_IMPLEMENTED;
228 using IExtensionPtr = std::shared_ptr<IExtension>;
229 using IShapeInferExtensionPtr = std::shared_ptr<IShapeInferExtension>;
232 * @brief Creates the default instance of the extension
233 * @param ext Extension interface
234 * @param resp Response description
235 * @return Status code
237 INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
240 * @brief Creates the default instance of the shape infer extension
241 * @param ext Shape Infer Extension interface
242 * @param resp Response description
243 * @return Status code
245 INFERENCE_EXTENSION_API(StatusCode) CreateShapeInferExtension(IShapeInferExtension*& ext, ResponseDesc* resp) noexcept;
248 } // namespace InferenceEngine