size_t size() const;
/**
- * @deprecated Use Core::AddExtension to add an extension to the library
- * @brief Registers extension within the plugin
- *
- * @param extension Pointer to already loaded reader extension with shape propagation implementations
- */
- INFERENCE_ENGINE_DEPRECATED("Use Core::AddExtension to add an extension to the library")
- void AddExtension(InferenceEngine::IShapeInferExtensionPtr extension);
-
- /**
* @brief Helper method to get collect all input shapes with names of corresponding Data objects
*
* @return Map of pairs: input name and its dimension.
static constexpr auto name = "CreateExtension";
};
-/**
- * @deprecated Implement IExtension interface. The interface will be removed in 2021.1 release.
- * @brief The SOCreatorTrait class specialization for IShapeInferExtension case, defines the name of the fabric method for
- * creating IExtension object in DLL
- */
-template <>
-class INFERENCE_ENGINE_DEPRECATED("Implement IExtension") SOCreatorTrait<IShapeInferExtension> {
-public:
- /**
- * @brief A name of the fabric method for creating an IShapeInferExtension object in DLL
- */
- static constexpr auto name = "CreateShapeInferExtension";
-};
-
} // namespace details
/**
IE_SUPPRESS_DEPRECATED_END
/**
- * @deprecated Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation
- * The method will be removed in 2021.1 release.
- * @brief Gets shape propagation implementation for the given string-type of CNNLayer
- *
- * @param impl the vector with implementations which is ordered by priority
- * @param type A type of CNNLayer
- * @param resp response descriptor
- * @return status code
- */
- IE_SUPPRESS_DEPRECATED_START
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl, const char* type, ResponseDesc* resp) noexcept override {
- return actual->getShapeInferImpl(impl, type, resp);
- }
- IE_SUPPRESS_DEPRECATED_END
-
- /**
- * @deprecated Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation
- * The method will be removed in 2021.1 release.
- * @brief Gets the array with types of layers which are included in the extension
- *
- * @param types Types array
- * @param size Size of the types array
- * @param resp Response descriptor
- * @return Status code
- */
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- StatusCode getShapeInferTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept override {
- IE_SUPPRESS_DEPRECATED_START
- return actual->getShapeInferTypes(types, size, resp);
- IE_SUPPRESS_DEPRECATED_END
- }
-
- /**
* @brief Returns operation sets
* This method throws an exception if it was not implemented
* @return map of opset name to opset
};
/**
- * @deprecated Use a common Extension class. The interface will be removed in 2021.1 release.
- * @brief This class is a C++ helper to work with objects created using extensions.
- */
-class INFERENCE_ENGINE_DEPRECATED("Use a common Extension interface") ShapeInferExtension :
- public IShapeInferExtension {
-public:
- /**
- * @brief Loads extension from a shared library
- *
- * @param name Full or relative path to extension library
- */
- IE_SUPPRESS_DEPRECATED_START_WIN
- explicit ShapeInferExtension(const file_name_t& name): actual(name) {}
- IE_SUPPRESS_DEPRECATED_END_WIN
-
- /**
- * @brief Gets the extension version information
- *
- * @param versionInfo A pointer to version info, set by the plugin
- */
- void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {
- actual->GetVersion(versionInfo);
- }
-
- /**
- * @brief Cleans the resources up
- */
- void Unload() noexcept override {
- actual->Unload();
- }
-
- /**
- * @brief Does nothing since destruction is done via the regular mechanism
- */
- void Release() noexcept override {}
-
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- StatusCode getShapeInferTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept override {
- IE_SUPPRESS_DEPRECATED_START
- return actual->getShapeInferTypes(types, size, resp);
- IE_SUPPRESS_DEPRECATED_END
- }
-
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl, const char* type, ResponseDesc* resp) noexcept override {
- IE_SUPPRESS_DEPRECATED_START
- return actual->getShapeInferImpl(impl, type, resp);
- IE_SUPPRESS_DEPRECATED_END
- }
-
-protected:
- /**
- * @brief A SOPointer instance to the loaded templated object
- */
- InferenceEngine::details::SOPointer<IShapeInferExtension> actual;
-};
-IE_SUPPRESS_DEPRECATED_END_WIN
-
-/**
- * @deprecated Use make_so_pointer with IExtension as template argument type.
- * @brief Creates a special shared_pointer wrapper for the given type from a specific shared module
- *
- * @param name Name of the shared library file
- * @return shared_pointer A wrapper for the given type from a specific shared module
- */
-template <>
-inline std::shared_ptr<IShapeInferExtension> make_so_pointer(const file_name_t& name) {
- IE_SUPPRESS_DEPRECATED_START
- return std::make_shared<ShapeInferExtension>(name);
- IE_SUPPRESS_DEPRECATED_END
-}
-
-/**
* @brief Creates a special shared_pointer wrapper for the given type from a specific shared module
*
* @param name Name of the shared library file
};
/**
- * @deprecated Use Core::AddExtension to add an extension to the library
- * @brief Registers extension within the plugin
- *
- * @param extension Pointer to already loaded reader extension with shape propagation implementations
- * @param resp Pointer to the response message that holds a description of an error if any occurred
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- INFERENCE_ENGINE_DEPRECATED("Use Core::AddExtension to add an extension to the library")
- virtual StatusCode AddExtension(const IShapeInferExtensionPtr& extension, ResponseDesc* resp) noexcept;
-
- /**
* @deprecated Migrate to IR v10 and use quantization approach with FakeQuantize
* @brief Gets the statistics.
* @param stats The statistics
};
/**
- * @deprecated Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation.
- * The interface will be removed in 2021.1 release.
- * @class IShapeInferImpl
- * @brief This class provides interface for the implementation with the custom execution code
- */
-class INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation") IShapeInferImpl {
-public:
- /**
- * @brief A shared pointer to a IShapeInferImpl object
- */
- IE_SUPPRESS_DEPRECATED_START
- using Ptr = std::shared_ptr<IShapeInferImpl>;
- IE_SUPPRESS_DEPRECATED_END
-
- virtual ~IShapeInferImpl() = default;
-
- /**
- * @brief check that reshape can be applied, that parameters and shapes are valid
- */
- virtual StatusCode inferShapes(const std::vector<Blob::CPtr>& /*inBlobs*/,
- const std::map<std::string, std::string>& /*params*/,
- const std::map<std::string, Blob::Ptr>& /*blobs*/,
- std::vector<SizeVector>& /*outShapes*/, ResponseDesc* /*resp*/) noexcept {
- return NOT_IMPLEMENTED;
- } // For backward-compatibility
-};
-
-/**
- * @deprecated Implement a custom ngraph operation derived from ngraph::op::Op in IExtension implementation
- * @class IShapeInferExtension
- * @brief This class is the reader extension interface to provide implementation for shape propagation
- */
-class IShapeInferExtension : public InferenceEngine::details::IRelease {
-public:
- /**
- * @brief Gets extension version information and stores in versionInfo
- * @param versionInfo Pointer to version info, will be set by plugin
- */
- virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
-
- /**
- * @brief Cleans resources up
- */
- virtual void Unload() noexcept = 0;
-
- /**
- * @deprecated Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation.
- * The method will be removed in 2021.1 release.
- * @brief Fills passed array with types of layers which shape infer implementations are included in the extension
- *
- * @param types Array to store the layer types
- * @param size Size of the layer types array
- * @param resp Response descriptor
- * @return Status code
- */
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- virtual StatusCode getShapeInferTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept = 0;
-
- /**
- * @deprecated Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation.
- * The method will be removed in 2021.1 release.
- * @brief Gets shape propagation implementation for the given string-type of CNNLayer
- *
- * @param impl the vector with implementations which is ordered by priority
- * @param type A type of CNNLayer
- * @param resp response descriptor
- * @return status code
- */
- IE_SUPPRESS_DEPRECATED_START
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- virtual StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl, const char* type, ResponseDesc* resp) noexcept = 0;
- IE_SUPPRESS_DEPRECATED_END
-};
-
-IE_SUPPRESS_DEPRECATED_START_WIN
-
-/**
* @brief This class is the main extension interface
*/
-class INFERENCE_ENGINE_API_CLASS(IExtension) : public IShapeInferExtension {
+class INFERENCE_ENGINE_API_CLASS(IExtension) : public InferenceEngine::details::IRelease {
public:
/**
* @deprecated Use IExtension::getImplementation to get a concrete implementation
return NOT_IMPLEMENTED;
}
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- StatusCode getShapeInferTypes(char**&, unsigned int&, ResponseDesc*) noexcept override {
- return NOT_IMPLEMENTED;
- }
-
- INFERENCE_ENGINE_DEPRECATED("Implement ngraph::op::Op::validate_and_infer_types method in a custom ngraph operation")
- StatusCode getShapeInferImpl(IShapeInferImpl::Ptr&, const char*, ResponseDesc*) noexcept override {
- return NOT_IMPLEMENTED;
- }
-
/**
* @brief Returns operation sets
* This method throws an exception if it was not implemented
(void)implType;
return nullptr;
}
-};
-IE_SUPPRESS_DEPRECATED_END_WIN
+ /**
+ * @brief Cleans resources up
+ */
+ virtual void Unload() noexcept = 0;
+
+ /**
+ * @brief Gets extension version information and stores in versionInfo
+ * @param versionInfo Pointer to version info, will be set by plugin
+ */
+ virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
+};
/**
* @brief A shared pointer to a IExtension interface
using IExtensionPtr = std::shared_ptr<IExtension>;
/**
- * @deprecated Migrate to IR v10 and implement shape inference in the ngraph::op::Op::validate_and_infer_types method
- * This API will be removed in 2021.1 release.
- * @brief A shared pointer to a IShapeInferExtension interface
- */
-using IShapeInferExtensionPtr = std::shared_ptr<IShapeInferExtension>;
-
-/**
* @brief Creates the default instance of the extension
*
* @param ext Extension interface
*/
INFERENCE_EXTENSION_API(StatusCode) CreateExtension(IExtension*& ext, ResponseDesc* resp) noexcept;
-/**
- * @deprecated Migrate to IR v10 and implement shape inference in the ngraph::op::Op::validate_and_infer_types method
- * This API will be removed in 2021.1 release.
- * @brief Creates the default instance of the shape infer extension
- *
- * @param ext Shape Infer Extension interface
- * @param resp Response description
- * @return Status code
- */
-INFERENCE_EXTENSION_API(StatusCode)
-CreateShapeInferExtension(IShapeInferExtension*& ext, ResponseDesc* resp) noexcept;
-
} // namespace InferenceEngine
return OK;
}
-StatusCode CNNNetworkNGraphImpl::AddExtension(const InferenceEngine::IShapeInferExtensionPtr& extension,
- InferenceEngine::ResponseDesc* resp) noexcept {
- if (!cnnNetwork) {
- ::ngraph::op::GenericIE::addExtension(_ngraph_function, extension);
- }
- return cnnNetwork ? cnnNetwork->AddExtension(extension, resp) : OK;
-}
-
StatusCode CNNNetworkNGraphImpl::serialize(const std::string& xmlPath, const std::string& binPath,
ResponseDesc* resp) const noexcept {
auto network = cnnNetwork;
StatusCode reshape(const std::map<std::string, std::vector<size_t>>& inputShapes,
ResponseDesc* resp) noexcept override;
- StatusCode AddExtension(const InferenceEngine::IShapeInferExtensionPtr& extension,
- InferenceEngine::ResponseDesc* resp) noexcept override;
-
StatusCode serialize(const std::string& xmlPath, const std::string& binPath, ResponseDesc* resp) const
noexcept override;
#include <string>
#include <vector>
+#include "ie_ishape_infer_extension.hpp"
#include "cnn_network_stats_impl.hpp"
#include "description_buffer.hpp"
#include "ie_api.h"
ResponseDesc* resp) noexcept override;
StatusCode AddExtension(const InferenceEngine::IShapeInferExtensionPtr& extension,
- InferenceEngine::ResponseDesc* resp) noexcept override;
+ InferenceEngine::ResponseDesc* resp) noexcept;
StatusCode serialize(const std::string& xmlPath, const std::string& binPath, ResponseDesc* resp) const
noexcept override;
--- /dev/null
+// Copyright (C) 2018-2020 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "details/ie_irelease.hpp"
+#include "ie_version.hpp"
+#include "ie_common.h"
+#include "ie_blob.h"
+
+namespace InferenceEngine {
+
+/**
+ * @class IShapeInferImpl
+ * @brief This class provides interface for the implementation with the custom execution code
+ */
+class IShapeInferImpl {
+public:
+    /**
+     * @brief A shared pointer to a IShapeInferImpl object
+     */
+    using Ptr = std::shared_ptr<IShapeInferImpl>;
+
+    virtual ~IShapeInferImpl() = default;
+
+    /**
+     * @brief Checks that reshape can be applied, that parameters and shapes are valid
+     *
+     * @param inBlobs Input blobs with shapes to propagate
+     * @param params Layer parameters as a string-to-string map
+     * @param blobs Layer constant blobs (e.g. weights)
+     * @param outShapes Resulting output shapes, filled by the implementation
+     * @param resp Response descriptor for an error message if any occurred
+     * @return Status code; the default implementation reports NOT_IMPLEMENTED
+     */
+    virtual StatusCode inferShapes(const std::vector<Blob::CPtr>& /*inBlobs*/,
+                                   const std::map<std::string, std::string>& /*params*/,
+                                   const std::map<std::string, Blob::Ptr>& /*blobs*/,
+                                   std::vector<SizeVector>& /*outShapes*/, ResponseDesc* /*resp*/) noexcept {
+        return NOT_IMPLEMENTED;
+    }  // For backward-compatibility
+};
+
+/**
+ * @class IShapeInferExtension
+ * @brief This class is the reader extension interface to provide implementation for shape propagation
+ */
+class IShapeInferExtension : public InferenceEngine::details::IRelease {
+public:
+    /**
+     * @brief Gets extension version information and stores in versionInfo
+     * @param versionInfo Pointer to version info, will be set by plugin
+     */
+    virtual void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept = 0;
+
+    /**
+     * @brief Cleans resources up
+     */
+    virtual void Unload() noexcept = 0;
+
+    /**
+     * @brief Fills passed array with types of layers which shape infer implementations are included in the extension
+     *
+     * @param types Array to store the layer types
+     * @param size Size of the layer types array
+     * @param resp Response descriptor
+     * @return Status code
+     */
+    virtual StatusCode getShapeInferTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept = 0;
+
+    /**
+     * @brief Gets shape propagation implementation for the given string-type of CNNLayer
+     *
+     * @param impl the vector with implementations which is ordered by priority
+     * @param type A type of CNNLayer
+     * @param resp response descriptor
+     * @return status code
+     */
+    virtual StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl, const char* type, ResponseDesc* resp) noexcept = 0;
+};
+
+/**
+ * @brief A shared pointer to a IShapeInferExtension interface
+ */
+using IShapeInferExtensionPtr = std::shared_ptr<IShapeInferExtension>;
+
+} // namespace InferenceEngine
#pragma once
-#include <ie_iextension.h>
+#include <ie_ishape_infer_extension.hpp>
#include <description_buffer.hpp>
#include <list>
return std::distance(std::begin(*this), std::end(*this));
}
-StatusCode ICNNNetwork::AddExtension(const IShapeInferExtensionPtr& extension, ResponseDesc* resp) noexcept {
- (void)extension;
- (void)resp;
- return NOT_IMPLEMENTED;
-};
-
-void CNNNetwork::AddExtension(InferenceEngine::IShapeInferExtensionPtr extension) {
- CALL_STATUS_FNC(AddExtension, extension);
-}
-
CNNLayer::CNNLayer(const LayerParams& prms)
: node(nullptr), name(prms.name), type(prms.type), precision(prms.precision), userValue({0}) {}
#include <string>
#include <vector>
+#include "ie_ishape_infer_extension.hpp"
#include "details/caseless.hpp"
#include "ie_icnn_network.hpp"
#include "ie_reshape_launcher.hpp"
return factory;
}
-IShapeInferImpl::Ptr MKLDNNExtensionManager::CreateReshaper(const InferenceEngine::CNNLayerPtr &layer) {
- if (!layer)
- THROW_IE_EXCEPTION << "Cannot get cnn layer!";
- IShapeInferImpl::Ptr reshaper = nullptr;
- for (auto& ext : _extensions) {
- ResponseDesc responseDesc;
- StatusCode rc;
- rc = ext->getShapeInferImpl(reshaper, layer->type.c_str(), &responseDesc);
- if (rc != OK) {
- reshaper = nullptr;
- continue;
- }
- if (reshaper != nullptr) {
- break;
- }
- }
- return reshaper;
-}
-
IE_SUPPRESS_DEPRECATED_END
#include <memory>
#include <ie_iextension.h>
#include <ie_layers.h>
+#include "ie_ishape_infer_extension.hpp"
namespace MKLDNNPlugin {
InferenceEngine::ILayerImpl::Ptr CreateImplementation(const std::shared_ptr<ngraph::Node>& op);
IE_SUPPRESS_DEPRECATED_START
std::shared_ptr<InferenceEngine::ILayerImplFactory> CreateExtensionFactory(const InferenceEngine::CNNLayerPtr& Layer);
- InferenceEngine::IShapeInferImpl::Ptr CreateReshaper(const InferenceEngine::CNNLayerPtr& Layer);
IE_SUPPRESS_DEPRECATED_END
void AddExtension(InferenceEngine::IExtensionPtr extension);
struct ExtensionsHolder {
std::map<std::string, ext_factory> list;
- std::map<std::string, IShapeInferImpl::Ptr> si_list;
};
class MKLDNNExtensions : public IExtension {
return OK;
}
- StatusCode getShapeInferTypes(char**& types, unsigned int& size, ResponseDesc* resp) noexcept override {
- return OK;
- }
-
- StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl, const char* type, ResponseDesc* resp) noexcept override {
- return OK;
- }
-
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {
static Version ExtensionDescription = {
{ 2, 0 }, // extension API version
bool MKLDNNGenericNode::created(const MKLDNNExtensionManager::Ptr &extMgr) {
if (getCnnLayer() && extMgr) {
- // We should save extension manager in otder to avoid situation when
+ // We should save extension manager in order to avoid situation when
// it will destroyed before extensibility primitives
if (getCnnLayer()->getNode()) {
auto impl = extMgr->CreateImplementation(getCnnLayer()->getNode());
}
if (impls.empty()) {
extFactory = extMgr->CreateExtensionFactory(getCnnLayer());
- extShapeInference = extMgr->CreateReshaper(getCnnLayer());
}
if (extFactory || !impls.empty())
}
if (isDynBatch) {
- if (extShapeInference) {
- IE_SUPPRESS_DEPRECATED_START
- auto sts = extShapeInference->inferShapes(constInputs, params, blobs, outputShapes, nullptr);
- IE_SUPPRESS_DEPRECATED_END
- if (sts != InferenceEngine::StatusCode::OK)
- isDynBatch = false;
- } else {
- isDynBatch = false;
- }
+        // TODO: use ngraph-based extension mechanism if needed to recompute shape
+ isDynBatch = false;
}
if (isDynBatch) {
#include <vector>
#include <memory>
#include <map>
+#include "ie_ishape_infer_extension.hpp"
namespace MKLDNNPlugin {
#include <string>
#include <map>
-#include <ie_iextension.h>
+#include <ie_ishape_infer_extension.hpp>
#include <ie_parameter.hpp>
#include <ie_precision.hpp>
#include "ngraph/op/op.hpp"
return InferenceEngine::OK;
}
- InferenceEngine::StatusCode getShapeInferTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
- size = 1;
- return InferenceEngine::OK;
- }
-
- InferenceEngine::StatusCode getShapeInferImpl(InferenceEngine::IShapeInferImpl::Ptr& impl, const char* type,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- std::string type_str = type;
- if (type_str != "test")
- return InferenceEngine::GENERAL_ERROR;
- return InferenceEngine::OK;
- }
-
/**
* @brief Returns operation sets
* This method throws an exception if it was not implemented
return InferenceEngine::GENERAL_ERROR;
}
}
-
-// Exported function
-INFERENCE_EXTENSION_API(InferenceEngine::StatusCode) InferenceEngine::CreateShapeInferExtension(InferenceEngine::IShapeInferExtension*& ext,
- InferenceEngine::ResponseDesc* resp) noexcept {
- IExtension * pExt = nullptr;
- InferenceEngine::StatusCode result = CreateExtension(pExt, resp);
- if (result == OK) {
- ext = pExt;
- }
-
- return result;
-}
ASSERT_EQ(ngraph->get_results()[0]->get_shape(), ngraph::Shape({1, 3, 22, 22}));
}
-class CustomTestLayerImpl : public InferenceEngine::IShapeInferImpl {
-public:
- InferenceEngine::StatusCode inferShapes(const std::vector<InferenceEngine::Blob::CPtr>& inBlobs,
- const std::map<std::string, std::string>& params,
- const std::map<std::string, InferenceEngine::Blob::Ptr>& blobs,
- std::vector<InferenceEngine::SizeVector>& outShapes,
- InferenceEngine::ResponseDesc* desc) noexcept override {
- if (blobs.empty())
- return InferenceEngine::StatusCode::GENERAL_ERROR;
- for (const auto& blob : inBlobs) {
- SizeVector shape;
- for (const auto& dim : blob->getTensorDesc().getDims()) {
- shape.emplace_back(dim*2);
- }
- outShapes.push_back(shape);
- }
- return InferenceEngine::StatusCode::OK;
- }
-};
-
class CustomTestOp: public ngraph::op::Op {
public:
static constexpr ngraph::NodeTypeInfo type_info{"CustomTestLayer", 0};
class TestInPlaceExtension : public InferenceEngine::IExtension {
public:
- explicit TestInPlaceExtension(bool old = true): oldExt(old) {
- _shapeInferImpl = std::make_shared<CustomTestLayerImpl>();
- }
-
- InferenceEngine::StatusCode
- getPrimitiveTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
- if (!oldExt)
- return GENERAL_ERROR;
-
- size = 1;
- types = new char* [size];
- std::string type = "CustomTestLayer";
- types[0] = new char[type.size() + 1];
- std::copy(type.begin(), type.end(), types[0]);
- types[0][type.size()] = 0;
- return InferenceEngine::OK;
- };
-
- InferenceEngine::StatusCode
- getShapeInferTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
- return getPrimitiveTypes(types, size, resp);
- };
-
- InferenceEngine::StatusCode getShapeInferImpl(InferenceEngine::IShapeInferImpl::Ptr& impl, const char* type,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- if (!oldExt)
- return GENERAL_ERROR;
- std::string typeStr = type;
- if (typeStr != "CustomTestLayer")
- return InferenceEngine::StatusCode::NOT_IMPLEMENTED;
- impl = _shapeInferImpl;
- return InferenceEngine::StatusCode::OK;
- }
+ void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {}
- void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {};
-
- void Unload() noexcept override {};
+ void Unload() noexcept override {}
void Release() noexcept override {}
- InferenceEngine::StatusCode
- getFactoryFor(InferenceEngine::ILayerImplFactory*& factory, const InferenceEngine::CNNLayer* cnnLayer,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- return InferenceEngine::StatusCode::NOT_IMPLEMENTED;
- };
-
std::map<std::string, ngraph::OpSet> getOpSets() override {
static std::map<std::string, ngraph::OpSet> opsets;
- if (oldExt)
- return {};
if (opsets.empty()) {
ngraph::OpSet opset;
opset.insert<CustomTestOp>();
}
private:
- InferenceEngine::IShapeInferImpl::Ptr _shapeInferImpl;
- bool oldExt;
};
-TEST_F(NGraphReshapeTests, ReshapeOldIRWithExtension) {
- std::string model = R"V0G0N(
-<net name="Activation" version="5" precision="FP32" batch="1">
- <layers>
- <layer name="in1" type="Input" precision="FP32" id="0">
- <output>
- <port id="0">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </output>
- </layer>
- <layer name="activation" id="1" type="CustomTestLayer" precision="FP32">
- <input>
- <port id="1">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </input>
- <output>
- <port id="2">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </output>
- <blobs>
- <weights offset="0" size="88"/>
- </blobs>
- </layer>
- </layers>
- <edges>
- <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
- </edges>
-</net>
-)V0G0N";
- InferenceEngine::Core ie;
- Blob::Ptr weights;
- SizeVector refBeforeReshape = {1, 3, 22, 22};
- SizeVector refAfterReshape = {4, 6, 44, 44};
-
- weights = make_shared_blob<uint8_t>(TensorDesc(Precision::U8, {88}, Layout::C));
- weights->allocate();
- fill_data(weights->buffer(), weights->size() / sizeof(float));
-
- auto network = ie.ReadNetwork(model, weights);
- InferenceEngine::ICNNNetwork::InputShapes newShapes;
- newShapes["in1"] = {2, 3, 22, 22};
- ASSERT_THROW(network.reshape(newShapes), InferenceEngine::details::InferenceEngineException);
- auto output = network.getOutputsInfo();
- SizeVector outDims = output["activation"]->getTensorDesc().getDims();
- ASSERT_EQ(outDims, refBeforeReshape);
- network.AddExtension(std::make_shared<TestInPlaceExtension>());
-
- ASSERT_NO_THROW(network.reshape(newShapes));
- output = network.getOutputsInfo();
- outDims = output["activation"]->getTensorDesc().getDims();
- ASSERT_EQ(outDims, refAfterReshape);
-}
-
-TEST_F(NGraphReshapeTests, ReshapeNewIRWithOldExtension) {
- std::string model = R"V0G0N(
-<net name="Activation" version="10">
- <layers>
- <layer name="in1" type="Parameter" id="0" version="opset1">
- <data shape="1,3,22,22" element_type="f32"/>
- <output>
- <port id="0" precision="FP32">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </output>
- </layer>
- <layer name="activation" id="1" type="CustomTestLayer" version="extension">
- <input>
- <port id="1" precision="FP32">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </input>
- <output>
- <port id="2" precision="FP32">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </output>
- <blobs>
- <weights offset="0" size="88"/>
- </blobs>
- </layer>
- <layer name="output" type="Result" id="2" version="opset1">
- <input>
- <port id="0" precision="FP32">
- <dim>1</dim>
- <dim>3</dim>
- <dim>22</dim>
- <dim>22</dim>
- </port>
- </input>
- </layer>
- </layers>
- <edges>
- <edge from-layer="0" from-port="0" to-layer="1" to-port="1"/>
- <edge from-layer="1" from-port="2" to-layer="2" to-port="0"/>
- </edges>
-</net>
-)V0G0N";
- InferenceEngine::Core ie;
- Blob::Ptr weights;
- SizeVector refBeforeReshape = {1, 3, 22, 22};
- SizeVector refAfterReshape = {4, 6, 44, 44};
-
- weights = make_shared_blob<uint8_t>(TensorDesc(Precision::U8, {88}, Layout::C));
- weights->allocate();
- fill_data(weights->buffer(), weights->size() / sizeof(float));
-
- auto network = ie.ReadNetwork(model, weights);
- InferenceEngine::ICNNNetwork::InputShapes newShapes;
- newShapes["in1"] = {2, 3, 22, 22};
- ASSERT_THROW(network.reshape(newShapes), InferenceEngine::details::InferenceEngineException);
- auto output = network.getOutputsInfo();
- SizeVector outDims = output["activation"]->getTensorDesc().getDims();
- ASSERT_EQ(outDims, refBeforeReshape);
- network.AddExtension(std::make_shared<TestInPlaceExtension>());
-
- ASSERT_NO_THROW(network.reshape(newShapes));
- output = network.getOutputsInfo();
- outDims = output["activation"]->getTensorDesc().getDims();
- ASSERT_EQ(outDims, refAfterReshape);
- // Convert to CNNNetwork
- auto layer = network.getLayerByName("activation");
- ASSERT_EQ("CustomTestLayer", layer->type);
-}
-
TEST_F(NGraphReshapeTests, ReshapeNewIRWithNewExtension1) {
std::string model = R"V0G0N(
<net name="Activation" version="10">
</net>
)V0G0N";
InferenceEngine::Core ie;
- ie.AddExtension(std::make_shared<TestInPlaceExtension>(false));
+ ie.AddExtension(std::make_shared<TestInPlaceExtension>());
Blob::Ptr weights;
SizeVector refBeforeReshape = {1, 3, 22, 22};
SizeVector refAfterReshape = {4, 6, 44, 44};
</net>
)V0G0N";
InferenceEngine::Core ie;
- ie.AddExtension(std::make_shared<TestInPlaceExtension>(false));
+ ie.AddExtension(std::make_shared<TestInPlaceExtension>());
Blob::Ptr weights;
SizeVector refBeforeReshape = {1, 3, 22, 22};
SizeVector refAfterReshape = {7, 10, 67, 67};
return GENERAL_ERROR;
};
- InferenceEngine::StatusCode
- getShapeInferTypes(char**& types, unsigned int& size, InferenceEngine::ResponseDesc* resp) noexcept override {
- return getPrimitiveTypes(types, size, resp);
- };
-
- InferenceEngine::StatusCode getShapeInferImpl(InferenceEngine::IShapeInferImpl::Ptr& impl, const char* type,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- return InferenceEngine::StatusCode::NOT_IMPLEMENTED;
- }
-
void GetVersion(const InferenceEngine::Version*& versionInfo) const noexcept override {};
void Unload() noexcept override {};
cnn.reshape(inShape);
}
-TEST_F(NGraphReshapeTests, genericNodeWithDynShape) {
- std::shared_ptr<ngraph::Function> ngraph;
- CNNNetwork cnnNetwork;
- {
- ngraph::PartialShape shape = ngraph::PartialShape::dynamic();
- std::map<std::string, InferenceEngine::Parameter> gen_params;
- std::string typeStr = "CustomTestLayer";
- ngraph::op::GenericIE::PortIE port;
- port.precision = InferenceEngine::Precision::FP32;
- port.dims = {1, 3, 2, 2};
- std::vector<ngraph::op::GenericIE::PortIE> ports = {port};
- ngraph::element::Type type(ngraph::element::Type_t::f32);
- auto param = std::make_shared<ngraph::op::Parameter>(type, shape);
-
- ngraph::OutputVector inputs = {param};
- auto genNode = std::make_shared<ngraph::op::GenericIE>(inputs, gen_params, typeStr, ports);
- auto result = std::make_shared<ngraph::op::Result>(genNode);
-
- ngraph::ParameterVector params = {param};
- ngraph::ResultVector results = {result};
-
- std::vector<std::shared_ptr<ngraph::Node>> nodes = {genNode};
- ngraph::op::GenericIE::DisableReshape disable(nodes);
-
- ngraph = std::make_shared<ngraph::Function>(results, params);
- cnnNetwork = CNNNetwork(ngraph);
- }
-
- cnnNetwork.AddExtension(std::make_shared<TestInPlaceExtension>());
- ASSERT_NO_THROW(cnnNetwork.reshape({}));
-}
-
TEST_F(NGraphReshapeTests, ReshapeWithDefaultGenericOps) {
std::string model = R"V0G0N(
<net name="Activation" version="10">
#include "unit_test_utils/mocks/shape_infer/mock_ishape_infer_impl.hpp"
#include "unit_test_utils/mocks/shape_infer/mock_output_controller.hpp"
#include "unit_test_utils/mocks/shape_infer/mock_reshaper_launcher.hpp"
-#include "unit_test_utils/mocks/shape_infer/mock_shape_infer_extension.hpp"
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(InferenceEngine::ICNNNetwork::InputShapes&));
MOCK_QUALIFIED_METHOD2(reshape, noexcept, InferenceEngine::StatusCode(const InferenceEngine::ICNNNetwork::InputShapes &, InferenceEngine::ResponseDesc *));
- MOCK_QUALIFIED_METHOD2(AddExtension, noexcept, InferenceEngine::StatusCode(
- const InferenceEngine::IShapeInferExtensionPtr &,
- InferenceEngine::ResponseDesc *));
MOCK_QUALIFIED_METHOD3(serialize, const noexcept, InferenceEngine::StatusCode(
const std::string &,
const std::string &,
MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
MOCK_QUALIFIED_METHOD1(getInputShapes, const noexcept, void(ICNNNetwork::InputShapes &));
MOCK_QUALIFIED_METHOD2(reshape, noexcept, StatusCode(const ICNNNetwork::InputShapes &, ResponseDesc *));
- MOCK_QUALIFIED_METHOD2(AddExtension, noexcept, StatusCode(const IShapeInferExtensionPtr &, ResponseDesc *));
MOCK_QUALIFIED_METHOD3(serialize, const noexcept, StatusCode(const std::string &, const std::string &, InferenceEngine::ResponseDesc*));
};
+++ /dev/null
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#pragma once
-
-#include <memory>
-#include <gmock/gmock.h>
-
-#include <ie_extension.h>
-
-IE_SUPPRESS_DEPRECATED_START
-
-class MockShapeInferExtension : public InferenceEngine::IShapeInferExtension {
- public:
- using Ptr = std::shared_ptr<MockShapeInferExtension>;
- MOCK_QUALIFIED_METHOD1(GetVersion, const noexcept, void(const InferenceEngine::Version *&));
- MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
- MOCK_QUALIFIED_METHOD0(Unload, noexcept, void());
-
- MOCK_QUALIFIED_METHOD3(getShapeInferTypes, noexcept, InferenceEngine::StatusCode
- (char**&, unsigned int&, InferenceEngine::ResponseDesc *resp));
-
- MOCK_QUALIFIED_METHOD3(getShapeInferImpl, noexcept, InferenceEngine::StatusCode
- (InferenceEngine::IShapeInferImpl::Ptr&, const char* type, InferenceEngine::ResponseDesc *resp));
-};
-
-IE_SUPPRESS_DEPRECATED_END
ASSERT_EQ(1, size);
}
-TEST(ExtensionTests, testGetShapeInferTypes) {
- IShapeInferExtensionPtr extension = make_so_pointer<IShapeInferExtension>(getExtensionPath());
- ResponseDesc resp;
- char **types;
- unsigned int size(0);
- ASSERT_EQ(OK, extension->getShapeInferTypes(types, size, &resp));
- ASSERT_EQ(1, size);
-}
-
-TEST(ExtensionTests, testGetShapeInferImpl) {
- IShapeInferExtensionPtr extension = make_so_pointer<IShapeInferExtension>(getExtensionPath());
- IShapeInferImpl::Ptr impl;
- ResponseDesc resp;
- ASSERT_EQ(OK, extension->getShapeInferImpl(impl, "test", &resp));
-}
-
-TEST(ExtensionTests, testGetIncorrectShapeInferImpl) {
- IShapeInferExtensionPtr extension = make_so_pointer<IShapeInferExtension>(getExtensionPath());
- CNNLayer testLayer({"test1", "test", Precision::FP32});
- IShapeInferImpl::Ptr impl;
- ResponseDesc resp;
- ASSERT_NE(OK, extension->getShapeInferImpl(impl, "test_incorrect", &resp));
-}
-
TEST(ExtensionTests, testGetOpSets) {
IExtensionPtr extension = make_so_pointer<IExtension>(getExtensionPath());
auto opsets = extension->getOpSets();
factory = factories[cnnLayer->type](cnnLayer);
return InferenceEngine::OK;
}
-
- StatusCode getShapeInferImpl(IShapeInferImpl::Ptr& impl, const char* type, ResponseDesc* resp) noexcept override {
- return NOT_IMPLEMENTED;
- }
private:
std::map<std::string, ext_factory> factories;
};
return InferenceEngine::OK;
}
- InferenceEngine::StatusCode getShapeInferImpl(InferenceEngine::IShapeInferImpl::Ptr& impl, const char* type,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- return InferenceEngine::NOT_IMPLEMENTED;
- }
-
private:
std::map<std::string, fake_ext_factory> factories;
};
factory = factories[cnnLayer->type](cnnLayer);
return OK;
}
- StatusCode getShapeInferTypes(char **&types, unsigned int &size, ResponseDesc *resp) noexcept override {
- collectTypes(types, size, GetExtensionsHolder()->si_list);
- return OK;
- };
-
- StatusCode getShapeInferImpl(IShapeInferImpl::Ptr &impl, const char *type, ResponseDesc *resp) noexcept override {
- auto &factories = GetExtensionsHolder()->si_list;
- if (factories.find(type) == factories.end()) {
- std::string errorMsg = std::string("Shape Infer Implementation for ") + type + " wasn't found!";
- if (resp) errorMsg.copy(resp->msg, sizeof(resp->msg) - 1);
- return NOT_FOUND;
- }
- impl = factories[type];
- return OK;
- }
template<class T>
void collectTypes(char **&types, unsigned int &size, const std::map<std::string, T> &factories) {
return InferenceEngine::OK;
}
- InferenceEngine::StatusCode getShapeInferImpl(InferenceEngine::IShapeInferImpl::Ptr& impl, const char* type,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- return InferenceEngine::NOT_IMPLEMENTED;
- }
-
private:
std::map<std::string, fake_ext_factory> factories;
};
return InferenceEngine::OK;
}
- InferenceEngine::StatusCode getShapeInferImpl(InferenceEngine::IShapeInferImpl::Ptr& impl, const char* type,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- return InferenceEngine::NOT_IMPLEMENTED;
- }
-
private:
std::map<std::string, fake_ext_factory> factories;
};