ie_add_compiler_flags(-ffunction-sections -fdata-sections)
ie_add_compiler_flags(-fdiagnostics-show-option)
ie_add_compiler_flags(-Wundef)
+ ie_add_compiler_flags(-Wreturn-type)
# Disable noisy warnings
#include <ie_plugin_config.hpp>
#include <hetero/hetero_plugin_config.hpp>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <threading/ie_executor_manager.hpp>
#include <ngraph/op/util/op_types.hpp>
namespace InferenceEngine {
namespace details {
-inline void extract_exception(StatusCode status, char* msg) {
+inline void extract_exception(StatusCode status, const char* msg) {
switch (status) {
case NOT_IMPLEMENTED:
throw NotImplemented(msg);
#include <cpp/ie_cnn_network.h>
#include <description_buffer.hpp>
#include <memory>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include "ie_plugin_config.hpp"
#include "caseless.hpp"
#include <legacy/details/ie_cnn_network_tools.h>
release_lock();
}
-CLDNNExecutionContextImpl::CLDNNExecutionContextImpl(const std::shared_ptr<InferencePluginInternal> plugin,
+CLDNNExecutionContextImpl::CLDNNExecutionContextImpl(const std::shared_ptr<IInferencePlugin> plugin,
const ParamMap& params,
const Config& config) :
m_plugin(plugin),
using Ptr = std::shared_ptr<CLDNNExecutionContextImpl>;
using CPtr = std::shared_ptr<const CLDNNExecutionContextImpl>;
- explicit CLDNNExecutionContextImpl(std::shared_ptr<InferencePluginInternal> plugin,
+ explicit CLDNNExecutionContextImpl(std::shared_ptr<IInferencePlugin> plugin,
const ParamMap& params,
const Config& config = {});
std::shared_ptr<cldnn::engine> GetEngine() const { return m_engine; }
Config& GetConfig() { return m_config; }
ContextType GetType() const { return m_type; }
- const std::weak_ptr<InferencePluginInternal> GetPlugin() const { return m_plugin; }
+ const std::weak_ptr<IInferencePlugin> GetPlugin() const { return m_plugin; }
void acquire_lock() {
while (lock.test_and_set(std::memory_order_acquire)) {}
Config m_config;
ContextType m_type;
- std::weak_ptr<InferencePluginInternal> m_plugin;
+ std::weak_ptr<IInferencePlugin> m_plugin;
std::atomic_flag lock;
};
using Ptr = std::shared_ptr<typedCLDNNExecutionContext>;
using CPtr = std::shared_ptr<const typedCLDNNExecutionContext>;
- explicit typedCLDNNExecutionContext(std::shared_ptr<InferencePluginInternal> plugin,
+ explicit typedCLDNNExecutionContext(std::shared_ptr<IInferencePlugin> plugin,
const ParamMap& params,
const Config& config = {})
: _impl(plugin, params, config) {}
#include <ie_common.h>
#include "descriptions/gna_input_desc.hpp"
#include "descriptions/gna_flags.hpp"
-#include "cpp_interfaces/base/ie_plugin_base.hpp"
#include "connection_details.hpp"
#include "backend/dnn.hpp"
#include "memory/polymorph_allocator.hpp"
#include <limits>
#include <legacy/graph_tools.hpp>
+#include <cpp_interfaces/exception2status.hpp>
#include <legacy/net_pass.h>
#include <debug.h>
#include <gna/gna_config.hpp>
#endif
namespace GNAPluginNS {
-class GNAPlugin : public InferenceEngine::IInferencePluginInternal, public std::enable_shared_from_this<GNAPlugin> {
+class GNAPlugin : public InferenceEngine::IInferencePlugin {
protected:
std::string _pluginName = "GNA";
//
#include <memory>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include "gna_plugin_internal.hpp"
using namespace InferenceEngine;
#include <vector>
-#include "ie_layers.h"
-#include "cpp_interfaces/base/ie_plugin_base.hpp"
+#include <legacy/ie_layers.h>
namespace GNAPluginNS {
// Split, Slice
#include <unordered_set>
#include "ie_plugin_config.hpp"
#include "hetero/hetero_plugin_config.hpp"
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include "hetero_executable_network.hpp"
using namespace InferenceEngine;
#include <multi-device/multi_device_config.hpp>
#include <ngraph/opsets/opset.hpp>
+#include <cpp_interfaces/exception2status.hpp>
#include "ie_plugin_cpp.hpp"
#include "ie_plugin_config.hpp"
#include "ie_itt.hpp"
namespace InferenceEngine {
-IInferencePlugin::~IInferencePlugin() {}
-
namespace {
template <typename T>
return std::move(value);
}
+/**
+ * @brief Invokes a callable and swallows only InferenceEngineException errors
+ * whose message carries the NOT_IMPLEMENTED marker; everything else propagates.
+ * @param f Callable to invoke (typically a lambda wrapping an optional plugin API)
+ */
+template <typename F>
+void allowNotImplemented(F && f) {
+    try {
+        f();
+    } catch (const details::InferenceEngineException & ex) {
+        std::string message = ex.what();
+        if (message.find(NOT_IMPLEMENTED_str) == std::string::npos) {
+            // Rethrow the *original* exception object: 'throw ex;' would
+            // copy-construct and slice any derived exception down to the
+            // caught base type, losing its dynamic type for outer handlers.
+            throw;
+        }
+    }
+}
+
} // namespace
DeviceIDParser::DeviceIDParser(const std::string& deviceNameWithID) {
PluginDescriptor desc = it->second;
try {
- InferenceEnginePluginPtr plugin(desc.libraryLocation);
+ InferencePlugin plugin(desc.libraryLocation);
{
- plugin->SetName(deviceName);
+ plugin.SetName(deviceName);
// Set Inference Engine class reference to plugins
ICore* mutableCore = const_cast<ICore*>(static_cast<const ICore*>(this));
- plugin->SetCore(mutableCore);
+ plugin.SetCore(mutableCore);
}
// Add registered extensions to new plugin
- for (const auto& ext : extensions) {
- plugin->AddExtension(ext, nullptr);
- }
-
- InferencePlugin cppPlugin(plugin);
+ allowNotImplemented([&](){
+ for (const auto& ext : extensions) {
+ plugin.AddExtension(ext);
+ }
+ });
// configuring
{
- cppPlugin.SetConfig(desc.defaultConfig);
-
- for (auto&& extensionLocation : desc.listOfExtentions) {
- cppPlugin.AddExtension(make_so_pointer<IExtension>(extensionLocation));
- }
+ allowNotImplemented([&]() {
+ plugin.SetConfig(desc.defaultConfig);
+ });
+
+ allowNotImplemented([&]() {
+ for (auto&& extensionLocation : desc.listOfExtentions) {
+ plugin.AddExtension(make_so_pointer<IExtension>(extensionLocation));
+ }
+ });
}
- plugins[deviceName] = cppPlugin;
+ plugins[deviceName] = plugin;
} catch (const details::InferenceEngineException& ex) {
THROW_IE_EXCEPTION << "Failed to create plugin " << FileUtils::fromFilePath(desc.libraryLocation) << " for device " << deviceName
<< "\n"
// set config for already created plugins
for (auto& plugin : plugins) {
if (deviceName.empty() || deviceName == plugin.first) {
- plugin.second.SetConfig(config);
+ allowNotImplemented([&]() {
+ plugin.second.SetConfig(config);
+ });
}
}
}
std::string deviceNameLocal = parser.getDeviceName();
InferenceEngine::InferencePlugin cppPlugin = _impl->GetCPPPluginByName(deviceNameLocal);
- const Version * version = cppPlugin.GetVersion();
- versions[deviceNameLocal] = *version;
+ const Version version = cppPlugin.GetVersion();
+ versions[deviceNameLocal] = version;
}
return versions;
#include <memory>
#include <string>
+#include "file_utils.h"
#include "cpp/ie_executable_network.hpp"
#include "cpp/ie_cnn_network.h"
#include "details/ie_exception_conversion.hpp"
#include "ie_plugin_ptr.hpp"
-#define CALL_RETURN_FNC_NO_ARGS(function, ...) \
- if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_RETURN_FNC_NO_ARGS was not initialized."; \
- return actual->function(__VA_ARGS__);
+#if defined __GNUC__
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wreturn-type"
+#endif
+
+// Wraps a call through the held 'actual' plugin pointer and converts any
+// escaping exception into the legacy status-code exception hierarchy via
+// details::extract_exception (status taken from the IE exception when present,
+// GENERAL_ERROR for std::exception, UNEXPECTED otherwise).
+// NOTE(review): extract_exception is expected to always throw (never return) —
+// that is why this header suppresses -Wreturn-type around the macro's users.
+// Requires a non-null member 'actual' in the enclosing scope.
+#define CALL_STATEMENT(...) \
+    if (!actual) THROW_IE_EXCEPTION << "Wrapper used in the CALL_STATEMENT was not initialized."; \
+    try { \
+        __VA_ARGS__; \
+    } catch (const InferenceEngine::details::InferenceEngineException& iex) { \
+        InferenceEngine::details::extract_exception(iex.hasStatus() ? \
+            iex.getStatus() : GENERAL_ERROR, iex.what()); \
+    } catch (const std::exception& ex) { \
+        InferenceEngine::details::extract_exception(GENERAL_ERROR, ex.what()); \
+    } catch (...) { \
+        InferenceEngine::details::extract_exception(UNEXPECTED, ""); \
+    }
namespace InferenceEngine {
InferenceEnginePluginPtr actual;
public:
- /** @brief A default constructor */
InferencePlugin() = default;
- /**
- * @brief Constructs a plugin instance from the given pointer.
- *
- * @param pointer Initialized Plugin pointer
- */
explicit InferencePlugin(const InferenceEnginePluginPtr& pointer): actual(pointer) {
if (actual == nullptr) {
THROW_IE_EXCEPTION << "InferencePlugin wrapper was not initialized.";
}
}
- /**
- * @copybrief IInferencePlugin::GetVersion
- *
- * Wraps IInferencePlugin::GetVersion
- * @return A plugin version
- */
- const Version* GetVersion() {
- const Version* versionInfo = nullptr;
- if (actual == nullptr) THROW_IE_EXCEPTION << "InferencePlugin wrapper was not initialized";
- actual->GetVersion(versionInfo);
- if (versionInfo == nullptr) {
- THROW_IE_EXCEPTION << "Unknown device is used";
+ explicit InferencePlugin(const FileUtils::FilePath & libraryLocation) :
+ actual(libraryLocation) {
+ if (actual == nullptr) {
+ THROW_IE_EXCEPTION << "InferencePlugin wrapper was not initialized.";
}
- return versionInfo;
}
- /**
- * @copybrief IInferencePlugin::LoadNetwork
- *
- * Wraps IInferencePlugin::LoadNetwork
- *
- * @param network A network object to load
- * @param config A map of configuration options
- * @return Created Executable Network object
- */
- ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config) {
- IExecutableNetwork::Ptr ret;
- CALL_STATUS_FNC(LoadNetwork, ret, network, config);
- return ExecutableNetwork(ret, actual);
+ void SetName(const std::string & deviceName) {
+ CALL_STATEMENT(actual->SetName(deviceName));
+ }
+
+ void SetCore(ICore* core) {
+ CALL_STATEMENT(actual->SetCore(core));
+ }
+
+ const Version GetVersion() const {
+ CALL_STATEMENT(return actual->GetVersion());
}
- /**
- * @copybrief InferencePlugin::LoadNetwork
- *
- * Wraps IInferencePlugin::LoadNetwork
- * @param network A network object to load
- * @param config A map of configuration options
- * @return Created Executable Network object
- */
ExecutableNetwork LoadNetwork(CNNNetwork network, const std::map<std::string, std::string>& config) {
IExecutableNetwork::Ptr ret;
- CALL_STATUS_FNC(LoadNetwork, ret, network, config);
- if (ret.get() == nullptr) THROW_IE_EXCEPTION << "Internal error: pointer to executable network is null";
+ CALL_STATEMENT(actual->LoadNetwork(ret, network, config));
return ExecutableNetwork(ret, actual);
}
- /**
- * @copybrief IInferencePlugin::AddExtension
- *
- * Wraps IInferencePlugin::AddExtension
- *
- * @param extension Pointer to loaded Extension
- */
void AddExtension(InferenceEngine::IExtensionPtr extension) {
- CALL_STATUS_FNC(AddExtension, extension);
+ CALL_STATEMENT(actual->AddExtension(extension));
}
- /**
- * @copybrief IInferencePlugin::SetConfig
- *
- * Wraps IInferencePlugin::SetConfig
- * @param config A configuration map
- */
void SetConfig(const std::map<std::string, std::string>& config) {
- CALL_STATUS_FNC(SetConfig, config);
+ CALL_STATEMENT(actual->SetConfig(config));
}
- /**
- * @copybrief IInferencePlugin::ImportNetwork
- *
- * Wraps IInferencePlugin::ImportNetwork
- * @param modelFileName A path to the imported network
- * @param config A configuration map
- * @return Created Executable Network object
- */
ExecutableNetwork ImportNetwork(const std::string& modelFileName,
const std::map<std::string, std::string>& config) {
- IExecutableNetwork::Ptr ret;
- CALL_STATUS_FNC(ImportNetwork, ret, modelFileName, config);
- return ExecutableNetwork(ret, actual);
+ CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(modelFileName, config), actual));
}
- /**
- * @copybrief IInferencePlugin::QueryNetwork
- *
- * Wraps IInferencePlugin::QueryNetwork
- *
- * @param network A network object to query
- * @param config A configuration map
- * @param res Query results
- */
void QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
QueryNetworkResult& res) const {
- if (actual == nullptr) THROW_IE_EXCEPTION << "InferencePlugin wrapper was not initialized";
- actual->QueryNetwork(network, config, res);
+ CALL_STATEMENT(actual->QueryNetwork(network, config, res));
if (res.rc != OK) THROW_IE_EXCEPTION << res.resp.msg;
}
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string> &config) {
- CALL_RETURN_FNC_NO_ARGS(ImportNetwork, networkModel, config);
+ CALL_STATEMENT(return actual->ImportNetwork(networkModel, config));
}
Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
- CALL_RETURN_FNC_NO_ARGS(GetMetric, name, options);
+ CALL_STATEMENT(return actual->GetMetric(name, options));
}
ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) {
- CALL_RETURN_FNC_NO_ARGS(LoadNetwork, network, config, context);
+ CALL_STATEMENT(return actual->LoadNetwork(network, config, context));
}
RemoteContext::Ptr CreateContext(const ParamMap& params) {
- CALL_RETURN_FNC_NO_ARGS(CreateContext, params);
+ CALL_STATEMENT(return actual->CreateContext(params));
}
RemoteContext::Ptr GetDefaultContext() {
- CALL_RETURN_FNC_NO_ARGS(GetDefaultContext);
+ CALL_STATEMENT(return actual->GetDefaultContext());
}
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
- CALL_RETURN_FNC_NO_ARGS(ImportNetwork, networkModel, context, config);
+ CALL_STATEMENT(return actual->ImportNetwork(networkModel, context, config));
}
Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
- CALL_RETURN_FNC_NO_ARGS(GetConfig, name, options);
+ CALL_STATEMENT(return actual->GetConfig(name, options));
}
/**
using Ptr = std::shared_ptr<InferencePlugin>;
};
} // namespace InferenceEngine
+
+#undef CALL_STATEMENT
+
+#if defined __GNUC__
+# pragma GCC diagnostic pop
+#endif
#include <string>
#include "details/ie_so_pointer.hpp"
-#include "ie_extension.h"
-#include "cpp_interfaces/interface/ie_plugin.hpp"
+#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
namespace InferenceEngine {
namespace details {
std::find(std::begin(streamExecutorConfigKeys), std::end(streamExecutorConfigKeys), key)) {
streamExecutorConfig.SetConfig(key, val);
} else if (key == PluginConfigParams::KEY_DYN_BATCH_LIMIT) {
- int val_i = std::stoi(val);
+ int val_i = -1;
+ try {
+ val_i = std::stoi(val);
+ } catch (const std::exception&) {
+ THROW_IE_EXCEPTION << "Wrong value for property key " << PluginConfigParams::KEY_DYN_BATCH_LIMIT
+ << ". Expected only integer numbers";
+ }
// zero and any negative value will be treated
// as default batch size
batchLimit = std::max(val_i, 0);
#include "mkldnn_itt.h"
#include <legacy/net_pass.h>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <threading/ie_executor_manager.hpp>
#include <memory>
#include <ie_plugin_config.hpp>
#include "ie_metric_helpers.hpp"
#include <legacy/ie_util_internal.hpp>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <cpp_interfaces/base/ie_infer_async_request_base.hpp>
#include <multi-device/multi_device_config.hpp>
#include <ie_plugin_config.hpp>
+++ /dev/null
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-/**
- * \brief inference engine plugin API wrapper, to be used by particular implementors
- * \file ie_plugin_base.hpp
- */
-
-#pragma once
-
-#include <map>
-#include <memory>
-#include <string>
-
-#include "cpp_interfaces/interface/ie_plugin.hpp"
-#include "cpp_interfaces/exception2status.hpp"
-#include "description_buffer.hpp"
-
-namespace InferenceEngine {
-
-/**
- * @brief Plugin `noexcept` wrapper which accepts IInferencePluginInternal derived instance which can throw exceptions
- * @ingroup ie_dev_api_plugin_api
- * @tparam T Minimal CPP implementation of IInferencePluginInternal (e.g. InferencePluginInternal)
- */
-template <class T>
-class PluginBase : public IInferencePlugin {
- class VersionStore : public Version {
- std::string _dsc;
- std::string _buildNumber;
-
- public:
- explicit VersionStore(const Version& v) {
- _dsc = v.description;
- _buildNumber = v.buildNumber;
- description = _dsc.c_str();
- buildNumber = _buildNumber.c_str();
- apiVersion = v.apiVersion;
- }
- } _version;
-
- std::shared_ptr<T> _impl;
-
-public:
- /**
- * @brief Constructor with plugin version and actual underlying implementation.
- * @param actualReported version that are to be reported
- * @param impl Underplying implementation of type IInferencePluginInternal
- */
- PluginBase(const Version& actualReported, std::shared_ptr<T> impl): _version(actualReported) {
- if (impl.get() == nullptr) {
- THROW_IE_EXCEPTION << "implementation not defined";
- }
- _impl = impl;
- }
-
- void SetName(const std::string& pluginName) noexcept override {
- _impl->SetName(pluginName);
- }
-
- std::string GetName() const noexcept override {
- return _impl->GetName();
- }
-
- void GetVersion(const Version*& versionInfo) noexcept override {
- versionInfo = &_version;
- }
-
- StatusCode LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
- const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept override {
- TO_STATUS(_impl->LoadNetwork(executableNetwork, network, config));
- }
-
- StatusCode AddExtension(InferenceEngine::IExtensionPtr extension,
- InferenceEngine::ResponseDesc* resp) noexcept override {
- TO_STATUS(_impl->AddExtension(extension));
- }
-
- StatusCode SetConfig(const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept override {
- TO_STATUS(_impl->SetConfig(config));
- }
-
- StatusCode ImportNetwork(IExecutableNetwork::Ptr& ret, const std::string& modelFileName,
- const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept override {
- TO_STATUS(ret = _impl->ImportNetwork(modelFileName, config));
- }
-
- ExecutableNetwork ImportNetwork(std::istream& networkModel,
- const std::map<std::string, std::string>& config) override {
- return _impl->ImportNetwork(networkModel, config);
- }
-
- void Release() noexcept override {
- delete this;
- }
-
- void QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
- QueryNetworkResult& res) const noexcept override {
- TO_STATUSVAR(_impl->QueryNetwork(network, config, res), res.rc, &res.resp);
- }
-
- void SetCore(ICore* core) noexcept override {
- _impl->SetCore(core);
- }
-
- const ICore& GetCore() const override {
- IE_ASSERT(nullptr != _impl->GetCore());
- return *_impl->GetCore();
- }
-
- Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const override {
- return _impl->GetConfig(name, options);
- }
-
- Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const override {
- return _impl->GetMetric(name, options);
- }
-
- RemoteContext::Ptr CreateContext(const ParamMap& params) override {
- return _impl->CreateContext(params);
- }
-
- RemoteContext::Ptr GetDefaultContext() override {
- return _impl->GetDefaultContext();
- }
-
- ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
- RemoteContext::Ptr context) override {
- return _impl->LoadNetwork(network, config, context);
- }
-
- ExecutableNetwork ImportNetwork(std::istream& networkModel,
- const RemoteContext::Ptr& context,
- const std::map<std::string, std::string>& config) override {
- return _impl->ImportNetwork(networkModel, context, config);
- }
-
-private:
- ~PluginBase() override {}
-};
-
-template <class T>
-inline IInferencePlugin* make_ie_compatible_plugin(const Version& reported, std::shared_ptr<T> impl) {
- return new PluginBase<T>(reported, impl);
-}
-
-} // namespace InferenceEngine
* @param[in] plugin The plugin
* @note Needed to correctly handle ownership between objects.
*/
- void SetPointerToPluginInternal(IInferencePluginInternal::Ptr plugin) {
+ void SetPointerToPlugin(IInferencePlugin::Ptr plugin) {
_plugin = plugin;
}
InferenceEngine::OutputsDataMap _networkOutputs; //!< Holds information about network outputs data
/**
- * @brief A pointer to a IInferencePluginInternal interface.
+ * @brief A pointer to a IInferencePlugin interface.
* @note Needed to correctly handle ownership between objects.
*/
- IInferencePluginInternal::Ptr _plugin;
+ IInferencePlugin::Ptr _plugin;
};
} // namespace InferenceEngine
} // namespace
/**
- * @brief Optimal implementation of IInferencePluginInternal interface to avoid duplication in all plugins
+ * @brief Optimal implementation of IInferencePlugin interface to avoid duplication in all plugins
* @ingroup ie_dev_api_plugin_api
*/
-class InferencePluginInternal : public IInferencePluginInternal,
- public std::enable_shared_from_this<InferencePluginInternal> {
-public:
+class InferencePluginInternal : public IInferencePlugin {
+protected:
/**
* @brief Destroys the object.
*/
~InferencePluginInternal() override = default;
+public:
void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
const std::map<std::string, std::string>& config) override {
- cloneAndCreateExecutableNetwork(executableNetwork, network, config);
+ LoadNetworkImplPrivate(executableNetwork, network, config);
}
ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) override {
IExecutableNetwork::Ptr executableNetworkPtr;
- cloneAndCreateExecutableNetwork(executableNetworkPtr, network, config, context);
+ LoadNetworkImplPrivate(executableNetworkPtr, network, config, context);
return ExecutableNetwork(executableNetworkPtr);
}
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
}
+private:
+ /**
+ * @brief A helper method which clones a ICNNNetwork object, keeps InputsDataMap and OutputsDataMap data maps,
+ * and creates an IExecutableNetwork object
+ * @param executableNetwork An output executable network object
+ * @param network An input ICNNNetwork object used to create an executable network object
+ * @param config A map of string -> string configuration options.
+ * @param context An optional pointer to RemoteContext
+ */
+ void LoadNetworkImplPrivate(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
+ const std::map<std::string, std::string>& config,
+ RemoteContext::Ptr context = nullptr) {
+ InputsDataMap networkInputs, networkInputsCloned;
+ OutputsDataMap networkOutputs, networkOutputsCloned;
+ network.getInputsInfo(networkInputs);
+ network.getOutputsInfo(networkOutputs);
+ copyInputOutputInfo(networkInputs, networkOutputs, networkInputsCloned, networkOutputsCloned);
+
+ ExecutableNetworkInternal::Ptr impl;
+ if (nullptr == context) {
+ impl = LoadExeNetworkImpl(network, config);
+ } else {
+ impl = LoadExeNetworkImpl(network, context, config);
+ }
+
+ impl->setNetworkInputs(networkInputsCloned);
+ impl->setNetworkOutputs(networkOutputsCloned);
+ impl->SetPointerToPlugin(shared_from_this());
+
+ executableNetwork.reset(new ExecutableNetworkBase<ExecutableNetworkInternal>(impl), [](details::IRelease* p) {
+ p->Release();
+ });
+ }
+
protected:
/**
- * @brief Creates an executable network from an pares network object, users can create as many networks as they need
+ * @brief Creates an executable network from a parsed network object, users can create as many networks as they need
* and use them simultaneously (up to the limitation of the HW resources)
* @note The function is used in
* InferencePluginInternal::LoadNetwork(IExecutableNetwork::Ptr&, const ICNNNetwork&, const std::map<std::string, std::string>&)
const std::map<std::string, std::string>& config) = 0;
/**
- * @brief Creates an executable network using remove context from an pares network object,
+ * @brief Creates an executable network using remote context from a parsed network object,
* users can create as many networks as they need and use them simultaneously (up to the limitation of the HW resources)
* @note The function is used in
* InferencePluginInternal::LoadNetwork(const ICNNNetwork&, const std::map<std::string, std::string>&, RemoteContext::Ptr)
}
/**
- * @brief A helper method which clones a ICNNNetwork object, keeps InputsDataMap and OutputsDataMap data maps,
- * and creates an IExecutableNetwork object
- * @param executableNetwork An output executable network object
- * @param network An input ICNNNetwork object used to create an executable network object
- * @param config A map of string -> string configuration options.
- * @param context An optional pointer to RemoteContext
- */
- void cloneAndCreateExecutableNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
- const std::map<std::string, std::string>& config,
- RemoteContext::Ptr context = nullptr) {
- InputsDataMap networkInputs, networkInputsCloned;
- OutputsDataMap networkOutputs, networkOutputsCloned;
- network.getInputsInfo(networkInputs);
- network.getOutputsInfo(networkOutputs);
- copyInputOutputInfo(networkInputs, networkOutputs, networkInputsCloned, networkOutputsCloned);
-
- ExecutableNetworkInternal::Ptr impl;
- if (nullptr == context) {
- impl = LoadExeNetworkImpl(network, config);
- } else {
- impl = LoadExeNetworkImpl(network, context, config);
- }
-
- impl->setNetworkInputs(networkInputsCloned);
- impl->setNetworkOutputs(networkOutputsCloned);
- impl->SetPointerToPluginInternal(shared_from_this());
-
- executableNetwork.reset(new ExecutableNetworkBase<ExecutableNetworkInternal>(impl), [](details::IRelease* p) {
- p->Release();
- });
- }
-
- /**
* @brief Creates an executable network from an previously exported network
* @note The function is called from
- * IInferencePluginInternal::ImportNetwork(std::istream&, const RemoteContext::Ptr&, const std::map<std::string, std::string>&)
+ * IInferencePlugin::ImportNetwork(std::istream&, const RemoteContext::Ptr&, const std::map<std::string, std::string>&)
* performs common steps first and calls this plugin-dependent implementation after.
* @param networkModel Reference to network model output stream
* @param config A string -> string map of parameters
#include <ie_parameter.hpp>
#include <ie_iexecutable_network.hpp>
#include <ie_remote_context.hpp>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <blob_factory.hpp>
}
/**
- * @interface IInferencePluginInternal
- * @brief An internal API of plugin to be implemented by a plugin, which is used in PluginBase forwarding mechanism
+ * @interface IInferencePlugin
+ * @brief An API of plugin to be implemented by a plugin
* @ingroup ie_dev_api_plugin_api
*/
-class IInferencePluginInternal {
+class IInferencePlugin : public details::IRelease,
+ public std::enable_shared_from_this<IInferencePlugin> {
+    /**
+     * @brief Deep-copying holder for a Version: owns the description and
+     * build-number strings so the inherited C-string pointers stay valid
+     * for the lifetime of this object.
+     */
+    class VersionStore : public Version {
+        std::string _dsc;
+        std::string _buildNumber;
+
+        // Re-points the inherited raw pointers at this object's own storage
+        void copyFrom(const Version & v) {
+            _dsc = v.description;
+            _buildNumber = v.buildNumber;
+            description = _dsc.c_str();
+            buildNumber = _buildNumber.c_str();
+            apiVersion = v.apiVersion;
+        }
+
+    public:
+        VersionStore() = default;
+
+        explicit VersionStore(const Version& v) {
+            copyFrom(v);
+        }
+
+        // NB: a user-provided copy constructor is required here. The
+        // compiler-generated one would copy the 'description'/'buildNumber'
+        // pointers verbatim, leaving them dangling into the *source*
+        // object's _dsc/_buildNumber strings once it is destroyed.
+        VersionStore(const VersionStore & v) {
+            copyFrom(v);
+        }
+
+        VersionStore & operator = (const VersionStore & v) {
+            if (&v != this) {
+                copyFrom(v);
+            }
+            return *this;
+        }
+    } _version;
+
+protected:
+ /**
+ * @brief Destroys the object.
+ */
+ ~IInferencePlugin() override = default;
+
public:
/**
- * @brief A shared pointer to IInferencePluginInternal interface
+ * @brief A shared pointer to IInferencePlugin interface
*/
- using Ptr = std::shared_ptr<IInferencePluginInternal>;
+ using Ptr = std::shared_ptr<IInferencePlugin>;
/**
- * @brief Destroys the object.
+ * @brief Sets a plugin version
+ * @param version A version to set
+ */
+ void SetVersion(const Version & version) {
+ _version = VersionStore(version);
+ }
+
+ /**
+ * @brief Gets a plugin version
+ * @return A const InferenceEngine::Version object
*/
- virtual ~IInferencePluginInternal() = default;
+ Version GetVersion() const {
+ return _version;
+ }
+
+ void Release() noexcept override {
+ delete this;
+ }
/**
* @brief Provides a name of a plugin
virtual ICore* GetCore() const noexcept = 0;
/**
- * @brief Queries a plugin about support layers in network
+ * @brief Queries a plugin about supported layers in network
* @param[in] network The network object to query
* @param[in] config The map of configuration parameters
* @param res The result of query operator containing supported layers map
* @brief Defines the exported `CreatePluginEngine` function which is used to create a plugin instance
* @ingroup ie_dev_api_plugin_api
*/
-#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
- INFERENCE_PLUGIN_API(InferenceEngine::StatusCode) CreatePluginEngine( \
- InferenceEngine::IInferencePlugin *&plugin, \
- InferenceEngine::ResponseDesc *resp) noexcept { \
- try { \
- InferenceEngine::Version _version = version; \
- plugin = make_ie_compatible_plugin(_version, std::make_shared<PluginType>(__VA_ARGS__)); \
- return OK; \
- } \
- catch (std::exception &ex) { \
+#define IE_DEFINE_PLUGIN_CREATE_FUNCTION(PluginType, version, ...) \
+ INFERENCE_PLUGIN_API(InferenceEngine::StatusCode) CreatePluginEngine( \
+ InferenceEngine::IInferencePlugin *&plugin, \
+ InferenceEngine::ResponseDesc *resp) noexcept { \
+ try { \
+ plugin = new PluginType(__VA_ARGS__); \
+ plugin->SetVersion(version); \
+ return OK; \
+ } \
+ catch (std::exception &ex) { \
return InferenceEngine::DescriptionBuffer(GENERAL_ERROR, resp) << ex.what(); \
- } \
+ } \
}
+++ /dev/null
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-/**
- * @brief A header file for Main Inference Engine API
- *
- * @file ie_plugin.hpp
- */
-#pragma once
-
-#include <ie_iextension.h>
-#include <ie_icnn_network.hpp>
-#include <ie_icore.hpp>
-
-#include <map>
-#include <memory>
-#include <set>
-#include <string>
-#include <vector>
-
-namespace InferenceEngine {
-
-/**
- * @brief This class is a main plugin interface
- */
-class INFERENCE_ENGINE_API_CLASS(IInferencePlugin)
- : public details::IRelease {
-public:
- /**
- * @brief Returns plugin version information
- *
- * @param versionInfo Pointer to version info. Is set by plugin
- */
- virtual void GetVersion(const Version*& versionInfo) noexcept = 0;
-
- /**
- * @brief Creates an executable network from a network object. User can create as many networks as they need and use
- * them simultaneously (up to the limitation of the hardware resources)
- *
- * @param ret Reference to a shared ptr of the returned network interface
- * @param network Network object acquired from Core::ReadNetwork
- * @param config Map of pairs: (config parameter name, config parameter value) relevant only for this load operation
- * @param resp Pointer to the response message that holds a description of an error if any occurred
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- virtual StatusCode LoadNetwork(IExecutableNetwork::Ptr& ret, const ICNNNetwork& network,
- const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept = 0;
-
- /**
- * @brief Creates an executable network from a previously exported network
- *
- * @param ret Reference to a shared ptr of the returned network interface
- * @param modelFileName Path to the location of the exported file
- * @param config Map of pairs: (config parameter name, config parameter value) relevant only for this load
- * operation*
- * @param resp Pointer to the response message that holds a description of an error if any occurred
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- virtual StatusCode ImportNetwork(IExecutableNetwork::Ptr& ret, const std::string& modelFileName,
- const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept = 0;
-
- /**
- * @brief Registers extension within the plugin
- *
- * @param extension Pointer to already loaded extension
- * @param resp Pointer to the response message that holds a description of an error if any occurred
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- virtual StatusCode AddExtension(InferenceEngine::IExtensionPtr extension,
- InferenceEngine::ResponseDesc* resp) noexcept = 0;
-
- /**
- * @brief Sets configuration for plugin, acceptable keys can be found in ie_plugin_config.hpp
- *
- * @param config Map of pairs: (config parameter name, config parameter value)
- * @param resp Pointer to the response message that holds a description of an error if any occurred
- * @return Status code of the operation. InferenceEngine::OK if succeeded
- */
- virtual StatusCode SetConfig(const std::map<std::string, std::string>& config, ResponseDesc* resp) noexcept = 0;
-
- /**
- * @brief Query plugin if it supports specified network with specified configuration
- *
- * @param network Network object to query
- * @param config Map of pairs: (config parameter name, config parameter value)
- * @param res Reference to query network result
- */
- virtual void QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
- QueryNetworkResult& res) const noexcept {
- (void)network;
- (void)config;
- res.rc = InferenceEngine::NOT_IMPLEMENTED;
- }
-
- /**
- * @brief Sets plugin name
- * @param pluginName Plugin name to set
- */
- virtual void SetName(const std::string& pluginName) noexcept = 0;
-
- /**
- * @brief Returns plugin name
- * @return Plugin name
- */
- virtual std::string GetName() const noexcept = 0;
-
- /**
- * @brief Sets pointer to ICore interface
- * @param core Pointer to Core interface
- */
- virtual void SetCore(ICore* core) noexcept = 0;
-
- /**
- * @brief Gets refernce to ICore interface
- * @return Reference to core interface
- */
- virtual const ICore& GetCore() const = 0;
-
- /**
- * @brief Gets configuration dedicated to plugin behaviour
- * @param name - value of config corresponding to config key
- * @param options - configuration details for config
- * @return Value of config corresponding to config key
- */
- virtual Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const = 0;
-
- /**
- * @brief Gets general runtime metric for dedicated hardware
- * @param name - metric name to request
- * @param options - configuration details for metric
- * @return Metric value corresponding to metric key
- */
- virtual Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const = 0;
-
- /**
- * @brief Creates a remote context instance based on a map of parameters
- * @param[in] params The map of parameters
- * @return A remote context object
- */
- virtual RemoteContext::Ptr CreateContext(const ParamMap& params) = 0;
-
- /**
- * @brief Provides a default remote context instance if supported by a plugin
- * @return The default context.
- */
- virtual RemoteContext::Ptr GetDefaultContext() = 0;
-
- /**
- * @brief Wraps original method
- * IInferencePlugin::LoadNetwork
- * @param network - a network object acquired from InferenceEngine::Core::ReadNetwork
- * @param config string-string map of config parameters relevant only for this load operation
- * @param context - a pointer to plugin context derived from RemoteContext class used to
- * execute the network
- * @return Created Executable Network object
- */
- virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
- RemoteContext::Ptr context) = 0;
-
- /**
- * @brief Creates an executable network from an previously exported network using plugin implementation
- * and removes Inference Engine magic and plugin name
- * @param networkModel Reference to network model output stream
- * @param config A string -> string map of parameters
- * @return An Executable network
- */
- virtual ExecutableNetwork ImportNetwork(std::istream& networkModel,
- const std::map<std::string, std::string>& config) = 0;
-
- /**
- * @brief Creates an executable network from an previously exported network using plugin implementation
- * and removes Inference Engine magic and plugin name
- * @param networkModel Reference to network model output stream
- * @param context - a pointer to plugin context derived from RemoteContext class used to
- * execute the network
- * @param config A string -> string map of parameters
- * @return An Executable network
- */
- virtual ExecutableNetwork ImportNetwork(std::istream& networkModel,
- const RemoteContext::Ptr& context,
- const std::map<std::string, std::string>& config) = 0;
-
- /**
- * @brief A default virtual destructor
- */
- ~IInferencePlugin() override;
-};
-
-} // namespace InferenceEngine
//
#include <memory>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include "myriad_plugin.h"
#include "myriad_mvnc_wraper.h"
#include <ie_metric_helpers.hpp>
#include <cpp/ie_cnn_network.h>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
#include <cpp_interfaces/impl/ie_executable_network_internal.hpp>
#include <legacy/ie_util_internal.hpp>
//
#include <gtest/gtest.h>
-#include "cpp_interfaces/base/ie_plugin_base.hpp"
+#include <cpp/ie_executable_network.hpp>
using namespace ::testing;
using namespace std;
#include <common_test_utils/test_assertions.hpp>
#include <details/ie_so_pointer.hpp>
#include <details/ie_irelease.hpp>
-#include <cpp_interfaces/interface/ie_plugin.hpp>
+#include <cpp_interfaces/interface/ie_iplugin_internal.hpp>
#include <ie_plugin_ptr.hpp>
using namespace InferenceEngine;
std::shared_ptr<SharedObjectLoader> sharedLoader(new SharedObjectLoader(name.c_str()));
SymbolLoader<SharedObjectLoader> loader(sharedLoader);
IInferencePlugin * value = nullptr;
- ASSERT_NE(nullptr, value = loader.instantiateSymbol<IInferencePlugin>(SOCreatorTrait<IInferencePlugin>::name));
+ ASSERT_NE(nullptr, value = loader.instantiateSymbol<IInferencePlugin>(
+ SOCreatorTrait<IInferencePlugin>::name));
value->Release();
}
#include "unit_test_utils/mocks/mock_ie_imemory_state.hpp"
#include "unit_test_utils/mocks/mock_iexecutable_network.hpp"
#include "unit_test_utils/mocks/mock_iinfer_request.hpp"
-#include "unit_test_utils/mocks/mock_iinference_plugin.hpp"
#include "unit_test_utils/mocks/mock_not_empty_icnn_network.hpp"
-#include "unit_test_utils/mocks/cpp_interfaces/mock_plugin_impl.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/mock_task_executor.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/impl/mock_async_infer_request_default.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iexecutable_network_internal.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iinfer_request_internal.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_imemory_state_internal.hpp"
+#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp"
#include "unit_test_utils/mocks/shape_infer/mock_input_controller.hpp"
#include "unit_test_utils/mocks/shape_infer/mock_ishape_infer_impl.hpp"
#include <map>
#include <string>
-#include "cpp_interfaces/interface/ie_plugin.hpp"
+#include "cpp_interfaces/interface/ie_iplugin_internal.hpp"
#include <gmock/gmock.h>
-class MockIInferencePlugin : public InferenceEngine :: IInferencePlugin {
+class MockIInferencePlugin : public InferenceEngine::IInferencePlugin {
public:
- MOCK_QUALIFIED_METHOD2(AddExtension, noexcept, InferenceEngine::StatusCode(InferenceEngine::IExtensionPtr,
- InferenceEngine::ResponseDesc *resp));
- MOCK_QUALIFIED_METHOD1(GetVersion, noexcept, void(const InferenceEngine::Version *&));
- MOCK_QUALIFIED_METHOD0(Release, noexcept, void());
- MOCK_QUALIFIED_METHOD2(LoadNetwork, noexcept, InferenceEngine::StatusCode(
- const InferenceEngine::ICNNNetwork &, InferenceEngine::ResponseDesc *resp));
- MOCK_QUALIFIED_METHOD4(LoadNetwork, noexcept, InferenceEngine::StatusCode(
- InferenceEngine::IExecutableNetwork::Ptr &,
- const InferenceEngine::ICNNNetwork &,
- const std::map<std::string, std::string> &,
- InferenceEngine::ResponseDesc *));
- MOCK_QUALIFIED_METHOD4(ImportNetwork, noexcept, InferenceEngine::StatusCode(
- InferenceEngine::IExecutableNetwork::Ptr &,
- const std::string &,
- const std::map<std::string, std::string> &,
- InferenceEngine::ResponseDesc *));
- MOCK_QUALIFIED_METHOD2(SetConfig, noexcept, InferenceEngine::StatusCode(
- const std::map<std::string, std::string> &, InferenceEngine::ResponseDesc *resp));
+ MOCK_METHOD1(AddExtension, void(InferenceEngine::IExtensionPtr));
+ MOCK_METHOD3(LoadNetwork, void(IExecutableNetwork::Ptr&,
+ const ICNNNetwork&, const std::map<std::string, std::string>&));
+ MOCK_METHOD2(ImportNetwork, IExecutableNetwork::Ptr(
+ const std::string&, const std::map<std::string, std::string>&));
+ MOCK_METHOD1(SetConfig, void(const std::map<std::string, std::string> &));
MOCK_QUALIFIED_METHOD1(SetName, noexcept, void(const std::string&));
MOCK_QUALIFIED_METHOD0(GetName, const noexcept, std::string(void));
MOCK_QUALIFIED_METHOD1(SetCore, noexcept, void(InferenceEngine::ICore*));
- MOCK_QUALIFIED_METHOD0(GetCore, const, const InferenceEngine::ICore&(void));
+ MOCK_QUALIFIED_METHOD0(GetCore, const noexcept, InferenceEngine::ICore *(void));
MOCK_QUALIFIED_METHOD2(GetConfig, const, InferenceEngine::Parameter(
const std::string&, const std::map<std::string, InferenceEngine::Parameter>&));
MOCK_QUALIFIED_METHOD2(GetMetric, const, InferenceEngine::Parameter(
+++ /dev/null
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#pragma once
-
-#include <gmock/gmock.h>
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "ie_iexecutable_network.hpp"
-#include "ie_icore.hpp"
-#include "cpp/ie_executable_network.hpp"
-
-
-class MockPluginImpl {
- public:
- MOCK_METHOD3(LoadExeNetwork, void(InferenceEngine::IExecutableNetwork::Ptr &,
- const InferenceEngine::ICNNNetwork &,
- const std::map<std::string, std::string> &));
-
- void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &exeNetwork,
- const InferenceEngine::ICNNNetwork &cnnNetwork,
- const std::map<std::string, std::string> &config) {
- LoadExeNetwork(exeNetwork, cnnNetwork, config);
- }
- MOCK_METHOD1(AddExtension, void(InferenceEngine::IExtensionPtr ext_ptr));
- MOCK_METHOD1(SetConfig, void(const std::map <std::string, std::string> &));
- MOCK_METHOD2(ImportNetwork, InferenceEngine::IExecutableNetwork::Ptr(const std::string &, const std::map<std::string, std::string> &));
- InferenceEngine::ExecutableNetwork ImportNetwork(const std::istream&, const std::map<std::string, std::string> &) {return {};}
- MOCK_QUALIFIED_METHOD0(GetName, const noexcept, std::string(void));
- MOCK_QUALIFIED_METHOD1(SetName, noexcept, void(const std::string &));
- MOCK_QUALIFIED_METHOD0(GetCore, const noexcept, InferenceEngine::ICore*(void));
- MOCK_QUALIFIED_METHOD1(SetCore, noexcept, void(InferenceEngine::ICore*));
-
- MOCK_CONST_METHOD2(GetConfig, InferenceEngine::Parameter(const std::string& name,
- const std::map<std::string, InferenceEngine::Parameter> & options));
- MOCK_CONST_METHOD2(GetMetric, InferenceEngine::Parameter(const std::string& name,
- const std::map<std::string, InferenceEngine::Parameter> & options));
- void QueryNetwork(const InferenceEngine::ICNNNetwork &network,
- const std::map<std::string, std::string>& config, InferenceEngine::QueryNetworkResult &res) const { }
-
- MOCK_METHOD1(CreateContext, InferenceEngine::RemoteContext::Ptr(const std::map<std::string, InferenceEngine::Parameter> & options));
- MOCK_METHOD0(GetDefaultContext, InferenceEngine::RemoteContext::Ptr(void));
- InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &, const std::map<std::string, std::string> &,
- InferenceEngine::RemoteContext::Ptr) { return{}; }
- InferenceEngine::ExecutableNetwork ImportNetwork(const std::istream &networkModel,
- const InferenceEngine::RemoteContext::Ptr &context,
- const std::map<std::string, std::string> &config = {}) { return{}; }
-};
#include <string>
#include "mock_plugin.hpp"
+#include <cpp_interfaces/exception2status.hpp>
#include "description_buffer.hpp"
using namespace std;
using namespace InferenceEngine;
-#define ACTION_IF_NOT_NULL(action) (nullptr == _target) ? NOT_IMPLEMENTED : _target->action
-
// Wraps an optional delegate plugin. A null |target| is allowed; in that
// case forwarding calls (see LoadNetwork) report NOT_IMPLEMENTED instead.
MockPlugin::MockPlugin(InferenceEngine::IInferencePlugin *target)
    : _target(target) {}
-StatusCode MockPlugin::LoadNetwork(IExecutableNetwork::Ptr &ret, const ICNNNetwork &network,
- const std::map<std::string, std::string> &config, ResponseDesc *resp) noexcept {
- return ACTION_IF_NOT_NULL(LoadNetwork(ret, network, config, resp));
-}
-
-void MockPlugin::Release() noexcept {
- if (nullptr != _target) _target->Release();
- delete this;
-}
-
-void MockPlugin::GetVersion(const Version *&versionInfo) noexcept {
- versionInfo = &version;
-}
-
-StatusCode MockPlugin::AddExtension(IExtensionPtr extension, InferenceEngine::ResponseDesc *resp) noexcept {
- return NOT_IMPLEMENTED;
-}
-
-StatusCode MockPlugin::SetConfig(const std::map<std::string, std::string> &_config, ResponseDesc *resp) noexcept {
- config = _config;
- return InferenceEngine::OK;
-}
-
-StatusCode
-MockPlugin::ImportNetwork(IExecutableNetwork::Ptr &ret, const std::string &modelFileName,
- const std::map<std::string, std::string> &config, ResponseDesc *resp) noexcept {
- return NOT_IMPLEMENTED;
-}
-
-void MockPlugin::SetName(const std::string& pluginName) noexcept {
-}
-
-std::string MockPlugin::GetName() const noexcept {
- return {};
-}
-
-void MockPlugin::SetCore(ICore* core) noexcept {
+void MockPlugin::SetConfig(const std::map<std::string, std::string>& config) {
+ this->config = config;
}
-const ICore& MockPlugin::GetCore() const {
- static ICore * core = nullptr;
- return *core;
-}
-
-Parameter MockPlugin::GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
- return {};
-}
-
-Parameter MockPlugin::GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
- return {};
-}
-
-RemoteContext::Ptr MockPlugin::CreateContext(const ParamMap& params) {
- return {};
-}
-RemoteContext::Ptr MockPlugin::GetDefaultContext() {
- return {};
-}
-ExecutableNetwork MockPlugin::LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
- RemoteContext::Ptr context) {
- return {};
-}
-
-ExecutableNetwork MockPlugin::ImportNetwork(std::istream& networkModel,
- const std::map<std::string, std::string>& config) {
- return {};
+void MockPlugin::LoadNetwork(IExecutableNetwork::Ptr &ret, const ICNNNetwork &network,
+ const std::map<std::string, std::string> &config) {
+ if (_target) {
+ _target->LoadNetwork(ret, network, config);
+ } else {
+ THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
+ }
}
-ExecutableNetwork MockPlugin::ImportNetwork(std::istream& networkModel,
- const RemoteContext::Ptr& context,
- const std::map<std::string, std::string>& config) {
+ExecutableNetworkInternal::Ptr
+MockPlugin::LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork& network,
+ const std::map<std::string, std::string>& config) {
return {};
}
#include <map>
#include <string>
-#include <cpp_interfaces/interface/ie_plugin.hpp>
+#include <cpp_interfaces/impl/ie_plugin_internal.hpp>
#include <ie_icnn_network.hpp>
-class MockPlugin : public InferenceEngine::IInferencePlugin {
+class MockPlugin : public InferenceEngine::InferencePluginInternal {
InferenceEngine::IInferencePlugin * _target = nullptr;
- InferenceEngine::Version version;
public:
explicit MockPlugin(InferenceEngine::IInferencePlugin*target);
- void GetVersion(const InferenceEngine::Version *& versionInfo) noexcept override;
-
- InferenceEngine::StatusCode AddExtension(InferenceEngine::IExtensionPtr extension, InferenceEngine::ResponseDesc *resp) noexcept override;
-
- InferenceEngine::StatusCode SetConfig(const std::map<std::string, std::string>& config,
- InferenceEngine::ResponseDesc* resp) noexcept override;
-
-
- InferenceEngine::StatusCode
- LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &ret, const InferenceEngine::ICNNNetwork &network,
- const std::map<std::string, std::string> &config, InferenceEngine::ResponseDesc *resp) noexcept override;
-
- InferenceEngine::StatusCode
- ImportNetwork(InferenceEngine::IExecutableNetwork::Ptr &ret, const std::string &modelFileName,
- const std::map<std::string, std::string> &config, InferenceEngine::ResponseDesc *resp) noexcept override;
-
- void Release() noexcept override;
-
- void SetName(const std::string& pluginName) noexcept override;
- std::string GetName() const noexcept override;
- void SetCore(InferenceEngine::ICore* core) noexcept override;
- const InferenceEngine::ICore& GetCore() const override;
- InferenceEngine::Parameter
- GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
- InferenceEngine::Parameter
- GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
- InferenceEngine::RemoteContext::Ptr
- CreateContext(const InferenceEngine::ParamMap& params) override;
- InferenceEngine::RemoteContext::Ptr GetDefaultContext() override;
- InferenceEngine::ExecutableNetwork
- LoadNetwork(const InferenceEngine::ICNNNetwork& network, const std::map<std::string, std::string>& config,
- InferenceEngine::RemoteContext::Ptr context) override;
- InferenceEngine::ExecutableNetwork
- ImportNetwork(std::istream& networkModel, const std::map<std::string, std::string>& config) override;
- InferenceEngine::ExecutableNetwork
- ImportNetwork(std::istream& networkModel, const InferenceEngine::RemoteContext::Ptr& context,
- const std::map<std::string, std::string>& config) override;
+ void SetConfig(const std::map<std::string, std::string>& config) override;
+ void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &ret, const InferenceEngine::ICNNNetwork &network,
+ const std::map<std::string, std::string> &config) override;
+ ExecutableNetworkInternal::Ptr
+ LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork& network,
+ const std::map<std::string, std::string>& config) override;
std::map<std::string, std::string> config;
};
+++ /dev/null
-// Copyright (C) 2018-2020 Intel Corporation
-// SPDX-License-Identifier: Apache-2.0
-//
-
-#include <gtest/gtest.h>
-#include <gmock/gmock-spec-builders.h>
-
-#include <ie_version.hpp>
-#include <ie_plugin_cpp.hpp>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
-
-#include "unit_test_utils/mocks/cpp_interfaces/mock_plugin_impl.hpp"
-
-using namespace ::testing;
-using namespace std;
-using namespace InferenceEngine;
-using namespace InferenceEngine::details;
-
-IE_SUPPRESS_DEPRECATED_START
-class PluginBaseTests: public ::testing::Test {
-protected:
- std::shared_ptr<MockPluginImpl> mock_impl;
- shared_ptr<IInferencePlugin> plugin;
- ResponseDesc dsc;
- virtual void TearDown() {
- }
- virtual void SetUp() {
- mock_impl.reset(new MockPluginImpl());
- plugin = details::shared_from_irelease(make_ie_compatible_plugin({{2, 1}, "test", "version"}, mock_impl));
- }
-};
-
-TEST_F(PluginBaseTests, canReportVersion) {
- const Version *V;
- plugin->GetVersion(V);
-
- EXPECT_STREQ(V->buildNumber, "test");
- EXPECT_STREQ(V->description, "version");
- EXPECT_EQ(V->apiVersion.major, 2);
- EXPECT_EQ(V->apiVersion.minor, 1);
-}
-
-TEST_F(PluginBaseTests, canForwardLoadExeNetwork) {
- EXPECT_CALL(*mock_impl.get(), LoadExeNetwork(_, _, _)).Times(1);
- ICNNNetwork * network = nullptr;
- IExecutableNetwork::Ptr exeNetwork = nullptr;
- ASSERT_EQ(OK, plugin->LoadNetwork(exeNetwork, *network, {}, &dsc));
-}
-
-
-TEST_F(PluginBaseTests, canReportErrorInLoadExeNetwork) {
- EXPECT_CALL(*mock_impl.get(), LoadExeNetwork(_, _, _)).WillOnce(Throw(std::runtime_error("compare")));
-
- ICNNNetwork * network = nullptr;
- IExecutableNetwork::Ptr exeNetwork = nullptr;
- ASSERT_NE(plugin->LoadNetwork(exeNetwork, *network, {}, &dsc), OK);
- ASSERT_STREQ(dsc.msg, "compare");
-}
-
-TEST_F(PluginBaseTests, canCatchUnknownErrorInLoadExeNetwork) {
- EXPECT_CALL(*mock_impl.get(), LoadExeNetwork(_, _, _)).WillOnce(Throw(5));
- ICNNNetwork * network = nullptr;
- IExecutableNetwork::Ptr exeNetwork = nullptr;
- ASSERT_EQ(UNEXPECTED, plugin->LoadNetwork(exeNetwork, *network, {}, nullptr));
-}
-
-TEST_F(PluginBaseTests, canForwarSetConfig) {
- const std::map <std::string, std::string> config;
- EXPECT_CALL(*mock_impl.get(), SetConfig(Ref(config))).Times(1);
- ASSERT_EQ(OK, plugin->SetConfig(config, &dsc));
-}
-
-TEST_F(PluginBaseTests, canReportErrorInSetConfig) {
- const std::map <std::string, std::string> config;
- EXPECT_CALL(*mock_impl.get(), SetConfig(_)).WillOnce(Throw(std::runtime_error("error")));
-
- ASSERT_NE(OK, plugin->SetConfig(config, &dsc));
- ASSERT_STREQ(dsc.msg, "error");
-}
-
-TEST_F(PluginBaseTests, canCatchUnknownErrorInSetConfig) {
- EXPECT_CALL(*mock_impl.get(), SetConfig(_)).WillOnce(Throw(5));
- const std::map <std::string, std::string> config;
- ASSERT_EQ(UNEXPECTED, plugin->SetConfig(config, nullptr));
-}
-
-TEST(InferencePluginTests, throwsOnNullptrCreation) {
- InferenceEnginePluginPtr nulptr;
- InferencePlugin plugin;
- ASSERT_THROW(plugin = InferencePlugin(nulptr), details::InferenceEngineException);
-}
-
-TEST(InferencePluginTests, throwsOnUninitializedGetVersion) {
- InferencePlugin plg;
- ASSERT_THROW(plg.GetVersion(), details::InferenceEngineException);
-}
-
-TEST(InferencePluginTests, throwsOnUninitializedLoadNetwork) {
- InferencePlugin plg;
- QueryNetworkResult r;
- ASSERT_THROW(plg.LoadNetwork(CNNNetwork(), {}), details::InferenceEngineException);
-}
-
-TEST(InferencePluginTests, throwsOnUninitializedImportNetwork) {
- InferencePlugin plg;
- ASSERT_THROW(plg.ImportNetwork({}, {}), details::InferenceEngineException);
-}
-
-TEST(InferencePluginTests, throwsOnUninitializedAddExtension) {
- InferencePlugin plg;
- ASSERT_THROW(plg.AddExtension(IExtensionPtr()), details::InferenceEngineException);
-}
-
-TEST(InferencePluginTests, throwsOnUninitializedSetConfig) {
- InferencePlugin plg;
- ASSERT_THROW(plg.SetConfig({{}}), details::InferenceEngineException);
-}
-
-TEST(InferencePluginTests, nothrowsUninitializedCast) {
- InferencePlugin plg;
- ASSERT_NO_THROW(auto plgPtr = static_cast<InferenceEnginePluginPtr>(plg));
-}
-IE_SUPPRESS_DEPRECATED_END
#include <gmock/gmock-spec-builders.h>
#include <ie_version.hpp>
-#include <cpp_interfaces/base/ie_plugin_base.hpp>
+#include <ie_plugin_cpp.hpp>
#include "unit_test_utils/mocks/mock_not_empty_icnn_network.hpp"
-#include "unit_test_utils/mocks/cpp_interfaces/mock_plugin_impl.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/impl/mock_inference_plugin_internal.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/impl/mock_executable_thread_safe_default.hpp"
#include "unit_test_utils/mocks/cpp_interfaces/interface/mock_iinfer_request_internal.hpp"
class InferenceEnginePluginInternalTest : public ::testing::Test {
protected:
- IE_SUPPRESS_DEPRECATED_START
shared_ptr<IInferencePlugin> plugin;
- IE_SUPPRESS_DEPRECATED_END
shared_ptr<MockInferencePluginInternal> mock_plugin_impl;
shared_ptr<MockExecutableNetworkInternal> mockExeNetworkInternal;
shared_ptr<MockExecutableNetworkThreadSafe> mockExeNetworkTS;
pluginId = "TEST";
mock_plugin_impl.reset(new MockInferencePluginInternal());
mock_plugin_impl->SetName(pluginId);
- plugin = details::shared_from_irelease(make_ie_compatible_plugin({{2, 1}, "test", "version"}, mock_plugin_impl));
+ plugin = std::static_pointer_cast<IInferencePlugin>(mock_plugin_impl);
mockExeNetworkInternal = make_shared<MockExecutableNetworkInternal>();
- mockExeNetworkInternal->SetPointerToPluginInternal(mock_plugin_impl);
+ mockExeNetworkInternal->SetPointerToPlugin(mock_plugin_impl);
}
void getInferRequestWithMockImplInside(IInferRequest::Ptr &request) {
mockExeNetworkTS = make_shared<MockExecutableNetworkThreadSafe>();
EXPECT_CALL(*mock_plugin_impl.get(), LoadExeNetworkImpl(_, _)).WillOnce(Return(mockExeNetworkTS));
EXPECT_CALL(*mockExeNetworkTS.get(), CreateInferRequestImpl(_, _)).WillOnce(Return(mockInferRequestInternal));
- IE_SUPPRESS_DEPRECATED_START
- sts = plugin->LoadNetwork(exeNetwork, mockNotEmptyNet, {}, &dsc);
- IE_SUPPRESS_DEPRECATED_END
- ASSERT_EQ((int) StatusCode::OK, sts) << dsc.msg;
+ plugin->LoadNetwork(exeNetwork, mockNotEmptyNet, {});
ASSERT_NE(exeNetwork, nullptr) << dsc.msg;
sts = exeNetwork->CreateInferRequest(request, &dsc);
ASSERT_EQ((int) StatusCode::OK, sts) << dsc.msg;
mock_plugin_impl->importedString = {};
}
+
+TEST(InferencePluginTests, throwsOnNullptrCreation) {
+ InferenceEnginePluginPtr nulptr;
+ InferencePlugin plugin;
+ ASSERT_THROW(plugin = InferencePlugin(nulptr), details::InferenceEngineException);
+}
+
+TEST(InferencePluginTests, throwsOnUninitializedGetVersion) {
+ InferencePlugin plg;
+ ASSERT_THROW(plg.GetVersion(), details::InferenceEngineException);
+}
+
+TEST(InferencePluginTests, throwsOnUninitializedLoadNetwork) {
+ InferencePlugin plg;
+ ASSERT_THROW(plg.LoadNetwork(CNNNetwork(), {}), details::InferenceEngineException);
+}
+
+TEST(InferencePluginTests, throwsOnUninitializedImportNetwork) {
+ InferencePlugin plg;
+ ASSERT_THROW(plg.ImportNetwork({}, {}), details::InferenceEngineException);
+}
+
+TEST(InferencePluginTests, throwsOnUninitializedAddExtension) {
+ InferencePlugin plg;
+ ASSERT_THROW(plg.AddExtension(IExtensionPtr()), details::InferenceEngineException);
+}
+
+TEST(InferencePluginTests, throwsOnUninitializedSetConfig) {
+ InferencePlugin plg;
+ ASSERT_THROW(plg.SetConfig({{}}), details::InferenceEngineException);
+}
+
+TEST(InferencePluginTests, nothrowsUninitializedCast) {
+ InferencePlugin plg;
+ ASSERT_NO_THROW(auto plgPtr = static_cast<InferenceEnginePluginPtr>(plg));
+}
#include "details/ie_so_loader.h"
#include "unit_test_utils/mocks/mock_engine/mock_plugin.hpp"
-#include "unit_test_utils/mocks/mock_iinference_plugin.hpp"
+#include "unit_test_utils/mocks/cpp_interfaces/impl/mock_inference_plugin_internal.hpp"
using namespace std;
using namespace ::testing;
using namespace InferenceEngine::details;
-IE_SUPPRESS_DEPRECATED_START
-
class PluginTest: public ::testing::Test {
protected:
unique_ptr<SharedObjectLoader> sharedObjectLoader;
return ptr;
}
- MockIInferencePlugin engine;
+ MockInferencePluginInternal2 engine;
};
TEST_F(PluginTest, canCreatePlugin) {
- auto ptr = make_std_function<IInferencePlugin*(IInferencePlugin*)>("CreatePluginEngineProxy");
+ auto ptr = make_std_function<IInferencePlugin*
+ (IInferencePlugin*)>("CreatePluginEngineProxy");
unique_ptr<IInferencePlugin, std::function<void(IInferencePlugin*)>> smart_ptr(ptr(nullptr), [](IInferencePlugin *p) {
p->Release();
// SetConfig must copy the supplied map into the plugin: clearing the local
// map after the call must not affect the value the plugin stored.
TEST_F(PluginTest, canSetConfiguration) {
    InferenceEnginePluginPtr ptr = getPtr();
    // TODO: dynamic->reinterpret because of clang/gcc cannot
    // dynamically cast this MOCK object
    ASSERT_TRUE(reinterpret_cast<MockPlugin*>(*ptr)->config.empty());

    std::map<std::string, std::string> config = { { "key", "value" } };
    ASSERT_NO_THROW(ptr->SetConfig(config));
    config.clear();

    ASSERT_STREQ(reinterpret_cast<MockPlugin*>(*ptr)->config["key"].c_str(), "value");
}
-
-IE_SUPPRESS_DEPRECATED_END
\ No newline at end of file
#include "helpers/myriad_devices.hpp"
#include <details/ie_exception.hpp>
-#include <cpp_interfaces/interface/ie_plugin.hpp>
#include <ie_plugin_ptr.hpp>
using namespace std;
protected:
void checkExtensionRemoved(extension_params p) {
try {
- StatusCode sts;
- ResponseDesc resp;
std::unique_ptr<InferenceEnginePluginPtr> score_engine;
score_engine.reset(new InferenceEnginePluginPtr(make_plugin_name(p.plugin()).c_str()));
- sts = (*score_engine)->SetConfig(p.config, &resp);
- ASSERT_TRUE(sts == OK) << resp.msg;
+ (*score_engine)->SetConfig(p.config);
ASSERT_EQ(p.extension.use_count(), 2);
- sts = (*score_engine)->AddExtension(p.extension, &resp);
- ASSERT_TRUE(sts == OK) << resp.msg;
+ (*score_engine)->AddExtension(p.extension);
// multi-device holds additional reference of the extension ptr
ASSERT_EQ(p.extension.use_count(), p.pluginName.find("Multi")==std::string::npos ? 3 : 4);
score_engine.reset();
}
void checkExtensionNotRemovedFromAnotherEngineObject(extension_params p) {
try {
- StatusCode sts;
- ResponseDesc resp;
std::unique_ptr<InferenceEnginePluginPtr> score_engine1;
score_engine1.reset(new InferenceEnginePluginPtr(make_plugin_name(p.plugin()).c_str()));
- sts = (*score_engine1)->SetConfig(p.config, &resp);
- ASSERT_TRUE(sts == OK) << resp.msg;
-
+ (*score_engine1)->SetConfig(p.config);
+
std::unique_ptr<InferenceEnginePluginPtr> score_engine2;
score_engine2.reset(new InferenceEnginePluginPtr(make_plugin_name(p.plugin()).c_str()));
- sts = (*score_engine2)->SetConfig(p.config, &resp);
- ASSERT_TRUE(sts == OK) << resp.msg;
+ (*score_engine2)->SetConfig(p.config);
ASSERT_EQ(p.extension.use_count(), 2);
- sts = (*score_engine1)->AddExtension(p.extension, &resp);
- ASSERT_TRUE(sts == OK) << resp.msg;
+ (*score_engine1)->AddExtension(p.extension);
// multi-device holds additional reference of the extension ptr
ASSERT_EQ(p.extension.use_count(), p.pluginName.find("Multi")==std::string::npos ? 3 : 4);
score_engine2.reset();
}
-void GNADumpXNNMatcher::load(GNAPlugin & plugin) {
+void GNADumpXNNMatcher::load(std::shared_ptr<GNAPlugin> & plugin) {
// matching gna DumpXNN forward call.
- plugin = GNAPlugin(_env.config);
+ plugin = std::make_shared<GNAPlugin>(_env.config);
auto loadNetworkFromIR = [&]() {
MockICNNNetwork net;
_env.cb(network);
}
- plugin.LoadNetwork(network);
+ plugin->LoadNetwork(network);
};
auto loadNetworkFromAOT = [&]() {
- plugin.ImportNetwork(_env.importedModelFileName);
+ plugin->ImportNetwork(_env.importedModelFileName);
};
auto loadNetwork = [&]() {
try {
// matching gna DumpXNN forward call.
- GNAPluginNS::GNAPlugin plugin;
+ auto plugin = std::make_shared<GNAPluginNS::GNAPlugin>();
load(plugin);
}
catch(std::exception &ex) {
protected:
bool match_in_dctor = true;
- void load(GNAPluginNS::GNAPlugin & plugin);
+ void load(std::shared_ptr<GNAPluginNS::GNAPlugin> & plugin);
void match();
};
th.join();
}
-TEST_P(PThreadBinSemaphoreTest, DestroyAcquireadSemaResultedInError) {
+TEST_P(PThreadBinSemaphoreTest, DestroyAcquiredSemaResultedInError) {
ASSERT_EQ(0, invoke_wait());
// semaphore deleted - since not blocked, even if counter is 0