void AddExtension(InferenceEngine::IExtensionPtr extension) override;
void SetConfig(const std::map<std::string, std::string> &config) override;
- void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &executableNetwork,
- const InferenceEngine::ICNNNetwork &network,
- const std::map<std::string, std::string> &config_map) override { THROW_GNA_EXCEPTION << "Not implemented"; }
+ InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
+ const std::map<std::string, std::string> &config_map) override {
+ THROW_GNA_EXCEPTION << "Not implemented";
+ }
InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
const std::map<std::string, std::string> &config_map,
InferenceEngine::RemoteContext::Ptr context) override { THROW_GNA_EXCEPTION << "Not implemented"; }
}
ExecutableNetwork LoadNetwork(CNNNetwork network, const std::map<std::string, std::string>& config) {
- IExecutableNetwork::Ptr ret;
- CALL_STATEMENT(actual->LoadNetwork(ret, network, config));
- return ExecutableNetwork(ret, actual);
+ CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config), actual));
}
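// A minimal sketch of the CALL_STATEMENT guard relied on above (the real macro
// is defined elsewhere in this header; this body is an assumption for
// illustration, not the verbatim definition): verify that the wrapped plugin
// pointer is initialized, forward the call, and translate any escaping
// exception into an InferenceEngine exception.
#define CALL_STATEMENT(...)                                                    \
    if (!actual) THROW_IE_EXCEPTION << "Wrapper used was not initialized.";    \
    try {                                                                      \
        __VA_ARGS__;                                                           \
    } catch (const std::exception& ex) {                                       \
        THROW_IE_EXCEPTION << ex.what();                                       \
    }
// Because the whole `return ExecutableNetwork(...)` expression sits inside the
// macro, the new return-by-value wrapper keeps the same exception-translation
// behavior as the removed out-parameter version.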
void AddExtension(InferenceEngine::IExtensionPtr extension) {
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string> &config) {
- CALL_STATEMENT(return actual->ImportNetwork(networkModel, config));
+ CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(networkModel, config), actual));
}
Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) {
- CALL_STATEMENT(return actual->LoadNetwork(network, config, context));
+ CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config, context), actual));
}
RemoteContext::Ptr CreateContext(const ParamMap& params) {
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
- CALL_STATEMENT(return actual->ImportNetwork(networkModel, context, config));
+ CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(networkModel, context, config), actual));
}
Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
~InferencePluginInternal() override = default;
public:
- void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
- const std::map<std::string, std::string>& config) override {
- LoadNetworkImplPrivate(executableNetwork, network, config);
+ ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
+ const std::map<std::string, std::string>& config) override {
+ return LoadNetworkImplPrivate(network, config);
}
ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) override {
- IExecutableNetwork::Ptr executableNetworkPtr;
- LoadNetworkImplPrivate(executableNetworkPtr, network, config, context);
- return ExecutableNetwork(executableNetworkPtr);
+ return LoadNetworkImplPrivate(network, config, context);
}
IExecutableNetwork::Ptr ImportNetwork(const std::string& modelFileName,
/**
* @brief A helper method which clones a ICNNNetwork object, keeps InputsDataMap and OutputsDataMap data maps,
* and creates an IExecutableNetwork object
- * @param executableNetwork An output executable network object
* @param network An input ICNNNetwork object used to create an executable network object
* @param config A map of string -> string configuration options.
* @param context An optional pointer to RemoteContext
+ * @return An executable network object
*/
- void LoadNetworkImplPrivate(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
- const std::map<std::string, std::string>& config,
- RemoteContext::Ptr context = nullptr) {
+ ExecutableNetwork LoadNetworkImplPrivate(const ICNNNetwork& network,
+ const std::map<std::string, std::string>& config,
+ RemoteContext::Ptr context = nullptr) {
InputsDataMap networkInputs, networkInputsCloned;
OutputsDataMap networkOutputs, networkOutputsCloned;
network.getInputsInfo(networkInputs);
impl->setNetworkOutputs(networkOutputsCloned);
impl->SetPointerToPlugin(shared_from_this());
- executableNetwork.reset(new ExecutableNetworkBase<ExecutableNetworkInternal>(impl), [](details::IRelease* p) {
- p->Release();
- });
+ auto executableNetwork = make_executable_network(impl);
+ return ExecutableNetwork(executableNetwork);
}
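// A plausible sketch of the make_executable_network helper used above (an
// assumption about its shape, not the verbatim definition). It replaces the
// manual reset(new ExecutableNetworkBase..., deleter) pattern removed by this
// change with a single factory that wraps the internal implementation into the
// reference-counted public object.
template <class T>
inline IExecutableNetwork::Ptr make_executable_network(std::shared_ptr<T> impl) {
    // Wrap the plugin-internal implementation into the public ABI object and
    // destroy it through the IE reference-counting interface, mirroring the
    // custom deleter the old inline code installed by hand.
    return IExecutableNetwork::Ptr(new ExecutableNetworkBase<T>(impl),
                                   [](details::IRelease* p) { p->Release(); });
}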
protected:
* @brief Creates an executable network from a parsed network object; users can create as many networks as they need
* and use them simultaneously (up to the limitation of the HW resources)
* @note The function is used in
- * InferencePluginInternal::LoadNetwork(IExecutableNetwork::Ptr&, const ICNNNetwork&, const std::map<std::string, std::string>&)
+ * InferencePluginInternal::LoadNetwork(const ICNNNetwork&, const std::map<std::string, std::string>&)
* which performs common steps first and calls this plugin-dependent method implementation after.
* @param network A network object
* @param config string-string map of config parameters relevant only for this load operation
* @return Shared pointer to the ExecutableNetwork object
*/
- virtual ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const ICNNNetwork& network, RemoteContext::Ptr context,
+ virtual ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const ICNNNetwork& network,
+ RemoteContext::Ptr context,
const std::map<std::string, std::string>& config) {
(void)network;
(void)context;
/**
* @brief Creates an executable network from a parsed network object; users can create as many networks as they need
* and use them simultaneously (up to the limitation of the HW resources)
- * @param executableNetwork - a reference to a shared ptr of the returned network interface
- * @param network - a network object acquired from InferenceEngine::Core::ReadNetwork
- * @param config string-string map of config parameters relevant only for this load operation
+ * @param network A network object acquired from InferenceEngine::Core::ReadNetwork
+ * @param config A string-string map of config parameters relevant only for this load operation
+ * @return Created Executable Network object
*/
- virtual void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
- const std::map<std::string, std::string>& config) = 0;
+ virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
+ const std::map<std::string, std::string>& config) = 0;
/**
* @brief Creates an executable network from a network object, on a specified remote context
* @param context A pointer to a RemoteContext object used to execute the network
* @return Created Executable Network object
*/
- virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
+ virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
+ const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) = 0;
/**
* @brief Registers extension within plugin
this->config = config;
}
-void MockPlugin::LoadNetwork(IExecutableNetwork::Ptr &ret, const ICNNNetwork &network,
- const std::map<std::string, std::string> &config) {
+ExecutableNetwork
+MockPlugin::LoadNetwork(const ICNNNetwork &network,
+ const std::map<std::string, std::string> &config) {
if (_target) {
- _target->LoadNetwork(ret, network, config);
+ return _target->LoadNetwork(network, config);
} else {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
}
explicit MockPlugin(InferenceEngine::IInferencePlugin* target);
void SetConfig(const std::map<std::string, std::string>& config) override;
- void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &ret, const InferenceEngine::ICNNNetwork &network,
- const std::map<std::string, std::string> &config) override;
+ InferenceEngine::ExecutableNetwork
+ LoadNetwork(const InferenceEngine::ICNNNetwork &network,
+ const std::map<std::string, std::string> &config) override;
ExecutableNetworkInternal::Ptr
LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork& network,
const std::map<std::string, std::string>& config) override;
}
void getInferRequestWithMockImplInside(IInferRequest::Ptr &request) {
- IExecutableNetwork::Ptr exeNetwork;
+ ExecutableNetwork exeNetwork;
InputsDataMap inputsInfo;
mockNotEmptyNet.getInputsInfo(inputsInfo);
OutputsDataMap outputsInfo;
mockExeNetworkTS = make_shared<MockExecutableNetworkThreadSafe>();
EXPECT_CALL(*mock_plugin_impl.get(), LoadExeNetworkImpl(_, _)).WillOnce(Return(mockExeNetworkTS));
EXPECT_CALL(*mockExeNetworkTS.get(), CreateInferRequestImpl(_, _)).WillOnce(Return(mockInferRequestInternal));
- plugin->LoadNetwork(exeNetwork, mockNotEmptyNet, {});
- ASSERT_NE(exeNetwork, nullptr) << dsc.msg;
- sts = exeNetwork->CreateInferRequest(request, &dsc);
- ASSERT_EQ((int) StatusCode::OK, sts) << dsc.msg;
+ ASSERT_NO_THROW(exeNetwork = plugin->LoadNetwork(mockNotEmptyNet, {}));
+ ASSERT_NO_THROW(request = exeNetwork.CreateInferRequest());
}
};
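// For comparison, the same flow against the removed out-parameter API (taken
// from the deleted lines above); the explicit status-code plumbing is what the
// return-by-value API eliminates:
//
//   IExecutableNetwork::Ptr exeNetwork;
//   plugin->LoadNetwork(exeNetwork, mockNotEmptyNet, {});   // filled via out-param
//   ASSERT_NE(exeNetwork, nullptr) << dsc.msg;
//   sts = exeNetwork->CreateInferRequest(request, &dsc);    // StatusCode + ResponseDesc
//   ASSERT_EQ((int) StatusCode::OK, sts) << dsc.msg;
//
// With the new API the ExecutableNetwork wrapper throws on failure, so
// ASSERT_NO_THROW replaces the StatusCode / ResponseDesc checks.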
ASSERT_NE(1, network.getOutputsInfo().size());
std::shared_ptr<MKLDNNTestEngine> score_engine(new MKLDNNTestEngine());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
+ InferenceEngine::ExecutableNetwork exeNetwork1;
+ ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));
size_t modified_outputs_size = score_engine->getGraph(exeNetwork1).GetOutputNodes().size();
ASSERT_NO_THROW(network2 = core.ReadNetwork(model, weights_ptr));
ASSERT_EQ(1, network2.getOutputsInfo().size());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork2;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork2, network2, {}));
+ InferenceEngine::ExecutableNetwork exeNetwork2;
+ ASSERT_NO_THROW(exeNetwork2 = score_engine->LoadNetwork(network2, {}));
size_t original_outputs_size = score_engine->getGraph(exeNetwork2).GetOutputNodes().size();
InferenceEngine::Core ie;
auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
+ InferenceEngine::ExecutableNetwork exeNetwork1;
+ ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));
auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
+ InferenceEngine::ExecutableNetwork exeNetwork1;
+ ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));
auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
+ InferenceEngine::ExecutableNetwork exeNetwork1;
+ ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));
auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
auto network = ie.ReadNetwork(model, CommonTestUtils::getWeightsBlob(weight_size));
std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
+ InferenceEngine::ExecutableNetwork exeNetwork1;
+ ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));
auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
auto network = ie.ReadNetwork(model, CommonTestUtils::getWeightsBlob(weight_size));
std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
- InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
- ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
+ InferenceEngine::ExecutableNetwork exeNetwork1;
+ ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));
auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");