LoadNetwork interface: executable network is returned as a return value (#1840)
author Ilya Lavrenov <ilya.lavrenov@intel.com>
Wed, 19 Aug 2020 08:44:29 +0000 (11:44 +0300)
committer GitHub <noreply@github.com>
Wed, 19 Aug 2020 08:44:29 +0000 (11:44 +0300)
* LoadNetwork interface: executable network is returned by value

* Added a reference to the SOPointer to the returned ExecutableNetwork

* Fixed test

inference-engine/src/gna_plugin/gna_plugin.hpp
inference-engine/src/inference_engine/ie_plugin_cpp.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp
inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp
inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp
inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.hpp
inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/normalizer/supported_fusions_test.cpp

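In short: the internal plugin API no longer fills an IExecutableNetwork::Ptr out-parameter; LoadNetwork now returns an ExecutableNetwork object by value. A minimal caller-side sketch (assuming `plugin` is a loaded IInferencePlugin implementation and `network` an already-read network; both names are placeholders):

    // Before this change: out-parameter style
    //   InferenceEngine::IExecutableNetwork::Ptr exeNetPtr;
    //   plugin->LoadNetwork(exeNetPtr, network, {});
    // After this change: the executable network is the return value
    InferenceEngine::ExecutableNetwork exeNet = plugin->LoadNetwork(network, {});
    auto request = exeNet.CreateInferRequest();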
diff --git a/inference-engine/src/gna_plugin/gna_plugin.hpp b/inference-engine/src/gna_plugin/gna_plugin.hpp
index b510972..89e54c6 100644
@@ -103,9 +103,10 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin {
     void AddExtension(InferenceEngine::IExtensionPtr extension) override;
 
     void SetConfig(const std::map<std::string, std::string> &config) override;
-    void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &executableNetwork,
-                     const InferenceEngine::ICNNNetwork &network,
-                     const std::map<std::string, std::string> &config_map) override { THROW_GNA_EXCEPTION << "Not implemented"; }
+    InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
+                                                   const std::map<std::string, std::string> &config_map) override {
+        THROW_GNA_EXCEPTION << "Not implemented";
+    }
     InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
                                   const std::map<std::string, std::string> &config_map,
                                   InferenceEngine::RemoteContext::Ptr context) override { THROW_GNA_EXCEPTION << "Not implemented"; }
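For plugin implementers, the override now has the shape below. This is a hedged fragment of a hypothetical plugin class (not GNA-specific; the generic THROW_IE_EXCEPTION from the commit's mock plugin is used instead of the GNA macro):

    InferenceEngine::ExecutableNetwork LoadNetwork(
            const InferenceEngine::ICNNNetwork &network,
            const std::map<std::string, std::string> &config) override {
        // Compile `network` for the device and return the executable network by value,
        // or throw if the plugin does not support this entry point.
        THROW_IE_EXCEPTION << "Not implemented";
    }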
diff --git a/inference-engine/src/inference_engine/ie_plugin_cpp.hpp b/inference-engine/src/inference_engine/ie_plugin_cpp.hpp
index 662f73a..9f216e8 100644
@@ -76,9 +76,7 @@ public:
     }
 
     ExecutableNetwork LoadNetwork(CNNNetwork network, const std::map<std::string, std::string>& config) {
-        IExecutableNetwork::Ptr ret;
-        CALL_STATEMENT(actual->LoadNetwork(ret, network, config));
-        return ExecutableNetwork(ret, actual);
+        CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config), actual));
     }
 
     void AddExtension(InferenceEngine::IExtensionPtr extension) {
@@ -102,7 +100,7 @@ public:
 
     ExecutableNetwork ImportNetwork(std::istream& networkModel,
                                     const std::map<std::string, std::string> &config) {
-        CALL_STATEMENT(return actual->ImportNetwork(networkModel, config));
+        CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(networkModel, config), actual));
     }
 
     Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
@@ -111,7 +109,7 @@ public:
 
     ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
                                   RemoteContext::Ptr context) {
-        CALL_STATEMENT(return actual->LoadNetwork(network, config, context));
+        CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config, context), actual));
     }
 
     RemoteContext::Ptr CreateContext(const ParamMap& params) {
@@ -125,7 +123,7 @@ public:
     ExecutableNetwork ImportNetwork(std::istream& networkModel,
                                     const RemoteContext::Ptr& context,
                                     const std::map<std::string, std::string>& config) {
-        CALL_STATEMENT(return actual->ImportNetwork(networkModel, context, config));
+        CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(networkModel, context, config), actual));
     }
 
     Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
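Note that every wrapper method above now constructs ExecutableNetwork(..., actual), where `actual` is the SOPointer to the plugin shared object. This is the "reference to the SOPointer" from the commit message: it keeps the plugin library loaded for as long as the returned network is alive. A minimal sketch of the ownership idea (type and member names here are illustrative, not the real Inference Engine classes):

    #include <memory>

    struct ExecutableNetworkSketch {          // hypothetical stand-in for ExecutableNetwork
        std::shared_ptr<void> impl;           // the executable network implementation
        std::shared_ptr<void> pluginSo;       // keeps the plugin .so/.dll mapped while `impl` lives
    };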
diff --git a/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp
index 1a0e7b2..d1654cb 100644
@@ -56,16 +56,14 @@ protected:
     ~InferencePluginInternal() override = default;
 
 public:
-    void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
-                     const std::map<std::string, std::string>& config) override {
-        LoadNetworkImplPrivate(executableNetwork, network, config);
+    ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
+                                  const std::map<std::string, std::string>& config) override {
+        return LoadNetworkImplPrivate(network, config);
     }
 
     ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
                                   RemoteContext::Ptr context) override {
-        IExecutableNetwork::Ptr executableNetworkPtr;
-        LoadNetworkImplPrivate(executableNetworkPtr, network, config, context);
-        return ExecutableNetwork(executableNetworkPtr);
+        return LoadNetworkImplPrivate(network, config, context);
     }
 
     IExecutableNetwork::Ptr ImportNetwork(const std::string& modelFileName,
@@ -141,14 +139,14 @@ private:
     /**
      * @brief A helper method which clones a ICNNNetwork object, keeps InputsDataMap and OutputsDataMap data maps,
      * and creates an IExecutableNetwork object
-     * @param executableNetwork An output executable network object
      * @param network An input ICNNNetwork object used to create an executable network object
      * @param config A map of string -> string configuration options.
      * @param context An optional pointer to RemoteContext
+     * @return An output executable network object
      */
-    void LoadNetworkImplPrivate(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
-                                 const std::map<std::string, std::string>& config,
-                                 RemoteContext::Ptr context = nullptr) {
+    ExecutableNetwork LoadNetworkImplPrivate(const ICNNNetwork& network,
+                                             const std::map<std::string, std::string>& config,
+                                             RemoteContext::Ptr context = nullptr) {
         InputsDataMap networkInputs, networkInputsCloned;
         OutputsDataMap networkOutputs, networkOutputsCloned;
         network.getInputsInfo(networkInputs);
@@ -166,9 +164,8 @@ private:
         impl->setNetworkOutputs(networkOutputsCloned);
         impl->SetPointerToPlugin(shared_from_this());
 
-        executableNetwork.reset(new ExecutableNetworkBase<ExecutableNetworkInternal>(impl), [](details::IRelease* p) {
-            p->Release();
-        });
+        auto executableNetwork = make_executable_network(impl);
+        return ExecutableNetwork(executableNetwork);
     }
 
 protected:
@@ -176,7 +173,7 @@ protected:
      * @brief Creates an executable network from a parsed network object, users can create as many networks as they need
      *        and use them simultaneously (up to the limitation of the HW resources)
      * @note The function is used in
-     * InferencePluginInternal::LoadNetwork(IExecutableNetwork::Ptr&, const ICNNNetwork&, const std::map<std::string, std::string>&)
+     * InferencePluginInternal::LoadNetwork(const ICNNNetwork&, const std::map<std::string, std::string>&)
      * which performs common steps first and calls this plugin-dependent method implementation after.
      * @param network A network object
      * @param config string-string map of config parameters relevant only for this load operation
@@ -196,7 +193,8 @@ protected:
      * @param config string-string map of config parameters relevant only for this load operation
      * @return Shared pointer to the ExecutableNetwork object
      */
-    virtual ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const ICNNNetwork& network, RemoteContext::Ptr context,
+    virtual ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const ICNNNetwork& network,
+                                                              RemoteContext::Ptr context,
                                                               const std::map<std::string, std::string>& config) {
         (void)network;
         (void)context;
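With the common steps handled by LoadNetworkImplPrivate, a concrete plugin only implements the protected LoadExeNetworkImpl hook. A hedged fragment (MyPlugin and MyExecutableNetwork are hypothetical names; MyExecutableNetwork is assumed to derive from ExecutableNetworkInternal):

    InferenceEngine::ExecutableNetworkInternal::Ptr
    MyPlugin::LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork &network,
                                 const std::map<std::string, std::string> &config) {
        // Compile `network` for the target device; the base class clones the network,
        // attaches the I/O maps and the plugin pointer, and wraps the result into
        // an ExecutableNetwork for the caller.
        return std::make_shared<MyExecutableNetwork>(network, config);
    }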
diff --git a/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp b/inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp
index 73be14f..c8833f4 100644
@@ -163,12 +163,12 @@ public:
     /**
      * @brief Creates an executable network from a parsed network object, users can create as many networks as they need
      * and use them simultaneously (up to the limitation of the HW resources)
-     * @param executableNetwork - a reference to a shared ptr of the returned network interface
-     * @param network - a network object acquired from InferenceEngine::Core::ReadNetwork
-     * @param config string-string map of config parameters relevant only for this load operation
+     * @param network A network object acquired from InferenceEngine::Core::ReadNetwork
+     * @param config A string-string map of config parameters relevant only for this load operation
+     * @return Created Executable Network object
      */
-    virtual void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
-                             const std::map<std::string, std::string>& config) = 0;
+    virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
+                                          const std::map<std::string, std::string>& config) = 0;
 
     /**
      * @brief Creates an executable network from a network object, on a specified remote context
@@ -178,7 +178,8 @@ public:
      *        execute the network
      * @return Created Executable Network object
      */
-    virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
+    virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
+                                          const std::map<std::string, std::string>& config,
                                           RemoteContext::Ptr context) = 0;
     /**
      * @brief Registers extension within plugin
diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.cpp
index 0b80070..dcea352 100644
@@ -22,10 +22,11 @@ void MockPlugin::SetConfig(const std::map<std::string, std::string>& config) {
     this->config = config;
 }
 
-void MockPlugin::LoadNetwork(IExecutableNetwork::Ptr &ret, const ICNNNetwork &network,
-                             const std::map<std::string, std::string> &config) {
+ExecutableNetwork
+MockPlugin::LoadNetwork(const ICNNNetwork &network,
+                        const std::map<std::string, std::string> &config) {
     if (_target) {
-        _target->LoadNetwork(ret, network, config);
+        return _target->LoadNetwork(network, config);
     } else {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
diff --git a/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.hpp b/inference-engine/tests/ie_test_utils/unit_test_utils/mocks/mock_engine/mock_plugin.hpp
index fc4c99b..4bc5fe3 100644
@@ -17,8 +17,9 @@ public:
     explicit MockPlugin(InferenceEngine::IInferencePlugin* target);
 
     void SetConfig(const std::map<std::string, std::string>& config) override;
-    void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &ret, const InferenceEngine::ICNNNetwork &network,
-                     const std::map<std::string, std::string> &config) override;
+    InferenceEngine::ExecutableNetwork
+    LoadNetwork(const InferenceEngine::ICNNNetwork &network,
+                const std::map<std::string, std::string> &config) override;
     ExecutableNetworkInternal::Ptr
     LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork& network,
                        const std::map<std::string, std::string>& config) override;
diff --git a/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp b/inference-engine/tests/unit/inference_engine/cpp_interfaces/ie_plugin_test.cpp
index ee05fa1..cdf1e11 100644
@@ -48,7 +48,7 @@ protected:
     }
 
     void getInferRequestWithMockImplInside(IInferRequest::Ptr &request) {
-        IExecutableNetwork::Ptr exeNetwork;
+        ExecutableNetwork exeNetwork;
         InputsDataMap inputsInfo;
         mockNotEmptyNet.getInputsInfo(inputsInfo);
         OutputsDataMap outputsInfo;
@@ -57,10 +57,8 @@ protected:
         mockExeNetworkTS = make_shared<MockExecutableNetworkThreadSafe>();
         EXPECT_CALL(*mock_plugin_impl.get(), LoadExeNetworkImpl(_, _)).WillOnce(Return(mockExeNetworkTS));
         EXPECT_CALL(*mockExeNetworkTS.get(), CreateInferRequestImpl(_, _)).WillOnce(Return(mockInferRequestInternal));
-        plugin->LoadNetwork(exeNetwork, mockNotEmptyNet, {});
-        ASSERT_NE(exeNetwork, nullptr) << dsc.msg;
-        sts = exeNetwork->CreateInferRequest(request, &dsc);
-        ASSERT_EQ((int) StatusCode::OK, sts) << dsc.msg;
+        ASSERT_NO_THROW(exeNetwork = plugin->LoadNetwork(mockNotEmptyNet, {}));
+        ASSERT_NO_THROW(request = exeNetwork.CreateInferRequest());
     }
 };
 
diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/graph/layers/internal/graph_leaks_test.cpp
index 50da7f9..a2178ab 100644
@@ -258,8 +258,8 @@ TEST_F(MKLDNNGraphLeaksTests, MKLDNN_not_release_outputs_fp32) {
         ASSERT_NE(1, network.getOutputsInfo().size());
 
         std::shared_ptr<MKLDNNTestEngine> score_engine(new MKLDNNTestEngine());
-        InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
-        ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
+        InferenceEngine::ExecutableNetwork exeNetwork1;
+        ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));
 
         size_t modified_outputs_size = score_engine->getGraph(exeNetwork1).GetOutputNodes().size();
 
@@ -267,8 +267,8 @@ TEST_F(MKLDNNGraphLeaksTests, MKLDNN_not_release_outputs_fp32) {
         ASSERT_NO_THROW(network2 = core.ReadNetwork(model, weights_ptr));
         ASSERT_EQ(1, network2.getOutputsInfo().size());
 
-        InferenceEngine::IExecutableNetwork::Ptr exeNetwork2;
-        ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork2, network2, {}));
+        InferenceEngine::ExecutableNetwork exeNetwork2;
+        ASSERT_NO_THROW(exeNetwork2 = score_engine->LoadNetwork(network2, {}));
 
         size_t original_outputs_size = score_engine->getGraph(exeNetwork2).GetOutputNodes().size();
 
diff --git a/inference-engine/tests_deprecated/unit/engines/mkldnn/normalizer/supported_fusions_test.cpp b/inference-engine/tests_deprecated/unit/engines/mkldnn/normalizer/supported_fusions_test.cpp
index e291238..a0936c5 100644
@@ -85,8 +85,8 @@ protected:
             InferenceEngine::Core ie;
             auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
             std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
-            InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
-            ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
+            InferenceEngine::ExecutableNetwork exeNetwork1;
+            ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));
 
             auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
             auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
@@ -145,8 +145,8 @@ protected:
             auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
 
             std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
-            InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
-            ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
+            InferenceEngine::ExecutableNetwork exeNetwork1;
+            ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));
 
             auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
             auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
@@ -206,8 +206,8 @@ protected:
             auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
 
             std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
-            InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
-            ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
+            InferenceEngine::ExecutableNetwork exeNetwork1;
+            ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));
 
             auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
             auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
@@ -265,8 +265,8 @@ protected:
             auto network = ie.ReadNetwork(model, CommonTestUtils::getWeightsBlob(weight_size));
 
             std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
-            InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
-            ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
+            InferenceEngine::ExecutableNetwork exeNetwork1;
+            ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));
 
             auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
             auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
@@ -341,8 +341,8 @@ protected:
             auto network = ie.ReadNetwork(model, CommonTestUtils::getWeightsBlob(weight_size));
 
             std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
-            InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
-            ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
+            InferenceEngine::ExecutableNetwork exeNetwork1;
+            ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));
 
             auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
             auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
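The test updates above all follow the same mechanical migration. A hedged before/after fragment (here `plugin` stands for any IInferencePlugin implementation, e.g. the score_engine instances in the MKLDNN tests):

    // Before: out-parameter plus explicit null/status checks
    //   InferenceEngine::IExecutableNetwork::Ptr exeNetwork;
    //   ASSERT_NO_THROW(plugin->LoadNetwork(exeNetwork, network, {}));
    //   ASSERT_NE(exeNetwork, nullptr);
    // After: value-returning call; failures surface as exceptions
    InferenceEngine::ExecutableNetwork exeNetwork;
    ASSERT_NO_THROW(exeNetwork = plugin->LoadNetwork(network, {}));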