Plugin interface refactoring (#2591)
author Ilya Lavrenov <ilya.lavrenov@intel.com>
Tue, 13 Oct 2020 14:22:51 +0000 (17:22 +0300)
committer GitHub <noreply@github.com>
Tue, 13 Oct 2020 14:22:51 +0000 (17:22 +0300)
* Simplified plugin interface

* Allow not-implemented methods

* Fixes

* Fixed CPU plugin tests

* Fixed tests dependencies

* Fixes

* Fixed GPU plugin compilation

* Renamed plugin

* Fixes

* Removed tests for plugin base

* Fix2

* Fix 2

* Define a macro for the plugin creation function

* Clean-up

* Fixed OSX build

* Fixed CentOS

* Removed statuscode from IExecutableNetworkInternal interface

* Interface

* Removed unused macro

* QueryNetwork returns its result via return value (see the signature sketch after this list)

* LoadNetwork interface

* Fixed compilation with private plugins

* Fixed compilation when NGRAPH_INTERP is not enabled

* Return ExecutableNetwork from ImportNetwork with fileName

* Updated GetContext method

* Return exec graph information as return value

* Message about deprecation of Export with file name

* Message about deprecation of QueryState

* Updated ExecutableNetwork::GetConfig signature

* Updated ExecutableNetwork::GetMetric signature

* Updated docs

* WIP createIR

* Simplified IR creation

* CreateInferRequest returns the request by value

* Removed GetExecutor from ExecutableNetworkThreadSafeDefault

* execDataPreprocessing is protected

* Fixed mock test
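
Taken together, the commits above replace the out-parameter/StatusCode style of the internal plugin interfaces with plain return values. A minimal before/after signature sketch, assembled from the hunks below (the surrounding class is omitted for brevity):

    // old internal style: result via out-parameter, error via ResponseDesc / StatusCode
    void GetMetric(const std::string &name, InferenceEngine::Parameter &result,
                   InferenceEngine::ResponseDesc *resp) const;
    void QueryNetwork(const InferenceEngine::ICNNNetwork &network,
                      const std::map<std::string, std::string> &config,
                      InferenceEngine::QueryNetworkResult &res) const;

    // new internal style: result via return value, error via exception
    InferenceEngine::Parameter GetMetric(const std::string &name) const;
    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork &network,
                                                     const std::map<std::string, std::string> &config) const;

The StatusCode mapping is kept only in the shared base wrappers (see the ie_executable_network_base.hpp hunk at the end of this page), which translate exceptions back into status codes via TO_STATUS.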

57 files changed:
docs/template_plugin/src/template_executable_network.cpp
docs/template_plugin/src/template_executable_network.hpp
docs/template_plugin/src/template_plugin.cpp
docs/template_plugin/src/template_plugin.hpp
inference-engine/src/cldnn_engine/cldnn_engine.cpp
inference-engine/src/cldnn_engine/cldnn_engine.h
inference-engine/src/cldnn_engine/cldnn_executable_network.cpp
inference-engine/src/cldnn_engine/cldnn_executable_network.h
inference-engine/src/cldnn_engine/cldnn_graph.cpp
inference-engine/src/cldnn_engine/cldnn_graph.h
inference-engine/src/gna_plugin/gna_executable_network.hpp
inference-engine/src/gna_plugin/gna_infer_request.hpp
inference-engine/src/gna_plugin/gna_plugin.cpp
inference-engine/src/gna_plugin/gna_plugin.hpp
inference-engine/src/gna_plugin/gna_plugin_internal.hpp
inference-engine/src/hetero_plugin/hetero_async_infer_request.cpp
inference-engine/src/hetero_plugin/hetero_async_infer_request.hpp
inference-engine/src/hetero_plugin/hetero_executable_network.cpp
inference-engine/src/hetero_plugin/hetero_executable_network.hpp
inference-engine/src/hetero_plugin/hetero_plugin.cpp
inference-engine/src/hetero_plugin/hetero_plugin.hpp
inference-engine/src/inference_engine/ie_core.cpp
inference-engine/src/inference_engine/ie_plugin_cpp.hpp
inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp
inference-engine/src/mkldnn_plugin/mkldnn_exec_network.h
inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
inference-engine/src/mkldnn_plugin/mkldnn_graph.h
inference-engine/src/mkldnn_plugin/mkldnn_graph_dumper.cpp
inference-engine/src/mkldnn_plugin/mkldnn_graph_dumper.h
inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp
inference-engine/src/mkldnn_plugin/mkldnn_plugin.h
inference-engine/src/multi_device/multi_device.cpp
inference-engine/src/multi_device/multi_device.hpp
inference-engine/src/plugin_api/cpp_interfaces/base/ie_executable_network_base.hpp
inference-engine/src/plugin_api/cpp_interfaces/exception2status.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_internal.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_async_only.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_async_request_internal.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_infer_request_internal.hpp
inference-engine/src/plugin_api/cpp_interfaces/impl/ie_plugin_internal.hpp
inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iexecutable_network_internal.hpp
inference-engine/src/plugin_api/cpp_interfaces/interface/ie_iplugin_internal.hpp
inference-engine/src/plugin_api/exec_graph_info.hpp
inference-engine/src/readers/ir_reader_v7/ie_format_parser.cpp
inference-engine/src/vpu/graph_transformer/include/vpu/utils/runtime_graph.hpp
inference-engine/src/vpu/graph_transformer/src/utils/runtime_graph.cpp
inference-engine/src/vpu/myriad_plugin/myriad_executable_network.cpp
inference-engine/src/vpu/myriad_plugin/myriad_executable_network.h
inference-engine/src/vpu/myriad_plugin/myriad_plugin.cpp
inference-engine/src/vpu/myriad_plugin/myriad_plugin.h
inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/impl/mock_executable_network_internal.hpp
inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iexecutable_network_internal.hpp
inference-engine/tests/ie_test_utils/unit_test_utils/mocks/cpp_interfaces/interface/mock_iinference_plugin.hpp
inference-engine/tests/unit/inference_engine/ie_executable_network_test.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/dumper_test.cpp
inference-engine/tests_deprecated/unit/engines/mkldnn/graph/structure/graph_structure_test.cpp

docs/template_plugin/src/template_executable_network.cpp
index 98b6e7d..c10105f 100644 (file)
@@ -98,27 +98,29 @@ InferenceEngine::InferRequestInternal::Ptr TemplatePlugin::ExecutableNetwork::Cr
 // ! [executable_network:create_infer_request_impl]
 
 // ! [executable_network:create_infer_request]
-void TemplatePlugin::ExecutableNetwork::CreateInferRequest(IInferRequest::Ptr& asyncRequest) {
+IInferRequest::Ptr TemplatePlugin::ExecutableNetwork::CreateInferRequest() {
+    IInferRequest::Ptr asyncRequest;
     auto internalRequest = CreateInferRequestImpl(_networkInputs, _networkOutputs);
     auto asyncThreadSafeImpl = std::make_shared<TemplateAsyncInferRequest>(std::static_pointer_cast<TemplateInferRequest>(internalRequest),
                                                                            _taskExecutor, _plugin->_waitExecutor, _callbackExecutor);
     asyncRequest.reset(new InferenceEngine::InferRequestBase<TemplateAsyncInferRequest>(asyncThreadSafeImpl),
                        [](InferenceEngine::IInferRequest *p) { p->Release(); });
     asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+    return asyncRequest;
 }
 // ! [executable_network:create_infer_request]
 
 // ! [executable_network:get_config]
-void TemplatePlugin::ExecutableNetwork::GetConfig(const std::string &name, Parameter &result, ResponseDesc *resp) const {
-    result = _cfg.Get(name);
+Parameter TemplatePlugin::ExecutableNetwork::GetConfig(const std::string &name) const {
+    return _cfg.Get(name);
 }
 // ! [executable_network:get_config]
 
 // ! [executable_network:get_metric]
-void TemplatePlugin::ExecutableNetwork::GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *) const {
+InferenceEngine::Parameter TemplatePlugin::ExecutableNetwork::GetMetric(const std::string &name) const {
     // TODO: return more supported values for metrics
     if (METRIC_KEY(SUPPORTED_METRICS) == name) {
-        result = IE_SET_METRIC(SUPPORTED_METRICS, std::vector<std::string>{
+        IE_SET_METRIC_RETURN(SUPPORTED_METRICS, std::vector<std::string>{
             METRIC_KEY(NETWORK_NAME),
             METRIC_KEY(SUPPORTED_METRICS),
             METRIC_KEY(SUPPORTED_CONFIG_KEYS),
@@ -132,13 +134,13 @@ void TemplatePlugin::ExecutableNetwork::GetMetric(const std::string &name, Infer
         for (auto&& configKey : streamExecutorConfigKeys) {
             configKeys.emplace_back(configKey);
         }
-        result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, configKeys);
+        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
     } else if (METRIC_KEY(NETWORK_NAME) == name) {
         auto networkName = _function->get_friendly_name();
-        result = IE_SET_METRIC(NETWORK_NAME, networkName);
+        IE_SET_METRIC_RETURN(NETWORK_NAME, networkName);
     } else if (METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS) == name) {
         unsigned int value = _cfg._streamsExecutorConfig._streams;
-        result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, value);
+        IE_SET_METRIC_RETURN(OPTIMAL_NUMBER_OF_INFER_REQUESTS, value);
     } else {
         THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork metric: " << name;
     }
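
IE_SET_METRIC_RETURN replaces the earlier "result = IE_SET_METRIC(...)" pattern now that GetMetric returns the InferenceEngine::Parameter directly. As a rough idea of what the helper does (a sketch only; the actual macro ships with the plugin API metric helpers and may differ in detail), it builds a value of the metric's declared type and returns it from the enclosing function:

    // sketch of the intent behind IE_SET_METRIC_RETURN - not the literal definition
    #define IE_SET_METRIC_RETURN(name, ...)                                                        \
        typename ::InferenceEngine::Metrics::MetricType##name::type _##name##_value = __VA_ARGS__; \
        return _##name##_value

so IE_SET_METRIC_RETURN(NETWORK_NAME, networkName); both type-checks the value against the NETWORK_NAME metric and returns it as the GetMetric result.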
docs/template_plugin/src/template_executable_network.hpp
index 01bcc32..ee3c882 100644 (file)
@@ -39,9 +39,9 @@ public:
     void ExportImpl(std::ostream& model) override;
     InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                       InferenceEngine::OutputsDataMap networkOutputs) override;
-    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;
-    void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
-    void GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    InferenceEngine::IInferRequest::Ptr CreateInferRequest() override;
+    InferenceEngine::Parameter GetMetric(const std::string &name) const override;
+    InferenceEngine::Parameter GetConfig(const std::string &name) const override;
 
 private:
     friend class TemplateInferRequest;
docs/template_plugin/src/template_plugin.cpp
index b7481d7..0824c70 100644 (file)
@@ -138,8 +138,10 @@ InferenceEngine::ExecutableNetwork Plugin::ImportNetworkImpl(std::istream& model
 // ! [plugin:import_network_impl]
 
 // ! [plugin:query_network]
-void Plugin::QueryNetwork(const ICNNNetwork &network, const ConfigMap& config, QueryNetworkResult &res) const {
+QueryNetworkResult Plugin::QueryNetwork(const ICNNNetwork &network, const ConfigMap& config) const {
+    QueryNetworkResult res;
     Configuration cfg{config, _cfg, false};
+
     auto function = network.getFunction();
     if (function == nullptr) {
          THROW_IE_EXCEPTION << "Template Plugin supports only ngraph cnn network representation";
@@ -180,6 +182,8 @@ void Plugin::QueryNetwork(const ICNNNetwork &network, const ConfigMap& config, Q
             res.supportedLayersMap.emplace(layerName, GetName());
         }
     }
+
+    return res;
 }
 // ! [plugin:query_network]
 
docs/template_plugin/src/template_plugin.hpp
index 22519df..e614705 100644 (file)
@@ -23,9 +23,9 @@ public:
     ~Plugin() override;
 
     void SetConfig(const std::map<std::string, std::string> &config) override;
-    void QueryNetwork(const InferenceEngine::ICNNNetwork &network,
-                      const std::map<std::string, std::string>& config,
-                      InferenceEngine::QueryNetworkResult &res) const override;
+    InferenceEngine::QueryNetworkResult
+    QueryNetwork(const InferenceEngine::ICNNNetwork &network,
+                 const std::map<std::string, std::string>& config) const override;
     InferenceEngine::ExecutableNetworkInternal::Ptr
     LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork &network,
                        const std::map<std::string, std::string> &config) override;
inference-engine/src/cldnn_engine/cldnn_engine.cpp
index 2eb57f9..72a470e 100644 (file)
@@ -309,10 +309,9 @@ void clDNNEngine::SetConfig(const std::map<std::string, std::string> &config) {
     _impl->m_config.UpdateFromMap(config);
 }
 
-void clDNNEngine::QueryNetwork(const ICNNNetwork& network,
-                               const std::map<std::string,
-                               std::string>& config,
-                               QueryNetworkResult& res) const {
+QueryNetworkResult clDNNEngine::QueryNetwork(const ICNNNetwork& network,
+                                             const std::map<std::string, std::string>& config) const {
+    QueryNetworkResult res;
     GetDeviceInfo(config);      // Verify device id
     auto function = network.getFunction();
     if (function != nullptr) {
@@ -525,6 +524,8 @@ void clDNNEngine::QueryNetwork(const ICNNNetwork& network,
             }
         }
     }
+
+    return res;
 }
 
 Parameter clDNNEngine::GetConfig(const std::string& name, const std::map<std::string, Parameter>& /*options*/) const {
inference-engine/src/cldnn_engine/cldnn_engine.h
index 398712d..ff588d2 100644 (file)
@@ -41,8 +41,8 @@ public:
     void SetConfig(const std::map<std::string, std::string> &config) override;
     InferenceEngine::Parameter GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
     InferenceEngine::Parameter GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
-    void QueryNetwork(const InferenceEngine::ICNNNetwork& network,
-                      const std::map<std::string, std::string>& config, InferenceEngine::QueryNetworkResult& res) const override;
+    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork& network,
+                                                     const std::map<std::string, std::string>& config) const override;
 
     InferenceEngine::RemoteContext::Ptr CreateContext(const InferenceEngine::ParamMap& params) override;
     InferenceEngine::RemoteContext::Ptr GetDefaultContext() override;
inference-engine/src/cldnn_engine/cldnn_executable_network.cpp
index f0c1b19..ca1724b 100644 (file)
@@ -91,58 +91,52 @@ InferRequestInternal::Ptr CLDNNExecNetwork::CreateInferRequestImpl(InputsDataMap
     return ptr;
 }
 
-void CLDNNExecNetwork::CreateInferRequest(IInferRequest::Ptr &asyncRequest) {
-    auto syncRequestImpl = this->CreateInferRequestImpl(_networkInputs, _networkOutputs);
-    syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
-
-    auto asyncTreadSafeImpl = std::make_shared<CLDNNAsyncInferRequest>(syncRequestImpl, _taskExecutor, _callbackExecutor);
-
-    asyncRequest.reset(new InferRequestBase<CLDNNAsyncInferRequest>(asyncTreadSafeImpl), [](IInferRequest *p) { p->Release(); });
-    asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+IInferRequest::Ptr CLDNNExecNetwork::CreateInferRequest() {
+    return CreateAsyncInferRequestFromSync<CLDNNAsyncInferRequest>();
 }
 
-void CLDNNExecNetwork::GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr &graphPtr) {
+InferenceEngine::CNNNetwork CLDNNExecNetwork::GetExecGraphInfo() {
     if (m_graphs.empty())
         THROW_IE_EXCEPTION << NETWORK_NOT_LOADED_str;
 
-    m_graphs.front()->GetExecGraphInfo(graphPtr);
+    return m_graphs.front()->GetExecGraphInfo();
 }
 
-void CLDNNExecNetwork::GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const {
-    auto option = m_config.key_config_map.find(name);
-    if (option != m_config.key_config_map.end()) {
-        result = option->second;
+InferenceEngine::Parameter CLDNNExecNetwork::GetConfig(const std::string &name) const {
+    auto it = m_config.key_config_map.find(name);
+    if (it != m_config.key_config_map.end()) {
+        return it->second;
     } else {
         THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork config key: " << name;
     }
 }
 
-void CLDNNExecNetwork::GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const {
+InferenceEngine::Parameter CLDNNExecNetwork::GetMetric(const std::string &name) const {
     if (name == METRIC_KEY(NETWORK_NAME)) {
         IE_ASSERT(!m_graphs.empty());
-        result = IE_SET_METRIC(NETWORK_NAME, m_graphs[0]->getName());
+        IE_SET_METRIC_RETURN(NETWORK_NAME, m_graphs[0]->getName());
     } else if (name == METRIC_KEY(SUPPORTED_METRICS)) {
         std::vector<std::string> metrics;
         metrics.push_back(METRIC_KEY(NETWORK_NAME));
         metrics.push_back(METRIC_KEY(SUPPORTED_METRICS));
         metrics.push_back(METRIC_KEY(SUPPORTED_CONFIG_KEYS));
         metrics.push_back(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS));
-        result = IE_SET_METRIC(SUPPORTED_METRICS, metrics);
+        IE_SET_METRIC_RETURN(SUPPORTED_METRICS, metrics);
     } else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
         std::vector<std::string> configKeys;
         for (auto && value : m_config.key_config_map)
             configKeys.push_back(value.first);
-        result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, configKeys);
+        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
     } else if (name == METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)) {
         unsigned int nr = m_config.throughput_streams * 2u;
-        result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, nr);
+        IE_SET_METRIC_RETURN(OPTIMAL_NUMBER_OF_INFER_REQUESTS, nr);
     } else {
         THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork metric: " << name;
     }
 }
 
-void CLDNNExecNetwork::GetContext(RemoteContext::Ptr &pContext, ResponseDesc *resp) const {
-    pContext = m_context;
+RemoteContext::Ptr CLDNNExecNetwork::GetContext() const {
+    return m_context;
 }
 
 };  // namespace CLDNNPlugin
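
CreateAsyncInferRequestFromSync<T>() is a helper on the thread-safe executable-network base that factors out the async-request boilerplate each plugin used to repeat. Reconstructed from the code it replaces above, it plausibly looks like the following (member names such as _networkInputs and _taskExecutor come from the base class; the exact helper body may differ):

    // sketch of the shared helper, assuming the members shown in the deleted per-plugin code
    template <typename AsyncInferRequestType>
    InferenceEngine::IInferRequest::Ptr CreateAsyncInferRequestFromSync() {
        InferenceEngine::IInferRequest::Ptr asyncRequest;
        // build the plugin-specific synchronous request and bind it to this executable network
        auto syncRequestImpl = this->CreateInferRequestImpl(_networkInputs, _networkOutputs);
        syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
        // wrap it into the plugin's async request type and expose it through the public interface
        auto asyncThreadSafeImpl = std::make_shared<AsyncInferRequestType>(syncRequestImpl, _taskExecutor, _callbackExecutor);
        asyncRequest.reset(new InferenceEngine::InferRequestBase<AsyncInferRequestType>(asyncThreadSafeImpl),
                           [](InferenceEngine::IInferRequest* p) { p->Release(); });
        asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
        return asyncRequest;
    }

This is presumably also why the HeteroAsyncInferRequest constructor further down now takes an InferRequestInternal::Ptr: the shared helper passes the generic sync-request pointer, and the hetero request downcasts it internally.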
inference-engine/src/cldnn_engine/cldnn_executable_network.h
index edfe4c3..6c7497b 100644 (file)
@@ -26,14 +26,14 @@ public:
 
     explicit CLDNNExecNetwork(InferenceEngine::ICNNNetwork &network, RemoteContext::Ptr context, Config config);
 
-    void GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr &graphPtr) override;
-    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;
+    InferenceEngine::CNNNetwork GetExecGraphInfo() override;
+    InferenceEngine::IInferRequest::Ptr CreateInferRequest() override;
     InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                       InferenceEngine::OutputsDataMap networkOutputs) override;
 
-    void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
-    void GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
-    void GetContext(RemoteContext::Ptr &pContext, ResponseDesc *resp) const override;
+    InferenceEngine::Parameter GetMetric(const std::string &name) const override;
+    InferenceEngine::Parameter GetConfig(const std::string &name) const override;
+    RemoteContext::Ptr GetContext() const override;
 
 
     std::vector<std::shared_ptr<CLDNNGraph>> m_graphs;
inference-engine/src/cldnn_engine/cldnn_graph.cpp
index 673b127..7357769 100644 (file)
@@ -101,7 +101,7 @@ std::shared_ptr<cldnn::network> CLDNNGraph::BuildNetwork(std::shared_ptr<cldnn::
     return network;
 }
 
-InferenceEngine::ICNNNetwork::Ptr CLDNNGraph::GetExecGraphInfoByPrimitivesInfo(std::vector<cldnn::primitive_info>& primitives_info,
+InferenceEngine::CNNNetwork CLDNNGraph::GetExecGraphInfoByPrimitivesInfo(std::vector<cldnn::primitive_info>& primitives_info,
                                                                                bool filter_const_primitives) {
     if (m_config.useProfiling) {
         try {
@@ -465,9 +465,9 @@ InferenceEngine::ICNNNetwork::Ptr CLDNNGraph::GetExecGraphInfoByPrimitivesInfo(s
     return net;
 }
 
-void CLDNNGraph::GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr &graphPtr) {
+InferenceEngine::CNNNetwork CLDNNGraph::GetExecGraphInfo() {
     auto primitives_info = GetNetwork()->get_primitives_info();
-    graphPtr = GetExecGraphInfoByPrimitivesInfo(primitives_info, true);
+    return GetExecGraphInfoByPrimitivesInfo(primitives_info, true);
 }
 
 
inference-engine/src/cldnn_engine/cldnn_graph.h
index 9376554..86e65db 100644 (file)
@@ -41,7 +41,7 @@ public:
 
     explicit CLDNNGraph(InferenceEngine::ICNNNetwork& network, gpu::ClContext::Ptr context, Config config, uint16_t stream_id = 0);
     explicit CLDNNGraph(std::shared_ptr<CLDNNGraph> graph, uint16_t stream_id = 0);
-    void GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr& graphPtr);
+    InferenceEngine::CNNNetwork GetExecGraphInfo();
 
     bool IsLoaded() const;
 
@@ -87,8 +87,8 @@ protected:
     void Build();
     void UpdateLayersMaps();
     void UpdateImplementationsMap();
-    InferenceEngine::ICNNNetwork::Ptr GetExecGraphInfoByPrimitivesInfo(std::vector<cldnn::primitive_info>& pi,
-                                                                       bool filter_const_primitives = true);
+    InferenceEngine::CNNNetwork GetExecGraphInfoByPrimitivesInfo(std::vector<cldnn::primitive_info>& pi,
+                                                                 bool filter_const_primitives = true);
 };
 
 }  // namespace CLDNNPlugin
inference-engine/src/gna_plugin/gna_executable_network.hpp
index 6fa2cb7..b7a1088 100644 (file)
@@ -77,8 +77,7 @@ class GNAExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafe
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
-    void SetConfig(const std::map<std::string, InferenceEngine::Parameter>& config,
-                   InferenceEngine::ResponseDesc* /* resp */) override {
+    void SetConfig(const std::map<std::string, InferenceEngine::Parameter>& config) override {
         using namespace InferenceEngine::GNAConfigParams;
         if (config.empty()) {
             THROW_IE_EXCEPTION << "The list of configuration values is empty";
@@ -90,8 +89,7 @@ class GNAExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafe
             }
         }
 
-        InferenceEngine::Parameter old_mode_parameter;
-        GetConfig(KEY_GNA_DEVICE_MODE, old_mode_parameter, {});
+        InferenceEngine::Parameter old_mode_parameter = GetConfig(KEY_GNA_DEVICE_MODE);
         auto old_mode = old_mode_parameter.as<std::string>();
         if (old_mode == InferenceEngine::GNAConfigParams::GNA_SW_FP32) {
             THROW_IE_EXCEPTION << "Dynamic switching from GNA_SW_FP32 mode is not supported for ExecutableNetwork.";
@@ -107,16 +105,12 @@ class GNAExecutableNetwork : public InferenceEngine::ExecutableNetworkThreadSafe
         plg->SetConfig(configForPlugin);
     }
 
-    void GetConfig(const std::string &name,
-                   InferenceEngine::Parameter &result,
-                   InferenceEngine::ResponseDesc* /*resp*/) const override {
-        result = plg->GetConfig(name, {});
+    InferenceEngine::Parameter GetConfig(const std::string &name) const override {
+        return plg->GetConfig(name, {});
     }
 
-    void GetMetric(const std::string& name,
-                   InferenceEngine::Parameter& result,
-                   InferenceEngine::ResponseDesc* /* resp */) const override {
-        result = plg->GetMetric(name, {});
+    InferenceEngine::Parameter GetMetric(const std::string& name) const override {
+        return plg->GetMetric(name, {});
     }
 };
 
inference-engine/src/gna_plugin/gna_infer_request.hpp
index defbc16..fd2cc69 100644 (file)
@@ -71,9 +71,9 @@ class GNAInferRequest : public InferenceEngine::AsyncInferRequestInternal {
     }
 
     /**
-        * @brief methods with _ThreadUnsafe prefix are to implement in plugins
-        * or in default wrapper (e.g. AsyncInferRequestThreadSafeDefault)
-        */
+     * @brief methods with _ThreadUnsafe prefix are to implement in plugins
+     * or in default wrapper (e.g. AsyncInferRequestThreadSafeDefault)
+     */
     void StartAsyncImpl() override {
         // execute input pre-processing.
         execDataPreprocessing(_inputs);
inference-engine/src/gna_plugin/gna_plugin.cpp
index c36b47e..3905da7 100644 (file)
@@ -1159,7 +1159,7 @@ void GNAPlugin::SetName(const std::string & pluginName) noexcept {
     _pluginName = pluginName;
 }
 
-InferenceEngine::IExecutableNetwork::Ptr GNAPlugin::ImportNetwork(std::istream& networkModel) {
+InferenceEngine::ExecutableNetwork GNAPlugin::ImportNetwork(std::istream& networkModel) {
     auto header = GNAModelSerial::ReadHeader(networkModel);
 
     InitGNADevice();
@@ -1233,7 +1233,7 @@ InferenceEngine::IExecutableNetwork::Ptr GNAPlugin::ImportNetwork(std::istream&
 #if GNA_LIB_VER == 2
     createRequestConfigsForGnaModels();
 #endif
-    return nullptr;
+    return {};
 }
 
 void GNAPlugin::Export(const std::string &fileName) {
@@ -1294,9 +1294,10 @@ void GNAPlugin::UpdateFieldsFromConfig() {
     *gnaFlags = config.gnaFlags;
 }
 
-void GNAPlugin::QueryNetwork(const InferenceEngine::ICNNNetwork& network,
-                             const std::map<std::string, std::string>& config,
-                             InferenceEngine::QueryNetworkResult& res) const {
+InferenceEngine::QueryNetworkResult GNAPlugin::QueryNetwork(const InferenceEngine::ICNNNetwork& network,
+                                                            const std::map<std::string, std::string>& config) const {
+    InferenceEngine::QueryNetworkResult res;
+
     if (network.getFunction()) {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << " ngraph::Function is not supported natively";
     }
@@ -1323,4 +1324,6 @@ void GNAPlugin::QueryNetwork(const InferenceEngine::ICNNNetwork& network,
                                                     res.supportedLayersMap.insert({ layer->name, GetName() });
                                                 }
                                             }, false);
+
+    return res;
 }
inference-engine/src/gna_plugin/gna_plugin.hpp
index 99eda6c..1e4c4fd 100644 (file)
@@ -104,9 +104,7 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin {
 
     void SetConfig(const std::map<std::string, std::string> &config) override;
     InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
-                                                   const std::map<std::string, std::string> &config_map) override {
-        THROW_GNA_EXCEPTION << "Not implemented";
-    }
+        const std::map<std::string, std::string> &config_map) override { THROW_GNA_EXCEPTION << "Not implemented"; }
     InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
                                   const std::map<std::string, std::string> &config_map,
                                   InferenceEngine::RemoteContext::Ptr context) override { THROW_GNA_EXCEPTION << "Not implemented"; }
@@ -114,9 +112,8 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin {
     void SetCore(InferenceEngine::ICore*) noexcept override {}
     InferenceEngine::ICore* GetCore() const noexcept override {return nullptr;}
     void Reset();
-    void QueryNetwork(const InferenceEngine::ICNNNetwork &network,
-                      const std::map<std::string, std::string>& config,
-                      InferenceEngine::QueryNetworkResult &res) const override;
+    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork &network,
+                                                     const std::map<std::string, std::string>& config) const override;
     uint32_t QueueInference(const InferenceEngine::BlobMap &input, InferenceEngine::BlobMap &result);
     bool Wait(uint32_t idx);
     GnaWaitStatus WaitFor(uint32_t idx, int64_t millisTimeout);
@@ -131,8 +128,8 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin {
     void Wait(uint32_t sync, InferenceEngine::Blob &result) { THROW_GNA_EXCEPTION << "Not implemented"; }
 
     void Export(const std::string &fileName);
-    InferenceEngine::IExecutableNetwork::Ptr ImportNetwork(const std::string &modelFileName,
-                                                           const std::map<std::string, std::string> &config) override {
+    InferenceEngine::ExecutableNetwork ImportNetwork(const std::string &modelFileName,
+                                                     const std::map<std::string, std::string> &config) override {
         THROW_GNA_EXCEPTION << "Not implemented";
     }
     InferenceEngine::ExecutableNetwork ImportNetwork(std::istream& networkModel,
@@ -146,7 +143,7 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin {
         THROW_GNA_EXCEPTION << "Not implemented";
     }
 
-    InferenceEngine::IExecutableNetwork::Ptr ImportNetwork(std::istream& networkModel);
+    InferenceEngine::ExecutableNetwork ImportNetwork(std::istream& networkModel);
 
     /**
      * utility to provide input and output blobs externally to be used by InferenceEngine request API clients
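
The "Allow not implemented" commit shows up here as well: GNA keeps a few explicit "Not implemented" stubs of its own, while the internal base interfaces (ie_iplugin_internal.hpp / ie_iexecutable_network_internal.hpp in the file list) can supply throwing defaults so that a plugin only overrides what it actually supports. A plausible sketch of such a default, assuming the NOT_IMPLEMENTED_str helper from exception2status.hpp (the real default may be worded differently):

    // hypothetical base-class default - derived plugins override only the methods they support
    virtual InferenceEngine::Parameter GetMetric(const std::string& /*name*/) const {
        THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str << "GetMetric is not implemented";
    }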
inference-engine/src/gna_plugin/gna_plugin_internal.hpp
index df42d03..18b70fd 100644 (file)
@@ -43,7 +43,7 @@ public:
         defaultConfig.UpdateFromMap(config);
     }
 
-    InferenceEngine::IExecutableNetwork::Ptr ImportNetwork(
+    InferenceEngine::ExecutableNetwork ImportNetwork(
                                                 const std::string &modelFileName,
                                                 const std::map<std::string, std::string> &config) override {
         Config updated_config(defaultConfig);
@@ -69,14 +69,13 @@ public:
         return GetCurrentPlugin()->GetName();
     }
 
-    void QueryNetwork(const InferenceEngine::ICNNNetwork& network,
-                      const std::map<std::string, std::string>& config,
-                      InferenceEngine::QueryNetworkResult& res) const override {
+    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork& network,
+                                                     const std::map<std::string, std::string>& config) const override {
         auto plg = GetCurrentPlugin();
         try {
             plg->SetConfig(config);
         } catch (InferenceEngine::details::InferenceEngineException) {}
-        plg->QueryNetwork(network, config, res);
+        return plg->QueryNetwork(network, config);
     }
 
     InferenceEngine::Parameter GetMetric(const std::string& name,
inference-engine/src/hetero_plugin/hetero_async_infer_request.cpp
index 6f639ce..ab4ef57 100644 (file)
@@ -9,11 +9,11 @@
 using namespace HeteroPlugin;
 using namespace InferenceEngine;
 
-HeteroAsyncInferRequest::HeteroAsyncInferRequest(const HeteroInferRequest::Ptr& request,
-                                                 const ITaskExecutor::Ptr&      taskExecutor,
-                                                 const ITaskExecutor::Ptr&      callbackExecutor) :
+HeteroAsyncInferRequest::HeteroAsyncInferRequest(const InferRequestInternal::Ptr& request,
+                                                 const ITaskExecutor::Ptr&        taskExecutor,
+                                                 const ITaskExecutor::Ptr&        callbackExecutor) :
     AsyncInferRequestThreadSafeDefault(request, taskExecutor, callbackExecutor),
-    _heteroInferRequest(request),
+    _heteroInferRequest(std::static_pointer_cast<HeteroInferRequest>(request)),
     _statusCodes{_heteroInferRequest->_inferRequests.size(), StatusCode::OK} {
     _pipeline.clear();
     for (std::size_t requestId = 0; requestId < _heteroInferRequest->_inferRequests.size(); ++requestId) {
inference-engine/src/hetero_plugin/hetero_async_infer_request.hpp
index 26096b9..55497b7 100644 (file)
@@ -19,9 +19,9 @@ namespace HeteroPlugin {
 class HeteroAsyncInferRequest : public InferenceEngine::AsyncInferRequestThreadSafeDefault {
 public:
     using Ptr = std::shared_ptr<HeteroAsyncInferRequest>;
-    HeteroAsyncInferRequest(const HeteroInferRequest::Ptr&              request,
-                            const InferenceEngine::ITaskExecutor::Ptr&  taskExecutor,
-                            const InferenceEngine::ITaskExecutor::Ptr&  callbackExecutor);
+    HeteroAsyncInferRequest(const InferenceEngine::InferRequestInternal::Ptr& request,
+                            const InferenceEngine::ITaskExecutor::Ptr&        taskExecutor,
+                            const InferenceEngine::ITaskExecutor::Ptr&        callbackExecutor);
     ~HeteroAsyncInferRequest() override;
     void StartAsync_ThreadUnsafe() override;
     InferenceEngine::StatusCode Wait(int64_t millis_timeout) override;
inference-engine/src/hetero_plugin/hetero_executable_network.cpp
index ed460b0..1307ee5 100644 (file)
@@ -346,7 +346,7 @@ void HeteroExecutableNetwork::InitNgraph(const InferenceEngine::ICNNNetwork& net
     if (queryNetworkResult.supportedLayersMap.empty()) {
         auto it = _config.find("TARGET_FALLBACK");
         if (it != _config.end()) {
-            _heteroPlugin->QueryNetwork(network_, _config, queryNetworkResult);
+            queryNetworkResult = _heteroPlugin->QueryNetwork(network_, _config);
         } else {
             THROW_IE_EXCEPTION << "The 'TARGET_FALLBACK' option was not defined for heterogeneous plugin";
         }
@@ -934,17 +934,12 @@ InferRequestInternal::Ptr HeteroExecutableNetwork::CreateInferRequestImpl(
                                                 _blobNameMap);
 }
 
-void HeteroExecutableNetwork::CreateInferRequest(IInferRequest::Ptr &asyncRequest) {
-    auto heteroInferRequest = std::dynamic_pointer_cast<HeteroInferRequest>(
-            CreateInferRequestImpl(_networkInputs, _networkOutputs));
-    heteroInferRequest->setPointerToExecutableNetworkInternal(shared_from_this());
-    auto asyncThreadSafeImpl = std::make_shared<HeteroAsyncInferRequest>(heteroInferRequest, _taskExecutor, _callbackExecutor);
-    asyncRequest.reset(new InferRequestBase<HeteroAsyncInferRequest>(asyncThreadSafeImpl),
-                       [](IInferRequest *p) { p->Release(); });
-    asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+IInferRequest::Ptr HeteroExecutableNetwork::CreateInferRequest() {
+    return CreateAsyncInferRequestFromSync<HeteroAsyncInferRequest>();
 }
 
-void HeteroExecutableNetwork::GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *) const {
+InferenceEngine::Parameter HeteroExecutableNetwork::GetConfig(const std::string &name) const {
+    InferenceEngine::Parameter result;
     if (name == "TARGET_FALLBACK") {
         auto it = _config.find(name);
         if (it != _config.end()) {
@@ -964,14 +959,15 @@ void HeteroExecutableNetwork::GetConfig(const std::string &name, InferenceEngine
             auto param = execNetwork.GetMetric(METRIC_KEY(SUPPORTED_CONFIG_KEYS));
             for (auto && configKey : param.as<std::vector<std::string>>()) {
                 if (configKey == name) {
-                    result = execNetwork.GetConfig(configKey);
-                    return;
+                    return execNetwork.GetConfig(configKey);
                 }
             }
         }
 
         THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork config key: " << name;
     }
+
+    return result;
 }
 
 using Metrics = std::map<std::string, Parameter>;
@@ -1011,7 +1007,7 @@ void collectPluginMetrics(std::vector<std::string> & baseMetrics,
 
 }  // namespace
 
-void HeteroExecutableNetwork::GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *) const {
+InferenceEngine::Parameter HeteroExecutableNetwork::GetMetric(const std::string &name) const {
     if (METRIC_KEY(SUPPORTED_METRICS) == name) {
         std::vector<std::string> heteroMetrics = {
             METRIC_KEY(NETWORK_NAME),
@@ -1035,7 +1031,7 @@ void HeteroExecutableNetwork::GetMetric(const std::string &name, InferenceEngine
             collectPluginMetrics(heteroMetrics, pluginMetrics);
         }
 
-        result = IE_SET_METRIC(SUPPORTED_METRICS, heteroMetrics);
+        IE_SET_METRIC_RETURN(SUPPORTED_METRICS, heteroMetrics);
     } else if (METRIC_KEY(SUPPORTED_CONFIG_KEYS) == name) {
         std::vector<std::string> heteroConfigKeys = {
             "TARGET_FALLBACK",
@@ -1058,15 +1054,15 @@ void HeteroExecutableNetwork::GetMetric(const std::string &name, InferenceEngine
             collectPluginMetrics(heteroConfigKeys, pluginConfigKeys);
         }
 
-        result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, heteroConfigKeys);
+        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, heteroConfigKeys);
     } else if (METRIC_KEY(NETWORK_NAME) == name) {
-        result = IE_SET_METRIC(NETWORK_NAME, _name);
+        IE_SET_METRIC_RETURN(NETWORK_NAME, _name);
     } else if (METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS) == name) {
         unsigned int value = 0u;
         for (auto&& desc : networks) {
             value = std::max(value, desc._network.GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as<unsigned int>());
         }
-        result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, value);
+        IE_SET_METRIC_RETURN(OPTIMAL_NUMBER_OF_INFER_REQUESTS, value);
     } else {
         // find metric key among plugin metrics
         for (auto&& desc : networks) {
@@ -1074,8 +1070,7 @@ void HeteroExecutableNetwork::GetMetric(const std::string &name, InferenceEngine
             auto param = execNetwork.GetMetric(METRIC_KEY(SUPPORTED_METRICS));
             for (auto && metricKey : param.as<std::vector<std::string>>()) {
                 if (metricKey == name) {
-                    result = execNetwork.GetMetric(metricKey);
-                    return;
+                    return execNetwork.GetMetric(metricKey);
                 }
             }
         }
inference-engine/src/hetero_plugin/hetero_executable_network.hpp
index f8aa8c6..d3024ac 100644 (file)
@@ -53,11 +53,11 @@ public:
     InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                       InferenceEngine::OutputsDataMap networkOutputs) override;
 
-    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;
+    InferenceEngine::IInferRequest::Ptr CreateInferRequest() override;
 
-    void GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    InferenceEngine::Parameter GetConfig(const std::string &name) const override;
 
-    void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    InferenceEngine::Parameter GetMetric(const std::string &name) const override;
 
     void ExportImpl(std::ostream& modelFile) override;
 
inference-engine/src/hetero_plugin/hetero_plugin.cpp
index a495a0e..f6235f9 100644 (file)
@@ -157,8 +157,7 @@ void HeteroLayerColorer::operator()(const CNNLayerPtr layer,
 }
 
 void Engine::SetAffinity(InferenceEngine::ICNNNetwork &network, const Configs &config) {
-    QueryNetworkResult qr;
-    QueryNetwork(network, config, qr);
+    QueryNetworkResult qr = QueryNetwork(network, config);
 
     details::CNNNetworkIterator i(&network);
     while (i != details::CNNNetworkIterator()) {
@@ -194,7 +193,9 @@ void Engine::SetAffinity(InferenceEngine::ICNNNetwork &network, const Configs &c
     }
 }
 
-void Engine::QueryNetwork(const ICNNNetwork &network, const Configs& config, QueryNetworkResult &qr) const {
+QueryNetworkResult Engine::QueryNetwork(const ICNNNetwork &network, const Configs& config) const {
+    QueryNetworkResult qr;
+
     if (GetCore() == nullptr) {
         THROW_IE_EXCEPTION << "Please, work with HETERO device via InferencEngine::Core object";
     }
@@ -257,6 +258,8 @@ void Engine::QueryNetwork(const ICNNNetwork &network, const Configs& config, Que
 
     // set OK status
     qr.rc = StatusCode::OK;
+
+    return qr;
 }
 
 Parameter Engine::GetMetric(const std::string& name, const std::map<std::string, Parameter> & /*options*/) const {
inference-engine/src/hetero_plugin/hetero_plugin.hpp
index bdd8903..7c5d563 100644 (file)
@@ -29,8 +29,8 @@ public:
 
     void SetConfig(const Configs &config) override;
 
-    void QueryNetwork(const InferenceEngine::ICNNNetwork &network,
-                      const Configs& config, InferenceEngine::QueryNetworkResult &res) const override;
+    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork &network,
+                                                     const Configs& config) const override;
 
     InferenceEngine::Parameter GetMetric(const std::string& name, const std::map<std::string,
                                          InferenceEngine::Parameter> & options) const override;
inference-engine/src/inference_engine/ie_core.cpp
index 0ac21ea..256d612 100644 (file)
@@ -294,10 +294,8 @@ public:
 
     QueryNetworkResult QueryNetwork(const ICNNNetwork& network, const std::string& deviceName,
                                     const std::map<std::string, std::string>& config) const override {
-        QueryNetworkResult res;
         auto parsed = parseDeviceNameIntoConfig(deviceName, config);
-        GetCPPPluginByName(parsed._deviceName).QueryNetwork(network, parsed._config, res);
-        return res;
+        return GetCPPPluginByName(parsed._deviceName).QueryNetwork(network, parsed._config);
     }
 
     Parameter GetMetric(const std::string& deviceName, const std::string& name) const override {
inference-engine/src/inference_engine/ie_plugin_cpp.hpp
index 9f216e8..9b3be1f 100644 (file)
@@ -92,10 +92,12 @@ public:
         CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(modelFileName, config), actual));
     }
 
-    void QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
-                      QueryNetworkResult& res) const {
-        CALL_STATEMENT(actual->QueryNetwork(network, config, res));
+    QueryNetworkResult QueryNetwork(const ICNNNetwork& network,
+                                    const std::map<std::string, std::string>& config) const {
+        QueryNetworkResult res;
+        CALL_STATEMENT(res = actual->QueryNetwork(network, config));
         if (res.rc != OK) THROW_IE_EXCEPTION << res.resp.msg;
+        return res;
     }
 
     ExecutableNetwork ImportNetwork(std::istream& networkModel,
inference-engine/src/mkldnn_plugin/mkldnn_exec_network.cpp
index 36f2c0e..f9d83da 100644 (file)
@@ -170,62 +170,54 @@ void MKLDNNExecNetwork::setProperty(const std::map<std::string, std::string> &pr
     }
 }
 
-void MKLDNNExecNetwork::CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) {
-    auto syncRequestImpl = CreateInferRequestImpl(_networkInputs, _networkOutputs);
-    syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
-    auto asyncRequestImpl = std::make_shared<MKLDNNAsyncInferRequest>(syncRequestImpl, _taskExecutor, _callbackExecutor);
-    asyncRequest.reset(new InferRequestBase<MKLDNNAsyncInferRequest>(asyncRequestImpl),
-                       [](IInferRequest *p) { p->Release(); });
-
-    asyncRequestImpl->SetPointerToPublicInterface(asyncRequest);
+InferenceEngine::IInferRequest::Ptr MKLDNNExecNetwork::CreateInferRequest() {
+    return CreateAsyncInferRequestFromSync<MKLDNNAsyncInferRequest>();
 }
 
-void MKLDNNExecNetwork::GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr &graphPtr) {
+InferenceEngine::CNNNetwork MKLDNNExecNetwork::GetExecGraphInfo() {
     if (_graphs.size() == 0)
         THROW_IE_EXCEPTION << "No graph was found";
 
-    graphPtr = _graphs.begin()->get()->dump();
+    return _graphs.begin()->get()->dump();
 }
 
-void MKLDNNExecNetwork::GetConfig(const std::string &name, Parameter &result, ResponseDesc *resp) const {
+Parameter MKLDNNExecNetwork::GetConfig(const std::string &name) const {
     if (_graphs.size() == 0)
         THROW_IE_EXCEPTION << "No graph was found";
     Config engConfig = _graphs.begin()->get()->getProperty();
-    auto option = engConfig._config.find(name);
-    if (option != engConfig._config.end()) {
-        result = option->second;
+    auto it = engConfig._config.find(name);
+    if (it != engConfig._config.end()) {
+        return it->second;
     } else {
         THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork config key: " << name;
     }
 }
 
-void MKLDNNExecNetwork::GetMetric(const std::string &name, Parameter &result, ResponseDesc *resp) const {
+InferenceEngine::Parameter MKLDNNExecNetwork::GetMetric(const std::string &name) const {
     if (_graphs.size() == 0)
         THROW_IE_EXCEPTION << "No graph was found";
 
     if (name == METRIC_KEY(NETWORK_NAME)) {
-        if (_graphs.begin()->get()->dump() == nullptr)
-            THROW_IE_EXCEPTION << "Invalid graph dump";
-        result = IE_SET_METRIC(NETWORK_NAME, _graphs.begin()->get()->dump()->getName());
+        IE_SET_METRIC_RETURN(NETWORK_NAME, _graphs.begin()->get()->GetName());
     } else if (name == METRIC_KEY(SUPPORTED_METRICS)) {
         std::vector<std::string> metrics;
         metrics.push_back(METRIC_KEY(NETWORK_NAME));
         metrics.push_back(METRIC_KEY(SUPPORTED_METRICS));
         metrics.push_back(METRIC_KEY(SUPPORTED_CONFIG_KEYS));
         metrics.push_back(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS));
-        result = IE_SET_METRIC(SUPPORTED_METRICS, metrics);
+        IE_SET_METRIC_RETURN(SUPPORTED_METRICS, metrics);
     } else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
         std::vector<std::string> configKeys;
         for (auto && key : _graphs.begin()->get()->getProperty()._config) {
             configKeys.push_back(key.first);
         }
-        result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, configKeys);
+        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
     } else if (name == METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)) {
         Config engConfig = _graphs.begin()->get()->getProperty();
         auto option = engConfig._config.find(CONFIG_KEY(CPU_THROUGHPUT_STREAMS));
         IE_ASSERT(option != engConfig._config.end());
         auto streams = std::stoi(option->second);
-        result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, static_cast<unsigned int>(
+        IE_SET_METRIC_RETURN(OPTIMAL_NUMBER_OF_INFER_REQUESTS, static_cast<unsigned int>(
             streams ? streams : 1));
     } else {
         THROW_IE_EXCEPTION << "Unsupported ExecutableNetwork metric: " << name;
inference-engine/src/mkldnn_plugin/mkldnn_exec_network.h
index 1a0ddfd..8ea85bb 100644 (file)
@@ -27,7 +27,7 @@ public:
     CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
               InferenceEngine::OutputsDataMap networkOutputs) override;
 
-    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override;
+    InferenceEngine::IInferRequest::Ptr CreateInferRequest() override;
 
     MKLDNNExecNetwork(const InferenceEngine::ICNNNetwork &network, const Config &cfg,
                       const MKLDNNExtensionManager::Ptr &extMgr, NumaNodesWeights &weightsSharing);
@@ -36,11 +36,11 @@ public:
 
     void setProperty(const std::map<std::string, std::string> &properties);
 
-    void GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    InferenceEngine::Parameter GetConfig(const std::string &name) const override;
 
-    void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    InferenceEngine::Parameter GetMetric(const std::string &name) const override;
 
-    void GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr &graphPtr) override;
+    InferenceEngine::CNNNetwork GetExecGraphInfo() override;
 
     std::vector<InferenceEngine::IMemoryStateInternal::Ptr> QueryState() override;
 
inference-engine/src/mkldnn_plugin/mkldnn_graph.cpp
index 98ea729..ceba7da 100644 (file)
@@ -1196,6 +1196,6 @@ void MKLDNNGraph::do_after(const std::string &dir, const MKLDNNNodePtr &node) {
     }
 }
 
-InferenceEngine::ICNNNetwork::Ptr MKLDNNGraph::dump() const {
+InferenceEngine::CNNNetwork MKLDNNGraph::dump() const {
     return dump_graph_as_ie_ngraph_net(*this);
 }
inference-engine/src/mkldnn_plugin/mkldnn_graph.h
index 4cacba4..a758145 100644 (file)
@@ -5,7 +5,7 @@
 #pragma once
 
 #include "ie_parallel.hpp"
-#include "ie_icnn_network.hpp"
+#include "cpp/ie_cnn_network.h"
 #include "config.h"
 #include "mkldnn_memory.h"
 #include "mean_image.h"
@@ -64,6 +64,10 @@ public:
         return graphNodes;
     }
 
+    std::string GetName() {
+        return _name;
+    }
+
     std::vector<MKLDNNEdgePtr>& GetEdges() {
         return graphEdges;
     }
@@ -83,7 +87,7 @@ public:
     void DropNode(const MKLDNNNodePtr& node);
     void DropDWConvNode(const MKLDNNNodePtr& node);
 
-    InferenceEngine::ICNNNetwork::Ptr dump() const;
+    InferenceEngine::CNNNetwork dump() const;
 
     template<typename NET>
     static void ApplyUnrollPasses(NET &net);
@@ -141,8 +145,8 @@ protected:
 
     friend class MKLDNNInferRequest;
     friend class MKLDNNGraphlessInferRequest;
-    friend std::shared_ptr<InferenceEngine::ICNNNetwork> dump_graph_as_ie_net(const MKLDNNGraph &graph);
-    friend std::shared_ptr<InferenceEngine::ICNNNetwork> dump_graph_as_ie_ngraph_net(const MKLDNNGraph &graph);
+    friend InferenceEngine::CNNNetwork dump_graph_as_ie_net(const MKLDNNGraph &graph);
+    friend InferenceEngine::CNNNetwork dump_graph_as_ie_ngraph_net(const MKLDNNGraph &graph);
 
 private:
     void dumpToDotFile(std::string file) const;
inference-engine/src/mkldnn_plugin/mkldnn_graph_dumper.cpp
index 75885fc..b481aaf 100644 (file)
@@ -41,7 +41,7 @@ CNNLayer::Ptr create_cnnlayer(const MKLDNNNodePtr &node) {
     return layer;
 }
 
-std::shared_ptr<ICNNNetwork> dump_graph_as_ie_ngraph_net(const MKLDNNGraph &graph) {
+InferenceEngine::CNNNetwork dump_graph_as_ie_ngraph_net(const MKLDNNGraph &graph) {
     std::map<MKLDNNNodePtr, std::shared_ptr<ngraph::Node> > node2layer;
 
     ngraph::ResultVector results;
@@ -142,7 +142,7 @@ std::shared_ptr<ICNNNetwork> dump_graph_as_ie_ngraph_net(const MKLDNNGraph &grap
     return net;
 }
 
-std::shared_ptr<ICNNNetwork> dump_graph_as_ie_net(const MKLDNNGraph &graph) {
+InferenceEngine::CNNNetwork dump_graph_as_ie_net(const MKLDNNGraph &graph) {
     auto net = std::make_shared<details::CNNNetworkImpl>();
 
     net->setName(graph._name);
@@ -191,14 +191,12 @@ std::shared_ptr<ICNNNetwork> dump_graph_as_ie_net(const MKLDNNGraph &graph) {
         net->setInputInfo(in_info);
     }
 
-    return net;
+    return InferenceEngine::CNNNetwork{net};
 }
 
 void dump_graph_as_dot(const MKLDNNGraph &graph, std::ostream &out) {
-    auto dump_net = dump_graph_as_ie_net(graph);
-    if (dump_net == nullptr)
-        THROW_IE_EXCEPTION << "Nullable net dump";
-    InferenceEngine::saveGraphToDot(*dump_net, out, drawer_callback);
+    InferenceEngine::CNNNetwork dump_net = dump_graph_as_ie_net(graph);
+    InferenceEngine::saveGraphToDot(dump_net, out, drawer_callback);
 }
 
 //**********************************
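
Returning InferenceEngine::CNNNetwork by value instead of std::shared_ptr<ICNNNetwork> is what makes the nullptr check above unnecessary: a CNNNetwork object always wraps a graph, and failures are reported through exceptions. The caller-side effect, restated as a minimal before/after sketch of dump_graph_as_dot:

    // before: the returned pointer had to be checked before use
    std::shared_ptr<InferenceEngine::ICNNNetwork> netPtr = dump_graph_as_ie_net(graph);
    if (netPtr == nullptr)
        THROW_IE_EXCEPTION << "Nullable net dump";
    InferenceEngine::saveGraphToDot(*netPtr, out, drawer_callback);

    // after: a CNNNetwork is returned by value and used directly
    InferenceEngine::CNNNetwork dumpNet = dump_graph_as_ie_net(graph);
    InferenceEngine::saveGraphToDot(dumpNet, out, drawer_callback);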
inference-engine/src/mkldnn_plugin/mkldnn_graph_dumper.h
index f0901a4..91b1908 100644 (file)
@@ -4,7 +4,7 @@
 
 #pragma once
 
-#include "ie_icnn_network.hpp"
+#include "cpp/ie_cnn_network.h"
 #include "mkldnn_graph.h"
 
 #include <memory>
@@ -13,7 +13,7 @@ namespace MKLDNNPlugin {
 
 void dump_graph_as_dot(const MKLDNNGraph &graph, std::ostream &out);
 
-std::shared_ptr<InferenceEngine::ICNNNetwork> dump_graph_as_ie_net(const MKLDNNGraph &graph);
-std::shared_ptr<InferenceEngine::ICNNNetwork> dump_graph_as_ie_ngraph_net(const MKLDNNGraph &graph);
+InferenceEngine::CNNNetwork dump_graph_as_ie_net(const MKLDNNGraph &graph);
+InferenceEngine::CNNNetwork dump_graph_as_ie_ngraph_net(const MKLDNNGraph &graph);
 
 }  // namespace MKLDNNPlugin
inference-engine/src/mkldnn_plugin/mkldnn_plugin.cpp
index 25a2140..86b246e 100644 (file)
@@ -300,7 +300,8 @@ void Engine::AddExtension(InferenceEngine::IExtensionPtr extension) {
     extensionManager->AddExtension(extension);
 }
 
-void Engine::QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config, QueryNetworkResult& res) const {
+QueryNetworkResult Engine::QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config) const {
+    QueryNetworkResult res;
     MKLDNNWeightsSharing::Ptr fake_w_cache;
     auto function = network.getFunction();
     if (function != nullptr) {
@@ -368,6 +369,8 @@ void Engine::QueryNetwork(const ICNNNetwork& network, const std::map<std::string
             i++;
         }
     }
+
+    return res;
 }
 
 static const Version version = {{2, 1}, CI_BUILD_NUMBER, "MKLDNNPlugin"};
inference-engine/src/mkldnn_plugin/mkldnn_plugin.h
index 8c8ba68..07f1c0e 100644 (file)
@@ -33,8 +33,8 @@ public:
 
     InferenceEngine::Parameter GetMetric(const std::string& name, const std::map<std::string, InferenceEngine::Parameter>& options) const override;
 
-    void QueryNetwork(const InferenceEngine::ICNNNetwork& network,
-                      const std::map<std::string, std::string>& config, InferenceEngine::QueryNetworkResult& res) const override;
+    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork& network,
+                                                     const std::map<std::string, std::string>& config) const override;
 
 private:
     Config engConfig;
inference-engine/src/multi_device/multi_device.cpp
index 98bffd6..64ac092 100644 (file)
@@ -237,7 +237,8 @@ InferenceEngine::InferRequestInternal::Ptr MultiDeviceExecutableNetwork::CreateI
     return std::make_shared<MultiDeviceInferRequest>(networkInputs, networkOutputs);
 }
 
-void MultiDeviceExecutableNetwork::CreateInferRequest(IInferRequest::Ptr& asyncRequest) {
+IInferRequest::Ptr MultiDeviceExecutableNetwork::CreateInferRequest() {
+    IInferRequest::Ptr asyncRequest;
     auto syncRequestImpl = CreateInferRequestImpl(_networkInputs, _networkOutputs);
     syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
     auto asyncTreadSafeImpl = std::make_shared<MultiDeviceAsyncInferRequest>(std::static_pointer_cast<MultiDeviceInferRequest>(syncRequestImpl),
@@ -246,10 +247,10 @@ void MultiDeviceExecutableNetwork::CreateInferRequest(IInferRequest::Ptr& asyncR
                                                                              _callbackExecutor);
     asyncRequest.reset(new InferRequestBase<MultiDeviceAsyncInferRequest>(asyncTreadSafeImpl), [](IInferRequest *p) { p->Release(); });
     asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+    return asyncRequest;
 }
 
-void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, InferenceEngine::Parameter> &config,
-        InferenceEngine::ResponseDesc * /* resp */) {
+void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, InferenceEngine::Parameter> &config) {
     auto priorities = config.find(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES);
     if (priorities == config.end() || config.size() > 1) {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str <<
@@ -284,17 +285,16 @@ void MultiDeviceExecutableNetwork::SetConfig(const std::map<std::string, Inferen
     }
 }
 
-void MultiDeviceExecutableNetwork::GetConfig(const std::string &name, InferenceEngine::Parameter &result,
-        InferenceEngine::ResponseDesc * /* resp */) const {
-    auto res = _config.find(name);
-    if (res != _config.end()) {
-        result =  res->second;
+InferenceEngine::Parameter MultiDeviceExecutableNetwork::GetConfig(const std::string &name) const {
+    auto it = _config.find(name);
+    if (it != _config.end()) {
+        return it->second;
     } else {
         THROW_IE_EXCEPTION << NOT_FOUND_str << name <<" not found in the ExecutableNetwork config";
     }
 }
 
-void MultiDeviceExecutableNetwork::GetMetric(const std::string &name, Parameter &result, ResponseDesc *resp) const {
+InferenceEngine::Parameter MultiDeviceExecutableNetwork::GetMetric(const std::string &name) const {
     if (name == METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)) {
         unsigned int res = 0u;
         for (auto n : _networksPerDevice) {
@@ -307,14 +307,14 @@ void MultiDeviceExecutableNetwork::GetMetric(const std::string &name, Parameter
                         << "Failed to query the metric for the " << n.first << " with error:" << iie.what();
            }
         }
-        result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, res);
+        IE_SET_METRIC_RETURN(OPTIMAL_NUMBER_OF_INFER_REQUESTS, res);
     } else if (name == METRIC_KEY(NETWORK_NAME)) {
         auto it = _networksPerDevice.begin();
         IE_ASSERT(it != _networksPerDevice.end());
-        result = IE_SET_METRIC(NETWORK_NAME, it->second.GetMetric(
+        IE_SET_METRIC_RETURN(NETWORK_NAME, it->second.GetMetric(
             METRIC_KEY(NETWORK_NAME)).as<std::string>());
     } else if (name == METRIC_KEY(SUPPORTED_METRICS)) {
-        result = IE_SET_METRIC(SUPPORTED_METRICS, {
+        IE_SET_METRIC_RETURN(SUPPORTED_METRICS, {
             METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS),
             METRIC_KEY(SUPPORTED_METRICS),
             METRIC_KEY(NETWORK_NAME),
@@ -322,7 +322,7 @@ void MultiDeviceExecutableNetwork::GetMetric(const std::string &name, Parameter
         });
     } else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
         std::vector<std::string> configKeys = { MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES };
-        result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, configKeys);
+        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, configKeys);
     } else {
         THROW_IE_EXCEPTION << "Unsupported Network metric: " << name;
     }
@@ -500,9 +500,10 @@ ExecutableNetworkInternal::Ptr MultiDeviceInferencePlugin::LoadExeNetworkImpl(co
                                                           enablePerfCounters);
 }
 
-void MultiDeviceInferencePlugin::QueryNetwork(const ICNNNetwork&                        network,
-                                              const std::map<std::string, std::string>& config,
-                                              QueryNetworkResult&                       queryResult) const {
+QueryNetworkResult MultiDeviceInferencePlugin::QueryNetwork(const ICNNNetwork&                        network,
+                                                            const std::map<std::string, std::string>& config) const {
+    QueryNetworkResult queryResult;
+
     if (GetCore() == nullptr) {
         THROW_IE_EXCEPTION << "Please, work with MULTI device via InferencEngine::Core object";
     }
@@ -563,5 +564,7 @@ void MultiDeviceInferencePlugin::QueryNetwork(const ICNNNetwork&
     for (auto&& supportedLayer : supportedLayers) {
         queryResult.supportedLayersMap[supportedLayer] = GetName();
     }
+
+    return queryResult;
 }
 }  // namespace MultiDevicePlugin
inference-engine/src/multi_device/multi_device.hpp
index 964d922..51e25e5 100644 (file)
@@ -104,11 +104,11 @@ public:
                                           const std::unordered_map<std::string, InferenceEngine::Parameter>&    config,
                                           const bool                                                            needPerfCounters = false);
 
-    void SetConfig(const std::map<std::string, InferenceEngine::Parameter> &config, InferenceEngine::ResponseDesc *resp) override;
-    void GetConfig(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
-    void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    void SetConfig(const std::map<std::string, InferenceEngine::Parameter> &config) override;
+    InferenceEngine::Parameter GetConfig(const std::string &name) const override;
+    InferenceEngine::Parameter GetMetric(const std::string &name) const override;
     void run(Task inferTask) override;
-    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr& asyncRequest) override;
+    InferenceEngine::IInferRequest::Ptr CreateInferRequest() override;
     InferenceEngine::InferRequestInternal::Ptr CreateInferRequestImpl(InferenceEngine::InputsDataMap networkInputs,
                                                                       InferenceEngine::OutputsDataMap networkOutputs) override;
     ~MultiDeviceExecutableNetwork() override;
@@ -158,9 +158,8 @@ public:
     void SetConfig(const std::map<std::string, std::string>& config) override;
     Parameter GetConfig(const std::string& name,
                         const std::map<std::string, Parameter> & options) const override;
-    void QueryNetwork(const InferenceEngine::ICNNNetwork&       network,
-                      const std::map<std::string, std::string>& config,
-                      InferenceEngine::QueryNetworkResult&      res) const override;
+    InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::ICNNNetwork&       network,
+                                                     const std::map<std::string, std::string>& config) const override;
     InferenceEngine::Parameter GetMetric(const std::string& name,
                                          const std::map<std::string, InferenceEngine::Parameter>& options) const override;
 
index c510486..fd86780 100644 (file)
@@ -51,7 +51,7 @@ public:
     }
 
     StatusCode CreateInferRequest(IInferRequest::Ptr& req, ResponseDesc* resp) noexcept override {
-        TO_STATUS(_impl->CreateInferRequest(req));
+        TO_STATUS(req = _impl->CreateInferRequest());
     }
 
     StatusCode Export(const std::string& modelFileName, ResponseDesc* resp) noexcept override {
@@ -63,7 +63,7 @@ public:
     }
 
     StatusCode GetExecGraphInfo(ICNNNetwork::Ptr& graphPtr, ResponseDesc* resp) noexcept override {
-        TO_STATUS(_impl->GetExecGraphInfo(graphPtr));
+        TO_STATUS(graphPtr = _impl->GetExecGraphInfo());
     }
 
     StatusCode QueryState(IMemoryState::Ptr& pState, size_t idx, ResponseDesc* resp) noexcept override {
@@ -91,19 +91,19 @@ public:
     }
 
     StatusCode SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* resp) noexcept override {
-        TO_STATUS(_impl->SetConfig(config, resp));
+        TO_STATUS(_impl->SetConfig(config));
     }
 
     StatusCode GetConfig(const std::string& name, Parameter& result, ResponseDesc* resp) const noexcept override {
-        TO_STATUS(_impl->GetConfig(name, result, resp));
+        TO_STATUS(result = _impl->GetConfig(name));
     }
 
     StatusCode GetMetric(const std::string& name, Parameter& result, ResponseDesc* resp) const noexcept override {
-        TO_STATUS(_impl->GetMetric(name, result, resp));
+        TO_STATUS(result = _impl->GetMetric(name));
     }
 
     StatusCode GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const noexcept override {
-        TO_STATUS(_impl->GetContext(pContext, resp));
+        TO_STATUS(pContext = _impl->GetContext());
     }
 
 private:
index e98cc5a..66b1260 100644 (file)
@@ -33,27 +33,6 @@ namespace InferenceEngine {
     }
 
 /**
- * @def TO_STATUSVAR(x, statusVar, descBufferVar)
- * @brief Converts C++ exceptioned function call to a status variable
- * @ingroup ie_dev_api_error_debug
- */
-#define TO_STATUSVAR(x, statusVar, descBufferVar)                                                                      \
-    do {                                                                                                               \
-        try {                                                                                                          \
-            x;                                                                                                         \
-            statusVar = OK;                                                                                            \
-        } catch (const InferenceEngine::details::InferenceEngineException& iex) {                                      \
-            statusVar =                                                                                                \
-                InferenceEngine::DescriptionBuffer((iex.hasStatus() ? iex.getStatus() : GENERAL_ERROR), descBufferVar) \
-                << iex.what();                                                                                         \
-        } catch (const std::exception& ex) {                                                                           \
-            statusVar = InferenceEngine::DescriptionBuffer(GENERAL_ERROR, descBufferVar) << ex.what();                 \
-        } catch (...) {                                                                                                \
-            statusVar = InferenceEngine::DescriptionBuffer(UNEXPECTED);                                                \
-        }                                                                                                              \
-    } while (false)
-
-/**
  * @def TO_STATUS_NO_RESP(x)
  * @brief Converts C++ exceptioned function call into a status code. Does not work with a ResponseDesc object
  * @ingroup ie_dev_api_error_debug
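With TO_STATUSVAR removed, TO_STATUS remains the single bridge between the exception-based internal API and the StatusCode/ResponseDesc ABI kept by the base wrapper classes. A wrapper such as ExecutableNetworkBase::GetConfig therefore behaves roughly like the sketch below; it is inferred from the removed macro above and the call sites, not the literal TO_STATUS definition, and the include paths are approximate.

    #include <description_buffer.hpp>                                          // DescriptionBuffer (plugin_api)
    #include <cpp_interfaces/interface/ie_iexecutable_network_internal.hpp>

    using namespace InferenceEngine;

    static StatusCode getConfigWithStatus(const IExecutableNetworkInternal::Ptr& impl,
                                          const std::string& name,
                                          Parameter& result,
                                          ResponseDesc* resp) noexcept {
        try {
            result = impl->GetConfig(name);  // the refactored call returns by value and throws on error
            return OK;
        } catch (const details::InferenceEngineException& iex) {
            return DescriptionBuffer(iex.hasStatus() ? iex.getStatus() : GENERAL_ERROR, resp) << iex.what();
        } catch (const std::exception& ex) {
            return DescriptionBuffer(GENERAL_ERROR, resp) << ex.what();
        } catch (...) {
            return DescriptionBuffer(UNEXPECTED);
        }
    }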
index f432b63..41f5d16 100644 (file)
@@ -75,8 +75,7 @@ public:
         networkModel << strm.rdbuf();
     }
 
-    void GetExecGraphInfo(ICNNNetwork::Ptr& graphPtr) override {
-        (void)graphPtr;
+    CNNNetwork GetExecGraphInfo() override {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
@@ -90,11 +89,10 @@ public:
     }
 
     std::vector<IMemoryStateInternal::Ptr> QueryState() override {
-        // meaning base plugin reports as no state available - plugin owners need to create proper override of this
-        return {};
+        THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
-    void SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* /* resp */) override {
+    void SetConfig(const std::map<std::string, Parameter>& config) override {
         if (config.empty()) {
             THROW_IE_EXCEPTION << "The list of configuration values is empty";
         }
@@ -102,15 +100,17 @@ public:
                            << config.begin()->first;
     }
 
-    void GetConfig(const std::string& /* name */, Parameter& /* result */, ResponseDesc* /* resp */) const override {
+    Parameter GetConfig(const std::string& name) const override {
+        (void)name;
         THROW_IE_EXCEPTION << "GetConfig for executable network is not supported by this device";
     }
 
-    void GetMetric(const std::string& /* name */, Parameter& /* result */, ResponseDesc* /* resp */) const override {
+    Parameter GetMetric(const std::string& name) const override {
+        (void)name;
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
-    void GetContext(RemoteContext::Ptr& /* pContext */, ResponseDesc* /* resp */) const override {
+    RemoteContext::Ptr GetContext() const override {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
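A behavioural consequence of the base-class change: QueryState() in ExecutableNetworkInternal no longer returns an empty vector but throws NOT_IMPLEMENTED, like the other optional entry points. A caller that relied on the old empty default can treat the exception as "no states available"; a minimal sketch, assuming exec is an IExecutableNetworkInternal::Ptr obtained elsewhere:

    std::vector<InferenceEngine::IMemoryStateInternal::Ptr> states;
    try {
        states = exec->QueryState();
    } catch (const InferenceEngine::details::InferenceEngineException&) {
        // the plugin did not override QueryState(); proceed as if there are no memory states
    }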
 
index 4eb49c6..3bd2baa 100644 (file)
@@ -32,15 +32,18 @@ public:
 
     /**
      * @brief      Creates an asynchronous inference request public implementation.
-     * @param      asyncRequest  The asynchronous request public implementation
+     * @return     The asynchronous request public implementation
      */
-    void CreateInferRequest(IInferRequest::Ptr& asyncRequest) override {
+    IInferRequest::Ptr CreateInferRequest() override {
+        IInferRequest::Ptr asyncRequest;
         auto asyncRequestImpl = this->CreateAsyncInferRequestImpl(_networkInputs, _networkOutputs);
         asyncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
+
         asyncRequest.reset(new InferRequestBase<AsyncInferRequestInternal>(asyncRequestImpl), [](IInferRequest* p) {
             p->Release();
         });
-        asyncRequestImpl->SetPublicInterfacePtr(asyncRequest);
+        asyncRequestImpl->SetPointerToPublicInterface(asyncRequest);
+        return asyncRequest;
     }
 
 protected:
index dc34b4e..a78f43c 100644 (file)
@@ -46,33 +46,33 @@ public:
     }
 
     /**
-     * @brief Given optional implementation of creating asynchnous inference request to avoid
+     * @brief Provides a default implementation for creating an asynchronous inference request, removing the
      * need for it to be implemented by plugin
-     * @param asyncRequest shared_ptr for the created asynchnous inference request
+     * @return A shared pointer to the created asynchronous inference request
      */
-    void CreateInferRequest(IInferRequest::Ptr& asyncRequest) override {
+    IInferRequest::Ptr CreateInferRequest() override {
+        return CreateAsyncInferRequestFromSync();
+    }
+
+protected:
+    template <typename AsyncInferRequestType = AsyncInferRequestThreadSafeDefault>
+    IInferRequest::Ptr CreateAsyncInferRequestFromSync() {
+        IInferRequest::Ptr asyncRequest;
+
         auto syncRequestImpl = this->CreateInferRequestImpl(_networkInputs, _networkOutputs);
         syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
-        auto asyncTreadSafeImpl =
-            std::make_shared<AsyncInferRequestThreadSafeDefault>(syncRequestImpl, _taskExecutor, _callbackExecutor);
-        asyncRequest.reset(new InferRequestBase<AsyncInferRequestThreadSafeDefault>(asyncTreadSafeImpl),
-                           [](IInferRequest* p) {
-                               p->Release();
-                           });
-        asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
-    }
 
-    /**
-     * @brief Gets the executor.
-     * @return The executor.
-     */
-    ITaskExecutor::Ptr& GetExecutor() {
-        return _taskExecutor;
+        auto asyncThreadSafeImpl = std::make_shared<AsyncInferRequestType>(
+            syncRequestImpl, _taskExecutor, _callbackExecutor);
+        asyncRequest.reset(new InferRequestBase<AsyncInferRequestType>(asyncThreadSafeImpl),
+            [](IInferRequest *p) { p->Release(); });
+        asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+
+        return asyncRequest;
     }
 
-protected:
     /**
-     * @brief Create a synchronous inference request object used to infer the network
+     * @brief Creates a synchronous inference request object used to infer the network
      * @note Used by ExecutableNetworkThreadSafeDefault::CreateInferRequest as a plugin-specific implementation
      * @param networkInputs An input info map needed to create input blobs
      * @param networkOutputs An output data map needed to create output blobs
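The new protected CreateAsyncInferRequestFromSync<>() lets a plugin's executable network keep CreateInferRequest() as a one-liner while still choosing its own async request type through the template parameter. A minimal sketch, assuming the default AsyncInferRequestThreadSafeDefault; the class is illustrative and CreateInferRequestImpl(), which stays plugin-specific, is omitted.

    #include <cpp_interfaces/impl/ie_executable_network_thread_safe_default.hpp>

    class SketchThreadSafeNetwork : public InferenceEngine::ExecutableNetworkThreadSafeDefault {
    public:
        using InferenceEngine::ExecutableNetworkThreadSafeDefault::ExecutableNetworkThreadSafeDefault;

        InferenceEngine::IInferRequest::Ptr CreateInferRequest() override {
            // a plugin with its own async request type would instead call
            // CreateAsyncInferRequestFromSync<MyAsyncInferRequest>() (hypothetical type)
            return CreateAsyncInferRequestFromSync();
        }
    };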
index 8338576..b32d305 100644 (file)
@@ -50,7 +50,7 @@ public:
      * IInferRequest::CompletionCallback
      * @param ptr A weak pointer to InferRequestBase
      */
-    void SetPublicInterfacePtr(IInferRequest::Ptr ptr) {
+    void SetPointerToPublicInterface(IInferRequest::Ptr ptr) {
         _publicInterface = ptr;
     }
 
index a5cd16f..7fe1c30 100644 (file)
@@ -197,6 +197,11 @@ public:
         }
     }
 
+    void SetBatch(int batch) override {
+        (void)batch;
+        THROW_IE_EXCEPTION << "Dynamic batch is not supported";
+    }
+
     /**
      * @brief      Sets the pointer to executable network internal.
      * @note       Needed to correctly handle ownership between objects.
@@ -218,10 +223,19 @@ public:
         }
     }
 
-    void SetBatch(int batch) override {
-        (void)batch;
-        THROW_IE_EXCEPTION << "Dynamic batch is not supported";
-    };
+protected:
+    InferenceEngine::InputsDataMap _networkInputs;  //!< Holds information about network inputs info
+    InferenceEngine::OutputsDataMap _networkOutputs;  //!< Holds information about network outputs data
+    InferenceEngine::BlobMap _inputs;  //!< A map of network input blobs
+    InferenceEngine::BlobMap _outputs;  //!< A map of network output blobs
+    std::map<std::string, PreProcessDataPtr> _preProcData;  //!< A map of pre-process data per input
+    int m_curBatch;  //!< Current batch value used in dynamic batching
+
+    /**
+     * @brief A shared pointer to ExecutableNetworkInternal interface
+     * @note Needed to correctly handle ownership between objects.
+     */
+    std::shared_ptr<ExecutableNetworkInternal> _exeNetwork;
 
     /**
      * @brief Checks and executes input data pre-processing if needed.
@@ -240,20 +254,6 @@ public:
         }
     }
 
-protected:
-    InferenceEngine::InputsDataMap _networkInputs;  //!< Holds information about network inputs info
-    InferenceEngine::OutputsDataMap _networkOutputs;  //!< Holds information about network outputs data
-    InferenceEngine::BlobMap _inputs;  //!< A map of network input blobs
-    InferenceEngine::BlobMap _outputs;  //!< A map of network output blobs
-    std::map<std::string, PreProcessDataPtr> _preProcData;  //!< A map of pre-process data per input
-    int m_curBatch;  //!< Current batch value used in dynamic batching
-
-    /**
-     * @brief A shared pointer to ExecutableNetworkInternal interface
-     * @note Needed to correctly handle ownership between objects.
-     */
-    std::shared_ptr<ExecutableNetworkInternal> _exeNetwork;
-
     /**
      * @brief Helper function to find input or output blob by name
      * @param name A name of input or output blob.
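Since the data members and execDataPreprocessing() are now protected rather than public, a plugin's synchronous request keeps using them from inside InferImpl(). A sketch only: the class is illustrative, GetPerformanceCounts() and the actual device execution are omitted, and the single-argument execDataPreprocessing(_inputs) call is an assumption about its signature.

    #include <cpp_interfaces/impl/ie_infer_request_internal.hpp>

    class SketchInferRequest : public InferenceEngine::InferRequestInternal {
    public:
        using InferenceEngine::InferRequestInternal::InferRequestInternal;

        void InferImpl() override {
            execDataPreprocessing(_inputs);  // resolve resize/mean pre-processing over the protected inputs map
            // ... run device-specific inference reading _inputs and filling the protected _outputs ...
        }
    };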
index d1654cb..14ebb91 100644 (file)
@@ -58,16 +58,34 @@ protected:
 public:
     ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
                                   const std::map<std::string, std::string>& config) override {
-        return LoadNetworkImplPrivate(network, config);
+        return LoadNetwork(network, config, nullptr);
     }
 
     ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
                                   RemoteContext::Ptr context) override {
-        return LoadNetworkImplPrivate(network, config, context);;
+        InputsDataMap networkInputs, networkInputsCloned;
+        OutputsDataMap networkOutputs, networkOutputsCloned;
+        network.getInputsInfo(networkInputs);
+        network.getOutputsInfo(networkOutputs);
+        copyInputOutputInfo(networkInputs, networkOutputs, networkInputsCloned, networkOutputsCloned);
+
+        ExecutableNetworkInternal::Ptr impl;
+        if (nullptr == context) {
+            impl = LoadExeNetworkImpl(network, config);
+        } else {
+            impl = LoadExeNetworkImpl(network, context, config);
+        }
+
+        impl->setNetworkInputs(networkInputsCloned);
+        impl->setNetworkOutputs(networkOutputsCloned);
+        impl->SetPointerToPlugin(shared_from_this());
+
+        auto executableNetwork = make_executable_network(impl);
+        return ExecutableNetwork(executableNetwork);
     }
 
-    IExecutableNetwork::Ptr ImportNetwork(const std::string& modelFileName,
-                                          const std::map<std::string, std::string>& config) override {
+    ExecutableNetwork ImportNetwork(const std::string& modelFileName,
+                                    const std::map<std::string, std::string>& config) override {
         (void)modelFileName;
         (void)config;
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
@@ -104,8 +122,7 @@ public:
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
-    void QueryNetwork(const ICNNNetwork& /*network*/, const std::map<std::string, std::string>& /*config*/,
-                      QueryNetworkResult& /*res*/) const override {
+    QueryNetworkResult QueryNetwork(const ICNNNetwork& /*network*/, const std::map<std::string, std::string>& /*config*/) const override {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
@@ -135,39 +152,6 @@ public:
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 
-private:
-    /**
-     * @brief A helper method which clones a ICNNNetwork object, keeps InputsDataMap and OutputsDataMap data maps,
-     * and creates an IExecutableNetwork object
-     * @param network An input ICNNNetwork object used to create an executable network object
-     * @param config A map of string -> string configuration options.
-     * @param context An optional pointer to RemoteContext
-     * @return An output executable network object
-     */
-    ExecutableNetwork LoadNetworkImplPrivate(const ICNNNetwork& network,
-                                             const std::map<std::string, std::string>& config,
-                                             RemoteContext::Ptr context = nullptr) {
-        InputsDataMap networkInputs, networkInputsCloned;
-        OutputsDataMap networkOutputs, networkOutputsCloned;
-        network.getInputsInfo(networkInputs);
-        network.getOutputsInfo(networkOutputs);
-        copyInputOutputInfo(networkInputs, networkOutputs, networkInputsCloned, networkOutputsCloned);
-
-        ExecutableNetworkInternal::Ptr impl;
-        if (nullptr == context) {
-            impl = LoadExeNetworkImpl(network, config);
-        } else {
-            impl = LoadExeNetworkImpl(network, context, config);
-        }
-
-        impl->setNetworkInputs(networkInputsCloned);
-        impl->setNetworkOutputs(networkOutputsCloned);
-        impl->SetPointerToPlugin(shared_from_this());
-
-        auto executableNetwork = make_executable_network(impl);
-        return ExecutableNetwork(executableNetwork);
-    }
-
 protected:
     /**
      * @brief Creates an executable network from a parsed network object, users can create as many networks as they need
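With LoadNetworkImplPrivate folded into the public LoadNetwork(), the input/output cloning, SetPointerToPlugin() and ExecutableNetwork wrapping live in the base class, so a device plugin only supplies LoadExeNetworkImpl. A sketch with hypothetical types: SketchPlugin and SketchExecutableNetwork (and its constructor) are illustrative, not part of this patch.

    #include <memory>
    #include <cpp_interfaces/impl/ie_plugin_internal.hpp>

    class SketchPlugin : public InferenceEngine::InferencePluginInternal {
    public:
        InferenceEngine::ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(
                const InferenceEngine::ICNNNetwork& network,
                const std::map<std::string, std::string>& config) override {
            // compile `network` for the device; the base LoadNetwork() then clones the
            // input/output info, sets the plugin pointer and wraps the result
            return std::make_shared<SketchExecutableNetwork>(network, config);
        }
    };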
index 37f89da..9efdb66 100644 (file)
@@ -51,11 +51,12 @@ public:
     /**
      * @brief Create an inference request object used to infer the network
      *  Note: the returned request will have allocated input and output blobs (that can be changed later)
-     * @param req - shared_ptr for the created request
+     * @return A shared pointer to the created request
      */
-    virtual void CreateInferRequest(IInferRequest::Ptr& req) = 0;
+    virtual IInferRequest::Ptr CreateInferRequest() = 0;
 
     /**
+     * @deprecated Use IExecutableNetworkInternal::Export(std::ostream& networkModel)
      * @brief Export the current created executable network so it can be used later in the Import() main API
      * @param modelFileName - path to the location of the exported file
      */
@@ -69,11 +70,12 @@ public:
 
     /**
      * @brief Get executable graph information from a device
-     * @param graphPtr network ptr to store executable graph information
+     * @return A network object that stores executable graph information
      */
-    virtual void GetExecGraphInfo(ICNNNetwork::Ptr& graphPtr) = 0;
+    virtual CNNNetwork GetExecGraphInfo() = 0;
 
     /**
+     * @deprecated Need to implement GetVariablesInfo for ExecutableNetwork
      * @brief Queries memory states.
      * @return Returns memory states
      */
@@ -82,33 +84,28 @@ public:
     /**
      * @brief Sets configuration for current executable network
      * @param config Map of pairs: (config parameter name, config parameter value)
-     * @param resp Pointer to the response message that holds a description of an error if any occurred
      */
-    virtual void SetConfig(const std::map<std::string, Parameter>& config, ResponseDesc* resp) = 0;
+    virtual void SetConfig(const std::map<std::string, Parameter>& config) = 0;
 
     /**
      * @brief Gets configuration dedicated to plugin behaviour
-     * @param name - config key, can be found in ie_plugin_config.hpp
-     * @param result - value of config corresponding to config key
-     * @param resp Pointer to the response message that holds a description of an error if any occurred
+     * @param name A config key; possible keys can be found in ie_plugin_config.hpp
+     * @return A value of the config corresponding to the key
      */
-    virtual void GetConfig(const std::string& name, Parameter& result, ResponseDesc* resp) const = 0;
+    virtual Parameter GetConfig(const std::string& name) const = 0;
 
     /**
      * @brief Gets general runtime metric for dedicated hardware
-     * @param name  - metric name to request
-     * @param result - metric value corresponding to metric key
-     * @param resp - Pointer to the response message that holds a description of an error if any
-     *             occurred
+     * @param name  A metric name to request
+     * @return A metric value corresponding to the metric name
      */
-    virtual void GetMetric(const std::string& name, Parameter& result, ResponseDesc* resp) const = 0;
+    virtual Parameter GetMetric(const std::string& name) const = 0;
 
     /**
      * @brief Gets the remote context.
-     * @param pContext  A reference to a context
-     * @param resp A response
+     * @return A pointer to the remote context
      */
-    virtual void GetContext(RemoteContext::Ptr& pContext, ResponseDesc* resp) const = 0;
+    virtual RemoteContext::Ptr GetContext() const = 0;
 };
 
 }  // namespace InferenceEngine
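From the caller's side (the ExecutableNetworkBase wrapper, ie_plugin_cpp.hpp, tests) the out-parameters and ResponseDesc disappear entirely: values come back as return values and failures as exceptions. A short sketch; the exec pointer is assumed to come from a plugin, and the include paths are approximate.

    #include <ie_plugin_config.hpp>                                            // METRIC_KEY
    #include <cpp_interfaces/interface/ie_iexecutable_network_internal.hpp>

    void inspect(const InferenceEngine::IExecutableNetworkInternal::Ptr& exec) {
        InferenceEngine::IInferRequest::Ptr request = exec->CreateInferRequest();
        InferenceEngine::CNNNetwork runtimeGraph = exec->GetExecGraphInfo();
        std::string name = exec->GetMetric(METRIC_KEY(NETWORK_NAME)).as<std::string>();
        try {
            InferenceEngine::RemoteContext::Ptr ctx = exec->GetContext();
        } catch (const InferenceEngine::details::InferenceEngineException&) {
            // devices without remote context support now report it via an exception
        }
    }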
index 530c81b..67fac3d 100644 (file)
@@ -219,13 +219,14 @@ public:
     virtual RemoteContext::Ptr GetDefaultContext() = 0;
 
     /**
+     * @deprecated Use ImportNetwork(std::istream& networkModel, const std::map<std::string, std::string>& config)
      * @brief Creates an executable network from an previously exported network
      * @param modelFileName - path to the location of the exported file
      * @param config A string -> string map of parameters
-     * @return A reference to a shared ptr of the returned network interface
+     * @return An Executable network
      */
-    virtual IExecutableNetwork::Ptr ImportNetwork(const std::string& modelFileName,
-                                                  const std::map<std::string, std::string>& config) = 0;
+    virtual ExecutableNetwork ImportNetwork(const std::string& modelFileName,
+                                            const std::map<std::string, std::string>& config) = 0;
 
     /**
      * @brief Creates an executable network from an previously exported network using plugin implementation
@@ -241,7 +242,7 @@ public:
      * @brief Creates an executable network from an previously exported network using plugin implementation
      *        and removes Inference Engine magic and plugin name
      * @param networkModel Reference to network model output stream
-     * @param context - a pointer to plugin context derived from RemoteContext class used to
+     * @param context A pointer to plugin context derived from RemoteContext class used to
      *        execute the network
      * @param config A string -> string map of parameters
      * @return An Executable network
@@ -266,10 +267,9 @@ public:
      * @brief      Queries a plugin about supported layers in network
      * @param[in]  network  The network object to query
      * @param[in]  config   The map of configuration parameters
-     * @param      res      The result of query operator containing supported layers map
+     * @return     The result of the query containing a map of supported layers
      */
-    virtual void QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
-                              QueryNetworkResult& res) const = 0;
+    virtual QueryNetworkResult QueryNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config) const = 0;
 };
 
 }  // namespace InferenceEngine
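QueryNetwork follows the same pattern and now reports which layers the plugin supports by value. A sketch of the calling side; the plugin shared pointer, network and config are assumed to exist, and the Inference Engine headers declaring IInferencePlugin are omitted.

    #include <iostream>
    #include <map>
    #include <memory>
    #include <string>

    void reportSupport(const std::shared_ptr<InferenceEngine::IInferencePlugin>& plugin,
                       const InferenceEngine::ICNNNetwork& network,
                       const std::map<std::string, std::string>& config) {
        InferenceEngine::QueryNetworkResult res = plugin->QueryNetwork(network, config);
        for (const auto& entry : res.supportedLayersMap) {
            std::cout << entry.first << " -> " << entry.second << std::endl;  // layer name -> device name
        }
    }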
index f70c555..29389b9 100644 (file)
@@ -19,9 +19,9 @@
 /**
  * @brief A namespace with const values for Execution Graph parameters names.
  *  
- * Executable Graph Info is represented in ICNNNetwork format with general CNNLayer nodes inside
+ * Executable Graph Info is represented in CNNNetwork format with general ExecutionNode nodes inside
  * including connections between the nodes. Each node describes an executable hardware-specific
- * primitive and stores its parameters within CNNLayer::params map.
+ * primitive and stores its parameters within ExecutionNode::get_rt_info map.
  * There is a list of general keys for the parameters map.
  */
 namespace ExecGraphInfoSerialization {
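Consumers of the execution graph now get a CNNNetwork whose function consists of ExecutionNode instances, with per-node parameters stored in rt_info under the keys from this namespace. A rough sketch; decoding of the rt_info values is omitted since it depends on the Variant types a plugin stores, and getFunction() may return nullptr for non-ngraph representations.

    InferenceEngine::CNNNetwork runtimeGraph = exec->GetExecGraphInfo();  // `exec` as in the earlier sketch
    if (auto function = runtimeGraph.getFunction()) {
        for (const auto& node : function->get_ordered_ops()) {
            const auto& rtInfo = node->get_rt_info();
            for (const auto& entry : rtInfo) {
                // entry.first is a key from ExecGraphInfoSerialization; entry.second holds the value as an ngraph Variant
            }
        }
    }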
index 1570e54..aae4756 100644 (file)
@@ -677,7 +677,6 @@ void FormatParser::ParsePreProcess(pugi::xml_node& root) {
 
     auto meanSegmentPrecision = GetPrecisionAttr(ppNode, "mean-precision", Precision::UNSPECIFIED);
 
-    ResponseDesc resp;
     InferenceEngine::PreProcessChannel::Ptr preProcessChannel;
 
     int lastChanNo = -1;
index 0d183d2..0f7fb47 100644 (file)
@@ -5,16 +5,16 @@
 #pragma once
 
 #include <vpu/utils/perf_report.hpp>
-#include <ie_icnn_network.hpp>
+#include <cpp/ie_cnn_network.h>
 
 #include <vector>
 
 namespace vpu {
 
-InferenceEngine::ICNNNetwork::Ptr buildRuntimeGraphAsIeNet(
+InferenceEngine::CNNNetwork buildRuntimeGraphAsIeNet(
         GraphMetaInfo& graphMetaInfo,
         const std::vector<float>& perfInfo);
-InferenceEngine::ICNNNetwork::Ptr buildRuntimeGraph(
+InferenceEngine::CNNNetwork buildRuntimeGraph(
         GraphMetaInfo& graphMetaInfo,
         const std::vector<float>& perfInfo);
 
index 270a76a..28dee42 100644 (file)
@@ -26,7 +26,7 @@ std::map<std::string, std::string> extractMeta(const StageMetaInfo&);
 
 }  // namespace
 
-InferenceEngine::ICNNNetwork::Ptr buildRuntimeGraph(GraphMetaInfo& graphMetaInfo, const std::vector<float>& perfInfo) {
+InferenceEngine::CNNNetwork buildRuntimeGraph(GraphMetaInfo& graphMetaInfo, const std::vector<float>& perfInfo) {
     std::map<size_t, std::shared_ptr<ngraph::Node>> stageMetaIndexToNode;
     std::function<void(size_t)> createNodeFromMeta;
 
@@ -118,7 +118,7 @@ InferenceEngine::ICNNNetwork::Ptr buildRuntimeGraph(GraphMetaInfo& graphMetaInfo
     return net;
 }
 
-InferenceEngine::ICNNNetwork::Ptr buildRuntimeGraphAsIeNet(GraphMetaInfo& graphMetaInfo, const std::vector<float>& perfInfo) {
+InferenceEngine::CNNNetwork buildRuntimeGraphAsIeNet(GraphMetaInfo& graphMetaInfo, const std::vector<float>& perfInfo) {
     auto net = std::make_shared<InferenceEngine::details::CNNNetworkImpl>();
     net->setName(graphMetaInfo.graphName);
 
@@ -211,7 +211,7 @@ InferenceEngine::ICNNNetwork::Ptr buildRuntimeGraphAsIeNet(GraphMetaInfo& graphM
         net->setInputInfo(inputInfo);
     }
 
-    return net;
+    return InferenceEngine::CNNNetwork{net};
 }
 
 namespace {
index ea3c626..32e7903 100644 (file)
@@ -168,25 +168,25 @@ ExecutableNetwork::ExecutableNetwork(
     Import(blobFile, devicePool, config);
 }
 
-void ExecutableNetwork::GetMetric(const std::string &name, Parameter &result, ResponseDesc *resp) const {
+InferenceEngine::Parameter ExecutableNetwork::GetMetric(const std::string &name) const {
     if (name == METRIC_KEY(NETWORK_NAME)) {
-        result = IE_SET_METRIC(NETWORK_NAME, _graphDesc._name);
+        IE_SET_METRIC_RETURN(NETWORK_NAME, _graphDesc._name);
     } else if (name == METRIC_KEY(SUPPORTED_METRICS)) {
-        result = IE_SET_METRIC(SUPPORTED_METRICS, _supportedMetrics);
+        IE_SET_METRIC_RETURN(SUPPORTED_METRICS, _supportedMetrics);
     } else if (name == METRIC_KEY(SUPPORTED_CONFIG_KEYS)) {
-        result = IE_SET_METRIC(SUPPORTED_CONFIG_KEYS, std::vector<std::string>());
+        IE_SET_METRIC_RETURN(SUPPORTED_CONFIG_KEYS, std::vector<std::string>());
     } else if (name == METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)) {
-        result = IE_SET_METRIC(OPTIMAL_NUMBER_OF_INFER_REQUESTS, static_cast<unsigned int>(2u * _actualNumExecutors));
+        IE_SET_METRIC_RETURN(OPTIMAL_NUMBER_OF_INFER_REQUESTS, static_cast<unsigned int>(2u * _actualNumExecutors));
     } else if (name == METRIC_KEY(DEVICE_THERMAL)) {
-        result = IE_SET_METRIC(DEVICE_THERMAL, _executor->GetThermal(_device));
+        IE_SET_METRIC_RETURN(DEVICE_THERMAL, _executor->GetThermal(_device));
     } else {
         THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
     }
 }
 
-void ExecutableNetwork::GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr& graphPtr) {
+InferenceEngine::CNNNetwork ExecutableNetwork::GetExecGraphInfo() {
     auto perfInfo = _executor->getPerfTimeInfo(_graphDesc._graphHandle);
-    graphPtr = buildRuntimeGraph(_graphMetaData, perfInfo);
+    return buildRuntimeGraph(_graphMetaData, perfInfo);
 }
 
 }  // namespace MyriadPlugin
index f0c5d88..3b1efe5 100644 (file)
@@ -73,7 +73,8 @@ public:
                                                     _graphMetaData.stagesMeta, _config, _log, _executor);
     }
 
-    void CreateInferRequest(InferenceEngine::IInferRequest::Ptr &asyncRequest) override {
+    InferenceEngine::IInferRequest::Ptr CreateInferRequest() override {
+        InferenceEngine::IInferRequest::Ptr asyncRequest;
         if (_device == nullptr || !_device->isBooted()) {
             THROW_IE_EXCEPTION << "Can not create infer request: there is no available devices with platform "
                                << _device->_platform;
@@ -85,12 +86,13 @@ public:
                                                                     _executor);
         syncRequestImpl->setPointerToExecutableNetworkInternal(shared_from_this());
         auto taskExecutorGetResult = getNextTaskExecutor();
-        auto asyncTreadSafeImpl = std::make_shared<MyriadAsyncInferRequest>(
+        auto asyncThreadSafeImpl = std::make_shared<MyriadAsyncInferRequest>(
                 syncRequestImpl, _taskExecutor, _callbackExecutor, taskExecutorGetResult);
         asyncRequest.reset(new InferenceEngine::InferRequestBase<InferenceEngine::AsyncInferRequestThreadSafeDefault>(
-                           asyncTreadSafeImpl),
+                           asyncThreadSafeImpl),
                            [](InferenceEngine::IInferRequest *p) { p->Release(); });
-        asyncTreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+        asyncThreadSafeImpl->SetPointerToPublicInterface(asyncRequest);
+        return asyncRequest;
     }
 
     void Export(std::ostream& model) override {
@@ -107,9 +109,9 @@ public:
         }
     }
 
-    void GetMetric(const std::string &name, InferenceEngine::Parameter &result, InferenceEngine::ResponseDesc *resp) const override;
+    InferenceEngine::Parameter GetMetric(const std::string &name) const override;
 
-    void GetExecGraphInfo(InferenceEngine::ICNNNetwork::Ptr &graphPtr) override;
+    InferenceEngine::CNNNetwork GetExecGraphInfo() override;
 
     void Import(std::istream& strm,
                 std::vector<DevicePtr> &devicePool,
index 60123a5..d3b0874 100644 (file)
@@ -66,11 +66,11 @@ Parameter Engine::GetConfig(const std::string& name, const std::map<std::string,
     return result;
 }
 
-void Engine::QueryNetwork(
+QueryNetworkResult Engine::QueryNetwork(
         const ICNNNetwork& network,
-        const std::map<std::string, std::string>& config,
-        QueryNetworkResult& res) const {
+        const std::map<std::string, std::string>& config) const {
     VPU_PROFILE(QueryNetwork);
+    QueryNetworkResult res;
 
     auto parsedConfigCopy = _parsedConfig;
     parsedConfigCopy.update(config);
@@ -236,6 +236,8 @@ void Engine::QueryNetwork(
             res.supportedLayersMap.insert({ layerName, GetName() });
         }
     }
+
+    return res;
 }
 
 Engine::Engine(std::shared_ptr<IMvnc> mvnc) :
@@ -283,7 +285,7 @@ InferenceEngine::ExecutableNetwork Engine::ImportNetwork(
     return make_executable_network(executableNetwork);
 }
 
-IExecutableNetwork::Ptr Engine::ImportNetwork(
+InferenceEngine::ExecutableNetwork Engine::ImportNetwork(
         const std::string& modelFileName,
         const std::map<std::string, std::string>& config) {
     VPU_PROFILE(ImportNetwork);
index 3d0d4eb..0a8a950 100644 (file)
@@ -31,14 +31,13 @@ public:
             const ie::ICNNNetwork& network,
             const std::map<std::string, std::string>& config) override;
 
-    void QueryNetwork(
+    ie::QueryNetworkResult QueryNetwork(
             const ie::ICNNNetwork& network,
-            const std::map<std::string, std::string>& config,
-            ie::QueryNetworkResult& res) const override;
+            const std::map<std::string, std::string>& config) const override;
 
     using ie::InferencePluginInternal::ImportNetwork;
 
-    ie::IExecutableNetwork::Ptr ImportNetwork(
+    ie::ExecutableNetwork ImportNetwork(
             const std::string& modelFileName,
             const std::map<std::string, std::string>& config) override;
 
index 52acafc..0b9348f 100644 (file)
@@ -9,7 +9,7 @@
 #include <vector>
 
 #include "ie_input_info.hpp"
-#include "ie_icnn_network.hpp"
+#include "cpp/ie_cnn_network.h"
 #include "ie_iexecutable_network.hpp"
 
 #include <cpp_interfaces/impl/ie_executable_network_internal.hpp>
@@ -26,9 +26,9 @@ class MockExecutableNetworkInternal : public ExecutableNetworkInternal {
 public:
     MOCK_METHOD1(setNetworkInputs, void(InputsDataMap));
     MOCK_METHOD1(setNetworkOutputs, void(OutputsDataMap));
-    MOCK_METHOD1(CreateInferRequest, void(IInferRequest::Ptr &));
+    MOCK_METHOD0(CreateInferRequest, IInferRequest::Ptr(void));
     MOCK_METHOD1(Export, void(const std::string &));
-    MOCK_METHOD1(GetExecGraphInfo, void(ICNNNetwork::Ptr &));
+    MOCK_METHOD0(GetExecGraphInfo, CNNNetwork(void));
     void WrapOstreamExport(std::ostream& networkModel) {
         ExecutableNetworkInternal::Export(networkModel);
     }
index 7878d67..9cec0ff 100644 (file)
@@ -23,16 +23,16 @@ using namespace InferenceEngine;
 
 class MockIExecutableNetworkInternal : public IExecutableNetworkInternal {
 public:
-    MOCK_CONST_METHOD0(GetOutputsInfo, ConstOutputsDataMap());
-    MOCK_CONST_METHOD0(GetInputsInfo, ConstInputsDataMap());
-    MOCK_METHOD1(CreateInferRequest, void(IInferRequest::Ptr &));
+    MOCK_CONST_METHOD0(GetOutputsInfo, ConstOutputsDataMap(void));
+    MOCK_CONST_METHOD0(GetInputsInfo, ConstInputsDataMap(void));
+    MOCK_METHOD0(CreateInferRequest, IInferRequest::Ptr(void));
     MOCK_METHOD1(Export, void(const std::string &));
     void Export(std::ostream &) override {};
-    MOCK_METHOD0(QueryState, std::vector<IMemoryStateInternal::Ptr>());
-    MOCK_METHOD1(GetExecGraphInfo, void(ICNNNetwork::Ptr &));
+    MOCK_METHOD0(QueryState, std::vector<IMemoryStateInternal::Ptr>(void));
+    MOCK_METHOD0(GetExecGraphInfo, CNNNetwork(void));
 
-    MOCK_METHOD2(SetConfig, void(const std::map<std::string, Parameter> &config, ResponseDesc *resp));
-    MOCK_CONST_METHOD3(GetConfig, void(const std::string &name, Parameter &result, ResponseDesc *resp));
-    MOCK_CONST_METHOD3(GetMetric, void(const std::string &name, Parameter &result, ResponseDesc *resp));
-    MOCK_CONST_METHOD2(GetContext, void(RemoteContext::Ptr &pContext, ResponseDesc *resp));
+    MOCK_METHOD1(SetConfig, void(const std::map<std::string, Parameter> &config));
+    MOCK_CONST_METHOD1(GetConfig, Parameter(const std::string &name));
+    MOCK_CONST_METHOD1(GetMetric, Parameter(const std::string &name));
+    MOCK_CONST_METHOD0(GetContext, RemoteContext::Ptr(void));
 };
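The mocks mirror the interface change, so test expectations switch from matching an output argument to returning a value. A sketch assuming googlemock and the mock class above:

    using ::testing::Return;

    MockIExecutableNetworkInternal mock;
    EXPECT_CALL(mock, GetMetric(METRIC_KEY(NETWORK_NAME)))
        .WillOnce(Return(InferenceEngine::Parameter{std::string{"mockNet"}}));

    auto networkName = mock.GetMetric(METRIC_KEY(NETWORK_NAME)).as<std::string>();  // "mockNet"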
index 36bc0e3..8e31e8e 100644 (file)
@@ -13,9 +13,9 @@
 class MockIInferencePlugin : public InferenceEngine::IInferencePlugin {
 public:
     MOCK_METHOD1(AddExtension, void(InferenceEngine::IExtensionPtr));
-    MOCK_METHOD3(LoadNetwork, void(IExecutableNetwork::Ptr&,
+    MOCK_METHOD2(LoadNetwork, InferenceEngine::ExecutableNetwork(
                 const ICNNNetwork&, const std::map<std::string, std::string>&));
-    MOCK_METHOD2(ImportNetwork, IExecutableNetwork::Ptr(
+    MOCK_METHOD2(ImportNetwork, InferenceEngine::ExecutableNetwork(
                 const std::string&, const std::map<std::string, std::string>&));
     MOCK_METHOD1(SetConfig, void(const std::map<std::string, std::string> &));
 
index ac1b267..0632a65 100644 (file)
@@ -226,19 +226,19 @@ protected:
 // CreateInferRequest
 TEST_F(ExecutableNetworkBaseTests, canForwardCreateInferRequest) {
     IInferRequest::Ptr req;
-    EXPECT_CALL(*mock_impl.get(), CreateInferRequest(Ref(req))).Times(1);
+    EXPECT_CALL(*mock_impl.get(), CreateInferRequest()).Times(1).WillRepeatedly(Return(req));
     ASSERT_EQ(OK, exeNetwork->CreateInferRequest(req, &dsc));
 }
 
 TEST_F(ExecutableNetworkBaseTests, canReportErrorInCreateInferRequest) {
-    EXPECT_CALL(*mock_impl.get(), CreateInferRequest(_)).WillOnce(Throw(std::runtime_error("compare")));
+    EXPECT_CALL(*mock_impl.get(), CreateInferRequest()).WillOnce(Throw(std::runtime_error("compare")));
     IInferRequest::Ptr req;
-    ASSERT_NE(exeNetwork->CreateInferRequest(req, &dsc), OK);
+    ASSERT_NE(OK, exeNetwork->CreateInferRequest(req, &dsc));
     ASSERT_STREQ(dsc.msg, "compare");
 }
 
 TEST_F(ExecutableNetworkBaseTests, canCatchUnknownErrorInCreateInferRequest) {
-    EXPECT_CALL(*mock_impl.get(), CreateInferRequest(_)).WillOnce(Throw(5));
+    EXPECT_CALL(*mock_impl.get(), CreateInferRequest()).WillOnce(Throw(5));
     IInferRequest::Ptr req;
     ASSERT_EQ(UNEXPECTED, exeNetwork->CreateInferRequest(req, nullptr));
 }
index 036db22..9c753d7 100644 (file)
@@ -69,7 +69,7 @@ TEST(MKLDNNLayersTests, DumpSimpleGraph) {
     graph.CreateGraph(net, extMgr, cache);
 
     auto dump_net = dump_graph_as_ie_net(graph);
-    auto layers = details::CNNNetSortTopologically(*dump_net);
+    auto layers = details::CNNNetSortTopologically(dump_net);
 
     ASSERT_EQ(layers.size(), 4);
     ASSERT_EQ(layers[0]->type, "Input");
index 14971d2..6e83b9b 100644 (file)
@@ -1203,8 +1203,7 @@ TEST_F(MKLDNNGraphStructureTests, TestOutputAfterInplacePlusConcat) {
     InferenceEngine::OutputsDataMap _networkOutputs = network.getOutputsInfo();
     execNetwork->setNetworkInputs(_networkInputs);
     execNetwork->setNetworkOutputs(_networkOutputs);
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    execNetwork->CreateInferRequest(inferRequest);
+    InferenceEngine::IInferRequest::Ptr inferRequest = execNetwork->CreateInferRequest();
 
     InferenceEngine::TensorDesc desc(InferenceEngine::Precision::FP32, {1, 3, 2, 2}, InferenceEngine::NCHW);
     InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float>(desc);
@@ -1719,8 +1718,7 @@ TEST_F(MKLDNNGraphStructureTests, TestResnetPart) {
     InferenceEngine::OutputsDataMap _networkOutputs = network.getOutputsInfo();
     execNetwork->setNetworkInputs(_networkInputs);
     execNetwork->setNetworkOutputs(_networkOutputs);
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    execNetwork->CreateInferRequest(inferRequest);
+    InferenceEngine::IInferRequest::Ptr inferRequest = execNetwork->CreateInferRequest();
 
     InferenceEngine::TensorDesc desc(InferenceEngine::Precision::FP32, {1, 3, 224, 224}, InferenceEngine::NCHW);
     InferenceEngine::Blob::Ptr src = InferenceEngine::make_shared_blob<float>(desc);
@@ -1869,8 +1867,7 @@ TEST_F(MKLDNNGraphStructureTests, TestConcatAfterConcat) {
     InferenceEngine::OutputsDataMap _networkOutputs = network.getOutputsInfo();
     execNetwork->setNetworkInputs(_networkInputs);
     execNetwork->setNetworkOutputs(_networkOutputs);
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    execNetwork->CreateInferRequest(inferRequest);
+    InferenceEngine::IInferRequest::Ptr inferRequest = execNetwork->CreateInferRequest();
 
     InferenceEngine::TensorDesc desc1(InferenceEngine::Precision::FP32, {1, 3, 20, 20}, InferenceEngine::NCHW);
     InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>(desc1);
@@ -2050,8 +2047,7 @@ TEST_F(MKLDNNGraphStructureTests, Test2ConcatFromConcat) {
     InferenceEngine::OutputsDataMap _networkOutputs = network.getOutputsInfo();
     execNetwork->setNetworkInputs(_networkInputs);
     execNetwork->setNetworkOutputs(_networkOutputs);
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    execNetwork->CreateInferRequest(inferRequest);
+    InferenceEngine::IInferRequest::Ptr inferRequest = execNetwork->CreateInferRequest();
 
     InferenceEngine::TensorDesc desc1(InferenceEngine::Precision::FP32, {1, 3, 2, 2}, InferenceEngine::NCHW);
     InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>(desc1);
@@ -2382,8 +2378,7 @@ TEST_F(MKLDNNGraphStructureTests, TestLoadTopologyWithConstLayer) {
     InferenceEngine::OutputsDataMap _networkOutputs = network.getOutputsInfo();
     execNetwork->setNetworkInputs(_networkInputs);
     execNetwork->setNetworkOutputs(_networkOutputs);
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    execNetwork->CreateInferRequest(inferRequest);
+    InferenceEngine::IInferRequest::Ptr inferRequest = execNetwork->CreateInferRequest();
 
     InferenceEngine::TensorDesc desc1(InferenceEngine::Precision::FP32, {1, 3, 20, 20}, InferenceEngine::NCHW);
     InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>(desc1);
@@ -2530,8 +2525,7 @@ TEST_F(MKLDNNGraphStructureTests, TestLoadTopologyWithEltwiseBeforeConcat) {
     InferenceEngine::OutputsDataMap _networkOutputs = network.getOutputsInfo();
     execNetwork->setNetworkInputs(_networkInputs);
     execNetwork->setNetworkOutputs(_networkOutputs);
-    InferenceEngine::IInferRequest::Ptr inferRequest;
-    execNetwork->CreateInferRequest(inferRequest);
+    InferenceEngine::IInferRequest::Ptr inferRequest = execNetwork->CreateInferRequest();
 
     InferenceEngine::TensorDesc desc1(InferenceEngine::Precision::FP32, {1, 3, 20, 20}, InferenceEngine::NCHW);
     InferenceEngine::Blob::Ptr src1 = InferenceEngine::make_shared_blob<float>(desc1);